code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
"""
import pytest
import os
import tarfile
from pathlib import Path
import nibabel as nib
import numpy as np
from ....tests.resource import setup as setuptestresources
from ....resource import get as getresource
from ..flame1 import flame1
from ...fixes import FLAMEO as FSLFLAMEO
from nipype.interfaces import fsl, ants
from nipype.pipeline import engine as pe
from templateflow.api import get as get_template
from ...imagemaths.merge import _merge, _merge_mask
from ...stats.model import _group_model
from ....utils import first
@pytest.fixture(scope="module")
def wakemandg_hensonrn(tmp_path_factory):
    """Extract the wakemandg_hensonrn stat-map resources into a module-scoped
    temporary directory and return a dict mapping each statmap suffix to its
    per-subject file list, plus the subject ids and covariate spreadsheet path.
    """
    work_dir = tmp_path_factory.mktemp(basename="wakemandg_hensonrn")
    os.chdir(str(work_dir))
    setuptestresources()
    archive_path = getresource("wakemandg_hensonrn_statmaps.tar.gz")
    with tarfile.open(archive_path) as archive:
        archive.extractall(work_dir)
    subject_ids = [f"{n + 1:02d}" for n in range(16)]
    file_lists = {}
    for suffix in ["stat-effect_statmap", "stat-variance_statmap", "mask"]:
        file_lists[suffix] = [
            work_dir / (
                f"sub-{subject_id}_task-faces_feature-taskBased_"
                f"taskcontrast-facesGtScrambled_model-aggregateTaskBasedAcrossRuns_"
                f"contrast-intercept_{suffix}.nii.gz"
            )
            for subject_id in subject_ids
        ]
    file_lists["subjects"] = subject_ids
    file_lists["spreadsheet"] = work_dir / "subjects_age_sex.csv"
    return file_lists
@pytest.fixture(scope="module")
def mni_downsampled(tmp_path_factory):
    """Resample the MNI152NLin2009cAsym brain mask template to a coarse
    6 mm isotropic grid and return the path of the resampled image."""
    work_dir = tmp_path_factory.mktemp(basename="mni_downsampled")
    os.chdir(str(work_dir))
    template_mask = get_template(
        "MNI152NLin2009cAsym", resolution=2, desc="brain", suffix="mask"
    )
    interface = ants.ResampleImageBySpacing(
        dimension=3,
        input_image=template_mask,
        out_spacing=(6, 6, 6),
    )
    return interface.run().outputs.output_image
@pytest.fixture(scope="module")
def wakemandg_hensonrn_downsampled(tmp_path_factory, wakemandg_hensonrn, mni_downsampled):
    """Resample every existing image of the wakemandg_hensonrn fixture onto the
    downsampled MNI grid (nearest-neighbor, identity transform) so the FLAME
    comparison test runs quickly."""
    work_dir = tmp_path_factory.mktemp(basename="wakemandg_hensonrn_downsampled")
    os.chdir(str(work_dir))
    def _resample(image_path):
        # Identity transform + nearest neighbor: pure regridding, no warping.
        interface = ants.ApplyTransforms(
            dimension=3,
            input_image_type=0,
            input_image=image_path,
            reference_image=mni_downsampled,
            interpolation="NearestNeighbor",
            transforms=["identity"]
        )
        return interface.run().outputs.output_image
    downsampled = dict()
    for key, value in wakemandg_hensonrn.items():
        if not isinstance(value, list):
            # Non-file entries (subject ids, spreadsheet path) pass through.
            downsampled[key] = value
        else:
            downsampled[key] = [
                _resample(f) if Path(f).exists() else f for f in value
            ]
    return downsampled
@pytest.mark.timeout(600)
@pytest.mark.parametrize("use_var_cope", [False, True])
def test_FLAME1(tmp_path, wakemandg_hensonrn_downsampled, use_var_cope):
    """Compare halfpipe's `flame1` implementation against FSL's FLAMEO.

    Runs the same group model (age + reaction time, with inferred contrasts)
    through an FSL workflow and through `flame1`, then checks that the
    resulting cope/tstat/fstat/tdof maps agree within loose tolerances,
    both with and without variance cope inputs (parametrized).
    """
    os.chdir(str(tmp_path))
    # prepare
    data = wakemandg_hensonrn_downsampled
    cope_files = data["stat-effect_statmap"]
    var_cope_files = data["stat-variance_statmap"]
    mask_files = data["mask"]
    subjects = data["subjects"]
    spreadsheet_file = data["spreadsheet"]
    # Build the shared design (regressors + contrasts) used by both paths.
    regressors, contrasts, _ = _group_model(
        subjects=subjects,
        spreadsheet=spreadsheet_file,
        variabledicts=[
            {"name": "Sub", "type": "id"},
            {"name": "Age", "type": "continuous"},
            {"name": "ReactionTime", "type": "categorical"},
        ],
        contrastdicts=[
            {"variable": ["Age"], "type": "infer"},
            {"variable": ["ReactionTime"], "type": "infer"}
        ]
    )
    # run FSL
    # FLAMEO expects 4D images merged across subjects plus a combined mask.
    merge_cope_file = _merge(cope_files, "t")
    merge_var_cope_file = _merge(var_cope_files, "t")
    merge_mask_file = _merge_mask(mask_files)
    workflow = pe.Workflow("comparison", base_dir=str(tmp_path))
    multipleregressdesign = pe.Node(
        fsl.MultipleRegressDesign(
            regressors=regressors,
            contrasts=contrasts,
        ),
        name="multipleregressdesign",
    )
    flameo = pe.Node(
        FSLFLAMEO(
            run_mode="flame1",
            cope_file=merge_cope_file,
            mask_file=merge_mask_file,
        ),
        name="flameo"
    )
    if use_var_cope:
        flameo.inputs.var_cope_file = merge_var_cope_file
    workflow.connect(multipleregressdesign, "design_mat", flameo, "design_file")
    workflow.connect(multipleregressdesign, "design_con", flameo, "t_con_file")
    workflow.connect(multipleregressdesign, "design_fts", flameo, "f_con_file")
    workflow.connect(multipleregressdesign, "design_grp", flameo, "cov_split_file")
    execgraph = workflow.run()
    # retrieve flameo again
    # The node object in the executed graph carries the run results; the
    # local `flameo` variable above does not.
    for node in execgraph.nodes():
        if node.name == "flameo":
            flameo = node
    result = flameo.result
    r0 = dict(
        cope=result.outputs.copes[0],
        tstat=result.outputs.tstats[0],
        fstat=first(result.outputs.fstats),
        tdof=result.outputs.tdof[0],
    )
    # run halfpipe
    if use_var_cope:
        var_cope_files_or_none = var_cope_files
    else:
        var_cope_files_or_none = None
    result = flame1(
        cope_files=cope_files,
        var_cope_files=var_cope_files_or_none,
        mask_files=mask_files,
        regressors=regressors,
        contrasts=contrasts,
        num_threads=1,
    )
    # NOTE(review): r0 takes the first FSL f-stat while r1 indexes the third
    # entry of the halfpipe "fstats" list -- presumably these refer to the
    # same contrast given halfpipe's output ordering; verify.
    r1 = dict(
        cope=result["copes"][0],
        tstat=result["tstats"][0],
        fstat=result["fstats"][2],
        tdof=result["tdof"][0],
    )
    # compare
    # Only compare voxels inside the merged mask.
    mask = nib.load(merge_mask_file).get_fdata() > 0
    for k in set(r0.keys()) & set(r1.keys()):
        a0 = nib.load(r0[k]).get_fdata()[mask]
        a1 = nib.load(r1[k]).get_fdata()[mask]
        # weak criteria, determined post-hoc
        # we don't expect exactly identical results, because FSL and numpy
        # use different numerics code and we use double precision while FSL
        # uses single precision floating point
        # so these assertions are here to verify that the small differences
        # will not get any larger with future changes or optimizations
        # no more than one percent of voxels can be more than one percent different
        assert np.isclose(a0, a1, rtol=1e-2).mean() > 0.99, f"Too many diverging voxels for {k}"
        # mean error average needs to be below 0.05
        assert np.abs(a0 - a1).mean() < 0.05, f"Too high mean error average for {k}"
| [
"nipype.interfaces.ants.ApplyTransforms",
"numpy.abs",
"tarfile.open",
"numpy.isclose",
"nibabel.load",
"pathlib.Path",
"templateflow.api.get",
"nipype.interfaces.ants.ResampleImageBySpacing",
"pytest.mark.parametrize",
"nipype.interfaces.fsl.MultipleRegressDesign",
"pytest.fixture",
"pytest.m... | [((683, 713), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (697, 713), False, 'import pytest\n'), ((1566, 1596), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1580, 1596), False, 'import pytest\n'), ((1996, 2026), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (2010, 2026), False, 'import pytest\n'), ((2806, 2830), 'pytest.mark.timeout', 'pytest.mark.timeout', (['(600)'], {}), '(600)\n', (2825, 2830), False, 'import pytest\n'), ((2832, 2886), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""use_var_cope"""', '[False, True]'], {}), "('use_var_cope', [False, True])\n", (2855, 2886), False, 'import pytest\n'), ((1743, 1821), 'templateflow.api.get', 'get_template', (['"""MNI152NLin2009cAsym"""'], {'resolution': '(2)', 'desc': '"""brain"""', 'suffix': '"""mask"""'}), "('MNI152NLin2009cAsym', resolution=2, desc='brain', suffix='mask')\n", (1755, 1821), True, 'from templateflow.api import get as get_template\n'), ((960, 986), 'tarfile.open', 'tarfile.open', (['inputtarpath'], {}), '(inputtarpath)\n', (972, 986), False, 'import tarfile\n'), ((3974, 4043), 'nipype.interfaces.fsl.MultipleRegressDesign', 'fsl.MultipleRegressDesign', ([], {'regressors': 'regressors', 'contrasts': 'contrasts'}), '(regressors=regressors, contrasts=contrasts)\n', (3999, 4043), False, 'from nipype.interfaces import fsl, ants\n'), ((1836, 1921), 'nipype.interfaces.ants.ResampleImageBySpacing', 'ants.ResampleImageBySpacing', ([], {'dimension': '(3)', 'input_image': 'tpl', 'out_spacing': '(6, 6, 6)'}), '(dimension=3, input_image=tpl, out_spacing=(6, 6, 6)\n )\n', (1863, 1921), False, 'from nipype.interfaces import fsl, ants\n'), ((2296, 2469), 'nipype.interfaces.ants.ApplyTransforms', 'ants.ApplyTransforms', ([], {'dimension': '(3)', 'input_image_type': '(0)', 'input_image': 'in_file', 'reference_image': 'mni_downsampled', 'interpolation': 
'"""NearestNeighbor"""', 'transforms': "['identity']"}), "(dimension=3, input_image_type=0, input_image=in_file,\n reference_image=mni_downsampled, interpolation='NearestNeighbor',\n transforms=['identity'])\n", (2316, 2469), False, 'from nipype.interfaces import fsl, ants\n'), ((5625, 5650), 'nibabel.load', 'nib.load', (['merge_mask_file'], {}), '(merge_mask_file)\n', (5633, 5650), True, 'import nibabel as nib\n'), ((5727, 5742), 'nibabel.load', 'nib.load', (['r0[k]'], {}), '(r0[k])\n', (5735, 5742), True, 'import nibabel as nib\n'), ((5774, 5789), 'nibabel.load', 'nib.load', (['r1[k]'], {}), '(r1[k])\n', (5782, 5789), True, 'import nibabel as nib\n'), ((6299, 6328), 'numpy.isclose', 'np.isclose', (['a0', 'a1'], {'rtol': '(0.01)'}), '(a0, a1, rtol=0.01)\n', (6309, 6328), True, 'import numpy as np\n'), ((6449, 6464), 'numpy.abs', 'np.abs', (['(a0 - a1)'], {}), '(a0 - a1)\n', (6455, 6464), True, 'import numpy as np\n'), ((2712, 2719), 'pathlib.Path', 'Path', (['f'], {}), '(f)\n', (2716, 2719), False, 'from pathlib import Path\n')] |
from prev_ob_models.utils import RunInClassDirectory, IsolatedCell
class MC(IsolatedCell):
    """Mitral cell model, loaded from hoc files in the class directory."""
    def __init__(self):
        # The hoc files use relative paths, so they must be opened while the
        # working directory is this class's directory.
        with RunInClassDirectory(MC):
            from neuron import h, gui
            # Morphology first, then membrane mechanisms.
            h.xopen("mitral.hoc")
            h.xopen("memb.hoc")
            h.celsius = 23
            self.h = h
            self.soma = h.soma
            # Enable variable-timestep (CVODE) integration.
            h.cvode_active(1)
| [
"neuron.h.cvode_active",
"neuron.h.xopen",
"prev_ob_models.utils.RunInClassDirectory"
] | [((133, 156), 'prev_ob_models.utils.RunInClassDirectory', 'RunInClassDirectory', (['MC'], {}), '(MC)\n', (152, 156), False, 'from prev_ob_models.utils import RunInClassDirectory, IsolatedCell\n'), ((211, 232), 'neuron.h.xopen', 'h.xopen', (['"""mitral.hoc"""'], {}), "('mitral.hoc')\n", (218, 232), False, 'from neuron import h, gui\n'), ((246, 265), 'neuron.h.xopen', 'h.xopen', (['"""memb.hoc"""'], {}), "('memb.hoc')\n", (253, 265), False, 'from neuron import h, gui\n'), ((374, 391), 'neuron.h.cvode_active', 'h.cvode_active', (['(1)'], {}), '(1)\n', (388, 391), False, 'from neuron import h, gui\n')] |
from __future__ import annotations
import mmap
import threading
from enum import Enum
from itertools import product
from pathlib import Path
from typing import (
TYPE_CHECKING,
Optional,
Sequence,
Set,
Sized,
SupportsInt,
Union,
cast,
overload,
)
import numpy as np
from ._util import AXIS, VoxelSize, get_reader, is_supported_file
from .structures import Attributes, ExpLoop, FrameMetadata, Metadata, XYPosLoop
try:
from functools import cached_property
except ImportError:
cached_property = property # type: ignore
if TYPE_CHECKING:
from typing import Any, Dict, List, Tuple
import dask.array as da
import xarray as xr
from typing_extensions import Literal
Index = Union[int, slice]
class ReadMode(str, Enum):
    """Backend used to read frame data from an nd2 file.

    The ``str`` mixin lets members compare equal to their plain string
    values, so callers may pass either ``ReadMode.MMAP`` or ``"mmap"``.
    """

    MMAP = "mmap"
    SDK = "sdk"
class ND2File:
    """Reader for Nikon ND2 microscopy files.

    Wraps a lower-level reader chosen by ``get_reader`` (either the SDK-based
    reader or the "legacy" JPEG2000 one) and exposes the image data as numpy,
    dask, or xarray, with axis sizes and coordinates derived from metadata.
    """
    _memmap: mmap.mmap
    _is_legacy: bool
    def __init__(
        self,
        path: Union[Path, str],
        validate_frames: bool = False,
        search_window: int = 100,
    ) -> None:
        """Open an nd2 file.
        Parameters
        ----------
        path : Union[Path, str]
            Filename of an nd2 file.
        validate_frames : bool
            Whether to verify (and attempt to fix) frames whose positions have been
            shifted relative to the predicted offset (i.e. in a corrupted file).
            This comes at a slight performance penalty at file open, but may "rescue"
            some corrupt files. by default False.
        search_window : int
            When validate_frames is true, this is the search window (in KB) that will
            be used to try to find the actual chunk position. by default 100 KB
        """
        self._path = str(path)
        self._rdr = get_reader(
            self._path, validate_frames=validate_frames, search_window=search_window
        )
        self._closed = False
        self._is_legacy = "Legacy" in type(self._rdr).__name__
        # Serializes access to the underlying reader (used by _dask_block).
        self._lock = threading.RLock()
    @staticmethod
    def is_supported_file(path) -> bool:
        # Delegates to the module-level helper of the same name.
        return is_supported_file(path)
    @property
    def path(self):
        """Path of the image."""
        return self._path
    @property
    def is_legacy(self) -> bool:
        """Whether file is a legacy nd2 (JPEG2000) file."""
        return self._is_legacy
    def open(self) -> None:
        """open file for reading."""
        if self.closed:
            self._rdr.open()
            self._closed = False
    def close(self) -> None:
        """Close file (may cause segfault if read when closed in some cases)."""
        if not self.closed:
            self._rdr.close()
            self._closed = True
    @property
    def closed(self) -> bool:
        """Whether the file is closed."""
        return self._closed
    def __enter__(self) -> ND2File:
        self.open()
        return self
    def __exit__(self, *_) -> None:
        self.close()
    def __getstate__(self):
        # Reader handle and lock are not picklable; they are rebuilt in
        # __setstate__.
        state = self.__dict__.copy()
        del state["_rdr"]
        del state["_lock"]
        return state
    def __setstate__(self, d):
        self.__dict__ = d
        self._lock = threading.RLock()
        # get_reader opens the file; re-close it if the pickled state says the
        # file was closed.
        self._rdr = get_reader(self._path)
        if self._closed:
            self._rdr.close()
    @cached_property
    def attributes(self) -> Attributes:
        """Core image attributes"""
        return self._rdr.attributes
    @cached_property
    def text_info(self) -> Dict[str, Any]:
        """Misc text info."""
        return self._rdr.text_info()
    @cached_property
    def experiment(self) -> List[ExpLoop]:
        """Loop information for each nd axis"""
        return self._rdr.experiment()
    @cached_property
    def metadata(self) -> Union[Metadata, dict]:
        """Various metadata (will be dict if legacy format)."""
        return self._rdr.metadata()
    def frame_metadata(
        self, seq_index: Union[int, tuple]
    ) -> Union[FrameMetadata, dict]:
        """Metadata for specific frame.
        This includes the global metadata from the metadata function.
        (will be dict if legacy format).
        Parameters
        ----------
        seq_index : Union[int, tuple]
            frame index
        Returns
        -------
        Union[FrameMetadata, dict]
            dict if legacy format, else FrameMetadata
        """
        # Accepts either a flat sequence index or a coordinate tuple.
        idx = cast(
            int,
            self._seq_index_from_coords(seq_index)
            if isinstance(seq_index, tuple)
            else seq_index,
        )
        return self._rdr.frame_metadata(idx)
    @cached_property
    def custom_data(self) -> Dict[str, Any]:
        """Dict of various unstructured custom metadata."""
        return self._rdr._custom_data()
    @cached_property
    def ndim(self) -> int:
        """number of dimensions"""
        return len(self.shape)
    @cached_property
    def shape(self) -> Tuple[int, ...]:
        """size of each axis"""
        return self._coord_shape + self._frame_shape
    @cached_property
    def sizes(self) -> Dict[str, int]:
        """names and sizes for each axis"""
        attrs = self.attributes
        dims = {AXIS._MAP[c[1]]: c[2] for c in self._rdr._coord_info()}
        # Ensure the channel axis comes before Y/X in insertion order.
        dims[AXIS.CHANNEL] = (
            dims.pop(AXIS.CHANNEL)
            if AXIS.CHANNEL in dims
            else (attrs.channelCount or 1)
        )
        dims[AXIS.Y] = attrs.heightPx
        dims[AXIS.X] = attrs.widthPx or -1
        if self.components_per_channel == 3:  # rgb
            dims[AXIS.RGB] = self.components_per_channel
        else:
            # if not exactly 3 channels, throw them all into monochrome channels
            dims[AXIS.CHANNEL] = attrs.componentCount
        # Singleton axes are dropped.
        return {k: v for k, v in dims.items() if v != 1}
    @property
    def is_rgb(self) -> bool:
        """Whether the image is rgb"""
        return self.components_per_channel in (3, 4)
    @property
    def components_per_channel(self) -> int:
        """Number of components per channel (e.g. 3 for rgb)"""
        attrs = cast(Attributes, self.attributes)
        return attrs.componentCount // (attrs.channelCount or 1)
    @property
    def size(self) -> int:
        """Total number of pixels in the volume."""
        return int(np.prod(self.shape))
    @property
    def nbytes(self) -> int:
        """Total bytes of image data."""
        return self.size * self.dtype.itemsize
    @cached_property
    def dtype(self) -> np.dtype:
        """Image data type"""
        attrs = self.attributes
        # Default to unsigned integer when the file does not declare a type.
        d = attrs.pixelDataType[0] if attrs.pixelDataType else "u"
        return np.dtype(f"{d}{attrs.bitsPerComponentInMemory // 8}")
    def voxel_size(self, channel: int = 0) -> VoxelSize:
        """XYZ voxel size.
        Parameters
        ----------
        channel : int
            Channel for which to retrieve voxel info, by default 0
        Returns
        -------
        VoxelSize
            Named tuple with attrs `x`, `y`, and `z`.
        """
        return VoxelSize(*self._rdr.voxel_size())
    def asarray(self, position: Optional[int] = None) -> np.ndarray:
        """Read image into numpy array.
        Parameters
        ----------
        position : int, optional
            A specific XY position to extract, by default (None) reads all.
            May also be a position name (str), which is resolved to an index.
        Returns
        -------
        np.ndarray
        Raises
        ------
        ValueError
            if `position` is a string and is not a valid position name
        IndexError
            if `position` is provided and is out of range
        """
        final_shape = list(self.shape)
        if position is None:
            seqs: Sequence[int] = range(self._frame_count)
        else:
            if isinstance(position, str):
                try:
                    position = self._position_names().index(position)
                except ValueError as e:
                    raise ValueError(
                        f"{position!r} is not a valid position name"
                    ) from e
            try:
                pidx = list(self.sizes).index(AXIS.POSITION)
            except ValueError as exc:
                # No position axis: only position 0 is valid.
                if position > 0:
                    raise IndexError(
                        f"Position {position} is out of range. "
                        f"Only 1 position available"
                    ) from exc
                seqs = range(self._frame_count)
            else:
                if position >= self.sizes[AXIS.POSITION]:
                    raise IndexError(
                        f"Position {position} is out of range. "
                        f"Only {self.sizes[AXIS.POSITION]} positions available"
                    )
                # Restrict the position axis to the requested index while
                # enumerating all other coordinate axes.
                ranges: List[Union[range, tuple]] = [
                    range(x) for x in self._coord_shape
                ]
                ranges[pidx] = (position,)
                coords = list(zip(*product(*ranges)))
                seqs = self._seq_index_from_coords(coords)  # type: ignore
                final_shape[pidx] = 1
        arr: np.ndarray = np.stack([self._get_frame(i) for i in seqs])
        return arr.reshape(final_shape)
    def __array__(self) -> np.ndarray:
        """array protocol"""
        return self.asarray()
    def to_dask(self, wrapper=True, copy=True) -> da.Array:
        """Create dask array (delayed reader) representing image.
        This generally works well, but it remains to be seen whether performance
        is optimized, or if we're duplicating safety mechanisms. You may try
        various combinations of `wrapper` and `copy`, setting both to `False`
        will very likely cause segmentation faults in many cases. But setting
        one of them to `False`, may slightly improve read speed in certain
        cases.
        Parameters
        ----------
        wrapper : bool
            If True (the default), the returned obect will be a thin subclass of
            a :class:`dask.array.Array` (an
            `ResourceBackedDaskArray`) that manages the opening
            and closing of this file when getting chunks via compute(). If `wrapper`
            is `False`, then a pure `da.Array` will be returned. However, when that
            array is computed, it will incur a file open/close on *every* chunk
            that is read (in the `_dask_block` method). As such `wrapper`
            will generally be much faster, however, it *may* fail (i.e. result in
            segmentation faults) with certain dask schedulers.
        copy : bool
            If `True` (the default), the dask chunk-reading function will return
            an array copy. This can avoid segfaults in certain cases, though it
            may also add overhead.
        Returns
        -------
        da.Array
        """
        from dask.array import map_blocks
        # One chunk per frame: singleton chunks along coordinate axes, full
        # frame along the frame axes.
        chunks = [(1,) * x for x in self._coord_shape]
        chunks += [(x,) for x in self._frame_shape]
        dask_arr = map_blocks(
            self._dask_block,
            copy=copy,
            chunks=chunks,
            dtype=self.dtype,
        )
        if wrapper:
            from resource_backed_dask_array import ResourceBackedDaskArray
            # this subtype allows the dask array to re-open the underlying
            # nd2 file on compute.
            return ResourceBackedDaskArray.from_array(dask_arr, self)
        return dask_arr
    # Sentinel sequence index used when the file has no non-frame coordinates.
    _NO_IDX = -1
    def _seq_index_from_coords(
        self, coords: Sequence
    ) -> Union[Sequence[int], SupportsInt]:
        if not self._coord_shape:
            return self._NO_IDX
        return np.ravel_multi_index(coords, self._coord_shape)
    def _dask_block(self, copy: bool, block_id: Tuple[int]) -> np.ndarray:
        # NOTE(review): dask appears to probe map_blocks functions with an
        # array argument; this early return (None) guards that case -- confirm.
        if isinstance(block_id, np.ndarray):
            return
        with self._lock:
            was_closed = self.closed
            if self.closed:
                self.open()
            try:
                ncoords = len(self._coord_shape)
                idx = self._seq_index_from_coords(block_id[:ncoords])
                if idx == self._NO_IDX:
                    if any(block_id):
                        raise ValueError(
                            f"Cannot get chunk {block_id} for single frame image."
                        )
                    idx = 0
                data = self._get_frame(cast(int, idx))
                data = data.copy() if copy else data
                # Re-add singleton coordinate axes so the chunk matches the
                # declared chunk shape.
                return data[(np.newaxis,) * ncoords]
            finally:
                # Restore the closed state we found on entry.
                if was_closed:
                    self.close()
    def to_xarray(
        self,
        delayed: bool = True,
        squeeze: bool = True,
        position: Optional[int] = None,
        copy: bool = True,
    ) -> xr.DataArray:
        """Create labeled xarray representing image.
        `array.dims` will be populated according to image metadata, and coordinates
        will be populated based on pixel spacings. Additional metadata is available
        in `array.attrs['metadata']`.
        Parameters
        ----------
        delayed : bool
            Whether the DataArray should be backed by dask array or numpy array,
            by default True (dask).
        squeeze : bool
            Whether to squeeze singleton dimensions, by default True
        position : int, optional
            A specific XY position to extract, by default (None) reads all.
        copy : bool
            Only applies when `delayed==True`.  See `to_dask` for details.
        Returns
        -------
        xr.DataArray
            xarray with all axes labeled.
        """
        import xarray as xr
        data = self.to_dask(copy=copy) if delayed else self.asarray(position)
        dims = list(self.sizes)
        coords = self._expand_coords(squeeze)
        if not squeeze:
            # Pad with leading singleton axes for coords absent from sizes.
            for missing_dim in set(coords).difference(dims):
                dims.insert(0, missing_dim)
            missing_axes = len(dims) - data.ndim
            if missing_axes > 0:
                data = data[(np.newaxis,) * missing_axes]
        if position is not None and not delayed and AXIS.POSITION in coords:
            # if it's delayed, we do this using isel below instead.
            coords[AXIS.POSITION] = [coords[AXIS.POSITION][position]]
        x = xr.DataArray(
            data,
            dims=dims,
            coords=coords,
            attrs={
                "metadata": {
                    "metadata": self.metadata,
                    "experiment": self.experiment,
                    "attributes": self.attributes,
                    "text_info": self.text_info,
                }
            },
        )
        if delayed and position is not None and AXIS.POSITION in coords:
            x = x.isel({AXIS.POSITION: [position]})
        return x.squeeze() if squeeze else x
    @property
    def _frame_coords(self) -> Set[str]:
        # Axes that belong to a single frame rather than the acquisition loop.
        return {AXIS.X, AXIS.Y, AXIS.CHANNEL, AXIS.RGB}
    @property
    def _raw_frame_shape(self) -> Tuple[int, int, int, int]:
        """sizes of each frame coordinate, prior to reshape"""
        attr = self.attributes
        return (
            attr.heightPx,
            attr.widthPx or -1,
            attr.channelCount or 1,
            self.components_per_channel,
        )
    @property
    def _frame_shape(self) -> Tuple[int, ...]:
        """sizes of each frame coordinate, after reshape & squeeze"""
        return tuple(v for k, v in self.sizes.items() if k in self._frame_coords)
    @cached_property
    def _coord_shape(self) -> Tuple[int, ...]:
        """sizes of each *non-frame* coordinate"""
        return tuple(v for k, v in self.sizes.items() if k not in self._frame_coords)
    @property
    def _frame_count(self) -> int:
        return int(np.prod(self._coord_shape))
    def _get_frame(self, index: int) -> np.ndarray:
        frame = self._rdr._read_image(index)
        frame.shape = self._raw_frame_shape
        # Move channel axis first: (Y, X, C, components) -> (C, Y, X, components).
        return frame.transpose((2, 0, 1, 3)).squeeze()
    def _expand_coords(self, squeeze: bool = True) -> dict:
        """Return a dict that can be used as the coords argument to xr.DataArray
        Parameters
        ----------
        squeeze : bool
            whether to squeeze axes with length < 2, by default True
        Returns
        -------
        dict
            dict of axis name -> coordinates
        """
        dx, dy, dz = self.voxel_size()
        coords: Dict[str, Sized] = {
            AXIS.Y: np.arange(self.attributes.heightPx) * dy,
            AXIS.X: np.arange(self.attributes.widthPx or 1) * dx,
            AXIS.CHANNEL: self._channel_names,
            AXIS.POSITION: ["XYPos:0"],  # maybe overwritten below
        }
        for c in self.experiment:
            if squeeze and c.count <= 1:
                continue
            if c.type == "ZStackLoop":
                coords[AXIS.Z] = np.arange(c.count) * c.parameters.stepUm
            elif c.type == "TimeLoop":
                coords[AXIS.TIME] = np.arange(c.count) * c.parameters.periodMs
            elif c.type == "NETimeLoop":
                # Non-equidistant time loop: concatenate per-period time axes.
                pers = [np.arange(p.count) * p.periodMs for p in c.parameters.periods]
                coords[AXIS.TIME] = np.hstack(pers)
            elif c.type == "XYPosLoop":
                coords[AXIS._MAP["XYPosLoop"]] = self._position_names(c)
        if self.components_per_channel > 1:
            coords[AXIS.RGB] = ["Red", "Green", "Blue", "alpha"][
                : self.components_per_channel
            ]
        # fix for Z axis missing from experiment:
        if AXIS.Z in self.sizes and AXIS.Z not in coords:
            coords[AXIS.Z] = np.arange(self.sizes[AXIS.Z]) * dz
        if squeeze:
            return {k: v for k, v in coords.items() if len(v) > 1}
        return coords
    def _position_names(self, loop: Optional[XYPosLoop] = None) -> List[str]:
        # Find the XY position loop if one was not provided.
        if loop is None:
            for c in self.experiment:
                if c.type == "XYPosLoop":
                    loop = c
                    break
        if loop is None:
            return ["XYPos:0"]
        return [p.name or f"XYPos:{i}" for i, p in enumerate(loop.parameters.points)]
    @property
    def _channel_names(self) -> List[str]:
        return self._rdr.channel_names()
    def __repr__(self) -> str:
        try:
            details = " (closed)" if self.closed else f" {self.dtype}: {self.sizes!r}"
            extra = f": {Path(self.path).name!r}{details}"
        except Exception:
            # Never let repr raise (e.g. on a partially constructed object).
            extra = ""
        return f"<ND2File at {hex(id(self))}{extra}>"
# The overloads below encode the return type of `imread` as a function of the
# `dask` and `xarray` flags: np.ndarray by default, xr.DataArray when
# xarray=True (delayed or not), and a dask array when dask=True, xarray=False.
@overload
def imread(
    file: Union[Path, str],
    dask: Literal[False] = False,
    xarray: Literal[False] = False,
    validate_frames: bool = False,
) -> np.ndarray:
    ...
@overload
def imread(
    file: Union[Path, str],
    dask: bool = ...,
    xarray: Literal[True] = True,
    validate_frames: bool = False,
) -> xr.DataArray:
    ...
@overload
def imread(
    file: Union[Path, str],
    dask: Literal[True] = ...,
    xarray=False,
    validate_frames: bool = False,
) -> da.Array:
    ...
def imread(
    file: Union[Path, str],
    dask: bool = False,
    xarray: bool = False,
    validate_frames: bool = False,
):
    """Open `file`, read it as the requested array type, and close it.

    Parameters
    ----------
    file : Union[Path, str]
        Filepath (`str`) or `Path` object to ND2 file.
    dask : bool
        If `True`, return a delayed `dask.array.Array`; no pixel data is read
        from disk until `.compute()` is called or the array is cast to numpy
        with `np.asarray()`. By default `False`.
    xarray : bool
        If `True`, return an `xarray.DataArray` with dims and coordinates
        populated from the image metadata, and extra metadata available under
        `array.attrs['metadata']`. Combined with `dask=True`, the DataArray
        is backed by a delayed dask array. By default `False`.
    validate_frames : bool
        Whether to verify (and attempt to fix) frames whose positions have been
        shifted relative to the predicted offset (i.e. in a corrupted file).
        Slightly slows down file open, but may "rescue" some corrupt files.
        By default `False`.

    Returns
    -------
    Union[np.ndarray, dask.array.Array, xarray.DataArray]
        Array subclass, depending on arguments used.
    """
    with ND2File(file, validate_frames=validate_frames) as nd2:
        # xarray takes precedence over dask; to_xarray handles both cases.
        if xarray:
            return nd2.to_xarray(delayed=dask)
        if dask:
            return nd2.to_dask()
        return nd2.asarray()
| [
"numpy.prod",
"numpy.hstack",
"pathlib.Path",
"numpy.ravel_multi_index",
"dask.array.map_blocks",
"threading.RLock",
"itertools.product",
"resource_backed_dask_array.ResourceBackedDaskArray.from_array",
"xarray.DataArray",
"numpy.dtype",
"typing.cast",
"numpy.arange"
] | [((1978, 1995), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1993, 1995), False, 'import threading\n'), ((3150, 3167), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (3165, 3167), False, 'import threading\n'), ((6028, 6061), 'typing.cast', 'cast', (['Attributes', 'self.attributes'], {}), '(Attributes, self.attributes)\n', (6032, 6061), False, 'from typing import TYPE_CHECKING, Optional, Sequence, Set, Sized, SupportsInt, Union, cast, overload\n'), ((6592, 6645), 'numpy.dtype', 'np.dtype', (['f"""{d}{attrs.bitsPerComponentInMemory // 8}"""'], {}), "(f'{d}{attrs.bitsPerComponentInMemory // 8}')\n", (6600, 6645), True, 'import numpy as np\n'), ((10911, 10983), 'dask.array.map_blocks', 'map_blocks', (['self._dask_block'], {'copy': 'copy', 'chunks': 'chunks', 'dtype': 'self.dtype'}), '(self._dask_block, copy=copy, chunks=chunks, dtype=self.dtype)\n', (10921, 10983), False, 'from dask.array import map_blocks\n'), ((11550, 11597), 'numpy.ravel_multi_index', 'np.ravel_multi_index', (['coords', 'self._coord_shape'], {}), '(coords, self._coord_shape)\n', (11570, 11597), True, 'import numpy as np\n'), ((14206, 14399), 'xarray.DataArray', 'xr.DataArray', (['data'], {'dims': 'dims', 'coords': 'coords', 'attrs': "{'metadata': {'metadata': self.metadata, 'experiment': self.experiment,\n 'attributes': self.attributes, 'text_info': self.text_info}}"}), "(data, dims=dims, coords=coords, attrs={'metadata': {'metadata':\n self.metadata, 'experiment': self.experiment, 'attributes': self.\n attributes, 'text_info': self.text_info}})\n", (14218, 14399), True, 'import xarray as xr\n'), ((6240, 6259), 'numpy.prod', 'np.prod', (['self.shape'], {}), '(self.shape)\n', (6247, 6259), True, 'import numpy as np\n'), ((11268, 11318), 'resource_backed_dask_array.ResourceBackedDaskArray.from_array', 'ResourceBackedDaskArray.from_array', (['dask_arr', 'self'], {}), '(dask_arr, self)\n', (11302, 11318), False, 'from resource_backed_dask_array import 
ResourceBackedDaskArray\n'), ((15683, 15709), 'numpy.prod', 'np.prod', (['self._coord_shape'], {}), '(self._coord_shape)\n', (15690, 15709), True, 'import numpy as np\n'), ((16381, 16416), 'numpy.arange', 'np.arange', (['self.attributes.heightPx'], {}), '(self.attributes.heightPx)\n', (16390, 16416), True, 'import numpy as np\n'), ((16443, 16482), 'numpy.arange', 'np.arange', (['(self.attributes.widthPx or 1)'], {}), '(self.attributes.widthPx or 1)\n', (16452, 16482), True, 'import numpy as np\n'), ((17547, 17576), 'numpy.arange', 'np.arange', (['self.sizes[AXIS.Z]'], {}), '(self.sizes[AXIS.Z])\n', (17556, 17576), True, 'import numpy as np\n'), ((12289, 12303), 'typing.cast', 'cast', (['int', 'idx'], {}), '(int, idx)\n', (12293, 12303), False, 'from typing import TYPE_CHECKING, Optional, Sequence, Set, Sized, SupportsInt, Union, cast, overload\n'), ((16786, 16804), 'numpy.arange', 'np.arange', (['c.count'], {}), '(c.count)\n', (16795, 16804), True, 'import numpy as np\n'), ((16902, 16920), 'numpy.arange', 'np.arange', (['c.count'], {}), '(c.count)\n', (16911, 16920), True, 'import numpy as np\n'), ((17109, 17124), 'numpy.hstack', 'np.hstack', (['pers'], {}), '(pers)\n', (17118, 17124), True, 'import numpy as np\n'), ((18329, 18344), 'pathlib.Path', 'Path', (['self.path'], {}), '(self.path)\n', (18333, 18344), False, 'from pathlib import Path\n'), ((8868, 8884), 'itertools.product', 'product', (['*ranges'], {}), '(*ranges)\n', (8875, 8884), False, 'from itertools import product\n'), ((17010, 17028), 'numpy.arange', 'np.arange', (['p.count'], {}), '(p.count)\n', (17019, 17028), True, 'import numpy as np\n')] |
# Copyright 2015-2016 Mirantis, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
from mcv_consoler.common import config
from mcv_consoler.version import version
def _get_parser():
    """Build and return the MCV Consoler command-line argument parser."""
    cli = argparse.ArgumentParser(
        prog=config.PROJECT_NAME,
        formatter_class=argparse.RawTextHelpFormatter,
        description=config.PROJECT_DESCRIPTION,
        epilog=config.RUN_DESCRIPTION,
    )
    # Exactly one main operation must be chosen per invocation.
    action = cli.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--run", nargs="+",
        help="Run one of specified test suites.")
    action.add_argument(
        "--compare-resources", default=False,
        help="Compare current resources with yaml-file")
    action.add_argument(
        "--remove-trash", default=False, nargs='?',
        help="Find trash and remove it")
    cli.add_argument(
        "--config",
        help="Provide custom config file instead of the default one")
    # Credentials / settings files; when omitted they are fetched remotely.
    cli.add_argument(
        '--os-ssh-key', type=argparse.FileType('rt'),
        help='SSH key for OpenStack nodes. If not set fetched from FUEL '
             'master.')
    cli.add_argument(
        '--os-openrc', type=argparse.FileType('rt'),
        help='Shell script contain definition of environment variables used '
             'by OpenStack CLI client for authentication. If not set etched '
             'from FUEL controller node.')
    cli.add_argument(
        '--os-fuelclient-settings', type=argparse.FileType('rt'),
        help='Settings for fuelclient. If not set fetched from FUEL master.')
    cli.add_argument(
        "--version", action="version", version=version,
        help="Print out version of MCV Consoler and exit.")
    cli.add_argument(
        "--run-mode", choices=config.RUN_MODES, required=True,
        help="""Choose mode in which Consoler is going to work.
Possible values:
 instance - Run MCV inside the cloud as an instance (L1)
 node - Run MCV as a separate node with direct access to admin network (L2)
 external - Run MCV as a separate node in external network (L3)""")
    # Logging verbosity flags.
    cli.add_argument(
        "--debug", default=False, action="store_true",
        help="Show debug messages.")
    cli.add_argument(
        "--verbose", default=False, action="store_true",
        help="Verbose debug messages.")
    return cli
argparser = _get_parser()
| [
"argparse.FileType",
"argparse.ArgumentParser"
] | [((745, 922), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': 'config.PROJECT_NAME', 'formatter_class': 'argparse.RawTextHelpFormatter', 'description': 'config.PROJECT_DESCRIPTION', 'epilog': 'config.RUN_DESCRIPTION'}), '(prog=config.PROJECT_NAME, formatter_class=argparse.\n RawTextHelpFormatter, description=config.PROJECT_DESCRIPTION, epilog=\n config.RUN_DESCRIPTION)\n', (768, 922), False, 'import argparse\n'), ((1578, 1601), 'argparse.FileType', 'argparse.FileType', (['"""rt"""'], {}), "('rt')\n", (1595, 1601), False, 'import argparse\n'), ((1755, 1778), 'argparse.FileType', 'argparse.FileType', (['"""rt"""'], {}), "('rt')\n", (1772, 1778), False, 'import argparse\n'), ((2046, 2069), 'argparse.FileType', 'argparse.FileType', (['"""rt"""'], {}), "('rt')\n", (2063, 2069), False, 'import argparse\n')] |
import tensorflow as tf
from dataset.create_kitti_tfrecords import get_dataset
from dataset import input_generator
import model_options
import scipy.misc as smi
import numpy as np
import cv2
# Aliases for the TF1 contrib slim module and its prefetch-queue helper.
slim = tf.contrib.slim
prefetch_queue = slim.prefetch_queue
# with tf.Graph().as_default():
#     dataset = get_dataset(model_options.train_datasets, model_options.num_samples)
#     samples = input_generator.get(dataset,
#                                     model_options.crop_size,
#                                     model_options.train_batch_size,
#                                     num_readers=1,
#                                     num_threads=1,
#                                     is_training=True)
#     inputs_queue = prefetch_queue.prefetch_queue(samples,capacity=8)
if __name__ == "__main__":
    # Smoke test for the TFRecord input pipeline: pull one batch and display it.
    test_datasets = ['train_cityscapes_segmentation_0.tfrecords',]
    num_samples = 200
    crop_size = [384, 960]  # assumes [height, width] — TODO confirm against input_generator.get
    batch_size = 1
    # with tf.Session() as sess:
    sess = tf.InteractiveSession()
    # a = tf.constant(1)
    # b = tf.constant(2)
    # c = a + b
    # d = sess.run(c)
    # print d
    # Small tf.cond experiment: string equality selects between two constants.
    s = tf.constant('campus')
    s2 = tf.constant('campus1')
    print(tf.equal(s,s2))  # prints the (unevaluated) tensor, not its value
    result = tf.cond(tf.equal(s,s2), lambda:tf.constant(1),lambda:tf.constant(0))
    print(sess.run(result))
    # tf.cond(tf. )
    # tf.cond(a<b, lambda:tf.)
    # NOTE(review): a Coordinator and queue runners are started again below
    # (after graph construction); this first pair is never stopped/joined.
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(coord=coord)
    # Build the dataset input pipeline and a prefetch queue on top of it.
    dataset = get_dataset(test_datasets, num_samples)
    samples = input_generator.get(dataset,
                                   crop_size,
                                   batch_size,
                                   num_readers=3,
                                   num_threads=3,
                                   is_training=True)
    inputs_queue = prefetch_queue.prefetch_queue(samples,capacity=8*model_options.train_batch_size)
    samples_ = inputs_queue.dequeue()
    print(samples_)
    # images = samples_['image']
    # init_op = tf.group(tf.global_variables_initializer(),
    #                    tf.local_variables_initializer())
    # sess.run(init_op)
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(coord=coord)
    for i in range(1):
        # Evaluate one dequeued batch and save/show image and label.
        sample_ = sess.run(samples_)
        image_ = sample_['image']
        label_ = sample_['label']
        smi.imsave('temp/{}_im_t.png'.format(i),image_[0,:,:,:])
        smi.imsave('temp/{}_la_t.png'.format(i),label_[0,:,:])
        # print(sess.run(samples_).__class__)
        print(image_.max(),image_.shape)
        cv2.imshow('train_im',image_[0,:,:,:].astype(np.uint8))
        k = cv2.waitKey(0)
        if k==27:  # ESC closes the preview window
            cv2.destroyWindow('train_im')
    # print images_
    # # smi.imshow(images_[0,:,:,:])
    # Shut the queue-runner threads down cleanly.
    coord.request_stop()
    coord.join(threads)
| [
"tensorflow.InteractiveSession",
"tensorflow.equal",
"dataset.create_kitti_tfrecords.get_dataset",
"tensorflow.train.Coordinator",
"cv2.destroyWindow",
"dataset.input_generator.get",
"tensorflow.train.start_queue_runners",
"tensorflow.constant",
"cv2.waitKey"
] | [((860, 883), 'tensorflow.InteractiveSession', 'tf.InteractiveSession', ([], {}), '()\n', (881, 883), True, 'import tensorflow as tf\n'), ((995, 1016), 'tensorflow.constant', 'tf.constant', (['"""campus"""'], {}), "('campus')\n", (1006, 1016), True, 'import tensorflow as tf\n'), ((1026, 1048), 'tensorflow.constant', 'tf.constant', (['"""campus1"""'], {}), "('campus1')\n", (1037, 1048), True, 'import tensorflow as tf\n'), ((1249, 1271), 'tensorflow.train.Coordinator', 'tf.train.Coordinator', ([], {}), '()\n', (1269, 1271), True, 'import tensorflow as tf\n'), ((1286, 1327), 'tensorflow.train.start_queue_runners', 'tf.train.start_queue_runners', ([], {'coord': 'coord'}), '(coord=coord)\n', (1314, 1327), True, 'import tensorflow as tf\n'), ((1343, 1382), 'dataset.create_kitti_tfrecords.get_dataset', 'get_dataset', (['test_datasets', 'num_samples'], {}), '(test_datasets, num_samples)\n', (1354, 1382), False, 'from dataset.create_kitti_tfrecords import get_dataset\n'), ((1397, 1500), 'dataset.input_generator.get', 'input_generator.get', (['dataset', 'crop_size', 'batch_size'], {'num_readers': '(3)', 'num_threads': '(3)', 'is_training': '(True)'}), '(dataset, crop_size, batch_size, num_readers=3,\n num_threads=3, is_training=True)\n', (1416, 1500), False, 'from dataset import input_generator\n'), ((1897, 1919), 'tensorflow.train.Coordinator', 'tf.train.Coordinator', ([], {}), '()\n', (1917, 1919), True, 'import tensorflow as tf\n'), ((1934, 1975), 'tensorflow.train.start_queue_runners', 'tf.train.start_queue_runners', ([], {'coord': 'coord'}), '(coord=coord)\n', (1962, 1975), True, 'import tensorflow as tf\n'), ((1059, 1074), 'tensorflow.equal', 'tf.equal', (['s', 's2'], {}), '(s, s2)\n', (1067, 1074), True, 'import tensorflow as tf\n'), ((1096, 1111), 'tensorflow.equal', 'tf.equal', (['s', 's2'], {}), '(s, s2)\n', (1104, 1111), True, 'import tensorflow as tf\n'), ((2397, 2411), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (2408, 2411), False, 'import 
cv2\n'), ((1119, 1133), 'tensorflow.constant', 'tf.constant', (['(1)'], {}), '(1)\n', (1130, 1133), True, 'import tensorflow as tf\n'), ((1141, 1155), 'tensorflow.constant', 'tf.constant', (['(0)'], {}), '(0)\n', (1152, 1155), True, 'import tensorflow as tf\n'), ((2442, 2471), 'cv2.destroyWindow', 'cv2.destroyWindow', (['"""train_im"""'], {}), "('train_im')\n", (2459, 2471), False, 'import cv2\n')] |
#!/usr/bin/env python3
from cache_tools import CachedBaseHttpSession
# TODO: Schema checking
class LunaSource:
    """Read-only client for the LUNA HTTP API.

    Wraps a cached HTTP session (authenticated via the ``Authorization``
    header) and exposes the interesting endpoints as properties that
    return decoded JSON payloads.
    """

    def __init__(self, api_url: str, api_key: str):
        session = CachedBaseHttpSession("LUNA", api_url)
        session.headers.update({"Authorization": api_key})
        self._session = session

    def _get_json(self, path: str):
        """GET *path* relative to the API root and return the parsed JSON body."""
        response = self._session.get(path)
        response.raise_for_status()
        return response.json()

    @property
    def latest_backup(self):
        """Data for the most recent backup."""
        return self._get_json("backups/$latest")

    @property
    def backup_list(self):
        """All known backups."""
        return self._get_json("backups")

    @property
    def server_status(self):
        """Current server status."""
        return self._get_json("status")

    @property
    def players_data(self):
        """Per-player data."""
        return self._get_json("playerdata")

    @property
    def map_status(self):
        """State of the map renderer."""
        return self._get_json("maprender")
| [
"cache_tools.CachedBaseHttpSession"
] | [((191, 229), 'cache_tools.CachedBaseHttpSession', 'CachedBaseHttpSession', (['"""LUNA"""', 'api_url'], {}), "('LUNA', api_url)\n", (212, 229), False, 'from cache_tools import CachedBaseHttpSession\n')] |
import math
class UintN:
    """Fixed-width unsigned integer: *n* bits with wrap-around addition."""

    def __init__(self, number, n):
        self.n = n
        # Value must fit in n bits.
        assert 0 <= number < 2 ** self.n
        self.number = number

    def bits(self):
        """Return the value as a list of n booleans, most significant bit first.

        Uses integer bit operations instead of the previous
        ``math.floor(number / 2)``: float division silently loses
        precision for values above 2**53, producing wrong bits for
        wide integers.
        """
        number = self.number
        bits = [False] * self.n
        for i in range(self.n):
            bits[self.n - 1 - i] = bool(number & 1)
            number >>= 1
        return bits

    def __add__(self, other):
        """Modular addition: the sum wraps around at 2**n."""
        return UintN((self.number + other.number) % 2 ** self.n, self.n)

    @classmethod
    def from_bits(cls, bits):
        """Build a UintN from a most-significant-bit-first list of bits."""
        n = len(bits)
        number = sum(2 ** (n - 1 - i) * bits[i] for i in range(n))
        return cls(number, n)
class Uint6(UintN):
    """Convenience subclass: a 6-bit unsigned integer."""

    def __init__(self, number):
        # Delegate to UintN with the width fixed at six bits.
        super().__init__(number, n=6)
| [
"math.floor"
] | [((348, 370), 'math.floor', 'math.floor', (['(number / 2)'], {}), '(number / 2)\n', (358, 370), False, 'import math\n')] |
import asyncio
import logging
import re
import socket
from dataclasses import dataclass, field
from typing import Any, Dict, Optional, Union, Coroutine, Callable
import aiohttp
from aiven.monitor import Check, CheckResult
logger = logging.getLogger(__name__)
@dataclass
class HTTPCheckResult(CheckResult):
    """Outcome of a single HTTP check.

    Extends the generic CheckResult with HTTP-specific fields filled in
    by ``HTTPCheck.start`` and the aiohttp request-tracing hooks.
    """

    # HTTP response status code, or None if no response was received.
    status: Optional[int] = None
    # True once a connection to the server was established.
    connected: bool = False
    # True if the response body matched the configured regex.
    content_verified: bool = False
    # Request duration in seconds, measured by the trace hooks;
    # None until the request has completed.  (Was annotated `float`
    # with a None default — inconsistent; fixed to Optional[float].)
    elapsed: Optional[float] = None
async def on_request_start(_, trace_config_ctx, __):
    """aiohttp trace hook: stamp the request start time on the trace context."""
    loop = asyncio.get_event_loop()
    trace_config_ctx.start = loop.time()
async def on_request_end(_, trace_config_ctx, __):
    """aiohttp trace hook: record the elapsed request time on the check result.

    The result object is looked up in the per-request trace context; if the
    caller did not provide one, the hook does nothing.
    """
    result = trace_config_ctx.trace_request_ctx.get("check_result")
    if not result:
        return
    now = asyncio.get_event_loop().time()
    result.elapsed = now - trace_config_ctx.start
# Module-level aiohttp trace configuration: wires the two hooks above into
# aiohttp's request tracing so every traced request records its timing.
# Shared by all HTTPCheck instances (passed via trace_configs= in start()).
trace_config = aiohttp.TraceConfig()
trace_config.on_request_start.append(on_request_start)
trace_config.on_request_end.append(on_request_end)
@dataclass
class HTTPCheck(Check):
    """Periodic HTTP health check.

    Requests ``url`` every ``interval`` seconds forever and reports each
    HTTPCheckResult through the callback passed to :meth:`start`.
    """

    url: str
    # HTTP method; normalised to upper case and validated in __post_init__.
    method: str = field(default="GET")
    # Optional regex: when set, the response body must match for
    # content_verified to become True.
    regex: Optional[str] = field(default=None)
    # Total request timeout in seconds.
    timeout: Union[int, float] = field(default=2.0)
    headers: Dict[str, Any] = field(default_factory=dict)
    verify_ssl: bool = field(default=True)
    # Seconds to sleep between consecutive checks.
    interval: Union[int, float] = field(default=30.0)
    def __post_init__(self):
        # Normalise and validate the method early so bad configuration
        # fails at construction time rather than mid-run.
        self.method = self.method.upper().strip()
        if self.method not in aiohttp.ClientRequest.ALL_METHODS:
            raise ValueError(f"Unsupported http method specified: {self.method}")
    async def start(self, callback: Callable[[CheckResult], Coroutine]) -> None:
        """Run the check loop, invoking *callback* with each result.

        Runs until the surrounding task is cancelled.
        """
        pattern = re.compile(self.regex) if self.regex else None
        # limit=1 / force_close: a monitoring check needs no connection
        # pooling; closing connections keeps each measurement independent.
        connector = aiohttp.TCPConnector(
            limit=1,
            verify_ssl=self.verify_ssl,
            enable_cleanup_closed=True,
            force_close=True,
        )
        async with aiohttp.ClientSession(
            connector=connector,
            headers=self.headers,
            timeout=aiohttp.ClientTimeout(total=self.timeout),
            trace_configs=[trace_config],  # fills in result.elapsed
        ) as session:
            while True:
                logger.info("Starting check for url %s", self.url)
                result = HTTPCheckResult()
                try:
                    async with session.request(
                        method=self.method,
                        url=self.url,
                        trace_request_ctx={"check_result": result},
                    ) as resp:
                        result.connected = True
                        result.status = resp.status
                        if pattern:
                            result.content_verified = bool(
                                pattern.search(await resp.text())
                            )
                except aiohttp.ServerDisconnectedError as e:
                    # Connection was established but the server dropped it.
                    result.connected = True
                    result.error = e.message
                except (
                    aiohttp.ClientConnectionError,
                    aiohttp.ClientConnectorError,
                    socket.gaierror,
                ) as e:
                    # Connection-/DNS-level failure: no connection made.
                    result.error = str(e)
                except asyncio.CancelledError:
                    # Task cancelled: leave the loop (and the session) cleanly.
                    break
                logger.debug("Triggering callback for check (%s)", self.url)
                await callback(result)
                await asyncio.sleep(self.interval)
| [
"logging.getLogger",
"re.compile",
"aiohttp.TraceConfig",
"aiohttp.ClientTimeout",
"asyncio.sleep",
"aiohttp.TCPConnector",
"asyncio.get_event_loop",
"dataclasses.field"
] | [((235, 262), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (252, 262), False, 'import logging\n'), ((955, 976), 'aiohttp.TraceConfig', 'aiohttp.TraceConfig', ([], {}), '()\n', (974, 976), False, 'import aiohttp\n'), ((340, 359), 'dataclasses.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (345, 359), False, 'from dataclasses import dataclass, field\n'), ((382, 402), 'dataclasses.field', 'field', ([], {'default': '(False)'}), '(default=False)\n', (387, 402), False, 'from dataclasses import dataclass, field\n'), ((432, 452), 'dataclasses.field', 'field', ([], {'default': '(False)'}), '(default=False)\n', (437, 452), False, 'from dataclasses import dataclass, field\n'), ((474, 493), 'dataclasses.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (479, 493), False, 'from dataclasses import dataclass, field\n'), ((1151, 1171), 'dataclasses.field', 'field', ([], {'default': '"""GET"""'}), "(default='GET')\n", (1156, 1171), False, 'from dataclasses import dataclass, field\n'), ((1199, 1218), 'dataclasses.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (1204, 1218), False, 'from dataclasses import dataclass, field\n'), ((1252, 1270), 'dataclasses.field', 'field', ([], {'default': '(2.0)'}), '(default=2.0)\n', (1257, 1270), False, 'from dataclasses import dataclass, field\n'), ((1301, 1328), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (1306, 1328), False, 'from dataclasses import dataclass, field\n'), ((1352, 1371), 'dataclasses.field', 'field', ([], {'default': '(True)'}), '(default=True)\n', (1357, 1371), False, 'from dataclasses import dataclass, field\n'), ((1406, 1425), 'dataclasses.field', 'field', ([], {'default': '(30.0)'}), '(default=30.0)\n', (1411, 1425), False, 'from dataclasses import dataclass, field\n'), ((1820, 1927), 'aiohttp.TCPConnector', 'aiohttp.TCPConnector', ([], {'limit': '(1)', 'verify_ssl': 'self.verify_ssl', 
'enable_cleanup_closed': '(True)', 'force_close': '(True)'}), '(limit=1, verify_ssl=self.verify_ssl,\n enable_cleanup_closed=True, force_close=True)\n', (1840, 1927), False, 'import aiohttp\n'), ((578, 602), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (600, 602), False, 'import asyncio\n'), ((1753, 1775), 're.compile', 're.compile', (['self.regex'], {}), '(self.regex)\n', (1763, 1775), False, 'import re\n'), ((820, 844), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (842, 844), False, 'import asyncio\n'), ((2112, 2153), 'aiohttp.ClientTimeout', 'aiohttp.ClientTimeout', ([], {'total': 'self.timeout'}), '(total=self.timeout)\n', (2133, 2153), False, 'import aiohttp\n'), ((3486, 3514), 'asyncio.sleep', 'asyncio.sleep', (['self.interval'], {}), '(self.interval)\n', (3499, 3514), False, 'import asyncio\n')] |
import pytest
import inspect
import starstar
def test_divide():
    """starstar.divide splits a kwargs dict among consumer functions."""
    def b(a=None, b=None, c=None):
        return 'b', a, b, c

    def c(d=None, e=None, f=None, c=None):
        return 'c', d, e, f, c

    # Disjoint keys go to whichever function declares them.
    assert starstar.divide({'a': 'a', 'e': 'e'}, b, c) == [{'a': 'a'}, {'e': 'e'}]
    # A key both functions accept is handed to both.
    assert starstar.divide({'a': 'a', 'e': 'e', 'c': 'c'}, b, c) == [
        {'a': 'a', 'c': 'c'}, {'e': 'e', 'c': 'c'}]

    unmatched = {'a': 'a', 'e': 'e', 'zzz': 'zzz'}
    # strict mode rejects keys no consumer accepts ...
    with pytest.raises(TypeError):
        starstar.divide(unmatched, b, c, mode='strict')
    # ... separate mode collects them in a trailing dict ...
    assert starstar.divide(unmatched, b, c, mode='separate') == [
        {'a': 'a'}, {'e': 'e'}, {'zzz': 'zzz'}]
    # ... and mode=None silently drops them.
    assert starstar.divide(unmatched, b, c, mode=None) == [{'a': 'a'}, {'e': 'e'}]

    def b2(a=None, b=None, c=None, **kw):
        return 'b', a, b, c, kw

    def c2(d=None, e=None, f=None, **kw):
        return 'c', d, e, f, kw

    # Functions taking **kwargs absorb unmatched keys by default.
    for extra in (dict(mode='strict'), {}, dict(varkw=True)):
        assert starstar.divide(unmatched, b2, c2, **extra) == [
            {'a': 'a', 'zzz': 'zzz'}, {'e': 'e', 'zzz': 'zzz'}]
    # varkw='first' gives the extras to the first function only.
    assert starstar.divide(unmatched, b2, c2, varkw='first') == [
        {'a': 'a', 'zzz': 'zzz'}, {'e': 'e'}]
    # varkw=False ignores the **kwargs catch-alls entirely.
    assert starstar.divide(unmatched, b2, c2, varkw=False, mode='ignore') == [
        {'a': 'a'}, {'e': 'e'}]
    # separate mode leaves the trailing dict empty when everything matched.
    assert starstar.divide(unmatched, b2, c2, mode='separate') == [
        {'a': 'a', 'zzz': 'zzz'}, {'e': 'e', 'zzz': 'zzz'}, {}]
assert starstar.divide(kwx, b2, c2, mode='separate') == [{'a': 'a', 'zzz': 'zzz'}, {'e': 'e', 'zzz': 'zzz'}, {}]
def test_signature():
    """starstar.signature caches the result and agrees with inspect."""
    def probe(a=None, b=None, c=None):
        return 'b', a, b, c

    via_inspect = inspect.signature(probe)
    via_starstar = starstar.signature(probe)
    # First pair of calls: distinct objects, equal content.
    assert via_inspect is not via_starstar
    assert via_inspect == via_starstar
    # starstar stored __signature__, so from now on inspect returns the
    # exact same object starstar does.
    assert inspect.signature(probe) is starstar.signature(probe)
    assert starstar.signature(probe) is starstar.signature(probe)
    # assert starstar.signature(via_starstar) is via_starstar
# assert starstar.signature(sssig) is sssig
def test_core():
    """traceto() rebuilds a wrapper's signature from the functions it feeds."""
    def b(a=None, b=None, c=None):
        return 'b', a, b, c

    def c(d=None, e=None, f=None):
        return 'c', d, e, f

    @starstar.traceto(b, c)
    def a(aaa=None, **kw):
        kw_b, kw_c = starstar.divide(kw, b, c)
        return b(**kw_b), c(**kw_c)

    expected_names = {'aaa', 'a', 'b', 'c', 'd', 'e', 'f'}
    assert set(a.__signature__.parameters) == expected_names
    expected_result = ('b', 'a', 'b', 'c'), ('c', 'd', 'e', 'f')
    assert a(**{name: name for name in expected_names}) == expected_result

    def x(x=None, y=None, z=None):
        return 'x', x, y, z

    # Tracing through an already-traced function, spelling the chain out.
    @starstar.traceto(a, x)
    def asdf(**kw):
        kw_a, kw_x = starstar.divide(kw, (a, b, c), x)
        return a(**kw_a), x(**kw_x)

    nested_names = expected_names | {'x', 'y', 'z'}
    assert set(asdf.__signature__.parameters) == nested_names
    nested_result = expected_result, ('x', 'x', 'y', 'z')
    assert asdf(**{name: name for name in nested_names}) == nested_result

    # Same thing, letting divide() follow the traced signature of `a`.
    @starstar.traceto(a, x)
    def asdf2(**kw):
        kw_a, kw_x = starstar.divide(kw, a, x)
        return a(**kw_a), x(**kw_x)

    assert set(asdf2.__signature__.parameters) == nested_names
    assert asdf2(**{name: name for name in nested_names}) == nested_result
assert asdf2(**{x: x for x in asdf_names}) == asdf_return
def test_merge_docs():
    """traceto(doc=True) and nestdoc() merge Args sections across functions.

    The expected docstrings below are compared literally, so their exact
    indentation matters.
    """
    def aaa(x, y):
        '''aaa doc
        Arguments:
            x (int): x from aaa
            y (int): y from aaa
        '''
    def bbb(y, z):
        '''bbb doc
        Arguments:
            y (int): y from bbb
            z (int): z from bbb
        '''
    # Wrapper with no Args of its own: traced Args are appended.
    def main(**kw):
        '''main doc'''
    doc = str(starstar.traceto(aaa, bbb, doc=True)(main).__doc__)
    print(doc)
    assert doc.strip() == '''
main doc
Args:
    x (int): x from aaa
    y (int): y from aaa
    z (int): z from bbb
'''.strip()
    # Wrapper with its own Args: they come first, traced ones follow.
    def main(**kw):
        '''main doc
        Arguments:
            a (int): from main
            b (int): from main
        '''
    doc = str(starstar.traceto(aaa, bbb, doc=True)(main).__doc__)
    print(doc)
    assert doc.strip() == '''
main doc
Args:
    a (int): from main
    b (int): from main
    x (int): x from aaa
    y (int): y from aaa
    z (int): z from bbb
'''.strip()
    # doc=False leaves the wrapper's docstring untouched; doc=True keeps
    # non-Args sections (Returns) after the merged Args.
    def main(**kw):
        '''main doc
        Returns:
            (int): some number
        '''
    doc = str(starstar.traceto(aaa, bbb, doc=False)(main).__doc__)
    print(doc)
    assert cleandoc(doc) == cleandoc('''
main doc
Returns:
    (int): some number
'''.strip())
    doc = str(starstar.traceto(aaa, bbb, doc=True)(main).__doc__)
    print(doc)
    assert doc.strip() == '''
main doc
Args:
    x (int): x from aaa
    y (int): y from aaa
    z (int): z from bbb
Returns:
    (int): some number
'''.strip()
    # nestdoc(): nested functions' Args are documented under a *_kw entry.
    def funcA(a, b):
        '''Another function
        Arguments:
            a (int): blah
            b (int): blallhak
        '''
    def funcB(b, c):
        '''Anotherrrr function
        Arguments:
            b (int): blah
            c (int): blallhak
        '''
    def funcC(**kw):
        '''Hellooo'''
    def funcnewlines(**kw):
        '''Hello
        Args:
            x: asdfasdf
                asdfasdf
            asdf
        '''
    funcD = starstar.nestdoc(funcA, b_kw=funcB)(funcC)
    # Sanity check: docstring_parser round-trips a multi-line Args entry.
    import docstring_parser as dcp
    doc_rec = dcp.compose(dcp.parse(funcnewlines.__doc__))
    # print(cleandoc(funcC.__doc__))
    # print(cleandoc(doc_rec))
    assert cleandoc(funcnewlines.__doc__) == cleandoc(doc_rec)
    print(funcD.__doc__)
    assert funcD.__doc__.strip() == '''
Hellooo
Args:
    funcA_kw (dict?): Keyword arguments for :func:`funcA`.
        - a (int): blah
        - b (int): blallhak
    b_kw (dict?): Keyword arguments for :func:`funcB`.
        - b (int): blah
        - c (int): blallhak
'''.strip()
def cleandoc(doc):
    """Dedent *doc* and blank out whitespace-only lines (others kept verbatim)."""
    cleaned = []
    for line in inspect.cleandoc(doc).split('\n'):
        cleaned.append(line if line.strip() else '')
    return '\n'.join(cleaned)
def test_wraps():
    """starstar.wraps copies the target's signature onto the wrapper."""
    def target(x, y, *aaa, z, **kwaaa):
        pass

    @starstar.wraps(target)
    def wrapper(q, *args, **kwargs):
        target(*args, **kwargs) + q

    assert tuple(inspect.signature(wrapper).parameters) == (
        'q', 'x', 'y', 'aaa', 'z', 'kwaaa')

    # skip_n drops the first n positional parameters of the target.
    @starstar.wraps(target, skip_n=1)
    def wrapper(q, *args, **kwargs):
        target(q, *args, **kwargs)

    assert tuple(inspect.signature(wrapper).parameters) == (
        'q', 'y', 'aaa', 'z', 'kwaaa')

    # skip_args drops parameters by name instead.
    @starstar.wraps(target, skip_args='x')
    def wrapper(q, *args, **kwargs):
        target(q, *args, **kwargs)

    assert tuple(inspect.signature(wrapper).parameters) == (
        'q', 'y', 'aaa', 'z', 'kwaaa')
def test_defaults():
    """starstar.defaults lets call-time defaults be updated and cleared.

    The asserts below also pin that inspect.signature reflects the
    currently-set defaults (inspect._empty where none is set).
    """
    @starstar.defaults
    def a():
        pass
    @starstar.defaults
    def a(x):
        return x
    # Required argument still required; unknown names rejected by update().
    with pytest.raises(TypeError):
        a()
    with pytest.raises(TypeError):
        a.update(y=10)
    # a.update(x=1) # FIXME: TypeError: Unexpected arguments: {'x'}
    # assert a() == 1
    @starstar.defaults
    def a(x, y=6, *args, z=7, **kw):
        return x, y, z, kw
    print(a)
    # Declared defaults are in effect before any update().
    assert a(5) == (5, 6, 7, {})
    assert a(10, 11, z=12) == (10, 11, 12, {})
    assert a.get() == {'y': 6, 'z': 7}
    assert tuple(inspect.signature(a).parameters) == ('x', 'y', 'args', 'z', 'kw')
    assert tuple(p.default for p in inspect.signature(a).parameters.values()) == (
        inspect._empty, 6, inspect._empty, 7, inspect._empty)
    # update() overrides defaults (and can make `x` optional).
    a.update(x=8, z=13)
    assert a() == (8, 6, 13, {})
    assert a(10, 11, z=12) == (10, 11, 12, {})
    assert tuple(inspect.signature(a).parameters) == ('x', 'y', 'args', 'z', 'kw')
    assert tuple(p.default for p in inspect.signature(a).parameters.values()) == (
        8, 6, inspect._empty, 13, inspect._empty)
    # clear() restores the declared defaults.
    a.clear()
    assert a(5) == (5, 6, 7, {})
    assert a(10, 11, z=12) == (10, 11, 12, {})
    assert tuple(inspect.signature(a).parameters) == ('x', 'y', 'args', 'z', 'kw')
    assert tuple(p.default for p in inspect.signature(a).parameters.values()) == (
        inspect._empty, 6, inspect._empty, 7, inspect._empty)
def test_as_akw():
    """as_args_kwargs splits a flat dict into (args, kwargs) for a signature."""
    def variadic(a, b, c, *x, d=0):
        return a, b, c, d

    args, kwargs = starstar.as_args_kwargs(variadic, {'a': 1, 'b': 2, 'c': 3, 'd': 4})
    assert args == [1, 2, 3]
    assert kwargs == {'d': 4}

    # Values under the *args parameter name (or the generic '*') extend
    # the positional list.
    args, kwargs = starstar.as_args_kwargs(
        variadic,
        {'a': 1, 'b': 2, 'c': 3, 'x': [6, 6, 6], '*': [7, 7, 7], 'd': 4})
    assert args == [1, 2, 3, 6, 6, 6, 7, 7, 7]
    assert kwargs == {'d': 4}

    def keyword_only(a, b, c, *, d=0):
        return a, b, c, d

    args, kwargs = starstar.as_args_kwargs(keyword_only, {'a': 1, 'b': 2, 'c': 3, 'd': 4})
    assert args == [1, 2, 3]
    assert kwargs == {'d': 4}
def test_kw_filtering():
    """filter_kw / filtered / unmatched_kw drop or report unknown keywords."""
    def func_a(a, b, c):
        return a + b + c

    candidates = {'b': 2, 'c': 3, 'x': 1, 'y': 2}
    # Only keys matching func_a's parameters survive.
    assert starstar.filter_kw(func_a, candidates) == {'b': 2, 'c': 3}
    # A **kwargs catch-all accepts every key.
    assert starstar.filter_kw(lambda b, **kw: kw, candidates) == candidates

    forgiving = starstar.filtered(func_a)
    forgiving(1, 2, c=3, x=1, y=2)  # x and y are silently discarded

    assert starstar.unmatched_kw(forgiving, 'a', 'b', 'z') == {'z'}
    assert starstar.unmatched_kw(forgiving, 'a', 'b', 'z', reversed=True) == {'c'}

    def func_b(a, b, c, **kw):
        return a + b + c, kw

    assert starstar.unmatched_kw(func_b, 'a', 'b', 'z') == set()
    assert starstar.unmatched_kw(func_b, 'a', 'b', 'z', reversed=True) == {'c'}
def test_get_args():
    """get_args filters a function's parameters by kind."""
    def func(a, b, *xs, c):
        ...

    def names(*args, **kwargs):
        return [param.name for param in starstar.get_args(func, *args, **kwargs)]

    assert names() == ['a', 'b', 'xs', 'c']
    assert names(starstar.POS) == ['a', 'b']              # positional-capable
    assert names(starstar.KW) == ['a', 'b', 'c']          # keyword-capable
    assert names(starstar.KW_ONLY) == ['c']
    assert names(ignore=starstar.VAR) == ['a', 'b', 'c']  # drop *xs
def test_kw2id():
kw = {'name': 'asdf', 'count': 10, 'enabled': True}
assert starstar.kw2id(kw, 'name', 'count', 'enabled', 'xxx') == 'name_asdf-count_10-enabled_True'
assert starstar.kw2id(kw, 'name', 'xxx', 'count', filter=False) == 'name_asdf-xxx_-count_10' | [
"starstar.filtered",
"starstar.unmatched_kw",
"inspect.signature",
"starstar.wraps",
"inspect.cleandoc",
"starstar.traceto",
"starstar.get_args",
"pytest.raises",
"starstar.nestdoc",
"starstar.filter_kw",
"starstar.divide",
"starstar.signature",
"docstring_parser.parse",
"starstar.kw2id",
... | [((1639, 1659), 'inspect.signature', 'inspect.signature', (['b'], {}), '(b)\n', (1656, 1659), False, 'import inspect\n'), ((1672, 1693), 'starstar.signature', 'starstar.signature', (['b'], {}), '(b)\n', (1690, 1693), False, 'import starstar\n'), ((2105, 2127), 'starstar.traceto', 'starstar.traceto', (['b', 'c'], {}), '(b, c)\n', (2121, 2127), False, 'import starstar\n'), ((2527, 2549), 'starstar.traceto', 'starstar.traceto', (['a', 'x'], {}), '(a, x)\n', (2543, 2549), False, 'import starstar\n'), ((2882, 2904), 'starstar.traceto', 'starstar.traceto', (['a', 'x'], {}), '(a, x)\n', (2898, 2904), False, 'import starstar\n'), ((5818, 5839), 'inspect.cleandoc', 'inspect.cleandoc', (['doc'], {}), '(doc)\n', (5834, 5839), False, 'import inspect\n'), ((5995, 6012), 'starstar.wraps', 'starstar.wraps', (['a'], {}), '(a)\n', (6009, 6012), False, 'import starstar\n'), ((6164, 6191), 'starstar.wraps', 'starstar.wraps', (['a'], {'skip_n': '(1)'}), '(a, skip_n=1)\n', (6178, 6191), False, 'import starstar\n'), ((6336, 6368), 'starstar.wraps', 'starstar.wraps', (['a'], {'skip_args': '"""x"""'}), "(a, skip_args='x')\n", (6350, 6368), False, 'import starstar\n'), ((8027, 8092), 'starstar.as_args_kwargs', 'starstar.as_args_kwargs', (['func_a', "{'a': 1, 'b': 2, 'c': 3, 'd': 4}"], {}), "(func_a, {'a': 1, 'b': 2, 'c': 3, 'd': 4})\n", (8050, 8092), False, 'import starstar\n'), ((8158, 8259), 'starstar.as_args_kwargs', 'starstar.as_args_kwargs', (['func_a', "{'a': 1, 'b': 2, 'c': 3, 'x': [6, 6, 6], '*': [7, 7, 7], 'd': 4}"], {}), "(func_a, {'a': 1, 'b': 2, 'c': 3, 'x': [6, 6, 6],\n '*': [7, 7, 7], 'd': 4})\n", (8181, 8259), False, 'import starstar\n'), ((8396, 8461), 'starstar.as_args_kwargs', 'starstar.as_args_kwargs', (['func_a', "{'a': 1, 'b': 2, 'c': 3, 'd': 4}"], {}), "(func_a, {'a': 1, 'b': 2, 'c': 3, 'd': 4})\n", (8419, 8461), False, 'import starstar\n'), ((8765, 8790), 'starstar.filtered', 'starstar.filtered', (['func_a'], {}), '(func_a)\n', (8782, 8790), False, 'import 
starstar\n'), ((245, 270), 'starstar.divide', 'starstar.divide', (['kw', 'b', 'c'], {}), '(kw, b, c)\n', (260, 270), False, 'import starstar\n'), ((346, 371), 'starstar.divide', 'starstar.divide', (['kw', 'b', 'c'], {}), '(kw, b, c)\n', (361, 371), False, 'import starstar\n'), ((470, 494), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (483, 494), False, 'import pytest\n'), ((504, 545), 'starstar.divide', 'starstar.divide', (['kwx', 'b', 'c'], {'mode': '"""strict"""'}), "(kwx, b, c, mode='strict')\n", (519, 545), False, 'import starstar\n'), ((557, 600), 'starstar.divide', 'starstar.divide', (['kwx', 'b', 'c'], {'mode': '"""separate"""'}), "(kwx, b, c, mode='separate')\n", (572, 600), False, 'import starstar\n'), ((656, 693), 'starstar.divide', 'starstar.divide', (['kwx', 'b', 'c'], {'mode': 'None'}), '(kwx, b, c, mode=None)\n', (671, 693), False, 'import starstar\n'), ((925, 968), 'starstar.divide', 'starstar.divide', (['kwx', 'b2', 'c2'], {'mode': '"""strict"""'}), "(kwx, b2, c2, mode='strict')\n", (940, 968), False, 'import starstar\n'), ((1036, 1064), 'starstar.divide', 'starstar.divide', (['kwx', 'b2', 'c2'], {}), '(kwx, b2, c2)\n', (1051, 1064), False, 'import starstar\n'), ((1132, 1172), 'starstar.divide', 'starstar.divide', (['kwx', 'b2', 'c2'], {'varkw': '(True)'}), '(kwx, b2, c2, varkw=True)\n', (1147, 1172), False, 'import starstar\n'), ((1240, 1283), 'starstar.divide', 'starstar.divide', (['kwx', 'b2', 'c2'], {'varkw': '"""first"""'}), "(kwx, b2, c2, varkw='first')\n", (1255, 1283), False, 'import starstar\n'), ((1337, 1393), 'starstar.divide', 'starstar.divide', (['kwx', 'b2', 'c2'], {'varkw': '(False)', 'mode': '"""ignore"""'}), "(kwx, b2, c2, varkw=False, mode='ignore')\n", (1352, 1393), False, 'import starstar\n'), ((1433, 1478), 'starstar.divide', 'starstar.divide', (['kwx', 'b2', 'c2'], {'mode': '"""separate"""'}), "(kwx, b2, c2, mode='separate')\n", (1448, 1478), False, 'import starstar\n'), ((1761, 1781), 
'inspect.signature', 'inspect.signature', (['b'], {}), '(b)\n', (1778, 1781), False, 'import inspect\n'), ((1785, 1806), 'starstar.signature', 'starstar.signature', (['b'], {}), '(b)\n', (1803, 1806), False, 'import starstar\n'), ((1854, 1875), 'starstar.signature', 'starstar.signature', (['b'], {}), '(b)\n', (1872, 1875), False, 'import starstar\n'), ((1879, 1900), 'starstar.signature', 'starstar.signature', (['b'], {}), '(b)\n', (1897, 1900), False, 'import starstar\n'), ((2176, 2201), 'starstar.divide', 'starstar.divide', (['kw', 'b', 'c'], {}), '(kw, b, c)\n', (2191, 2201), False, 'import starstar\n'), ((2591, 2624), 'starstar.divide', 'starstar.divide', (['kw', '(a, b, c)', 'x'], {}), '(kw, (a, b, c), x)\n', (2606, 2624), False, 'import starstar\n'), ((2947, 2972), 'starstar.divide', 'starstar.divide', (['kw', 'a', 'x'], {}), '(kw, a, x)\n', (2962, 2972), False, 'import starstar\n'), ((5165, 5200), 'starstar.nestdoc', 'starstar.nestdoc', (['funcA'], {'b_kw': 'funcB'}), '(funcA, b_kw=funcB)\n', (5181, 5200), False, 'import starstar\n'), ((5270, 5301), 'docstring_parser.parse', 'dcp.parse', (['funcnewlines.__doc__'], {}), '(funcnewlines.__doc__)\n', (5279, 5301), True, 'import docstring_parser as dcp\n'), ((6643, 6667), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (6656, 6667), False, 'import pytest\n'), ((6690, 6714), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (6703, 6714), False, 'import pytest\n'), ((8638, 8668), 'starstar.filter_kw', 'starstar.filter_kw', (['func_a', 'kw'], {}), '(func_a, kw)\n', (8656, 8668), False, 'import starstar\n'), ((8701, 8743), 'starstar.filter_kw', 'starstar.filter_kw', (['(lambda b, **kw: kw)', 'kw'], {}), '(lambda b, **kw: kw, kw)\n', (8719, 8743), False, 'import starstar\n'), ((8865, 8910), 'starstar.unmatched_kw', 'starstar.unmatched_kw', (['func_a1', '"""a"""', '"""b"""', '"""z"""'], {}), "(func_a1, 'a', 'b', 'z')\n", (8886, 8910), False, 'import starstar\n'), ((8931, 
8991), 'starstar.unmatched_kw', 'starstar.unmatched_kw', (['func_a1', '"""a"""', '"""b"""', '"""z"""'], {'reversed': '(True)'}), "(func_a1, 'a', 'b', 'z', reversed=True)\n", (8952, 8991), False, 'import starstar\n'), ((9070, 9114), 'starstar.unmatched_kw', 'starstar.unmatched_kw', (['func_b', '"""a"""', '"""b"""', '"""z"""'], {}), "(func_b, 'a', 'b', 'z')\n", (9091, 9114), False, 'import starstar\n'), ((9135, 9194), 'starstar.unmatched_kw', 'starstar.unmatched_kw', (['func_b', '"""a"""', '"""b"""', '"""z"""'], {'reversed': '(True)'}), "(func_b, 'a', 'b', 'z', reversed=True)\n", (9156, 9194), False, 'import starstar\n'), ((9772, 9825), 'starstar.kw2id', 'starstar.kw2id', (['kw', '"""name"""', '"""count"""', '"""enabled"""', '"""xxx"""'], {}), "(kw, 'name', 'count', 'enabled', 'xxx')\n", (9786, 9825), False, 'import starstar\n'), ((9874, 9930), 'starstar.kw2id', 'starstar.kw2id', (['kw', '"""name"""', '"""xxx"""', '"""count"""'], {'filter': '(False)'}), "(kw, 'name', 'xxx', 'count', filter=False)\n", (9888, 9930), False, 'import starstar\n'), ((3502, 3538), 'starstar.traceto', 'starstar.traceto', (['aaa', 'bbb'], {'doc': '(True)'}), '(aaa, bbb, doc=True)\n', (3518, 3538), False, 'import starstar\n'), ((3861, 3897), 'starstar.traceto', 'starstar.traceto', (['aaa', 'bbb'], {'doc': '(True)'}), '(aaa, bbb, doc=True)\n', (3877, 3897), False, 'import starstar\n'), ((4234, 4271), 'starstar.traceto', 'starstar.traceto', (['aaa', 'bbb'], {'doc': '(False)'}), '(aaa, bbb, doc=False)\n', (4250, 4271), False, 'import starstar\n'), ((4433, 4469), 'starstar.traceto', 'starstar.traceto', (['aaa', 'bbb'], {'doc': '(True)'}), '(aaa, bbb, doc=True)\n', (4449, 4469), False, 'import starstar\n'), ((6082, 6105), 'inspect.signature', 'inspect.signature', (['asdf'], {}), '(asdf)\n', (6099, 6105), False, 'import inspect\n'), ((6259, 6282), 'inspect.signature', 'inspect.signature', (['asdf'], {}), '(asdf)\n', (6276, 6282), False, 'import inspect\n'), ((6436, 6459), 'inspect.signature', 
'inspect.signature', (['asdf'], {}), '(asdf)\n', (6453, 6459), False, 'import inspect\n'), ((7070, 7090), 'inspect.signature', 'inspect.signature', (['a'], {}), '(a)\n', (7087, 7090), False, 'import inspect\n'), ((7405, 7425), 'inspect.signature', 'inspect.signature', (['a'], {}), '(a)\n', (7422, 7425), False, 'import inspect\n'), ((7718, 7738), 'inspect.signature', 'inspect.signature', (['a'], {}), '(a)\n', (7735, 7738), False, 'import inspect\n'), ((9296, 9319), 'starstar.get_args', 'starstar.get_args', (['func'], {}), '(func)\n', (9313, 9319), False, 'import starstar\n'), ((9374, 9411), 'starstar.get_args', 'starstar.get_args', (['func', 'starstar.POS'], {}), '(func, starstar.POS)\n', (9391, 9411), False, 'import starstar\n'), ((9455, 9491), 'starstar.get_args', 'starstar.get_args', (['func', 'starstar.KW'], {}), '(func, starstar.KW)\n', (9472, 9491), False, 'import starstar\n'), ((9540, 9581), 'starstar.get_args', 'starstar.get_args', (['func', 'starstar.KW_ONLY'], {}), '(func, starstar.KW_ONLY)\n', (9557, 9581), False, 'import starstar\n'), ((9620, 9664), 'starstar.get_args', 'starstar.get_args', (['func'], {'ignore': 'starstar.VAR'}), '(func, ignore=starstar.VAR)\n', (9637, 9664), False, 'import starstar\n'), ((7172, 7192), 'inspect.signature', 'inspect.signature', (['a'], {}), '(a)\n', (7189, 7192), False, 'import inspect\n'), ((7507, 7527), 'inspect.signature', 'inspect.signature', (['a'], {}), '(a)\n', (7524, 7527), False, 'import inspect\n'), ((7820, 7840), 'inspect.signature', 'inspect.signature', (['a'], {}), '(a)\n', (7837, 7840), False, 'import inspect\n')] |
import pandas as pd
#https://archive.ics.uci.edu/ml/machine-learning-databases/mushroom/agaricus-lepiota.names
#
# TODO: Load up the mushroom dataset into dataframe 'X'
# Verify you did it properly.
# Indices shouldn't be doubled.
# Header information is on the dataset's website at the UCI ML Repo
# Check NA Encoding
#
# .. your code here ..
# Mushroom classification exercise (UCI agaricus-lepiota dataset):
# one-hot encode the categorical features and fit a decision tree.
X = pd.read_csv("Datasets/agaricus-lepiota.data", header = None)
# Column names from the UCI attribute description; the first column is the
# edible/poisonous class label.
X.columns = ("label",
             "cap-shape",
             "cap-surface",
             "cap-color",
             "bruises?",
             "odor",
             "gill-attachment",
             "gill-spacing",
             "gill-size",
             "gill-color",
             "stalk-shape",
             "stalk-root",
             "stalk-surface-above-ring",
             "stalk-surface-below-ring",
             "stalk-color-above-ring",
             "stalk-color-below-ring",
             "veil-type",
             "veil-color",
             "ring-number",
             "ring-type",
             "spore-print-color",
             "population",
             "habitat")
# INFO: An easy way to show which rows have nans in them
print (X[pd.isnull(X).any(axis=1)])
# Drop every row containing a NaN.
# NOTE(review): the UCI file encodes missing values as '?'; without
# na_values='?' in read_csv these are not NaN, so dropna() may remove
# nothing — verify against the printed shape.
X = X.dropna(how = "any")
print (X.shape)
# Separate the class label from the features and encode it numerically:
# p (poisonous) -> 0, e (edible) -> 1.
y = X["label"]
X = X.drop("label", axis = 1)
y = y.map({'p':0, 'e':1})
# All features are categorical strings: one-hot encode the whole frame.
X = pd.get_dummies(X)
# Train/test split: 30% held out, fixed seed for reproducibility.
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=7)
# Decision tree with default hyperparameters.
from sklearn import tree
DT = tree.DecisionTreeClassifier()
# Fit on the training split; report accuracy (as a percentage) on the
# held-out split.
DT.fit(X_train, y_train)
score = DT.score(X_test, y_test)
print ("High-Dimensionality Score: ", round((score*100), 3))
# Export the fitted tree to Graphviz .dot for visualisation.
# NOTE(review): newer scikit-learn expects the estimator itself
# (export_graphviz(DT, ...)) rather than the private DT.tree_ attribute.
tree.export_graphviz(DT.tree_, out_file='tree.dot', feature_names=X_train.columns)
from subprocess import call
#call(['dot', '-T', 'png', 'tree.dot', '-o', 'tree.png'])
| [
"pandas.isnull",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"sklearn.tree.DecisionTreeClassifier",
"sklearn.tree.export_graphviz",
"pandas.get_dummies"
] | [((353, 411), 'pandas.read_csv', 'pd.read_csv', (['"""Datasets/agaricus-lepiota.data"""'], {'header': 'None'}), "('Datasets/agaricus-lepiota.data', header=None)\n", (364, 411), True, 'import pandas as pd\n'), ((1686, 1703), 'pandas.get_dummies', 'pd.get_dummies', (['X'], {}), '(X)\n', (1700, 1703), True, 'import pandas as pd\n'), ((1971, 2024), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.3)', 'random_state': '(7)'}), '(X, y, test_size=0.3, random_state=7)\n', (1987, 2024), False, 'from sklearn.model_selection import train_test_split\n'), ((2147, 2176), 'sklearn.tree.DecisionTreeClassifier', 'tree.DecisionTreeClassifier', ([], {}), '()\n', (2174, 2176), False, 'from sklearn import tree\n'), ((2696, 2783), 'sklearn.tree.export_graphviz', 'tree.export_graphviz', (['DT.tree_'], {'out_file': '"""tree.dot"""', 'feature_names': 'X_train.columns'}), "(DT.tree_, out_file='tree.dot', feature_names=X_train.\n columns)\n", (2716, 2783), False, 'from sklearn import tree\n'), ((1173, 1185), 'pandas.isnull', 'pd.isnull', (['X'], {}), '(X)\n', (1182, 1185), True, 'import pandas as pd\n')] |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ResNet model for classifying images from CIFAR-10 dataset.
Support single-host training with one or multiple devices.
ResNet as proposed in:
<NAME>, <NAME>, <NAME>, <NAME>
Deep Residual Learning for Image Recognition. arXiv:1512.03385
CIFAR-10 as in:
http://www.cs.toronto.edu/~kriz/cifar.html
"""
from __future__ import division
from __future__ import print_function
import argparse
import datetime
import functools
import itertools
import os
# Silence tf for prettier logging of Bayesian Optimization
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
from bayes_opt import BayesianOptimization
from bayes_opt import UtilityFunction
import cifar10
import cifar10_model
import cifar10_utils
import numpy as np
import six
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
# import tensorflow_addons as tfa
# Setting verbosity to INFO will log training and evaluation details.
tf.logging.set_verbosity(tf.logging.ERROR)
import ray
from ray.tune import run, Trainable
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.suggest.bayesopt import BayesOptSearch
import logging
logging.getLogger("tensorflow").setLevel(logging.ERROR)
parser = argparse.ArgumentParser()
parser.add_argument(
"--data-dir",
type=str,
required=True,
help="The directory where the CIFAR-10 input data is stored.",
)
parser.add_argument(
"--job-dir",
type=str,
required=True,
help="The directory where the model will be stored.",
)
parser.add_argument(
"--variable-strategy",
choices=["CPU", "GPU"],
type=str,
default="CPU",
help="Where to locate variable operations",
)
parser.add_argument(
"--num-gpus",
type=int,
default=1,
help="The number of gpus used. Uses only CPU if set to 0.",
)
parser.add_argument(
"--num-layers",
type=int,
default=20,
help="The number of layers of the model.",
)
parser.add_argument(
"--train-steps",
type=int,
default=80000,
help="The number of steps to use for training.",
)
# parser.add_argument(
# "--train-batch-size",
# type=int,
# default=128,
# help="Batch size for training.",
# )
parser.add_argument(
"--eval-batch-size",
type=int,
default=500,
help="Batch size for validation.",
)
parser.add_argument(
"--num-batches-for-eval",
type=int,
default=10,
help="Number of batches for validation.",
)
# parser.add_argument(
# "--momentum",
# type=float,
# default=0.9,
# help="Momentum for MomentumOptimizer.",
# )
# parser.add_argument(
# "--weight-decay",
# type=float,
# default=2e-4,
# help="Weight decay for convolutions.",
# )
# parser.add_argument(
# "--learning-rate",
# type=float,
# default=0.1,
# help="""\
# This is the inital learning rate value. The learning rate will decrease
# during training. For more details check the model_fn implementation in
# this file.\
# """,
# )
parser.add_argument(
"--use-distortion-for-training",
type=bool,
default=True,
help="If doing image distortion for training.",
)
parser.add_argument(
"--sync",
action="store_true",
default=False,
help="""\
If present when running in a distributed environment will run on sync mode.\
""",
)
parser.add_argument(
"--num-intra-threads",
type=int,
default=0,
help="""\
Number of threads to use for intra-op parallelism. When training on CPU
set to 0 to have the system pick the appropriate number or alternatively
set it to the number of physical CPU cores.\
""",
)
parser.add_argument(
"--num-inter-threads",
type=int,
default=0,
help="""\
Number of threads to use for inter-op parallelism. If set to 0, the
system will pick an appropriate number.\
""",
)
parser.add_argument(
"--data-format",
type=str,
default=None,
help="""\
If not set, the data format best for the training device is used.
Allowed values: channels_first (NCHW) channels_last (NHWC).\
""",
)
parser.add_argument(
"--log-device-placement",
action="store_true",
default=False,
help="Whether to log device placement.",
)
# parser.add_argument(
# "--batch-norm-decay",
# type=float,
# default=0.997,
# help="Decay for batch norm.",
# )
# parser.add_argument(
# "--batch-norm-epsilon",
# type=float,
# default=1e-5,
# help="Epsilon for batch norm.",
# )
# Add arguments related to BayesOpt
parser.add_argument(
"--smoke-test",
action="store_true",
default=False,
help="Finish quickly for testing",
)
# parser.add_argument(
# "--verbose", type=bool, default=False, help="Verbose output of training."
# )
parser.add_argument(
"--strategy",
type=str,
default="proposed",
help="Strategy for discretizing. Possible options are: basic, proposed.",
)
parser.add_argument(
"--metric",
type=str,
default="accuracy",
help="""\
Whether to use accuracy or loss for Bayesian optimization.\
""",
)
# TODO: better name?
parser.add_argument(
"--precision",
type=int,
default=1000,
help="""\
Size of grid\
""",
)
parser.add_argument(
"--log-path",
type=str,
default=os.getcwd() + "/train.log",
help="""
""",
)
parser.add_argument(
"--ray-address",
type=str,
default="",
help="""
""",
)
args = parser.parse_args()
# Filling in shared values here
hparams = {}
hparams["num_layers"] = args.num_layers
hparams["eval_batch_size"] = args.eval_batch_size
hparams["sync"] = args.sync
hparams["num_inter_threads"] = args.num_inter_threads
hparams["data_format"] = args.data_format
def get_model_fn(num_gpus, variable_strategy, num_workers):
"""Returns a function that will build the resnet model."""
def _resnet_model_fn(features, labels, mode, params):
"""Resnet model body.
Support single host, one or more GPU training. Parameter distribution can
be either one of the following scheme.
1. CPU is the parameter server and manages gradient updates.
2. Parameters are distributed evenly across all GPUs, and the first GPU
manages gradient updates.
Args:
features: a list of tensors, one for each tower
labels: a list of tensors, one for each tower
mode: ModeKeys.TRAIN or EVAL
params: Hyperparameters suitable for tuning
Returns:
A EstimatorSpec object.
"""
is_training = mode == tf.estimator.ModeKeys.TRAIN
weight_decay = params.weight_decay
momentum = params.momentum
tower_features = features
tower_labels = labels
tower_losses = []
tower_gradvars = []
tower_preds = []
# channels first (NCHW) is normally optimal on GPU and channels last (NHWC)
# on CPU. The exception is Intel MKL on CPU which is optimal with
# channels_last.
data_format = params.data_format
if not data_format:
if num_gpus == 0:
data_format = "channels_last"
else:
data_format = "channels_first"
if num_gpus == 0:
num_devices = 1
device_type = "cpu"
else:
num_devices = num_gpus
device_type = "gpu"
for i in range(num_devices):
worker_device = "/{}:{}".format(device_type, i)
if variable_strategy == "CPU":
device_setter = cifar10_utils.local_device_setter(
worker_device=worker_device
)
elif variable_strategy == "GPU":
device_setter = cifar10_utils.local_device_setter(
ps_device_type="gpu",
worker_device=worker_device,
ps_strategy=tf.contrib.training.GreedyLoadBalancingStrategy(
num_gpus, tf.contrib.training.byte_size_load_fn
),
)
with tf.variable_scope("resnet", reuse=bool(i != 0)):
with tf.name_scope("tower_%d" % i) as name_scope:
with tf.device(device_setter):
loss, gradvars, preds = _tower_fn(
is_training,
weight_decay,
tower_features[i],
tower_labels[i],
data_format,
params.num_layers,
params.batch_norm_decay,
params.batch_norm_epsilon,
)
tower_losses.append(loss)
tower_gradvars.append(gradvars)
tower_preds.append(preds)
if i == 0:
# Only trigger batch_norm moving mean and variance update from
# the 1st tower. Ideally, we should grab the updates from all
# towers but these stats accumulate extremely fast so we can
# ignore the other stats from the other towers without
# significant detriment.
update_ops = tf.get_collection(
tf.GraphKeys.UPDATE_OPS, name_scope
)
# Now compute global loss and gradients.
gradvars = []
with tf.name_scope("gradient_averaging"):
all_grads = {}
for grad, var in itertools.chain(*tower_gradvars):
if grad is not None:
all_grads.setdefault(var, []).append(grad)
for var, grads in six.iteritems(all_grads):
# Average gradients on the same device as the variables
# to which they apply.
with tf.device(var.device):
if len(grads) == 1:
avg_grad = grads[0]
else:
avg_grad = tf.multiply(
tf.add_n(grads), 1.0 / len(grads)
)
gradvars.append((avg_grad, var))
# Device that runs the ops to apply global gradient updates.
consolidation_device = (
"/gpu:0" if variable_strategy == "GPU" else "/cpu:0"
)
with tf.device(consolidation_device):
# Suggested learning rate scheduling from
# https://github.com/ppwwyyxx/tensorpack/blob/master/examples/ResNet/cifar10-resnet.py#L155
num_batches_per_epoch = cifar10.Cifar10DataSet.num_examples_per_epoch(
"train"
) // (
params.train_batch_size * num_workers
)
boundaries = [
num_batches_per_epoch * x
for x in np.array([80, 120, 160], dtype=np.int64)
]
staged_lr = [
params.learning_rate * x for x in [1, 0.1, 0.01, 0.001]
]
learning_rate = tf.train.piecewise_constant(
tf.train.get_global_step(), boundaries, staged_lr
)
loss = tf.reduce_mean(tower_losses, name="loss")
# examples_sec_hook = cifar10_utils.ExamplesPerSecondHook(
# params.train_batch_size, every_n_steps=10
# )
# tensors_to_log = {"learning_rate": learning_rate, "loss": loss}
# logging_hook = tf.train.LoggingTensorHook(
# tensors=tensors_to_log, every_n_iter=100
# )
# train_hooks = [logging_hook, examples_sec_hook]
train_hooks = []
# Hyper-parameter "momentum" is only used for the Momentum Optimizer
# Other optimizers use their default parameters.
if params.optimizer == "momentum":
optimizer = tf.train.MomentumOptimizer(
learning_rate=learning_rate, momentum=momentum
)
elif params.optimizer == "adam":
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
elif params.optimizer == "adagrad":
optimizer = tf.train.AdagradOptimizer(
learning_rate=learning_rate
)
elif params.optimizer == "adadelta":
optimizer = tf.train.AdadeltaOptimizer(
learning_rate=learning_rate
)
elif params.optimizer == "sgd":
optimizer = tf.train.GradientDescentOptimizer(
learning_rate=learning_rate
)
elif params.optimizer == "rmsprop":
optimizer = tf.train.RMSPropOptimizer(
learning_rate=learning_rate
)
else:
raise ValueError("unrecognized optimizer name")
# TODO: RAdam is implemented in tensorflow-addons v0.6, which requires tf 2.0
# Upgrade code by removing tf.contrib modules.
# optimizer = tfa.optimizers.RectifiedAdam(lr=learning_rate)
if params.sync:
optimizer = tf.train.SyncReplicasOptimizer(
optimizer, replicas_to_aggregate=num_workers
)
sync_replicas_hook = optimizer.make_session_run_hook(
params.is_chief
)
train_hooks.append(sync_replicas_hook)
# Create single grouped train op
train_op = [
optimizer.apply_gradients(
gradvars, global_step=tf.train.get_global_step()
)
]
train_op.extend(update_ops)
train_op = tf.group(*train_op)
predictions = {
"classes": tf.concat(
[p["classes"] for p in tower_preds], axis=0
),
"probabilities": tf.concat(
[p["probabilities"] for p in tower_preds], axis=0
),
}
stacked_labels = tf.concat(labels, axis=0)
metrics = {
"accuracy": tf.metrics.accuracy(
stacked_labels, predictions["classes"]
)
}
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op,
training_hooks=train_hooks,
eval_metric_ops=metrics,
)
return _resnet_model_fn
def _tower_fn(
is_training,
weight_decay,
feature,
label,
data_format,
num_layers,
batch_norm_decay,
batch_norm_epsilon,
):
"""Build computation tower (Resnet).
Args:
is_training: true if is training graph.
weight_decay: weight regularization strength, a float.
feature: a Tensor.
label: a Tensor.
data_format: channels_last (NHWC) or channels_first (NCHW).
num_layers: number of layers, an int.
batch_norm_decay: decay for batch normalization, a float.
batch_norm_epsilon: epsilon for batch normalization, a float.
Returns:
A tuple with the loss for the tower, the gradients and parameters, and
predictions.
"""
model = cifar10_model.ResNetCifar10(
num_layers,
batch_norm_decay=batch_norm_decay,
batch_norm_epsilon=batch_norm_epsilon,
is_training=is_training,
data_format=data_format,
)
logits = model.forward_pass(feature, input_data_format="channels_last")
tower_pred = {
"classes": tf.argmax(input=logits, axis=1),
"probabilities": tf.nn.softmax(logits),
}
tower_loss = tf.losses.sparse_softmax_cross_entropy(
logits=logits, labels=label
)
tower_loss = tf.reduce_mean(tower_loss)
model_params = tf.trainable_variables()
tower_loss += weight_decay * tf.add_n(
[tf.nn.l2_loss(v) for v in model_params]
)
tower_grad = tf.gradients(tower_loss, model_params)
return tower_loss, zip(tower_grad, model_params), tower_pred
def input_fn(
data_dir, subset, num_shards, batch_size, use_distortion_for_training=True
):
"""Create input graph for model.
Args:
data_dir: Directory where TFRecords representing the dataset are located.
subset: one of 'train', 'validation' and 'eval'.
num_shards: num of towers participating in data-parallel training.
batch_size: total batch size for training to be divided by the number of
shards.
use_distortion_for_training: True to use distortions.
Returns:
two lists of tensors for features and labels, each of num_shards length.
"""
with tf.device("/cpu:0"):
use_distortion = subset == "train" and use_distortion_for_training
dataset = cifar10.Cifar10DataSet(data_dir, subset, use_distortion)
image_batch, label_batch = dataset.make_batch(batch_size)
if num_shards <= 1:
# No GPU available or only 1 GPU.
return [image_batch], [label_batch]
# Note that passing num=batch_size is safe here, even though
# dataset.batch(batch_size) can, in some cases, return fewer than batch_size
# examples. This is because it does so only when repeating for a limited
# number of epochs, but our dataset repeats forever.
image_batch = tf.unstack(image_batch, num=batch_size, axis=0)
label_batch = tf.unstack(label_batch, num=batch_size, axis=0)
feature_shards = [[] for i in range(num_shards)]
label_shards = [[] for i in range(num_shards)]
for i in xrange(batch_size):
idx = i % num_shards
feature_shards[idx].append(image_batch[i])
label_shards[idx].append(label_batch[i])
feature_shards = [tf.parallel_stack(x) for x in feature_shards]
label_shards = [tf.parallel_stack(x) for x in label_shards]
return feature_shards, label_shards
def build_estimator(
data_dir,
num_gpus,
variable_strategy,
run_config,
hparams,
use_distortion_for_training=True,
ws=None,
):
"""Returns an Experiment function.
Experiments perform training on several workers in parallel,
in other words experiments know how to invoke train and eval in a sensible
fashion for distributed training. Arguments passed directly to this
function are not tunable, all other arguments should be passed within
tf.HParams, passed to the enclosed function.
Args:
data_dir: str. Location of the data for input_fns.
num_gpus: int. Number of GPUs on each worker.
variable_strategy: String. CPU to use CPU as the parameter server
and GPU to use the GPUs as the parameter server.
use_distortion_for_training: bool. See cifar10.Cifar10DataSet.
Returns:
A function (tf.estimator.RunConfig, tf.contrib.training.HParams) ->
tf.contrib.learn.Experiment.
Suitable for use by tf.contrib.learn.learn_runner, which will run various
methods on Experiment (train, evaluate) based on information
about the current runner in `run_config`.
"""
# Create estimator.
train_input_fn = functools.partial(
input_fn,
data_dir,
subset="train",
num_shards=num_gpus,
batch_size=hparams.train_batch_size,
use_distortion_for_training=use_distortion_for_training,
)
eval_input_fn = functools.partial(
input_fn,
data_dir,
subset="validation",
batch_size=hparams.eval_batch_size,
num_shards=num_gpus,
)
# validation: 5000, eval:10000
num_eval_examples = cifar10.Cifar10DataSet.num_examples_per_epoch(
"validation"
)
if num_eval_examples % hparams.eval_batch_size != 0:
raise ValueError(
"validation set size must be multiple of eval_batch_size"
)
classifier = tf.estimator.Estimator(
model_fn=get_model_fn(
num_gpus, variable_strategy, run_config.num_worker_replicas or 1
),
config=run_config,
params=hparams,
warm_start_from=ws,
)
return train_input_fn, eval_input_fn, classifier
def get_idx(pbounds, names):
param_names = list(pbounds.keys())
param_names.sort()
param_list = [0] * len(param_names)
for i in range(len(param_names)):
if param_names[i] in names:
param_list[i] = 1
return param_list
class MyTrainableEstimator(Trainable):
def _setup(self, config):
# The env variable is on deprecation path, default is set to off.
os.environ["TF_SYNC_ON_FINISH"] = "0"
os.environ["TF_ENABLE_WINOGRAD_NONFUSED"] = "1"
# Session configuration.
sess_config = tf.ConfigProto(
allow_soft_placement=True,
log_device_placement=args.log_device_placement,
intra_op_parallelism_threads=args.num_intra_threads,
gpu_options=tf.GPUOptions(
force_gpu_compatible=True, allow_growth=True
),
)
# Convert to actual hyperparameter values here using the grid (discrete) input
hparams["train_batch_size"] = 2 ** (int(config["batch_size"]) + 5)
hparams["momentum"] = 0.4 + (
0.55 * int(config["momentum"]) / args.precision
)
hparams["weight_decay"] = 1e-4 + (
1e-4 * int(config["weight_decay"]) / args.precision
)
hparams["batch_norm_decay"] = 0.8 + (
0.199 * int(config["batch_norm_decay"]) / args.precision
)
hparams["batch_norm_epsilon"] = 1e-5 + (
0.00099 * int(config["batch_norm_epsilon"]) / args.precision
)
hparams["learning_rate"] = 0.01 + (
0.1 * int(config["learning_rate"]) / args.precision
)
opt = int(config["optimizer"])
if opt == 0:
hparams["optimizer"] = "momentum"
elif opt == 1:
hparams["optimizer"] = "adam"
elif opt == 2:
hparams["optimizer"] = "adagrad"
elif opt == 3:
hparams["optimizer"] = "adadelta"
elif opt == 4:
hparams["optimizer"] = "sgd"
else:
hparams["optimizer"] = "rmsprop"
# Calculate number of steps per one epoch
self.train_steps = cifar10.Cifar10DataSet.num_examples_per_epoch(
"train"
) // (hparams["train_batch_size"])
# TODO: Fix checkpoint dir
run_config = cifar10_utils.RunConfig(
session_config=sess_config,
model_dir=None,
save_checkpoints_secs=None,
save_checkpoints_steps=self.train_steps,
keep_checkpoint_max=None,
keep_checkpoint_every_n_hours=None,
)
self.run_config = run_config
self.train_input_fn, self.eval_input_fn, self.estimator = build_estimator(
data_dir=args.data_dir,
num_gpus=args.num_gpus,
variable_strategy=args.variable_strategy,
use_distortion_for_training=args.use_distortion_for_training,
run_config=run_config,
hparams=tf.contrib.training.HParams(
is_chief=run_config.is_chief, **hparams
),
)
self.logger = logging.getLogger("metrics")
self.logger.setLevel(logging.INFO)
file_handler = logging.FileHandler(args.log_path)
self.logger.addHandler(file_handler)
self.logger.info(f"[CONFIG] ID={self._experiment_id} config={hparams}")
# self.steps = self.train_steps
def _train(self):
self.estimator.train(
input_fn=self.train_input_fn, steps=self.train_steps
)
metrics = self.estimator.evaluate(
input_fn=self.eval_input_fn,
steps=args.eval_batch_size * args.num_batches_for_eval,
)
# self.steps = self.steps + self.train_steps
self.logger.info(
f"[RESULT] ID={self._experiment_id} iter={self._iteration} result={metrics}"
)
return metrics
def _stop(self):
self.estimator = None
def _save(self, checkpoint_dir):
lastest_checkpoint = self.estimator.latest_checkpoint()
tf.logging.info(
"Saving checkpoint {} for tune".format(lastest_checkpoint)
)
f = open(checkpoint_dir + "/path.txt", "w")
f.write(lastest_checkpoint)
f.flush()
f.close()
return checkpoint_dir + "/path.txt"
def _restore(self, checkpoint_path):
f = open(checkpoint_path, "r")
path = f.readline().strip()
tf.logging.info("Opening checkpoint {} for tune".format(path))
f.flush()
f.close()
ws = tf.estimator.WarmStartSettings(ckpt_to_initialize_from=path)
self.train_input_fn, self.eval_input_fn, self.estimator = build_estimator(
data_dir=args.data_dir,
num_gpus=args.num_gpus,
variable_strategy=args.variable_strategy,
use_distortion_for_training=args.use_distortion_for_training,
run_config=self.run_config,
hparams=tf.contrib.training.HParams(
is_chief=self.run_config.is_chief, **hparams
),
warm_start_from=ws,
)
def main():
# print(args)
# Minor hack of generating a grid of 100 values each.
# By setting all parameters to be discrete values over range (0,100),
# we can map each integer value to corresponding hyperparameter value in training code.
pbounds = {
"batch_size": (0, 6),
"momentum": (0, args.precision),
"weight_decay": (0, args.precision),
"batch_norm_decay": (0, args.precision),
"batch_norm_epsilon": (0, args.precision),
"learning_rate": (0, args.precision),
"optimizer": (0, 6),
}
discrete = [
"batch_size",
"momentum",
"weight_decay",
"batch_norm_decay",
"batch_norm_epsilon",
"learning_rate",
"optimizer",
]
categorical = []
discrete_indices = get_idx(pbounds, discrete)
categorical_indices = get_idx(pbounds, categorical)
train_spec = {
"resources_per_trial": {"cpu": 12, "gpu": 1},
"stop": {
"accuracy": 93,
"training_iteration": 2 if args.smoke_test else 99999,
},
"config": {
"exp": "ckpt", # the name of directory where training results are saved
"log_level": "ERROR",
},
"num_samples": 100000,
"local_dir": "/home/ddoyoon/BayesianOptimization/examples/cnn/cifar10_estimator/ckpt",
"checkpoint_at_end": True,
}
algo = BayesOptSearch(
args.strategy,
pbounds,
discrete=discrete_indices,
categorical=categorical_indices,
max_concurrent=12,
metric="accuracy",
mode="max",
utility_kwargs={"kind": "ucb", "kappa": 2.5, "xi": 0.0},
)
# TODO: Initial values will not be discretized as of now.
# Manually probing with discrete values instead.
# algo.optimizer.probe(
# params={
# "batch_size": 0,
# "momentum": 0,
# "weight_decay": 0,
# "batch_norm_decay": 0,
# "batch_norm_epsilon": 0,
# "learning_rate": 0,
# },
# lazy=True,
# )
scheduler = AsyncHyperBandScheduler(
metric="accuracy",
mode="max",
max_t=200,
grace_period=20,
reduction_factor=2,
)
experiment_start = datetime.datetime.utcnow()
logger = logging.getLogger("metrics")
logger.setLevel(logging.INFO)
file_handler = logging.FileHandler(args.log_path)
logger.addHandler(file_handler)
logger.info(f"[ TIME ] start={experiment_start}")
run(
MyTrainableEstimator,
name="bo_resnet_cifar10",
search_alg=algo,
scheduler=scheduler,
**train_spec,
)
experiment_end = datetime.datetime.utcnow()
experiment_duration = experiment_end - experiment_start
logger.info(f"[ TIME ] end={experiment_end}")
logger.info(
f"[ TIME ] end-to-end (min)={experiment_duration.total_seconds() / 60}"
)
if __name__ == "__main__":
if args.ray_address != "":
ray.init(redis_address=args.ray_address, logging_level=logging.ERROR)
else:
ray.init()
if args.num_gpus > 0:
assert tf.test.is_gpu_available(), "Requested GPUs but none found."
if args.num_gpus < 0:
raise ValueError(
'Invalid GPU count: "--num-gpus" must be 0 or a positive integer.'
)
if args.num_gpus == 0 and args.variable_strategy == "GPU":
raise ValueError(
"num-gpus=0, CPU must be used as parameter server. Set"
"--variable-strategy=CPU."
)
if (args.num_layers - 2) % 6 != 0:
raise ValueError("Invalid --num-layers parameter.")
main()
| [
"logging.getLogger",
"ray.tune.suggest.bayesopt.BayesOptSearch",
"tensorflow.unstack",
"itertools.chain",
"cifar10.Cifar10DataSet",
"tensorflow.logging.set_verbosity",
"tensorflow.estimator.EstimatorSpec",
"tensorflow.gradients",
"tensorflow.group",
"numpy.array",
"six.moves.xrange",
"tensorfl... | [((1608, 1650), 'tensorflow.logging.set_verbosity', 'tf.logging.set_verbosity', (['tf.logging.ERROR'], {}), '(tf.logging.ERROR)\n', (1632, 1650), True, 'import tensorflow as tf\n'), ((1891, 1916), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1914, 1916), False, 'import argparse\n'), ((16042, 16213), 'cifar10_model.ResNetCifar10', 'cifar10_model.ResNetCifar10', (['num_layers'], {'batch_norm_decay': 'batch_norm_decay', 'batch_norm_epsilon': 'batch_norm_epsilon', 'is_training': 'is_training', 'data_format': 'data_format'}), '(num_layers, batch_norm_decay=batch_norm_decay,\n batch_norm_epsilon=batch_norm_epsilon, is_training=is_training,\n data_format=data_format)\n', (16069, 16213), False, 'import cifar10_model\n'), ((16472, 16539), 'tensorflow.losses.sparse_softmax_cross_entropy', 'tf.losses.sparse_softmax_cross_entropy', ([], {'logits': 'logits', 'labels': 'label'}), '(logits=logits, labels=label)\n', (16510, 16539), True, 'import tensorflow as tf\n'), ((16571, 16597), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['tower_loss'], {}), '(tower_loss)\n', (16585, 16597), True, 'import tensorflow as tf\n'), ((16618, 16642), 'tensorflow.trainable_variables', 'tf.trainable_variables', ([], {}), '()\n', (16640, 16642), True, 'import tensorflow as tf\n'), ((16759, 16797), 'tensorflow.gradients', 'tf.gradients', (['tower_loss', 'model_params'], {}), '(tower_loss, model_params)\n', (16771, 16797), True, 'import tensorflow as tf\n'), ((20008, 20185), 'functools.partial', 'functools.partial', (['input_fn', 'data_dir'], {'subset': '"""train"""', 'num_shards': 'num_gpus', 'batch_size': 'hparams.train_batch_size', 'use_distortion_for_training': 'use_distortion_for_training'}), "(input_fn, data_dir, subset='train', num_shards=num_gpus,\n batch_size=hparams.train_batch_size, use_distortion_for_training=\n use_distortion_for_training)\n", (20025, 20185), False, 'import functools\n'), ((20253, 20373), 'functools.partial', 'functools.partial', 
(['input_fn', 'data_dir'], {'subset': '"""validation"""', 'batch_size': 'hparams.eval_batch_size', 'num_shards': 'num_gpus'}), "(input_fn, data_dir, subset='validation', batch_size=\n hparams.eval_batch_size, num_shards=num_gpus)\n", (20270, 20373), False, 'import functools\n'), ((20476, 20535), 'cifar10.Cifar10DataSet.num_examples_per_epoch', 'cifar10.Cifar10DataSet.num_examples_per_epoch', (['"""validation"""'], {}), "('validation')\n", (20521, 20535), False, 'import cifar10\n'), ((27538, 27751), 'ray.tune.suggest.bayesopt.BayesOptSearch', 'BayesOptSearch', (['args.strategy', 'pbounds'], {'discrete': 'discrete_indices', 'categorical': 'categorical_indices', 'max_concurrent': '(12)', 'metric': '"""accuracy"""', 'mode': '"""max"""', 'utility_kwargs': "{'kind': 'ucb', 'kappa': 2.5, 'xi': 0.0}"}), "(args.strategy, pbounds, discrete=discrete_indices,\n categorical=categorical_indices, max_concurrent=12, metric='accuracy',\n mode='max', utility_kwargs={'kind': 'ucb', 'kappa': 2.5, 'xi': 0.0})\n", (27552, 27751), False, 'from ray.tune.suggest.bayesopt import BayesOptSearch\n'), ((28246, 28352), 'ray.tune.schedulers.AsyncHyperBandScheduler', 'AsyncHyperBandScheduler', ([], {'metric': '"""accuracy"""', 'mode': '"""max"""', 'max_t': '(200)', 'grace_period': '(20)', 'reduction_factor': '(2)'}), "(metric='accuracy', mode='max', max_t=200,\n grace_period=20, reduction_factor=2)\n", (28269, 28352), False, 'from ray.tune.schedulers import AsyncHyperBandScheduler\n'), ((28420, 28446), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (28444, 28446), False, 'import datetime\n'), ((28460, 28488), 'logging.getLogger', 'logging.getLogger', (['"""metrics"""'], {}), "('metrics')\n", (28477, 28488), False, 'import logging\n'), ((28542, 28576), 'logging.FileHandler', 'logging.FileHandler', (['args.log_path'], {}), '(args.log_path)\n', (28561, 28576), False, 'import logging\n'), ((28672, 28779), 'ray.tune.run', 'run', (['MyTrainableEstimator'], {'name': 
'"""bo_resnet_cifar10"""', 'search_alg': 'algo', 'scheduler': 'scheduler'}), "(MyTrainableEstimator, name='bo_resnet_cifar10', search_alg=algo,\n scheduler=scheduler, **train_spec)\n", (28675, 28779), False, 'from ray.tune import run, Trainable\n'), ((28845, 28871), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (28869, 28871), False, 'import datetime\n'), ((1825, 1856), 'logging.getLogger', 'logging.getLogger', (['"""tensorflow"""'], {}), "('tensorflow')\n", (1842, 1856), False, 'import logging\n'), ((15026, 15175), 'tensorflow.estimator.EstimatorSpec', 'tf.estimator.EstimatorSpec', ([], {'mode': 'mode', 'predictions': 'predictions', 'loss': 'loss', 'train_op': 'train_op', 'training_hooks': 'train_hooks', 'eval_metric_ops': 'metrics'}), '(mode=mode, predictions=predictions, loss=loss,\n train_op=train_op, training_hooks=train_hooks, eval_metric_ops=metrics)\n', (15052, 15175), True, 'import tensorflow as tf\n'), ((16367, 16398), 'tensorflow.argmax', 'tf.argmax', ([], {'input': 'logits', 'axis': '(1)'}), '(input=logits, axis=1)\n', (16376, 16398), True, 'import tensorflow as tf\n'), ((16425, 16446), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['logits'], {}), '(logits)\n', (16438, 16446), True, 'import tensorflow as tf\n'), ((17494, 17513), 'tensorflow.device', 'tf.device', (['"""/cpu:0"""'], {}), "('/cpu:0')\n", (17503, 17513), True, 'import tensorflow as tf\n'), ((17608, 17664), 'cifar10.Cifar10DataSet', 'cifar10.Cifar10DataSet', (['data_dir', 'subset', 'use_distortion'], {}), '(data_dir, subset, use_distortion)\n', (17630, 17664), False, 'import cifar10\n'), ((18172, 18219), 'tensorflow.unstack', 'tf.unstack', (['image_batch'], {'num': 'batch_size', 'axis': '(0)'}), '(image_batch, num=batch_size, axis=0)\n', (18182, 18219), True, 'import tensorflow as tf\n'), ((18242, 18289), 'tensorflow.unstack', 'tf.unstack', (['label_batch'], {'num': 'batch_size', 'axis': '(0)'}), '(label_batch, num=batch_size, axis=0)\n', (18252, 18289), True, 
'import tensorflow as tf\n'), ((18419, 18437), 'six.moves.xrange', 'xrange', (['batch_size'], {}), '(batch_size)\n', (18425, 18437), False, 'from six.moves import xrange\n'), ((23323, 23529), 'cifar10_utils.RunConfig', 'cifar10_utils.RunConfig', ([], {'session_config': 'sess_config', 'model_dir': 'None', 'save_checkpoints_secs': 'None', 'save_checkpoints_steps': 'self.train_steps', 'keep_checkpoint_max': 'None', 'keep_checkpoint_every_n_hours': 'None'}), '(session_config=sess_config, model_dir=None,\n save_checkpoints_secs=None, save_checkpoints_steps=self.train_steps,\n keep_checkpoint_max=None, keep_checkpoint_every_n_hours=None)\n', (23346, 23529), False, 'import cifar10_utils\n'), ((24114, 24142), 'logging.getLogger', 'logging.getLogger', (['"""metrics"""'], {}), "('metrics')\n", (24131, 24142), False, 'import logging\n'), ((24209, 24243), 'logging.FileHandler', 'logging.FileHandler', (['args.log_path'], {}), '(args.log_path)\n', (24228, 24243), False, 'import logging\n'), ((25567, 25627), 'tensorflow.estimator.WarmStartSettings', 'tf.estimator.WarmStartSettings', ([], {'ckpt_to_initialize_from': 'path'}), '(ckpt_to_initialize_from=path)\n', (25597, 25627), True, 'import tensorflow as tf\n'), ((29154, 29223), 'ray.init', 'ray.init', ([], {'redis_address': 'args.ray_address', 'logging_level': 'logging.ERROR'}), '(redis_address=args.ray_address, logging_level=logging.ERROR)\n', (29162, 29223), False, 'import ray\n'), ((29242, 29252), 'ray.init', 'ray.init', ([], {}), '()\n', (29250, 29252), False, 'import ray\n'), ((29295, 29321), 'tensorflow.test.is_gpu_available', 'tf.test.is_gpu_available', ([], {}), '()\n', (29319, 29321), True, 'import tensorflow as tf\n'), ((5946, 5957), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5955, 5957), False, 'import os\n'), ((10175, 10210), 'tensorflow.name_scope', 'tf.name_scope', (['"""gradient_averaging"""'], {}), "('gradient_averaging')\n", (10188, 10210), True, 'import tensorflow as tf\n'), ((10268, 10300), 'itertools.chain', 
'itertools.chain', (['*tower_gradvars'], {}), '(*tower_gradvars)\n', (10283, 10300), False, 'import itertools\n'), ((10432, 10456), 'six.iteritems', 'six.iteritems', (['all_grads'], {}), '(all_grads)\n', (10445, 10456), False, 'import six\n'), ((11099, 11130), 'tensorflow.device', 'tf.device', (['consolidation_device'], {}), '(consolidation_device)\n', (11108, 11130), True, 'import tensorflow as tf\n'), ((11903, 11944), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['tower_losses'], {'name': '"""loss"""'}), "(tower_losses, name='loss')\n", (11917, 11944), True, 'import tensorflow as tf\n'), ((14474, 14493), 'tensorflow.group', 'tf.group', (['*train_op'], {}), '(*train_op)\n', (14482, 14493), True, 'import tensorflow as tf\n'), ((14820, 14845), 'tensorflow.concat', 'tf.concat', (['labels'], {'axis': '(0)'}), '(labels, axis=0)\n', (14829, 14845), True, 'import tensorflow as tf\n'), ((18606, 18626), 'tensorflow.parallel_stack', 'tf.parallel_stack', (['x'], {}), '(x)\n', (18623, 18626), True, 'import tensorflow as tf\n'), ((18676, 18696), 'tensorflow.parallel_stack', 'tf.parallel_stack', (['x'], {}), '(x)\n', (18693, 18696), True, 'import tensorflow as tf\n'), ((23156, 23210), 'cifar10.Cifar10DataSet.num_examples_per_epoch', 'cifar10.Cifar10DataSet.num_examples_per_epoch', (['"""train"""'], {}), "('train')\n", (23201, 23210), False, 'import cifar10\n'), ((8198, 8260), 'cifar10_utils.local_device_setter', 'cifar10_utils.local_device_setter', ([], {'worker_device': 'worker_device'}), '(worker_device=worker_device)\n', (8231, 8260), False, 'import cifar10_utils\n'), ((11326, 11380), 'cifar10.Cifar10DataSet.num_examples_per_epoch', 'cifar10.Cifar10DataSet.num_examples_per_epoch', (['"""train"""'], {}), "('train')\n", (11371, 11380), False, 'import cifar10\n'), ((11819, 11845), 'tensorflow.train.get_global_step', 'tf.train.get_global_step', ([], {}), '()\n', (11843, 11845), True, 'import tensorflow as tf\n'), ((12615, 12689), 'tensorflow.train.MomentumOptimizer', 
'tf.train.MomentumOptimizer', ([], {'learning_rate': 'learning_rate', 'momentum': 'momentum'}), '(learning_rate=learning_rate, momentum=momentum)\n', (12641, 12689), True, 'import tensorflow as tf\n'), ((13902, 13978), 'tensorflow.train.SyncReplicasOptimizer', 'tf.train.SyncReplicasOptimizer', (['optimizer'], {'replicas_to_aggregate': 'num_workers'}), '(optimizer, replicas_to_aggregate=num_workers)\n', (13932, 13978), True, 'import tensorflow as tf\n'), ((14550, 14604), 'tensorflow.concat', 'tf.concat', (["[p['classes'] for p in tower_preds]"], {'axis': '(0)'}), "([p['classes'] for p in tower_preds], axis=0)\n", (14559, 14604), True, 'import tensorflow as tf\n'), ((14677, 14737), 'tensorflow.concat', 'tf.concat', (["[p['probabilities'] for p in tower_preds]"], {'axis': '(0)'}), "([p['probabilities'] for p in tower_preds], axis=0)\n", (14686, 14737), True, 'import tensorflow as tf\n'), ((14898, 14957), 'tensorflow.metrics.accuracy', 'tf.metrics.accuracy', (['stacked_labels', "predictions['classes']"], {}), "(stacked_labels, predictions['classes'])\n", (14917, 14957), True, 'import tensorflow as tf\n'), ((16695, 16711), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['v'], {}), '(v)\n', (16708, 16711), True, 'import tensorflow as tf\n'), ((21783, 21842), 'tensorflow.GPUOptions', 'tf.GPUOptions', ([], {'force_gpu_compatible': '(True)', 'allow_growth': '(True)'}), '(force_gpu_compatible=True, allow_growth=True)\n', (21796, 21842), True, 'import tensorflow as tf\n'), ((23981, 24049), 'tensorflow.contrib.training.HParams', 'tf.contrib.training.HParams', ([], {'is_chief': 'run_config.is_chief'}), '(is_chief=run_config.is_chief, **hparams)\n', (24008, 24049), True, 'import tensorflow as tf\n'), ((25971, 26044), 'tensorflow.contrib.training.HParams', 'tf.contrib.training.HParams', ([], {'is_chief': 'self.run_config.is_chief'}), '(is_chief=self.run_config.is_chief, **hparams)\n', (25998, 26044), True, 'import tensorflow as tf\n'), ((8783, 8812), 'tensorflow.name_scope', 
'tf.name_scope', (["('tower_%d' % i)"], {}), "('tower_%d' % i)\n", (8796, 8812), True, 'import tensorflow as tf\n'), ((10590, 10611), 'tensorflow.device', 'tf.device', (['var.device'], {}), '(var.device)\n', (10599, 10611), True, 'import tensorflow as tf\n'), ((11578, 11618), 'numpy.array', 'np.array', (['[80, 120, 160]'], {'dtype': 'np.int64'}), '([80, 120, 160], dtype=np.int64)\n', (11586, 11618), True, 'import numpy as np\n'), ((12801, 12852), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': 'learning_rate'}), '(learning_rate=learning_rate)\n', (12823, 12852), True, 'import tensorflow as tf\n'), ((8853, 8877), 'tensorflow.device', 'tf.device', (['device_setter'], {}), '(device_setter)\n', (8862, 8877), True, 'import tensorflow as tf\n'), ((12929, 12983), 'tensorflow.train.AdagradOptimizer', 'tf.train.AdagradOptimizer', ([], {'learning_rate': 'learning_rate'}), '(learning_rate=learning_rate)\n', (12954, 12983), True, 'import tensorflow as tf\n'), ((14352, 14378), 'tensorflow.train.get_global_step', 'tf.train.get_global_step', ([], {}), '()\n', (14376, 14378), True, 'import tensorflow as tf\n'), ((8534, 8635), 'tensorflow.contrib.training.GreedyLoadBalancingStrategy', 'tf.contrib.training.GreedyLoadBalancingStrategy', (['num_gpus', 'tf.contrib.training.byte_size_load_fn'], {}), '(num_gpus, tf.contrib.\n training.byte_size_load_fn)\n', (8581, 8635), True, 'import tensorflow as tf\n'), ((9973, 10027), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.UPDATE_OPS', 'name_scope'], {}), '(tf.GraphKeys.UPDATE_OPS, name_scope)\n', (9990, 10027), True, 'import tensorflow as tf\n'), ((10799, 10814), 'tensorflow.add_n', 'tf.add_n', (['grads'], {}), '(grads)\n', (10807, 10814), True, 'import tensorflow as tf\n'), ((13099, 13154), 'tensorflow.train.AdadeltaOptimizer', 'tf.train.AdadeltaOptimizer', ([], {'learning_rate': 'learning_rate'}), '(learning_rate=learning_rate)\n', (13125, 13154), True, 'import tensorflow as tf\n'), 
((13265, 13327), 'tensorflow.train.GradientDescentOptimizer', 'tf.train.GradientDescentOptimizer', ([], {'learning_rate': 'learning_rate'}), '(learning_rate=learning_rate)\n', (13298, 13327), True, 'import tensorflow as tf\n'), ((13442, 13496), 'tensorflow.train.RMSPropOptimizer', 'tf.train.RMSPropOptimizer', ([], {'learning_rate': 'learning_rate'}), '(learning_rate=learning_rate)\n', (13467, 13496), True, 'import tensorflow as tf\n')] |
"""add org_name
Revision ID: 0647beeadb7d
Revises: <PASSWORD>
Create Date: 2022-05-12 19:35:03.484368
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "0647beeadb7d"  # id of this migration
down_revision = "<PASSWORD>"  # previous revision id (redacted placeholder)
branch_labels = None
depends_on = None
def upgrade():
    """Add the nullable ``org_name`` column to ``projects`` and replace the
    single (name, owner, cluster) unique index with two org-aware ones."""
    op.add_column(
        "projects",
        sa.Column("org_name", sa.String(), nullable=True),
    )
    # The old unique index does not account for org_name, so drop it first.
    op.drop_index("projects_name_owner_cluster_uq", table_name="projects")
    # Partial (PostgreSQL) index: (name, owner, cluster) stays unique among
    # rows that have no organization.
    op.create_index(
        "projects_name_owner_cluster_org_null_uq",
        "projects",
        ["name", "owner", "cluster"],
        unique=True,
        postgresql_where=sa.text("org_name IS NULL"),
    )
    # Org-scoped rows: uniqueness includes org_name as well.
    op.create_index(
        "projects_name_owner_cluster_org_uq",
        "projects",
        ["name", "owner", "cluster", "org_name"],
        unique=True,
    )
def downgrade():
    """Revert :func:`upgrade`: drop the org-aware indexes and the
    ``org_name`` column, then restore the original unique index.

    The original revision dropped ``projects_name_owner_cluster_uq`` in
    ``upgrade`` but never recreated it here, so a downgrade left the table
    without its uniqueness constraint.
    """
    op.drop_index("projects_name_owner_cluster_org_uq", table_name="projects")
    op.drop_index("projects_name_owner_cluster_org_null_uq", table_name="projects")
    op.drop_column("projects", "org_name")
    # Restore the pre-upgrade unique index; columns inferred from the
    # org-null partial index created in upgrade() -- confirm against the
    # prior revision if it is available.
    op.create_index(
        "projects_name_owner_cluster_uq",
        "projects",
        ["name", "owner", "cluster"],
        unique=True,
    )
| [
"sqlalchemy.text",
"alembic.op.drop_column",
"sqlalchemy.String",
"alembic.op.drop_index",
"alembic.op.create_index"
] | [((417, 487), 'alembic.op.drop_index', 'op.drop_index', (['"""projects_name_owner_cluster_uq"""'], {'table_name': '"""projects"""'}), "('projects_name_owner_cluster_uq', table_name='projects')\n", (430, 487), False, 'from alembic import op\n'), ((703, 827), 'alembic.op.create_index', 'op.create_index', (['"""projects_name_owner_cluster_org_uq"""', '"""projects"""', "['name', 'owner', 'cluster', 'org_name']"], {'unique': '(True)'}), "('projects_name_owner_cluster_org_uq', 'projects', ['name',\n 'owner', 'cluster', 'org_name'], unique=True)\n", (718, 827), False, 'from alembic import op\n'), ((886, 960), 'alembic.op.drop_index', 'op.drop_index', (['"""projects_name_owner_cluster_org_uq"""'], {'table_name': '"""projects"""'}), "('projects_name_owner_cluster_org_uq', table_name='projects')\n", (899, 960), False, 'from alembic import op\n'), ((965, 1044), 'alembic.op.drop_index', 'op.drop_index', (['"""projects_name_owner_cluster_org_null_uq"""'], {'table_name': '"""projects"""'}), "('projects_name_owner_cluster_org_null_uq', table_name='projects')\n", (978, 1044), False, 'from alembic import op\n'), ((1049, 1087), 'alembic.op.drop_column', 'op.drop_column', (['"""projects"""', '"""org_name"""'], {}), "('projects', 'org_name')\n", (1063, 1087), False, 'from alembic import op\n'), ((378, 389), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (387, 389), True, 'import sqlalchemy as sa\n'), ((664, 691), 'sqlalchemy.text', 'sa.text', (['"""org_name IS NULL"""'], {}), "('org_name IS NULL')\n", (671, 691), True, 'import sqlalchemy as sa\n')] |
import unittest
from fastNLP import Vocabulary
from fastNLP.embeddings import BertEmbedding, BertWordPieceEncoder
import torch
import os
from fastNLP import DataSet
@unittest.skipIf('TRAVIS' in os.environ, "Skip in travis")
class TestDownload(unittest.TestCase):
    """Smoke tests that fetch the pretrained English BERT weights and run
    a forward pass under every pooling configuration."""
    def test_download(self):
        vocabulary = Vocabulary().add_word_lst("This is a test .".split())
        embedder = BertEmbedding(vocabulary, model_dir_or_name='en')
        token_ids = torch.LongTensor([[2, 3, 4, 0]])
        print(embedder(token_ids).size())
        for pooling in ('first', 'last', 'max', 'avg'):
            for with_cls_sep in (True, False):
                embedder = BertEmbedding(vocabulary, model_dir_or_name='en',
                                      pool_method=pooling, include_cls_sep=with_cls_sep)
                print(embedder(token_ids).size())
    def test_word_drop(self):
        vocabulary = Vocabulary().add_word_lst("This is a test .".split())
        embedder = BertEmbedding(vocabulary, model_dir_or_name='en', dropout=0.1, word_dropout=0.2)
        for _ in range(10):
            token_ids = torch.LongTensor([[2, 3, 4, 0]])
            print(embedder(token_ids).size())
class TestBertEmbedding(unittest.TestCase):
    """Unit tests for BertEmbedding backed by the small local BERT fixture."""
    def test_bert_embedding_1(self):
        vocab = Vocabulary().add_word_lst("this is a test . [SEP] NotInBERT".split())
        embed = BertEmbedding(vocab, model_dir_or_name='test/data_for_tests/embedding/small_bert', word_dropout=0.1)
        # toggling requires_grad must not break the forward pass
        requires_grad = embed.requires_grad
        embed.requires_grad = not requires_grad
        embed.train()
        words = torch.LongTensor([[2, 3, 4, 0]])
        result = embed(words)
        self.assertEqual(result.size(), (1, 4, 16))
        embed = BertEmbedding(vocab, model_dir_or_name='test/data_for_tests/embedding/small_bert', word_dropout=0.1,
                              only_use_pretrain_bpe=True)
        embed.eval()
        words = torch.LongTensor([[2, 3, 4, 0]])
        result = embed(words)
        self.assertEqual(result.size(), (1, 4, 16))
        # over-long input is automatically truncated instead of raising
        embed = BertEmbedding(vocab, model_dir_or_name='test/data_for_tests/embedding/small_bert', word_dropout=0.1,
                              only_use_pretrain_bpe=True, auto_truncate=True)
        words = torch.LongTensor([[2, 3, 4, 1]*10,
                                  [2, 3]+[0]*38])
        result = embed(words)
        self.assertEqual(result.size(), (2, 40, 16))
    def test_bert_embedding_2(self):
        # verify that only_use_pretrain_bpe and truncate_embed behave as expected
        with open('test/data_for_tests/embedding/small_bert/vocab.txt', 'r', encoding='utf-8') as f:
            num_word = len(f.readlines())
        Embedding = BertEmbedding
        vocab = Vocabulary().add_word_lst("this is a texta and [SEP] NotInBERT".split())
        # note: 'tokenzier' below is the (misspelled) attribute name in fastNLP
        embed1 = Embedding(vocab, model_dir_or_name='test/data_for_tests/embedding/small_bert',
                          only_use_pretrain_bpe=True, truncate_embed=True, min_freq=1)
        embed_bpe_vocab_size = len(vocab)-1 + 2  # exclude NotInBERT; add ##a and [CLS]
        self.assertEqual(embed_bpe_vocab_size, len(embed1.model.tokenzier.vocab))
        embed2 = Embedding(vocab, model_dir_or_name='test/data_for_tests/embedding/small_bert',
                          only_use_pretrain_bpe=True, truncate_embed=False, min_freq=1)
        embed_bpe_vocab_size = num_word  # full pretrained vocab kept; NotInBERT excluded
        self.assertEqual(embed_bpe_vocab_size, len(embed2.model.tokenzier.vocab))
        embed3 = Embedding(vocab, model_dir_or_name='test/data_for_tests/embedding/small_bert',
                          only_use_pretrain_bpe=False, truncate_embed=True, min_freq=1)
        embed_bpe_vocab_size = len(vocab)+2  # adds ##a and [CLS]
        self.assertEqual(embed_bpe_vocab_size, len(embed3.model.tokenzier.vocab))
        embed4 = Embedding(vocab, model_dir_or_name='test/data_for_tests/embedding/small_bert',
                          only_use_pretrain_bpe=False, truncate_embed=False, min_freq=1)
        embed_bpe_vocab_size = num_word+1  # adds ##a
        self.assertEqual(embed_bpe_vocab_size, len(embed4.model.tokenzier.vocab))
        # the four configurations must produce identical embeddings for
        # words present in every vocabulary
        embed1.eval()
        embed2.eval()
        embed3.eval()
        embed4.eval()
        tensor = torch.LongTensor([[vocab.to_index(w) for w in 'this is a texta and'.split()]])
        t1 = embed1(tensor)
        t2 = embed2(tensor)
        t3 = embed3(tensor)
        t4 = embed4(tensor)
        self.assertEqual((t1-t2).sum(), 0)
        self.assertEqual((t1-t3).sum(), 0)
        self.assertEqual((t1-t4).sum(), 0)
class TestBertWordPieceEncoder(unittest.TestCase):
    """Tests for BertWordPieceEncoder and its consistency with BertEmbedding."""
    def test_bert_word_piece_encoder(self):
        # indexing a DataSet should add a 'word_pieces' field and the encoder
        # should accept raw word-piece ids
        embed = BertWordPieceEncoder(model_dir_or_name='test/data_for_tests/embedding/small_bert', word_dropout=0.1)
        ds = DataSet({'words': ["this is a test . [SEP]".split()]})
        embed.index_datasets(ds, field_name='words')
        self.assertTrue(ds.has_field('word_pieces'))
        result = embed(torch.LongTensor([[1,2,3,4]]))
    def test_bert_embed_eq_bert_piece_encoder(self):
        # BertEmbedding with pool_method='first' should reproduce the raw
        # word-piece encoder outputs at word-initial positions
        ds = DataSet({'words': ["this is a texta model vocab".split(), 'this is'.split()]})
        encoder = BertWordPieceEncoder(model_dir_or_name='test/data_for_tests/embedding/small_bert')
        encoder.eval()
        encoder.index_datasets(ds, field_name='words')
        word_pieces = torch.LongTensor(ds['word_pieces'].get([0, 1]))
        word_pieces_res = encoder(word_pieces)
        vocab = Vocabulary()
        vocab.from_dataset(ds, field_name='words')
        vocab.index_dataset(ds, field_name='words', new_field_name='words')
        ds.set_input('words')
        words = torch.LongTensor(ds['words'].get([0, 1]))
        embed = BertEmbedding(vocab, model_dir_or_name='test/data_for_tests/embedding/small_bert',
                              pool_method='first', include_cls_sep=True, pooled_cls=False)
        embed.eval()
        words_res = embed(words)
        # check that the word-piece outputs line up with the word-level outputs;
        # the index shift after position 5 presumably accounts for a word that
        # splits into multiple word pieces -- TODO confirm against the tokenizer
        self.assertEqual((word_pieces_res[0, :5]-words_res[0, :5]).sum(), 0)
        self.assertEqual((word_pieces_res[0, 6:]-words_res[0, 5:]).sum(), 0)
        self.assertEqual((word_pieces_res[1, :3]-words_res[1, :3]).sum(), 0)
"torch.LongTensor",
"unittest.skipIf",
"fastNLP.embeddings.BertEmbedding",
"fastNLP.embeddings.BertWordPieceEncoder",
"fastNLP.Vocabulary"
] | [((168, 225), 'unittest.skipIf', 'unittest.skipIf', (["('TRAVIS' in os.environ)", '"""Skip in travis"""'], {}), "('TRAVIS' in os.environ, 'Skip in travis')\n", (183, 225), False, 'import unittest\n'), ((400, 444), 'fastNLP.embeddings.BertEmbedding', 'BertEmbedding', (['vocab'], {'model_dir_or_name': '"""en"""'}), "(vocab, model_dir_or_name='en')\n", (413, 444), False, 'from fastNLP.embeddings import BertEmbedding, BertWordPieceEncoder\n'), ((461, 493), 'torch.LongTensor', 'torch.LongTensor', (['[[2, 3, 4, 0]]'], {}), '([[2, 3, 4, 0]])\n', (477, 493), False, 'import torch\n'), ((965, 1040), 'fastNLP.embeddings.BertEmbedding', 'BertEmbedding', (['vocab'], {'model_dir_or_name': '"""en"""', 'dropout': '(0.1)', 'word_dropout': '(0.2)'}), "(vocab, model_dir_or_name='en', dropout=0.1, word_dropout=0.2)\n", (978, 1040), False, 'from fastNLP.embeddings import BertEmbedding, BertWordPieceEncoder\n'), ((1346, 1451), 'fastNLP.embeddings.BertEmbedding', 'BertEmbedding', (['vocab'], {'model_dir_or_name': '"""test/data_for_tests/embedding/small_bert"""', 'word_dropout': '(0.1)'}), "(vocab, model_dir_or_name=\n 'test/data_for_tests/embedding/small_bert', word_dropout=0.1)\n", (1359, 1451), False, 'from fastNLP.embeddings import BertEmbedding, BertWordPieceEncoder\n'), ((1577, 1609), 'torch.LongTensor', 'torch.LongTensor', (['[[2, 3, 4, 0]]'], {}), '([[2, 3, 4, 0]])\n', (1593, 1609), False, 'import torch\n'), ((1709, 1846), 'fastNLP.embeddings.BertEmbedding', 'BertEmbedding', (['vocab'], {'model_dir_or_name': '"""test/data_for_tests/embedding/small_bert"""', 'word_dropout': '(0.1)', 'only_use_pretrain_bpe': '(True)'}), "(vocab, model_dir_or_name=\n 'test/data_for_tests/embedding/small_bert', word_dropout=0.1,\n only_use_pretrain_bpe=True)\n", (1722, 1846), False, 'from fastNLP.embeddings import BertEmbedding, BertWordPieceEncoder\n'), ((1905, 1937), 'torch.LongTensor', 'torch.LongTensor', (['[[2, 3, 4, 0]]'], {}), '([[2, 3, 4, 0]])\n', (1921, 1937), False, 'import torch\n'), 
((2056, 2213), 'fastNLP.embeddings.BertEmbedding', 'BertEmbedding', (['vocab'], {'model_dir_or_name': '"""test/data_for_tests/embedding/small_bert"""', 'word_dropout': '(0.1)', 'only_use_pretrain_bpe': '(True)', 'auto_truncate': '(True)'}), "(vocab, model_dir_or_name=\n 'test/data_for_tests/embedding/small_bert', word_dropout=0.1,\n only_use_pretrain_bpe=True, auto_truncate=True)\n", (2069, 2213), False, 'from fastNLP.embeddings import BertEmbedding, BertWordPieceEncoder\n'), ((2252, 2308), 'torch.LongTensor', 'torch.LongTensor', (['[[2, 3, 4, 1] * 10, [2, 3] + [0] * 38]'], {}), '([[2, 3, 4, 1] * 10, [2, 3] + [0] * 38])\n', (2268, 2308), False, 'import torch\n'), ((4680, 4785), 'fastNLP.embeddings.BertWordPieceEncoder', 'BertWordPieceEncoder', ([], {'model_dir_or_name': '"""test/data_for_tests/embedding/small_bert"""', 'word_dropout': '(0.1)'}), "(model_dir_or_name=\n 'test/data_for_tests/embedding/small_bert', word_dropout=0.1)\n", (4700, 4785), False, 'from fastNLP.embeddings import BertEmbedding, BertWordPieceEncoder\n'), ((5173, 5260), 'fastNLP.embeddings.BertWordPieceEncoder', 'BertWordPieceEncoder', ([], {'model_dir_or_name': '"""test/data_for_tests/embedding/small_bert"""'}), "(model_dir_or_name=\n 'test/data_for_tests/embedding/small_bert')\n", (5193, 5260), False, 'from fastNLP.embeddings import BertEmbedding, BertWordPieceEncoder\n'), ((5468, 5480), 'fastNLP.Vocabulary', 'Vocabulary', ([], {}), '()\n', (5478, 5480), False, 'from fastNLP import Vocabulary\n'), ((5712, 5864), 'fastNLP.embeddings.BertEmbedding', 'BertEmbedding', (['vocab'], {'model_dir_or_name': '"""test/data_for_tests/embedding/small_bert"""', 'pool_method': '"""first"""', 'include_cls_sep': '(True)', 'pooled_cls': '(False)'}), "(vocab, model_dir_or_name=\n 'test/data_for_tests/embedding/small_bert', pool_method='first',\n include_cls_sep=True, pooled_cls=False)\n", (5725, 5864), False, 'from fastNLP.embeddings import BertEmbedding, BertWordPieceEncoder\n'), ((1089, 1121), 
'torch.LongTensor', 'torch.LongTensor', (['[[2, 3, 4, 0]]'], {}), '([[2, 3, 4, 0]])\n', (1105, 1121), False, 'import torch\n'), ((4978, 5010), 'torch.LongTensor', 'torch.LongTensor', (['[[1, 2, 3, 4]]'], {}), '([[1, 2, 3, 4]])\n', (4994, 5010), False, 'import torch\n'), ((330, 342), 'fastNLP.Vocabulary', 'Vocabulary', ([], {}), '()\n', (340, 342), False, 'from fastNLP import Vocabulary\n'), ((664, 770), 'fastNLP.embeddings.BertEmbedding', 'BertEmbedding', (['vocab'], {'model_dir_or_name': '"""en"""', 'pool_method': 'pool_method', 'include_cls_sep': 'include_cls_sep'}), "(vocab, model_dir_or_name='en', pool_method=pool_method,\n include_cls_sep=include_cls_sep)\n", (677, 770), False, 'from fastNLP.embeddings import BertEmbedding, BertWordPieceEncoder\n'), ((895, 907), 'fastNLP.Vocabulary', 'Vocabulary', ([], {}), '()\n', (905, 907), False, 'from fastNLP import Vocabulary\n'), ((1260, 1272), 'fastNLP.Vocabulary', 'Vocabulary', ([], {}), '()\n', (1270, 1272), False, 'from fastNLP import Vocabulary\n'), ((2708, 2720), 'fastNLP.Vocabulary', 'Vocabulary', ([], {}), '()\n', (2718, 2720), False, 'from fastNLP import Vocabulary\n')] |
import torch
import cv2 as cv
import numpy as np
from sklearn.neighbors import NearestNeighbors
from .model_utils import spread_feature
def optimize_image_mask(image_mask, sp_image, nK=4, th=1e-2):
    """Remove isolated foreground pixels from an image mask.

    A masked pixel is kept only when all of its ``nK`` nearest neighbours
    (in the 3D coordinates stored in ``sp_image``) lie closer than ``th``;
    isolated outlier points are zeroed out.

    Parameters
    ----------
    image_mask: [H, W] foreground mask with values in [0, 1]
    sp_image: [H, W, 3] per-pixel 3D coordinates
    nK: int, number of neighbours that must be within the threshold
    th: float, distance threshold for a neighbour to count as "close"

    Returns
    -------
    [H, W] optimized mask; the caller's ``image_mask`` is left unmodified.
    """
    mask_pts = image_mask.reshape(-1)
    xyz_pts = sp_image.reshape(-1, 3)
    xyz_pts = xyz_pts[mask_pts > 0.5, :]
    # query nK+1 neighbours because each point is its own nearest neighbour
    Neighbors = NearestNeighbors(n_neighbors=nK + 1, algorithm='kd_tree').fit(xyz_pts)
    nn_dist, nn_idx = Neighbors.kneighbors(xyz_pts)  # N,nK+1
    nn_dist = nn_dist[:, 1:]  # drop the self-match column
    # np.float was removed in NumPy 1.24 -- use the builtin float instead
    valid = (np.sum((nn_dist < th).astype(float), axis=1) == nK).astype(float)
    # copy: reshape(-1) returns a view, so writing through it would
    # silently mutate the caller's mask
    optimized_mask = image_mask.reshape(-1).copy()
    optimized_mask[mask_pts > 0.5] = valid
    optimized_mask = optimized_mask.reshape(image_mask.shape[0], image_mask.shape[1])
    return optimized_mask
def generate_final_mask(image_learned_uv, image_mask,
                        image_resize_factor, mask_container_low_res, final_gim):
    """
    Post Process Algorithm to generate mask of the unwrapped chart

    Parameters
    ----------
    image_learned_uv: [H,W,2]
    image_mask: [H,W]
    image_resize_factor: float
    mask_container_low_res: a predefined tensor with intermediate low resolution
    final_gim: a predefined tensor with target high resolution
    """
    # resize (larger) uv and mask with bi-linear up-sampling;
    # cv.resize requires an integer dsize, so cast in case the factor is a float
    dsize = (int(image_resize_factor * image_learned_uv.shape[0]),
             int(image_resize_factor * image_learned_uv.shape[1]))
    resized_uv = cv.resize(image_learned_uv, dsize=dsize, interpolation=cv.INTER_LINEAR)
    resized_mask = cv.resize(image_mask, dsize=dsize, interpolation=cv.INTER_LINEAR)
    # np.float was removed in NumPy 1.24 -- use the builtin float instead
    resized_mask = (resized_mask > 0.5).astype(float)
    # use the uv gradient to find (and later remove) discontinuity edges
    discontinuous_mask_u = cv.Laplacian(image_learned_uv[..., 0], ddepth=cv.CV_32F)  # small gradient map
    discontinuous_mask_v = cv.Laplacian(image_learned_uv[..., 1], ddepth=cv.CV_32F)  # small gradient map
    # derive gradient thresholds from the in-mask range of latent u and v
    u_max = (image_learned_uv[..., 0] * image_mask).max()
    v_max = (image_learned_uv[..., 1] * image_mask).max()
    u_min = (image_learned_uv[..., 0] * image_mask + (1.0 - image_mask)).min()
    v_min = (image_learned_uv[..., 1] * image_mask + (1.0 - image_mask)).min()
    u_th = (u_max - u_min) / 30
    v_th = (v_max - v_min) / 30
    discontinuous_mask_u = (discontinuous_mask_u > u_th).astype(float) * image_mask
    discontinuous_mask_v = (discontinuous_mask_v > v_th).astype(float) * image_mask
    discontinuous_mask = ((discontinuous_mask_u + discontinuous_mask_v) > 0).astype(float)
    # do not treat the silhouette boundary of the mask itself as a discontinuity
    boundary_recovery_mask = (cv.Laplacian(image_mask, ddepth=cv.CV_32F) > 0.01).astype(float)
    discontinuous_mask = discontinuous_mask * (1.0 - boundary_recovery_mask)
    resized_discontinuous_mask = cv.resize(discontinuous_mask, dsize=dsize,
                                             interpolation=cv.INTER_NEAREST)
    # splat the cleaned mask into the low-resolution chart container
    high_res_mask = torch.from_numpy(resized_mask * (1.0 - resized_discontinuous_mask)) \
        .unsqueeze(0).unsqueeze(0).cuda().float()  # 1,1,R,R
    high_res_uv = torch.from_numpy(resized_uv).permute(2, 0, 1).unsqueeze(0).cuda().float()
    low_res_mask = mask_container_low_res.cuda()
    low_res_mask = spread_feature(low_res_mask, high_res_uv, high_res_mask, high_res_mask)
    # resize the low-res mask to the final chart resolution and use a
    # morphological opening to remove small speckles
    low_res_mask_closed = low_res_mask.detach().cpu().squeeze(0).squeeze(0).numpy()  # R,R
    close_k_size = int(final_gim.shape[2] / 100)
    close_kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (close_k_size, close_k_size))
    final_mask_np = cv.resize(low_res_mask_closed,
                               dsize=(final_gim.shape[2], final_gim.shape[2]),
                               interpolation=cv.INTER_NEAREST)  # R,R
    final_mask_np = (final_mask_np > 0).astype(float)
    final_mask_np = cv.morphologyEx(final_mask_np, cv.MORPH_OPEN, close_kernel)
    return final_mask_np
def generate_texture(sp_image, full_gim, image_rgb, image_mask, final_mask_np, final_res, nK=4, th=1e-2):
    """Bake an RGB texture for the unwrapped chart by projecting image colors.

    For every valid chart texel, the nK nearest observed surface points are
    found in 3D and their colors are blended with distance-based weights.
    Texels whose nearest observation is farther than ``th`` are removed from
    the mask.

    Parameters
    ----------
    sp_image: [H, W, 3] per-pixel 3D coordinates of the input image
    full_gim: [R, R, 3] 3D coordinates of the chart texels
    image_rgb: [H, W, 3] input colors
    image_mask: [H, W] image foreground mask
    final_mask_np: [R, R] chart validity mask
    final_res: int, output texture resolution R
    nK: int, number of neighbours blended per texel
    th: float, maximum distance for a texel to stay valid

    Returns
    -------
    (final_texture_np [R, R, 3], optimized_final_mask_np [R, R])
    """
    # prepare root (observed) points from the image and query points from the chart
    root_xyz_np = sp_image.reshape(-1, 3)  # H*W,3
    root_rgb_np = image_rgb.reshape(-1, 3)  # H*W,3
    _image_mask = image_mask.reshape(-1)  # H*W
    root_xyz_np = root_xyz_np[_image_mask > 0.5, :]  # M,3
    root_rgb_np = root_rgb_np[_image_mask > 0.5, :]  # M,3 [0,1]
    query_xyz_np = full_gim.reshape(-1, 3)  # R*R,3
    _final_mask_np = final_mask_np.reshape(-1)  # R*R
    query_xyz_np = query_xyz_np[_final_mask_np > 0.5, :]  # N,3
    # find the nearest observed pixels for every chart texel
    Neighbors = NearestNeighbors(n_neighbors=nK, algorithm='kd_tree').fit(root_xyz_np)
    nn_dist, nn_idx = Neighbors.kneighbors(query_xyz_np)  # N,nK
    # drop texels whose nearest observation is too far away
    # (np.float was removed in NumPy 1.24 -- use the builtin float instead)
    valid = (nn_dist[:, 0] < th).astype(float)
    optimized_final_mask_np = final_mask_np.reshape(-1).copy()
    optimized_final_mask_np[_final_mask_np > 0.5] = valid
    optimized_final_mask_np = optimized_final_mask_np.reshape(final_mask_np.shape[0], final_mask_np.shape[1])
    # inverse-distance style blending weights over the nK neighbours
    interpolation_weight = nn_dist.copy()
    interpolation_weight = 1 - interpolation_weight / np.sum(interpolation_weight, 1, keepdims=True)
    interpolation_weight = interpolation_weight / np.sum(interpolation_weight, 1, keepdims=True)
    query_rgb_np = np.zeros((query_xyz_np.shape[0], 3))
    for kdx in range(nK):
        nn_color = root_rgb_np[nn_idx[:, kdx], :]
        query_rgb_np += nn_color * interpolation_weight[:, kdx][..., np.newaxis]
    # invalid texels default to white
    final_texture_np = np.ones((final_res ** 2, 3))
    final_texture_np[_final_mask_np > 0.5, :] = query_rgb_np
    final_texture_np = final_texture_np.reshape(final_res, final_res, 3)
    return final_texture_np, optimized_final_mask_np
"cv2.Laplacian",
"numpy.ones",
"torch.from_numpy",
"cv2.morphologyEx",
"numpy.zeros",
"numpy.sum",
"sklearn.neighbors.NearestNeighbors",
"cv2.resize",
"cv2.getStructuringElement"
] | [((1330, 1503), 'cv2.resize', 'cv.resize', (['image_learned_uv'], {'dsize': '(image_resize_factor * image_learned_uv.shape[0], image_resize_factor *\n image_learned_uv.shape[1])', 'interpolation': 'cv.INTER_LINEAR'}), '(image_learned_uv, dsize=(image_resize_factor * image_learned_uv.\n shape[0], image_resize_factor * image_learned_uv.shape[1]),\n interpolation=cv.INTER_LINEAR)\n', (1339, 1503), True, 'import cv2 as cv\n'), ((1593, 1761), 'cv2.resize', 'cv.resize', (['image_mask'], {'dsize': '(image_resize_factor * image_learned_uv.shape[0], image_resize_factor *\n image_learned_uv.shape[1])', 'interpolation': 'cv.INTER_LINEAR'}), '(image_mask, dsize=(image_resize_factor * image_learned_uv.shape[0\n ], image_resize_factor * image_learned_uv.shape[1]), interpolation=cv.\n INTER_LINEAR)\n', (1602, 1761), True, 'import cv2 as cv\n'), ((1951, 2007), 'cv2.Laplacian', 'cv.Laplacian', (['image_learned_uv[..., 0]'], {'ddepth': 'cv.CV_32F'}), '(image_learned_uv[..., 0], ddepth=cv.CV_32F)\n', (1963, 2007), True, 'import cv2 as cv\n'), ((2057, 2113), 'cv2.Laplacian', 'cv.Laplacian', (['image_learned_uv[..., 1]'], {'ddepth': 'cv.CV_32F'}), '(image_learned_uv[..., 1], ddepth=cv.CV_32F)\n', (2069, 2113), True, 'import cv2 as cv\n'), ((3059, 3235), 'cv2.resize', 'cv.resize', (['discontinuous_mask'], {'dsize': '(image_resize_factor * image_learned_uv.shape[0], image_resize_factor *\n image_learned_uv.shape[1])', 'interpolation': 'cv.INTER_NEAREST'}), '(discontinuous_mask, dsize=(image_resize_factor * image_learned_uv\n .shape[0], image_resize_factor * image_learned_uv.shape[1]),\n interpolation=cv.INTER_NEAREST)\n', (3068, 3235), True, 'import cv2 as cv\n'), ((4007, 4079), 'cv2.getStructuringElement', 'cv.getStructuringElement', (['cv.MORPH_ELLIPSE', '(close_k_size, close_k_size)'], {}), '(cv.MORPH_ELLIPSE, (close_k_size, close_k_size))\n', (4031, 4079), True, 'import cv2 as cv\n'), ((4100, 4215), 'cv2.resize', 'cv.resize', (['low_res_mask_closed'], {'dsize': 
'(final_gim.shape[2], final_gim.shape[2])', 'interpolation': 'cv.INTER_NEAREST'}), '(low_res_mask_closed, dsize=(final_gim.shape[2], final_gim.shape[2\n ]), interpolation=cv.INTER_NEAREST)\n', (4109, 4215), True, 'import cv2 as cv\n'), ((4385, 4444), 'cv2.morphologyEx', 'cv.morphologyEx', (['final_mask_np', 'cv.MORPH_OPEN', 'close_kernel'], {}), '(final_mask_np, cv.MORPH_OPEN, close_kernel)\n', (4400, 4444), True, 'import cv2 as cv\n'), ((5921, 5957), 'numpy.zeros', 'np.zeros', (['(query_xyz_np.shape[0], 3)'], {}), '((query_xyz_np.shape[0], 3))\n', (5929, 5957), True, 'import numpy as np\n'), ((6138, 6166), 'numpy.ones', 'np.ones', (['(final_res ** 2, 3)'], {}), '((final_res ** 2, 3))\n', (6145, 6166), True, 'import numpy as np\n'), ((5855, 5901), 'numpy.sum', 'np.sum', (['interpolation_weight', '(1)'], {'keepdims': '(True)'}), '(interpolation_weight, 1, keepdims=True)\n', (5861, 5901), True, 'import numpy as np\n'), ((332, 389), 'sklearn.neighbors.NearestNeighbors', 'NearestNeighbors', ([], {'n_neighbors': '(nK + 1)', 'algorithm': '"""kd_tree"""'}), "(n_neighbors=nK + 1, algorithm='kd_tree')\n", (348, 389), False, 'from sklearn.neighbors import NearestNeighbors\n'), ((5170, 5223), 'sklearn.neighbors.NearestNeighbors', 'NearestNeighbors', ([], {'n_neighbors': 'nK', 'algorithm': '"""kd_tree"""'}), "(n_neighbors=nK, algorithm='kd_tree')\n", (5186, 5223), False, 'from sklearn.neighbors import NearestNeighbors\n'), ((5758, 5804), 'numpy.sum', 'np.sum', (['interpolation_weight', '(1)'], {'keepdims': '(True)'}), '(interpolation_weight, 1, keepdims=True)\n', (5764, 5804), True, 'import numpy as np\n'), ((2881, 2923), 'cv2.Laplacian', 'cv.Laplacian', (['image_mask'], {'ddepth': 'cv.CV_32F'}), '(image_mask, ddepth=cv.CV_32F)\n', (2893, 2923), True, 'import cv2 as cv\n'), ((3419, 3486), 'torch.from_numpy', 'torch.from_numpy', (['(resized_mask * (1.0 - resized_discontinuous_mask))'], {}), '(resized_mask * (1.0 - resized_discontinuous_mask))\n', (3435, 3486), False, 'import 
torch\n'), ((3568, 3596), 'torch.from_numpy', 'torch.from_numpy', (['resized_uv'], {}), '(resized_uv)\n', (3584, 3596), False, 'import torch\n')] |
from contextlib import closing
from .helper import create_journal_group_name_lookup, load_delimited_data, save_delimited_data
from mysql.connector import connect
from .settings import get_config
# Parameterized (pyformat-style) lookups keyed on the internal surrogate id.
SELECT_JOURNAL_DATA_BY_ID = 'SELECT nlmid, medline_ta from journals WHERE id = %(id)s'
SELECT_ARTICLE_DATA_BY_ID = 'SELECT pmid, pub_year from articles WHERE id = %(id)s'
def run(predictions_filepath, journal_groups_filepath, db_config, dereferenced_predictions_filepath, encoding, delimiter):
    """Load predictions, resolve their database ids to public identifiers,
    and write the dereferenced rows out as delimited data."""
    prediction_rows = load_delimited_data(predictions_filepath, encoding, delimiter)
    with closing(connect(**db_config)) as connection:
        group_name_lookup = create_journal_group_name_lookup(journal_groups_filepath, encoding, delimiter)
        resolved_rows = _dereference_predictions(connection, prediction_rows, group_name_lookup)
    save_delimited_data(dereferenced_predictions_filepath, encoding, delimiter, resolved_rows)
def _dereference_predictions(db_conn, predictions, journal_group_lookup):
    """Resolve article/journal ids in *predictions* to their stored metadata.

    Args:
        db_conn: open database connection. (Renamed from the misleading
            ``db_config`` — every caller passes a live connection.)
        predictions: iterable of (article_id, journal_id, act, score) rows.
        journal_group_lookup: maps an NLM id to its journal group name.

    Returns:
        Tuple of (pmid, pub_year, nlmid, medline_ta, journal_group, act,
        score) rows; any lookup that fails falls back to 'unknown'.
    """
    dereferenced = []
    for prediction in predictions:
        article_id, journal_id, act, score = prediction
        # Fall back to 'unknown' instead of crashing: the original
        # unconditionally unpacked a possibly-None article row.
        pmid, pub_year = 'unknown', 'unknown'
        article_info = _get_by_id(db_conn, SELECT_ARTICLE_DATA_BY_ID, article_id)
        if article_info:
            pmid, pub_year = article_info
        nlmid, medline_ta = 'unknown', 'unknown'
        journal_info = _get_by_id(db_conn, SELECT_JOURNAL_DATA_BY_ID, journal_id)
        if journal_info:
            nlmid, medline_ta = journal_info
        journal_group = journal_group_lookup.get(nlmid, 'unknown')
        dereferenced.append((pmid, pub_year, nlmid, medline_ta, journal_group, act, score))
    return tuple(dereferenced)
def _get_by_id(db_conn, query, id):
with closing(db_conn.cursor()) as cursor: #pylint: disable=E1101
cursor.execute(query, { 'id': int(id) }) #pylint: disable=E1101
return cursor.fetchone() #pylint: disable=E1101
if __name__ == '__main__':
    # Example invocation: the paths below are placeholders and must be
    # adjusted to the local environment before running.
    predictions_filepath = '/****/predictions.csv'
    dereferenced_predictions_filepath = '/****/dereferenced_predictions.csv'
    encoding = 'utf8'
    delimiter = ','
    # Database credentials and the journal-group file come from app config.
    config = get_config()
    db_config = config['database']
    journal_groups_filepath = config['journal_groups_file']
    run(predictions_filepath, journal_groups_filepath, db_config, dereferenced_predictions_filepath, encoding, delimiter)
| [
"mysql.connector.connect"
] | [((591, 611), 'mysql.connector.connect', 'connect', ([], {}), '(**db_config)\n', (598, 611), False, 'from mysql.connector import connect\n')] |
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from django.utils.html import format_html, escape, mark_safe
from mptt.admin import DraggableMPTTAdmin
from .models import Comment, RootHeader
from attachment.admin import AttachmentInline, AttachmentAdminMixin
class CommentAdmin(AttachmentAdminMixin, DraggableMPTTAdmin):
	# Admin for threaded comments (django-mptt tree) with file attachments.
	fieldsets = (
		(
			'Komentár',
			{'fields': ('subject', 'user', 'user_name', 'original_comment')}
		),
		(
			'Metainformácie',
			{'fields': ('ip_address', 'is_public', 'is_removed', 'is_locked')}
		),
	)
	list_display = ('tree_actions', 'get_subject', 'name', 'ip_address', 'created', 'is_public', 'is_removed', 'is_locked')
	list_display_links = ('get_subject',)
	list_filter = ('created', 'is_public', 'is_removed',)
	raw_id_fields = ('user',)
	search_fields = ('filtered_comment', 'user__username', 'user_name', 'ip_address')
	inlines = [AttachmentInline]
	def get_subject(self, obj):
		# Indent the subject by the node's tree depth so the change list
		# visually mirrors the discussion thread.
		return mark_safe(('<span style="display: inline-block; border-left: 1px solid #ddd; width: 16px; padding-top: 4px; padding-bottom: 8px; margin-top: -4px; margin-bottom: -8px;"> </span>' * (obj._mpttfield('level')-1)) + escape(obj.subject))
	get_subject.short_description = 'Predmet'
	get_subject.admin_order_field = 'subject'
	def get_actions(self, request):
		# Only superusers may bulk-delete comments.
		actions = super().get_actions(request)
		if not request.user.is_superuser:
			actions.pop('delete_selected', None)
		return actions
	def get_queryset(self, request):
		# Hide tree roots (level 0) everywhere; when the change list is
		# filtered by content object, restrict to that single discussion.
		qs = super().get_queryset(request).exclude(level=0)
		if 'content_type_id__exact' in request.GET and 'object_id__exact' in request.GET:
			try:
				content_type_id = int(request.GET['content_type_id__exact'])
				object_id = int(request.GET['object_id__exact'])
				return qs.filter(content_type_id=content_type_id, object_id=object_id)
			except ValueError:
				return qs.none()
		if request.resolver_match.view_name in ('admin:comments_comment_change', 'admin:comments_comment_delete', 'admin:comments_comment_history', 'admin:comments_comment_add'):
			return qs
		return qs.none()
	def get_model_perms(self, request):
		# Drop add/change/delete permissions outside the comment admin views
		# so the model does not show up on the admin index page.
		perms = super().get_model_perms(request)
		if request.resolver_match.view_name not in ('admin:comments_comment_changelist', 'admin:comments_comment_change', 'admin:comments_comment_delete', 'admin:comments_comment_history', 'admin:comments_comment_add'):
			perms['delete'] = False
			perms['add'] = False
			perms['change'] = False
		return perms
class RootHeaderAdmin(admin.ModelAdmin):
	# Read-only listing of discussion roots; rows only link out, they are
	# never created or edited by hand.
	date_hierarchy = 'pub_date'
	list_display = ('get_name', 'get_link')
	list_display_links = None
	def get_queryset(self, request):
		# select_related avoids one content-type query per row.
		return super().get_queryset(request).select_related('content_type')
	def has_add_permission(self, request):
		# Root headers are created automatically elsewhere.
		return False
	def get_name(self, obj):
		return format_html('<a href="{}">{} </a>', obj.get_admin_url(), obj.content_object)
	get_name.short_description = "Názov"
	def get_link(self, obj):
		return format_html('<a href="{}">Zobraziť</a>', obj.get_absolute_url())
	get_link.short_description = "Zobraziť"
class CommentInline(GenericTabularInline):
	# Inline that lists a content object's comments in thread order.
	model = Comment
	fields = ('get_subject',)
	readonly_fields = ('get_subject',)
	template = 'admin/edit_inline/comments.html'
	verbose_name = 'komentár'
	verbose_name_plural = 'komentáre'
	ct_field = 'content_type'
	ct_fk_field = 'object_id'
	extra = 0
	def get_queryset(self, request):
		# Order by the MPTT left index, i.e. depth-first tree order.
		return super().get_queryset(request).order_by('lft')
	def get_subject(self, obj):
		indent = ''
		if obj.level:
			indent = mark_safe(' ' * (obj.level-1))
		return format_html("{}{}", indent, obj.subject)
	get_subject.short_description = "Predmet"
# Register both model admins with the default admin site.
admin.site.register(Comment, CommentAdmin)
admin.site.register(RootHeader, RootHeaderAdmin)
| [
"django.utils.html.format_html",
"django.contrib.admin.site.register",
"django.utils.html.escape",
"django.utils.html.mark_safe"
] | [((3706, 3748), 'django.contrib.admin.site.register', 'admin.site.register', (['Comment', 'CommentAdmin'], {}), '(Comment, CommentAdmin)\n', (3725, 3748), False, 'from django.contrib import admin\n'), ((3749, 3797), 'django.contrib.admin.site.register', 'admin.site.register', (['RootHeader', 'RootHeaderAdmin'], {}), '(RootHeader, RootHeaderAdmin)\n', (3768, 3797), False, 'from django.contrib import admin\n'), ((3620, 3660), 'django.utils.html.format_html', 'format_html', (['"""{}{}"""', 'indent', 'obj.subject'], {}), "('{}{}', indent, obj.subject)\n", (3631, 3660), False, 'from django.utils.html import format_html, escape, mark_safe\n'), ((3569, 3612), 'django.utils.html.mark_safe', 'mark_safe', (["(' ' * (obj.level - 1))"], {}), "(' ' * (obj.level - 1))\n", (3578, 3612), False, 'from django.utils.html import format_html, escape, mark_safe\n'), ((1221, 1240), 'django.utils.html.escape', 'escape', (['obj.subject'], {}), '(obj.subject)\n', (1227, 1240), False, 'from django.utils.html import format_html, escape, mark_safe\n')] |
import giphypop
import requests
from os import path, makedirs
from PIL import Image, ImagePalette
import io
import argparse
import json
import time
from Utils import init_dirs
limit = 100
giphy = giphypop.Giphy(api_key='')
def parse_args():
    """Parse CLI arguments for the GIPHY downloader.

    Validates ``--limit`` and, when ``--api-key`` points at an existing
    file, replaces the path with the key stored inside it (a ``.json``
    file with an ``'api'`` field, or a plain ``.txt`` file).

    Returns:
        argparse.Namespace with src, dest, limit and api_key.

    Raises:
        ValueError: for a non-positive limit, a JSON file without an
            'api' key, or an unrecognized key-file extension.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--src', type=str, required=True, help='Path to .JSON file with search terms')
    parser.add_argument('--dest', type=str, required=True, help='Destination directory')
    parser.add_argument('--limit', type=int, default=100, help='Limit the number of GIFs for each search')
    parser.add_argument('--api-key', type=str, required=True, help='GIPHY API key')
    args = parser.parse_args()
    # verify limit value
    if args.limit <= 0:
        raise ValueError('parse_args: Invalid number for \'limit\'. Must be >= 1.')
    # unpack API key from a file when a path was given instead of a raw key
    if path.isfile(args.api_key):
        ext = path.splitext(args.api_key)[-1]
        if ext == '.json':
            # BUG FIX: json.load() takes a file object, not a path string.
            with open(args.api_key) as f:
                j = json.load(f)
            if 'api' not in j:
                raise ValueError('parse_args: Cannot find API. JSON file does not contain key \'api\'.')
            args.api_key = j['api']
        elif ext == '.txt':
            with open(args.api_key) as f:
                args.api_key = f.readline().strip('\n').strip()
        else:
            raise ValueError('parse_args: Cannot find API. Unrecognized file extension')
    # BUG FIX: return the already-processed namespace; re-running
    # parser.parse_args() discarded the api-key substitution above.
    return args
# function takes a GIF and returns a list of images
def gif2jpg(gifbytes):
    """Decode an animated GIF given as raw bytes into a list of RGBA frames.

    Returns an empty list when the bytes cannot be opened as an image.
    """
    try:
        gif = Image.open(io.BytesIO(gifbytes))
    except IOError:
        return []
    shared_palette = gif.getpalette()
    frames = []
    frame_index = 0
    while True:
        # Re-apply the first frame's palette so partial frames render
        # consistently, then flatten onto an RGBA canvas.
        gif.putpalette(shared_palette)
        canvas = Image.new("RGBA", gif.size)
        canvas.paste(gif)
        frames.append(canvas.copy())
        frame_index += 1
        try:
            gif.seek(frame_index)
        except EOFError:
            # Past the last frame: decoding is complete.
            break
    return frames
# load and validate arguments
args = parse_args()
dirnames = init_dirs()
logdir = dirnames['log']
resdir = dirnames['res']
configdir = dirnames['config']
if not path.isdir(args.dest):
    makedirs(args.dest)
rootdir = args.dest
# Set name is the destination directory's base name (extension stripped).
setname = path.basename(path.splitext(args.dest)[0])
logfilename = path.join(logdir, '%s-Log.txt' % setname)
configfilename = path.join(configdir, '%s-Config.json' % setname)
# load search words (type = list)
with open(args.src) as f:
    categories = json.load(f)
    if not isinstance(categories, dict):
        raise ValueError('Cannot infer .json format')
# load config file
if path.isfile(configfilename):
    with open(configfilename) as f:
        config = json.load(f)
    # mesh them together
    # NOTE(review): 'config_tile' looks like a typo for 'config_title' and is
    # never used; the inner loop also runs once per saved title, repeatedly.
    config_titles = list(config.keys())
    for config_tile in config_titles:
        for category_title in categories.keys():
            if category_title not in config:
                # NOTE(review): elsewhere config values are dicts of
                # {term: start_index}; storing 0 here would make the
                # per-term loop below fail for this category — verify.
                config[category_title] = 0
            else:
                idx = config_titles.index(category_title)
                del config_titles[idx]
    # Drop saved categories that no longer appear in the search-term file.
    for title in config_titles:
        del config[title]
# create new config file
else:
    config = {
        title: {
            term: 0 for term in terms
        } for title, terms in categories.items()
    }
    with open(configfilename, 'w') as f:
        json.dump(config, f)
categories = config
if not path.isfile(logfilename):
    open(logfilename, 'w').close()
# Main download loop: one output directory per category, one GIPHY search
# per term. Progress and failure counts are appended to the log file and
# the per-term start index is checkpointed to the config file every 10 GIFs.
# NOTE(review): args.api_key is never passed to the module-level `giphy`
# client (created with api_key='') — confirm the key is actually applied.
for category, terms in categories.items():
    # create new directory if needed
    outdir = path.join(rootdir, category)
    if not path.isdir(outdir):
        makedirs(outdir)
    # write log file
    logstr = 'category: %s\n' % category
    with open(logfilename, 'a+') as f:
        f.write(logstr)
    print(logstr[:-1])
    for term, start_index in terms.items():
        logstr = '    term       : %s\n' % term
        logstr += '    start index: %d\n' % start_index
        with open(logfilename, 'a+') as f:
            f.write(logstr)
        print(logstr[:-1])
        # query for each term
        start_t = time.time()
        success = 0
        num_url_failed = 0
        num_save_failed = 0
        for i, gif in enumerate(giphy.search(term, limit=limit)):
            if (i + 1) % 10 == 0:
                end_t = time.time()
                logstr = '        ------------------------\n'
                logstr += '        current index  : %d\n' % (i + start_index)
                logstr += '        gifs downloaded: %d\n' % success
                logstr += '        url fails      : %d\n' % num_url_failed
                logstr += '        save fails     : %d\n' % num_save_failed
                logstr += '        time           : %.2f\n' % (end_t - start_t)
                with open(logfilename, 'a+') as f:
                    f.write(logstr)
                print(logstr[:-1])
                success = 0
                num_url_failed = 0
                num_save_failed = 0
                # save config
                config[category][term] = i + start_index
                with open(configfilename, 'w') as f:
                    json.dump(config, f, indent=2)
                start_t = time.time()
            try:
                url = gif.media_url
                try:
                    r = requests.get(url)
                    data = r.content
                    filename = '%s_%d.gif' % (term, i)
                    # NOTE(review): the file handle is never closed; a
                    # `with open(...)` block would be safer.
                    open(path.join(outdir, filename), "wb+").write(data)
                    success += 1
                except Exception as e:
                    num_save_failed += 1
            except Exception as e:
                num_url_failed += 1
print('done')
| [
"argparse.ArgumentParser",
"os.makedirs",
"PIL.Image.new",
"os.path.join",
"Utils.init_dirs",
"os.path.splitext",
"io.BytesIO",
"os.path.isfile",
"requests.get",
"os.path.isdir",
"json.load",
"giphypop.Giphy",
"time.time",
"json.dump"
] | [((198, 224), 'giphypop.Giphy', 'giphypop.Giphy', ([], {'api_key': '""""""'}), "(api_key='')\n", (212, 224), False, 'import giphypop\n'), ((2024, 2035), 'Utils.init_dirs', 'init_dirs', ([], {}), '()\n', (2033, 2035), False, 'from Utils import init_dirs\n'), ((2260, 2301), 'os.path.join', 'path.join', (['logdir', "('%s-Log.txt' % setname)"], {}), "(logdir, '%s-Log.txt' % setname)\n", (2269, 2301), False, 'from os import path, makedirs\n'), ((2319, 2367), 'os.path.join', 'path.join', (['configdir', "('%s-Config.json' % setname)"], {}), "(configdir, '%s-Config.json' % setname)\n", (2328, 2367), False, 'from os import path, makedirs\n'), ((2570, 2597), 'os.path.isfile', 'path.isfile', (['configfilename'], {}), '(configfilename)\n', (2581, 2597), False, 'from os import path, makedirs\n'), ((258, 283), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (281, 283), False, 'import argparse\n'), ((857, 882), 'os.path.isfile', 'path.isfile', (['args.api_key'], {}), '(args.api_key)\n', (868, 882), False, 'from os import path, makedirs\n'), ((2125, 2146), 'os.path.isdir', 'path.isdir', (['args.dest'], {}), '(args.dest)\n', (2135, 2146), False, 'from os import path, makedirs\n'), ((2152, 2171), 'os.makedirs', 'makedirs', (['args.dest'], {}), '(args.dest)\n', (2160, 2171), False, 'from os import path, makedirs\n'), ((2446, 2458), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2455, 2458), False, 'import json\n'), ((3334, 3358), 'os.path.isfile', 'path.isfile', (['logfilename'], {}), '(logfilename)\n', (3345, 3358), False, 'from os import path, makedirs\n'), ((3491, 3519), 'os.path.join', 'path.join', (['rootdir', 'category'], {}), '(rootdir, category)\n', (3500, 3519), False, 'from os import path, makedirs\n'), ((2217, 2241), 'os.path.splitext', 'path.splitext', (['args.dest'], {}), '(args.dest)\n', (2230, 2241), False, 'from os import path, makedirs\n'), ((2652, 2664), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2661, 2664), False, 'import 
json\n'), ((3285, 3305), 'json.dump', 'json.dump', (['config', 'f'], {}), '(config, f)\n', (3294, 3305), False, 'import json\n'), ((3531, 3549), 'os.path.isdir', 'path.isdir', (['outdir'], {}), '(outdir)\n', (3541, 3549), False, 'from os import path, makedirs\n'), ((3559, 3575), 'os.makedirs', 'makedirs', (['outdir'], {}), '(outdir)\n', (3567, 3575), False, 'from os import path, makedirs\n'), ((4017, 4028), 'time.time', 'time.time', ([], {}), '()\n', (4026, 4028), False, 'import time\n'), ((898, 925), 'os.path.splitext', 'path.splitext', (['args.api_key'], {}), '(args.api_key)\n', (911, 925), False, 'from os import path, makedirs\n'), ((973, 996), 'json.load', 'json.load', (['args.api_key'], {}), '(args.api_key)\n', (982, 996), False, 'import json\n'), ((1549, 1569), 'io.BytesIO', 'io.BytesIO', (['gifbytes'], {}), '(gifbytes)\n', (1559, 1569), False, 'import io\n'), ((1758, 1785), 'PIL.Image.new', 'Image.new', (['"""RGBA"""', 'img.size'], {}), "('RGBA', img.size)\n", (1767, 1785), False, 'from PIL import Image, ImagePalette\n'), ((4228, 4239), 'time.time', 'time.time', ([], {}), '()\n', (4237, 4239), False, 'import time\n'), ((5095, 5106), 'time.time', 'time.time', ([], {}), '()\n', (5104, 5106), False, 'import time\n'), ((5038, 5068), 'json.dump', 'json.dump', (['config', 'f'], {'indent': '(2)'}), '(config, f, indent=2)\n', (5047, 5068), False, 'import json\n'), ((5207, 5224), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (5219, 5224), False, 'import requests\n'), ((5343, 5370), 'os.path.join', 'path.join', (['outdir', 'filename'], {}), '(outdir, filename)\n', (5352, 5370), False, 'from os import path, makedirs\n')] |
import logging
import os
import numpy as np
import torch
# Configure cuDNN and pick the default device once at import time.
if torch.cuda.is_available():
    torch.backends.cudnn.enabled = True
    # NOTE(review): benchmark=True and deterministic=True pull in opposite
    # directions (autotuned vs. reproducible kernels); confirm the intent.
    torch.backends.cudnn.benchmark = True
    torch.backends.cudnn.deterministic = True
    device = torch.device('cuda')
else:
    device = torch.device('cpu')
def _patch_noise_extend_to_img(noise, image_size=[3, 32, 32], patch_location='center'):
c, h, w = image_size[0], image_size[1], image_size[2]
mask = np.zeros((c, h, w), np.float32)
x_len, y_len = noise.shape[1], noise.shape[2]
if patch_location == 'center' or (h == w == x_len == y_len):
x = h // 2
y = w // 2
elif patch_location == 'random':
x = np.random.randint(x_len // 2, w - x_len // 2)
y = np.random.randint(y_len // 2, h - y_len // 2)
else:
raise('Invalid patch location')
x1 = np.clip(x - x_len // 2, 0, h)
x2 = np.clip(x + x_len // 2, 0, h)
y1 = np.clip(y - y_len // 2, 0, w)
y2 = np.clip(y + y_len // 2, 0, w)
mask[:, x1: x2, y1: y2] = noise
return mask
def setup_logger(name, log_file, level=logging.INFO):
    """Create (or fetch) a named logger writing to both stderr and *log_file*."""
    fmt = logging.Formatter('%(asctime)s %(message)s')
    stream_handler = logging.StreamHandler()
    file_handler = logging.FileHandler(log_file)
    logger = logging.getLogger(name)
    logger.setLevel(level)
    # File handler first, console second — mirrors the original attach order.
    for handler in (file_handler, stream_handler):
        handler.setFormatter(fmt)
        logger.addHandler(handler)
    return logger
def log_display(epoch, global_step, time_elapse, **kwargs):
    """Build a single tab-separated progress line for console/file logging.

    Args:
        epoch: current epoch number.
        global_step: current optimization step.
        time_elapse: seconds per iteration; reported as its reciprocal (it/s).
        **kwargs: extra metrics; strings are appended verbatim, numbers are
            formatted with four decimal places.

    Returns:
        The assembled log line as a string.
    """
    display = 'epoch=' + str(epoch) + \
              '\tglobal_step=' + str(global_step)
    for key, value in kwargs.items():
        if type(value) == str:
            # BUG FIX: the original used '=' here, which threw away
            # everything accumulated so far whenever a string metric
            # appeared in kwargs.
            display += '\t' + key + '=' + value
        else:
            display += '\t' + str(key) + '=%.4f' % value
    display += '\ttime=%.2fit/s' % (1. / time_elapse)
    return display
def accuracy(output, target, topk=(1,)):
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view(-1).float().sum(0)
res.append(correct_k.mul_(1/batch_size))
return res
def save_model(filename, epoch, model, optimizer, scheduler, save_best=False, **kwargs):
    """Serialize training state to '<filename>.pth' (and optionally '_best.pth').

    Args:
        filename: path prefix without extension.
        epoch: last finished epoch; stored as epoch+1 (the epoch to resume at).
        model, optimizer, scheduler: objects whose state_dicts are saved;
            scheduler may be None.
        save_best: when True, also write a '<filename>_best.pth' copy.
        **kwargs: extra entries merged verbatim into the checkpoint dict.
    """
    # Torch Save State Dict
    state = {
        'epoch': epoch+1,
        'model_state_dict': model.state_dict(),
        'optimizer_state_dict': optimizer.state_dict(),
        'scheduler_state_dict': scheduler.state_dict() if scheduler is not None else None
    }
    for key, value in kwargs.items():
        state[key] = value
    torch.save(state, filename + '.pth')
    filename += '_best.pth'
    if save_best:
        torch.save(state, filename)
    return
def load_model(filename, model, optimizer, scheduler, **kwargs):
    """Restore training state saved by save_model from '<filename>.pth'.

    Tensors are mapped onto the module-level `device`. Optimizer and
    scheduler states are restored only when both the object and its saved
    state are present.

    Returns:
        The full checkpoint dict (epoch counter plus any extra entries).
    """
    # NOTE(review): **kwargs is accepted but never used here.
    # Load Torch State Dict
    filename = filename + '.pth'
    checkpoints = torch.load(filename, map_location=device)
    model.load_state_dict(checkpoints['model_state_dict'])
    if optimizer is not None and checkpoints['optimizer_state_dict'] is not None:
        optimizer.load_state_dict(checkpoints['optimizer_state_dict'])
    if scheduler is not None and checkpoints['scheduler_state_dict'] is not None:
        scheduler.load_state_dict(checkpoints['scheduler_state_dict'])
    return checkpoints
def count_parameters_in_MB(model):
    """Return the parameter count in millions, skipping auxiliary-head weights."""
    total = 0
    for param_name, param in model.named_parameters():
        if "auxiliary_head" in param_name:
            continue
        total += np.prod(param.size())
    return total / 1e6
def build_dirs(path):
    """Create *path* (and any missing parents) if it does not already exist.

    Uses ``exist_ok=True`` so concurrent callers cannot race between the
    existence check and the creation (the original exists()+makedirs()
    pair could raise FileExistsError under concurrency).
    """
    os.makedirs(path, exist_ok=True)
    return
class AverageMeter(object):
    """Tracks the running average, sum, count, and maximum of a metric."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0
        self.max = 0

    def update(self, val, n=1):
        """Record *val* observed *n* times and refresh the derived stats."""
        self.val = val
        self.count = self.count + n
        self.sum = self.sum + val * n
        self.avg = self.sum / self.count
        if val > self.max:
            self.max = val
def onehot(size, target):
    """Return a float32 vector of length *size* with a 1 at index *target*."""
    encoding = torch.zeros(size, dtype=torch.float32)
    encoding[target] = 1.0
    return encoding
def rand_bbox(size, lam):
    """Sample a random bounding box whose area fraction is (1 - lam).

    CutMix-style helper: the box side lengths are the image sides scaled
    by sqrt(1 - lam), centered at a uniformly random point and clipped to
    the image bounds.

    Args:
        size: tensor shape, either (N, C, H, W) or (C, H, W).
        lam: mixing coefficient in [0, 1].

    Returns:
        (bbx1, bby1, bbx2, bby2) integer box corners.

    Raises:
        Exception: if *size* has neither 3 nor 4 dimensions.
    """
    if len(size) == 4:
        W = size[2]
        H = size[3]
    elif len(size) == 3:
        W = size[1]
        H = size[2]
    else:
        raise Exception
    cut_rat = np.sqrt(1. - lam)
    # BUG FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int() truncates identically.
    cut_w = int(W * cut_rat)
    cut_h = int(H * cut_rat)
    # uniform
    cx = np.random.randint(W)
    cy = np.random.randint(H)
    bbx1 = np.clip(cx - cut_w // 2, 0, W)
    bby1 = np.clip(cy - cut_h // 2, 0, H)
    bbx2 = np.clip(cx + cut_w // 2, 0, W)
    bby2 = np.clip(cy + cut_h // 2, 0, H)
    return bbx1, bby1, bbx2, bby2
| [
"numpy.clip",
"logging.getLogger",
"os.path.exists",
"logging.StreamHandler",
"numpy.sqrt",
"os.makedirs",
"logging.Formatter",
"torch.load",
"numpy.zeros",
"torch.cuda.is_available",
"logging.FileHandler",
"numpy.random.randint",
"torch.save",
"numpy.int",
"torch.zeros",
"torch.device... | [((62, 87), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (85, 87), False, 'import torch\n'), ((230, 250), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (242, 250), False, 'import torch\n'), ((270, 289), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (282, 289), False, 'import torch\n'), ((449, 480), 'numpy.zeros', 'np.zeros', (['(c, h, w)', 'np.float32'], {}), '((c, h, w), np.float32)\n', (457, 480), True, 'import numpy as np\n'), ((848, 877), 'numpy.clip', 'np.clip', (['(x - x_len // 2)', '(0)', 'h'], {}), '(x - x_len // 2, 0, h)\n', (855, 877), True, 'import numpy as np\n'), ((887, 916), 'numpy.clip', 'np.clip', (['(x + x_len // 2)', '(0)', 'h'], {}), '(x + x_len // 2, 0, h)\n', (894, 916), True, 'import numpy as np\n'), ((926, 955), 'numpy.clip', 'np.clip', (['(y - y_len // 2)', '(0)', 'w'], {}), '(y - y_len // 2, 0, w)\n', (933, 955), True, 'import numpy as np\n'), ((965, 994), 'numpy.clip', 'np.clip', (['(y + y_len // 2)', '(0)', 'w'], {}), '(y + y_len // 2, 0, w)\n', (972, 994), True, 'import numpy as np\n'), ((1166, 1210), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(message)s"""'], {}), "('%(asctime)s %(message)s')\n", (1183, 1210), False, 'import logging\n'), ((1233, 1256), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1254, 1256), False, 'import logging\n'), ((1320, 1349), 'logging.FileHandler', 'logging.FileHandler', (['log_file'], {}), '(log_file)\n', (1339, 1349), False, 'import logging\n'), ((1404, 1427), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (1421, 1427), False, 'import logging\n'), ((2763, 2799), 'torch.save', 'torch.save', (['state', "(filename + '.pth')"], {}), "(state, filename + '.pth')\n", (2773, 2799), False, 'import torch\n'), ((3039, 3080), 'torch.load', 'torch.load', (['filename'], {'map_location': 'device'}), '(filename, map_location=device)\n', (3049, 3080), False, 'import 
torch\n'), ((4200, 4238), 'torch.zeros', 'torch.zeros', (['size'], {'dtype': 'torch.float32'}), '(size, dtype=torch.float32)\n', (4211, 4238), False, 'import torch\n'), ((4480, 4498), 'numpy.sqrt', 'np.sqrt', (['(1.0 - lam)'], {}), '(1.0 - lam)\n', (4487, 4498), True, 'import numpy as np\n'), ((4510, 4529), 'numpy.int', 'np.int', (['(W * cut_rat)'], {}), '(W * cut_rat)\n', (4516, 4529), True, 'import numpy as np\n'), ((4542, 4561), 'numpy.int', 'np.int', (['(H * cut_rat)'], {}), '(H * cut_rat)\n', (4548, 4561), True, 'import numpy as np\n'), ((4586, 4606), 'numpy.random.randint', 'np.random.randint', (['W'], {}), '(W)\n', (4603, 4606), True, 'import numpy as np\n'), ((4616, 4636), 'numpy.random.randint', 'np.random.randint', (['H'], {}), '(H)\n', (4633, 4636), True, 'import numpy as np\n'), ((4649, 4679), 'numpy.clip', 'np.clip', (['(cx - cut_w // 2)', '(0)', 'W'], {}), '(cx - cut_w // 2, 0, W)\n', (4656, 4679), True, 'import numpy as np\n'), ((4691, 4721), 'numpy.clip', 'np.clip', (['(cy - cut_h // 2)', '(0)', 'H'], {}), '(cy - cut_h // 2, 0, H)\n', (4698, 4721), True, 'import numpy as np\n'), ((4733, 4763), 'numpy.clip', 'np.clip', (['(cx + cut_w // 2)', '(0)', 'W'], {}), '(cx + cut_w // 2, 0, W)\n', (4740, 4763), True, 'import numpy as np\n'), ((4775, 4805), 'numpy.clip', 'np.clip', (['(cy + cut_h // 2)', '(0)', 'H'], {}), '(cy + cut_h // 2, 0, H)\n', (4782, 4805), True, 'import numpy as np\n'), ((2854, 2881), 'torch.save', 'torch.save', (['state', 'filename'], {}), '(state, filename)\n', (2864, 2881), False, 'import torch\n'), ((3651, 3671), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (3665, 3671), False, 'import os\n'), ((3681, 3698), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (3692, 3698), False, 'import os\n'), ((684, 729), 'numpy.random.randint', 'np.random.randint', (['(x_len // 2)', '(w - x_len // 2)'], {}), '(x_len // 2, w - x_len // 2)\n', (701, 729), True, 'import numpy as np\n'), ((742, 787), 'numpy.random.randint', 
'np.random.randint', (['(y_len // 2)', '(h - y_len // 2)'], {}), '(y_len // 2, h - y_len // 2)\n', (759, 787), True, 'import numpy as np\n')] |
import pandas as pd
from sklearn.datasets import load_boston
# NOTE(review): load_boston was deprecated in scikit-learn 1.0 and removed
# in 1.2, so this script only runs on older scikit-learn versions.
boston_housing = load_boston()
columns_names = boston_housing.feature_names
y = boston_housing.target
X = boston_housing.data
# Splitting features and target datasets into: train and test
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.35)
# Training a Linear Regression model with fit()
from sklearn.linear_model import LinearRegression
lm = LinearRegression()
lm.fit(X_train, y_train)
# Output of the training is a model: a + b*X0 + c*X1 + d*X2 ...
print(f"Intercept: {lm.intercept_}\n")
print(f"Coeficients: {lm.coef_}\n")
# The second positional argument of pd.DataFrame is `index`, so the
# coefficients print labelled with their feature names.
print(f"Named Coeficients: {pd.DataFrame(lm.coef_, columns_names)}")
| [
"sklearn.model_selection.train_test_split",
"sklearn.datasets.load_boston",
"sklearn.linear_model.LinearRegression",
"pandas.DataFrame"
] | [((79, 92), 'sklearn.datasets.load_boston', 'load_boston', ([], {}), '()\n', (90, 92), False, 'from sklearn.datasets import load_boston\n'), ((339, 377), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.35)'}), '(X, y, test_size=0.35)\n', (355, 377), False, 'from sklearn.model_selection import train_test_split\n'), ((482, 500), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (498, 500), False, 'from sklearn.linear_model import LinearRegression\n'), ((694, 731), 'pandas.DataFrame', 'pd.DataFrame', (['lm.coef_', 'columns_names'], {}), '(lm.coef_, columns_names)\n', (706, 731), True, 'import pandas as pd\n')] |
from . import builder, config, exceptions, generator, logging
import click
import os
import sys
__all__ = []
LOG = logging.getLogger(__name__)
@click.group()
@click.option('-v', '--verbose', is_flag=True)
def promenade(*, verbose):
    # Root CLI group. The PROMENADE_DEBUG environment variable (see _debug)
    # forces verbose logging regardless of the -v flag.
    if _debug():
        verbose = True
    logging.setup(verbose=verbose)
@promenade.command('build-all', help='Construct all scripts')
@click.option(
    '-o',
    '--output-dir',
    default='.',
    type=click.Path(
        exists=True, file_okay=False, dir_okay=True, resolve_path=True),
    required=True,
    help='Location to write complete cluster configuration.')
@click.option('--validators', is_flag=True, help='Generate validation scripts')
@click.option(
    '--leave-kubectl',
    is_flag=True,
    help='Leave behind kubectl on joined nodes')
@click.argument('config_files', nargs=-1, type=click.File('rb'))
def build_all(*, config_files, leave_kubectl, output_dir, validators):
    # Build the full cluster configuration from the given document streams
    # and write the generated scripts into output_dir.
    debug = _debug()
    try:
        c = config.Configuration.from_streams(
            debug=debug,
            substitute=True,
            allow_missing_substitutions=False,
            leave_kubectl=leave_kubectl,
            streams=config_files)
        b = builder.Builder(c, validators=validators)
        b.build_all(output_dir=output_dir)
    except exceptions.PromenadeException as e:
        # Known failures render a user-facing message and exit with the
        # exception's dedicated exit code.
        e.display(debug=debug)
        sys.exit(e.EXIT_CODE)
@promenade.command('generate-certs', help='Generate a certs for a site')
@click.option(
    '-o',
    '--output-dir',
    type=click.Path(
        exists=True, file_okay=False, dir_okay=True, resolve_path=True),
    required=True,
    help='Location to write *-certificates.yaml')
@click.argument('config_files', nargs=-1, type=click.File('rb'))
def genereate_certs(*, config_files, output_dir):
    # NOTE(review): the function name is misspelled ('genereate'); the CLI
    # command name is set explicitly above, so the typo is internal only.
    debug = _debug()
    try:
        # Substitutions may be missing here: cert generation runs before
        # the full document set exists, so validation is skipped.
        c = config.Configuration.from_streams(
            debug=debug,
            streams=config_files,
            substitute=True,
            allow_missing_substitutions=True,
            validate=False)
        g = generator.Generator(c)
        g.generate(output_dir)
    except exceptions.PromenadeException as e:
        e.display(debug=debug)
        sys.exit(e.EXIT_CODE)
def _debug():
return os.environ.get('PROMENADE_DEBUG', '').lower() in {'1', 'True'}
| [
"click.group",
"click.option",
"click.File",
"os.environ.get",
"click.Path",
"sys.exit"
] | [((148, 161), 'click.group', 'click.group', ([], {}), '()\n', (159, 161), False, 'import click\n'), ((163, 208), 'click.option', 'click.option', (['"""-v"""', '"""--verbose"""'], {'is_flag': '(True)'}), "('-v', '--verbose', is_flag=True)\n", (175, 208), False, 'import click\n'), ((613, 691), 'click.option', 'click.option', (['"""--validators"""'], {'is_flag': '(True)', 'help': '"""Generate validation scripts"""'}), "('--validators', is_flag=True, help='Generate validation scripts')\n", (625, 691), False, 'import click\n'), ((693, 788), 'click.option', 'click.option', (['"""--leave-kubectl"""'], {'is_flag': '(True)', 'help': '"""Leave behind kubectl on joined nodes"""'}), "('--leave-kubectl', is_flag=True, help=\n 'Leave behind kubectl on joined nodes')\n", (705, 788), False, 'import click\n'), ((446, 520), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'file_okay': '(False)', 'dir_okay': '(True)', 'resolve_path': '(True)'}), '(exists=True, file_okay=False, dir_okay=True, resolve_path=True)\n', (456, 520), False, 'import click\n'), ((844, 860), 'click.File', 'click.File', (['"""rb"""'], {}), "('rb')\n", (854, 860), False, 'import click\n'), ((1520, 1594), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'file_okay': '(False)', 'dir_okay': '(True)', 'resolve_path': '(True)'}), '(exists=True, file_okay=False, dir_okay=True, resolve_path=True)\n', (1530, 1594), False, 'import click\n'), ((1721, 1737), 'click.File', 'click.File', (['"""rb"""'], {}), "('rb')\n", (1731, 1737), False, 'import click\n'), ((1369, 1390), 'sys.exit', 'sys.exit', (['e.EXIT_CODE'], {}), '(e.EXIT_CODE)\n', (1377, 1390), False, 'import sys\n'), ((2180, 2201), 'sys.exit', 'sys.exit', (['e.EXIT_CODE'], {}), '(e.EXIT_CODE)\n', (2188, 2201), False, 'import sys\n'), ((2229, 2266), 'os.environ.get', 'os.environ.get', (['"""PROMENADE_DEBUG"""', '""""""'], {}), "('PROMENADE_DEBUG', '')\n", (2243, 2266), False, 'import os\n')] |
#!/usr/bin/env python
import PySimpleGUI as sg
from os import system
from time import sleep
from threading import Thread
# Read Saved Theme
with open("theme.txt") as theme:
theme = theme.read()
# Variables
timer_goal = 5
timer = 0
start_loops = True
clicks = 0
running = True
final = 0
cps = 0
round_finished = False
off = True
record = ""
file = ""
saved_rec = ""
write_rec = ""
# Window Theme
sg.theme(theme)
#··Window Elements··#
# Upper menu
menu = [["File", ["Themes", ["Default", "Lite_Theme", "Light_Blue", "Dark_Blue", "Navy_Blue",
"Blue_Night", "Blue_Purple", "Light_Purple", "Dark_Purple",
"Black_and_Red", "Grey_and_Green", "Cob_Green", "Garnet", "Berry", "Green",]]],
["Time", ["1 Second", "3 Seconds", "5 Seconds", "10 Seconds", "15 Seconds", "30 Seconds", "1 Minute"]]]
# Themes Dictionary, to be able to write a custom name to the themes and not have the theme name that uses PySimpleGui
themes = dict(Default="DarkAmber", Lite_Theme="SystemDefault1", Light_Blue="BlueMono", Navy_Blue="DarkBlue13", Dark_Blue="DarkBlue12",
Blue_Night="DarkBlue14", Blue_Purple="DarkBlue5", Light_Purple="DarkBlue4", Dark_Purple="DarkBlue6",
Grey_and_Green="DarkGrey", Cob_Green="DarkGreen1", Black_and_Red="DarkBrown4", Berry="DarkPurple3", Garnet="DarkRed", Green="Green")
# The theme List of the Upper Menu
them_list = ["Default", "Lite_Theme", "Light_Blue", "Dark_Blue", "Navy_Blue",
"Blue_Night", "Blue_Purple", "Light_Purple", "Dark_Purple",
"Grey_and_Green", "Cob_Green", "Black_and_Red", "Berry", "Garnet", "Green"]
# Score table
scores = [ [sg.Text("0.0 CPS in 0s", justification='c', size=(30,1), font='Mambo 20', pad=(None, (0, 20)), key='-R-')],
[sg.Text("Timer: 0", justification='c', size=(30,1), font='Mambo 35', pad=(None, (0, 20)), key='-T-')],
[sg.Text("CPS: 0.0", justification='c', size=(30,1), font='Mambo 35', key='-CPS-')],
[sg.Text("Clicks: 0", justification='c', size=(30,1), font='Mambo 35', pad=(None, (20, 20)), key='-C-')]]
# Exit Button
close_but = [ [sg.Column(scores, pad=(0, (15, 1)), key='-CL-')],
[sg.Column([[sg.Button('Close', size=(12, 1))]], justification='right')]]
# Window construction
layout = [ [sg.Menu(menu, background_color="White", text_color="Black", key="-M-")],
[sg.Button('Press to Start', size=(60, 15), font='Arial 15', key='-B-'),
sg.Column(close_but, expand_y=True)]]
# Window Features
window = sg.Window('G-CPS', layout, size=(1000, 375), finalize=True, grab_anywhere=True, icon="ico.ico")
# Tkinter code required to position the close button
window['-CL-'].Widget.pack(expand=True)
window['-CL-'].Widget.master.pack(fill='y', expand=True)
# Display the record of the default open time mode when opening the program. If file doesn't exist a new one will be created
create_f = open("record_5.txt", "a")
create_f.close()
with open("record_5.txt", "r") as saved:
saved_rec = saved.read()
if saved_rec == "":
saved_rec = 0.0
record = str(saved_rec) + " CPS in 5s"
window["-R-"].update(record)
# Function to disable the Button for 3s to prevent extra clicks
def disable():
    """Lock the click button briefly after a round, then persist any new record.

    Reads the module globals (window, timer, timer_goal, cps) and rewrites
    "record_<timer_goal>.txt" when this round's CPS beats the stored value.

    Fixes vs original: removed the redundant ``saved.close()`` inside the
    ``with`` block, and the record is now written through a context manager
    so the handle is always closed.
    """
    global timer, off, file, timer_goal, saved_rec, write_rec, cps, record
    window["-B-"].update(disabled=True)
    window['-T-'].update("Timer: " + str(timer))
    window["-B-"].update("Done!")
    # Keep the button disabled for a moment so trailing spam clicks are ignored
    sleep(1.5)
    window.grab_any_where_on()
    window["-B-"].update("Press to Start")
    window["-B-"].update(disabled=False)
    off = False
    file = "record_" + str(timer_goal) + ".txt"
    # Read the stored record; the file is created at startup / mode selection
    with open(file, "r") as saved:
        saved_rec = saved.read()
    # Persist the new record when it beats (or first sets) the stored one
    if saved_rec == "" or float(saved_rec) < cps:
        with open(file, "w+") as write_rec:
            write_rec.write(str(cps))
    record = str(cps) + " CPS in " + str(timer_goal) + "s"
    window["-R-"].update(record)
# Creating the thread for the disable function
# NOTE(review): `dis` is never started in this chunk; timer_loop() calls
# disable() directly, so this Thread object appears unused — confirm.
dis = Thread(target=disable)
# Main loop where the timer gets updated
def timer_loop():
    """Background thread: advance the round timer once per second.

    Runs until the module-level `running` flag goes False. While a round is
    active it increments `timer` every second, and ends the round (calling
    disable()) once `timer` reaches `timer_goal`.
    """
    global timer, round_finished, clicks
    try:
        while running:
            if round_finished == False:
                sleep(1)
                timer += 1
                # Stopping the timer once the selected goal is reached
                if timer == timer_goal and off == True:
                    round_finished = True
                    disable()
                if round_finished == True:
                    timer = timer_goal
                window['-T-'].update("Timer: " + str(timer))
                if timer == 0 and clicks == 0:
                    window['-B-'].update("Press to Start")
            else:
                # Idle poll between rounds
                sleep(0.05)
    except:
        # NOTE(review): bare except silences everything, including widget
        # updates after the window is destroyed at shutdown — deliberate here.
        pass
# Creating the thread for the timer_loop() function
# (started from the main loop on the user's first click)
start_loop = Thread(target=timer_loop)
# Function where the CPS calculation is done
def calculation():
    """Background thread: recompute clicks-per-second a few times per second."""
    global clicks, cps, timer, round_finished, running
    while running:
        try:
            if round_finished == True:
                # Round over: just idle until the next round starts
                sleep(0.1)
                pass
            else:
                sleep(0.3)
                # Bare except below also swallows ZeroDivisionError while timer == 0
                cps = clicks / timer
                cps = round(cps, 1)
                window["-CPS-"].update("CPS: " + str(cps))
        except:
            pass
# Creating the thread for the calculation() function
# (started from the main loop on the user's first click)
start_calc = Thread(target=calculation)
#···Main PySimpleGui event loop···#
while running:
    window["-C-"].update("Clicks: " + str(clicks))
    # This first if will only run the first time the user clicks the Button
    if clicks == 1 and start_loops == True:
        start_loop.start()
        start_calc.start()
        start_loops = False
    event, values = window.read()
    # Window closing event
    if event == sg.WINDOW_CLOSED or event == "Close":
        running = False
    # Reset values when round is finished
    if round_finished == True:
        clicks = 0
        timer = 0
    # Restart the scores when the first click is pressed
    if event == '-B-':
        if off == False:
            off = True
            round_finished = False
        clicks += 1
        window['-C-'].update("Clicks: " + str(clicks))
    # Empty the Button when the user is spamming clicks
    if clicks != 0:
        window.grab_any_where_off()
        window['-B-'].Update("")
    # Theme saving: persist the chosen theme and restart via hide.vbs
    if event in them_list:
        set_theme= open("theme.txt","w+")
        set_theme.write(themes[event])
        set_theme.close()
        running = False
        system("hide.vbs")
    # Change the timer goal (only allowed while no round is in progress)
    if event == "1 Second" and clicks == 0:
        if timer >= 1:
            pass
        else:
            timer_goal = 1
    if event == "3 Seconds" and clicks == 0:
        if timer >= 3:
            pass
        else:
            timer_goal =3
    if event == "5 Seconds" and clicks == 0:
        if timer >= 5:
            pass
        else:
            timer_goal = 5
    if event == "10 Seconds" and clicks == 0:
        if timer >= 10:
            pass
        else:
            timer_goal = 10
    if event == "15 Seconds" and clicks == 0:
        if timer >= 15:
            pass
        else:
            timer_goal = 15
    if event == "30 Seconds" and clicks == 0:
        if timer >=30:
            pass
        else:
            timer_goal = 30
    if event == "1 Minute" and clicks == 0:
        if timer >= 60:
            pass
        else:
            timer_goal = 60
    # Show the saved record for the newly selected mode
    if event in ("1 Second", "3 Seconds", "5 Seconds", "10 Seconds", "15 Seconds", "30 Seconds", "1 Minute") and clicks == 0:
        if event == "1 Minute":
            event = "60"
        # Extract the number of seconds from the menu label
        event = int(''.join(filter(str.isdigit, event)))
        file = "record_" + str(event) + ".txt"
        # Create the file in case it doesn't exist yet
        create = open(file, "a")
        create.close()
        with open(file, "r") as saved:
            saved_rec = saved.read()
            if saved_rec == "":
                saved_rec = 0.0
            record = str(saved_rec) + " CPS in " + str(event) + "s"
            window["-R-"].update(record)
            saved.close()
# Closing the window
window.close()
# Made By PETEROLO 291© | [
"PySimpleGUI.Menu",
"PySimpleGUI.Column",
"PySimpleGUI.Text",
"time.sleep",
"PySimpleGUI.Button",
"PySimpleGUI.theme",
"threading.Thread",
"os.system",
"PySimpleGUI.Window"
] | [((407, 422), 'PySimpleGUI.theme', 'sg.theme', (['theme'], {}), '(theme)\n', (415, 422), True, 'import PySimpleGUI as sg\n'), ((2600, 2700), 'PySimpleGUI.Window', 'sg.Window', (['"""G-CPS"""', 'layout'], {'size': '(1000, 375)', 'finalize': '(True)', 'grab_anywhere': '(True)', 'icon': '"""ico.ico"""'}), "('G-CPS', layout, size=(1000, 375), finalize=True, grab_anywhere=\n True, icon='ico.ico')\n", (2609, 2700), True, 'import PySimpleGUI as sg\n'), ((4098, 4120), 'threading.Thread', 'Thread', ([], {'target': 'disable'}), '(target=disable)\n', (4104, 4120), False, 'from threading import Thread\n'), ((4994, 5019), 'threading.Thread', 'Thread', ([], {'target': 'timer_loop'}), '(target=timer_loop)\n', (5000, 5019), False, 'from threading import Thread\n'), ((5550, 5576), 'threading.Thread', 'Thread', ([], {'target': 'calculation'}), '(target=calculation)\n', (5556, 5576), False, 'from threading import Thread\n'), ((3518, 3528), 'time.sleep', 'sleep', (['(1.5)'], {}), '(1.5)\n', (3523, 3528), False, 'from time import sleep\n'), ((1714, 1824), 'PySimpleGUI.Text', 'sg.Text', (['"""0.0 CPS in 0s"""'], {'justification': '"""c"""', 'size': '(30, 1)', 'font': '"""Mambo 20"""', 'pad': '(None, (0, 20))', 'key': '"""-R-"""'}), "('0.0 CPS in 0s', justification='c', size=(30, 1), font='Mambo 20',\n pad=(None, (0, 20)), key='-R-')\n", (1721, 1824), True, 'import PySimpleGUI as sg\n'), ((1835, 1941), 'PySimpleGUI.Text', 'sg.Text', (['"""Timer: 0"""'], {'justification': '"""c"""', 'size': '(30, 1)', 'font': '"""Mambo 35"""', 'pad': '(None, (0, 20))', 'key': '"""-T-"""'}), "('Timer: 0', justification='c', size=(30, 1), font='Mambo 35', pad=(\n None, (0, 20)), key='-T-')\n", (1842, 1941), True, 'import PySimpleGUI as sg\n'), ((1951, 2038), 'PySimpleGUI.Text', 'sg.Text', (['"""CPS: 0.0"""'], {'justification': '"""c"""', 'size': '(30, 1)', 'font': '"""Mambo 35"""', 'key': '"""-CPS-"""'}), "('CPS: 0.0', justification='c', size=(30, 1), font='Mambo 35', key=\n '-CPS-')\n", (1958, 2038), 
True, 'import PySimpleGUI as sg\n'), ((2048, 2156), 'PySimpleGUI.Text', 'sg.Text', (['"""Clicks: 0"""'], {'justification': '"""c"""', 'size': '(30, 1)', 'font': '"""Mambo 35"""', 'pad': '(None, (20, 20))', 'key': '"""-C-"""'}), "('Clicks: 0', justification='c', size=(30, 1), font='Mambo 35', pad=\n (None, (20, 20)), key='-C-')\n", (2055, 2156), True, 'import PySimpleGUI as sg\n'), ((2185, 2232), 'PySimpleGUI.Column', 'sg.Column', (['scores'], {'pad': '(0, (15, 1))', 'key': '"""-CL-"""'}), "(scores, pad=(0, (15, 1)), key='-CL-')\n", (2194, 2232), True, 'import PySimpleGUI as sg\n'), ((2363, 2433), 'PySimpleGUI.Menu', 'sg.Menu', (['menu'], {'background_color': '"""White"""', 'text_color': '"""Black"""', 'key': '"""-M-"""'}), "(menu, background_color='White', text_color='Black', key='-M-')\n", (2370, 2433), True, 'import PySimpleGUI as sg\n'), ((2449, 2519), 'PySimpleGUI.Button', 'sg.Button', (['"""Press to Start"""'], {'size': '(60, 15)', 'font': '"""Arial 15"""', 'key': '"""-B-"""'}), "('Press to Start', size=(60, 15), font='Arial 15', key='-B-')\n", (2458, 2519), True, 'import PySimpleGUI as sg\n'), ((2534, 2569), 'PySimpleGUI.Column', 'sg.Column', (['close_but'], {'expand_y': '(True)'}), '(close_but, expand_y=True)\n', (2543, 2569), True, 'import PySimpleGUI as sg\n'), ((6733, 6751), 'os.system', 'system', (['"""hide.vbs"""'], {}), "('hide.vbs')\n", (6739, 6751), False, 'from os import system\n'), ((4322, 4330), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4327, 4330), False, 'from time import sleep\n'), ((4877, 4888), 'time.sleep', 'sleep', (['(0.05)'], {}), '(0.05)\n', (4882, 4888), False, 'from time import sleep\n'), ((5238, 5248), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (5243, 5248), False, 'from time import sleep\n'), ((5305, 5315), 'time.sleep', 'sleep', (['(0.3)'], {}), '(0.3)\n', (5310, 5315), False, 'from time import sleep\n'), ((2265, 2297), 'PySimpleGUI.Button', 'sg.Button', (['"""Close"""'], {'size': '(12, 1)'}), "('Close', size=(12, 
1))\n", (2274, 2297), True, 'import PySimpleGUI as sg\n')] |
import pylab
import matplotlib as mpl
import numpy as np
import matplotlib.colors as colors
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
from colormap import cmap_builder, test_cmap # for _build_colormap()
# ---------------------------------------------------------------------------
# Useful colormaps:
# plt.cm.spectral, plt.get_cmap('jet')
# hint: reverse colormaps by adding underscore and r, e.g. plt.cm.spectral_r
# ---------------------------------------------------------------------------
# cmap_rg = mcolors.LinearSegmentedColormap('my_colormap', cdict, 100)
def _build_colormap(color1, color2, color3):
    """Build a colormap interpolating through the three given color names.

    Bug fix: the original ignored its arguments and always returned a
    blue/orange/green map; the parameters are now passed through to
    ``cmap_builder``.
    """
    return cmap_builder(color1, color2, color3)
def show_truncated_colormap(cmap = plt.cm.spectral, minv = 0.2, maxv = 0.8):
    """Show the original and truncated versions of *cmap* side by side.

    Bug fix: the original reassigned ``cmap = plt.cm.spectral`` inside the
    body, so the caller's colormap argument was silently ignored.
    """
    arr = np.linspace(0, 50, 100).reshape((10, 10))
    fig, ax = plt.subplots(ncols=2)
    new_cmap = _truncate_colormap(cmap, minv, maxv)
    ax[0].imshow(arr, interpolation='nearest', cmap=cmap)
    ax[1].imshow(arr, interpolation='nearest', cmap=new_cmap)
    plt.show()
def _truncate_colormap(cmap = plt.cm.spectral, minval=0.0, maxval=1.0, n=100):
    """Return a new colormap built from *n* samples of *cmap* taken
    between *minval* and *maxval* (fractions of the original range)."""
    samples = cmap(np.linspace(minval, maxval, n))
    label = 'trunc({n},{a:.2f},{b:.2f})'.format(n=cmap.name, a=minval, b=maxval)
    return colors.LinearSegmentedColormap.from_list(label, samples)
def do_plot ( arr, filename = "test.png", title = "A plot", bool_save = False, minval=0, maxval=0.95, cmap=plt.cm.spectral ):
    """Plot and label a raster array, honoring the caller's colormap bounds.

    Parameters: `arr` raster data, `filename`/`bool_save` control saving to
    disk, `minval`/`maxval` pick the fraction of `cmap` to use.

    Bug fixes vs original: `cmap`, `minval` and `maxval` were ignored — the
    colormap was overwritten with plt.cm.spectral and a truncated colormap
    (`cmap_ndvi`) was computed with hard-coded bounds but never used.
    """
    # TODO: use opencv for saving files
    dpi = 200
    # Truncate the requested colormap to the requested value range
    cmap = _truncate_colormap(cmap, minval=minval, maxval=maxval, n=100)
    plt.imshow (arr, interpolation='nearest', cmap = cmap)
    plt.title(title)
    plt.colorbar()
    plt.axis('off')
    if bool_save == True:
        plt.savefig(filename,bbox_inches='tight', dpi = dpi)
    else:
        plt.show()
    plt.clf()
| [
"matplotlib.pyplot.imshow",
"colormap.cmap_builder",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.colorbar",
"matplotlib.pyplot.clf",
"matplotlib.pyplot.axis",
"numpy.linspace",
"matplotlib.pyplot.title",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((769, 808), 'colormap.cmap_builder', 'cmap_builder', (['"""blue"""', '"""orange"""', '"""green"""'], {}), "('blue', 'orange', 'green')\n", (781, 808), False, 'from colormap import cmap_builder, test_cmap\n'), ((1019, 1040), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'ncols': '(2)'}), '(ncols=2)\n', (1031, 1040), True, 'import matplotlib.pyplot as plt\n'), ((1245, 1255), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1253, 1255), True, 'import matplotlib.pyplot as plt\n'), ((2150, 2201), 'matplotlib.pyplot.imshow', 'plt.imshow', (['arr'], {'interpolation': '"""nearest"""', 'cmap': 'cmap'}), "(arr, interpolation='nearest', cmap=cmap)\n", (2160, 2201), True, 'import matplotlib.pyplot as plt\n'), ((2209, 2225), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (2218, 2225), True, 'import matplotlib.pyplot as plt\n'), ((2230, 2244), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (2242, 2244), True, 'import matplotlib.pyplot as plt\n'), ((2249, 2264), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (2257, 2264), True, 'import matplotlib.pyplot as plt\n'), ((2386, 2395), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (2393, 2395), True, 'import matplotlib.pyplot as plt\n'), ((2300, 2351), 'matplotlib.pyplot.savefig', 'plt.savefig', (['filename'], {'bbox_inches': '"""tight"""', 'dpi': 'dpi'}), "(filename, bbox_inches='tight', dpi=dpi)\n", (2311, 2351), True, 'import matplotlib.pyplot as plt\n'), ((2371, 2381), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2379, 2381), True, 'import matplotlib.pyplot as plt\n'), ((963, 986), 'numpy.linspace', 'np.linspace', (['(0)', '(50)', '(100)'], {}), '(0, 50, 100)\n', (974, 986), True, 'import numpy as np\n'), ((1545, 1575), 'numpy.linspace', 'np.linspace', (['minval', 'maxval', 'n'], {}), '(minval, maxval, n)\n', (1556, 1575), True, 'import numpy as np\n')] |
from django.contrib import admin
from django import forms
from import_export import fields,resources
from import_export.admin import ImportExportModelAdmin,ImportExportActionModelAdmin
from import_export.widgets import ForeignKeyWidget
from .models import Customer
class CustomerResource(resources.ModelResource):
    # django-import-export resource mapping all Customer fields by default.
    class Meta:
        model=Customer
class CustomerAdminForm(forms.ModelForm):
    # Plain ModelForm exposing every Customer field to the admin.
    class Meta:
        model = Customer
        fields = '__all__'
class CustomerAdmin(ImportExportActionModelAdmin):
    """Read-only Customer admin with import/export actions.

    Improvement vs original: the identical display and read-only field
    lists were written out twice; they now share one definition.
    """
    form = CustomerAdminForm
    resource_class=CustomerResource
    # Single source of truth: every listed column is also read-only.
    list_display = ['name', 'id', 'created', 'updated', 'phonenumber',
                    'Address', 'area', 'type', 'relatedas', 'relatedto']
    readonly_fields = list_display
# Expose Customer in the Django admin with the customized ModelAdmin above.
admin.site.register(Customer, CustomerAdmin)
# class SupplierResource(resources.ModelResource):
#
# class Meta:
# model = Supplier
#
# class SupplierAdminForm(forms.ModelForm):
#
# class Meta:
# model = Supplier
# fields = '__all__'
#
#
# class SupplierAdmin(ImportExportActionModelAdmin):
# form = SupplierAdminForm
# resource_class = SupplierResource
# list_display = ['name', 'id', 'created', 'last_updated', 'organisation', 'phonenumber', 'initial']
# readonly_fields = ['name', 'id', 'created', 'last_updated', 'organisation', 'phonenumber', 'initial']
#
# admin.site.register(Supplier, SupplierAdmin)
| [
"django.contrib.admin.site.register"
] | [((834, 878), 'django.contrib.admin.site.register', 'admin.site.register', (['Customer', 'CustomerAdmin'], {}), '(Customer, CustomerAdmin)\n', (853, 878), False, 'from django.contrib import admin\n')] |
import os
import sys
import argparse
import logging
import datetime as dt
from fishnet_generator import fishnet_func
from hru_parameters import hru_parameters
from dem_parameters import dem_parameters
from dem_2_streams import flow_parameters
from crt_fill_parameters import crt_fill_parameters
from stream_parameters import stream_parameters
from veg_parameters import veg_parameters
from soil_raster_prep import soil_raster_prep
from soil_parameters import soil_parameters
from prism_800m_normals import prism_800m_parameters
from ppt_ratio_parameters import ppt_ratio_parameters
from impervious_parameters import impervious_parameters
from prms_template_fill import prms_template_fill
def run_batch(overwrite=True):
    """Run every gsflow-arcpy parameterization step, in order, for one INI.

    Parameters
    ----------
    overwrite : bool
        Forwarded to fishnet_func as overwrite_flag.

    Improvement vs original: the thirteen identical log+call pairs are now
    driven from a step list; the order of execution is unchanged.
    """
    args = argparse.Namespace(
        ini=r'C:\Users\CNA372\gsflow-arcpy-master\uyws_multibasin\uyws_parameters.ini',
        overwrite=overwrite)

    logging.basicConfig(level=logging.DEBUG, format='%(message)s')
    logging.info('\n{}'.format('#' * 80))
    log_f = '{:<20s} {}'
    logging.info(log_f.format(
        'Run Time Stamp:', dt.datetime.now().isoformat(' ')))
    logging.info(log_f.format('Current Directory:', os.getcwd()))
    logging.info(log_f.format('Script:', os.path.basename(sys.argv[0])))

    # fishnet_func is the only step that takes the overwrite flag.
    logging.info('\n\n\n\nfishnet_func \n\n\n\n')
    fishnet_func(config_path=args.ini, overwrite_flag=args.overwrite)

    # The remaining steps share the same signature; run them in this order.
    steps = [
        hru_parameters,
        dem_parameters,
        veg_parameters,
        soil_raster_prep,
        soil_parameters,
        impervious_parameters,
        prism_800m_parameters,
        ppt_ratio_parameters,
        flow_parameters,
        crt_fill_parameters,
        stream_parameters,
        prms_template_fill,
    ]
    for step in steps:
        logging.info('\n\n\n\n{} \n\n\n\n'.format(step.__name__))
        step(config_path=args.ini)
# Script entry point: run the full parameterization batch.
if __name__ == '__main__':
    run_batch(overwrite=True)
# ========================= EOF ====================================================================
| [
"prism_800m_normals.prism_800m_parameters",
"crt_fill_parameters.crt_fill_parameters",
"dem_2_streams.flow_parameters",
"prms_template_fill.prms_template_fill",
"argparse.Namespace",
"fishnet_generator.fishnet_func",
"logging.info",
"veg_parameters.veg_parameters",
"stream_parameters.stream_paramete... | [((733, 866), 'argparse.Namespace', 'argparse.Namespace', ([], {'ini': '"""C:\\\\Users\\\\CNA372\\\\gsflow-arcpy-master\\\\uyws_multibasin\\\\uyws_parameters.ini"""', 'overwrite': 'overwrite'}), "(ini=\n 'C:\\\\Users\\\\CNA372\\\\gsflow-arcpy-master\\\\uyws_multibasin\\\\uyws_parameters.ini'\n , overwrite=overwrite)\n", (751, 866), False, 'import argparse\n'), ((887, 949), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""%(message)s"""'}), "(level=logging.DEBUG, format='%(message)s')\n", (906, 949), False, 'import logging\n'), ((1254, 1295), 'logging.info', 'logging.info', (['"""\n\n\n\nfishnet_func \n\n\n\n"""'], {}), '("""\n\n\n\nfishnet_func \n\n\n\n""")\n', (1266, 1295), False, 'import logging\n'), ((1304, 1369), 'fishnet_generator.fishnet_func', 'fishnet_func', ([], {'config_path': 'args.ini', 'overwrite_flag': 'args.overwrite'}), '(config_path=args.ini, overwrite_flag=args.overwrite)\n', (1316, 1369), False, 'from fishnet_generator import fishnet_func\n'), ((1375, 1418), 'logging.info', 'logging.info', (['"""\n\n\n\nhru_parameters \n\n\n\n"""'], {}), '("""\n\n\n\nhru_parameters \n\n\n\n""")\n', (1387, 1418), False, 'import logging\n'), ((1427, 1463), 'hru_parameters.hru_parameters', 'hru_parameters', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (1441, 1463), False, 'from hru_parameters import hru_parameters\n'), ((1469, 1512), 'logging.info', 'logging.info', (['"""\n\n\n\ndem_parameters \n\n\n\n"""'], {}), '("""\n\n\n\ndem_parameters \n\n\n\n""")\n', (1481, 1512), False, 'import logging\n'), ((1521, 1557), 'dem_parameters.dem_parameters', 'dem_parameters', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (1535, 1557), False, 'from dem_parameters import dem_parameters\n'), ((1563, 1606), 'logging.info', 'logging.info', (['"""\n\n\n\nveg_parameters \n\n\n\n"""'], {}), '("""\n\n\n\nveg_parameters \n\n\n\n""")\n', (1575, 1606), False, 'import 
logging\n'), ((1615, 1651), 'veg_parameters.veg_parameters', 'veg_parameters', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (1629, 1651), False, 'from veg_parameters import veg_parameters\n'), ((1657, 1702), 'logging.info', 'logging.info', (['"""\n\n\n\nsoil_raster_prep \n\n\n\n"""'], {}), '("""\n\n\n\nsoil_raster_prep \n\n\n\n""")\n', (1669, 1702), False, 'import logging\n'), ((1711, 1749), 'soil_raster_prep.soil_raster_prep', 'soil_raster_prep', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (1727, 1749), False, 'from soil_raster_prep import soil_raster_prep\n'), ((1755, 1799), 'logging.info', 'logging.info', (['"""\n\n\n\nsoil_parameters \n\n\n\n"""'], {}), '("""\n\n\n\nsoil_parameters \n\n\n\n""")\n', (1767, 1799), False, 'import logging\n'), ((1808, 1845), 'soil_parameters.soil_parameters', 'soil_parameters', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (1823, 1845), False, 'from soil_parameters import soil_parameters\n'), ((1851, 1901), 'logging.info', 'logging.info', (['"""\n\n\n\nimpervious_parameters \n\n\n\n"""'], {}), '("""\n\n\n\nimpervious_parameters \n\n\n\n""")\n', (1863, 1901), False, 'import logging\n'), ((1910, 1953), 'impervious_parameters.impervious_parameters', 'impervious_parameters', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (1931, 1953), False, 'from impervious_parameters import impervious_parameters\n'), ((1959, 2009), 'logging.info', 'logging.info', (['"""\n\n\n\nprism_800m_parameters \n\n\n\n"""'], {}), '("""\n\n\n\nprism_800m_parameters \n\n\n\n""")\n', (1971, 2009), False, 'import logging\n'), ((2018, 2061), 'prism_800m_normals.prism_800m_parameters', 'prism_800m_parameters', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (2039, 2061), False, 'from prism_800m_normals import prism_800m_parameters\n'), ((2067, 2116), 'logging.info', 'logging.info', (['"""\n\n\n\nppt_ratio_parameters \n\n\n\n"""'], {}), '("""\n\n\n\nppt_ratio_parameters 
\n\n\n\n""")\n', (2079, 2116), False, 'import logging\n'), ((2125, 2167), 'ppt_ratio_parameters.ppt_ratio_parameters', 'ppt_ratio_parameters', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (2145, 2167), False, 'from ppt_ratio_parameters import ppt_ratio_parameters\n'), ((2173, 2217), 'logging.info', 'logging.info', (['"""\n\n\n\nflow_parameters \n\n\n\n"""'], {}), '("""\n\n\n\nflow_parameters \n\n\n\n""")\n', (2185, 2217), False, 'import logging\n'), ((2226, 2263), 'dem_2_streams.flow_parameters', 'flow_parameters', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (2241, 2263), False, 'from dem_2_streams import flow_parameters\n'), ((2269, 2317), 'logging.info', 'logging.info', (['"""\n\n\n\ncrt_fill_parameters \n\n\n\n"""'], {}), '("""\n\n\n\ncrt_fill_parameters \n\n\n\n""")\n', (2281, 2317), False, 'import logging\n'), ((2326, 2367), 'crt_fill_parameters.crt_fill_parameters', 'crt_fill_parameters', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (2345, 2367), False, 'from crt_fill_parameters import crt_fill_parameters\n'), ((2373, 2419), 'logging.info', 'logging.info', (['"""\n\n\n\nstream_parameters \n\n\n\n"""'], {}), '("""\n\n\n\nstream_parameters \n\n\n\n""")\n', (2385, 2419), False, 'import logging\n'), ((2428, 2467), 'stream_parameters.stream_parameters', 'stream_parameters', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (2445, 2467), False, 'from stream_parameters import stream_parameters\n'), ((2473, 2520), 'logging.info', 'logging.info', (['"""\n\n\n\nprms_template_fill \n\n\n\n"""'], {}), '("""\n\n\n\nprms_template_fill \n\n\n\n""")\n', (2485, 2520), False, 'import logging\n'), ((2529, 2569), 'prms_template_fill.prms_template_fill', 'prms_template_fill', ([], {'config_path': 'args.ini'}), '(config_path=args.ini)\n', (2547, 2569), False, 'from prms_template_fill import prms_template_fill\n'), ((1162, 1173), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1171, 1173), False, 'import os\n'), 
((1217, 1246), 'os.path.basename', 'os.path.basename', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (1233, 1246), False, 'import os\n'), ((1075, 1092), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (1090, 1092), True, 'import datetime as dt\n')] |
from absl import app, flags, logging
from absl.flags import FLAGS
import tensorflow as tf
import numpy as np
import cv2
from tensorflow.keras.callbacks import (
ReduceLROnPlateau,
EarlyStopping,
ModelCheckpoint,
TensorBoard
)
from yolov3_tf2.models import (
YoloV3, YoloV3Tiny, YoloLoss,
yolo_anchors, yolo_anchor_masks,
yolo_tiny_anchors, yolo_tiny_anchor_masks
)
from yolov3_tf2.utils import freeze_all
import yolov3_tf2.dataset as dataset
class FLAGS:
    """Static training configuration (namespace stand-in for absl FLAGS)."""
    # Data paths
    dataset = './data/voc2012_train.tfrecord'
    val_dataset = None
    classes = './data/voc2012.names'
    # Model selection / weights
    tiny = False
    weights = None
    size = 416
    num_classes = 20
    weights_num_classes = None
    # Training schedule
    mode = 'eager_tf'
    transfer = 'none'
    epochs = 2
    batch_size = 8
    learning_rate = 1e-3
def main(_argv):
    """Train YOLOv3 (or YOLOv3-tiny) as configured by FLAGS.

    Builds the model, loads the train/val datasets, applies the selected
    transfer-learning scheme, then trains either with a hand-written eager
    loop (mode 'eager_tf') or via model.fit.
    """
    # Let TensorFlow grow GPU memory on demand instead of grabbing it all.
    physical_devices = tf.config.experimental.list_physical_devices('GPU')
    for physical_device in physical_devices:
        tf.config.experimental.set_memory_growth(physical_device, True)

    # Model and matching anchor set
    if FLAGS.tiny:
        model = YoloV3Tiny(FLAGS.size, training=True,
                           classes=FLAGS.num_classes)
        anchors = yolo_tiny_anchors
        anchor_masks = yolo_tiny_anchor_masks
    else:
        model = YoloV3(FLAGS.size, training=True, classes=FLAGS.num_classes)
        anchors = yolo_anchors
        anchor_masks = yolo_anchor_masks

    # Training data pipeline: shuffle, batch, resize + target encoding, prefetch
    if FLAGS.dataset:
        train_dataset = dataset.load_tfrecord_dataset(FLAGS.dataset, FLAGS.classes, FLAGS.size)
    else:
        train_dataset = dataset.load_fake_dataset()
    train_dataset = train_dataset.shuffle(buffer_size=512)
    train_dataset = train_dataset.batch(FLAGS.batch_size)
    train_dataset = train_dataset.map(lambda x, y: (
        dataset.transform_images(x, FLAGS.size),
        dataset.transform_targets(y, anchors, anchor_masks, FLAGS.size)))
    train_dataset = train_dataset.prefetch(
        buffer_size=tf.data.experimental.AUTOTUNE)

    # Validation pipeline (no shuffling/prefetch)
    if FLAGS.val_dataset:
        val_dataset = dataset.load_tfrecord_dataset(
            FLAGS.val_dataset, FLAGS.classes, FLAGS.size)
    else:
        val_dataset = dataset.load_fake_dataset()
    val_dataset = val_dataset.batch(FLAGS.batch_size)
    val_dataset = val_dataset.map(lambda x, y: (
        dataset.transform_images(x, FLAGS.size),
        dataset.transform_targets(y, anchors, anchor_masks, FLAGS.size)))

    # Configure the model for transfer learning
    if FLAGS.transfer == 'none':
        pass # Nothing to do
    elif FLAGS.transfer in ['darknet', 'no_output']:
        # Darknet transfer is a special case that works
        # with incompatible number of classes
        # reset top layers
        if FLAGS.tiny:
            model_pretrained = YoloV3Tiny(
                FLAGS.size, training=True, classes=FLAGS.weights_num_classes or FLAGS.num_classes)
        else:
            model_pretrained = YoloV3(
                FLAGS.size, training=True, classes=FLAGS.weights_num_classes or FLAGS.num_classes)
        # NOTE(review): weight loading is commented out below, so the
        # "pretrained" model copies randomly initialized weights — confirm.
        #model_pretrained.load_weights(FLAGS.weights)
        if FLAGS.transfer == 'darknet':
            model.get_layer('yolo_darknet').set_weights(
                model_pretrained.get_layer('yolo_darknet').get_weights())
            freeze_all(model.get_layer('yolo_darknet'))
        elif FLAGS.transfer == 'no_output':
            for l in model.layers:
                if not l.name.startswith('yolo_output'):
                    l.set_weights(model_pretrained.get_layer(
                        l.name).get_weights())
                    freeze_all(l)
    else:
        # All other transfer modes require matching classes
        model.load_weights(FLAGS.weights)
        if FLAGS.transfer == 'fine_tune':
            # freeze darknet and fine tune other layers
            darknet = model.get_layer('yolo_darknet')
            freeze_all(darknet)
        elif FLAGS.transfer == 'frozen':
            # freeze everything
            freeze_all(model)

    optimizer = tf.keras.optimizers.Adam(lr=FLAGS.learning_rate)
    # One YoloLoss per output scale
    loss = [YoloLoss(anchors[mask], classes=FLAGS.num_classes)
            for mask in anchor_masks]
    print(model)

    if FLAGS.mode == 'eager_tf':
        # Eager mode is great for debugging
        # Non eager graph mode is recommended for real training
        avg_loss = tf.keras.metrics.Mean('loss', dtype=tf.float32)
        avg_val_loss = tf.keras.metrics.Mean('val_loss', dtype=tf.float32)

        for epoch in range(1, FLAGS.epochs + 1):
            # --- training pass ---
            for batch, (images, labels) in enumerate(train_dataset):
                with tf.GradientTape() as tape:
                    outputs = model(images, training=True)
                    regularization_loss = tf.reduce_sum(model.losses)
                    pred_loss = []
                    wh_batch_losses = []
                    # Each loss_fn returns its four components separately
                    for output, label, loss_fn in zip(outputs, labels, loss):
                        batch_xy_loss, batch_wh_loss, batch_obj_loss, batch_class_loss = loss_fn(label, output)
                        pred_total_loss = batch_xy_loss + batch_wh_loss + batch_obj_loss + batch_class_loss
                        pred_loss.append(pred_total_loss)
                        wh_batch_losses.append(batch_wh_loss)
                    total_loss = tf.reduce_sum(pred_loss) + regularization_loss
                    total_wh_batch_loss = tf.reduce_sum(wh_batch_losses)

                grads = tape.gradient(total_loss, model.trainable_variables)
                optimizer.apply_gradients(
                    zip(grads, model.trainable_variables))

                logging.info("{}_train_{}, {}, {} , {} ".format(
                    epoch, batch, total_loss.numpy(), total_wh_batch_loss.numpy(),
                    list(map(lambda x: np.sum(x.numpy()), pred_loss))))
                avg_loss.update_state(total_loss)

            # --- validation pass (no gradients) ---
            for batch, (images, labels) in enumerate(val_dataset):
                outputs = model(images)
                regularization_loss = tf.reduce_sum(model.losses)
                pred_loss = []
                for output, label, loss_fn in zip(outputs, labels, loss):
                    pred_loss.append(loss_fn(label, output))
                total_loss = tf.reduce_sum(pred_loss) + regularization_loss

                logging.info("{}_val_{}, {}, {}".format(
                    epoch, batch, total_loss.numpy(),
                    list(map(lambda x: np.sum(x.numpy()), pred_loss))))
                avg_val_loss.update_state(total_loss)

            logging.info("{}, train: {}, val: {}".format(
                epoch,
                avg_loss.result().numpy(),
                avg_val_loss.result().numpy()))

            avg_loss.reset_states()
            avg_val_loss.reset_states()

            model.save_weights(
                'checkpoints/yolov3_train_{}.tf'.format(epoch))
    else:
        model.compile(optimizer=optimizer, loss=loss,
                      run_eagerly=(FLAGS.mode == 'eager_fit'))

        callbacks = [
            ReduceLROnPlateau(verbose=1),
            EarlyStopping(patience=3, verbose=1),
            ModelCheckpoint('checkpoints/yolov3_train_{epoch}.tf',
                            verbose=1, save_weights_only=True),
            TensorBoard(log_dir='logs')
        ]

        history = model.fit(train_dataset,
                            epochs=FLAGS.epochs,
                            callbacks=callbacks,
                            validation_data=val_dataset)
# Run the absl app; swallow the SystemExit that absl raises on normal exit.
try:
    app.run(main)
except SystemExit:
    pass
| [
"yolov3_tf2.dataset.load_fake_dataset",
"yolov3_tf2.models.YoloLoss",
"tensorflow.reduce_sum",
"yolov3_tf2.utils.freeze_all",
"tensorflow.GradientTape",
"tensorflow.keras.callbacks.EarlyStopping",
"yolov3_tf2.dataset.load_tfrecord_dataset",
"yolov3_tf2.models.YoloV3",
"tensorflow.keras.callbacks.Red... | [((876, 927), 'tensorflow.config.experimental.list_physical_devices', 'tf.config.experimental.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (920, 927), True, 'import tensorflow as tf\n'), ((4067, 4115), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', ([], {'lr': 'FLAGS.learning_rate'}), '(lr=FLAGS.learning_rate)\n', (4091, 4115), True, 'import tensorflow as tf\n'), ((7617, 7630), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (7624, 7630), False, 'from absl import app, flags, logging\n'), ((983, 1046), 'tensorflow.config.experimental.set_memory_growth', 'tf.config.experimental.set_memory_growth', (['physical_device', '(True)'], {}), '(physical_device, True)\n', (1023, 1046), True, 'import tensorflow as tf\n'), ((1086, 1150), 'yolov3_tf2.models.YoloV3Tiny', 'YoloV3Tiny', (['FLAGS.size'], {'training': '(True)', 'classes': 'FLAGS.num_classes'}), '(FLAGS.size, training=True, classes=FLAGS.num_classes)\n', (1096, 1150), False, 'from yolov3_tf2.models import YoloV3, YoloV3Tiny, YoloLoss, yolo_anchors, yolo_anchor_masks, yolo_tiny_anchors, yolo_tiny_anchor_masks\n'), ((1291, 1351), 'yolov3_tf2.models.YoloV3', 'YoloV3', (['FLAGS.size'], {'training': '(True)', 'classes': 'FLAGS.num_classes'}), '(FLAGS.size, training=True, classes=FLAGS.num_classes)\n', (1297, 1351), False, 'from yolov3_tf2.models import YoloV3, YoloV3Tiny, YoloLoss, yolo_anchors, yolo_anchor_masks, yolo_tiny_anchors, yolo_tiny_anchor_masks\n'), ((1476, 1547), 'yolov3_tf2.dataset.load_tfrecord_dataset', 'dataset.load_tfrecord_dataset', (['FLAGS.dataset', 'FLAGS.classes', 'FLAGS.size'], {}), '(FLAGS.dataset, FLAGS.classes, FLAGS.size)\n', (1505, 1547), True, 'import yolov3_tf2.dataset as dataset\n'), ((1584, 1611), 'yolov3_tf2.dataset.load_fake_dataset', 'dataset.load_fake_dataset', ([], {}), '()\n', (1609, 1611), True, 'import yolov3_tf2.dataset as dataset\n'), ((2059, 2134), 'yolov3_tf2.dataset.load_tfrecord_dataset', 
'dataset.load_tfrecord_dataset', (['FLAGS.val_dataset', 'FLAGS.classes', 'FLAGS.size'], {}), '(FLAGS.val_dataset, FLAGS.classes, FLAGS.size)\n', (2088, 2134), True, 'import yolov3_tf2.dataset as dataset\n'), ((2183, 2210), 'yolov3_tf2.dataset.load_fake_dataset', 'dataset.load_fake_dataset', ([], {}), '()\n', (2208, 2210), True, 'import yolov3_tf2.dataset as dataset\n'), ((4129, 4179), 'yolov3_tf2.models.YoloLoss', 'YoloLoss', (['anchors[mask]'], {'classes': 'FLAGS.num_classes'}), '(anchors[mask], classes=FLAGS.num_classes)\n', (4137, 4179), False, 'from yolov3_tf2.models import YoloV3, YoloV3Tiny, YoloLoss, yolo_anchors, yolo_anchor_masks, yolo_tiny_anchors, yolo_tiny_anchor_masks\n'), ((4401, 4448), 'tensorflow.keras.metrics.Mean', 'tf.keras.metrics.Mean', (['"""loss"""'], {'dtype': 'tf.float32'}), "('loss', dtype=tf.float32)\n", (4422, 4448), True, 'import tensorflow as tf\n'), ((4473, 4524), 'tensorflow.keras.metrics.Mean', 'tf.keras.metrics.Mean', (['"""val_loss"""'], {'dtype': 'tf.float32'}), "('val_loss', dtype=tf.float32)\n", (4494, 4524), True, 'import tensorflow as tf\n'), ((7132, 7160), 'tensorflow.keras.callbacks.ReduceLROnPlateau', 'ReduceLROnPlateau', ([], {'verbose': '(1)'}), '(verbose=1)\n', (7149, 7160), False, 'from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, ModelCheckpoint, TensorBoard\n'), ((7175, 7211), 'tensorflow.keras.callbacks.EarlyStopping', 'EarlyStopping', ([], {'patience': '(3)', 'verbose': '(1)'}), '(patience=3, verbose=1)\n', (7188, 7211), False, 'from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, ModelCheckpoint, TensorBoard\n'), ((7226, 7319), 'tensorflow.keras.callbacks.ModelCheckpoint', 'ModelCheckpoint', (['"""checkpoints/yolov3_train_{epoch}.tf"""'], {'verbose': '(1)', 'save_weights_only': '(True)'}), "('checkpoints/yolov3_train_{epoch}.tf', verbose=1,\n save_weights_only=True)\n", (7241, 7319), False, 'from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, 
ModelCheckpoint, TensorBoard\n'), ((7359, 7386), 'tensorflow.keras.callbacks.TensorBoard', 'TensorBoard', ([], {'log_dir': '"""logs"""'}), "(log_dir='logs')\n", (7370, 7386), False, 'from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, ModelCheckpoint, TensorBoard\n'), ((1794, 1833), 'yolov3_tf2.dataset.transform_images', 'dataset.transform_images', (['x', 'FLAGS.size'], {}), '(x, FLAGS.size)\n', (1818, 1833), True, 'import yolov3_tf2.dataset as dataset\n'), ((1844, 1907), 'yolov3_tf2.dataset.transform_targets', 'dataset.transform_targets', (['y', 'anchors', 'anchor_masks', 'FLAGS.size'], {}), '(y, anchors, anchor_masks, FLAGS.size)\n', (1869, 1907), True, 'import yolov3_tf2.dataset as dataset\n'), ((2325, 2364), 'yolov3_tf2.dataset.transform_images', 'dataset.transform_images', (['x', 'FLAGS.size'], {}), '(x, FLAGS.size)\n', (2349, 2364), True, 'import yolov3_tf2.dataset as dataset\n'), ((2375, 2438), 'yolov3_tf2.dataset.transform_targets', 'dataset.transform_targets', (['y', 'anchors', 'anchor_masks', 'FLAGS.size'], {}), '(y, anchors, anchor_masks, FLAGS.size)\n', (2400, 2438), True, 'import yolov3_tf2.dataset as dataset\n'), ((2801, 2898), 'yolov3_tf2.models.YoloV3Tiny', 'YoloV3Tiny', (['FLAGS.size'], {'training': '(True)', 'classes': '(FLAGS.weights_num_classes or FLAGS.num_classes)'}), '(FLAGS.size, training=True, classes=FLAGS.weights_num_classes or\n FLAGS.num_classes)\n', (2811, 2898), False, 'from yolov3_tf2.models import YoloV3, YoloV3Tiny, YoloLoss, yolo_anchors, yolo_anchor_masks, yolo_tiny_anchors, yolo_tiny_anchor_masks\n'), ((2960, 3053), 'yolov3_tf2.models.YoloV3', 'YoloV3', (['FLAGS.size'], {'training': '(True)', 'classes': '(FLAGS.weights_num_classes or FLAGS.num_classes)'}), '(FLAGS.size, training=True, classes=FLAGS.weights_num_classes or\n FLAGS.num_classes)\n', (2966, 3053), False, 'from yolov3_tf2.models import YoloV3, YoloV3Tiny, YoloLoss, yolo_anchors, yolo_anchor_masks, yolo_tiny_anchors, yolo_tiny_anchor_masks\n'), 
((3922, 3941), 'yolov3_tf2.utils.freeze_all', 'freeze_all', (['darknet'], {}), '(darknet)\n', (3932, 3941), False, 'from yolov3_tf2.utils import freeze_all\n'), ((6091, 6118), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['model.losses'], {}), '(model.losses)\n', (6104, 6118), True, 'import tensorflow as tf\n'), ((4030, 4047), 'yolov3_tf2.utils.freeze_all', 'freeze_all', (['model'], {}), '(model)\n', (4040, 4047), False, 'from yolov3_tf2.utils import freeze_all\n'), ((4669, 4686), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (4684, 4686), True, 'import tensorflow as tf\n'), ((4799, 4826), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['model.losses'], {}), '(model.losses)\n', (4812, 4826), True, 'import tensorflow as tf\n'), ((5452, 5482), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['wh_batch_losses'], {}), '(wh_batch_losses)\n', (5465, 5482), True, 'import tensorflow as tf\n'), ((6318, 6342), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['pred_loss'], {}), '(pred_loss)\n', (6331, 6342), True, 'import tensorflow as tf\n'), ((5362, 5386), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['pred_loss'], {}), '(pred_loss)\n', (5375, 5386), True, 'import tensorflow as tf\n'), ((3629, 3642), 'yolov3_tf2.utils.freeze_all', 'freeze_all', (['l'], {}), '(l)\n', (3639, 3642), False, 'from yolov3_tf2.utils import freeze_all\n')] |
import argparse
import json
import subprocess
import sys
from eks import AwsCluster
from shell import run_out, run_in
from utils import reduce_subnets, get_current_region
# Fallback VPC CIDR used when --cidr is not supplied on the command line.
DEFAULT_CIDR_BLOCK = "192.168.0.0/16"
def create_cluster(data):
    """Create an EKS cluster by streaming the generated config to eksctl.

    ``data`` is forwarded as keyword arguments to ``run_in``, which runs
    ``eksctl create cluster -f -`` and feeds it the configuration on stdin.
    """
    eksctl_cmd = ("eksctl", "create", "cluster", "-f", "-")
    run_in(*eksctl_cmd, **data)
def generate_cluster_cfg(name, region, cidr, vpcid, private, public):
    """Build an eksctl ClusterConfig mapping for the member cluster.

    When a reference VPC is fully specified (id plus private and public
    subnets) the new cluster attaches to it; otherwise the config describes
    a fresh VPC with a single NAT gateway and both cluster endpoints enabled.
    """
    if private and public and vpcid:
        # Reuse the reference cluster's VPC and subnets.
        vpc_section = {
            'cidr': cidr,
            'id': vpcid,
            'subnets': {
                'private': reduce_subnets(private),
                'public': reduce_subnets(public)
            }
        }
    else:
        # Stand-alone VPC with one shared NAT gateway.
        vpc_section = {
            'cidr': cidr,
            'nat': {'gateway': 'Single'},
            'clusterEndpoints': {'publicAccess': True, 'privateAccess': True}
        }
    # Single spot-backed nodegroup of two t3/t3a large instances.
    member_nodegroup = {
        'name': 'member-ng',
        'minSize': 2,
        'maxSize': 2,
        'instancesDistribution': {
            'maxPrice': 0.093,
            'instanceTypes': ["t3a.large", "t3.large"],
            'onDemandBaseCapacity': 0,
            'onDemandPercentageAboveBaseCapacity': 50,
            'spotInstancePools': 2
        },
        'ssh': {
            'publicKeyPath': '~/.ssh/id_rsa.pub'
        },
        'iam': {
            'withAddonPolicies': {
                'externalDNS': True
            }
        }
    }
    return {
        'apiVersion': 'eksctl.io/v1alpha5',
        'kind': 'ClusterConfig',
        'metadata': {
            'name': name,
            'region': region
        },
        'vpc': vpc_section,
        'nodeGroups': [member_nodegroup]
    }
def open_security_groups(cluster_name, region):
    """Allow all traffic from anywhere on the cluster nodegroup's security group.

    Looks up the security group tagged for *cluster_name* and adds an
    all-protocols, all-ports ingress rule from 0.0.0.0/0.
    """
    described = run_out("aws", "ec2", "describe-security-groups",
                        "--region", region, "--filters",
                        "Name=tag:aws:cloudformation:logical-id,Values=SG",
                        "Name=tag:alpha.eksctl.io/cluster-name,Values=" + cluster_name)
    groups = described['SecurityGroups']
    if len(groups) < 1:
        raise Exception("no security group found for cluster {0} nodegroup".format(cluster_name))
    authorize_cmd = [
        "aws", "ec2", "authorize-security-group-ingress",
        "--group-id", groups[0]['GroupId'], "--protocol", "-1",
        "--port", "-1", "--cidr", "0.0.0.0/0", "--region", region,
    ]
    subprocess.check_call(authorize_cmd)
def main():
    """CLI entry point: build the cluster config and create the cluster."""
    parser = argparse.ArgumentParser(description='Utility for dealing with AWS clusters')
    parser.add_argument('--name', required=True,
                        help='Member cluster name to create config for.')
    parser.add_argument('--region', required=False,
                        help='Member cluster region')
    parser.add_argument('--ref', required=False,
                        help='Reference cluster name (client cluster will use reference clusters vpc when is created)')
    parser.add_argument('--cidr', required=False,
                        help='Client cluster name to create config yaml for.')
    parser.add_argument('--test', required=False,
                        help='Dump generated config', action='store_true')
    parser.add_argument('--open-sg', required=False,
                        help='Open all ports and all ips for SecurityGroups', dest='open_sg', action='store_true')
    args = parser.parse_args()
    # Fall back to defaults for the optional flags.
    cidr = args.cidr or DEFAULT_CIDR_BLOCK
    region = args.region or get_current_region()
    # When a reference cluster is given, reuse its VPC id and subnets.
    priv_subnets = pub_subnets = vpcid = None
    if args.ref:
        ref_cluster = AwsCluster(args.ref, region)
        priv_subnets = ref_cluster.get_subnets("Private")
        pub_subnets = ref_cluster.get_subnets("Public")
        vpcid = ref_cluster.get_vpcid()
    cfg = generate_cluster_cfg(args.name, region, cidr, vpcid, priv_subnets, pub_subnets)
    if args.test:
        # Dry run: print the generated config instead of creating anything.
        json.dump(cfg, sys.stdout, indent=4)
        return
    create_cluster(cfg)
    if args.open_sg:
        open_security_groups(args.name, region)
# Run the CLI only when executed as a script, not on import.
if __name__ == '__main__':
    main()
| [
"shell.run_out",
"argparse.ArgumentParser",
"subprocess.check_call",
"json.dump",
"utils.get_current_region",
"shell.run_in",
"utils.reduce_subnets",
"eks.AwsCluster"
] | [((243, 299), 'shell.run_in', 'run_in', (['"""eksctl"""', '"""create"""', '"""cluster"""', '"""-f"""', '"""-"""'], {}), "('eksctl', 'create', 'cluster', '-f', '-', **data)\n", (249, 299), False, 'from shell import run_out, run_in\n'), ((1762, 1969), 'shell.run_out', 'run_out', (['"""aws"""', '"""ec2"""', '"""describe-security-groups"""', '"""--region"""', 'region', '"""--filters"""', '"""Name=tag:aws:cloudformation:logical-id,Values=SG"""', "('Name=tag:alpha.eksctl.io/cluster-name,Values=' + cluster_name)"], {}), "('aws', 'ec2', 'describe-security-groups', '--region', region,\n '--filters', 'Name=tag:aws:cloudformation:logical-id,Values=SG', \n 'Name=tag:alpha.eksctl.io/cluster-name,Values=' + cluster_name)\n", (1769, 1969), False, 'from shell import run_out, run_in\n'), ((2170, 2362), 'subprocess.check_call', 'subprocess.check_call', (["['aws', 'ec2', 'authorize-security-group-ingress', '--group-id', sg[0][\n 'GroupId'], '--protocol', '-1', '--port', '-1', '--cidr', '0.0.0.0/0',\n '--region', region]"], {}), "(['aws', 'ec2', 'authorize-security-group-ingress',\n '--group-id', sg[0]['GroupId'], '--protocol', '-1', '--port', '-1',\n '--cidr', '0.0.0.0/0', '--region', region])\n", (2191, 2362), False, 'import subprocess\n'), ((2399, 2475), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Utility for dealing with AWS clusters"""'}), "(description='Utility for dealing with AWS clusters')\n", (2422, 2475), False, 'import argparse\n'), ((3432, 3452), 'utils.get_current_region', 'get_current_region', ([], {}), '()\n', (3450, 3452), False, 'from utils import reduce_subnets, get_current_region\n'), ((3555, 3583), 'eks.AwsCluster', 'AwsCluster', (['args.ref', 'region'], {}), '(args.ref, region)\n', (3565, 3583), False, 'from eks import AwsCluster\n'), ((3873, 3909), 'json.dump', 'json.dump', (['cfg', 'sys.stdout'], {'indent': '(4)'}), '(cfg, sys.stdout, indent=4)\n', (3882, 3909), False, 'import json\n'), ((671, 694), 'utils.reduce_subnets', 
'reduce_subnets', (['private'], {}), '(private)\n', (685, 694), False, 'from utils import reduce_subnets, get_current_region\n'), ((722, 744), 'utils.reduce_subnets', 'reduce_subnets', (['public'], {}), '(public)\n', (736, 744), False, 'from utils import reduce_subnets, get_current_region\n')] |
import os
from kavik.settings import BASE_DIR
# Local/development settings override: debug is on, so this file must never
# be used in production.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# NOTE(review): SECRET_KEY is blank -- Django refuses to start without one;
# a real value (kept out of version control) must be filled in.
SECRET_KEY = ''
# SQLite database stored next to the project root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Basic auth user/pass (blank here; presumably filled per deployment).
BASIC_USER = ''
BASIC_PASS = ''
# Mail settings: the console backend below prints messages instead of
# sending them; switch to the commented smtp backend for real delivery.
EMAIL_HOST = 'localhost'
DEFAULT_FROM_EMAIL = '<EMAIL>'
REGISTER_FROM_MAIL = DEFAULT_FROM_EMAIL
EMAIL_PORT = 25
#EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
#EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' # real
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # prints
| [
"os.path.join"
] | [((367, 403), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""db.sqlite3"""'], {}), "(BASE_DIR, 'db.sqlite3')\n", (379, 403), False, 'import os\n')] |
import random as _random
import unittest as _unittest
import numpy as _np
import torch as _torch
import torchutils as _tu
class _TestUtils(_unittest.TestCase):
    def test_set_random_seed(self):
        """Seeding via torchutils must propagate to numpy, torch, and CUDA."""
        chosen = _random.randint(1, 1000)
        _tu.set_random_seed(chosen)
        numpy_seed = _np.random.get_state()[1][0]
        torch_seed = _torch.initial_seed()
        self.assertEqual(chosen, numpy_seed)
        self.assertEqual(chosen, torch_seed)
        # CUDA keeps its own seed; only check it when a device is present.
        if _torch.cuda.is_available():
            self.assertEqual(chosen, _torch.cuda.initial_seed())
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    _unittest.main()
| [
"numpy.random.get_state",
"torch.initial_seed",
"torch.cuda.is_available",
"torch.cuda.initial_seed",
"unittest.main",
"random.randint",
"torchutils.set_random_seed"
] | [((676, 692), 'unittest.main', '_unittest.main', ([], {}), '()\n', (690, 692), True, 'import unittest as _unittest\n'), ((251, 275), 'random.randint', '_random.randint', (['(1)', '(1000)'], {}), '(1, 1000)\n', (266, 275), True, 'import random as _random\n'), ((284, 309), 'torchutils.set_random_seed', '_tu.set_random_seed', (['seed'], {}), '(seed)\n', (303, 309), True, 'import torchutils as _tu\n'), ((386, 407), 'torch.initial_seed', '_torch.initial_seed', ([], {}), '()\n', (405, 407), True, 'import torch as _torch\n'), ((510, 536), 'torch.cuda.is_available', '_torch.cuda.is_available', ([], {}), '()\n', (534, 536), True, 'import torch as _torch\n'), ((566, 592), 'torch.cuda.initial_seed', '_torch.cuda.initial_seed', ([], {}), '()\n', (590, 592), True, 'import torch as _torch\n'), ((332, 354), 'numpy.random.get_state', '_np.random.get_state', ([], {}), '()\n', (352, 354), True, 'import numpy as _np\n')] |
from os import getcwd
from os.path import join
from sys import exit
from docker_tests.test_utils import CurrentTest, test_command
if __name__ == '__main__':
    # Build and install expac-git from the AUR, then clean up the checkout.
    test_command("git clone https://aur.archlinux.org/expac-git.git")
    test_command("makepkg -si --needed --noconfirm", dir_to_execute=join(getcwd(), "expac-git"))
    test_command("rm -rf expac-git/")
    # Install aurman itself from the checked-out source tree.
    test_command("sudo python setup.py install --optimize=1", "/home/aurman/aurman-git")
    # Redirect built packages to /tmp via the system-wide makepkg.conf.
    test_command('sudo sh -c "{}"'
                 ''.format("echo 'PKGDEST=/tmp' >> /etc/makepkg.conf"))
    # Install cower with aurman (includes a full system upgrade: -Syu).
    test_command(
        "aurman -Syu cower --noedit --pgp_fetch --keyserver hkp://ipv4.pool.sks-keyservers.net:11371 --noconfirm")
    # Verify pacman now knows the cower package.
    test_command("pacman -Qi cower")
    # Point PKGDEST at a user directory via the per-user ~/.makepkg.conf,
    # which takes precedence over the system-wide file changed above.
    test_command("mkdir -p /home/aurman/build_dir")
    test_command('sudo sh -c "{}"'
                 ''.format("echo 'PKGDEST=/home/aurman/build_dir' > ~/.makepkg.conf"))
    # Install pacman-git and verify it is recorded.
    test_command(
        "aurman -S pacman-git --noedit --pgp_fetch --keyserver hkp://ipv4.pool.sks-keyservers.net:11371 --noconfirm")
    # Check if pacman-git is installed.
    test_command("pacman -Qi pacman-git")
    # Install cower-git and verify it is recorded.
    test_command(
        "aurman -S cower-git --noedit --pgp_fetch --keyserver hkp://ipv4.pool.sks-keyservers.net:11371 --noconfirm")
    # Check if cower-git is installed.
    test_command("pacman -Qi cower-git")
    # Exit with the accumulated pass/fail status of all commands above.
    exit(CurrentTest.to_return)
| [
"docker_tests.test_utils.test_command",
"os.getcwd",
"sys.exit"
] | [((187, 252), 'docker_tests.test_utils.test_command', 'test_command', (['"""git clone https://aur.archlinux.org/expac-git.git"""'], {}), "('git clone https://aur.archlinux.org/expac-git.git')\n", (199, 252), False, 'from docker_tests.test_utils import CurrentTest, test_command\n'), ((354, 387), 'docker_tests.test_utils.test_command', 'test_command', (['"""rm -rf expac-git/"""'], {}), "('rm -rf expac-git/')\n", (366, 387), False, 'from docker_tests.test_utils import CurrentTest, test_command\n'), ((414, 502), 'docker_tests.test_utils.test_command', 'test_command', (['"""sudo python setup.py install --optimize=1"""', '"""/home/aurman/aurman-git"""'], {}), "('sudo python setup.py install --optimize=1',\n '/home/aurman/aurman-git')\n", (426, 502), False, 'from docker_tests.test_utils import CurrentTest, test_command\n'), ((657, 786), 'docker_tests.test_utils.test_command', 'test_command', (['"""aurman -Syu cower --noedit --pgp_fetch --keyserver hkp://ipv4.pool.sks-keyservers.net:11371 --noconfirm"""'], {}), "(\n 'aurman -Syu cower --noedit --pgp_fetch --keyserver hkp://ipv4.pool.sks-keyservers.net:11371 --noconfirm'\n )\n", (669, 786), False, 'from docker_tests.test_utils import CurrentTest, test_command\n'), ((821, 853), 'docker_tests.test_utils.test_command', 'test_command', (['"""pacman -Qi cower"""'], {}), "('pacman -Qi cower')\n", (833, 853), False, 'from docker_tests.test_utils import CurrentTest, test_command\n'), ((910, 957), 'docker_tests.test_utils.test_command', 'test_command', (['"""mkdir -p /home/aurman/build_dir"""'], {}), "('mkdir -p /home/aurman/build_dir')\n", (922, 957), False, 'from docker_tests.test_utils import CurrentTest, test_command\n'), ((1110, 1242), 'docker_tests.test_utils.test_command', 'test_command', (['"""aurman -S pacman-git --noedit --pgp_fetch --keyserver hkp://ipv4.pool.sks-keyservers.net:11371 --noconfirm"""'], {}), "(\n 'aurman -S pacman-git --noedit --pgp_fetch --keyserver hkp://ipv4.pool.sks-keyservers.net:11371 
--noconfirm'\n )\n", (1122, 1242), False, 'from docker_tests.test_utils import CurrentTest, test_command\n'), ((1283, 1320), 'docker_tests.test_utils.test_command', 'test_command', (['"""pacman -Qi pacman-git"""'], {}), "('pacman -Qi pacman-git')\n", (1295, 1320), False, 'from docker_tests.test_utils import CurrentTest, test_command\n'), ((1350, 1481), 'docker_tests.test_utils.test_command', 'test_command', (['"""aurman -S cower-git --noedit --pgp_fetch --keyserver hkp://ipv4.pool.sks-keyservers.net:11371 --noconfirm"""'], {}), "(\n 'aurman -S cower-git --noedit --pgp_fetch --keyserver hkp://ipv4.pool.sks-keyservers.net:11371 --noconfirm'\n )\n", (1362, 1481), False, 'from docker_tests.test_utils import CurrentTest, test_command\n'), ((1520, 1556), 'docker_tests.test_utils.test_command', 'test_command', (['"""pacman -Qi cower-git"""'], {}), "('pacman -Qi cower-git')\n", (1532, 1556), False, 'from docker_tests.test_utils import CurrentTest, test_command\n'), ((1562, 1589), 'sys.exit', 'exit', (['CurrentTest.to_return'], {}), '(CurrentTest.to_return)\n', (1566, 1589), False, 'from sys import exit\n'), ((326, 334), 'os.getcwd', 'getcwd', ([], {}), '()\n', (332, 334), False, 'from os import getcwd\n')] |
from __future__ import print_function
from __future__ import absolute_import
import tensorflow as tf
def encoder_decoder_input_placeholder(encoder_input_range, decoder_input_range):
    """Create int32 placeholder lists for the encoder and decoder time steps.

    Args:
        encoder_input_range: number of encoder time-step placeholders.
        decoder_input_range: number of decoder time-step placeholders.

    Returns:
        (encoder_inputs, decoder_inputs) -- two lists of ``tf.placeholder``
        tensors named ``encoder{i}`` / ``decoder{i}``.
    """
    # ``range`` instead of the Python-2-only ``xrange``: the original raised
    # NameError under Python 3, and the behaviour is identical on Python 2.
    encoder_inputs = [
        tf.placeholder(tf.int32, shape=[None], name='encoder{0}'.format(i))
        for i in range(encoder_input_range)
    ]
    decoder_inputs = [
        tf.placeholder(tf.int32, shape=[None], name='decoder{0}'.format(i))
        for i in range(decoder_input_range)
    ]
    return encoder_inputs, decoder_inputs
def target_placeholder(decoder_inputs):
    """Return the training targets: decoder inputs shifted one step ahead.

    Fixes the Python-2-only ``xrange`` in the original list comprehension;
    the slice below produces the identical list on both Python versions.

    Args:
        decoder_inputs: list of decoder placeholders (first is the GO symbol).

    Returns:
        A new list containing every decoder input except the first.
    """
    return decoder_inputs[1:]
def target_weight_placeholder(decoder_input_range):
    """Create float32 placeholders for the per-step target weights.

    Uses ``range`` instead of the Python-2-only ``xrange`` so the module
    also works under Python 3 (identical behaviour on Python 2).

    Args:
        decoder_input_range: number of decoder time steps.

    Returns:
        A list of ``tf.placeholder`` tensors named ``weight{i}``.
    """
    return [
        tf.placeholder(tf.float32, shape=[None], name='weight{0}'.format(i))
        for i in range(decoder_input_range)
    ]
def handle_large_vocabulary(num_samples, size, target_vocab_size):
    """Set up sampled-softmax machinery for a large target vocabulary.

    Returns ``(output_projection, softmax_loss_function)``.  Both are None
    when sampling is disabled, i.e. ``num_samples`` is non-positive or not
    smaller than the vocabulary size.
    """
    output_projection = None
    softmax_loss_function = None
    if num_samples > 0 and num_samples < target_vocab_size:
        # Projection from the RNN cell output (dimension ``size``) onto the
        # full vocabulary; handed back so decoding can apply it explicitly.
        w = tf.get_variable('proj_w', [size, target_vocab_size])
        w_t = tf.transpose(w)
        b = tf.get_variable('proj_b', [target_vocab_size])
        output_projection = (w, b)
        def sampled_loss(inputs, labels):
            # NOTE(review): this is the legacy TF argument order for
            # sampled_softmax_loss (weights, biases, inputs, labels, ...);
            # later TensorFlow releases moved ``labels`` before ``inputs`` --
            # confirm the pinned TF version before upgrading.
            local_w_t = tf.cast(w_t, tf.float32)
            local_b = tf.cast(b, tf.float32)
            local_inputs = tf.cast(inputs, tf.float32)
            # sampled_softmax_loss expects labels of shape [batch, 1].
            labels = tf.reshape(labels, [-1, 1])
            return tf.cast(tf.nn.sampled_softmax_loss(local_w_t, local_b, local_inputs, labels, num_samples, target_vocab_size), tf.float32)
        softmax_loss_function = sampled_loss
    return output_projection, softmax_loss_function
def get_more_hyperparameters(learning_rate, learning_rate_decay_factor):
    """Create the training bookkeeping variables.

    Returns a non-trainable learning-rate variable, an op that multiplies
    it by the decay factor when run, and a non-trainable global step.
    """
    lr_var = tf.Variable(float(learning_rate), trainable=False, dtype=tf.float32)
    decay_op = lr_var.assign(lr_var * learning_rate_decay_factor)
    step_var = tf.Variable(0, trainable=False)
    return lr_var, decay_op, step_var
def embedding_seq2seq_with_attention(cell, source_vocab_size, target_vocab_size, num_units, output_projection):
    """Return a seq2seq builder closure bound to the given hyper-parameters."""
    def build(encoder_inputs, decoder_inputs):
        # Embedding + attention seq2seq from the legacy tf.nn.seq2seq API.
        return tf.nn.seq2seq.embedding_attention_seq2seq(
            encoder_inputs, decoder_inputs, cell,
            num_encoder_symbols=source_vocab_size,
            num_decoder_symbols=target_vocab_size,
            embedding_size=num_units,
            output_projection=output_projection,
            dtype=tf.float32)
    return build
| [
"tensorflow.get_variable",
"tensorflow.transpose",
"tensorflow.Variable",
"tensorflow.nn.seq2seq.embedding_attention_seq2seq",
"tensorflow.reshape",
"tensorflow.nn.sampled_softmax_loss",
"tensorflow.cast"
] | [((1926, 1957), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {'trainable': '(False)'}), '(0, trainable=False)\n', (1937, 1957), True, 'import tensorflow as tf\n'), ((1066, 1118), 'tensorflow.get_variable', 'tf.get_variable', (['"""proj_w"""', '[size, target_vocab_size]'], {}), "('proj_w', [size, target_vocab_size])\n", (1081, 1118), True, 'import tensorflow as tf\n'), ((1127, 1142), 'tensorflow.transpose', 'tf.transpose', (['w'], {}), '(w)\n', (1139, 1142), True, 'import tensorflow as tf\n'), ((1149, 1195), 'tensorflow.get_variable', 'tf.get_variable', (['"""proj_b"""', '[target_vocab_size]'], {}), "('proj_b', [target_vocab_size])\n", (1164, 1195), True, 'import tensorflow as tf\n'), ((2192, 2444), 'tensorflow.nn.seq2seq.embedding_attention_seq2seq', 'tf.nn.seq2seq.embedding_attention_seq2seq', (['encoder_inputs', 'decoder_inputs', 'cell'], {'num_encoder_symbols': 'source_vocab_size', 'num_decoder_symbols': 'target_vocab_size', 'embedding_size': 'num_units', 'output_projection': 'output_projection', 'dtype': 'tf.float32'}), '(encoder_inputs, decoder_inputs,\n cell, num_encoder_symbols=source_vocab_size, num_decoder_symbols=\n target_vocab_size, embedding_size=num_units, output_projection=\n output_projection, dtype=tf.float32)\n', (2233, 2444), True, 'import tensorflow as tf\n'), ((1278, 1302), 'tensorflow.cast', 'tf.cast', (['w_t', 'tf.float32'], {}), '(w_t, tf.float32)\n', (1285, 1302), True, 'import tensorflow as tf\n'), ((1316, 1338), 'tensorflow.cast', 'tf.cast', (['b', 'tf.float32'], {}), '(b, tf.float32)\n', (1323, 1338), True, 'import tensorflow as tf\n'), ((1357, 1384), 'tensorflow.cast', 'tf.cast', (['inputs', 'tf.float32'], {}), '(inputs, tf.float32)\n', (1364, 1384), True, 'import tensorflow as tf\n'), ((1397, 1424), 'tensorflow.reshape', 'tf.reshape', (['labels', '[-1, 1]'], {}), '(labels, [-1, 1])\n', (1407, 1424), True, 'import tensorflow as tf\n'), ((1444, 1548), 'tensorflow.nn.sampled_softmax_loss', 'tf.nn.sampled_softmax_loss', 
(['local_w_t', 'local_b', 'local_inputs', 'labels', 'num_samples', 'target_vocab_size'], {}), '(local_w_t, local_b, local_inputs, labels,\n num_samples, target_vocab_size)\n', (1470, 1548), True, 'import tensorflow as tf\n')] |
OntCversion = '2.0.0'
from ontology.interop.Ontology.Contract import Migrate
# from ontology.interop.Ontology.Contract import Destroy
from ontology.interop.System.Runtime import Notify
from ontology.interop.System.Storage import Put, GetContext, Get
# Storage key under which put()/get() store their value.
KEY = "KEY"
# Value returned by the "name" operation.
NAME = "SecondName"
def Main(operation, args):
    # Contract dispatcher: route the invoked operation to its handler.
    # if operation == "DestroyContract":
    #     return DestroyContract()
    if operation == "MigrateContract":
        # Migration takes exactly one argument: the new contract's avm code.
        if len(args) != 1:
            Notify("param error")
            return False
        return MigrateContract(args[0])
    if operation == "put":
        return put()
    if operation == "get":
        return get()
    if operation == "name":
        return NAME
# def DestroyContract():
# Destroy()
# Notify(["Destory"])
# return True
def MigrateContract(code):
    """
    Replace this contract's code with the supplied avm code.

    Note that the existing contract will be replaced by the newly migrated
    contract.
    :param code: your avm code
    :return: True on success (the assert aborts the transaction otherwise)
    """
    # NOTE(review): the second argument (True) presumably tells the runtime
    # to carry contract storage over to the migrated contract -- confirm
    # against the Ontology Migrate syscall documentation.
    res = Migrate(code, True, "name", "version", "author", "email", "description")
    assert(res)
    Notify(["Migrate successfully"])
    return True
def get():
    # Read the value stored under KEY from contract storage.
    return Get(GetContext(), KEY)
def put():
    # Persist the constant 898 under KEY in contract storage.
    Put(GetContext(), KEY, 898)
    return True
| [
"ontology.interop.System.Storage.GetContext",
"ontology.interop.Ontology.Contract.Migrate",
"ontology.interop.System.Runtime.Notify"
] | [((964, 1036), 'ontology.interop.Ontology.Contract.Migrate', 'Migrate', (['code', '(True)', '"""name"""', '"""version"""', '"""author"""', '"""email"""', '"""description"""'], {}), "(code, True, 'name', 'version', 'author', 'email', 'description')\n", (971, 1036), False, 'from ontology.interop.Ontology.Contract import Migrate\n'), ((1057, 1089), 'ontology.interop.System.Runtime.Notify', 'Notify', (["['Migrate successfully']"], {}), "(['Migrate successfully'])\n", (1063, 1089), False, 'from ontology.interop.System.Runtime import Notify\n'), ((1133, 1145), 'ontology.interop.System.Storage.GetContext', 'GetContext', ([], {}), '()\n', (1143, 1145), False, 'from ontology.interop.System.Storage import Put, GetContext, Get\n'), ((1172, 1184), 'ontology.interop.System.Storage.GetContext', 'GetContext', ([], {}), '()\n', (1182, 1184), False, 'from ontology.interop.System.Storage import Put, GetContext, Get\n'), ((465, 486), 'ontology.interop.System.Runtime.Notify', 'Notify', (['"""param error"""'], {}), "('param error')\n", (471, 486), False, 'from ontology.interop.System.Runtime import Notify\n')] |
from bot import botToken
import unittest
def test_bot():
    """Token loaded from ``test_bot.token`` must match the known test value."""
    loaded = botToken(token_file='test_bot.token').getToken()
    assert str(loaded) == '1111111111:TeStTaPiToKeN', 'Loading token from file is wrong'
| [
"bot.botToken"
] | [((74, 111), 'bot.botToken', 'botToken', ([], {'token_file': '"""test_bot.token"""'}), "(token_file='test_bot.token')\n", (82, 111), False, 'from bot import botToken\n')] |
from torchfes.opt.utils.generalize import generalize
from typing import Dict
import torch
from torch import Tensor, nn
from .. import properties as p
from .utils import (
Lagrangian, set_directional_hessian, set_directional_gradient)
from ..utils import detach
# Line search Newtons method is below
# gp := g(x + a * p) @ p
# hp := p @ H @ p
#
# while True:
# eval f(x), g(x), H(x)
# p = - H^-1 g
# a = 1
# tol = gp(0) * tol_dec_rate
# while |gp(a)| > tol:
# a = a - gp(a) / hp(a)
# dx = a * p
# x = x + dx
#
# while True:
# eval f(x), g(x), H(x)
# p = - H^-1 g
# tol = gp(x) * tol_dec_rate
# x = x + p
# eval gp(x)
# while |gp(x)| > tol:
# da = - gp(x) / hp(x)
# x = x + p * da
#
# while True:
# eval gp(x)
# if init or |gp(x)| < tol:
# p = - H^-1 g
# tol = gp(x) * tol_dec_rate
# x = x + p
# else:
# da = - gp(x) / hp(x)
# x = x + p * da
#
# while True:
# x = x + dx
# eval f(x), g(x)
# eval gp(x)
# if init or |gp(x)| < tol:
# eval H(x)
# p = - H^-1 g
# tol = gp(x) * tol_dec_rate
# dx = p
# else:
# eval hp(x)
# da = - gp(x) / hp(x)
# dx = p * da
class LineSearch(nn.Module):
    """Newton optimization with a directional (1-D Newton) line search.

    Each ``forward`` call advances the positions by the previously stored
    step, re-evaluates the Lagrangian, and then either takes a fresh search
    direction (when the directional gradient has dropped below the per-batch
    tolerance) or refines the step size along the current direction using
    the directional Hessian.  See the derivation in the comment block above
    this class.
    """
    def __init__(self,
                 lag: Lagrangian, dir: nn.Module,
                 tol: float,
                 grd_dec_rat: float = 0.1,
                 lin_ini: bool = True,
                 abs_grd_hes: bool = True,
                 ):
        """
        Args:
            lag: module evaluating the Lagrangian (energies and gradients).
            dir: module that writes the search direction into
                ``p.gen_stp_dir``.
            tol: absolute floor for the line-search gradient tolerance.
            grd_dec_rat: the line search stops once the directional gradient
                falls below this fraction of its value at the new direction.
            lin_ini: if True, size the very first step along each new
                direction with a line-search update instead of a unit step.
            abs_grd_hes: if True, divide by ``|directional Hessian|`` in the
                sizing update so negative curvature cannot flip the step.
        """
        super().__init__()
        self.lag = lag
        self.grd_dec_rat = grd_dec_rat
        self.dir = dir
        self.first_step_line_search = lin_ini
        self.abs_grd_hes = abs_grd_hes
        self.tol = tol
    def _direction(self, mol: Dict[str, Tensor]):
        # Ask the direction module for a new search direction and reset the
        # step size to 1 (full step along the direction) for every batch.
        mol = self.dir(mol)
        gsd = mol[p.gen_stp_dir]
        n_bch = gsd.size(0)
        mol[p.gen_stp_siz] = torch.ones(
            [n_bch], dtype=gsd.dtype, device=gsd.device)
        mol[p.gen_stp] = mol[p.gen_stp_dir]
        return mol
    def _linesearch(self, mol: Dict[str, Tensor]):
        # One Newton update on the 1-D function a -> L(x + a*d):
        # step size = -(g . d) / (d^T H d); with abs_grd_hes the absolute
        # value of the curvature is used so the sign of the step follows
        # the gradient even on negative curvature.
        mol = mol.copy()
        mol = set_directional_hessian(mol)
        if self.abs_grd_hes:
            mol[p.gen_stp_siz] = -mol[p.gen_dir_grd] / mol[p.gen_dir_hes].abs()
        else:
            mol[p.gen_stp_siz] = -mol[p.gen_dir_grd] / mol[p.gen_dir_hes]
        mol[p.gen_stp] = mol[p.gen_stp_siz][:, None] * mol[p.gen_stp_dir]
        return mol
    def _condition(self, mol: Dict[str, Tensor]):
        # True when no tolerance has been recorded yet (first pass) or the
        # directional gradient is below tolerance for every batch element,
        # i.e. the line search along the current direction has converged.
        if p.gen_lin_tol not in mol:
            return True
        if (mol[p.gen_dir_grd].abs() < mol[p.gen_lin_tol]).all():
            return True
        else:
            return False
    def forward(self, mol: Dict[str, Tensor]):
        # Lazily set up the generalized coordinates on the first call.
        if p.gen_pos not in mol:
            mol = generalize(mol)
        # First call only: evaluate, pick an initial direction and tolerance.
        if p.gen_stp not in mol:
            mol = self.lag(mol, create_graph=True)
            mol = self._direction(mol)
            mol = set_directional_gradient(mol)
            # Tolerance is a fraction of the initial directional gradient,
            # clipped from below by the absolute floor ``tol``.
            mol[p.gen_lin_tol] = mol[p.gen_dir_grd].abs() * self.grd_dec_rat
            mol[p.gen_lin_tol].masked_fill_(
                mol[p.gen_lin_tol] < self.tol, self.tol)
            if self.first_step_line_search:
                mol = self._linesearch(mol)
        # Take the stored step and re-evaluate the Lagrangian there.
        mol[p.gen_pos] = mol[p.gen_pos] + mol[p.gen_stp]
        mol = self.lag(mol, create_graph=True)
        mol = set_directional_gradient(mol)
        if self._condition(mol):
            # Line search converged: choose a new direction and tolerance.
            mol = self._direction(mol)
            mol[p.gen_lin_tol] = mol[p.gen_dir_grd].abs() * self.grd_dec_rat
            mol[p.gen_lin_tol].masked_fill_(
                mol[p.gen_lin_tol] < self.tol, self.tol)
            if self.first_step_line_search:
                mol = self._linesearch(mol)
        else:
            # Keep refining the step size along the current direction.
            mol = self._linesearch(mol)
        return detach(mol)
def limit_step_size(mol: Dict[str, Tensor], siz: float):
    """Scale the generalized step so its Euclidean length never exceeds *siz*.

    Directions already shorter than *siz* keep a scale factor of 1; longer
    ones are shrunk to exactly *siz*.  Returns a shallow copy of *mol* with
    ``gen_stp_siz`` and ``gen_stp`` updated.
    """
    out = mol.copy()
    dir_norm: Tensor = out[p.gen_stp_dir].norm(p=2, dim=1)
    # clamp(max=siz) is equivalent to masking values above siz down to siz.
    out[p.gen_stp_siz] = dir_norm.clamp(max=siz) / dir_norm
    out[p.gen_stp] = out[p.gen_stp_dir] * out[p.gen_stp_siz][:, None]
    return out
class LimitStepSize(nn.Module):
    """Wrap a direction module and cap the direction's Euclidean length."""

    def __init__(self, dir, siz):
        """Store the wrapped direction module and the maximum length *siz*."""
        super().__init__()
        self.siz = siz
        self.dir = dir

    def forward(self, mol: Dict[str, Tensor]):
        mol = self.dir(mol)
        dir_norm: Tensor = mol[p.gen_stp_dir].norm(p=2, dim=1)
        # clamp(max=...) leaves short directions untouched and shrinks
        # long ones to exactly self.siz -- same result as the masked fill.
        scale = dir_norm.clamp(max=self.siz) / dir_norm
        mol[p.gen_stp_dir] = mol[p.gen_stp_dir] * scale[:, None]
        return mol
| [
"torchfes.opt.utils.generalize.generalize",
"torch.ones"
] | [((1927, 1982), 'torch.ones', 'torch.ones', (['[n_bch]'], {'dtype': 'gsd.dtype', 'device': 'gsd.device'}), '([n_bch], dtype=gsd.dtype, device=gsd.device)\n', (1937, 1982), False, 'import torch\n'), ((2809, 2824), 'torchfes.opt.utils.generalize.generalize', 'generalize', (['mol'], {}), '(mol)\n', (2819, 2824), False, 'from torchfes.opt.utils.generalize import generalize\n')] |
import json
import boto3
import logging
from datetime import datetime
from dateutil import tz
# Root logger for the Lambda environment.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# Convert reported UTC times to local Israel time for the greeting logic.
from_zone = tz.gettz('UTC')
to_zone = tz.gettz('Asia/Jerusalem')
# Minutes until the session starts, keyed by the IoT button click type.
starts_in = {'SINGLE': 5,
             "DOUBLE": 2,
             "LONG": 0
             }
# IoT data-plane client used to publish the announcement message.
client = boto3.client('iot-data', region_name='eu-west-1')
def lambda_handler(event, context):
    """Handle an AWS IoT button click by announcing the session over MQTT.

    Builds a spoken (TTS) message whose greeting depends on the local hour
    and whose lead time depends on the click type, then publishes it to the
    announcer device topic.

    Args:
        event: IoT 1-Click event with ``deviceEvent`` and ``placementInfo``.
        context: Lambda context (unused).

    Returns:
        A dict with ``statusCode`` 200 and a JSON body.
    """
    logger.info('Received event: ' + json.dumps(event))
    click_type = event["deviceEvent"]["buttonClicked"]["clickType"]
    event_timestamp = event["deviceEvent"]["buttonClicked"]["reportedTime"]
    device_id = event["placementInfo"]["devices"]["Announcer"]
    # Determine the local (Israel) hour to pick the right greeting.
    timestamp = datetime.utcnow()
    timestamp = timestamp.replace(tzinfo=from_zone)
    timestamplocal = timestamp.astimezone(to_zone)
    timelocal = int(timestamplocal.strftime('%H'))
    if timelocal <= 12:
        day_part = 'morning'
    elif timelocal <= 17:
        day_part = 'afternoon'
    else:
        day_part = 'evening'
    minutes = starts_in[click_type]
    if minutes > 1:
        session_start = "will start in %d minutes." % minutes
    else:
        session_start = "is starting now!"
    # Fixed typos in the spoken message: "Goog" -> "Good",
    # "ateendees" -> "attendees".
    tts = ("Good %s floor28 attendees. Please join us at the main conference room. The session %s") % (day_part, session_start)
    logger.info(tts)
    # Change topic, qos and payload
    response = client.publish(
        topic='$aws/things/Floor28_RBPi/announce',
        qos=0,
        payload=json.dumps({"click": click_type,
                            "timestamp": event_timestamp,
                            "device_id": device_id,
                            "tts": tts
                            })
    )
    logger.info('IoT Response: ' + json.dumps(response))
    return {
        'statusCode': 200,
        'body': json.dumps('Hello from Lambda!')
    }
| [
"logging.getLogger",
"boto3.client",
"datetime.datetime.utcnow",
"dateutil.tz.gettz",
"json.dumps"
] | [((104, 123), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (121, 123), False, 'import logging\n'), ((167, 182), 'dateutil.tz.gettz', 'tz.gettz', (['"""UTC"""'], {}), "('UTC')\n", (175, 182), False, 'from dateutil import tz\n'), ((193, 219), 'dateutil.tz.gettz', 'tz.gettz', (['"""Asia/Jerusalem"""'], {}), "('Asia/Jerusalem')\n", (201, 219), False, 'from dateutil import tz\n'), ((290, 339), 'boto3.client', 'boto3.client', (['"""iot-data"""'], {'region_name': '"""eu-west-1"""'}), "('iot-data', region_name='eu-west-1')\n", (302, 339), False, 'import boto3\n'), ((658, 675), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (673, 675), False, 'from datetime import datetime\n'), ((1702, 1734), 'json.dumps', 'json.dumps', (['"""Hello from Lambda!"""'], {}), "('Hello from Lambda!')\n", (1712, 1734), False, 'import json\n'), ((414, 431), 'json.dumps', 'json.dumps', (['event'], {}), '(event)\n', (424, 431), False, 'import json\n'), ((1437, 1540), 'json.dumps', 'json.dumps', (["{'click': click_type, 'timestamp': event_timestamp, 'device_id': device_id,\n 'tts': tts}"], {}), "({'click': click_type, 'timestamp': event_timestamp, 'device_id':\n device_id, 'tts': tts})\n", (1447, 1540), False, 'import json\n'), ((1623, 1643), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (1633, 1643), False, 'import json\n')] |
from typing import Generator
import pytest
from fastapi.testclient import TestClient
from app.main import app
@pytest.fixture(scope="module")
def test_data():
    """Provide a module-scoped sample payload shared by the API tests."""
    return {"user_handle": 1}
@pytest.fixture()
def client() -> Generator:
    # Yield a TestClient bound to the app for the duration of a test; after
    # the test finishes (and the client context closes), clear any
    # dependency overrides so tests stay isolated from each other.
    with TestClient(app) as _client:
        yield _client
    app.dependency_overrides = {}
| [
"pytest.fixture",
"fastapi.testclient.TestClient"
] | [((115, 145), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (129, 145), False, 'import pytest\n'), ((226, 242), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (240, 242), False, 'import pytest\n'), ((279, 294), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (289, 294), False, 'from fastapi.testclient import TestClient\n')] |
# GTA_Hunter pipeline to first blast a query against GTA database to search for GTA homologs and then classifying them using SVM approach
# Author: Evolutionary Computational Genomics Lab
# Last modified: 6/20/2019
###############
### IMPORTS ###
###############
import os
# import sys
from Bio.Blast.Applications import NcbiblastpCommandline
from Bio.Blast import NCBIXML
from Bio import SeqIO
import argparse
import numpy as np
import time
import pandas as pd
from bin.filter_extract_fasta_ch import extract_fasta
from bin.Loader import Loader
from bin.Weight import Weight
from bin.Feature import Feature
from bin.SVM import SVM
### CONSTANTS ###
#################
NREPS = 10 # for xval in SVM
PSE_WEIGHT = 0.05 # for pseaac feature
opt_params = {} # contains the parameters for each gene that yielded
# the best xval results in the format [k-mers,PseAAC,C-value,t-value,Phys-Chem]
opt_params['2'] = [2, None, 10000, .02, False]
opt_params['3'] = [3, None, 10000, .02, False]
opt_params['4'] = [3, 3, 10000, .02, False]
opt_params['5'] = [3, None, 100, .02, False]
opt_params['6'] = [4, None, .1, .02, True]
opt_params['8'] = [2, 3, .1, .03, False]
opt_params['9'] = [2, None, 100, .1, False]
opt_params['12'] = [5, None, 10000, .05, False]
opt_params['13'] = [2, None, 100, 0, False]
opt_params['14'] = [6, 6, 0.01, .03, False]
opt_params['15'] = [2, None, 10000, .02, False]
def my_main():
"Main function."
parser = get_args()
args = parser.parse_args()
if args.blast:
# Initiate blast wrapper
if not args.queries and not args.folder:
print("""Please specify a query file or a folder of .faa files to
blast and classify""")
else:
if not args.outdir:
print("""Please specify a directory in which to store output
files""")
else:
#Don't use user-specified weighting or training files
if args.weight:
print("""Specified weighting files will be ignored, must
first determine which GTA gene homologs you have.""")
if args.gta or args.virus:
print("""Specified training files will be ignored, must
first determine which GTA gene homologs you have.""")
if args.folder:
org = args.folder[0]
in_folder = org
out_folder = args.outdir[0]
for child in os.listdir(in_folder):
if child[-3:] == "faa":
name = "_" + child[:-4]
query = os.path.join(in_folder, child)
args.queries = [query]
args.outdir = [out_folder]
run_wrapper(args, name)
else:
run_wrapper(args)
else:
#run GTA_Hunter without doing a BLAST search
if not args.gta or not args.virus:
print("""You must specify the training files if you are running
GTA_Hunter without blast""")
else:
new_hunter(args)
def get_args():
"Init parser."
parser = argparse.ArgumentParser(description="""Gene Classification Using
SVM.""")
### Define Args ###
# Main
parser.add_argument("-b", "--blast", action="store_true",
dest="blast", required=False,
help="Run BLASTP search to first identify GTA homologs for the classification.")
parser.add_argument("-g", "--GTA", type=str, nargs=1,
dest="gta", required=False,
help="The FASTA-formatted (.faa or .fna) true GTA sequences used for training.")
parser.add_argument("-v", "--virus", type=str, nargs=1,
dest="virus", required=False,
help="The FASTA-formatted (.faa or .fna) true viruses sequences used for training.")
parser.add_argument("-q", "--queries", type=str, nargs=1,
dest="queries", required=False,
help="The FASTA-formatted (.faa or .fna) sequences to be classified.")
parser.add_argument("-o", "--outdir", type=str, nargs=1,
dest="outdir", required=False,
help="The folder path in which to store output.")
parser.add_argument("-k", "--kmer", type=int, nargs="?",
dest="kmer", required=False, const=4, default=None,
help="""The size of k-mer (default=4).""")
parser.add_argument("-p", "--pseaac", nargs="?", type=int,
dest="pseaac", required=False, const=3, default=None,
help="""Expand feature set to include pseudo amino acid composition (default=3). As the parameter,
specify value of lambda. Weight = 0.05 (after Chou 2001).""")
parser.add_argument("-y", "--physico", action="store_true",
dest="physico", required=False,
help="""Expand feature set to include physicochemical properties of amino acids.""")
parser.add_argument("-m", "--min", action="store_true",
dest="mini", required=False,
help="Print bare minimum results.")
parser.add_argument("-O", "--optimal", action="store_true",
dest="opt", required=False,
help="""Use the optimal parameters for the RcGTA gene homolog classification as listed in
Table 2 in Kogay et al 2019.""")
parser.add_argument("-f", "--folder",type=str, nargs=1,
dest="folder", required=False,
help="""Provide a folder with one or multiple proteomes (*.faa files).""")
# Weight
parser.add_argument("-W", action="store_true",
dest="wt", required=False,
help="Weight training set if desired. Distance files will be supplied automatically.")
parser.add_argument("-w", "--weight", type=str, nargs=2,
dest="weight", required=False,
help="Weigh the training set. Specify the two pairwise distance files needed for eighting (first file for GTAs, second file for viruses).")
parser.add_argument("-z", "--cluster_type", type=str, nargs=1,
dest="cluster_type", required=False, default=['farthest'],
help="Specify 'farthest' or 'nearest' neighbors clustering (default='farthest').")
parser.add_argument("-t", "--dist", type=float, nargs=1,
dest="dist", required=False, default=[0.01],
help="Specify the cutoff distance for clustering in the weighting scheme (default=0.01).")
# SVM
parser.add_argument("-c", "--soft_margin", type=float, nargs=1,
dest="c", required=False, default=[1.0],
help="The soft margin for the SVM (default=1.0).")
parser.add_argument("-x", "--xval", type=int, nargs="?",
dest="xval", required=False, const=5, default=None,
help="Performs cross validation of training set. Specify folds over 10 repetitions (default=5).")
parser.add_argument("-e", "--kernel", nargs=2,
dest="kernel", required=False, default=["linear",0],
help="Specify kernel to be used and sigma if applicable (i.e. gaussian) (default='linear', 0).")
parser.add_argument("-s", "--svs", action="store_true",
dest="svs", required=False,
help="Show support vectors.")
return parser
def get_dict():
#function to create a dictionary with info associated with each gta gene,
#so that when there is a blast hit we can identify which gta gene it is a homolog to
folder = "data/training/gta/"
folder2 = "data/training/viral/"
master_dict = {}
for child in os.listdir(folder):
if child[-3:] == "faa":
file = os.path.join(folder,child)
gene = child.split('_')[0]
glist = []
f = open(file)
content = f.readlines()
for line in content:
if line[0] == '>':
words = line.split()
ID = words[0][1:]
glist.append(ID)
master_dict[gene] = glist
for child in os.listdir(folder2):
if child[-3:] == "faa":
file = os.path.join(folder2,child)
gene = child.split('_')[0]
f = open(file)
content = f.readlines()
for line in content:
if line[0] == '>':
words = line.split()
ID = words[0][1:]
master_dict[gene].append(ID)
return master_dict
def run_wrapper(args,name=''):
my_dict = get_dict()
query_file = args.queries[0]
out_dir = args.outdir[0]
#output file (btab) from the blast against GTA db
blast_out = out_dir +"/blast"+name + ".out"
#special blast outformat parameters
outformat = "6 qseqid sstart send sframe sseqid pident qlen slen length mismatch gapopen qstart qend evalue bitscore"
#run blast search using database of viral and GTA training set
blastp_cline = NcbiblastpCommandline(query=query_file, db="data/GTA_db/GTA_viral", evalue=0.001, outfmt=outformat, out=blast_out, num_threads=2,dbsize=10000000)
blastp_cline
stdout,stderr = blastp_cline()
result = open(blast_out)
lines = result.readlines()
result.close()
handle_in = open(query_file)
#keep track of which sequences did not have a GTA homolog
no_hit_list = []
for record in SeqIO.parse(handle_in, "fasta"):
no_hit_list.append(record.id)
handle_in.close()
#continue if blast search had results
if len(lines) > 0:
#file to store the results of running GTA_Hunter
results_file = out_dir + "/results" +name+".out"
#run script to extract fasta files from blast search
genes_found = extract_fasta(blast_out,query_file,out_dir,my_dict,name)
if len(genes_found) > 0:
results_handle = open(results_file, 'w')
# for each GTA gene that had homologs, run GTA Hunter
for gene in genes_found:
out_faa = out_dir + "/gta_homolog_" +gene+name+".faa"
#get training files corresponding to the gene identified
gta_file = "data/training/gta/" + gene + "_gta.faa"
virus_file = "data/training/viral/" + gene + "_viral.faa"
gta_weight = "data/training/gta/" + gene + "_gta.dist"
virus_weight = "data/training/viral/" + gene + "_viral.dist"
homolog_file = out_faa
args.queries=[homolog_file]
args.gta = [gta_file]
args.virus = [virus_file]
if args.wt:
args.weight = [gta_weight,virus_weight]
if args.opt:
#get the optimal parameters
args.kmer = opt_params[gene][0]
args.pseaac = opt_params[gene][1]
c_value = opt_params[gene][2]
args.c = [c_value]
d_value = opt_params[gene][3]
args.dist = [d_value]
if opt_params[gene][4] == True:
args.physico = True
print("Running with paramateres: ", "k-mer size", args.kmer,"PseAAC",args.pseaac, "Phys-Chem properties included", "C", c_value, "t", d_value)
else:
print("Running with paramateres: ", "k-mer size", args.kmer,"PseAAC",args.pseaac, "C", c_value, "t", d_value)
#run GTA_Hunter with all the correct files
print("running GTA Hunter for gene",gene)
new_hunter(args,results_handle)
results_handle.close()
#close original query file and GTA_Hunter results file
results_handle = open(results_file)
lines = results_handle.readlines()
#update the list of sequences that did not have a GTA homolog
for line in lines:
words = line.split()
if words[0] != "Gene":
if words[0][1:] in no_hit_list:
no_hit_list.remove(words[0][1:])
results_handle.close()
else:
#if the blast.out file is empty
print("Sorry, no GTA homologs were found")
#write out a list of gene IDs of genes that were not homologs to any GTA gene
no_hit_file = out_dir+ "/no_homologs" +name+".txt"
no_hit_handle = open(no_hit_file,'w')
for nohit in no_hit_list:
no_hit_handle.write(nohit + '\n')
no_hit_handle.close()
def new_hunter(args,results_handle=''):
start = time.time()
# Print detail
mini = args.mini
### Load training set and make features ###
gta_file = args.gta[0]
virus_file = args.virus[0]
# Load profiles
gta_profs = Loader.load(gta_file, "GTA")
viral_profs = Loader.load(virus_file, "virus")
# Make features
feats = Feature(gta_profs.profiles + viral_profs.profiles)
if args.kmer:
kmer_size = args.kmer
feats.make_kmer_dict(kmer_size)
feats.kmer_feat()
if args.pseaac!=None:
feats.pseaac(lam=int(args.pseaac), weight=PSE_WEIGHT)
if args.physico:
feats.physicochem()
if not args.kmer and args.pseaac==None and not args.physico:
print("You must specify at least one feature type (-k, -p, -y).")
else:
# Weight if needed
if args.wt:
# Get distance threshold
d = args.dist[0]
# Get cluster type
cluster_type = args.cluster_type[0]
# Weight GTA
pairwiseGTA = Weight.load(args.weight[0])
GTA_weight = Weight(gta_profs, pairwiseGTA)
GTA_clusters = GTA_weight.cluster(cluster_type, d)
GTA_weight.weight(GTA_clusters)
# Weight Virus
pairwiseViral = Weight.load(args.weight[1])
virus_weight = Weight(viral_profs, pairwiseViral)
virus_clusters = virus_weight.cluster(cluster_type, d)
virus_weight.weight(virus_clusters)
# Create SVM
c = args.c[0]
kernel = args.kernel[0]
kernel_var = float(args.kernel[1])
svm = SVM(gta_profs, viral_profs, c, kernel, kernel_var)
# Print support vectors
if args.svs:
svm.show_svs()
# Xval
if args.xval:
nfolds = args.xval
if args.wt:
result = svm.xval(nfolds, NREPS, pairwiseGTA, pairwiseViral, cluster_type, d)
else:
result = svm.xval(nfolds, NREPS)
if mini:
print("GTA Correct\tViral Correct")
print("%.2f\t%.2f" % (result[0], result[1]))
else:
print("We correctly classified (on average) %.2f/%d GTA and %.2f/%d Viral genes."
% (result[0], len(gta_profs), result[1], len(viral_profs)))
else:
if args.queries == None:
print("The query file was not specified. Please declare queries using -q.")
else: # All good
# Load test set
test_profs = Loader.load(args.queries[0])
# Make features
if args.kmer:
feats.kmer_feat(test_profs)
if args.pseaac:
feats.pseaac(lam=int(args.pseaac), weight=PSE_WEIGHT, profiles=test_profs)
if args.physico:
feats.physicochem(profiles=test_profs)
# Classify
svm.predict(test_profs)
# The gta gene being classified
gene = args.gta[0].split('/')[3].split('_')[0]
# Print results
if mini:
print("Gene\t\tClass")
if args.blast:
results_handle.write("Gene\t\tClass\n")
for profile in test_profs:
print(">%s\t%s" % (profile.name, profile.label))
if args.blast:
results_handle.write(">%s\t%s\n" % (profile.name, profile.label))
else:
print("%-*s%-*s%-*s%12s" % (95, "Gene", 15, "Score", 5, "Classification","GTA Gene"))
if args.blast:
results_handle.write("%-*s%-*s%-*s%12s\n" % (95, "Gene", 15, "Score", 5, "Classification", "GTA Gene"))
for profile in test_profs:
print(">%-*s%-*f%-*s%14s" % (95, profile.org_name, 15, profile.score, 5, profile.label, gene))
if args.blast:
results_handle.write(">%-*s%-*f%-*s%14s\n" % (95, profile.org_name, 15, profile.score, 5, profile.label, gene))
end = time.time()
total = (end - start)
print ("time to run:", total)
my_main()
| [
"bin.Feature.Feature",
"os.listdir",
"argparse.ArgumentParser",
"bin.filter_extract_fasta_ch.extract_fasta",
"bin.Weight.Weight.load",
"os.path.join",
"bin.Loader.Loader.load",
"Bio.SeqIO.parse",
"Bio.Blast.Applications.NcbiblastpCommandline",
"bin.Weight.Weight",
"time.time",
"bin.SVM.SVM"
] | [((3259, 3345), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Gene Classification Using\n SVM."""'}), '(description=\n """Gene Classification Using\n SVM.""")\n', (3282, 3345), False, 'import argparse\n'), ((7822, 7840), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (7832, 7840), False, 'import os\n'), ((8286, 8305), 'os.listdir', 'os.listdir', (['folder2'], {}), '(folder2)\n', (8296, 8305), False, 'import os\n'), ((9178, 9329), 'Bio.Blast.Applications.NcbiblastpCommandline', 'NcbiblastpCommandline', ([], {'query': 'query_file', 'db': '"""data/GTA_db/GTA_viral"""', 'evalue': '(0.001)', 'outfmt': 'outformat', 'out': 'blast_out', 'num_threads': '(2)', 'dbsize': '(10000000)'}), "(query=query_file, db='data/GTA_db/GTA_viral', evalue=\n 0.001, outfmt=outformat, out=blast_out, num_threads=2, dbsize=10000000)\n", (9199, 9329), False, 'from Bio.Blast.Applications import NcbiblastpCommandline\n'), ((9589, 9620), 'Bio.SeqIO.parse', 'SeqIO.parse', (['handle_in', '"""fasta"""'], {}), "(handle_in, 'fasta')\n", (9600, 9620), False, 'from Bio import SeqIO\n'), ((12799, 12810), 'time.time', 'time.time', ([], {}), '()\n', (12808, 12810), False, 'import time\n'), ((12993, 13021), 'bin.Loader.Loader.load', 'Loader.load', (['gta_file', '"""GTA"""'], {}), "(gta_file, 'GTA')\n", (13004, 13021), False, 'from bin.Loader import Loader\n'), ((13040, 13072), 'bin.Loader.Loader.load', 'Loader.load', (['virus_file', '"""virus"""'], {}), "(virus_file, 'virus')\n", (13051, 13072), False, 'from bin.Loader import Loader\n'), ((13105, 13155), 'bin.Feature.Feature', 'Feature', (['(gta_profs.profiles + viral_profs.profiles)'], {}), '(gta_profs.profiles + viral_profs.profiles)\n', (13112, 13155), False, 'from bin.Feature import Feature\n'), ((16974, 16985), 'time.time', 'time.time', ([], {}), '()\n', (16983, 16985), False, 'import time\n'), ((9945, 10005), 'bin.filter_extract_fasta_ch.extract_fasta', 'extract_fasta', (['blast_out', 'query_file', 
'out_dir', 'my_dict', 'name'], {}), '(blast_out, query_file, out_dir, my_dict, name)\n', (9958, 10005), False, 'from bin.filter_extract_fasta_ch import extract_fasta\n'), ((14386, 14436), 'bin.SVM.SVM', 'SVM', (['gta_profs', 'viral_profs', 'c', 'kernel', 'kernel_var'], {}), '(gta_profs, viral_profs, c, kernel, kernel_var)\n', (14389, 14436), False, 'from bin.SVM import SVM\n'), ((7893, 7920), 'os.path.join', 'os.path.join', (['folder', 'child'], {}), '(folder, child)\n', (7905, 7920), False, 'import os\n'), ((8358, 8386), 'os.path.join', 'os.path.join', (['folder2', 'child'], {}), '(folder2, child)\n', (8370, 8386), False, 'import os\n'), ((13801, 13828), 'bin.Weight.Weight.load', 'Weight.load', (['args.weight[0]'], {}), '(args.weight[0])\n', (13812, 13828), False, 'from bin.Weight import Weight\n'), ((13854, 13884), 'bin.Weight.Weight', 'Weight', (['gta_profs', 'pairwiseGTA'], {}), '(gta_profs, pairwiseGTA)\n', (13860, 13884), False, 'from bin.Weight import Weight\n'), ((14047, 14074), 'bin.Weight.Weight.load', 'Weight.load', (['args.weight[1]'], {}), '(args.weight[1])\n', (14058, 14074), False, 'from bin.Weight import Weight\n'), ((14102, 14136), 'bin.Weight.Weight', 'Weight', (['viral_profs', 'pairwiseViral'], {}), '(viral_profs, pairwiseViral)\n', (14108, 14136), False, 'from bin.Weight import Weight\n'), ((15339, 15367), 'bin.Loader.Loader.load', 'Loader.load', (['args.queries[0]'], {}), '(args.queries[0])\n', (15350, 15367), False, 'from bin.Loader import Loader\n'), ((2529, 2550), 'os.listdir', 'os.listdir', (['in_folder'], {}), '(in_folder)\n', (2539, 2550), False, 'import os\n'), ((2688, 2718), 'os.path.join', 'os.path.join', (['in_folder', 'child'], {}), '(in_folder, child)\n', (2700, 2718), False, 'import os\n')] |
"""
This module contains the tests to check peleffy's solvent.
"""
import tempfile
from peleffy.utils import get_data_file_path, temporary_cd
from peleffy.topology import Molecule, Topology
from peleffy.forcefield import OPLS2005ForceField, OpenForceField
from peleffy.solvent import OPLSOBC, OBC2
class TestSolvent(object):
"""
It contains all the tests that validate the solvent-template generator.
"""
def test_single_topology(self):
"""
It tests the class that generates a OpenFFCompatibleSolvent object for
a single topology.
"""
from .utils import compare_dicts
import json
TEMPLATE_PARAMS_MAL = get_data_file_path('tests/ligandParams_MAL.txt')
# Loads the molecule
molecule = Molecule(path=get_data_file_path('ligands/malonate.pdb'),
tag='MAL')
# Sets forcefield and parameterizes it
ff = OpenForceField('openff_unconstrained-1.2.1.offxml')
parameters = ff.parameterize(molecule, charge_method='gasteiger')
# Initializes topology
topology = Topology(molecule, parameters)
# Initializes solvent and gets parameters file
solvent = OBC2(topology)
solvent_dict = solvent.to_dict()
# Loads reference dict from template
with open(TEMPLATE_PARAMS_MAL, 'r') as f:
reference_dict = json.load(f)
# Compare the output parameters dict with the reference parameters
compare_dicts(reference_dict, solvent_dict)
def test_multiple_topologies(self):
"""
It tests the class that generates a OpenFFCompatibleSolvent object for
multiple topologies.
"""
from .utils import compare_dicts, merge_dicts
# Path to multiple non standard residues
pdb_path_MAL = get_data_file_path('ligands/malonate.pdb')
pdb_path_MET = get_data_file_path('ligands/methane.pdb')
# Force Field to parameterize the molecules
ff = OpenForceField('openff_unconstrained-1.2.1.offxml')
# Topology of malonate
mol_MAL = Molecule(path=pdb_path_MAL, tag='MAL')
parameters_MAL = ff.parameterize(mol_MAL, charge_method='gasteiger')
topology_MAL = Topology(mol_MAL, parameters_MAL)
# Topology of methane
mol_MET = Molecule(path=pdb_path_MET, tag='MET')
parameters_MET = ff.parameterize(mol_MET, charge_method='gasteiger')
topology_MET = Topology(mol_MET, parameters_MET)
# List containing both topologies
topologies = [topology_MAL, topology_MET]
# Generate the Solvent parameters dictionaries
solvent_MAL_dict = OBC2(topology_MAL).to_dict()
solvent_MET_dict = OBC2(topology_MET).to_dict()
solvent_dict = OBC2(topologies).to_dict()
# Check that merging both single topology dicitionaries we obtain the
# same dictionary that using multiple topologies
compare_dicts(merge_dicts(solvent_MAL_dict['SolventParameters'],
solvent_MET_dict['SolventParameters']),
solvent_dict['SolventParameters'])
def test_multiple_topologies_writer(self):
"""
It tests the class that generates a OpenFFCompatibleSolvent object for multiple topologies. It compares the outcome of the Solvent writer with
a reference file.
"""
from .utils import compare_dicts, parameterize_opls2005
import json
TEMPLATE_PARAMS = get_data_file_path('tests/ligandParams.txt')
with tempfile.TemporaryDirectory() as tmpdir:
with temporary_cd(tmpdir):
path_OXO = get_data_file_path('tests/MRO_oleic/OXO.pdb')
path_OLC = get_data_file_path('tests/MRO_oleic/OLC.pdb')
ff = OpenForceField('openff_unconstrained-1.2.1.offxml')
opls2005 = OPLS2005ForceField()
# Group OXO
m_OXO = Molecule(path_OXO)
ffld_file = get_data_file_path('tests/OXO_ffld_output.txt')
parameters_OXO = parameterize_opls2005(opls2005, m_OXO,
ffld_file)
topology_OXO = Topology(m_OXO, parameters_OXO)
# Acid oleic
m_OLC = Molecule(path_OLC)
parameters_OLC = ff.parameterize(m_OLC,
charge_method='gasteiger')
topology_OLC = Topology(m_OLC, parameters_OLC)
# Multiple topologies
topologies = [topology_OXO, topology_OLC]
solvent = OBC2(topologies)
solvent.to_file('OBC_parameters.txt')
# Loads reference dict from template
with open(TEMPLATE_PARAMS, 'r') as f:
reference_dict = json.load(f)
# Loads the generated template into a dict
with open('OBC_parameters.txt', 'r') as f:
solvent_dict = json.load(f)
# Compare the output parameters dict with the reference parameters
compare_dicts(reference_dict, solvent_dict)
def test_OBCOPLS_writer(self):
"""
It test the function that writes a OPLS2005CompatibleSolvent object to
a file compatible with PELE.
"""
from .utils import parameterize_opls2005, compare_files_without_order
TEMPLATE_PARAMS_ETL = get_data_file_path(
'tests/ETL_solventParamsHCTOBC.txt')
TEMPLATE_PARAMS_MAL = get_data_file_path(
'tests/MAL_solventParamsHCTOBC.txt')
TEMPLATE_PARAMS_MET = get_data_file_path(
'tests/MET_solventParamsHCTOBC.txt')
def test_OBCOPLS_writer_ligand(pdbfile, tag_name, ffld_name,
reference_file):
"""
Given a ligand, it tests that the output parameters file
corresponds to the refenrece file.
Parameters
----------
pdbfile : str
The path to the PDB of the ligand to test
ffld_name : str
The path to the ffld_server's output file
reference_file : str
The path to reference TXT file compatible with PELE
"""
with tempfile.TemporaryDirectory() as tmpdir:
with temporary_cd(tmpdir):
# Loads the molecule
molecule = Molecule(get_data_file_path(pdbfile),
tag=tag_name)
# Sets forcefield and parameterizes it
opls2005 = OPLS2005ForceField()
ffld_file = get_data_file_path(ffld_name)
parameters = parameterize_opls2005(opls2005,
molecule,
ffld_file)
# Initializes topology
topology = Topology(molecule, parameters)
# Initializes solvent and gets parameters file
solvent = OPLSOBC(topology)
solvent.to_file('OBC_parameters.txt')
# Compare the output file with the reference parameters file
compare_files_without_order('OBC_parameters.txt',
reference_file)
# Test for ethylene
test_OBCOPLS_writer_ligand(pdbfile='ligands/ethylene.pdb',
tag_name='ETL',
ffld_name='tests/ETL_ffld_output.txt',
reference_file=TEMPLATE_PARAMS_ETL)
# Test for methane
test_OBCOPLS_writer_ligand(pdbfile='ligands/methane.pdb',
tag_name='MET',
ffld_name='tests/MET_ffld_output.txt',
reference_file=TEMPLATE_PARAMS_MET)
# Test for malonate
test_OBCOPLS_writer_ligand(pdbfile='ligands/malonate.pdb',
tag_name='MAL',
ffld_name='tests/MAL_ffld_output.txt',
reference_file=TEMPLATE_PARAMS_MAL)
| [
"peleffy.utils.temporary_cd",
"tempfile.TemporaryDirectory",
"peleffy.forcefield.OPLS2005ForceField",
"peleffy.utils.get_data_file_path",
"peleffy.topology.Molecule",
"json.load",
"peleffy.solvent.OPLSOBC",
"peleffy.topology.Topology",
"peleffy.forcefield.OpenForceField",
"peleffy.solvent.OBC2"
] | [((680, 728), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['"""tests/ligandParams_MAL.txt"""'], {}), "('tests/ligandParams_MAL.txt')\n", (698, 728), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((937, 988), 'peleffy.forcefield.OpenForceField', 'OpenForceField', (['"""openff_unconstrained-1.2.1.offxml"""'], {}), "('openff_unconstrained-1.2.1.offxml')\n", (951, 988), False, 'from peleffy.forcefield import OPLS2005ForceField, OpenForceField\n'), ((1114, 1144), 'peleffy.topology.Topology', 'Topology', (['molecule', 'parameters'], {}), '(molecule, parameters)\n', (1122, 1144), False, 'from peleffy.topology import Molecule, Topology\n'), ((1219, 1233), 'peleffy.solvent.OBC2', 'OBC2', (['topology'], {}), '(topology)\n', (1223, 1233), False, 'from peleffy.solvent import OPLSOBC, OBC2\n'), ((1841, 1883), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['"""ligands/malonate.pdb"""'], {}), "('ligands/malonate.pdb')\n", (1859, 1883), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((1907, 1948), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['"""ligands/methane.pdb"""'], {}), "('ligands/methane.pdb')\n", (1925, 1948), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((2015, 2066), 'peleffy.forcefield.OpenForceField', 'OpenForceField', (['"""openff_unconstrained-1.2.1.offxml"""'], {}), "('openff_unconstrained-1.2.1.offxml')\n", (2029, 2066), False, 'from peleffy.forcefield import OPLS2005ForceField, OpenForceField\n'), ((2117, 2155), 'peleffy.topology.Molecule', 'Molecule', ([], {'path': 'pdb_path_MAL', 'tag': '"""MAL"""'}), "(path=pdb_path_MAL, tag='MAL')\n", (2125, 2155), False, 'from peleffy.topology import Molecule, Topology\n'), ((2256, 2289), 'peleffy.topology.Topology', 'Topology', (['mol_MAL', 'parameters_MAL'], {}), '(mol_MAL, parameters_MAL)\n', (2264, 2289), False, 'from peleffy.topology import Molecule, Topology\n'), ((2339, 2377), 
'peleffy.topology.Molecule', 'Molecule', ([], {'path': 'pdb_path_MET', 'tag': '"""MET"""'}), "(path=pdb_path_MET, tag='MET')\n", (2347, 2377), False, 'from peleffy.topology import Molecule, Topology\n'), ((2478, 2511), 'peleffy.topology.Topology', 'Topology', (['mol_MET', 'parameters_MET'], {}), '(mol_MET, parameters_MET)\n', (2486, 2511), False, 'from peleffy.topology import Molecule, Topology\n'), ((3523, 3567), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['"""tests/ligandParams.txt"""'], {}), "('tests/ligandParams.txt')\n", (3541, 3567), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((5495, 5550), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['"""tests/ETL_solventParamsHCTOBC.txt"""'], {}), "('tests/ETL_solventParamsHCTOBC.txt')\n", (5513, 5550), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((5594, 5649), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['"""tests/MAL_solventParamsHCTOBC.txt"""'], {}), "('tests/MAL_solventParamsHCTOBC.txt')\n", (5612, 5649), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((5693, 5748), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['"""tests/MET_solventParamsHCTOBC.txt"""'], {}), "('tests/MET_solventParamsHCTOBC.txt')\n", (5711, 5748), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((1400, 1412), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1409, 1412), False, 'import json\n'), ((3582, 3611), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (3609, 3611), False, 'import tempfile\n'), ((793, 835), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['"""ligands/malonate.pdb"""'], {}), "('ligands/malonate.pdb')\n", (811, 835), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((2688, 2706), 'peleffy.solvent.OBC2', 'OBC2', (['topology_MAL'], {}), '(topology_MAL)\n', (2692, 2706), False, 'from peleffy.solvent import 
OPLSOBC, OBC2\n'), ((2744, 2762), 'peleffy.solvent.OBC2', 'OBC2', (['topology_MET'], {}), '(topology_MET)\n', (2748, 2762), False, 'from peleffy.solvent import OPLSOBC, OBC2\n'), ((2796, 2812), 'peleffy.solvent.OBC2', 'OBC2', (['topologies'], {}), '(topologies)\n', (2800, 2812), False, 'from peleffy.solvent import OPLSOBC, OBC2\n'), ((3640, 3660), 'peleffy.utils.temporary_cd', 'temporary_cd', (['tmpdir'], {}), '(tmpdir)\n', (3652, 3660), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((3689, 3734), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['"""tests/MRO_oleic/OXO.pdb"""'], {}), "('tests/MRO_oleic/OXO.pdb')\n", (3707, 3734), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((3762, 3807), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['"""tests/MRO_oleic/OLC.pdb"""'], {}), "('tests/MRO_oleic/OLC.pdb')\n", (3780, 3807), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((3830, 3881), 'peleffy.forcefield.OpenForceField', 'OpenForceField', (['"""openff_unconstrained-1.2.1.offxml"""'], {}), "('openff_unconstrained-1.2.1.offxml')\n", (3844, 3881), False, 'from peleffy.forcefield import OPLS2005ForceField, OpenForceField\n'), ((3909, 3929), 'peleffy.forcefield.OPLS2005ForceField', 'OPLS2005ForceField', ([], {}), '()\n', (3927, 3929), False, 'from peleffy.forcefield import OPLS2005ForceField, OpenForceField\n'), ((3983, 4001), 'peleffy.topology.Molecule', 'Molecule', (['path_OXO'], {}), '(path_OXO)\n', (3991, 4001), False, 'from peleffy.topology import Molecule, Topology\n'), ((4030, 4077), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['"""tests/OXO_ffld_output.txt"""'], {}), "('tests/OXO_ffld_output.txt')\n", (4048, 4077), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((4247, 4278), 'peleffy.topology.Topology', 'Topology', (['m_OXO', 'parameters_OXO'], {}), '(m_OXO, parameters_OXO)\n', (4255, 4278), False, 'from peleffy.topology 
import Molecule, Topology\n'), ((4333, 4351), 'peleffy.topology.Molecule', 'Molecule', (['path_OLC'], {}), '(path_OLC)\n', (4341, 4351), False, 'from peleffy.topology import Molecule, Topology\n'), ((4515, 4546), 'peleffy.topology.Topology', 'Topology', (['m_OLC', 'parameters_OLC'], {}), '(m_OLC, parameters_OLC)\n', (4523, 4546), False, 'from peleffy.topology import Molecule, Topology\n'), ((4670, 4686), 'peleffy.solvent.OBC2', 'OBC2', (['topologies'], {}), '(topologies)\n', (4674, 4686), False, 'from peleffy.solvent import OPLSOBC, OBC2\n'), ((6371, 6400), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (6398, 6400), False, 'import tempfile\n'), ((4886, 4898), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4895, 4898), False, 'import json\n'), ((5053, 5065), 'json.load', 'json.load', (['f'], {}), '(f)\n', (5062, 5065), False, 'import json\n'), ((6433, 6453), 'peleffy.utils.temporary_cd', 'temporary_cd', (['tmpdir'], {}), '(tmpdir)\n', (6445, 6453), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((6712, 6732), 'peleffy.forcefield.OPLS2005ForceField', 'OPLS2005ForceField', ([], {}), '()\n', (6730, 6732), False, 'from peleffy.forcefield import OPLS2005ForceField, OpenForceField\n'), ((6765, 6794), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['ffld_name'], {}), '(ffld_name)\n', (6783, 6794), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n'), ((7066, 7096), 'peleffy.topology.Topology', 'Topology', (['molecule', 'parameters'], {}), '(molecule, parameters)\n', (7074, 7096), False, 'from peleffy.topology import Molecule, Topology\n'), ((7195, 7212), 'peleffy.solvent.OPLSOBC', 'OPLSOBC', (['topology'], {}), '(topology)\n', (7202, 7212), False, 'from peleffy.solvent import OPLSOBC, OBC2\n'), ((6538, 6565), 'peleffy.utils.get_data_file_path', 'get_data_file_path', (['pdbfile'], {}), '(pdbfile)\n', (6556, 6565), False, 'from peleffy.utils import get_data_file_path, temporary_cd\n')] 
|
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm
from solo12_collisions_utils import followBoundary
# Load the collision map from file
col_map_file = './npy_data/collision_map_centered_res100.npy'
col_map = np.load(col_map_file, allow_pickle=True)
traj1 = np.array(followBoundary(col_map))
traj1 = [[t[1], t[0]] for t in traj1]
traj2 = np.array(followBoundary(col_map, first_dir=2))
traj2 = [[t[1], t[0]] for t in traj2]
print(col_map.shape)
#plt.imshow(col_map.T)
#plt.show()
xSize = len(col_map[0])
ySize = len(col_map)
xx, yy = np.meshgrid(np.linspace(0, xSize, xSize),
np.linspace(0, ySize, ySize))
def getNeighbors(k, l, dist):
    """Return all grid indices in the (2*dist+1) x (2*dist+1) window around [k, l].

    The window is symmetric: rows k-dist..k+dist and columns l-dist..l+dist,
    including the center cell [k, l] itself (the original also included it).

    Fix: the original loops ran over ``range(2*dist)``, producing an
    asymmetric window [k-dist, k+dist-1] that silently dropped the bottom
    and right edges; iterating 2*dist+1 offsets restores symmetry.

    Parameters
    ----------
    k, l : int -- center cell indices.
    dist : int (or float, truncated) -- half-width of the window.

    Returns
    -------
    list of [row, col] index pairs (no bounds clipping is performed).
    """
    dist = int(dist)
    neighbors = []
    for di in range(-dist, dist + 1):
        for dj in range(-dist, dist + 1):
            neighbors.append([k + di, l + dj])
    return neighbors
# Build the training set: every grid cell. The neighbor/boundary-based
# subsampling is disabled (kept in the triple-quoted block for reference).
X = []
for i in range(xSize):
    for j in range(ySize):
        neighbors = getNeighbors(i,j,2)
        append = False
        '''
        for n in neighbors:
            if(n in traj1 or n in traj2):
                append = True
        if(append or (i%3 == 0 and j%3 == 0)):
            X.append([i,j])
        '''
        X.append([i,j])
X = np.array(X)
print(X.shape)
# Binary labels: nonzero map values are treated as the positive (collision)
# class -- TODO confirm map value semantics.
Y = col_map[X[:,0],X[:,1]] > 0 #for classifier
# Fit a nu-SVM classifier on the labeled grid.
clf = svm.NuSVC(nu=0.5)
clf.fit(X,Y)
support = np.array(clf.support_vectors_)
print("Nb. support vectors : \n{}".format(clf.n_support_))
print("Support vectors : \n{}".format(support))
# plot the decision function for each datapoint on the grid
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.imshow(Z, interpolation='nearest',
           extent=(xx.min(), xx.max(), yy.min(), yy.max()), aspect='auto',
           origin='lower', cmap=plt.cm.PuOr_r)
# Overlay the learned decision boundary (zero level set of Z).
contours = plt.contour(xx, yy, Z, levels=[0], linewidths=2,
                 linestyles='dashed', colors=['red'])
#plt.scatter(X[:, 0], X[:, 1], s=35, c=Y, cmap=plt.cm.Paired,
#            edgecolors='k')
# Highlight the support vectors in red.
plt.scatter(support[:,0], support[:,1], c='red', s=15)
plt.xticks(())
plt.yticks(())
plt.axis([0,xSize,0,ySize])
plt.show()
"matplotlib.pyplot.xticks",
"sklearn.svm.NuSVC",
"solo12_collisions_utils.followBoundary",
"numpy.array",
"matplotlib.pyplot.contour",
"numpy.linspace",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.axis",
"numpy.load",
"matplotlib.pyplot.show"
] | [((235, 275), 'numpy.load', 'np.load', (['col_map_file'], {'allow_pickle': '(True)'}), '(col_map_file, allow_pickle=True)\n', (242, 275), True, 'import numpy as np\n'), ((1417, 1428), 'numpy.array', 'np.array', (['X'], {}), '(X)\n', (1425, 1428), True, 'import numpy as np\n'), ((1497, 1514), 'sklearn.svm.NuSVC', 'svm.NuSVC', ([], {'nu': '(0.5)'}), '(nu=0.5)\n', (1506, 1514), False, 'from sklearn import svm\n'), ((1538, 1568), 'numpy.array', 'np.array', (['clf.support_vectors_'], {}), '(clf.support_vectors_)\n', (1546, 1568), True, 'import numpy as np\n'), ((1991, 2080), 'matplotlib.pyplot.contour', 'plt.contour', (['xx', 'yy', 'Z'], {'levels': '[0]', 'linewidths': '(2)', 'linestyles': '"""dashed"""', 'colors': "['red']"}), "(xx, yy, Z, levels=[0], linewidths=2, linestyles='dashed',\n colors=['red'])\n", (2002, 2080), True, 'import matplotlib.pyplot as plt\n'), ((2191, 2247), 'matplotlib.pyplot.scatter', 'plt.scatter', (['support[:, 0]', 'support[:, 1]'], {'c': '"""red"""', 's': '(15)'}), "(support[:, 0], support[:, 1], c='red', s=15)\n", (2202, 2247), True, 'import matplotlib.pyplot as plt\n'), ((2246, 2260), 'matplotlib.pyplot.xticks', 'plt.xticks', (['()'], {}), '(())\n', (2256, 2260), True, 'import matplotlib.pyplot as plt\n'), ((2261, 2275), 'matplotlib.pyplot.yticks', 'plt.yticks', (['()'], {}), '(())\n', (2271, 2275), True, 'import matplotlib.pyplot as plt\n'), ((2276, 2306), 'matplotlib.pyplot.axis', 'plt.axis', (['[0, xSize, 0, ySize]'], {}), '([0, xSize, 0, ySize])\n', (2284, 2306), True, 'import matplotlib.pyplot as plt\n'), ((2304, 2314), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2312, 2314), True, 'import matplotlib.pyplot as plt\n'), ((293, 316), 'solo12_collisions_utils.followBoundary', 'followBoundary', (['col_map'], {}), '(col_map)\n', (307, 316), False, 'from solo12_collisions_utils import followBoundary\n'), ((374, 410), 'solo12_collisions_utils.followBoundary', 'followBoundary', (['col_map'], {'first_dir': '(2)'}), '(col_map, 
first_dir=2)\n', (388, 410), False, 'from solo12_collisions_utils import followBoundary\n'), ((574, 602), 'numpy.linspace', 'np.linspace', (['(0)', 'xSize', 'xSize'], {}), '(0, xSize, xSize)\n', (585, 602), True, 'import numpy as np\n'), ((625, 653), 'numpy.linspace', 'np.linspace', (['(0)', 'ySize', 'ySize'], {}), '(0, ySize, ySize)\n', (636, 653), True, 'import numpy as np\n')] |
"""Tests for usgsm2m module."""
import os
import pytest
from usgsm2m.usgsm2m import USGSM2M
from usgsm2m.errors import USGSM2MError
@pytest.fixture(scope="module")
def ee():
    """Module-scoped USGSM2M client; credentials come from environment variables."""
    return USGSM2M(
        os.getenv("USGSM2M_USERNAME"), os.getenv("USGSM2M_PASSWORD")
    )
def test_ee_login(ee):
    """The fixture-provided client should report an authenticated session."""
    assert ee.logged_in()
def test_ee_login_error():
    """Logging in with bad credentials must raise USGSM2MError.

    Fix: the original body only *constructed* a ``USGSM2MError`` instance,
    which never raises, so ``pytest.raises`` always failed with
    "DID NOT RAISE". The intended behavior is to attempt a real login with
    invalid credentials and verify that it raises.
    """
    with pytest.raises(USGSM2MError):
        USGSM2M("bad_username", "bad_password")
| [
"pytest.fixture",
"pytest.raises",
"os.getenv",
"usgsm2m.errors.USGSM2MError"
] | [((137, 167), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (151, 167), False, 'import pytest\n'), ((206, 235), 'os.getenv', 'os.getenv', (['"""USGSM2M_USERNAME"""'], {}), "('USGSM2M_USERNAME')\n", (215, 235), False, 'import os\n'), ((237, 266), 'os.getenv', 'os.getenv', (['"""USGSM2M_PASSWORD"""'], {}), "('USGSM2M_PASSWORD')\n", (246, 266), False, 'import os\n'), ((362, 389), 'pytest.raises', 'pytest.raises', (['USGSM2MError'], {}), '(USGSM2MError)\n', (375, 389), False, 'import pytest\n'), ((399, 443), 'usgsm2m.errors.USGSM2MError', 'USGSM2MError', (['"""bad_username"""', '"""bad_password"""'], {}), "('bad_username', 'bad_password')\n", (411, 443), False, 'from usgsm2m.errors import USGSM2MError\n')] |
import os
import numpy as np
from data_prepare import *
from Network_structure import *
from loss_function import *
from train_method import *
def save_eeg(saved_model, result_location, foldername, save_train, save_vali, save_test,
            noiseEEG_train, EEG_train, noiseEEG_val, EEG_val, noiseEEG_test, EEG_test, train_num, denoise_network, datanum):
    """Run the trained model on the selected splits and save the arrays to disk.

    For every enabled split (train / validation / test) the noisy input, the
    denoised network output and the clean reference EEG are written as .npy
    files under ``<result_location>/<foldername>/<train_num>/nn_output/``.
    Failures are reported on stdout (best effort) instead of propagating,
    matching the original behavior.

    Parameters
    ----------
    saved_model : trained network, forwarded to ``test_step`` unchanged.
    result_location, foldername, train_num : components of the output path.
    save_train, save_vali, save_test : bool flags enabling each split.
    noiseEEG_* / EEG_* : noisy inputs and clean targets per split.
    denoise_network, datanum : forwarded to ``test_step`` unchanged.
    """
    out_dir = os.path.join(result_location, foldername, train_num, 'nn_output')

    def _save_split(file_tag, label, noise_eeg, clean_eeg):
        """Denoise one split and save input/output/target arrays to out_dir."""
        try:
            denoise_output, _ = test_step(saved_model, noise_eeg, clean_eeg,
                                          denoise_network, datanum)
            if not os.path.exists(out_dir):
                os.makedirs(out_dir)
            np.save(os.path.join(out_dir, 'noiseinput_' + file_tag + '.npy'), noise_eeg)
            np.save(os.path.join(out_dir, 'Denoiseoutput_' + file_tag + '.npy'), denoise_output)
            np.save(os.path.join(out_dir, 'EEG_' + file_tag + '.npy'), clean_eeg)
        except Exception:
            # `except Exception` (not bare `except:`) keeps the original
            # best-effort behavior without also swallowing KeyboardInterrupt
            # or SystemExit.
            print("Error during saving " + label + " signal.")

    if save_train:
        _save_split('train', 'training', noiseEEG_train, EEG_train)
    if save_vali:
        _save_split('val', 'validation', noiseEEG_val, EEG_val)
    if save_test:
        _save_split('test', 'test', noiseEEG_test, EEG_test)
"os.makedirs",
"os.path.exists",
"numpy.save"
] | [((814, 946), 'numpy.save', 'np.save', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output' +\n '/' + 'noiseinput_train.npy')", 'noiseEEG_train'], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output' + '/' + 'noiseinput_train.npy', noiseEEG_train)\n", (821, 946), True, 'import numpy as np\n'), ((955, 1095), 'numpy.save', 'np.save', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output' +\n '/' + 'Denoiseoutput_train.npy')", 'Denoiseoutput_train'], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output' + '/' + 'Denoiseoutput_train.npy', Denoiseoutput_train)\n", (962, 1095), True, 'import numpy as np\n'), ((1164, 1284), 'numpy.save', 'np.save', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output' +\n '/' + 'EEG_train.npy')", 'EEG_train'], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output' + '/' + 'EEG_train.npy', EEG_train)\n", (1171, 1284), True, 'import numpy as np\n'), ((1811, 1939), 'numpy.save', 'np.save', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output' +\n '/' + 'noiseinput_val.npy')", 'noiseEEG_val'], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output' + '/' + 'noiseinput_val.npy', noiseEEG_val)\n", (1818, 1939), True, 'import numpy as np\n'), ((1947, 2083), 'numpy.save', 'np.save', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output' +\n '/' + 'Denoiseoutput_val.npy')", 'Denoiseoutput_val'], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output' + '/' + 'Denoiseoutput_val.npy', Denoiseoutput_val)\n", (1954, 2083), True, 'import numpy as np\n'), ((2158, 2274), 'numpy.save', 'np.save', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output' +\n '/' + 'EEG_val.npy')", 'EEG_val'], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output' + '/' + 
'EEG_val.npy', EEG_val)\n", (2165, 2274), True, 'import numpy as np\n'), ((2788, 2918), 'numpy.save', 'np.save', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output' +\n '/' + 'noiseinput_test.npy')", 'noiseEEG_test'], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output' + '/' + 'noiseinput_test.npy', noiseEEG_test)\n", (2795, 2918), True, 'import numpy as np\n'), ((2926, 3064), 'numpy.save', 'np.save', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output' +\n '/' + 'Denoiseoutput_test.npy')", 'Denoiseoutput_test'], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output' + '/' + 'Denoiseoutput_test.npy', Denoiseoutput_test)\n", (2933, 3064), True, 'import numpy as np\n'), ((3139, 3257), 'numpy.save', 'np.save', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output' +\n '/' + 'EEG_test.npy')", 'EEG_test'], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output' + '/' + 'EEG_test.npy', EEG_test)\n", (3146, 3257), True, 'import numpy as np\n'), ((607, 699), 'os.path.exists', 'os.path.exists', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output')"], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output')\n", (621, 699), False, 'import os\n'), ((713, 802), 'os.makedirs', 'os.makedirs', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output')"], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output')\n", (724, 802), False, 'import os\n'), ((1603, 1695), 'os.path.exists', 'os.path.exists', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output')"], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output')\n", (1617, 1695), False, 'import os\n'), ((1709, 1798), 'os.makedirs', 'os.makedirs', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output')"], {}), 
"(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output')\n", (1720, 1798), False, 'import os\n'), ((2579, 2671), 'os.path.exists', 'os.path.exists', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output')"], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output')\n", (2593, 2671), False, 'import os\n'), ((2685, 2774), 'os.makedirs', 'os.makedirs', (["(result_location + '/' + foldername + '/' + train_num + '/' + 'nn_output')"], {}), "(result_location + '/' + foldername + '/' + train_num + '/' +\n 'nn_output')\n", (2696, 2774), False, 'import os\n')] |
########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
from cloudify import ctx
from cloudify.decorators import operation
import pynsxv.library.nsx_dlr as nsx_router
import cloudify_nsx.library.nsx_common as common
from cloudify import exceptions as cfy_exc
import cloudify_nsx.library.nsx_esg_dlr as nsx_dlr
@operation
def create(**kwargs):
    """Cloudify operation: create (or adopt) an NSX distributed logical router.

    Validates the node's ``router`` properties, logs in to NSX, then either
    adopts a pre-existing DLR (``use_existing``) or creates a new one and
    stores its ``resource_id`` in the instance runtime properties. Finally
    it resolves the uplink vNIC index and applies the common edge settings.
    """
    # Declarative validation of the 'router' property block: required keys
    # plus the accepted appliance sizes for 'dlr_size'.
    validation_rules = {
        "name": {
            "required": True
        },
        "dlr_pwd": {
            "required": True
        },
        "dlr_size": {
            "default": "compact",
            "values": [
                "compact",
                "large",
                "quadlarge",
                "xlarge"
            ]
        },
        "ha_ls_id": {
            "required": True
        },
        "uplink_ls_id": {
            "required": True
        },
        "uplink_ip": {
            "required": True
        },
        "uplink_subnet": {
            "required": True
        },
        "uplink_dgw": {
            "required": True
        }
    }
    use_existing, router_dict = common.get_properties_and_validate(
        'router', kwargs, validation_rules
    )
    ctx.logger.info("checking %s" % router_dict["name"])
    # credentials / NSX session
    client_session = common.nsx_login(kwargs)
    resource_id = ctx.instance.runtime_properties.get('resource_id')
    if not use_existing and not resource_id:
        # Look up an existing router with the same name in NSX.
        resource_id, _ = nsx_router.dlr_read(
            client_session, router_dict["name"]
        )
        if use_existing:
            # Adopt the found router instead of creating a new one.
            ctx.instance.runtime_properties['resource_id'] = resource_id
            ctx.logger.info("Used existed %s" % resource_id)
        elif resource_id:
            # Not adopting, but a router with this name already exists: fail.
            raise cfy_exc.NonRecoverableError(
                "Router '%s' already exists" % router_dict["name"]
            )
    if not resource_id:
        # update properties with vcenter specific values,
        # required only on create
        router_dict = common.possibly_assign_vm_creation_props(router_dict)
        resource_id, _ = nsx_router.dlr_create(
            client_session,
            router_dict['name'],
            router_dict['dlr_pwd'],
            router_dict['dlr_size'],
            router_dict['datacentermoid'],
            router_dict['datastoremoid'],
            router_dict['resourcepoolid'],
            router_dict['ha_ls_id'],
            router_dict['uplink_ls_id'],
            router_dict['uplink_ip'],
            router_dict['uplink_subnet'],
            router_dict['uplink_dgw'])
        ctx.instance.runtime_properties['resource_id'] = resource_id
        ctx.logger.info("created %s" % resource_id)
    # Resolve which vNIC of the edge is attached to the uplink logical switch
    # and persist it for later relationship operations.
    uplink_vnic = nsx_dlr.get_uplink_vnic(
        client_session, resource_id, router_dict['uplink_ls_id'])
    ctx.instance.runtime_properties['router']['uplink_vnic'] = uplink_vnic
    nsx_dlr.update_common_edges(client_session, resource_id, kwargs, False)
@operation
def delete(**kwargs):
    """Cloudify operation: delete the DLR created by ``create``.

    Pre-existing (adopted) routers and instances without a stored
    ``resource_id`` are left untouched; in both cases only the edge-related
    runtime properties are cleaned up.
    """
    use_existing, router_dict = common.get_properties('router', kwargs)
    if use_existing:
        # Adopted router: never delete, just clean runtime properties.
        nsx_dlr.remove_properties_edges()
        ctx.logger.info("Used pre existed!")
        return
    resource_id = ctx.instance.runtime_properties.get('resource_id')
    if not resource_id:
        # Nothing was ever created for this instance.
        nsx_dlr.remove_properties_edges()
        ctx.logger.info("We dont have resource_id")
        return
    # credentials / NSX session
    client_session = common.nsx_login(kwargs)
    # Deletion can fail transiently on the NSX side; retry via the common
    # rerun helper.
    common.attempt_with_rerun(
        nsx_dlr.del_edge,
        client_session=client_session,
        resource_id=resource_id
    )
    ctx.logger.info("deleted %s" % resource_id)
    nsx_dlr.remove_properties_edges()
| [
"pynsxv.library.nsx_dlr.dlr_create",
"cloudify_nsx.library.nsx_common.possibly_assign_vm_creation_props",
"cloudify_nsx.library.nsx_esg_dlr.update_common_edges",
"cloudify_nsx.library.nsx_common.attempt_with_rerun",
"cloudify_nsx.library.nsx_common.nsx_login",
"cloudify.exceptions.NonRecoverableError",
... | [((1647, 1717), 'cloudify_nsx.library.nsx_common.get_properties_and_validate', 'common.get_properties_and_validate', (['"""router"""', 'kwargs', 'validation_rules'], {}), "('router', kwargs, validation_rules)\n", (1681, 1717), True, 'import cloudify_nsx.library.nsx_common as common\n'), ((1737, 1789), 'cloudify.ctx.logger.info', 'ctx.logger.info', (["('checking %s' % router_dict['name'])"], {}), "('checking %s' % router_dict['name'])\n", (1752, 1789), False, 'from cloudify import ctx\n'), ((1830, 1854), 'cloudify_nsx.library.nsx_common.nsx_login', 'common.nsx_login', (['kwargs'], {}), '(kwargs)\n', (1846, 1854), True, 'import cloudify_nsx.library.nsx_common as common\n'), ((1874, 1924), 'cloudify.ctx.instance.runtime_properties.get', 'ctx.instance.runtime_properties.get', (['"""resource_id"""'], {}), "('resource_id')\n", (1909, 1924), False, 'from cloudify import ctx\n'), ((3227, 3313), 'cloudify_nsx.library.nsx_esg_dlr.get_uplink_vnic', 'nsx_dlr.get_uplink_vnic', (['client_session', 'resource_id', "router_dict['uplink_ls_id']"], {}), "(client_session, resource_id, router_dict[\n 'uplink_ls_id'])\n", (3250, 3313), True, 'import cloudify_nsx.library.nsx_esg_dlr as nsx_dlr\n'), ((3399, 3470), 'cloudify_nsx.library.nsx_esg_dlr.update_common_edges', 'nsx_dlr.update_common_edges', (['client_session', 'resource_id', 'kwargs', '(False)'], {}), '(client_session, resource_id, kwargs, False)\n', (3426, 3470), True, 'import cloudify_nsx.library.nsx_esg_dlr as nsx_dlr\n'), ((3538, 3577), 'cloudify_nsx.library.nsx_common.get_properties', 'common.get_properties', (['"""router"""', 'kwargs'], {}), "('router', kwargs)\n", (3559, 3577), True, 'import cloudify_nsx.library.nsx_common as common\n'), ((3721, 3771), 'cloudify.ctx.instance.runtime_properties.get', 'ctx.instance.runtime_properties.get', (['"""resource_id"""'], {}), "('resource_id')\n", (3756, 3771), False, 'from cloudify import ctx\n'), ((3945, 3969), 'cloudify_nsx.library.nsx_common.nsx_login', 'common.nsx_login', 
(['kwargs'], {}), '(kwargs)\n', (3961, 3969), True, 'import cloudify_nsx.library.nsx_common as common\n'), ((3975, 4078), 'cloudify_nsx.library.nsx_common.attempt_with_rerun', 'common.attempt_with_rerun', (['nsx_dlr.del_edge'], {'client_session': 'client_session', 'resource_id': 'resource_id'}), '(nsx_dlr.del_edge, client_session=client_session,\n resource_id=resource_id)\n', (4000, 4078), True, 'import cloudify_nsx.library.nsx_common as common\n'), ((4110, 4153), 'cloudify.ctx.logger.info', 'ctx.logger.info', (["('deleted %s' % resource_id)"], {}), "('deleted %s' % resource_id)\n", (4125, 4153), False, 'from cloudify import ctx\n'), ((4159, 4192), 'cloudify_nsx.library.nsx_esg_dlr.remove_properties_edges', 'nsx_dlr.remove_properties_edges', ([], {}), '()\n', (4190, 4192), True, 'import cloudify_nsx.library.nsx_esg_dlr as nsx_dlr\n'), ((1996, 2052), 'pynsxv.library.nsx_dlr.dlr_read', 'nsx_router.dlr_read', (['client_session', "router_dict['name']"], {}), "(client_session, router_dict['name'])\n", (2015, 2052), True, 'import pynsxv.library.nsx_dlr as nsx_router\n'), ((2526, 2579), 'cloudify_nsx.library.nsx_common.possibly_assign_vm_creation_props', 'common.possibly_assign_vm_creation_props', (['router_dict'], {}), '(router_dict)\n', (2566, 2579), True, 'import cloudify_nsx.library.nsx_common as common\n'), ((2605, 2959), 'pynsxv.library.nsx_dlr.dlr_create', 'nsx_router.dlr_create', (['client_session', "router_dict['name']", "router_dict['dlr_pwd']", "router_dict['dlr_size']", "router_dict['datacentermoid']", "router_dict['datastoremoid']", "router_dict['resourcepoolid']", "router_dict['ha_ls_id']", "router_dict['uplink_ls_id']", "router_dict['uplink_ip']", "router_dict['uplink_subnet']", "router_dict['uplink_dgw']"], {}), "(client_session, router_dict['name'], router_dict[\n 'dlr_pwd'], router_dict['dlr_size'], router_dict['datacentermoid'],\n router_dict['datastoremoid'], router_dict['resourcepoolid'],\n router_dict['ha_ls_id'], router_dict['uplink_ls_id'], 
router_dict[\n 'uplink_ip'], router_dict['uplink_subnet'], router_dict['uplink_dgw'])\n", (2626, 2959), True, 'import pynsxv.library.nsx_dlr as nsx_router\n'), ((3164, 3207), 'cloudify.ctx.logger.info', 'ctx.logger.info', (["('created %s' % resource_id)"], {}), "('created %s' % resource_id)\n", (3179, 3207), False, 'from cloudify import ctx\n'), ((3608, 3641), 'cloudify_nsx.library.nsx_esg_dlr.remove_properties_edges', 'nsx_dlr.remove_properties_edges', ([], {}), '()\n', (3639, 3641), True, 'import cloudify_nsx.library.nsx_esg_dlr as nsx_dlr\n'), ((3650, 3686), 'cloudify.ctx.logger.info', 'ctx.logger.info', (['"""Used pre existed!"""'], {}), "('Used pre existed!')\n", (3665, 3686), False, 'from cloudify import ctx\n'), ((3804, 3837), 'cloudify_nsx.library.nsx_esg_dlr.remove_properties_edges', 'nsx_dlr.remove_properties_edges', ([], {}), '()\n', (3835, 3837), True, 'import cloudify_nsx.library.nsx_esg_dlr as nsx_dlr\n'), ((3846, 3889), 'cloudify.ctx.logger.info', 'ctx.logger.info', (['"""We dont have resource_id"""'], {}), "('We dont have resource_id')\n", (3861, 3889), False, 'from cloudify import ctx\n'), ((2185, 2233), 'cloudify.ctx.logger.info', 'ctx.logger.info', (["('Used existed %s' % resource_id)"], {}), "('Used existed %s' % resource_id)\n", (2200, 2233), False, 'from cloudify import ctx\n'), ((2278, 2357), 'cloudify.exceptions.NonRecoverableError', 'cfy_exc.NonRecoverableError', (['("Router \'%s\' already exists" % router_dict[\'name\'])'], {}), '("Router \'%s\' already exists" % router_dict[\'name\'])\n', (2305, 2357), True, 'from cloudify import exceptions as cfy_exc\n')] |
from asyncio import Future
from prompt_toolkit.layout.containers import HSplit
from prompt_toolkit.layout.dimension import D
from prompt_toolkit.widgets import Button, Dialog, Label
from constants import DIALOG_WIDTH
from custom_types.ui_types import PopUpDialog
from utils import display_path
class ConfirmDialog(PopUpDialog):
    """Yes/No confirmation pop-up whose outcome resolves an asyncio Future.

    Awaiting ``self.future`` yields True when the user confirms and False
    when they cancel.
    """

    def __init__(self, title: str, text: str):
        self.future = Future()

        def on_yes() -> None:
            """Resolve the dialog as confirmed."""
            self.future.set_result(True)

        def on_no() -> None:
            """Resolve the dialog as cancelled."""
            self.future.set_result(False)

        body = HSplit([Label(text=display_path(text))])
        self.dialog = Dialog(
            title=title,
            body=body,
            buttons=[
                Button(text="Yes", handler=on_yes),
                Button(text="No", handler=on_no),
            ],
            width=D(preferred=DIALOG_WIDTH),
            modal=True,
        )

    def __pt_container__(self):
        return self.dialog
| [
"prompt_toolkit.widgets.Button",
"utils.display_path",
"asyncio.Future",
"prompt_toolkit.layout.dimension.D"
] | [((444, 452), 'asyncio.Future', 'Future', ([], {}), '()\n', (450, 452), False, 'from asyncio import Future\n'), ((701, 737), 'prompt_toolkit.widgets.Button', 'Button', ([], {'text': '"""Yes"""', 'handler': 'set_done'}), "(text='Yes', handler=set_done)\n", (707, 737), False, 'from prompt_toolkit.widgets import Button, Dialog, Label\n'), ((758, 795), 'prompt_toolkit.widgets.Button', 'Button', ([], {'text': '"""No"""', 'handler': 'set_cancel'}), "(text='No', handler=set_cancel)\n", (764, 795), False, 'from prompt_toolkit.widgets import Button, Dialog, Label\n'), ((974, 999), 'prompt_toolkit.layout.dimension.D', 'D', ([], {'preferred': 'DIALOG_WIDTH'}), '(preferred=DIALOG_WIDTH)\n', (975, 999), False, 'from prompt_toolkit.layout.dimension import D\n'), ((888, 906), 'utils.display_path', 'display_path', (['text'], {}), '(text)\n', (900, 906), False, 'from utils import display_path\n')] |
import torch
import ignite.distributed as idist
from tests.ignite.distributed.utils import (
_sanity_check,
_test_distrib__get_max_length,
_test_distrib_all_gather,
_test_distrib_all_reduce,
_test_distrib_barrier,
_test_distrib_broadcast,
_test_sync,
)
def test_no_distrib(capsys):
    """Without a distributed backend, idist must report a serial setup.

    Checks both the programmatic getters and the human-readable output of
    ``idist.show_config()`` (captured from stderr, where the logger writes).
    """
    assert idist.backend() is None
    # Device falls back to the best locally available device.
    if torch.cuda.is_available():
        assert idist.device().type == "cuda"
    else:
        assert idist.device().type == "cpu"
    assert idist.get_rank() == 0
    assert idist.get_world_size() == 1
    assert idist.get_local_rank() == 0
    assert idist.model_name() == "serial"
    from ignite.distributed.utils import _model, _SerialModel
    _sanity_check()
    assert isinstance(_model, _SerialModel)
    idist.show_config()
    captured = capsys.readouterr()
    # The config is logged with carriage returns; split on "\r", strip, and
    # drop empty entries, then verify the content of the last record.
    out = captured.err.split("\r")
    out = list(map(lambda x: x.strip(), out))
    out = list(filter(None, out))
    assert "ignite.distributed.utils INFO: distributed configuration: serial" in out[-1]
    assert "ignite.distributed.utils INFO: backend: None" in out[-1]
    if torch.cuda.is_available():
        assert "ignite.distributed.utils INFO: device: cuda" in out[-1]
    else:
        assert "ignite.distributed.utils INFO: device: cpu" in out[-1]
    assert "ignite.distributed.utils INFO: rank: 0" in out[-1]
    assert "ignite.distributed.utils INFO: local rank: 0" in out[-1]
    assert "ignite.distributed.utils INFO: world size: 1" in out[-1]
def test_sync_no_dist():
    """The sync helper must work against the serial computation model."""
    from ignite.distributed.comp_models import _SerialModel
    _test_sync(_SerialModel)
def test_idist_methods_no_dist():
    """Basic idist getters must reflect a non-distributed (serial) setup."""
    world_size = idist.get_world_size()
    assert world_size < 2
    backend = idist.backend()
    assert backend is None, f"{backend}"
def test_idist__model_methods_no_dist():
    """Exercise _get_max_length on CPU and, when several GPUs exist, on CUDA."""
    devices = ["cpu"]
    if torch.cuda.device_count() > 1:
        devices.append("cuda")
    for device in devices:
        _test_distrib__get_max_length(device)
def test_idist_collective_ops_no_dist():
    """Run the collective-op checks on CPU, then on CUDA when multi-GPU.

    Order matches the original: all four checks on "cpu" first, then all
    four on "cuda".
    """
    checks = (
        _test_distrib_all_reduce,
        _test_distrib_all_gather,
        _test_distrib_barrier,
        _test_distrib_broadcast,
    )
    for check in checks:
        check("cpu")
    if torch.cuda.device_count() > 1:
        for check in checks:
            check("cuda")
| [
"ignite.distributed.get_rank",
"ignite.distributed.show_config",
"ignite.distributed.device",
"tests.ignite.distributed.utils._sanity_check",
"tests.ignite.distributed.utils._test_distrib_broadcast",
"tests.ignite.distributed.utils._test_distrib__get_max_length",
"torch.cuda.device_count",
"tests.igni... | [((356, 381), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (379, 381), False, 'import torch\n'), ((703, 718), 'tests.ignite.distributed.utils._sanity_check', '_sanity_check', ([], {}), '()\n', (716, 718), False, 'from tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, _test_distrib_broadcast, _test_sync\n'), ((768, 787), 'ignite.distributed.show_config', 'idist.show_config', ([], {}), '()\n', (785, 787), True, 'import ignite.distributed as idist\n'), ((1103, 1128), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1126, 1128), False, 'import torch\n'), ((1576, 1600), 'tests.ignite.distributed.utils._test_sync', '_test_sync', (['_SerialModel'], {}), '(_SerialModel)\n', (1586, 1600), False, 'from tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, _test_distrib_broadcast, _test_sync\n'), ((1779, 1815), 'tests.ignite.distributed.utils._test_distrib__get_max_length', '_test_distrib__get_max_length', (['"""cpu"""'], {}), "('cpu')\n", (1808, 1815), False, 'from tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, _test_distrib_broadcast, _test_sync\n'), ((1947, 1978), 'tests.ignite.distributed.utils._test_distrib_all_reduce', '_test_distrib_all_reduce', (['"""cpu"""'], {}), "('cpu')\n", (1971, 1978), False, 'from tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, _test_distrib_broadcast, _test_sync\n'), ((1983, 2014), 'tests.ignite.distributed.utils._test_distrib_all_gather', '_test_distrib_all_gather', (['"""cpu"""'], {}), "('cpu')\n", (2007, 2014), False, 'from 
tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, _test_distrib_broadcast, _test_sync\n'), ((2019, 2047), 'tests.ignite.distributed.utils._test_distrib_barrier', '_test_distrib_barrier', (['"""cpu"""'], {}), "('cpu')\n", (2040, 2047), False, 'from tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, _test_distrib_broadcast, _test_sync\n'), ((2052, 2082), 'tests.ignite.distributed.utils._test_distrib_broadcast', '_test_distrib_broadcast', (['"""cpu"""'], {}), "('cpu')\n", (2075, 2082), False, 'from tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, _test_distrib_broadcast, _test_sync\n'), ((325, 340), 'ignite.distributed.backend', 'idist.backend', ([], {}), '()\n', (338, 340), True, 'import ignite.distributed as idist\n'), ((493, 509), 'ignite.distributed.get_rank', 'idist.get_rank', ([], {}), '()\n', (507, 509), True, 'import ignite.distributed as idist\n'), ((526, 548), 'ignite.distributed.get_world_size', 'idist.get_world_size', ([], {}), '()\n', (546, 548), True, 'import ignite.distributed as idist\n'), ((565, 587), 'ignite.distributed.get_local_rank', 'idist.get_local_rank', ([], {}), '()\n', (585, 587), True, 'import ignite.distributed as idist\n'), ((604, 622), 'ignite.distributed.model_name', 'idist.model_name', ([], {}), '()\n', (620, 622), True, 'import ignite.distributed as idist\n'), ((1648, 1670), 'ignite.distributed.get_world_size', 'idist.get_world_size', ([], {}), '()\n', (1668, 1670), True, 'import ignite.distributed as idist\n'), ((1686, 1701), 'ignite.distributed.backend', 'idist.backend', ([], {}), '()\n', (1699, 1701), True, 'import ignite.distributed as idist\n'), ((1823, 1848), 'torch.cuda.device_count', 
'torch.cuda.device_count', ([], {}), '()\n', (1846, 1848), False, 'import torch\n'), ((1862, 1899), 'tests.ignite.distributed.utils._test_distrib__get_max_length', '_test_distrib__get_max_length', (['"""cuda"""'], {}), "('cuda')\n", (1891, 1899), False, 'from tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, _test_distrib_broadcast, _test_sync\n'), ((2091, 2116), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (2114, 2116), False, 'import torch\n'), ((2130, 2162), 'tests.ignite.distributed.utils._test_distrib_all_reduce', '_test_distrib_all_reduce', (['"""cuda"""'], {}), "('cuda')\n", (2154, 2162), False, 'from tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, _test_distrib_broadcast, _test_sync\n'), ((2171, 2203), 'tests.ignite.distributed.utils._test_distrib_all_gather', '_test_distrib_all_gather', (['"""cuda"""'], {}), "('cuda')\n", (2195, 2203), False, 'from tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, _test_distrib_broadcast, _test_sync\n'), ((2212, 2241), 'tests.ignite.distributed.utils._test_distrib_barrier', '_test_distrib_barrier', (['"""cuda"""'], {}), "('cuda')\n", (2233, 2241), False, 'from tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, _test_distrib_broadcast, _test_sync\n'), ((2250, 2281), 'tests.ignite.distributed.utils._test_distrib_broadcast', '_test_distrib_broadcast', (['"""cuda"""'], {}), "('cuda')\n", (2273, 2281), False, 'from tests.ignite.distributed.utils import _sanity_check, _test_distrib__get_max_length, _test_distrib_all_gather, _test_distrib_all_reduce, _test_distrib_barrier, 
_test_distrib_broadcast, _test_sync\n'), ((1714, 1729), 'ignite.distributed.backend', 'idist.backend', ([], {}), '()\n', (1727, 1729), True, 'import ignite.distributed as idist\n'), ((398, 412), 'ignite.distributed.device', 'idist.device', ([], {}), '()\n', (410, 412), True, 'import ignite.distributed as idist\n'), ((453, 467), 'ignite.distributed.device', 'idist.device', ([], {}), '()\n', (465, 467), True, 'import ignite.distributed as idist\n')] |
import os
import tempfile
import time
import cv2
import numpy as np
from PIL import Image
def calcVanishingPoint(lines):
    """Estimate the vanishing point of a set of 2D line segments.

    Args:
        lines: array of shape (N, >=4); columns 0-1 hold the segment start
            point (x1, y1) and columns 2-3 the end point (x2, y2).

    Returns:
        (2,) array: the (least-squares) intersection point of the lines.
    """
    points = lines[:, :2]
    # Unit direction of each segment, guarded against zero-length segments.
    normals = lines[:, 2:4] - lines[:, :2]
    normals /= np.maximum(np.linalg.norm(normals, axis=-1, keepdims=True), 1e-4)
    # Rotate directions by 90 degrees to obtain line normals.
    normals = np.stack([normals[:, 1], -normals[:, 0]], axis=1)
    # Each line satisfies normal . x == normal . point; solve for x.
    normalPointDot = (normals * points).sum(1)
    if lines.shape[0] == 2:
        # Exactly determined 2x2 system.
        VP = np.linalg.solve(normals, normalPointDot)
    else:
        # Overdetermined system: least-squares fit. rcond=None selects the
        # machine-precision cutoff and silences the legacy-default warning.
        VP = np.linalg.lstsq(normals, normalPointDot, rcond=None)[0]
    return VP
def calcVanishingPoints(allLines, numVPs):
    """Sequentially extract `numVPs` vanishing points via RANSAC.

    For each vanishing point: repeatedly sample two segments, intersect
    them, count inlier segments (weighted by segment length), refit the VP
    on the best inlier set, then remove those inliers before searching for
    the next VP.

    Args:
        allLines: (N, >=4) array of segments [x1, y1, x2, y2, ...].
        numVPs: number of vanishing points to extract.

    Returns:
        (VPs, VPLines, lines): the (numVPs, 2) vanishing points, a list of
        per-VP inlier segment arrays, and the remaining unassigned segments.
    """
    # A segment is an inlier when the sine of the angle between the segment
    # and the direction towards the candidate VP is below sin(5 degrees).
    distanceThreshold = np.sin(np.deg2rad(5))
    lines = allLines.copy()
    VPs = []
    VPLines = []
    for VPIndex in range(numVPs):
        points = lines[:, :2]
        lengths = np.linalg.norm(lines[:, 2:4] - lines[:, :2], axis=-1)
        # Unit segment directions, rotated 90 degrees into line normals.
        normals = lines[:, 2:4] - lines[:, :2]
        normals /= np.maximum(np.linalg.norm(normals, axis=-1, keepdims=True), 1e-4)
        normals = np.stack([normals[:, 1], -normals[:, 0]], axis=1)
        maxNumInliers = 0
        bestVP = np.zeros(2)
        #for _ in range(int(np.sqrt(lines.shape[0]))):
        for _ in range(min(pow(lines.shape[0], 2), 100)):
            # Minimal sample: two distinct segments define a candidate VP.
            sampledInds = np.random.choice(lines.shape[0], 2)
            if sampledInds[0] == sampledInds[1]:
                continue
            sampledLines = lines[sampledInds]
            try:
                VP = calcVanishingPoint(sampledLines)
            except:
                # Degenerate (parallel) pair: the 2x2 solve fails; skip it.
                continue
            # Normalized point-to-line residual of the VP against every segment.
            inliers = np.abs(((np.expand_dims(VP, 0) - points) * normals).sum(-1)) / np.linalg.norm(np.expand_dims(VP, 0) - points, axis=-1) < distanceThreshold
            # Score candidates by total inlier segment length, not count.
            numInliers = lengths[inliers].sum()
            if numInliers > maxNumInliers:
                maxNumInliers = numInliers
                bestVP = VP
                bestVPInliers = inliers
                pass
            continue
        if maxNumInliers > 0:
            # Refit the VP on all inliers of the best candidate.
            inlierLines = lines[bestVPInliers]
            VP = calcVanishingPoint(inlierLines)
            VPs.append(VP)
            #print(bestVP)
            #print(inlierLines)
            #print(VP)
            #exit(1)
            VPLines.append(inlierLines)
            # Consume the inliers so subsequent VPs come from the remainder.
            lines = lines[np.logical_not(bestVPInliers)]
            pass
        continue
    VPs = np.stack(VPs, axis=0)
    return VPs, VPLines, lines
def estimateFocalLength(image):
    """Estimate the camera focal length (in pixels) from a single image.

    Detects line segments with LSD, clusters them into three vanishing
    points, and averages the pairwise estimates sqrt(|VP_i . VP_j|).
    NOTE(review): that formula presumes the three VPs correspond to mutually
    orthogonal directions with the principal point at the origin
    (Manhattan-world assumption) — confirm against callers.
    """
    from pylsd.lsd import lsd
    height = image.shape[0]
    width = image.shape[1]
    # LSD operates on a single-channel image; average the color channels.
    lines = lsd(image.mean(2))
    # Debug visualization of the detected segments (write disabled below).
    lineImage = image.copy()
    for line in lines:
        cv2.line(lineImage, (int(line[0]), int(line[1])), (int(line[2]), int(line[3])), (0, 0, 255), int(np.ceil(line[4] / 2)))
        continue
    #cv2.imwrite('test/lines.png', lineImage)
    numVPs = 3
    VPs, VPLines, remainingLines = calcVanishingPoints(lines, numVPs=numVPs)
    #focalLength = (np.sqrt(np.linalg.norm(np.cross(VPs[0], VPs[1]))) + np.sqrt(np.linalg.norm(np.cross(VPs[0], VPs[2]))) + np.sqrt(np.linalg.norm(np.cross(VPs[1], VPs[2])))) / 3
    focalLength = (np.sqrt(np.abs(np.dot(VPs[0], VPs[1]))) + np.sqrt(np.abs(np.dot(VPs[0], VPs[2]))) + np.sqrt(np.abs(np.dot(VPs[1], VPs[2])))) / 3
    return focalLength
def PlaneDepthLayer(planes, ranges):
    """Convert plane parameters into per-ray depth maps.

    Each plane is encoded as normal * offset; for a viewing ray r the depth
    along the plane is offset / (-normal . r). Depths outside (0, 10] are
    saturated to 10.

    Args:
        planes: (K, 3) plane parameters, or (B, K, 3) for a batch.
        ranges: (..., 3) per-pixel ray directions.

    Returns:
        Depth array with one channel per plane (batch axis first if batched).
    """
    num_batches = 1
    if planes.ndim == 3:
        # Flatten the batch so all planes are processed in one matrix product.
        num_batches = planes.shape[0]
        planes = planes.reshape(-1, planes.shape[2])
    # Plane offset (distance to origin), clamped away from zero.
    offsets = np.maximum(np.linalg.norm(planes, 2, 1), 1e-4)
    unit_normals = -planes / offsets.reshape(-1, 1).repeat(3, 1)
    dots = np.dot(ranges, unit_normals.transpose())
    dots[dots == 0] = 1e-4
    depths = -(1 / dots)
    depths[:, :] *= offsets
    if num_batches > 1:
        # Restore the batch axis: (..., B, K) -> (B, ..., K).
        depths = depths.reshape(depths.shape[0], depths.shape[1], num_batches, -1).transpose([2, 0, 1, 3])
    # Invalid (negative) or far depths saturate at 10.
    depths[(depths < 0) | (depths > 10)] = 10
    return depths
def calcPlaneDepths(planes, width, height, info):
    """Render per-plane depth maps over a (height, width) pixel grid.

    `info` carries camera intrinsics: fx=info[0], fy=info[5], principal
    point (info[2], info[6]) and original image extents (info[16], info[17]).
    """
    cols = np.arange(width, dtype=np.float32).reshape(1, -1).repeat(height, 0)
    rows = np.arange(height, dtype=np.float32).reshape(-1, 1).repeat(width, 1)
    # Map pixel coordinates to the original image frame, centered on the
    # principal point.
    urange = cols / (width + 1) * (info[16] + 1) - info[2]
    vrange = rows / (height + 1) * (info[17] + 1) - info[6]
    # Per-pixel ray direction components (u/fx, 1, -v/fy).
    ranges = np.array([urange / info[0], np.ones(urange.shape), -vrange / info[5]]).transpose([1, 2, 0])
    return PlaneDepthLayer(planes, ranges)
def drawDepthImage(depth):
    """Visualize a depth map as an inverted grayscale uint8 image.

    Depths are scaled so that 5 (meters) maps to full range; nearer pixels
    come out brighter.
    """
    scaled = np.clip(depth / 5 * 255, 0, 255).astype(np.uint8)
    return 255 - scaled
class ColorPalette:
    """Palette of visually distinct RGB colors for rendering label maps."""

    def __init__(self, numColors):
        """Build a palette with at least `numColors` entries.

        Uses 22 hand-picked distinct colors; if more are requested, the whole
        map is replaced with random colors.
        """
        self.colorMap = np.array([[255, 0, 0],
                                  [0, 255, 0],
                                  [0, 0, 255],
                                  [80, 128, 255],
                                  [255, 230, 180],
                                  [255, 0, 255],
                                  [0, 255, 255],
                                  [100, 0, 0],
                                  [0, 100, 0],
                                  [255, 255, 0],
                                  [50, 150, 0],
                                  [200, 255, 255],
                                  [255, 200, 255],
                                  [128, 128, 80],
                                  [0, 50, 128],
                                  [0, 100, 100],
                                  [0, 255, 128],
                                  [0, 128, 255],
                                  [255, 0, 128],
                                  [128, 0, 255],
                                  [255, 128, 0],
                                  [128, 255, 0],
                                  ])
        if numColors > self.colorMap.shape[0]:
            # Not enough hand-picked colors: fall back to random ones.
            self.colorMap = np.random.randint(255, size=(numColors, 3))
        return

    def getColorMap(self):
        """Return the full (numColors, 3) color map array."""
        return self.colorMap

    def getColor(self, index):
        """Return the RGB color for `index`; a random color if out of range."""
        # Bug fix: the original referenced the undefined global `colorMap`,
        # raising NameError whenever this branch was reached.
        if index >= self.colorMap.shape[0]:
            return np.random.randint(255, size=(3))
        else:
            return self.colorMap[index]
        pass
def drawSegmentationImage(segmentations, randomColor=None, numColors=22, blackIndex=-1):
    # Render a segmentation as an RGB image using a ColorPalette.
    # `segmentations` is either an (H, W) integer label map or an (H, W, C)
    # per-class score volume (argmax taken over the last axis).
    # `blackIndex` selects one label to be drawn black; `randomColor` is
    # ignored on input and rebuilt from the palette below.
    if segmentations.ndim == 2:
        numColors = max(numColors, segmentations.max() + 2, blackIndex + 1)
    else:
        numColors = max(numColors, segmentations.shape[2] + 2, blackIndex + 1)
        pass
    randomColor = ColorPalette(numColors).getColorMap()
    if blackIndex >= 0:
        randomColor[blackIndex] = 0
        pass
    width = segmentations.shape[1]
    height = segmentations.shape[0]
    if segmentations.ndim == 3:
        #segmentation = (np.argmax(segmentations, 2) + 1) * (np.max(segmentations, 2) > 0.5)
        segmentation = np.argmax(segmentations, 2)
    else:
        segmentation = segmentations
        pass
    # NOTE(review): np.int was removed in NumPy >= 1.24; this should be plain
    # int (or np.int64) on modern NumPy.
    segmentation = segmentation.astype(np.int)
    return randomColor[segmentation.reshape(-1)].reshape((height, width, 3)) | [
"numpy.clip",
"numpy.ceil",
"numpy.linalg.solve",
"numpy.ones",
"numpy.random.choice",
"numpy.logical_not",
"numpy.argmax",
"numpy.stack",
"numpy.deg2rad",
"numpy.zeros",
"numpy.array",
"numpy.random.randint",
"numpy.linalg.lstsq",
"numpy.linalg.norm",
"numpy.dot",
"numpy.expand_dims",... | [((288, 337), 'numpy.stack', 'np.stack', (['[normals[:, 1], -normals[:, 0]]'], {'axis': '(1)'}), '([normals[:, 1], -normals[:, 0]], axis=1)\n', (296, 337), True, 'import numpy as np\n'), ((2329, 2350), 'numpy.stack', 'np.stack', (['VPs'], {'axis': '(0)'}), '(VPs, axis=0)\n', (2337, 2350), True, 'import numpy as np\n'), ((3445, 3473), 'numpy.linalg.norm', 'np.linalg.norm', (['planes', '(2)', '(1)'], {}), '(planes, 2, 1)\n', (3459, 3473), True, 'import numpy as np\n'), ((3486, 3513), 'numpy.maximum', 'np.maximum', (['planesD', '(0.0001)'], {}), '(planesD, 0.0001)\n', (3496, 3513), True, 'import numpy as np\n'), ((219, 266), 'numpy.linalg.norm', 'np.linalg.norm', (['normals'], {'axis': '(-1)', 'keepdims': '(True)'}), '(normals, axis=-1, keepdims=True)\n', (233, 266), True, 'import numpy as np\n'), ((427, 467), 'numpy.linalg.solve', 'np.linalg.solve', (['normals', 'normalPointDot'], {}), '(normals, normalPointDot)\n', (442, 467), True, 'import numpy as np\n'), ((638, 651), 'numpy.deg2rad', 'np.deg2rad', (['(5)'], {}), '(5)\n', (648, 651), True, 'import numpy as np\n'), ((793, 846), 'numpy.linalg.norm', 'np.linalg.norm', (['(lines[:, 2:4] - lines[:, :2])'], {'axis': '(-1)'}), '(lines[:, 2:4] - lines[:, :2], axis=-1)\n', (807, 846), True, 'import numpy as np\n'), ((997, 1046), 'numpy.stack', 'np.stack', (['[normals[:, 1], -normals[:, 0]]'], {'axis': '(1)'}), '([normals[:, 1], -normals[:, 0]], axis=1)\n', (1005, 1046), True, 'import numpy as np\n'), ((1090, 1101), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (1098, 1101), True, 'import numpy as np\n'), ((4933, 5286), 'numpy.array', 'np.array', (['[[255, 0, 0], [0, 255, 0], [0, 0, 255], [80, 128, 255], [255, 230, 180], [\n 255, 0, 255], [0, 255, 255], [100, 0, 0], [0, 100, 0], [255, 255, 0], [\n 50, 150, 0], [200, 255, 255], [255, 200, 255], [128, 128, 80], [0, 50, \n 128], [0, 100, 100], [0, 255, 128], [0, 128, 255], [255, 0, 128], [128,\n 0, 255], [255, 128, 0], [128, 255, 0]]'], {}), 
'([[255, 0, 0], [0, 255, 0], [0, 0, 255], [80, 128, 255], [255, 230,\n 180], [255, 0, 255], [0, 255, 255], [100, 0, 0], [0, 100, 0], [255, 255,\n 0], [50, 150, 0], [200, 255, 255], [255, 200, 255], [128, 128, 80], [0,\n 50, 128], [0, 100, 100], [0, 255, 128], [0, 128, 255], [255, 0, 128], [\n 128, 0, 255], [255, 128, 0], [128, 255, 0]])\n', (4941, 5286), True, 'import numpy as np\n'), ((7335, 7362), 'numpy.argmax', 'np.argmax', (['segmentations', '(2)'], {}), '(segmentations, 2)\n', (7344, 7362), True, 'import numpy as np\n'), ((491, 531), 'numpy.linalg.lstsq', 'np.linalg.lstsq', (['normals', 'normalPointDot'], {}), '(normals, normalPointDot)\n', (506, 531), True, 'import numpy as np\n'), ((924, 971), 'numpy.linalg.norm', 'np.linalg.norm', (['normals'], {'axis': '(-1)', 'keepdims': '(True)'}), '(normals, axis=-1, keepdims=True)\n', (938, 971), True, 'import numpy as np\n'), ((1241, 1276), 'numpy.random.choice', 'np.random.choice', (['lines.shape[0]', '(2)'], {}), '(lines.shape[0], 2)\n', (1257, 1276), True, 'import numpy as np\n'), ((6343, 6386), 'numpy.random.randint', 'np.random.randint', (['(255)'], {'size': '(numColors, 3)'}), '(255, size=(numColors, 3))\n', (6360, 6386), True, 'import numpy as np\n'), ((6581, 6611), 'numpy.random.randint', 'np.random.randint', (['(255)'], {'size': '(3)'}), '(255, size=3)\n', (6598, 6611), True, 'import numpy as np\n'), ((2254, 2283), 'numpy.logical_not', 'np.logical_not', (['bestVPInliers'], {}), '(bestVPInliers)\n', (2268, 2283), True, 'import numpy as np\n'), ((2696, 2716), 'numpy.ceil', 'np.ceil', (['(line[4] / 2)'], {}), '(line[4] / 2)\n', (2703, 2716), True, 'import numpy as np\n'), ((4664, 4696), 'numpy.clip', 'np.clip', (['(depth / 5 * 255)', '(0)', '(255)'], {}), '(depth / 5 * 255, 0, 255)\n', (4671, 4696), True, 'import numpy as np\n'), ((3172, 3194), 'numpy.dot', 'np.dot', (['VPs[1]', 'VPs[2]'], {}), '(VPs[1], VPs[2])\n', (3178, 3194), True, 'import numpy as np\n'), ((4382, 4403), 'numpy.ones', 'np.ones', 
(['urange.shape'], {}), '(urange.shape)\n', (4389, 4403), True, 'import numpy as np\n'), ((3088, 3110), 'numpy.dot', 'np.dot', (['VPs[0]', 'VPs[1]'], {}), '(VPs[0], VPs[1])\n', (3094, 3110), True, 'import numpy as np\n'), ((3130, 3152), 'numpy.dot', 'np.dot', (['VPs[0]', 'VPs[2]'], {}), '(VPs[0], VPs[2])\n', (3136, 3152), True, 'import numpy as np\n'), ((1614, 1635), 'numpy.expand_dims', 'np.expand_dims', (['VP', '(0)'], {}), '(VP, 0)\n', (1628, 1635), True, 'import numpy as np\n'), ((4109, 4143), 'numpy.arange', 'np.arange', (['width'], {'dtype': 'np.float32'}), '(width, dtype=np.float32)\n', (4118, 4143), True, 'import numpy as np\n'), ((4231, 4266), 'numpy.arange', 'np.arange', (['height'], {'dtype': 'np.float32'}), '(height, dtype=np.float32)\n', (4240, 4266), True, 'import numpy as np\n'), ((1545, 1566), 'numpy.expand_dims', 'np.expand_dims', (['VP', '(0)'], {}), '(VP, 0)\n', (1559, 1566), True, 'import numpy as np\n')] |
import os
from github import Github

# Collect organization-wide GitHub statistics for orbitdb: the number of
# unique stargazers, contributors, and subscribers across all repositories.
g = Github(os.environ['GITHUB_TOKEN'])

stargazers = set()
contributors = set()
subscribers = set()

for repository in g.get_organization('orbitdb').get_repos():
    # set.update() deduplicates users appearing in multiple repos.
    stargazers.update(repository.get_stargazers())
    contributors.update(repository.get_contributors())
    subscribers.update(repository.get_subscribers())

print("Stargazers: ", len(stargazers))
print("Contributors: ", len(contributors))
print("Subscribers: ", len(subscribers))

# Manually calculated 2020-09-03
print("Used in: ", 1835)
| [
"github.Github"
] | [((41, 75), 'github.Github', 'Github', (["os.environ['GITHUB_TOKEN']"], {}), "(os.environ['GITHUB_TOKEN'])\n", (47, 75), False, 'from github import Github\n')] |
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
TEMPLATE_DIR = os.path.join(BASE_DIR, 'templates')
class Template:
    """Minimal file-based string template renderer.

    Loads a template file from TEMPLATE_DIR and renders it with
    ``str.format``, substituting ``{key}`` placeholders from a context dict.
    """

    template_name = ""
    context = None

    def __init__(self, template_name="", context=None, *args, **kwargs):
        self.template_name = template_name
        # An instance-level context takes precedence over the one passed to
        # render().
        self.context = context

    def get_template(self):
        """Return the raw template text.

        Raises:
            FileNotFoundError: if the template file does not exist.
        """
        template_path = os.path.join(TEMPLATE_DIR, self.template_name)
        if not os.path.exists(template_path):
            # More specific than the original bare Exception (and fixes the
            # message typo); still caught by callers handling Exception.
            raise FileNotFoundError("The path does not exist: %s" % template_path)
        with open(template_path, 'r') as file_object:
            return file_object.read()

    def render(self, context=None):
        """Render the template with `context` (or self.context if set)."""
        render_context = self.context if self.context is not None else context
        if not isinstance(render_context, dict):
            # No usable context: render with no substitutions available.
            render_context = {}
        return self.get_template().format(**render_context)
| [
"os.path.abspath",
"os.path.exists",
"os.path.join"
] | [((80, 115), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (92, 115), False, 'import os\n'), ((38, 63), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (53, 63), False, 'import os\n'), ((346, 392), 'os.path.join', 'os.path.join', (['TEMPLATE_DIR', 'self.template_name'], {}), '(TEMPLATE_DIR, self.template_name)\n', (358, 392), False, 'import os\n'), ((402, 431), 'os.path.exists', 'os.path.exists', (['template_path'], {}), '(template_path)\n', (416, 431), False, 'import os\n')] |
from __future__ import print_function
import argparse
from collections import OrderedDict
import json
import os
import logging
from keras.callbacks import EarlyStopping
from sklearn.preprocessing import normalize
from sklearn.metrics import roc_curve, auc, roc_auc_score, precision_score, recall_score, f1_score, accuracy_score, average_precision_score
from scipy.sparse import csr_matrix
from keras.utils.io_utils import HDF5Matrix
#from keras.utils.visualize_util import plot
from keras.optimizers import SGD, Adam
from sklearn.metrics import r2_score
import numpy as np
import theano.tensor as tt
import pandas as pd
import random
import common
import models
from predict import obtain_predictions
from eval import do_eval
import h5py
class Config(object):
    """Configuration for the training process."""

    def __init__(self, params, normalize=False, whiten=True):
        dataset = params['dataset']
        # Fresh identifier for this training run.
        self.model_id = common.get_next_model_id()
        self.norm = normalize
        self.whiten = whiten
        # Identifiers of the cached feature/target matrices on disk.
        self.x_path = '%s_%sx%s' % (dataset['dataset'], dataset['npatches'], dataset['window'])
        self.y_path = '%s_%s_%s' % (dataset['fact'], dataset['dim'], dataset['dataset'])
        self.dataset_settings = dataset
        self.training_params = params['training']
        self.model_arch = params['cnn']
        self.predicting_params = params['predicting']

    def get_dict(self):
        """Return all config attributes as an OrderedDict, model_id first."""
        conf_dict = OrderedDict(model_id=self.__dict__["model_id"])
        conf_dict.update(self.__dict__)
        return conf_dict
def _squared_magnitude(x):
    # Sum of squares over the last axis (symbolic Theano expression).
    return tt.sqr(x).sum(axis=-1)
def _magnitude(x):
    # Euclidean norm over the last axis, clamped to the smallest positive
    # value of x's dtype so the sqrt (and its gradient) stay finite.
    return tt.sqrt(tt.maximum(_squared_magnitude(x), np.finfo(x.dtype).tiny))
def cosine(x, y):
    # Cosine distance rescaled to [0, 1]: 0 for parallel vectors, 1 for
    # anti-parallel. Used as a Keras loss (see build_model's eval of
    # loss_func == 'cosine').
    return tt.clip((1 - (x * y).sum(axis=-1) /
                   (_magnitude(x) * _magnitude(y))) / 2, 0, 1)
def load_sparse_csr(filename):
    """Load a scipy CSR matrix stored in an .npz archive.

    The archive must contain the 'data', 'indices', 'indptr' and 'shape'
    arrays (the raw attributes of a CSR matrix).

    Args:
        filename: path to the .npz file.

    Returns:
        scipy.sparse.csr_matrix reconstructed from the archive.
    """
    # Context manager closes the underlying file handle (the original left
    # the NpzFile open, leaking the descriptor).
    with np.load(filename) as loader:
        return csr_matrix((loader['data'], loader['indices'], loader['indptr']),
                          shape=loader['shape'])
def build_model(config):
    """Builds the cnn.

    Looks up the architecture factory in `models` by name, instantiates the
    network, and compiles it with the optimizer/loss named in the training
    config.

    Args:
        config: Config instance (uses model_arch and training_params).

    Returns:
        A compiled Keras model.
    """
    params = config.model_arch
    get_model = getattr(models, 'get_model_'+str(params['architecture']))
    model = get_model(params)
    #model = model_kenun.build_convnet_model(params)
    # Learning setup
    t_params = config.training_params
    # Both optimizers are instantiated up front because the config names one
    # of the locals below as a string and eval() resolves it. Do not rename
    # `sgd`/`adam` or remove the seemingly unused one.
    sgd = SGD(lr=t_params["learning_rate"], decay=t_params["decay"],
              momentum=t_params["momentum"], nesterov=t_params["nesterov"])
    adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
    # HACK: eval() on a config string — acceptable for trusted configs,
    # unsafe if the config ever comes from untrusted input.
    optimizer = eval(t_params['optimizer'])
    metrics = ['mean_squared_error']
    if config.model_arch["final_activation"] == 'softmax':
        metrics.append('categorical_accuracy')
    if t_params['loss_func'] == 'cosine':
        # 'cosine' maps to the Theano-based cosine() defined in this module.
        loss_func = eval(t_params['loss_func'])
    else:
        loss_func = t_params['loss_func']
    model.compile(loss=loss_func, optimizer=optimizer,metrics=metrics)
    return model
def load_data_preprocesed(params, X_path, Y_path, dataset, val_percent, test_percent, n_samples, with_metadata=False, only_metadata=False, metadata_source='rovi'):
    """Load precomputed train/val/test feature and target matrices.

    Targets (latent factors) come from DATASETS_DIR; features come from
    audio patches (X_path), metadata matrices (metadata_source), or both.
    Splits are read from dedicated files when
    params['training']['val_from_file'] is set, otherwise carved out of the
    training matrix using val_percent/test_percent.

    Returns:
        (X_train, Y_train, X_val, Y_val, X_test, Y_test); the X_* entries
        are [audio, metadata] lists when both feature kinds are used.
    """
    factors = np.load(common.DATASETS_DIR+'/y_train_'+Y_path+'.npy') # OJO remove S
    index_factors = open(common.DATASETS_DIR+'/items_index_train_'+dataset+'.tsv').read().splitlines()
    if not only_metadata:
        all_X = np.load(common.TRAINDATA_DIR+'/X_train_'+X_path+'.npy')
        index_train = open(common.TRAINDATA_DIR+'/index_train_%s.tsv' % (X_path)).read().splitlines()
        # Re-align the factor rows to the ordering of the training index.
        all_Y = np.zeros((len(index_train),factors.shape[1]))
        index_factors_inv = dict()
        for i,item in enumerate(index_factors):
            index_factors_inv[item] = i
        for i,item in enumerate(index_train):
            all_Y[i,:] = factors[index_factors_inv[item]]
    else:
        all_Y = factors
    if with_metadata:
        if 'w2v' in metadata_source:
            # Dense word2vec features, truncated to the model's sequence length.
            all_X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,dataset))[:,:int(params['cnn']['sequence_length'])]
        elif 'model' in metadata_source or not params['dataset']['sparse']:
            all_X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,dataset))
        else:
            # Sparse metadata stored as a CSR archive; densified here.
            all_X_meta = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (metadata_source,dataset)).todense()
        all_X_in_meta = all_X = all_X_meta
    print(all_X.shape)
    print(all_Y.shape)
    if n_samples != 'all':
        n_samples = int(n_samples)
        all_X = all_X[:n_samples]
        all_Y = all_Y[:n_samples]
        if with_metadata:
            all_X_in_meta = all_X_in_meta[:n_samples]
    if params['training']['normalize_y'] == True:
        # In-place L2 normalization of the target factors.
        normalize(all_Y,copy=False)
    if params['training']["val_from_file"]:
        Y_val = np.load(common.DATASETS_DIR+'/y_val_'+Y_path+'.npy')
        Y_test = np.load(common.DATASETS_DIR+'/y_test_'+Y_path+'.npy') #!!! OJO remove S from trainS
        if params['dataset']['sparse']:
            X_val = load_sparse_csr(common.TRAINDATA_DIR+'/X_val_%s_%s.npz' % (metadata_source,dataset)).todense()
            X_test = load_sparse_csr(common.TRAINDATA_DIR+'/X_test_%s_%s.npz' % (metadata_source,dataset)).todense()
        else:
            X_val = np.load(common.TRAINDATA_DIR+'/X_val_%s_%s.npy' % (metadata_source,dataset))
            X_test = np.load(common.TRAINDATA_DIR+'/X_test_%s_%s.npy' % (metadata_source,dataset))
        X_train = all_X
        Y_train = all_Y
    else:
        # Positional split: [train | val | test] within the training matrix.
        N = all_Y.shape[0]
        train_percent = 1 - val_percent - test_percent
        N_train = int(train_percent * N)
        N_val = int(val_percent * N)
        logging.debug("Training data points: %d" % N_train)
        logging.debug("Validation data points: %d" % N_val)
        logging.debug("Test data points: %d" % (N - N_train - N_val))
        if not only_metadata:
            # Slice data
            X_train = all_X[:N_train]
            X_val = all_X[N_train:N_train + N_val]
            X_test = all_X[N_train + N_val:]
        Y_train = all_Y[:N_train]
        Y_val = all_Y[N_train:N_train + N_val]
        Y_test = all_Y[N_train + N_val:]
        if with_metadata:
            if only_metadata:
                X_train = all_X_in_meta[:N_train]
                X_val = all_X_in_meta[N_train:N_train + N_val]
                X_test = all_X_in_meta[N_train + N_val:]
            else:
                # Pair audio features with metadata features per split.
                X_train = [X_train,all_X_in_meta[:N_train]]
                X_val = [X_val,all_X_in_meta[N_train:N_train + N_val]]
                X_test = [X_test,all_X_in_meta[N_train + N_val:]]
    return X_train, Y_train, X_val, Y_val, X_test, Y_test
def load_data_hf5(params,val_percent, test_percent):
    """Split the patches HDF5 file into lazy train/val/test HDF5Matrix views.

    Returns:
        (X_train, Y_train, X_val, Y_val, X_test, Y_test, N_train) — the
        matrices are on-disk views, not in-memory arrays.
    """
    dataset_cfg = params['dataset']
    hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%s.hdf5" % (dataset_cfg['dataset'], dataset_cfg['window'])
    # Only the total sample count is needed from the raw file.
    with h5py.File(hdf5_file, "r") as handle:
        total = handle["targets"].shape[0]
    N_train = int((1 - val_percent - test_percent) * total)
    N_val = int(val_percent * total)
    bounds = [(0, N_train), (N_train, N_train + N_val), (N_train + N_val, total)]
    (X_train, Y_train), (X_val, Y_val), (X_test, Y_test) = [
        (HDF5Matrix(hdf5_file, 'features', start=lo, end=hi),
         HDF5Matrix(hdf5_file, 'targets', start=lo, end=hi))
        for lo, hi in bounds
    ]
    return X_train, Y_train, X_val, Y_val, X_test, Y_test, N_train
def load_data_hf5_memory(params,val_percent, test_percent, y_path, id2gt, X_meta = None, val_from_file = False):
    """Load validation and test splits into memory from the patches HDF5 files.

    Training features stay on disk (streamed later by batch_block_generator);
    only the val/test features and targets are materialized here. Entries
    whose index is empty (no ground truth) are dropped.

    Returns:
        (X_val, Y_val, X_test, Y_test, N_train) where N_train is the number
        of usable training patches.
    """
    if val_from_file:
        # Dedicated val/test HDF5 files plus factor matrices on disk.
        hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
        f = h5py.File(hdf5_file,"r")
        index_train = f["index"][:]
        index_train = np.delete(index_train, np.where(index_train == ""))
        N_train = index_train.shape[0]
        val_hdf5_file = common.PATCHES_DIR+"/patches_val_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
        f_val = h5py.File(val_hdf5_file,"r")
        X_val = f_val['features'][:]
        #Y_val = f_val['targets'][:]
        factors_val = np.load(common.DATASETS_DIR+'/y_val_'+y_path+'.npy')
        index_factors_val = open(common.DATASETS_DIR+'/items_index_val_'+params['dataset']['dataset']+'.tsv').read().splitlines()
        id2gt_val = dict((index,factor) for (index,factor) in zip(index_factors_val,factors_val))
        # Keep only patches that have a ground-truth factor vector.
        index_val = [i for i in f_val['index'][:] if i in id2gt_val]
        # NOTE(review): index_val is a Python list here, so `index_val == ""`
        # is a scalar False and these np.delete calls remove nothing
        # (presumably harmless — entries were already filtered above).
        X_val = np.delete(X_val, np.where(index_val == ""), axis=0)
        index_val = np.delete(index_val, np.where(index_val == ""))
        Y_val = np.asarray([id2gt_val[id] for id in index_val])
        test_hdf5_file = common.PATCHES_DIR+"/patches_test_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
        f_test = h5py.File(test_hdf5_file,"r")
        X_test = f_test['features'][:]
        #Y_test = f_test['targets'][:]
        factors_test = np.load(common.DATASETS_DIR+'/y_test_'+y_path+'.npy')
        index_factors_test = open(common.DATASETS_DIR+'/items_index_test_'+params['dataset']['dataset']+'.tsv').read().splitlines()
        id2gt_test = dict((index,factor) for (index,factor) in zip(index_factors_test,factors_test))
        index_test = [i for i in f_test['index'][:] if i in id2gt_test]
        X_test = np.delete(X_test, np.where(index_test == ""), axis=0)
        index_test = np.delete(index_test, np.where(index_test == ""))
        Y_test = np.asarray([id2gt_test[id] for id in index_test])
    else:
        # Single training file: slice val/test out of it positionally.
        hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
        f = h5py.File(hdf5_file,"r")
        index_all = f["index"][:]
        N = index_all.shape[0]
        train_percent = 1 - val_percent - test_percent
        N_train = int(train_percent * N)
        N_val = int(val_percent * N)
        X_val = f['features'][N_train:N_train+N_val]
        index_val = f['index'][N_train:N_train+N_val]
        X_val = np.delete(X_val, np.where(index_val == ""), axis=0)
        index_val = np.delete(index_val, np.where(index_val == ""))
        Y_val = np.asarray([id2gt[id] for id in index_val])
        X_test = f['features'][N_train+N_val:N]
        index_test = f['index'][N_train+N_val:N]
        print(index_test.shape)
        print(X_test.shape)
        X_test = np.delete(X_test, np.where(index_test == ""), axis=0)
        index_test = np.delete(index_test, np.where(index_test == ""))
        print(index_test.shape)
        print(X_test.shape)
        Y_test = np.asarray([id2gt[id] for id in index_test])
        print(Y_test.shape)
        index_train = f['index'][:N_train]
        index_train = np.delete(index_train, np.where(index_train == ""))
        N_train = index_train.shape[0]
    # NOTE(review): `X_meta != None` is an elementwise comparison when X_meta
    # is a numpy array (and N_val/N are unbound in the val_from_file branch);
    # `X_meta is not None` is the intended check — verify before changing.
    if X_meta != None:
        X_val = [X_val,X_meta[N_train:N_train+N_val]]
        X_test = [X_test,X_meta[N_train+N_val:N]]
    return X_val, Y_val, X_test, Y_test, N_train
def batch_block_generator(params, y_path, N_train, id2gt, X_meta=None,
                          val_from_file=False):
    """Yield (x_batch, y_batch) minibatches from the patches HDF5 file forever.

    Reads `block_step` patches at a time to bound memory, drops entries with
    an empty index (no ground truth), optionally shuffles within each block,
    and pairs metadata features with the patches when X_meta is given.

    Args:
        params: full experiment config dict.
        y_path: unused here (targets come from id2gt); kept for call
            compatibility.
        N_train: number of training patches to cycle over.
        id2gt: dict mapping item index -> ground-truth factor vector.
        X_meta: optional metadata matrix aligned with the patches.
        val_from_file: unused here; kept for call compatibility.

    Yields:
        (x_batch, y_batch) tuples of size params['training']['n_minibatch'];
        x_batch is [patches, metadata] when X_meta is given.
    """
    hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
    f = h5py.File(hdf5_file,"r")
    block_step = 50000
    batch_size = params['training']['n_minibatch']
    randomize = True
    # Bug fix: `X_meta != None` is an elementwise comparison for numpy
    # arrays (raising on truth-testing); identity check is the correct test.
    with_meta = X_meta is not None
    while 1:
        for i in range(0, N_train, block_step):
            x_block = f['features'][i:min(N_train, i+block_step)]
            index_block = f['index'][i:min(N_train, i+block_step)]
            # Drop patches without ground truth (empty index).
            x_block = np.delete(x_block, np.where(index_block == ""), axis=0)
            index_block = np.delete(index_block, np.where(index_block == ""))
            y_block = np.asarray([id2gt[id] for id in index_block])
            if params['training']['normalize_y']:
                # In-place L2 normalization of the targets.
                normalize(y_block, copy=False)
            # Bug fix: range objects are not shuffleable on Python 3;
            # materialize a list before random.shuffle.
            items_list = list(range(x_block.shape[0]))
            if randomize:
                random.shuffle(items_list)
            for j in range(0, len(items_list), batch_size):
                # Incomplete trailing batches are skipped.
                if j+batch_size <= x_block.shape[0]:
                    items_in_batch = items_list[j:j+batch_size]
                    x_batch = x_block[items_in_batch]
                    y_batch = y_block[items_in_batch]
                    if with_meta:
                        x_batch = [x_batch, X_meta[items_in_batch]]
                    yield (x_batch, y_batch)
def process(params,with_predict=True,with_eval=True):
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG)
params['cnn']['n_out'] = int(params['dataset']['dim'])
#params['cnn']['n_frames'] = int(params['dataset']['window'] * SR / float(HR))
with_metadata = params['dataset']['with_metadata']
only_metadata = params['dataset']['only_metadata']
metadata_source = params['dataset']['meta-suffix']
if with_metadata:
if 'w2v' in metadata_source:
X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,params['dataset']['dataset']))[:,:int(params['cnn']['sequence_length'])]
params['cnn']['n_metafeatures'] = len(X_meta[0])
if 'meta-suffix2' in params['dataset']:
X_meta2 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix2'],params['dataset']['dataset']))
params['cnn']['n_metafeatures2'] = len(X_meta2[0])
if 'meta-suffix3' in params['dataset']:
X_meta3 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix3'],params['dataset']['dataset']))
params['cnn']['n_metafeatures3'] = len(X_meta3[0])
if 'meta-suffix4' in params['dataset']:
X_meta4 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix4'],params['dataset']['dataset']))
params['cnn']['n_metafeatures4'] = len(X_meta4[0])
elif 'model' in metadata_source or not params['dataset']['sparse']:
X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,params['dataset']['dataset']))
params['cnn']['n_metafeatures'] = len(X_meta[0])
if 'meta-suffix2' in params['dataset']:
X_meta2 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix2'],params['dataset']['dataset']))
params['cnn']['n_metafeatures2'] = len(X_meta2[0])
if 'meta-suffix3' in params['dataset']:
X_meta3 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix3'],params['dataset']['dataset']))
params['cnn']['n_metafeatures3'] = len(X_meta3[0])
if 'meta-suffix4' in params['dataset']:
X_meta4 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix4'],params['dataset']['dataset']))
params['cnn']['n_metafeatures4'] = len(X_meta4[0])
else:
X_meta = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (metadata_source,params['dataset']['dataset'])).todense()
params['cnn']['n_metafeatures'] = X_meta.shape[1]
if 'meta-suffix2' in params['dataset']:
X_meta2 = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (params['dataset']['meta-suffix2'],params['dataset']['dataset']))
params['cnn']['n_metafeatures2'] = X_meta2.shape[1]
if 'meta-suffix3' in params['dataset']:
X_meta3 = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (params['dataset']['meta-suffix3'],params['dataset']['dataset']))
params['cnn']['n_metafeatures3'] = len(X_meta3[0])
if 'meta-suffix4' in params['dataset']:
X_meta4 = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (params['dataset']['meta-suffix4'],params['dataset']['dataset']))
params['cnn']['n_metafeatures3'] = len(X_meta4[0])
print(X_meta.shape)
else:
X_meta = None
config = Config(params)
model_dir = os.path.join(common.MODELS_DIR, config.model_id)
common.ensure_dir(common.MODELS_DIR)
common.ensure_dir(model_dir)
model_file = os.path.join(model_dir, config.model_id + common.MODEL_EXT)
logging.debug("Building Network...")
#model = build_model(config)
model = build_model(config)
print(model.summary())
#plot(model, to_file='model2.png', show_shapes=True)
trained_model = config.get_dict()
# Save model
#plot(model, to_file=os.path.join(model_dir, config.model_id + PLOT_EXT))
common.save_model(model, model_file)
logging.debug(trained_model["model_id"])
logging.debug("Loading Data...")
with_generator = True
if only_metadata:
X_train, Y_train, X_val, Y_val, X_test, Y_test = \
load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"],
config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, metadata_source)
if 'meta-suffix2' in params['dataset']:
X_train2, Y_train2, X_val2, Y_val2, X_test2, Y_test2 = \
load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"],
config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, params['dataset']['meta-suffix2'])
X_train = [X_train,X_train2]
X_val = [X_val,X_val2]
X_test = [X_test,X_test2]
print("X_train bi", len(X_train))
if 'meta-suffix3' in params['dataset']:
X_train3, Y_train3, X_val3, Y_val3, X_test3, Y_test3 = \
load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"],
config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, params['dataset']['meta-suffix3'])
X_train.append(X_train3)
X_val.append(X_val3)
X_test.append(X_test3)
print("X_train tri", len(X_train))
if 'meta-suffix4' in params['dataset']:
X_train4, Y_train4, X_val4, Y_val4, X_test4, Y_test4 = \
load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"],
config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, params['dataset']['meta-suffix4'])
X_train.append(X_train4)
X_val.append(X_val4)
X_test.append(X_test4)
print("X_train four", len(X_train))
else:
if with_generator:
id2gt = dict()
factors = np.load(common.DATASETS_DIR+'/y_train_'+config.y_path+'.npy')
index_factors = open(common.DATASETS_DIR+'/items_index_train_'+params['dataset']['dataset']+'.tsv').read().splitlines()
id2gt = dict((index,factor) for (index,factor) in zip(index_factors,factors))
X_val, Y_val, X_test, Y_test, N_train = load_data_hf5_memory(params,config.training_params["validation"],config.training_params["test"],config.y_path,id2gt,X_meta,config.training_params["val_from_file"])
if params['dataset']['nsamples'] != 'all':
N_train = min(N_train,params['dataset']['nsamples'])
else:
X_train, Y_train, X_val, Y_val, X_test, Y_test, N_train = load_data_hf5(params,config.training_params["validation"],config.training_params["test"])
trained_model["whiten_scaler"] = common.TRAINDATA_DIR+'/scaler_%s.pk' % config.x_path
logging.debug("Training...")
if config.model_arch["final_activation"] == 'softmax':
monitor_metric = 'val_categorical_accuracy'
else:
monitor_metric = 'val_loss'
early_stopping = EarlyStopping(monitor=monitor_metric, patience=4)
if only_metadata:
epochs = model.fit(X_train, Y_train,
batch_size=config.training_params["n_minibatch"],
#shuffle='batch',
nb_epoch=config.training_params["n_epochs"],
verbose=1, validation_data=(X_val, Y_val),
callbacks=[early_stopping])
else:
if with_generator:
print(N_train)
epochs = model.fit_generator(batch_block_generator(params,config.y_path,N_train,id2gt,X_meta,config.training_params["val_from_file"]),
samples_per_epoch = N_train-(N_train % config.training_params["n_minibatch"]),
nb_epoch = config.training_params["n_epochs"],
verbose=1,
validation_data = (X_val, Y_val),
callbacks=[early_stopping])
else:
epochs = model.fit(X_train, Y_train,
batch_size=config.training_params["n_minibatch"],
shuffle='batch',
nb_epoch=config.training_params["n_epochs"],
verbose=1,
validation_data=(X_val, Y_val),
callbacks=[early_stopping])
model.save_weights(os.path.join(model_dir, config.model_id + common.WEIGHTS_EXT))
logging.debug("Saving trained model %s in %s..." %
(trained_model["model_id"], common.DEFAULT_TRAINED_MODELS_FILE))
common.save_trained_model(common.DEFAULT_TRAINED_MODELS_FILE, trained_model)
logging.debug("Evaluating...")
print(X_test[0].shape,X_test[1].shape)
preds=model.predict(X_test)
print(preds.shape)
if params["dataset"]["evaluation"] in ['binary','multiclass']:
y_pred = (preds > 0.5).astype('int32')
acc = accuracy_score(Y_test,y_pred)
prec = precision_score(Y_test,y_pred,average='macro')
recall = recall_score(Y_test,y_pred,average='macro')
f1 = f1_score(Y_test,y_pred,average='macro')
print('Accuracy', acc)
print("%.3f\t%.3f\t%.3f" % (prec,recall,f1))
if params["dataset"]["fact"] == 'class':
good_classes = np.nonzero(Y_test.sum(0))[0]
print(Y_test.shape,preds.shape)
#roc_auc=roc_auc_score(Y_test[:,good_classes],preds[:,good_classes])
#logging.debug('ROC-AUC '+str(roc_auc))
#pr_auc = average_precision_score(Y_test[:,good_classes],preds[:,good_classes])
#print('PR-AUC',pr_auc)
#r2 = roc_auc
elif params["dataset"]["evaluation"] not in ['binary','multiclass','multilabel']:
r2s = []
for i,pred in enumerate(preds):
r2 = r2_score(Y_test[i],pred)
r2s.append(r2)
r2 = np.asarray(r2s).mean()
logging.debug('R2 avg '+str(r2))
# Batch prediction
if X_test[1].shape == Y_test[1].shape:
score = model.evaluate(X_test, Y_test, verbose=0)
logging.debug(score)
logging.debug(model.metrics_names)
print(score)
trained_model["loss_score"] = score[0]
trained_model["mse"] = score[1]
if params["dataset"]["evaluation"] not in ['binary','multiclass','multilabel']:
trained_model["r2"] = r2
fw=open(common.DATA_DIR+'/results/train_results.txt','a')
fw.write(trained_model["model_id"]+'\n')
if params["training"]["loss_func"] == 'binary_crossentropy':
fw.write('ROC-AUC: '+str(roc_auc)+'\n')
print('ROC-AUC: '+str(roc_auc))
fw.write('Loss: '+str(score[0])+' ('+config.training_params["loss_func"]+')\n')
fw.write('MSE: '+str(score[1])+'\n')
elif params["dataset"]["evaluation"] not in ['binary','multiclass','multilabel']:
fw.write('R2 avg: '+str(r2)+'\n')
print('R2 avg: '+str(r2))
fw.write('Loss: '+str(score[0])+' ('+config.training_params["loss_func"]+')\n')
fw.write('MSE: '+str(score[1])+'\n')
fw.write(json.dumps(epochs.history)+"\n\n")
fw.close()
if with_predict:
trained_models = pd.read_csv(common.DEFAULT_TRAINED_MODELS_FILE, sep='\t')
model_config = trained_models[trained_models["model_id"] == trained_model["model_id"]]
model_config = model_config.to_dict(orient="list")
testset = open(common.DATASETS_DIR+'/items_index_test_%s.tsv' % (config.dataset_settings["dataset"])).read().splitlines()
if config.training_params["val_from_file"] and not only_metadata:
predictions, predictions_index = obtain_predictions(model_config, testset, trained_model["model_id"], config.predicting_params["trim_coeff"], model=model, with_metadata=with_metadata, only_metadata=only_metadata, metadata_source=metadata_source, with_patches=True)
else:
predictions, predictions_index = obtain_predictions(model_config, testset, trained_model["model_id"], config.predicting_params["trim_coeff"], model=model, with_metadata=with_metadata, only_metadata=only_metadata, metadata_source=metadata_source)
print("Predictions created")
if with_eval:
do_eval(trained_model["model_id"],get_roc=True,get_map=True,get_p=True,predictions=predictions,predictions_index=predictions_index)
if __name__ == '__main__':
    # Command-line entry point: parse options, pick a parameter set, and
    # hand off to process() for training/evaluation.
    parser = argparse.ArgumentParser(
        description='Evaluates the model',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-p', '--params',
                        dest="params_file",
                        help='JSON file with params',
                        default=False)
    parser.add_argument('-pred', '--predict',
                        dest="with_predict",
                        help='Predict factors',
                        action='store_true',
                        default=False)
    parser.add_argument('-eval', '--eval',
                        dest="with_eval",
                        help='Eval factors',
                        action='store_true',
                        default=False)
    parser.add_argument('-m', '--metadata',
                        dest="with_metadata",
                        help='Use metadata',
                        action='store_true',
                        default=False)
    parser.add_argument('-om', '--only_metadata',
                        dest="only_metadata",
                        help='Use only metadata',
                        action='store_true',
                        default=False)
    parser.add_argument('-ms', '--metadata_source',
                        dest="metadata_source",
                        type=str,
                        help='Suffix of metadata files',
                        default="rovi")
    args = parser.parse_args()
    # Default parameter set; overridden when a JSON params file is supplied.
    params = models.params_1
    if args.params_file:
        # Fix: the old code passed a bare open() result to json.load and
        # leaked the file handle; the context manager closes it.
        with open(args.params_file) as params_fh:
            params = json.load(params_fh)
    process(params)
| [
"logging.debug",
"pandas.read_csv",
"common.get_next_model_id",
"common.save_model",
"sklearn.metrics.precision_score",
"sklearn.metrics.recall_score",
"keras.optimizers.SGD",
"common.ensure_dir",
"sklearn.metrics.r2_score",
"argparse.ArgumentParser",
"numpy.where",
"json.dumps",
"numpy.asar... | [((1981, 1998), 'numpy.load', 'np.load', (['filename'], {}), '(filename)\n', (1988, 1998), True, 'import numpy as np\n'), ((2010, 2103), 'scipy.sparse.csr_matrix', 'csr_matrix', (["(loader['data'], loader['indices'], loader['indptr'])"], {'shape': "loader['shape']"}), "((loader['data'], loader['indices'], loader['indptr']), shape=\n loader['shape'])\n", (2020, 2103), False, 'from scipy.sparse import csr_matrix\n'), ((2437, 2562), 'keras.optimizers.SGD', 'SGD', ([], {'lr': "t_params['learning_rate']", 'decay': "t_params['decay']", 'momentum': "t_params['momentum']", 'nesterov': "t_params['nesterov']"}), "(lr=t_params['learning_rate'], decay=t_params['decay'], momentum=\n t_params['momentum'], nesterov=t_params['nesterov'])\n", (2440, 2562), False, 'from keras.optimizers import SGD, Adam\n'), ((2583, 2638), 'keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)', 'beta_1': '(0.9)', 'beta_2': '(0.999)', 'epsilon': '(1e-08)'}), '(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)\n', (2587, 2638), False, 'from keras.optimizers import SGD, Adam\n'), ((3236, 3296), 'numpy.load', 'np.load', (["(common.DATASETS_DIR + '/y_train_' + Y_path + '.npy')"], {}), "(common.DATASETS_DIR + '/y_train_' + Y_path + '.npy')\n", (3243, 3296), True, 'import numpy as np\n'), ((6944, 6969), 'h5py.File', 'h5py.File', (['hdf5_file', '"""r"""'], {}), "(hdf5_file, 'r')\n", (6953, 6969), False, 'import h5py\n'), ((7148, 7203), 'keras.utils.io_utils.HDF5Matrix', 'HDF5Matrix', (['hdf5_file', '"""features"""'], {'start': '(0)', 'end': 'N_train'}), "(hdf5_file, 'features', start=0, end=N_train)\n", (7158, 7203), False, 'from keras.utils.io_utils import HDF5Matrix\n'), ((7218, 7272), 'keras.utils.io_utils.HDF5Matrix', 'HDF5Matrix', (['hdf5_file', '"""targets"""'], {'start': '(0)', 'end': 'N_train'}), "(hdf5_file, 'targets', start=0, end=N_train)\n", (7228, 7272), False, 'from keras.utils.io_utils import HDF5Matrix\n'), ((7285, 7354), 'keras.utils.io_utils.HDF5Matrix', 'HDF5Matrix', 
(['hdf5_file', '"""features"""'], {'start': 'N_train', 'end': '(N_train + N_val)'}), "(hdf5_file, 'features', start=N_train, end=N_train + N_val)\n", (7295, 7354), False, 'from keras.utils.io_utils import HDF5Matrix\n'), ((7365, 7433), 'keras.utils.io_utils.HDF5Matrix', 'HDF5Matrix', (['hdf5_file', '"""targets"""'], {'start': 'N_train', 'end': '(N_train + N_val)'}), "(hdf5_file, 'targets', start=N_train, end=N_train + N_val)\n", (7375, 7433), False, 'from keras.utils.io_utils import HDF5Matrix\n'), ((7445, 7508), 'keras.utils.io_utils.HDF5Matrix', 'HDF5Matrix', (['hdf5_file', '"""features"""'], {'start': '(N_train + N_val)', 'end': 'N'}), "(hdf5_file, 'features', start=N_train + N_val, end=N)\n", (7455, 7508), False, 'from keras.utils.io_utils import HDF5Matrix\n'), ((7520, 7582), 'keras.utils.io_utils.HDF5Matrix', 'HDF5Matrix', (['hdf5_file', '"""targets"""'], {'start': '(N_train + N_val)', 'end': 'N'}), "(hdf5_file, 'targets', start=N_train + N_val, end=N)\n", (7530, 7582), False, 'from keras.utils.io_utils import HDF5Matrix\n'), ((11710, 11735), 'h5py.File', 'h5py.File', (['hdf5_file', '"""r"""'], {}), "(hdf5_file, 'r')\n", (11719, 11735), False, 'import h5py\n'), ((13089, 13163), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s %(message)s"""', 'level': 'logging.DEBUG'}), "(format='%(asctime)s %(message)s', level=logging.DEBUG)\n", (13108, 13163), False, 'import logging\n'), ((16725, 16773), 'os.path.join', 'os.path.join', (['common.MODELS_DIR', 'config.model_id'], {}), '(common.MODELS_DIR, config.model_id)\n', (16737, 16773), False, 'import os\n'), ((16778, 16814), 'common.ensure_dir', 'common.ensure_dir', (['common.MODELS_DIR'], {}), '(common.MODELS_DIR)\n', (16795, 16814), False, 'import common\n'), ((16819, 16847), 'common.ensure_dir', 'common.ensure_dir', (['model_dir'], {}), '(model_dir)\n', (16836, 16847), False, 'import common\n'), ((16865, 16924), 'os.path.join', 'os.path.join', (['model_dir', '(config.model_id + 
common.MODEL_EXT)'], {}), '(model_dir, config.model_id + common.MODEL_EXT)\n', (16877, 16924), False, 'import os\n'), ((16929, 16965), 'logging.debug', 'logging.debug', (['"""Building Network..."""'], {}), "('Building Network...')\n", (16942, 16965), False, 'import logging\n'), ((17253, 17289), 'common.save_model', 'common.save_model', (['model', 'model_file'], {}), '(model, model_file)\n', (17270, 17289), False, 'import common\n'), ((17295, 17335), 'logging.debug', 'logging.debug', (["trained_model['model_id']"], {}), "(trained_model['model_id'])\n", (17308, 17335), False, 'import logging\n'), ((17341, 17373), 'logging.debug', 'logging.debug', (['"""Loading Data..."""'], {}), "('Loading Data...')\n", (17354, 17373), False, 'import logging\n'), ((20469, 20497), 'logging.debug', 'logging.debug', (['"""Training..."""'], {}), "('Training...')\n", (20482, 20497), False, 'import logging\n'), ((20677, 20726), 'keras.callbacks.EarlyStopping', 'EarlyStopping', ([], {'monitor': 'monitor_metric', 'patience': '(4)'}), '(monitor=monitor_metric, patience=4)\n', (20690, 20726), False, 'from keras.callbacks import EarlyStopping\n'), ((22068, 22188), 'logging.debug', 'logging.debug', (["('Saving trained model %s in %s...' % (trained_model['model_id'], common.\n DEFAULT_TRAINED_MODELS_FILE))"], {}), "('Saving trained model %s in %s...' 
% (trained_model[\n 'model_id'], common.DEFAULT_TRAINED_MODELS_FILE))\n", (22081, 22188), False, 'import logging\n'), ((22206, 22282), 'common.save_trained_model', 'common.save_trained_model', (['common.DEFAULT_TRAINED_MODELS_FILE', 'trained_model'], {}), '(common.DEFAULT_TRAINED_MODELS_FILE, trained_model)\n', (22231, 22282), False, 'import common\n'), ((22288, 22318), 'logging.debug', 'logging.debug', (['"""Evaluating..."""'], {}), "('Evaluating...')\n", (22301, 22318), False, 'import logging\n'), ((26023, 26142), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Evaluates the model"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description='Evaluates the model', formatter_class=\n argparse.ArgumentDefaultsHelpFormatter)\n", (26046, 26142), False, 'import argparse\n'), ((898, 924), 'common.get_next_model_id', 'common.get_next_model_id', ([], {}), '()\n', (922, 924), False, 'import common\n'), ((1531, 1579), 'collections.OrderedDict', 'OrderedDict', (['{first_key: object_dict[first_key]}'], {}), '({first_key: object_dict[first_key]})\n', (1542, 1579), False, 'from collections import OrderedDict\n'), ((3451, 3512), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_train_' + X_path + '.npy')"], {}), "(common.TRAINDATA_DIR + '/X_train_' + X_path + '.npy')\n", (3458, 3512), True, 'import numpy as np\n'), ((4813, 4841), 'sklearn.preprocessing.normalize', 'normalize', (['all_Y'], {'copy': '(False)'}), '(all_Y, copy=False)\n', (4822, 4841), False, 'from sklearn.preprocessing import normalize\n'), ((4902, 4960), 'numpy.load', 'np.load', (["(common.DATASETS_DIR + '/y_val_' + Y_path + '.npy')"], {}), "(common.DATASETS_DIR + '/y_val_' + Y_path + '.npy')\n", (4909, 4960), True, 'import numpy as np\n'), ((4972, 5031), 'numpy.load', 'np.load', (["(common.DATASETS_DIR + '/y_test_' + Y_path + '.npy')"], {}), "(common.DATASETS_DIR + '/y_test_' + Y_path + '.npy')\n", (4979, 5031), True, 'import numpy as np\n'), ((5764, 
5815), 'logging.debug', 'logging.debug', (["('Training data points: %d' % N_train)"], {}), "('Training data points: %d' % N_train)\n", (5777, 5815), False, 'import logging\n'), ((5824, 5875), 'logging.debug', 'logging.debug', (["('Validation data points: %d' % N_val)"], {}), "('Validation data points: %d' % N_val)\n", (5837, 5875), False, 'import logging\n'), ((5884, 5945), 'logging.debug', 'logging.debug', (["('Test data points: %d' % (N - N_train - N_val))"], {}), "('Test data points: %d' % (N - N_train - N_val))\n", (5897, 5945), False, 'import logging\n'), ((7957, 7982), 'h5py.File', 'h5py.File', (['hdf5_file', '"""r"""'], {}), "(hdf5_file, 'r')\n", (7966, 7982), False, 'import h5py\n'), ((8311, 8340), 'h5py.File', 'h5py.File', (['val_hdf5_file', '"""r"""'], {}), "(val_hdf5_file, 'r')\n", (8320, 8340), False, 'import h5py\n'), ((8436, 8494), 'numpy.load', 'np.load', (["(common.DATASETS_DIR + '/y_val_' + y_path + '.npy')"], {}), "(common.DATASETS_DIR + '/y_val_' + y_path + '.npy')\n", (8443, 8494), True, 'import numpy as np\n'), ((8955, 9002), 'numpy.asarray', 'np.asarray', (['[id2gt_val[id] for id in index_val]'], {}), '([id2gt_val[id] for id in index_val])\n', (8965, 9002), True, 'import numpy as np\n'), ((9186, 9216), 'h5py.File', 'h5py.File', (['test_hdf5_file', '"""r"""'], {}), "(test_hdf5_file, 'r')\n", (9195, 9216), False, 'import h5py\n'), ((9317, 9376), 'numpy.load', 'np.load', (["(common.DATASETS_DIR + '/y_test_' + y_path + '.npy')"], {}), "(common.DATASETS_DIR + '/y_test_' + y_path + '.npy')\n", (9324, 9376), True, 'import numpy as np\n'), ((9852, 9901), 'numpy.asarray', 'np.asarray', (['[id2gt_test[id] for id in index_test]'], {}), '([id2gt_test[id] for id in index_test])\n', (9862, 9901), True, 'import numpy as np\n'), ((10085, 10110), 'h5py.File', 'h5py.File', (['hdf5_file', '"""r"""'], {}), "(hdf5_file, 'r')\n", (10094, 10110), False, 'import h5py\n'), ((10583, 10626), 'numpy.asarray', 'np.asarray', (['[id2gt[id] for id in index_val]'], {}), 
'([id2gt[id] for id in index_val])\n', (10593, 10626), True, 'import numpy as np\n'), ((11019, 11063), 'numpy.asarray', 'np.asarray', (['[id2gt[id] for id in index_test]'], {}), '([id2gt[id] for id in index_test])\n', (11029, 11063), True, 'import numpy as np\n'), ((22001, 22062), 'os.path.join', 'os.path.join', (['model_dir', '(config.model_id + common.WEIGHTS_EXT)'], {}), '(model_dir, config.model_id + common.WEIGHTS_EXT)\n', (22013, 22062), False, 'import os\n'), ((22554, 22584), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['Y_test', 'y_pred'], {}), '(Y_test, y_pred)\n', (22568, 22584), False, 'from sklearn.metrics import roc_curve, auc, roc_auc_score, precision_score, recall_score, f1_score, accuracy_score, average_precision_score\n'), ((22599, 22647), 'sklearn.metrics.precision_score', 'precision_score', (['Y_test', 'y_pred'], {'average': '"""macro"""'}), "(Y_test, y_pred, average='macro')\n", (22614, 22647), False, 'from sklearn.metrics import roc_curve, auc, roc_auc_score, precision_score, recall_score, f1_score, accuracy_score, average_precision_score\n'), ((22663, 22708), 'sklearn.metrics.recall_score', 'recall_score', (['Y_test', 'y_pred'], {'average': '"""macro"""'}), "(Y_test, y_pred, average='macro')\n", (22675, 22708), False, 'from sklearn.metrics import roc_curve, auc, roc_auc_score, precision_score, recall_score, f1_score, accuracy_score, average_precision_score\n'), ((22720, 22761), 'sklearn.metrics.f1_score', 'f1_score', (['Y_test', 'y_pred'], {'average': '"""macro"""'}), "(Y_test, y_pred, average='macro')\n", (22728, 22761), False, 'from sklearn.metrics import roc_curve, auc, roc_auc_score, precision_score, recall_score, f1_score, accuracy_score, average_precision_score\n'), ((23669, 23689), 'logging.debug', 'logging.debug', (['score'], {}), '(score)\n', (23682, 23689), False, 'import logging\n'), ((23698, 23732), 'logging.debug', 'logging.debug', (['model.metrics_names'], {}), '(model.metrics_names)\n', (23711, 23732), False, 'import 
logging\n'), ((24821, 24878), 'pandas.read_csv', 'pd.read_csv', (['common.DEFAULT_TRAINED_MODELS_FILE'], {'sep': '"""\t"""'}), "(common.DEFAULT_TRAINED_MODELS_FILE, sep='\\t')\n", (24832, 24878), True, 'import pandas as pd\n'), ((25850, 25990), 'eval.do_eval', 'do_eval', (["trained_model['model_id']"], {'get_roc': '(True)', 'get_map': '(True)', 'get_p': '(True)', 'predictions': 'predictions', 'predictions_index': 'predictions_index'}), "(trained_model['model_id'], get_roc=True, get_map=True, get_p=True,\n predictions=predictions, predictions_index=predictions_index)\n", (25857, 25990), False, 'from eval import do_eval\n'), ((1683, 1692), 'theano.tensor.sqr', 'tt.sqr', (['x'], {}), '(x)\n', (1689, 1692), True, 'import theano.tensor as tt\n'), ((5362, 5441), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_val_%s_%s.npy' % (metadata_source, dataset))"], {}), "(common.TRAINDATA_DIR + '/X_val_%s_%s.npy' % (metadata_source, dataset))\n", (5369, 5441), True, 'import numpy as np\n'), ((5460, 5545), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_test_%s_%s.npy' % (metadata_source, dataset))"], {}), "(common.TRAINDATA_DIR + '/X_test_%s_%s.npy' % (metadata_source, dataset)\n )\n", (5467, 5545), True, 'import numpy as np\n'), ((8063, 8090), 'numpy.where', 'np.where', (["(index_train == '')"], {}), "(index_train == '')\n", (8071, 8090), True, 'import numpy as np\n'), ((8819, 8844), 'numpy.where', 'np.where', (["(index_val == '')"], {}), "(index_val == '')\n", (8827, 8844), True, 'import numpy as np\n'), ((8895, 8920), 'numpy.where', 'np.where', (["(index_val == '')"], {}), "(index_val == '')\n", (8903, 8920), True, 'import numpy as np\n'), ((9711, 9737), 'numpy.where', 'np.where', (["(index_test == '')"], {}), "(index_test == '')\n", (9719, 9737), True, 'import numpy as np\n'), ((9790, 9816), 'numpy.where', 'np.where', (["(index_test == '')"], {}), "(index_test == '')\n", (9798, 9816), True, 'import numpy as np\n'), ((10448, 10473), 'numpy.where', 'np.where', 
(["(index_val == '')"], {}), "(index_val == '')\n", (10456, 10473), True, 'import numpy as np\n'), ((10524, 10549), 'numpy.where', 'np.where', (["(index_val == '')"], {}), "(index_val == '')\n", (10532, 10549), True, 'import numpy as np\n'), ((10819, 10845), 'numpy.where', 'np.where', (["(index_test == '')"], {}), "(index_test == '')\n", (10827, 10845), True, 'import numpy as np\n'), ((10898, 10924), 'numpy.where', 'np.where', (["(index_test == '')"], {}), "(index_test == '')\n", (10906, 10924), True, 'import numpy as np\n'), ((11180, 11207), 'numpy.where', 'np.where', (["(index_train == '')"], {}), "(index_train == '')\n", (11188, 11207), True, 'import numpy as np\n'), ((12337, 12382), 'numpy.asarray', 'np.asarray', (['[id2gt[id] for id in index_block]'], {}), '([id2gt[id] for id in index_block])\n', (12347, 12382), True, 'import numpy as np\n'), ((19575, 19642), 'numpy.load', 'np.load', (["(common.DATASETS_DIR + '/y_train_' + config.y_path + '.npy')"], {}), "(common.DATASETS_DIR + '/y_train_' + config.y_path + '.npy')\n", (19582, 19642), True, 'import numpy as np\n'), ((25282, 25528), 'predict.obtain_predictions', 'obtain_predictions', (['model_config', 'testset', "trained_model['model_id']", "config.predicting_params['trim_coeff']"], {'model': 'model', 'with_metadata': 'with_metadata', 'only_metadata': 'only_metadata', 'metadata_source': 'metadata_source', 'with_patches': '(True)'}), "(model_config, testset, trained_model['model_id'], config\n .predicting_params['trim_coeff'], model=model, with_metadata=\n with_metadata, only_metadata=only_metadata, metadata_source=\n metadata_source, with_patches=True)\n", (25300, 25528), False, 'from predict import obtain_predictions\n'), ((25573, 25800), 'predict.obtain_predictions', 'obtain_predictions', (['model_config', 'testset', "trained_model['model_id']", "config.predicting_params['trim_coeff']"], {'model': 'model', 'with_metadata': 'with_metadata', 'only_metadata': 'only_metadata', 'metadata_source': 
'metadata_source'}), "(model_config, testset, trained_model['model_id'], config\n .predicting_params['trim_coeff'], model=model, with_metadata=\n with_metadata, only_metadata=only_metadata, metadata_source=metadata_source\n )\n", (25591, 25800), False, 'from predict import obtain_predictions\n'), ((1780, 1797), 'numpy.finfo', 'np.finfo', (['x.dtype'], {}), '(x.dtype)\n', (1788, 1797), True, 'import numpy as np\n'), ((4016, 4101), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (metadata_source, dataset))"], {}), "(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (metadata_source,\n dataset))\n", (4023, 4101), True, 'import numpy as np\n'), ((4238, 4323), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (metadata_source, dataset))"], {}), "(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (metadata_source,\n dataset))\n", (4245, 4323), True, 'import numpy as np\n'), ((12200, 12227), 'numpy.where', 'np.where', (["(index_block == '')"], {}), "(index_block == '')\n", (12208, 12227), True, 'import numpy as np\n'), ((12286, 12313), 'numpy.where', 'np.where', (["(index_block == '')"], {}), "(index_block == '')\n", (12294, 12313), True, 'import numpy as np\n'), ((12449, 12479), 'sklearn.preprocessing.normalize', 'normalize', (['y_block'], {'copy': '(False)'}), '(y_block, copy=False)\n', (12458, 12479), False, 'from sklearn.preprocessing import normalize\n'), ((12571, 12597), 'random.shuffle', 'random.shuffle', (['items_list'], {}), '(items_list)\n', (12585, 12597), False, 'import random\n'), ((13552, 13658), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (metadata_source, params[\n 'dataset']['dataset']))"], {}), "(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (metadata_source,\n params['dataset']['dataset']))\n", (13559, 13658), True, 'import numpy as np\n'), ((13833, 13958), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix2'], 
params['dataset']['dataset']))"], {}), "(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix2'], params['dataset']['dataset']))\n", (13840, 13958), True, 'import numpy as np\n'), ((14096, 14221), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix3'], params['dataset']['dataset']))"], {}), "(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix3'], params['dataset']['dataset']))\n", (14103, 14221), True, 'import numpy as np\n'), ((14359, 14484), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix4'], params['dataset']['dataset']))"], {}), "(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix4'], params['dataset']['dataset']))\n", (14366, 14484), True, 'import numpy as np\n'), ((14641, 14747), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (metadata_source, params[\n 'dataset']['dataset']))"], {}), "(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (metadata_source,\n params['dataset']['dataset']))\n", (14648, 14747), True, 'import numpy as np\n'), ((23408, 23433), 'sklearn.metrics.r2_score', 'r2_score', (['Y_test[i]', 'pred'], {}), '(Y_test[i], pred)\n', (23416, 23433), False, 'from sklearn.metrics import r2_score\n'), ((24720, 24746), 'json.dumps', 'json.dumps', (['epochs.history'], {}), '(epochs.history)\n', (24730, 24746), False, 'import json\n'), ((14880, 15005), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix2'], params['dataset']['dataset']))"], {}), "(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix2'], params['dataset']['dataset']))\n", (14887, 15005), True, 'import numpy as np\n'), ((15143, 15268), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix3'], 
params['dataset']['dataset']))"], {}), "(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix3'], params['dataset']['dataset']))\n", (15150, 15268), True, 'import numpy as np\n'), ((15406, 15531), 'numpy.load', 'np.load', (["(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix4'], params['dataset']['dataset']))"], {}), "(common.TRAINDATA_DIR + '/X_train_%s_%s.npy' % (params['dataset'][\n 'meta-suffix4'], params['dataset']['dataset']))\n", (15413, 15531), True, 'import numpy as np\n'), ((23473, 23488), 'numpy.asarray', 'np.asarray', (['r2s'], {}), '(r2s)\n', (23483, 23488), True, 'import numpy as np\n')] |
"""empty message
Revision ID: <KEY>
Revises: 5b9e2fef18f6
Create Date: 2021-10-21 14:12:46.696355
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import column, table
from sqlalchemy.sql.sqltypes import Boolean, String
# revision identifiers, used by Alembic.
revision = '<KEY>'  # NOTE(review): looks like a redacted revision id — confirm
down_revision = '5b9e2fef18f6'  # migration applied immediately before this one
branch_labels = None
depends_on = None
def upgrade():
    """Seed four additional FOI request statuses and rename two existing ones."""
    # ### commands auto generated by Alembic - please adjust! ###
    statuses = table(
        'FOIRequestStatuses',
        column('name', String),
        column('description', String),
        column('isactive', Boolean),
    )
    # Each seeded status uses its own name as the description.
    seed_rows = [
        {'name': status, 'description': status, 'isactive': True}
        for status in ('On Hold', 'Deduplication', 'Harms Assessment', 'Response')
    ]
    op.bulk_insert(statuses, seed_rows)
    op.execute("""Update public."FOIRequestStatuses" set name = 'Fee Estimate', description = 'Fee Estimate' where name = 'Fee Assessed';""")
    op.execute("""Update public."FOIRequestStatuses" set name = 'Records Review', description = 'Records Review' where name = 'Review';""")
    # ### end Alembic commands ###
def downgrade():
    """Delete the seeded statuses and restore the two renamed ones."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): 'Redirect' is deleted here but never inserted by this
    # migration's upgrade() — presumably seeded elsewhere; confirm.
    op.execute("""delete from public."FOIRequestStatuses" where name in ('On Hold','Deduplication','Harms Assessment','Redirect','Response');""")
    op.execute("""Update public."FOIRequestStatuses" set name = 'Fee Assessed', description = 'Fee Assessed' where name = 'Fee Estimate';""")
    op.execute("""Update public."FOIRequestStatuses" set name = 'Review', description = 'Review' where name = 'Records Review';""")
    # ### end Alembic commands ###
| [
"sqlalchemy.sql.column",
"alembic.op.bulk_insert",
"alembic.op.execute"
] | [((735, 1080), 'alembic.op.bulk_insert', 'op.bulk_insert', (['requeststatus_table', "[{'name': 'On Hold', 'description': 'On Hold', 'isactive': True}, {'name':\n 'Deduplication', 'description': 'Deduplication', 'isactive': True}, {\n 'name': 'Harms Assessment', 'description': 'Harms Assessment',\n 'isactive': True}, {'name': 'Response', 'description': 'Response',\n 'isactive': True}]"], {}), "(requeststatus_table, [{'name': 'On Hold', 'description':\n 'On Hold', 'isactive': True}, {'name': 'Deduplication', 'description':\n 'Deduplication', 'isactive': True}, {'name': 'Harms Assessment',\n 'description': 'Harms Assessment', 'isactive': True}, {'name':\n 'Response', 'description': 'Response', 'isactive': True}])\n", (749, 1080), False, 'from alembic import op\n'), ((1143, 1292), 'alembic.op.execute', 'op.execute', (['"""Update public."FOIRequestStatuses" set name = \'Fee Estimate\', description = \'Fee Estimate\' where name = \'Fee Assessed\';"""'], {}), '(\n \'Update public."FOIRequestStatuses" set name = \\\'Fee Estimate\\\', description = \\\'Fee Estimate\\\' where name = \\\'Fee Assessed\\\';\'\n )\n', (1153, 1292), False, 'from alembic import op\n'), ((1289, 1436), 'alembic.op.execute', 'op.execute', (['"""Update public."FOIRequestStatuses" set name = \'Records Review\', description = \'Records Review\' where name = \'Review\';"""'], {}), '(\n \'Update public."FOIRequestStatuses" set name = \\\'Records Review\\\', description = \\\'Records Review\\\' where name = \\\'Review\\\';\'\n )\n', (1299, 1436), False, 'from alembic import op\n'), ((1555, 1712), 'alembic.op.execute', 'op.execute', (['"""delete from public."FOIRequestStatuses" where name in (\'On Hold\',\'Deduplication\',\'Harms Assessment\',\'Redirect\',\'Response\');"""'], {}), '(\n \'delete from public."FOIRequestStatuses" where name in (\\\'On Hold\\\',\\\'Deduplication\\\',\\\'Harms Assessment\\\',\\\'Redirect\\\',\\\'Response\\\');\'\n )\n', (1565, 1712), False, 'from alembic import op\n'), 
((1707, 1856), 'alembic.op.execute', 'op.execute', (['"""Update public."FOIRequestStatuses" set name = \'Fee Assessed\', description = \'Fee Assessed\' where name = \'Fee Estimate\';"""'], {}), '(\n \'Update public."FOIRequestStatuses" set name = \\\'Fee Assessed\\\', description = \\\'Fee Assessed\\\' where name = \\\'Fee Estimate\\\';\'\n )\n', (1717, 1856), False, 'from alembic import op\n'), ((1853, 1992), 'alembic.op.execute', 'op.execute', (['"""Update public."FOIRequestStatuses" set name = \'Review\', description = \'Review\' where name = \'Records Review\';"""'], {}), '(\n \'Update public."FOIRequestStatuses" set name = \\\'Review\\\', description = \\\'Review\\\' where name = \\\'Records Review\\\';\'\n )\n', (1863, 1992), False, 'from alembic import op\n'), ((546, 568), 'sqlalchemy.sql.column', 'column', (['"""name"""', 'String'], {}), "('name', String)\n", (552, 568), False, 'from sqlalchemy.sql import column, table\n'), ((602, 631), 'sqlalchemy.sql.column', 'column', (['"""description"""', 'String'], {}), "('description', String)\n", (608, 631), False, 'from sqlalchemy.sql import column, table\n'), ((665, 692), 'sqlalchemy.sql.column', 'column', (['"""isactive"""', 'Boolean'], {}), "('isactive', Boolean)\n", (671, 692), False, 'from sqlalchemy.sql import column, table\n')] |
#!/usr/bin/python -B
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
# asfindex.py - Pelican plugin that runs shell scripts during initialization
#
import sys
import subprocess
import shlex
import io
import os
import os.path
import traceback
import pelican.plugins.signals
import pelican.settings
from pelican.contents import Article, Page, Static
from pelican.generators import (ArticlesGenerator, # noqa: I100
PagesGenerator, SourceFileGenerator,
StaticGenerator, TemplatePagesGenerator)
# get setting
# Settings are for the whole pelican environment.
def get_setting(generators, setting):
    """Return *setting* from the first PagesGenerator, or None when absent.

    Settings apply to the whole Pelican environment, so the first
    PagesGenerator's copy is authoritative.
    """
    for g in generators:
        if isinstance(g, PagesGenerator):
            # .get() returns None for an unknown setting. The old code
            # wrapped the loop in a broad `except Exception: return None`,
            # which also swallowed unrelated programming errors.
            return g.settings.get(setting)
    return None
# set context
# Context are the processed settings and other environment which is made available to the JINJA template.
# Changes to the settings have no effect as those are already copied to each generator's context.
def set_context(generators, setting, value):
    """Store *value* under *setting* in the PagesGenerator context.

    The context is what the Jinja templates see; settings themselves have
    already been copied into each generator, so we mutate the context here.
    Returns *value* on success, None when no PagesGenerator exists.
    """
    for gen in generators:
        if not isinstance(gen, PagesGenerator):
            continue
        gen.context[setting] = value
        return value
    return None
# get pages
# The PagesGenerator has a list of pages. Retrieve a sorted array of page information
def get_pages(generators):
    """Collect a sorted list of (folder, url, title) tuples for every page.

    URLs are absolute; an ``index.html`` leaf is trimmed to the bare
    folder path so that it sorts first within its directory.
    """
    site_index = []
    for gen in generators:
        if not isinstance(gen, PagesGenerator):
            continue
        for pg in gen.pages:
            url = '/' + pg.save_as
            if url.endswith('/index.html'):
                # keep only the trailing "/" so the folder page leads its group
                url = url[:-len('index.html')]
            site_index.append((os.path.dirname(url), url, pg.title))
    site_index.sort()
    return site_index
# get site index
def get_index(site_index, scope):
    """Render *site_index* as an HTML listing of folders and their pages.

    Each new folder opens an ``<h3>`` heading followed by an ``<ol>``;
    subsequent pages in the same folder become ``<li>`` entries. A scope
    of ``'**'`` includes everything, any other truthy scope restricts the
    listing to paths under it, and a falsy scope yields None.
    """
    if not scope:
        return
    restrict = scope != '**'
    fragments = []
    open_list = False
    last_folder = None
    for path, url, title in site_index:
        if restrict and not path.startswith(scope):
            continue
        _, leaf = os.path.split(path)
        folder = leaf.capitalize()
        if folder != last_folder:
            if open_list:
                fragments.append('</ol>\n')
            open_list = True
            fragments.append(f'<h3><a href="{url}">{title}</a></h3>\n')
            fragments.append('<ol>\n')
            last_folder = folder
        else:
            fragments.append(f'<li><a href="{url}">{title}</a></li>\n')
    if open_list:
        fragments.append('</ol>\n')
    return ''.join(fragments)
# get site menu
# def get_menu(site_index, menus):
# currrent_menu = None
# site_menu = ''
# if menus:
# for f in menus:
# path, page = os.path.split(f)
# folder = page.capitalize()
# site_menu += '<li class="nav-item active dropdown">\n'
# site_menu += f'<a class="nav-link dropdown-toggle" href="#" id="dropdown{folder}" '
# site_menu += f'role="button" data-toggle="dropdown" aria-expanded="false">{folder}</a>\n'
# site_menu += f'<ul class="dropdown-menu" aria-labelledby="dropdown{folder}">\n'
# for p in site_index:
# if p[0] == f:
# # menu item for page
# site_menu += f'<li><a class="dropdownitem" href="{p[1]}">{p[2]}</a></li>\n'
# site_menu += '</ul></li>\n'
# return site_menu
#
#
# show pages
def show_pages(generators):
    """Publish SITE_INDEX into the page context once all generators ran."""
    pages = get_pages(generators)
    settings = get_setting(generators, 'ASF_INDEX')
    print(settings)
    # Menus are intentionally not generated here as the approach was not
    # generalizable; only the flat site index is published.
    set_context(generators, 'SITE_INDEX', get_index(pages, settings['index']))
def tb_finalized(generators):
    """Run show_pages(), dumping the full traceback before re-raising.

    Pelican tends to swallow exception details raised from signal
    handlers, so the traceback is printed to stderr here first.
    """
    try:
        show_pages(generators)
    except Exception:
        print('-----', file=sys.stderr)
        traceback.print_exc()
        raise  # propagating the error stops the build
def register():
    # Pelican plugin entry point: run tb_finalized() once every generator
    # has finished, i.e. when the full set of pages is known.
    pelican.plugins.signals.all_generators_finalized.connect(tb_finalized)
| [
"traceback.print_exc",
"os.path.split"
] | [((2983, 3002), 'os.path.split', 'os.path.split', (['p[0]'], {}), '(p[0])\n', (2996, 3002), False, 'import os\n'), ((5090, 5111), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (5109, 5111), False, 'import traceback\n'), ((2593, 2615), 'os.path.split', 'os.path.split', (['save_as'], {}), '(save_as)\n', (2606, 2615), False, 'import os\n')] |
'''
phone_communication_backup_coalescer
Copyright 2016, <NAME>
Licensed under MIT.
'''
import logging
import sys
import cli
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s:%(message)s')
def main():
    """CLI entry point: forward the command-line arguments to cli.run()."""
    argv = sys.argv[1:]
    cli.run(argv)


if __name__ == "__main__":
    main()
| [
"logging.basicConfig",
"cli.run"
] | [((128, 220), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s %(levelname)s:%(message)s"""'}), "(level=logging.INFO, format=\n '%(asctime)s %(levelname)s:%(message)s')\n", (147, 220), False, 'import logging\n'), ((234, 255), 'cli.run', 'cli.run', (['sys.argv[1:]'], {}), '(sys.argv[1:])\n', (241, 255), False, 'import cli\n')] |
"""
*****************************************************************
Licensed Materials - Property of IBM
(C) Copyright IBM Corp. 2020. All Rights Reserved.
US Government Users Restricted Rights - Use, duplication or
disclosure restricted by GSA ADP Schedule Contract with IBM Corp.
*****************************************************************
"""
import os
import sys
try:
import conda_build.metadata
except ImportError as error:
print("Cannot find `conda_build`, please see https://github.com/open-ce/open-ce#requirements"
" for a list of requirements.")
sys.exit(1)
def _validate_config_file(env_file, variants):
    '''Perform some validation on the environment file after loading it.

    Returns the loaded conda-build MetaData on success, or None after
    printing the problem to stderr.
    '''
    allowed_keys = {'imported_envs', 'channels', 'packages', 'git_tag_for_env', 'git_tag'}
    try:
        meta_obj = conda_build.metadata.MetaData(env_file, variant=variants)
        keys = meta_obj.meta.keys()
        # Every env file must either list packages or import another file.
        if "packages" not in keys and "imported_envs" not in keys:
            raise Exception("Content Error!",
                            "An environment file needs to specify packages or "
                            "import another environment file.")
        unknown = [key for key in keys if key not in allowed_keys]
        if unknown:
            raise Exception("Key Error!", unknown[0] + " is not a valid key in the environment file.")
        return meta_obj
    except (Exception, SystemExit) as exc:  #pylint: disable=broad-except
        print('***** Error in %s:\n %s' % (env_file, exc), file=sys.stderr)
        return None
def load_env_config_files(config_files, variants):
    '''
    Load all of the environment config files, plus any that come from "imported_envs"
    within an environment config file.

    :param config_files: iterable of environment config file paths.
    :param variants: conda-build variant dict forwarded to the validator.
    :returns: tuple ``(retval, env_config_data_list)`` where retval is 0 on
        success or 1 if any file failed validation, and the list holds the
        rendered env dicts with imported files appearing *before* their
        importer (dependency-first order).
    '''
    env_config_files = [os.path.abspath(e) for e in config_files]
    env_config_data_list = []
    loaded_files = []
    retval = 0
    # Work-queue loop: the head of env_config_files is the file currently
    # being processed; imports are pushed in front of it so they load first.
    while env_config_files:
        # Load the environment config files using conda-build's API. This will allow for the
        # filtering of text using selectors and jinja2 functions
        meta_obj = _validate_config_file(env_config_files[0], variants)
        if meta_obj is None:
            # Validation failed: remember the failure but keep processing
            # the remaining files so all errors are reported in one run.
            retval = 1
            loaded_files += [env_config_files[0]]
            env_config_files.pop(0)
            continue
        env = meta_obj.get_rendered_recipe_text()
        # Examine all of the imported_envs items and determine if they still need to be loaded.
        new_config_files = []
        for imported_env in env.get('imported_envs', []):
            imported_env = os.path.expanduser(imported_env)
            if not os.path.isabs(imported_env):
                # Relative imports are resolved against the importing file.
                imported_env = os.path.join(os.path.dirname(env_config_files[0]), imported_env)
            if not imported_env in env_config_files and not imported_env in loaded_files:
                new_config_files += [imported_env]
        # If there are new files to load, add them to the env_conf_files list.
        # Otherwise, remove the current file from the env_conf_files list and
        # add its data to the env_config_data_list.
        if new_config_files:
            env_config_files = new_config_files + env_config_files
        else:
            env_config_data_list += [env]
            loaded_files += [env_config_files[0]]
            env_config_files.pop(0)
    return retval, env_config_data_list
| [
"os.path.isabs",
"os.path.dirname",
"sys.exit",
"os.path.abspath",
"os.path.expanduser"
] | [((587, 598), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (595, 598), False, 'import sys\n'), ((1780, 1798), 'os.path.abspath', 'os.path.abspath', (['e'], {}), '(e)\n', (1795, 1798), False, 'import os\n'), ((2568, 2600), 'os.path.expanduser', 'os.path.expanduser', (['imported_env'], {}), '(imported_env)\n', (2586, 2600), False, 'import os\n'), ((2620, 2647), 'os.path.isabs', 'os.path.isabs', (['imported_env'], {}), '(imported_env)\n', (2633, 2647), False, 'import os\n'), ((2693, 2729), 'os.path.dirname', 'os.path.dirname', (['env_config_files[0]'], {}), '(env_config_files[0])\n', (2708, 2729), False, 'import os\n')] |
import logging
import struct
from macholib.MachO import MachO
from macholib.mach_o import *
from .base_executable import *
from .section import *
INJECTION_SEGMENT_NAME = 'INJECT'
INJECTION_SECTION_NAME = 'inject'
class MachOExecutable(BaseExecutable):
    """Mach-O binary wrapper built on macholib with code-injection support.

    Only thin (non-fat) Mach-O files are supported; all header access goes
    through ``self.helper.headers[0]``.
    """
    def __init__(self, file_path):
        super(MachOExecutable, self).__init__(file_path)
        self.helper = MachO(self.fp)
        if self.helper.fat:
            raise Exception('MachO fat binaries are not supported at this time')
        self.architecture = self._identify_arch()
        if self.architecture is None:
            raise Exception('Architecture is not recognized')
        logging.debug('Initialized {} {} with file \'{}\''.format(self.architecture, type(self).__name__, file_path))
        self.pack_endianness = self.helper.headers[0].endian
        # Collect section objects from every LC_SEGMENT/LC_SEGMENT_64 command.
        self.sections = []
        for lc, cmd, data in self.helper.headers[0].commands:
            if lc.cmd in (LC_SEGMENT, LC_SEGMENT_64):
                for section in data:
                    self.sections.append(section_from_macho_section(section, cmd))
        # First segment whose initial protection has the execute bit (0x4) set.
        self.executable_segment = [cmd for lc, cmd, _ in self.helper.headers[0].commands
                                   if lc.cmd in (LC_SEGMENT, LC_SEGMENT_64) and cmd.initprot & 0x4][0]
        self.libraries = [fp.rstrip('\x00') for lc, cmd, fp in self.helper.headers[0].commands if lc.cmd == LC_LOAD_DYLIB]
    def _identify_arch(self):
        # Map the Mach-O cputype field to the project's ARCHITECTURE enum.
        if self.helper.headers[0].header.cputype == 0x7:
            return ARCHITECTURE.X86
        elif self.helper.headers[0].header.cputype == 0x01000007:
            return ARCHITECTURE.X86_64
        elif self.helper.headers[0].header.cputype == 0xc:
            return ARCHITECTURE.ARM
        elif self.helper.headers[0].header.cputype == 0x0100000c:
            return ARCHITECTURE.ARM_64
        else:
            return None
    def executable_segment_vaddr(self):
        # Virtual address where the executable segment is mapped.
        return self.executable_segment.vmaddr
    def executable_segment_size(self):
        # Virtual size of the executable segment.
        return self.executable_segment.vmsize
    def entry_point(self):
        # Entry offset from the LC_MAIN load command; None if absent
        # (e.g. older binaries using LC_UNIXTHREAD — not handled here).
        for lc, cmd, _ in self.helper.headers[0].commands:
            if lc.cmd == LC_MAIN:
                return cmd.entryoff
        return
    def _extract_symbol_table(self):
        """Populate self.functions from the static and dynamic symbol tables."""
        ordered_symbols = []
        symtab_command = self.helper.headers[0].getSymbolTableCommand()
        if symtab_command:
            # Read the string table first, then walk the nlist entries.
            self.binary.seek(symtab_command.stroff)
            symbol_strings = self.binary.read(symtab_command.strsize)
            self.binary.seek(symtab_command.symoff)
            for i in range(symtab_command.nsyms):
                if self.is_64_bit():
                    symbol = nlist_64.from_fileobj(self.binary, _endian_=self.pack_endianness)
                else:
                    symbol = nlist.from_fileobj(self.binary, _endian_=self.pack_endianness)
                # n_un is the offset of this symbol's NUL-terminated name
                # inside the string table.
                symbol_name = symbol_strings[symbol.n_un:].split('\x00')[0]
                if symbol.n_type & N_STAB == 0:
                    # Undefined external symbols have no value here; they are
                    # resolved via the indirect symbol table below.
                    is_ext = symbol.n_type & N_EXT and symbol.n_value == 0
                    # Ignore Apple's hack for radar bug 5614542
                    if not is_ext and symbol_name != 'radr://5614542':
                        size = 0
                        logging.debug('Adding function {} from the symtab at vaddr {} with size {}'
                                      .format(symbol_name, hex(symbol.n_value), hex(size)))
                        f = Function(symbol.n_value, size, symbol_name, self)
                        self.functions[symbol.n_value] = f
                ordered_symbols.append(symbol_name)
        dysymtab_command = self.helper.headers[0].getDynamicSymbolTableCommand()
        if dysymtab_command:
            # The indirect symbol table is an array of 32-bit indices into
            # the symbol table collected above (ordered_symbols).
            self.binary.seek(dysymtab_command.indirectsymoff)
            indirect_symbols = self.binary.read(dysymtab_command.nindirectsyms*4)
            sym_offsets = struct.unpack(self.pack_endianness + 'I'*dysymtab_command.nindirectsyms, indirect_symbols)
            for lc, cmd, sections in self.helper.headers[0].commands:
                if lc.cmd in (LC_SEGMENT, LC_SEGMENT_64) and cmd.initprot & 0x4:
                    for section in sections:
                        if section.flags & S_NON_LAZY_SYMBOL_POINTERS == S_NON_LAZY_SYMBOL_POINTERS \
                                or section.flags & S_LAZY_SYMBOL_POINTERS == S_LAZY_SYMBOL_POINTERS \
                                or section.flags & S_SYMBOL_STUBS == S_SYMBOL_STUBS:
                            logging.debug('Parsing dynamic entries in {}.{}'.format(section.segname, section.sectname))
                            if section.flags & S_SYMBOL_STUBS:
                                # Stub sections record their entry size in reserved2.
                                stride = section.reserved2
                            else:
                                stride = (64 if self.is_64_bit() else 32)
                            count = section.size / stride
                            # NOTE(review): `count` is a float under Python 3
                            # (true division); this loop assumes Python 2
                            # semantics — confirm before porting.
                            for i in range(count):
                                addr = self.executable_segment.vmaddr + section.offset + (i * stride)
                                # reserved1 is this section's starting index
                                # into the indirect symbol table.
                                idx = sym_offsets[i + section.reserved1]
                                if idx == 0x40000000:
                                    symbol_name = "INDIRECT_SYMBOL_ABS"
                                elif idx == 0x80000000:
                                    symbol_name = "INDIRECT_SYMBOL_LOCAL"
                                else:
                                    symbol_name = ordered_symbols[idx]
                                logging.debug('Adding function {} from the dynamic symtab at vaddr {} with size {}'
                                              .format(symbol_name, hex(addr), hex(stride)))
                                f = Function(addr, stride, symbol_name, self, type=Function.DYNAMIC_FUNC)
                                self.functions[addr] = f
    def iter_string_sections(self):
        # Yield only the sections conventionally holding string data.
        STRING_SECTIONS = ['__const', '__cstring', '__objc_methname', '__objc_classname']
        for s in self.sections:
            if s.name in STRING_SECTIONS:
                yield s
    def prepare_for_injection(self):
        """Append a new INJECT segment (with one section) to the load commands.

        Returns the new segment command; inject() writes code into it.
        """
        # Total size of the stuff we're going to be adding in the middle of the binary
        offset = 72+80 if self.is_64_bit() else 56+68 # 1 segment header + 1 section header
        # Place the new segment's file data at the next page boundary past EOF.
        fileoff = (self.binary.len & ~0xfff) + 0x1000
        vmaddr = self.function_named('__mh_execute_header').address + fileoff
        logging.debug('Creating new MachOSegment at vaddr {}'.format(hex(vmaddr)))
        new_segment = segment_command_64() if self.is_64_bit() else segment_command()
        new_segment._endian_ = self.pack_endianness
        new_segment.segname = INJECTION_SEGMENT_NAME
        new_segment.fileoff = fileoff
        new_segment.filesize = 0
        new_segment.vmaddr = vmaddr
        new_segment.vmsize = 0x1000
        new_segment.maxprot = 0x7 #RWX
        new_segment.initprot = 0x5 # RX
        new_segment.flags = 0
        new_segment.nsects = 1
        logging.debug('Creating new MachOSection at vaddr {}'.format(hex(vmaddr)))
        new_section = section_64() if self.is_64_bit() else section()
        new_section._endian_ = self.pack_endianness
        new_section.sectname = INJECTION_SECTION_NAME
        new_section.segname = new_segment.segname
        new_section.addr = new_segment.vmaddr
        new_section.size = 0
        new_section.offset = new_segment.fileoff
        new_section.align = 4
        new_section.flags = 0x80000400
        lc = load_command()
        lc._endian_ = self.pack_endianness
        lc.cmd = LC_SEGMENT_64 if self.is_64_bit() else LC_SEGMENT
        lc.cmdsize = offset
        self.helper.headers[0].commands.append((lc, new_segment, [new_section]))
        self.helper.headers[0].header.ncmds += 1
        self.helper.headers[0].header.sizeofcmds += offset
        return new_segment
    def inject(self, asm, update_entry=False):
        """Write `asm` into the INJECT segment and return its virtual address.

        If update_entry is True, the LC_MAIN entry offset is redirected to
        the injected code.
        """
        found = [s for lc,s,_ in self.helper.headers[0].commands if lc.cmd in (LC_SEGMENT, LC_SEGMENT_64) and s.segname == INJECTION_SEGMENT_NAME]
        if found:
            injection_vaddr = found[0].vmaddr
        else:
            logging.warning(
                'prepare_for_injection() was not called before inject(). This may cause unexpected behavior')
            inject_seg = self.prepare_for_injection()
            injection_vaddr = inject_seg.vmaddr
        if update_entry:
            for lc, cmd, _ in self.helper.headers[0].commands:
                if lc.cmd == LC_MAIN:
                    cmd.entryoff = injection_vaddr
                    break
        self.binary.seek(0)
        for lc, segment, sections in self.helper.headers[0].commands:
            if lc.cmd in (LC_SEGMENT, LC_SEGMENT_64) and segment.segname == INJECTION_SEGMENT_NAME:
                # Grow the segment (and its section) to cover the new code;
                # bump vmsize a page at a time as needed.
                injection_offset = segment.fileoff + segment.filesize
                segment.filesize += len(asm)
                if segment.filesize + len(asm) > segment.vmsize:
                    segment.vmsize += 0x1000
                for section in sections:
                    if section.sectname == INJECTION_SECTION_NAME:
                        section.size += len(asm)
                        self.next_injection_vaddr = section.addr + section.size
        # Rewrite the (modified) headers, then append the code itself.
        self.helper.headers[0].write(self.binary)
        self.binary.seek(injection_offset)
        self.binary.write(asm)
        return injection_vaddr
| [
"macholib.MachO.MachO",
"struct.unpack",
"logging.warning"
] | [((372, 386), 'macholib.MachO.MachO', 'MachO', (['self.fp'], {}), '(self.fp)\n', (377, 386), False, 'from macholib.MachO import MachO\n'), ((3897, 3993), 'struct.unpack', 'struct.unpack', (["(self.pack_endianness + 'I' * dysymtab_command.nindirectsyms)", 'indirect_symbols'], {}), "(self.pack_endianness + 'I' * dysymtab_command.nindirectsyms,\n indirect_symbols)\n", (3910, 3993), False, 'import struct\n'), ((8149, 8268), 'logging.warning', 'logging.warning', (['"""prepare_for_injection() was not called before inject(). This may cause unexpected behavior"""'], {}), "(\n 'prepare_for_injection() was not called before inject(). This may cause unexpected behavior'\n )\n", (8164, 8268), False, 'import logging\n')] |
# Somefun, <NAME>
# <EMAIL>
# EEE/CPE Dept. FUTA
# (c) 2018
#
# Number Theory Computing
# Recursive Patterns in Powers
# Integer Powers of any real number
# param n: base number, n
# param r: exponent number, r
# return: power, the result of the operation
import time
import sys
sys.setrecursionlimit(1500)
class SPRS:
    """Somefun's Power Recursive Series: integer powers n**r of a real n.

    Three equivalent evaluation strategies are exposed, all memoised in
    ``self.memo`` (exponent -> n**exponent):

    * :meth:`valr`  - linear recursion  n**k = n**(k-1) * n
    * :meth:`valr2` - repeated multiplication by the memoised square
    * :meth:`vals`  - geometric-series identity
      n**r = n**2 * (1 + (n - 1) * sum_{j=0}^{r-3} n**j)
    """

    def __init__(self, n, r=2):
        """Store base ``n`` and exponent ``r`` and seed the memo table."""
        self.n = n
        self.r = r
        self.memo = {0: 1, 1: n}
        # Somefun's Square Method: n**2 == (n - 1) * (n + 1) + 1, written
        # here as a * c + 1 with a = n - 1 and c = 3 + (n - 2) = n + 1.
        self.a = self.n - 1
        self.c = 3 + (self.n - 2)
        self.memo[2] = (self.a * self.c) + 1

    def valr(self):
        """Return n**r via the recursion n**k = n**(k-1) * n."""
        memo = self.memo
        r = self.r
        if r in memo:
            return memo[r]

        def pp_series(k):
            # Recursively fill the memo down to the nearest known exponent.
            if k in memo:
                return memo[k]
            memo[k] = pp_series(k - 1) * memo[1]
            return memo[k]

        return pp_series(r)

    def valr2(self):
        """Return n**r by multiplying out powers of the memoised square.

        Odd r:  n * (n**2)**((r-1)/2);  even r:  (n**2)**(r/2).
        """
        memo = self.memo
        r = self.r
        if r not in memo:
            if r % 2 != 0:
                p = (r - 1) // 2
                memo[r] = memo[1]
                for _ in range(p):
                    memo[r] = memo[r] * memo[2]
            else:
                # BUG FIX: the original looped range(p) starting from n**2,
                # producing n**(r + 2) for every even r > 2; only p - 1
                # additional factors of n**2 are required.
                p = r // 2
                memo[r] = memo[2]
                for _ in range(p - 1):
                    memo[r] = memo[r] * memo[2]
        return memo[r]

    def vals(self):
        """Return n**r via n**r = n**2 * (1 + (n - 1) * sum n**j), j < r-2."""
        memo = self.memo
        r = self.r
        a = self.a
        if r not in memo:
            sums = 0
            for j in range(r - 2):
                if j not in memo:
                    # Same identity one level down builds n**j from the
                    # strictly smaller powers already memoised.
                    isums = 0
                    for o in range(j - 2):
                        isums += memo[o]
                    memo[j] = memo[2] * (1 + (a * isums))
                sums += memo[j]
            memo[r] = memo[2] * (1 + (a * sums))
        return memo[r]

    @property
    def map_mem(self):
        """The exponent -> power memoisation table built so far."""
        return self.memo
if __name__ == '__main__':
    # Benchmark the three SPRS strategies against the builtin ** operator.
    # BUG FIX: time.clock() was deprecated in Python 3.3 and removed in
    # 3.8; time.perf_counter() is the documented replacement.
    ts = time.perf_counter()
    for index in range(64):
        psr = SPRS(2, 7)
    tf = time.perf_counter()
    tfs0 = (tf - ts) / 64  # mean construction cost
    print('Time: ', tfs0, 'secs')
    #
    t = 100
    ##
    psr1 = SPRS(2, 7)
    print(psr1.valr())
    print('algo1')
    ts = time.perf_counter()
    for index in range(t):
        SPRS(2, t).valr()
    tf = time.perf_counter()
    tfs1 = (tf - ts) / t
    print('Time: ', tfs1, 'secs')
    print(psr1.memo)
    ##
    psr2 = SPRS(2, 7)
    print(psr2.valr2())
    print('algo2')
    ts = time.perf_counter()
    for index in range(t):
        SPRS(2, t).valr2()
    tf = time.perf_counter()
    tfs2 = (tf - ts) / t
    print('Time: ', tfs2, 'secs')
    print(psr2.memo)
    ##
    psr3 = SPRS(2, 7)
    print(psr3.vals())
    print('algo3')
    ts = time.perf_counter()
    for index in range(t):
        SPRS(2, t).vals()
    tf = time.perf_counter()
    tfs3 = (tf - ts) / t
    print('Time: ', tfs3, 'secs')
    print(psr3.memo)
    #
    print('inbuilt')
    ts = time.perf_counter()
    for index in range(t):
        psi = 2 ** t
    tf = time.perf_counter()
    tfs4 = (tf - ts) / t
    print('Time: ', tfs4, 'secs')
    #
    # Relative speed of each algorithm against the fastest of the three.
    mintfs = min(tfs1, tfs2, tfs3)
    print('Min:', mintfs)
    relspeed = [tfs0/mintfs, tfs1/mintfs, tfs2/mintfs, tfs3/mintfs, mintfs/tfs4]
    print(relspeed)
| [
"sys.setrecursionlimit",
"time.clock"
] | [((281, 308), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(1500)'], {}), '(1500)\n', (302, 308), False, 'import sys\n'), ((2831, 2843), 'time.clock', 'time.clock', ([], {}), '()\n', (2841, 2843), False, 'import time\n'), ((2906, 2918), 'time.clock', 'time.clock', ([], {}), '()\n', (2916, 2918), False, 'import time\n'), ((3078, 3090), 'time.clock', 'time.clock', ([], {}), '()\n', (3088, 3090), False, 'import time\n'), ((3153, 3165), 'time.clock', 'time.clock', ([], {}), '()\n', (3163, 3165), False, 'import time\n'), ((3330, 3342), 'time.clock', 'time.clock', ([], {}), '()\n', (3340, 3342), False, 'import time\n'), ((3406, 3418), 'time.clock', 'time.clock', ([], {}), '()\n', (3416, 3418), False, 'import time\n'), ((3581, 3593), 'time.clock', 'time.clock', ([], {}), '()\n', (3591, 3593), False, 'import time\n'), ((3656, 3668), 'time.clock', 'time.clock', ([], {}), '()\n', (3666, 3668), False, 'import time\n'), ((3786, 3798), 'time.clock', 'time.clock', ([], {}), '()\n', (3796, 3798), False, 'import time\n'), ((3856, 3868), 'time.clock', 'time.clock', ([], {}), '()\n', (3866, 3868), False, 'import time\n')] |
from django.contrib import admin
from .models import UserDetail
# Make UserDetail records visible and editable in the Django admin site.
admin.site.register(UserDetail)
"django.contrib.admin.site.register"
] | [((94, 125), 'django.contrib.admin.site.register', 'admin.site.register', (['UserDetail'], {}), '(UserDetail)\n', (113, 125), False, 'from django.contrib import admin\n')] |
from django.conf.urls import url
from organizers import views
# URL routes for the organizers app: application review/ranking, listing
# and export, and invite/waitlist management (per-user and per-team).
urlpatterns = [
    url(r'^review/$', views.ReviewApplicationView.as_view(), name='review'),
    url(r'^ranking/$', views.RankingView.as_view(), name='ranking'),
    url(r'^(?P<id>[\w-]+)$', views.ApplicationDetailView.as_view(), name="app_detail"),
    url(r'^all/$', views.ApplicationsListView.as_view(), name="app_list"),
    url(r'^export/$', views.ApplicationsExportView.as_view(), name="export"),
    url(r'^invite/$', views.InviteListView.as_view(), name="invite_list"),
    url(r'^waitlist/$', views.WaitlistListView.as_view(), name="waitlist_list"),
    url(r'^invite/teams/$', views.InviteTeamListView.as_view(), name="invite_teams_list"),
    url(r'^waitlist/teams/$', views.WaitlistTeamListView.as_view(), name="waitlist_teams_list"),
    # Function-based view (not a CBV) that recomputes application votes.
    url(r'^recalculate/votes/$', views.recalc, name="recalc_votes"),
]
| [
"organizers.views.ApplicationDetailView.as_view",
"django.conf.urls.url",
"organizers.views.ApplicationsExportView.as_view",
"organizers.views.RankingView.as_view",
"organizers.views.InviteListView.as_view",
"organizers.views.ReviewApplicationView.as_view",
"organizers.views.WaitlistListView.as_view",
... | [((815, 877), 'django.conf.urls.url', 'url', (['"""^recalculate/votes/$"""', 'views.recalc'], {'name': '"""recalc_votes"""'}), "('^recalculate/votes/$', views.recalc, name='recalc_votes')\n", (818, 877), False, 'from django.conf.urls import url\n'), ((102, 139), 'organizers.views.ReviewApplicationView.as_view', 'views.ReviewApplicationView.as_view', ([], {}), '()\n', (137, 139), False, 'from organizers import views\n'), ((180, 207), 'organizers.views.RankingView.as_view', 'views.RankingView.as_view', ([], {}), '()\n', (205, 207), False, 'from organizers import views\n'), ((255, 292), 'organizers.views.ApplicationDetailView.as_view', 'views.ApplicationDetailView.as_view', ([], {}), '()\n', (290, 292), False, 'from organizers import views\n'), ((333, 369), 'organizers.views.ApplicationsListView.as_view', 'views.ApplicationsListView.as_view', ([], {}), '()\n', (367, 369), False, 'from organizers import views\n'), ((411, 449), 'organizers.views.ApplicationsExportView.as_view', 'views.ApplicationsExportView.as_view', ([], {}), '()\n', (447, 449), False, 'from organizers import views\n'), ((489, 519), 'organizers.views.InviteListView.as_view', 'views.InviteListView.as_view', ([], {}), '()\n', (517, 519), False, 'from organizers import views\n'), ((566, 598), 'organizers.views.WaitlistListView.as_view', 'views.WaitlistListView.as_view', ([], {}), '()\n', (596, 598), False, 'from organizers import views\n'), ((651, 685), 'organizers.views.InviteTeamListView.as_view', 'views.InviteTeamListView.as_view', ([], {}), '()\n', (683, 685), False, 'from organizers import views\n'), ((744, 780), 'organizers.views.WaitlistTeamListView.as_view', 'views.WaitlistTeamListView.as_view', ([], {}), '()\n', (778, 780), False, 'from organizers import views\n')] |
import urllib.request
import os
import json
from html.parser import HTMLParser
class MLStripper(HTMLParser):
    """HTML parser that discards all markup and keeps only the text content."""

    def __init__(self):
        super().__init__()
        self.reset()
        self.fragments = []

    def handle_data(self, chunk):
        # HTMLParser callback: invoked once per run of character data.
        self.fragments.append(chunk)

    def get_data(self):
        """Return all text seen so far, concatenated."""
        return ''.join(self.fragments)
def strip_tags(html):
    """Return *html* with all markup removed, keeping only the text."""
    stripper = MLStripper()
    stripper.feed(html)
    return stripper.get_data()
# Download all module.json files from the list.
# FIX: close the listing file (context manager) instead of leaking the handle.
with open('modulelist.txt') as listing:
    lines = listing.readlines()
result = []
names = set()  # lower-cased names seen so far, for duplicate detection
for url in lines:
    try:
        myreq = urllib.request.urlopen(url)
        mydata = myreq.read()
        module = json.loads(mydata.decode('utf-8'))
        if 'name' not in module:
            # Nameless modules are reported and skipped entirely.
            print('No name specified:', url)
        else:
            if module['name'].lower() in names:
                print('WARNING: Duplicate module name:', url)
            names.add(module['name'].lower())
            # Upgrade legacy fields
            if 'options' in module:
                if 'niceName' in module['options']:
                    module['options']['cliName'] = module['options']['niceName']
                    del module['options']['niceName']
            if 'category' in module:
                # Legacy single 'category' becomes part of 'keywords'.
                if 'keywords' not in module:
                    module['keywords'] = [module['category']]
                else:
                    module['keywords'].append(module['category'])
                del module['category']
            else:
                if 'keywords' not in module:
                    module['keywords'] = ['network']
            result.append(module)
    # FIX: a bare `except:` also swallowed KeyboardInterrupt/SystemExit;
    # catch Exception so fetch/parse failures are reported but Ctrl-C works.
    except Exception:
        print(url)
# Sort by name
def guiname(x):
    """Best human-readable name for a module entry, with HTML tags stripped.

    Preference order: options.guiName, then options.cliName, then name.
    """
    if 'options' in x:
        opts = x['options']
        if 'guiName' in opts:
            return strip_tags(opts['guiName'])
        if 'cliName' in opts:
            return strip_tags(opts['cliName'])
    return x['name']
# Order the catalogue case-insensitively by display name before writing.
result = sorted(result, key = lambda x: guiname(x).lower())
with open('modulelist.json', 'w') as fh:
    json.dump(result, fh)
# Summary counts; every accepted module was given a 'keywords' list above.
print(len(result), 'mods listed!')
print(sum(1 for x in result if 'network' in x['keywords']), 'network mods listed!')
print(sum(1 for x in result if 'client' in x['keywords']), 'client mods listed!')
| [
"json.dump"
] | [((1984, 2005), 'json.dump', 'json.dump', (['result', 'fh'], {}), '(result, fh)\n', (1993, 2005), False, 'import json\n')] |
#!/usr/bin/env python3
# Platform-independent `mkdir`
import argparse
import pathlib
import sys
def main():
    """Create every directory named on the command line; return 0 on success."""
    arg_parser = argparse.ArgumentParser(description='Platform-independent `mkdir`')
    arg_parser.add_argument('-p', '--parents', action='store_true', required=False, help='no error if existing, make parent directories as needed')
    arg_parser.add_argument('dir', action='store', nargs='+')
    args = arg_parser.parse_args()
    make_parents = args.parents
    for target in args.dir:
        # exist_ok mirrors `mkdir -p` behavior for already-existing paths.
        pathlib.Path(target).mkdir(parents=make_parents, exist_ok=True)
    return 0


if __name__ == '__main__':
    sys.exit(main())
| [
"argparse.ArgumentParser",
"pathlib.Path"
] | [((124, 191), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Platform-independent `mkdir`"""'}), "(description='Platform-independent `mkdir`')\n", (147, 191), False, 'import argparse\n'), ((452, 467), 'pathlib.Path', 'pathlib.Path', (['p'], {}), '(p)\n', (464, 467), False, 'import pathlib\n')] |
from __future__ import unicode_literals # isort:skip
from future import standard_library # isort:skip
standard_library.install_aliases() # noqa: E402
from collections import defaultdict
import csv
from datetime import datetime
from io import StringIO
from time import strftime
from flask import (
Blueprint,
Response,
jsonify,
make_response,
render_template,
request,
)
from flask_babel import gettext as _
from flask_user import roles_required
from werkzeug.exceptions import Unauthorized
from ..date_tools import FHIR_datetime
from ..extensions import oauth
from ..models.fhir import bundle_results
from ..models.organization import Organization, OrgTree
from ..models.questionnaire_bank import visit_name
from ..models.qb_status import QB_Status
from ..models.role import ROLE
from ..models.user import current_user, patients_query
from ..models.user_consent import latest_consent
reporting_api = Blueprint('reporting', __name__)
@reporting_api.route('/admin/overdue-table')
@roles_required([ROLE.STAFF.value, ROLE.INTERVENTION_STAFF.value])
@oauth.require_oauth()
def overdue_table(top_org=None):
    """View for admin access to generated email content

    Typically called by scheduled job, expected this view is only
    used for debugging & QA

    :param org_id: Top level organization ID to test
    :returns: html content typically sent directly to site resource

    """
    from ..models.reporting import overdue_stats_by_org

    if not top_org:
        top_org = Organization.query.get_or_404(
            request.args.get('org_id', 0))

    # Use values from ScheduledJob.json - just debugging utility
    # for now.  If made mainstream, pull directly from table.
    cutoffs_by_org_name = {
        "TrueNTH Global Registry": [30, 60, 90],
        "IRONMAN": [7, 14, 21, 30],
    }
    cutoff_days = cutoffs_by_org_name.get(top_org.name, [])

    return generate_overdue_table_html(
        cutoff_days=cutoff_days, overdue_stats=overdue_stats_by_org(),
        user=current_user(), top_org=top_org)
def generate_overdue_table_html(cutoff_days, overdue_stats, user, top_org):
    """Build the overdue-counts HTML table for the given user's view.

    :param cutoff_days: day thresholds defining the column ranges
        (sorted in place - NOTE the caller's list is mutated).
    :param overdue_stats: mapping of (org_id, org_name) ->
        iterable of (days_overdue, user_id) pairs.
    :param user: viewer; rows are limited to orgs the user can access.
    :param top_org: optional top-level org restricting the rows shown.
    :returns: rendered 'site_overdue_table.html' content.
    """
    cutoff_days.sort()
    # Column headers, e.g. "1-30", "31-60", ...
    day_ranges = []
    curr_min = 0
    for cd in cutoff_days:
        day_ranges.append("{}-{}".format(curr_min + 1, cd))
        curr_min = cd
    ot = OrgTree()
    rows = []
    totals = defaultdict(int)
    # Iterate orgs alphabetically by name (sort key is the tuple's name).
    for org_id, org_name in sorted(overdue_stats, key=lambda x: x[1]):
        if top_org and not ot.at_or_below_ids(top_org.id, [org_id]):
            continue
        # Skip orgs outside the viewing user's organization tree.
        user_accessible = False
        for user_org in user.organizations:
            if ot.at_or_below_ids(user_org.id, [org_id]):
                user_accessible = True
                break
        if not user_accessible:
            continue
        counts = overdue_stats[(org_id, org_name)]
        org_row = [org_name]
        source_row = [org_name+'[user_ids]']
        curr_min = 0
        row_total = 0
        # One cell per (curr_min, cd] range; uids collected for the
        # optional debug row below.
        for cd in cutoff_days:
            uids = []
            for days_overdue, user_id in counts:
                if days_overdue > curr_min and days_overdue <= cd:
                    uids.append(user_id)
            count = len(
                [i for i, uid in counts if ((i > curr_min) and (i <= cd))])
            org_row.append(count)
            source_row.append(uids)
            totals[cd] += count
            row_total += count
            curr_min = cd
        org_row.append(row_total)
        rows.append(org_row)
        # Uncomment the following row to display user ids behind numbers
        # rows.append(source_row)
    # Final summary row across all displayed orgs.
    totalrow = [_("TOTAL")]
    row_total = 0
    for cd in cutoff_days:
        totalrow.append(totals[cd])
        row_total += totals[cd]
    totalrow.append(row_total)
    rows.append(totalrow)
    return render_template(
        'site_overdue_table.html', ranges=day_ranges, rows=rows)
@reporting_api.route('/admin/overdue-numbers')
@roles_required(
    [ROLE.ADMIN.value, ROLE.STAFF.value, ROLE.INTERVENTION_STAFF.value])
@oauth.require_oauth()
def generate_numbers():
    """Produce a CSV download of overdue questionnaire numbers per patient."""
    def overdue(qstats):
        # Days overdue as of now, or a placeholder when no date is set.
        now = datetime.utcnow()
        overdue = qstats.overdue_date
        if not overdue:
            return "No overdue date"
        return (now - overdue).days
    ot = OrgTree()
    results = StringIO()
    cw = csv.writer(results)
    cw.writerow((
        "User ID", "Email", "Questionnaire Bank", "Status",
        "Days Overdue", "Organization"))
    for user in patients_query(
            acting_user=current_user(), include_test_role=False):
        a_s = QB_Status(user, as_of_date=datetime.utcnow())
        # NOTE(review): encode() yields bytes; under Python 3 csv will
        # write its repr (b'...') - confirm intended output.
        email = (
            user.email.encode('ascii', 'ignore') if user.email else None)
        od = overdue(a_s)
        qb = a_s.current_qbd().questionnaire_bank.name
        # One CSV row per (patient, organization) pairing, with the
        # org shown as "top-level: org" when a parent exists.
        for org in user.organizations:
            top = ot.find_top_level_orgs([org], first=True)
            org_name = "{}: {}".format(
                top.name, org.name) if top else org.name
            cw.writerow((
                user.id, email, qb, a_s.overall_status, od, org_name))
    # Timestamped attachment name, served as a CSV download.
    filename = 'overdue-numbers-{}.csv'.format(strftime('%Y_%m_%d-%H_%M'))
    output = make_response(results.getvalue())
    output.headers['Content-Disposition'] = "attachment; filename={}".format(
        filename)
    output.headers['Content-type'] = "text/csv"
    return output
@reporting_api.route('/api/report/questionnaire_status')
@roles_required(
    [ROLE.ADMIN.value, ROLE.STAFF.value, ROLE.INTERVENTION_STAFF.value])
@oauth.require_oauth()
def questionnaire_status():
    """Return ad hoc JSON or CSV listing questionnaire_status

    ---
    tags:
      - Report
      - Questionnaire
    operationId: questionnaire_status
    parameters:
      - name: org_id
        in: query
        description: optional TrueNTH organization ID used to limit results
          to patients belonging to given organization identifier, and given
          organization's child organizations
        required: false
        type: integer
        format: int64
      - name: as_of_date
        in: query
        description: optional query string param to request status at a
          different (UTC) point in time.  Defaults to now
        required: false
        type: string
        format: date-time
      - name: include_test_role
        in: query
        description: optional query string param to add patients with the
          test role to the results.  Excluded by default
        required: false
        type: string
      - name: format
        in: query
        description: expects json or csv, defaults to json if not provided
        required: false
        type: string
    produces:
      - application/json
      - text/csv
    responses:
      200:
        description:
          Returns JSON of the available questionnaire bank status for matching
          set of users
      400:
        description: invalid query parameters
      401:
        description:
          if missing valid OAuth token or if the authorized user lacks
          permission to view requested user_id

    """
    # Status is computed "as of" the requested instant, defaulting to now.
    if request.args.get('as_of_date'):
        as_of_date = FHIR_datetime.parse(request.args.get('as_of_date'))
    else:
        as_of_date = datetime.utcnow()

    # If limited by org - grab org and all it's children as filter list
    org_id = request.args.get('org_id')
    requested_orgs = (
        OrgTree().here_and_below_id(organization_id=org_id) if org_id
        else None)

    # Obtain list of qualifying patients
    acting_user = current_user()
    # NOTE(review): any present value (even "false") is truthy here, so
    # mere presence of the param includes test-role patients - confirm.
    include_test_role = request.args.get('include_test_role', False)
    patients = patients_query(
        acting_user=acting_user,
        include_test_role=include_test_role,
        requested_orgs=requested_orgs)
    results = []
    for patient in patients:
        if len(patient.organizations) == 0:
            # Very unlikely we want to include patients w/o at least
            # one org, skip this patient
            continue
        try:
            acting_user.check_role('view', other_id=patient.id)
        except Unauthorized:
            # simply exclude any patients the user can't view
            continue
        qb_stats = QB_Status(user=patient, as_of_date=as_of_date)
        # Base row shared by the current visit and all historic rows.
        row = {
            'user_id': patient.id,
            'site': patient.organizations[0].name,
            'status': str(qb_stats.overall_status)}

        consent = latest_consent(user=patient)
        if consent:
            row['consent'] = FHIR_datetime.as_fhir(consent.acceptance_date)

        study_id = patient.external_study_id
        if study_id:
            row['study_id'] = study_id

        # if no current, try previous (as current may be expired)
        last_viable = qb_stats.current_qbd() or qb_stats.prev_qbd
        if last_viable:
            row['qb'] = last_viable.questionnaire_bank.name
            row['visit'] = visit_name(last_viable)
        results.append(row)

        # as we require a full history, continue to add rows for each previous
        # visit available
        for qbd, status in qb_stats.older_qbds(last_viable):
            historic = row.copy()
            historic['status'] = status
            historic['qb'] = qbd.questionnaire_bank.name
            historic['visit'] = visit_name(qbd)
            results.append(historic)

    if request.args.get('format', 'json').lower() == 'csv':
        # Stream the rows as CSV, columns in a fixed order; missing keys
        # become empty cells.
        def gen(items):
            desired_order = [
                'user_id', 'study_id', 'status', 'visit', 'site', 'consent']
            yield ','.join(desired_order) + '\n'  # header row
            for i in items:
                yield ','.join(
                    [str(i.get(k, "")) for k in desired_order]) + '\n'

        # default file base title
        base_name = 'Questionnaire-Timeline-Data'
        if org_id:
            base_name = '{}-{}'.format(
                base_name,
                Organization.query.get(org_id).name.replace(' ', '-'))
        filename = '{}-{}.csv'.format(base_name, strftime('%Y_%m_%d-%H_%M'))
        return Response(
            gen(results),
            headers={
                'Content-Disposition': 'attachment;filename={}'.format(
                    filename),
                'Content-type': "text/csv"}
        )
    else:
        return jsonify(bundle_results(elements=results))
| [
"flask.render_template",
"flask.request.args.get",
"datetime.datetime.utcnow",
"flask_user.roles_required",
"csv.writer",
"time.strftime",
"future.standard_library.install_aliases",
"collections.defaultdict",
"flask_babel.gettext",
"io.StringIO",
"flask.Blueprint"
] | [((106, 140), 'future.standard_library.install_aliases', 'standard_library.install_aliases', ([], {}), '()\n', (138, 140), False, 'from future import standard_library\n'), ((933, 965), 'flask.Blueprint', 'Blueprint', (['"""reporting"""', '__name__'], {}), "('reporting', __name__)\n", (942, 965), False, 'from flask import Blueprint, Response, jsonify, make_response, render_template, request\n'), ((1014, 1079), 'flask_user.roles_required', 'roles_required', (['[ROLE.STAFF.value, ROLE.INTERVENTION_STAFF.value]'], {}), '([ROLE.STAFF.value, ROLE.INTERVENTION_STAFF.value])\n', (1028, 1079), False, 'from flask_user import roles_required\n'), ((3934, 4022), 'flask_user.roles_required', 'roles_required', (['[ROLE.ADMIN.value, ROLE.STAFF.value, ROLE.INTERVENTION_STAFF.value]'], {}), '([ROLE.ADMIN.value, ROLE.STAFF.value, ROLE.INTERVENTION_STAFF\n .value])\n', (3948, 4022), False, 'from flask_user import roles_required\n'), ((5427, 5515), 'flask_user.roles_required', 'roles_required', (['[ROLE.ADMIN.value, ROLE.STAFF.value, ROLE.INTERVENTION_STAFF.value]'], {}), '([ROLE.ADMIN.value, ROLE.STAFF.value, ROLE.INTERVENTION_STAFF\n .value])\n', (5441, 5515), False, 'from flask_user import roles_required\n'), ((2356, 2372), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (2367, 2372), False, 'from collections import defaultdict\n'), ((3802, 3874), 'flask.render_template', 'render_template', (['"""site_overdue_table.html"""'], {'ranges': 'day_ranges', 'rows': 'rows'}), "('site_overdue_table.html', ranges=day_ranges, rows=rows)\n", (3817, 3874), False, 'from flask import Blueprint, Response, jsonify, make_response, render_template, request\n'), ((4297, 4307), 'io.StringIO', 'StringIO', ([], {}), '()\n', (4305, 4307), False, 'from io import StringIO\n'), ((4317, 4336), 'csv.writer', 'csv.writer', (['results'], {}), '(results)\n', (4327, 4336), False, 'import csv\n'), ((7102, 7132), 'flask.request.args.get', 'request.args.get', (['"""as_of_date"""'], {}), 
"('as_of_date')\n", (7118, 7132), False, 'from flask import Blueprint, Response, jsonify, make_response, render_template, request\n'), ((7342, 7368), 'flask.request.args.get', 'request.args.get', (['"""org_id"""'], {}), "('org_id')\n", (7358, 7368), False, 'from flask import Blueprint, Response, jsonify, make_response, render_template, request\n'), ((7580, 7624), 'flask.request.args.get', 'request.args.get', (['"""include_test_role"""', '(False)'], {}), "('include_test_role', False)\n", (7596, 7624), False, 'from flask import Blueprint, Response, jsonify, make_response, render_template, request\n'), ((1511, 1540), 'flask.request.args.get', 'request.args.get', (['"""org_id"""', '(0)'], {}), "('org_id', 0)\n", (1527, 1540), False, 'from flask import Blueprint, Response, jsonify, make_response, render_template, request\n'), ((3608, 3618), 'flask_babel.gettext', '_', (['"""TOTAL"""'], {}), "('TOTAL')\n", (3609, 3618), True, 'from flask_babel import gettext as _\n'), ((4110, 4127), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4125, 4127), False, 'from datetime import datetime\n'), ((5130, 5156), 'time.strftime', 'strftime', (['"""%Y_%m_%d-%H_%M"""'], {}), "('%Y_%m_%d-%H_%M')\n", (5138, 5156), False, 'from time import strftime\n'), ((7238, 7255), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (7253, 7255), False, 'from datetime import datetime\n'), ((7175, 7205), 'flask.request.args.get', 'request.args.get', (['"""as_of_date"""'], {}), "('as_of_date')\n", (7191, 7205), False, 'from flask import Blueprint, Response, jsonify, make_response, render_template, request\n'), ((10011, 10037), 'time.strftime', 'strftime', (['"""%Y_%m_%d-%H_%M"""'], {}), "('%Y_%m_%d-%H_%M')\n", (10019, 10037), False, 'from time import strftime\n'), ((4597, 4614), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4612, 4614), False, 'from datetime import datetime\n'), ((9343, 9377), 'flask.request.args.get', 'request.args.get', 
(['"""format"""', '"""json"""'], {}), "('format', 'json')\n", (9359, 9377), False, 'from flask import Blueprint, Response, jsonify, make_response, render_template, request\n')] |
import json
import pytest
from apisports.response import AbstractResponse, ErrorResponse, HttpErrorResponse, Headers
from helpers import MockResponse, assert_response_ok, assert_response_error
def test_invalidjson():
    """A payload that is not valid JSON must yield an ErrorResponse."""
    resp = AbstractResponse.create(None, MockResponse("-"))
    assert type(resp) is ErrorResponse
    assert_response_error(resp)
def test_httperror():
    """A non-2xx HTTP status must produce an HttpErrorResponse."""
    resp = AbstractResponse.create(None, MockResponse('[]', 404))
    assert type(resp) is HttpErrorResponse
    assert_response_error(resp)
def test_reportederror():
    """An API-level errors mapping is surfaced through ErrorResponse.errors."""
    payload = '{"errors": {"random": "error"}}'
    resp = AbstractResponse.create(None, MockResponse(payload, 200))
    assert type(resp) is ErrorResponse
    assert resp.errors == {"random": "error"}
    assert_response_error(resp)
def test_error():
    """A list-shaped errors payload is reported under an "errors" key."""
    payload = '{"errors": ["error"]}'
    resp = AbstractResponse.create(None, MockResponse(payload, 200))
    assert type(resp) is ErrorResponse
    assert resp.errors == {"errors": ["error"]}
    assert_response_error(resp)
@pytest.mark.parametrize("data", [False, None])
def test_error_simple_types(data):
    """Scalar values under "errors" are not treated as real errors."""
    body = '{"errors": %s}' % json.dumps(data)
    resp = AbstractResponse.create(None, MockResponse(body))
    assert resp.errors == {}
    assert_response_ok(resp)
def test_response_properties():
    """text/raw/headers accessors must reflect the wrapped response."""
    body = '{"response": "Test"}'
    raw = MockResponse(body)
    resp = AbstractResponse.create(None, raw)
    assert_response_ok(resp)
    assert resp.raw is raw
    assert resp.text == body
    assert type(resp.headers) is Headers
@pytest.mark.parametrize("text", ["", '{"response": "ok"}'])
def test_response_headers(text):
    """Rate-limit headers are exposed as attributes and via item lookup."""
    raw_headers = {
        "X-RateLimit-Limit": "RateLimit",
        "X-RateLimit-Remaining": "RateRemaining",
        "x-ratelimit-requests-limit": "RequestsLimit",
        "x-ratelimit-requests-remaining": "RequestsRemaining",
        "server": "Server",
    }
    resp = AbstractResponse.create(text, MockResponse("", headers=raw_headers))
    hdrs = resp.headers
    assert type(hdrs) is Headers
    assert hdrs.raw is raw_headers

    # Each well-known header is mirrored onto a dedicated attribute.
    expected_attrs = {
        "rate_limit": "RateLimit",
        "rate_limit_remaining": "RateRemaining",
        "requests_limit": "RequestsLimit",
        "requests_remaining": "RequestsRemaining",
        "server": "Server",
    }
    for attr, value in expected_attrs.items():
        assert getattr(hdrs, attr) == value

    # Raw headers stay reachable through membership tests and indexing.
    for key, value in raw_headers.items():
        assert key in hdrs
        assert hdrs[key] == value

    # Missing headers: not contained, and indexing yields None (no KeyError).
    assert "X-Unknown-Header" not in hdrs
    assert hdrs["X-Unknown-Header"] is None
| [
"helpers.MockResponse",
"apisports.response.AbstractResponse.create",
"json.dumps",
"pytest.mark.parametrize",
"helpers.assert_response_error",
"helpers.assert_response_ok"
] | [((1044, 1090), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""data"""', '[False, None]'], {}), "('data', [False, None])\n", (1067, 1090), False, 'import pytest\n'), ((1648, 1707), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text"""', '[\'\', \'{"response": "ok"}\']'], {}), '(\'text\', [\'\', \'{"response": "ok"}\'])\n', (1671, 1707), False, 'import pytest\n'), ((289, 320), 'helpers.assert_response_error', 'assert_response_error', (['response'], {}), '(response)\n', (310, 320), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((463, 494), 'helpers.assert_response_error', 'assert_response_error', (['response'], {}), '(response)\n', (484, 494), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((674, 705), 'helpers.assert_response_error', 'assert_response_error', (['response'], {}), '(response)\n', (695, 705), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((913, 944), 'helpers.assert_response_error', 'assert_response_error', (['response'], {}), '(response)\n', (934, 944), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((1263, 1291), 'helpers.assert_response_ok', 'assert_response_ok', (['response'], {}), '(response)\n', (1281, 1291), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((1413, 1431), 'helpers.MockResponse', 'MockResponse', (['text'], {}), '(text)\n', (1425, 1431), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((1447, 1491), 'apisports.response.AbstractResponse.create', 'AbstractResponse.create', (['None', 'mock_response'], {}), '(None, mock_response)\n', (1470, 1491), False, 'from apisports.response import AbstractResponse, ErrorResponse, HttpErrorResponse, Headers\n'), ((1497, 1525), 'helpers.assert_response_ok', 'assert_response_ok', (['response'], {}), '(response)\n', (1515, 
1525), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((2031, 2064), 'helpers.MockResponse', 'MockResponse', (['""""""'], {'headers': 'headers'}), "('', headers=headers)\n", (2043, 2064), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((2080, 2124), 'apisports.response.AbstractResponse.create', 'AbstractResponse.create', (['text', 'mock_response'], {}), '(text, mock_response)\n', (2103, 2124), False, 'from apisports.response import AbstractResponse, ErrorResponse, HttpErrorResponse, Headers\n'), ((265, 282), 'helpers.MockResponse', 'MockResponse', (['"""-"""'], {}), "('-')\n", (277, 282), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((433, 456), 'helpers.MockResponse', 'MockResponse', (['"""[]"""', '(404)'], {}), "('[]', 404)\n", (445, 456), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((615, 667), 'helpers.MockResponse', 'MockResponse', (['"""{"errors": {"random": "error"}}"""', '(200)'], {}), '(\'{"errors": {"random": "error"}}\', 200)\n', (627, 667), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((864, 906), 'helpers.MockResponse', 'MockResponse', (['"""{"errors": ["error"]}"""', '(200)'], {}), '(\'{"errors": ["error"]}\', 200)\n', (876, 906), False, 'from helpers import MockResponse, assert_response_ok, assert_response_error\n'), ((1232, 1248), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1242, 1248), False, 'import json\n')] |
"""
-- For Development Purposes --
Serves all all api challenges and training materials
Look out for auth clashes and multiple cookies
"""
import uvicorn
from fastapi import FastAPI
from rest_introduction_app.api.api import api_router
app = FastAPI(
title='Testing HTTP responses',
description="Application to show different HTTP responses for learning testing purpose",
version="0.1",
docs_url="/",
redoc_url=None
)
app.include_router(api_router, prefix='/api')
# if __name__ == "__main__":
# uvicorn.run(app, host="0.0.0.0", port=8080)
| [
"fastapi.FastAPI"
] | [((243, 426), 'fastapi.FastAPI', 'FastAPI', ([], {'title': '"""Testing HTTP responses"""', 'description': '"""Application to show different HTTP responses for learning testing purpose"""', 'version': '"""0.1"""', 'docs_url': '"""/"""', 'redoc_url': 'None'}), "(title='Testing HTTP responses', description=\n 'Application to show different HTTP responses for learning testing purpose'\n , version='0.1', docs_url='/', redoc_url=None)\n", (250, 426), False, 'from fastapi import FastAPI\n')] |
import tensorflow as tf
from tensorflow.contrib import slim
import numpy as np
import os
import time
from utils import kde
from ops import *
tf.reset_default_graph()
os.environ['CUDA_VISIBLE_DEVICES'] = '6'

# Hyper-parameters
learning_rate = 1e-3
reg_param = 10.
batch_size = 128
x_dim = 2
z_dim = 2
sigma = 0.7
mu = 2
method = 'jare'  # one of ['conopt', 'simgd', 'simregg', 'simregd', 'jare']
divergence = 'JS'  # one of ['standard', 'JS', 'indicator', 'wgan']
opt_type = 'sgd'  # one of ['sgd', 'rmsprop', 'adam']

# Output directories for KDE plots (outdir) and TensorBoard summaries
# (sumdir), grouped by date and run configuration. The run tag was
# previously duplicated verbatim for both paths; build it once instead.
run_tag = '{}_{}_bs{}_std{}_reg{}_lr{}_{}_mu{}'.format(method, divergence, batch_size, sigma,
                                                       reg_param, learning_rate, opt_type, mu)
outdir = os.path.join('affine_res', 'kde_Isotrlin', time.strftime("%Y%m%d"), run_tag)
sumdir = os.path.join('affine_res', 'summary_Isotrlin', time.strftime("%Y%m%d"), run_tag)

niter = 15000
n_save = 500
n_print = 100
bbox = [-2, 2, -2 + mu, 2 + mu]

# Target distribution: Gaussian centred at (0, mu) with scale `sigma`.
# BUG FIX: np.vstack was given a bare generator expression, which is
# deprecated in NumPy (and rejected by modern versions) -- pass a list.
mus = np.vstack([[0, mu] for _ in range(batch_size)])
x_real = mus + sigma * tf.random_normal([batch_size, x_dim])

generator = tf.make_template('generator', generator4Gaussian_func1)
discriminator = tf.make_template('discriminator', discriminator4Gaussian_func1)

# Generator and discriminator outputs.
z = sigma * tf.random_normal([batch_size, z_dim])
x_fake = generator(z, x_dim, mu)
d_out_real = discriminator(x_real)
d_out_fake = discriminator(x_fake)
d_loss, g_loss = compute_loss(d_out_real, d_out_fake, divergence)

# Collect the two sets of trainable variables.
g_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='generator')
d_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='discriminator')
d_loss_tot, g_loss_tot, train_op, reg, d_grad_norm, g_grad_norm = \
    compute_gradients(d_loss, d_vars, g_loss, g_vars, opt_type, learning_rate, reg_param, method)

summary_op = tf.summary.merge([
    tf.summary.scalar("loss/d_loss", d_loss),
    tf.summary.scalar("loss/g_loss", g_loss),
    tf.summary.scalar("loss/reg", reg),
    tf.summary.scalar("loss/d_loss_tot", d_loss_tot),
    tf.summary.scalar("loss/g_loss_tot", g_loss_tot),
    tf.summary.scalar("grad/d_grad_norm", d_grad_norm),
    tf.summary.scalar("grad/g_grad_norm", g_grad_norm),
])
print("Using the optimizer: {}".format(method))

# Initialize and run.
sess = tf.Session()
train_writer = tf.summary.FileWriter(sumdir, sess.graph)
sess.run(tf.global_variables_initializer())
if not os.path.exists(outdir):
    os.makedirs(outdir)
print('Training: {}_{}_bs{}_mu{}_std{}_reg{}_lr{}'.format(
    method, divergence, batch_size, mu, sigma, reg_param, learning_rate))

# Fixed latent samples reused for every saved KDE snapshot.
ztest = [sigma * np.random.randn(batch_size, z_dim) for i in range(10)]
# Draw real samples once for the reference KDE plot.
x_real_out = np.concatenate([sess.run(x_real)])
init_g = sess.run(g_vars[0])
init_d = sess.run(d_vars[0])
print('initial theta: {}'.format(init_d))
print('initial phi: {}'.format(init_g))
kde(x_real_out[:, 0], x_real_out[:, 1], bbox=bbox, save_file=os.path.join(outdir, 'real.png'))

for i in range(niter):
    if i % n_print == 0:
        # Periodically log losses to stdout and TensorBoard.
        d_loss_out, g_loss_out, summary_str = sess.run([d_loss, g_loss, summary_op])
        train_writer.add_summary(summary_str, i)
        print('iters = %d, d_loss = %.4f, g_loss = %.4f' % (i, d_loss_out, g_loss_out))
    if i % n_save == 0:
        # Periodically save a KDE plot of generated samples.
        x_out = np.concatenate([sess.run(x_fake, feed_dict={z: zt}) for zt in ztest], axis=0)
        kde(x_out[:, 0], x_out[:, 1], bbox=bbox, save_file=os.path.join(outdir, '%d.png' % i))
    sess.run(train_op)
sess.close()
| [
"os.path.exists",
"tensorflow.reset_default_graph",
"tensorflow.random_normal",
"os.makedirs",
"tensorflow.Session",
"time.strftime",
"os.path.join",
"tensorflow.global_variables_initializer",
"numpy.random.randn",
"tensorflow.summary.scalar",
"tensorflow.summary.FileWriter",
"tensorflow.make_... | [((144, 168), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (166, 168), True, 'import tensorflow as tf\n'), ((1302, 1357), 'tensorflow.make_template', 'tf.make_template', (['"""generator"""', 'generator4Gaussian_func1'], {}), "('generator', generator4Gaussian_func1)\n", (1318, 1357), True, 'import tensorflow as tf\n'), ((1374, 1437), 'tensorflow.make_template', 'tf.make_template', (['"""discriminator"""', 'discriminator4Gaussian_func1'], {}), "('discriminator', discriminator4Gaussian_func1)\n", (1390, 1437), True, 'import tensorflow as tf\n'), ((1728, 1798), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.TRAINABLE_VARIABLES'], {'scope': '"""generator"""'}), "(tf.GraphKeys.TRAINABLE_VARIABLES, scope='generator')\n", (1745, 1798), True, 'import tensorflow as tf\n'), ((1808, 1882), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.TRAINABLE_VARIABLES'], {'scope': '"""discriminator"""'}), "(tf.GraphKeys.TRAINABLE_VARIABLES, scope='discriminator')\n", (1825, 1882), True, 'import tensorflow as tf\n'), ((2518, 2530), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (2528, 2530), True, 'import tensorflow as tf\n'), ((2546, 2587), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', (['sumdir', 'sess.graph'], {}), '(sumdir, sess.graph)\n', (2567, 2587), True, 'import tensorflow as tf\n'), ((546, 569), 'time.strftime', 'time.strftime', (['"""%Y%m%d"""'], {}), "('%Y%m%d')\n", (559, 569), False, 'import time\n'), ((841, 864), 'time.strftime', 'time.strftime', (['"""%Y%m%d"""'], {}), "('%Y%m%d')\n", (854, 864), False, 'import time\n'), ((1468, 1505), 'tensorflow.random_normal', 'tf.random_normal', (['[batch_size, z_dim]'], {}), '([batch_size, z_dim])\n', (1484, 1505), True, 'import tensorflow as tf\n'), ((2597, 2630), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (2628, 2630), True, 'import tensorflow as tf\n'), ((2640, 2662), 
'os.path.exists', 'os.path.exists', (['outdir'], {}), '(outdir)\n', (2654, 2662), False, 'import os\n'), ((2668, 2687), 'os.makedirs', 'os.makedirs', (['outdir'], {}), '(outdir)\n', (2679, 2687), False, 'import os\n'), ((1251, 1288), 'tensorflow.random_normal', 'tf.random_normal', (['[batch_size, x_dim]'], {}), '([batch_size, x_dim])\n', (1267, 1288), True, 'import tensorflow as tf\n'), ((2087, 2127), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""loss/d_loss"""', 'd_loss'], {}), "('loss/d_loss', d_loss)\n", (2104, 2127), True, 'import tensorflow as tf\n'), ((2133, 2173), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""loss/g_loss"""', 'g_loss'], {}), "('loss/g_loss', g_loss)\n", (2150, 2173), True, 'import tensorflow as tf\n'), ((2179, 2213), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""loss/reg"""', 'reg'], {}), "('loss/reg', reg)\n", (2196, 2213), True, 'import tensorflow as tf\n'), ((2219, 2267), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""loss/d_loss_tot"""', 'd_loss_tot'], {}), "('loss/d_loss_tot', d_loss_tot)\n", (2236, 2267), True, 'import tensorflow as tf\n'), ((2273, 2321), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""loss/g_loss_tot"""', 'g_loss_tot'], {}), "('loss/g_loss_tot', g_loss_tot)\n", (2290, 2321), True, 'import tensorflow as tf\n'), ((2328, 2378), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""grad/d_grad_norm"""', 'd_grad_norm'], {}), "('grad/d_grad_norm', d_grad_norm)\n", (2345, 2378), True, 'import tensorflow as tf\n'), ((2384, 2434), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""grad/g_grad_norm"""', 'g_grad_norm'], {}), "('grad/g_grad_norm', g_grad_norm)\n", (2401, 2434), True, 'import tensorflow as tf\n'), ((2839, 2873), 'numpy.random.randn', 'np.random.randn', (['batch_size', 'z_dim'], {}), '(batch_size, z_dim)\n', (2854, 2873), True, 'import numpy as np\n'), ((3168, 3200), 'os.path.join', 'os.path.join', (['outdir', '"""real.png"""'], {}), "(outdir, 'real.png')\n", 
(3180, 3200), False, 'import os\n'), ((3649, 3683), 'os.path.join', 'os.path.join', (['outdir', "('%d.png' % i)"], {}), "(outdir, '%d.png' % i)\n", (3661, 3683), False, 'import os\n')] |
from unittest.case import skip
from bs4.element import NavigableString, PageElement, Tag, TemplateString
from pp.models import BadUrlException, GitHubCode, GithubCodeElement, GithubCodeLine
from django.test import TestCase
# Create your tests here.
class GithubCodeTestCase(TestCase):
    """URL validation and line-range parsing behaviour of GitHubCode."""

    def test_about_bad_url_on_constructor(self):
        # Non-GitHub URLs are rejected already at construction time.
        with self.assertRaises(BadUrlException):
            GitHubCode('https://www.google.com')

    @skip('skip this test other than local machine.')
    def test_about_bad_url_on_open(self):
        # open() on an unreachable source raises too (network-dependent).
        with self.assertRaises(BadUrlException):
            GitHubCode('https://github.com/not_requesable_source').open()

    def test_about_code_range(self):
        code = GitHubCode('https://github.com/not_requesable_source#L11-L22')
        self.assertEqual(code.get_start_line(), 11)
        self.assertEqual(code.get_end_line(), 22)

    def test_about_code_range_without_end(self):
        code = GitHubCode('https://github.com/not_requesable_source#L11')
        self.assertEqual(code.get_start_line(), 11)
        self.assertEqual(code.get_end_line(), None)

    def test_about_code_range_without_start_and_end(self):
        # No fragment at all: start defaults to 1, end stays unset.
        code = GitHubCode('https://github.com/not_requesable_source')
        self.assertEqual(code.get_start_line(), 1)
        self.assertEqual(code.get_end_line(), None)
class MockedLine:
    """Minimal stand-in for a soup line object, exposing only `.contents`.

    `contents` appears to be the only attribute GithubCodeLine reads from
    the element it wraps, so this stub suffices for unit tests.
    """

    def __init__(self, contents):
        self.contents = contents

    def __repr__(self):
        # Helps diagnose failing tests that print the mocked line.
        return 'MockedLine(contents={!r})'.format(self.contents)
class GithubCodeLineTestCase(TestCase):
    """Joining mixed Tag / NavigableString children into a single line."""

    def test_about_line_as_string(self):
        span = Tag(name='span')
        span.string = 'abcde'
        mocked = MockedLine([span, NavigableString('aaa')])
        self.assertEqual(len(GithubCodeLine(mocked).words), 2)
        self.assertEqual(GithubCodeLine(mocked).line_as_string(), "abcdeaaa")
class DummyPageElement(PageElement):
    # A PageElement that is neither a Tag nor a NavigableString; used to
    # exercise GithubCodeElement's fallback for unsupported element types.
    pass
class GithubCodeElementTestCase(TestCase):
    """Word extraction for each element type GithubCodeElement accepts."""

    def test_about_tagged_element(self):
        span = Tag(name='span')
        span.string = 'abcde '
        element = GithubCodeElement(span)
        self.assertEqual(element.word, 'abcde ')

    def test_about_navigable_string(self):
        element = GithubCodeElement(NavigableString('abcde'))
        self.assertEqual(element.word, 'abcde')

    def test_about_navigable_string_only_line_feed(self):
        # A bare newline is rendered as a single space.
        element = GithubCodeElement(NavigableString('\n'))
        self.assertEqual(element.word, ' ')

    def test_about_unknown_target(self):
        # Unsupported element types collapse to the empty string.
        element = GithubCodeElement(DummyPageElement())
        self.assertEqual(element.word, '')
| [
"pp.models.GithubCodeLine",
"bs4.element.Tag",
"unittest.case.skip",
"pp.models.GitHubCode",
"pp.models.GithubCodeElement",
"bs4.element.NavigableString"
] | [((430, 478), 'unittest.case.skip', 'skip', (['"""skip this test other than local machine."""'], {}), "('skip this test other than local machine.')\n", (434, 478), False, 'from unittest.case import skip\n'), ((701, 763), 'pp.models.GitHubCode', 'GitHubCode', (['"""https://github.com/not_requesable_source#L11-L22"""'], {}), "('https://github.com/not_requesable_source#L11-L22')\n", (711, 763), False, 'from pp.models import BadUrlException, GitHubCode, GithubCodeElement, GithubCodeLine\n'), ((948, 1006), 'pp.models.GitHubCode', 'GitHubCode', (['"""https://github.com/not_requesable_source#L11"""'], {}), "('https://github.com/not_requesable_source#L11')\n", (958, 1006), False, 'from pp.models import BadUrlException, GitHubCode, GithubCodeElement, GithubCodeLine\n'), ((1203, 1257), 'pp.models.GitHubCode', 'GitHubCode', (['"""https://github.com/not_requesable_source"""'], {}), "('https://github.com/not_requesable_source')\n", (1213, 1257), False, 'from pp.models import BadUrlException, GitHubCode, GithubCodeElement, GithubCodeLine\n'), ((1556, 1572), 'bs4.element.Tag', 'Tag', ([], {'name': '"""span"""'}), "(name='span')\n", (1559, 1572), False, 'from bs4.element import NavigableString, PageElement, Tag, TemplateString\n'), ((1942, 1958), 'bs4.element.Tag', 'Tag', ([], {'name': '"""span"""'}), "(name='span')\n", (1945, 1958), False, 'from bs4.element import NavigableString, PageElement, Tag, TemplateString\n'), ((387, 423), 'pp.models.GitHubCode', 'GitHubCode', (['"""https://www.google.com"""'], {}), "('https://www.google.com')\n", (397, 423), False, 'from pp.models import BadUrlException, GitHubCode, GithubCodeElement, GithubCodeLine\n'), ((1634, 1656), 'bs4.element.NavigableString', 'NavigableString', (['"""aaa"""'], {}), "('aaa')\n", (1649, 1656), False, 'from bs4.element import NavigableString, PageElement, Tag, TemplateString\n'), ((2014, 2036), 'pp.models.GithubCodeElement', 'GithubCodeElement', (['tag'], {}), '(tag)\n', (2031, 2036), False, 'from pp.models 
import BadUrlException, GitHubCode, GithubCodeElement, GithubCodeLine\n'), ((1688, 1708), 'pp.models.GithubCodeLine', 'GithubCodeLine', (['line'], {}), '(line)\n', (1702, 1708), False, 'from pp.models import BadUrlException, GitHubCode, GithubCodeElement, GithubCodeLine\n'), ((1745, 1765), 'pp.models.GithubCodeLine', 'GithubCodeLine', (['line'], {}), '(line)\n', (1759, 1765), False, 'from pp.models import BadUrlException, GitHubCode, GithubCodeElement, GithubCodeLine\n'), ((2140, 2164), 'bs4.element.NavigableString', 'NavigableString', (['"""abcde"""'], {}), "('abcde')\n", (2155, 2164), False, 'from bs4.element import NavigableString, PageElement, Tag, TemplateString\n'), ((2283, 2304), 'bs4.element.NavigableString', 'NavigableString', (['"""\n"""'], {}), "('\\n')\n", (2298, 2304), False, 'from bs4.element import NavigableString, PageElement, Tag, TemplateString\n'), ((580, 634), 'pp.models.GitHubCode', 'GitHubCode', (['"""https://github.com/not_requesable_source"""'], {}), "('https://github.com/not_requesable_source')\n", (590, 634), False, 'from pp.models import BadUrlException, GitHubCode, GithubCodeElement, GithubCodeLine\n')] |
import os
class Config:
    """Base configuration shared by all environments."""
    # Endpoint template: filled with (path, query-string, api key).
    NEWS_API_BASE_URL = 'https://newsapi.org/v2/{}?{}&apiKey={}'
    # Secrets come from the environment; both are None when unset --
    # NOTE(review): confirm callers handle the missing-key case.
    SECRET_KEY = os.environ.get("SECRET_KEY")
    NEWS_API_KEY = os.environ.get('NEWS_API_KEY')
class DevConfig(Config):
    """Development settings: debug mode enabled."""
    DEBUG = True
class ProdConfig(Config):
    """Production settings: debug mode disabled."""
    DEBUG = False
# Lookup table for selecting a configuration class by name.
configuration = {
    'develop': DevConfig,
    'prod': ProdConfig
}
"os.environ.get"
] | [((107, 135), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (121, 135), False, 'import os\n'), ((155, 185), 'os.environ.get', 'os.environ.get', (['"""NEWS_API_KEY"""'], {}), "('NEWS_API_KEY')\n", (169, 185), False, 'import os\n')] |
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import pytest
import numpy as np
import mindspore.nn as nn
import mindspore.ops as ops
import mindspore.context as context
from mindspore import Tensor
from mindspore.ops.operations import _inner_ops as inner
class Net(nn.Cell):
    """Wraps a single cumulative min/max operator selected by name."""

    def __init__(self, op, axis):
        super(Net, self).__init__()
        # Dispatch table instead of an if/elif chain.
        op_factories = {"Cummin": inner.Cummin, "Cummax": ops.Cummax}
        if op not in op_factories:
            raise ValueError("op value error.")
        self.op = op_factories[op](axis)

    def construct(self, x):
        return self.op(x)
def cum_minmax_compare(op, x, expected, axis, data_type):
    """Run `op` over `x` in both PyNative and Graph mode and verify outputs.

    `expected` is a (values, indices) pair; both are cast to `data_type`
    before comparison.
    """
    net = Net(op, axis)
    x_np = np.array(x).astype(data_type)
    want_vals = np.array(expected[0]).astype(data_type)
    want_idx = np.array(expected[1]).astype(data_type)
    # The same network must match the reference in both execution modes.
    for mode in (context.PYNATIVE_MODE, context.GRAPH_MODE):
        context.set_context(mode=mode, device_target="GPU")
        output = net(Tensor(x_np))
        # equal_nan so NaN propagation in the value output compares equal.
        assert np.allclose(output[0].asnumpy(), want_vals, equal_nan=True)
        assert np.allclose(output[1].asnumpy(), want_idx)
@pytest.mark.level0
@pytest.mark.env_onecard
@pytest.mark.platform_x86_gpu_training
@pytest.mark.parametrize("data_type", [np.uint8, np.int8, np.int32, np.float16, np.float32])
def test_cummin_multi_dims(data_type):
    """
    Feature: Op Cummin
    Description: test Cummin operator with multiple dimension.
    Expectation: the result match expectation.
    """
    op = "Cummin"
    axis = 1  # cumulate along the middle axis of the 4x3x5 input
    x = [[[14, 19, 18, 11, 6], [1, 4, 18, 6, 1], [15, 13, 12, 9, 19]],
         [[16, 16, 17, 10, 15], [9, 7, 10, 9, 4], [6, 14, 16, 3, 2]],
         [[1, 13, 15, 1, 6], [20, 6, 8, 19, 19], [3, 14, 20, 18, 19]],
         [[20, 1, 14, 9, 3], [13, 11, 2, 17, 14], [0, 15, 13, 7, 10]]]
    # Expected (values, indices) pair for the running minimum along axis 1.
    cummin_output = (
        [[[14, 19, 18, 11, 6], [1, 4, 18, 6, 1], [1, 4, 12, 6, 1]],
         [[16, 16, 17, 10, 15], [9, 7, 10, 9, 4], [6, 7, 10, 3, 2]],
         [[1, 13, 15, 1, 6], [1, 6, 8, 1, 6], [1, 6, 8, 1, 6]], [[20, 1, 14, 9, 3], [13, 1, 2, 9, 3], [0, 1, 2, 7, 3]]],
        [[[0, 0, 0, 0, 0], [1, 1, 1, 1, 1], [1, 1, 2, 1, 1]], [[0, 0, 0, 0, 0], [1, 1, 1, 1, 1], [2, 1, 1, 2, 2]],
         [[0, 0, 0, 0, 0], [0, 1, 1, 0, 0], [0, 1, 1, 0, 0]], [[0, 0, 0, 0, 0], [1, 0, 1, 0, 0], [2, 0, 1, 2, 0]]])
    cum_minmax_compare(op, x, cummin_output, axis, data_type)
@pytest.mark.level0
@pytest.mark.env_onecard
@pytest.mark.platform_x86_gpu_training
@pytest.mark.parametrize("data_type", [np.uint8, np.uint32, np.int8, np.int32, np.int64, np.float16, np.float32])
def test_cummax_multi_dims(data_type):
    """
    Feature: Op Cummax
    Description: test Cummax operator with multiple dimension.
    Expectation: the result match expectation.
    """
    op = "Cummax"
    axis = 1  # cumulate along the middle axis of the 4x3x5 input
    x = [[[11, 11, 1, 7, 11], [1, 8, 18, 0, 9], [12, 1, 16, 11, 8]],
         [[18, 8, 10, 17, 14], [4, 20, 8, 20, 11], [14, 1, 8, 5, 16]],
         [[6, 13, 19, 14, 8], [17, 19, 11, 0, 7], [18, 4, 13, 14, 16]],
         [[10, 7, 7, 7, 19], [15, 0, 15, 5, 14], [9, 7, 10, 4, 14]]]
    # Expected (values, indices) pair for the running maximum along axis 1.
    cummax_output = ([[[11, 11, 1, 7, 11], [11, 11, 18, 7, 11], [12, 11, 18, 11, 11]],
                      [[18, 8, 10, 17, 14], [18, 20, 10, 20, 14], [18, 20, 10, 20, 16]],
                      [[6, 13, 19, 14, 8], [17, 19, 19, 14, 8], [18, 19, 19, 14, 16]],
                      [[10, 7, 7, 7, 19], [15, 7, 15, 7, 19], [15, 7, 15, 7, 19]]],
                     [[[0, 0, 0, 0, 0], [0, 0, 1, 0, 0], [2, 0, 1, 2, 0]],
                      [[0, 0, 0, 0, 0], [0, 1, 0, 1, 0], [0, 1, 0, 1, 2]],
                      [[0, 0, 0, 0, 0], [1, 1, 0, 0, 0], [2, 1, 0, 2, 2]],
                      [[0, 0, 0, 0, 0], [1, 0, 1, 0, 0], [1, 2, 1, 0, 0]]])
    cum_minmax_compare(op, x, cummax_output, axis, data_type)
@pytest.mark.level0
@pytest.mark.env_onecard
@pytest.mark.platform_x86_gpu_training
@pytest.mark.parametrize("data_type", [np.float16, np.float32])
def test_cumminmax_nan(data_type):
    """
    Feature: Op Cummin/Cummax
    Description: test Cummin/Cummax operator with nan input.
    Expectation: the result match expectation.
    """
    inf = float('inf')
    nan = float('nan')
    axis = 0
    x = [4, inf, 1.5, -inf, 0, nan, 1]
    # Once a NaN appears (index 5), both the value and index outputs stay
    # pinned to the NaN position for all subsequent elements.
    cummin_output = ([4, 4, 1.5, -inf, -inf, nan, nan], [0, 0, 2, 3, 3, 5, 5])
    cummax_output = ([4, inf, inf, inf, inf, nan, nan], [0, 1, 1, 1, 1, 5, 5])
    cum_minmax_compare("Cummin", x, cummin_output, axis, data_type)
    cum_minmax_compare("Cummax", x, cummax_output, axis, data_type)
| [
"mindspore.ops.Cummax",
"mindspore.context.set_context",
"pytest.mark.parametrize",
"numpy.array",
"mindspore.ops.operations._inner_ops.Cummin",
"mindspore.Tensor"
] | [((2017, 2113), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""data_type"""', '[np.uint8, np.int8, np.int32, np.float16, np.float32]'], {}), "('data_type', [np.uint8, np.int8, np.int32, np.\n float16, np.float32])\n", (2040, 2113), False, 'import pytest\n'), ((3272, 3389), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""data_type"""', '[np.uint8, np.uint32, np.int8, np.int32, np.int64, np.float16, np.float32]'], {}), "('data_type', [np.uint8, np.uint32, np.int8, np.\n int32, np.int64, np.float16, np.float32])\n", (3295, 3389), False, 'import pytest\n'), ((4683, 4745), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""data_type"""', '[np.float16, np.float32]'], {}), "('data_type', [np.float16, np.float32])\n", (4706, 4745), False, 'import pytest\n'), ((1462, 1530), 'mindspore.context.set_context', 'context.set_context', ([], {'mode': 'context.PYNATIVE_MODE', 'device_target': '"""GPU"""'}), "(mode=context.PYNATIVE_MODE, device_target='GPU')\n", (1481, 1530), True, 'import mindspore.context as context\n'), ((1706, 1771), 'mindspore.context.set_context', 'context.set_context', ([], {'mode': 'context.GRAPH_MODE', 'device_target': '"""GPU"""'}), "(mode=context.GRAPH_MODE, device_target='GPU')\n", (1725, 1771), True, 'import mindspore.context as context\n'), ((1548, 1557), 'mindspore.Tensor', 'Tensor', (['x'], {}), '(x)\n', (1554, 1557), False, 'from mindspore import Tensor\n'), ((1789, 1798), 'mindspore.Tensor', 'Tensor', (['x'], {}), '(x)\n', (1795, 1798), False, 'from mindspore import Tensor\n'), ((1018, 1036), 'mindspore.ops.operations._inner_ops.Cummin', 'inner.Cummin', (['axis'], {}), '(axis)\n', (1030, 1036), True, 'from mindspore.ops.operations import _inner_ops as inner\n'), ((1314, 1325), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (1322, 1325), True, 'import numpy as np\n'), ((1088, 1104), 'mindspore.ops.Cummax', 'ops.Cummax', (['axis'], {}), '(axis)\n', (1098, 1104), True, 'import mindspore.ops as ops\n'), ((1360, 
1381), 'numpy.array', 'np.array', (['expected[0]'], {}), '(expected[0])\n', (1368, 1381), True, 'import numpy as np\n'), ((1401, 1422), 'numpy.array', 'np.array', (['expected[1]'], {}), '(expected[1])\n', (1409, 1422), True, 'import numpy as np\n')] |
import torch
from torch.distributions import Normal


def theta(mod, priors, y=None):
    """Draw one posterior sample of the model parameters, detached.

    Returns a tuple ``(out, mod)`` where ``out`` maps parameter names to
    detached tensors and ``mod`` is the (unchanged) model object.
    If *y* is None the model's imputed data is used; otherwise the
    caller-supplied ``y[i]`` tensors are returned as-is.
    """
    # Sample posterior model parameters over all observations.
    indices = [range(mod.N[i]) for i in range(mod.I)]
    params = mod.sample_params(indices)

    out = {
        # Cumulative sums of the deltas give ordered (negative / positive) means.
        'mu0': -params['delta0'].cumsum(0).detach(),
        'mu1': params['delta1'].cumsum(0).detach(),
        'eta0': params['eta0'].detach(),
        'eta1': params['eta1'].detach(),
        'sig': params['sig2'].detach().sqrt(),
        'W': params['W'].detach(),
        'eps': params['eps'].detach(),
    }

    H = params['H'].detach()
    v = params['v'].detach()
    if mod.use_stick_break:
        # Stick-breaking construction of the binary feature matrix.
        # (earlier variant: (v.cumprod(0) > Normal(0, 1).cdf(H)).double())
        out['Z'] = (v.cumprod(0) > H).double()
    else:
        out['Z'] = (v > H).double()

    out['noisy_sd'] = torch.sqrt(torch.tensor(priors['noisy_var']))

    if y is None:
        # Use the imputed y[i].
        out['y'] = [params['y'][i].detach() for i in range(mod.I)]
    else:
        # Use the user-provided y[i].
        out['y'] = [y[i] for i in range(mod.I)]

    return out, mod
| [
"torch.tensor"
] | [((947, 980), 'torch.tensor', 'torch.tensor', (["priors['noisy_var']"], {}), "(priors['noisy_var'])\n", (959, 980), False, 'import torch\n')] |
"""Fireworks countdown exercise: count down from 10 to 0, pausing one
second between numbers, then print a happy-new-year message."""
from time import sleep

contador = 10
while contador >= 0:
    print(contador)
    sleep(1)
    contador -= 1
print('\033[1;36;40mFELIZ ANO NOVO MEUS QUERIDOS!\033[m')
"time.sleep"
] | [((218, 226), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (223, 226), False, 'from time import sleep\n')] |
import random

# Collect the four students' names (prompts kept exactly as before).
nomes = [
    input('Nome do primeiro aluno: '),
    input('nome do segundo aluno: '),
    input(' nome do terceiro aluno: '),
    input(' nome do quarto aluno: '),
]
print('Alunos: \n {} \n {} \n {} \n {} \n'.format(*nomes))

# Pick one student at random.
sorteado = random.choice(nomes)
print(' O aluno sorteado foi {}'.format(sorteado))

# Shuffle the list to decide the presentation order.
random.shuffle(nomes)
print('a ordem de apresentação dos alunos será :')
print(nomes)
| [
"random.choice",
"random.shuffle"
] | [((278, 298), 'random.choice', 'random.choice', (['lista'], {}), '(lista)\n', (291, 298), False, 'import random\n'), ((355, 376), 'random.shuffle', 'random.shuffle', (['lista'], {}), '(lista)\n', (369, 376), False, 'import random\n')] |
#!/usr/bin/env python3
"""Switch the active Wi-Fi network in /etc/wpa_supplicant/wpa_supplicant.conf.

Usage: script SSID [--psk PSK]

The config is regenerated from a local template so that SSID becomes the
only enabled network; every other known network is preserved but
disabled.  When --psk is omitted, the passphrase already stored for
SSID is reused.  Finally wpa_cli is asked to reload the configuration.
"""
import re
import os
import shutil
import argparse
from pprint import pprint

parser = argparse.ArgumentParser()
parser.add_argument("ssid", help = "network SSID.")
parser.add_argument("--psk", help = "network password, if not provided, script will attempt using saved credentials.")
args = parser.parse_args()
arg_ssid = args.ssid
arg_psk = args.psk

if arg_psk:
    print("psk provided")
else:
    print("psk not provided")

# const
config_file = "/etc/wpa_supplicant/wpa_supplicant.conf"
script_dir = os.path.dirname(os.path.realpath(__file__))
config_template = os.path.join(script_dir, 'wpa_supplicant.conf.tmp')
config_copy = os.path.join(script_dir, 'wpa_supplicant.conf')

# regex for the fields of a "network={ ... }" block
NETWORK = re.compile(r'network\s*\=\s*{')
NETWORK_END = re.compile(r'}')
SSID = re.compile(r'\s*ssid\s*\=\s*\"(.*)\"')
PSK = re.compile(r'\s*psk\s*\=\s*\"(.*)\"')
KEY_MGMT = re.compile(r'\s*key_mgmt\s*\=\s*(.*)')
DISABLED = re.compile(r'\s*disabled\s*\=\s*(.*)')

# Parse wpa_supplicant.conf into
# {ssid: {"psk": ..., "disabled": ..., "key_mgmt": ...}}
networks = {}
reading_network = False
ssid, psk, disable, key_mgmt = '', '', '', ''
with open(config_file, 'r') as file:
    for line in file:
        if NETWORK.match(line):
            reading_network = True
            # Fix: reset the per-network fields so values from the previous
            # block cannot leak into a network that omits one of them.
            ssid, psk, disable, key_mgmt = '', '', '', ''
        elif NETWORK_END.match(line):
            reading_network = False
            networks[ssid] = {
                "psk": psk,
                "disabled": disable,
                "key_mgmt": key_mgmt
            }
        if reading_network:
            ssid_match = SSID.match(line)
            psk_match = PSK.match(line)
            disabled_match = DISABLED.match(line)
            key_mgmt_match = KEY_MGMT.match(line)
            if key_mgmt_match:
                key_mgmt = key_mgmt_match.group(1)
            if disabled_match:
                disable = disabled_match.group(1)
            if ssid_match:
                ssid = ssid_match.group(1)
            if psk_match:
                psk = psk_match.group(1)

# If psk not provided, reuse the one stored for the requested SSID;
# quit if the SSID is unknown (no credentials available).
if not arg_psk:
    if arg_ssid in networks:
        # Fix: previously read networks[ssid] — the *last parsed* network's
        # psk — instead of the network the user actually asked for.
        arg_psk = networks[arg_ssid]["psk"]
    else:
        exit()

# Start from the template, then append every known network (disabled),
# skipping the requested SSID which is re-added enabled at the end.
shutil.copyfile(config_template, config_copy)
# pprint(networks)
with open(os.path.join(script_dir, 'wpa_supplicant.conf'), 'a') as file:
    for network in networks:
        if network == arg_ssid:
            continue
        file.write("network={\n")
        file.write("    ssid=\"%s\"\n" % (network))
        file.write("    psk=\"%s\"\n" % (networks[network]["psk"]))
        file.write("    disabled=1\n")
        if "key_mgmt" in networks[network] and networks[network]["key_mgmt"]:
            file.write("    key_mgmt="+networks[network]["key_mgmt"]+"\n")
        file.write("}\n\n")
    file.write("\n")
    file.write("network={\n")
    file.write("    ssid=\"%s\"\n" % (arg_ssid))
    file.write("    psk=\"%s\"\n" % (arg_psk))
    file.write("    disabled=0\n")
    file.write("}\n")

# copy the config file to destination and tell wpa_supplicant to reload it
shutil.copyfile(config_copy, config_file)
os.system('wpa_cli -i wlan0 reconfigure')
"argparse.ArgumentParser",
"re.compile",
"os.path.join",
"os.path.realpath",
"shutil.copyfile",
"os.system"
] | [((109, 134), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (132, 134), False, 'import argparse\n'), ((587, 638), 'os.path.join', 'os.path.join', (['script_dir', '"""wpa_supplicant.conf.tmp"""'], {}), "(script_dir, 'wpa_supplicant.conf.tmp')\n", (599, 638), False, 'import os\n'), ((653, 700), 'os.path.join', 'os.path.join', (['script_dir', '"""wpa_supplicant.conf"""'], {}), "(script_dir, 'wpa_supplicant.conf')\n", (665, 700), False, 'import os\n'), ((720, 753), 're.compile', 're.compile', (['"""network\\\\s*\\\\=\\\\s*{"""'], {}), "('network\\\\s*\\\\=\\\\s*{')\n", (730, 753), False, 'import re\n'), ((766, 781), 're.compile', 're.compile', (['"""}"""'], {}), "('}')\n", (776, 781), False, 'import re\n'), ((790, 833), 're.compile', 're.compile', (['"""\\\\s*ssid\\\\s*\\\\=\\\\s*\\\\"(.*)\\\\\\""""'], {}), '(\'\\\\s*ssid\\\\s*\\\\=\\\\s*\\\\"(.*)\\\\"\')\n', (800, 833), False, 'import re\n'), ((835, 877), 're.compile', 're.compile', (['"""\\\\s*psk\\\\s*\\\\=\\\\s*\\\\"(.*)\\\\\\""""'], {}), '(\'\\\\s*psk\\\\s*\\\\=\\\\s*\\\\"(.*)\\\\"\')\n', (845, 877), False, 'import re\n'), ((884, 925), 're.compile', 're.compile', (['"""\\\\s*key_mgmt\\\\s*\\\\=\\\\s*(.*)"""'], {}), "('\\\\s*key_mgmt\\\\s*\\\\=\\\\s*(.*)')\n", (894, 925), False, 'import re\n'), ((934, 975), 're.compile', 're.compile', (['"""\\\\s*disabled\\\\s*\\\\=\\\\s*(.*)"""'], {}), "('\\\\s*disabled\\\\s*\\\\=\\\\s*(.*)')\n", (944, 975), False, 'import re\n'), ((2230, 2275), 'shutil.copyfile', 'shutil.copyfile', (['config_template', 'config_copy'], {}), '(config_template, config_copy)\n', (2245, 2275), False, 'import shutil\n'), ((3084, 3125), 'shutil.copyfile', 'shutil.copyfile', (['config_copy', 'config_file'], {}), '(config_copy, config_file)\n', (3099, 3125), False, 'import shutil\n'), ((3141, 3182), 'os.system', 'os.system', (['"""wpa_cli -i wlan0 reconfigure"""'], {}), "('wpa_cli -i wlan0 reconfigure')\n", (3150, 3182), False, 'import os\n'), ((541, 567), 'os.path.realpath', 
'os.path.realpath', (['__file__'], {}), '(__file__)\n', (557, 567), False, 'import os\n'), ((2306, 2353), 'os.path.join', 'os.path.join', (['script_dir', '"""wpa_supplicant.conf"""'], {}), "(script_dir, 'wpa_supplicant.conf')\n", (2318, 2353), False, 'import os\n')] |
from rest_framework.decorators import api_view
from rest_framework.response import Response
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from homework.models import Presentation
from homework.serializers import PresentationSerializer
from homework.services.presentation_services import (
add_presentation_,
update_presentation_,
upload_image_,
)
@api_view(["GET"])
def get_presentations(request):
presentations = Presentation.objects.all()
page = request.query_params.get("page")
paginator = Paginator(presentations, 4)
try:
presentations = paginator.page(page)
except PageNotAnInteger:
presentations = paginator.page(1)
except EmptyPage:
presentations = paginator.page(paginator.num_pages)
if not page:
page = 1
page = int(page)
serializer = PresentationSerializer(presentations, many=True)
return Response(
{"presentations": serializer.data, "page": page, "pages": paginator.num_pages}
)
@api_view(["POST"])
def add_presentation(request):
presentation = add_presentation_(request.data)
serializer = PresentationSerializer(presentation, many=False)
return Response(serializer.data)
@api_view(["PUT"])
def update_presentation(request, pk):
presentation = update_presentation_(request.data, pk)
serializer = PresentationSerializer(presentation, many=False)
return Response(serializer.data)
@api_view(["POST"])
def upload_image(request):
result = upload_image_(request.data)
return Response(result)
@api_view(["DELETE"])
def delete_presentation(request, pk):
student = Presentation.objects.get(id=pk)
student.delete()
return Response("Presentation Deleted")
| [
"homework.models.Presentation.objects.get",
"homework.models.Presentation.objects.all",
"homework.services.presentation_services.upload_image_",
"homework.services.presentation_services.update_presentation_",
"rest_framework.response.Response",
"homework.serializers.PresentationSerializer",
"homework.se... | [((389, 406), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (397, 406), False, 'from rest_framework.decorators import api_view\n'), ((1021, 1039), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (1029, 1039), False, 'from rest_framework.decorators import api_view\n'), ((1228, 1245), 'rest_framework.decorators.api_view', 'api_view', (["['PUT']"], {}), "(['PUT'])\n", (1236, 1245), False, 'from rest_framework.decorators import api_view\n'), ((1448, 1466), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (1456, 1466), False, 'from rest_framework.decorators import api_view\n'), ((1566, 1586), 'rest_framework.decorators.api_view', 'api_view', (["['DELETE']"], {}), "(['DELETE'])\n", (1574, 1586), False, 'from rest_framework.decorators import api_view\n'), ((459, 485), 'homework.models.Presentation.objects.all', 'Presentation.objects.all', ([], {}), '()\n', (483, 485), False, 'from homework.models import Presentation\n'), ((546, 573), 'django.core.paginator.Paginator', 'Paginator', (['presentations', '(4)'], {}), '(presentations, 4)\n', (555, 573), False, 'from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n'), ((855, 903), 'homework.serializers.PresentationSerializer', 'PresentationSerializer', (['presentations'], {'many': '(True)'}), '(presentations, many=True)\n', (877, 903), False, 'from homework.serializers import PresentationSerializer\n'), ((915, 1007), 'rest_framework.response.Response', 'Response', (["{'presentations': serializer.data, 'page': page, 'pages': paginator.num_pages}"], {}), "({'presentations': serializer.data, 'page': page, 'pages':\n paginator.num_pages})\n", (923, 1007), False, 'from rest_framework.response import Response\n'), ((1090, 1121), 'homework.services.presentation_services.add_presentation_', 'add_presentation_', (['request.data'], {}), '(request.data)\n', (1107, 1121), False, 'from 
homework.services.presentation_services import add_presentation_, update_presentation_, upload_image_\n'), ((1139, 1187), 'homework.serializers.PresentationSerializer', 'PresentationSerializer', (['presentation'], {'many': '(False)'}), '(presentation, many=False)\n', (1161, 1187), False, 'from homework.serializers import PresentationSerializer\n'), ((1199, 1224), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (1207, 1224), False, 'from rest_framework.response import Response\n'), ((1303, 1341), 'homework.services.presentation_services.update_presentation_', 'update_presentation_', (['request.data', 'pk'], {}), '(request.data, pk)\n', (1323, 1341), False, 'from homework.services.presentation_services import add_presentation_, update_presentation_, upload_image_\n'), ((1359, 1407), 'homework.serializers.PresentationSerializer', 'PresentationSerializer', (['presentation'], {'many': '(False)'}), '(presentation, many=False)\n', (1381, 1407), False, 'from homework.serializers import PresentationSerializer\n'), ((1419, 1444), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (1427, 1444), False, 'from rest_framework.response import Response\n'), ((1507, 1534), 'homework.services.presentation_services.upload_image_', 'upload_image_', (['request.data'], {}), '(request.data)\n', (1520, 1534), False, 'from homework.services.presentation_services import add_presentation_, update_presentation_, upload_image_\n'), ((1546, 1562), 'rest_framework.response.Response', 'Response', (['result'], {}), '(result)\n', (1554, 1562), False, 'from rest_framework.response import Response\n'), ((1639, 1670), 'homework.models.Presentation.objects.get', 'Presentation.objects.get', ([], {'id': 'pk'}), '(id=pk)\n', (1663, 1670), False, 'from homework.models import Presentation\n'), ((1704, 1736), 'rest_framework.response.Response', 'Response', (['"""Presentation Deleted"""'], {}), "('Presentation 
Deleted')\n", (1712, 1736), False, 'from rest_framework.response import Response\n')] |
from nltk.corpus import stopwords

# Build and display the set of Indonesian stop words shipped with NLTK.
indonesian_stopwords = set(stopwords.words("indonesian"))
print(indonesian_stopwords)
| [
"nltk.corpus.stopwords.words"
] | [((52, 81), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""indonesian"""'], {}), "('indonesian')\n", (67, 81), False, 'from nltk.corpus import stopwords\n')] |
#!/usr/bin/python
import sys

try:
    from gpxpy import gpx
except ImportError:
    # Fix: sys was never imported, so this handler itself crashed with a
    # NameError instead of exiting with the friendly message.
    print("gpxpy not found - please run: pip install gpxpy")
    sys.exit()
class GpxUtil:
    def coords_2_sequences(self, coords):
        """Build a GPX document from a list of coordinate sequences.

        *coords* is a list of sequences, each holding (lon, lat) pairs;
        every sequence becomes one segment of a single GPX track.
        """
        gpx_file = gpx.GPX()
        # A single track holds all segments.
        track = gpx.GPXTrack()
        gpx_file.tracks.append(track)
        for sequence in coords:
            segment = gpx.GPXTrackSegment()
            for point in sequence:
                # point is (lon, lat) but GPXTrackPoint takes (latitude, longitude).
                segment.points.append(gpx.GPXTrackPoint(point[1], point[0]))
            track.segments.append(segment)
        return gpx_file
class GpxClean:
    """Remove GPS outlier points whose speed exceeds a multiple of the
    segment's mean speed, iterating until no more points are removed."""

    def __init__(self, gpx_file):
        self.gpx_file = gpx_file
        self.open(gpx_file)
        # Points faster than speed_factor * mean speed count as glitches.
        self.speed_factor = 4

    def mean(self, numbers):
        """Arithmetic mean; safe (returns 0.0-ish) for an empty list."""
        return float(sum(numbers)) / max(len(numbers), 1)

    def calculate_mean_speed(self, segment):
        """Mean speed over the segment's points."""
        # At i = 0 get_speed() returns None instead of a number: skip it.
        return self.mean([segment.get_speed(i) for i in range(1, segment.get_points_no())])

    def open(self, gpx_file):
        """Parse *gpx_file* into self.gpx."""
        # Fix: the module header only does "from gpxpy import gpx", so the
        # bare name "gpxpy" used below was undefined (NameError).
        import gpxpy
        with open(gpx_file, 'r') as f:
            self.gpx = gpxpy.parse(f)

    def clean(self):
        """Drop over-speed points from every segment; return the removed points."""
        removed_points = []
        for track in self.gpx.tracks:
            anything_removed = True
            while anything_removed:
                anything_removed = False
                for segment in track.segments:
                    mean_speed = self.calculate_mean_speed(segment)
                    i = 0
                    while i < segment.get_points_no():
                        speed = segment.get_speed(i)
                        # Fix: get_speed(0) returns None; comparing None with
                        # a float raises TypeError in Python 3, so guard it.
                        if speed is not None and speed > mean_speed * self.speed_factor:
                            removed_points.append(segment.points[i])
                            segment.remove_point(i)
                            anything_removed = True
                        else:
                            i += 1
        return removed_points

    def save_file(self, file):
        """Serialize the (possibly cleaned) GPX document to *file*."""
        # Fix: use a context manager so the handle is closed even on error.
        with open(file, 'w') as fp:
            fp.write(self.gpx.to_xml())
| [
"gpxpy.gpx.GPXTrack",
"gpxpy.gpx.GPXTrackPoint",
"gpxpy.gpx.GPXTrackSegment",
"gpxpy.gpx.GPX"
] | [((329, 338), 'gpxpy.gpx.GPX', 'gpx.GPX', ([], {}), '()\n', (336, 338), False, 'from gpxpy import gpx\n'), ((401, 415), 'gpxpy.gpx.GPXTrack', 'gpx.GPXTrack', ([], {}), '()\n', (413, 415), False, 'from gpxpy import gpx\n'), ((520, 541), 'gpxpy.gpx.GPXTrackSegment', 'gpx.GPXTrackSegment', ([], {}), '()\n', (539, 541), False, 'from gpxpy import gpx\n'), ((687, 722), 'gpxpy.gpx.GPXTrackPoint', 'gpx.GPXTrackPoint', (['pair[1]', 'pair[0]'], {}), '(pair[1], pair[0])\n', (704, 722), False, 'from gpxpy import gpx\n')] |
import os

# Launch the Tulsi bot detached from the terminal: nohup keeps it running
# after logout, stdout and stderr are appended to nohup.out, and the
# trailing "&" backgrounds the process.
os.system("nohup python src/Tulsi.py >> nohup.out 2>&1 &")
| [
"os.system"
] | [((11, 70), 'os.system', 'os.system', (['"""nohup python src/Tulsi.py >> nohup.out 2>&1 &"""'], {}), "('nohup python src/Tulsi.py >> nohup.out 2>&1 &')\n", (20, 70), False, 'import os\n')] |
import unittest

from kafka_influxdb.encoder import heapster_json_encoder


class TestHeapsterJsonEncoder(unittest.TestCase):
    """Checks the Heapster JSON -> InfluxDB line-protocol encoder."""

    def setUp(self):
        self.encoder = heapster_json_encoder.Encoder()

    def testEncoder(self):
        # A single Heapster metrics sample as received from Kafka.
        raw_message = b'{ "MetricsName":"memory/major_page_faults","MetricsValue":{"value":56}, "MetricsTimestamp":"2017-01-19T17:26:00Z", "MetricsTags":{"container_name":"docker/9be430d3a1a28601292aebd76e15512d5471c630a7fa164d6a2a2fd9cbc19e3d"} } '
        expected = [
            'memory/major_page_faults,container_name=docker/9be430d3a1a28601292aebd76e15512d5471c630a7fa164d6a2a2fd9cbc19e3d value=56 1484846760']
        self.assertEqual(self.encoder.encode(raw_message), expected)
| [
"kafka_influxdb.encoder.heapster_json_encoder.Encoder"
] | [((171, 202), 'kafka_influxdb.encoder.heapster_json_encoder.Encoder', 'heapster_json_encoder.Encoder', ([], {}), '()\n', (200, 202), False, 'from kafka_influxdb.encoder import heapster_json_encoder\n')] |
# -*- coding: utf-8 -*-
# Copyright (C) 2010-2013 Mag. <NAME> All rights reserved
# Glasauergasse 32, A--1130 Wien, Austria. <EMAIL>
# ****************************************************************************
# This module is part of the package GTW.OMP.Auth.
#
# This module is licensed under the terms of the BSD 3-Clause License
# <http://www.c-tanzer.at/license/bsd_3c.html>.
# ****************************************************************************
#
#++
# Name
# GTW.OMP.Auth.Account_in_Group
#
# Purpose
# Model association Account_in_Group
#
# Revision Dates
# 16-Jan-2010 (CT) Creation
# 18-Jan-2010 (CT) `auto_cache` added
# 15-May-2013 (CT) Rename `auto_cache` to `auto_rev_ref`
# ««revision-date»»···
#--
from _MOM.import_MOM import *
from _GTW import GTW
from _GTW._OMP._Auth import Auth
import _GTW._OMP._Auth.Entity
import _GTW._OMP._Auth.Account
import _GTW._OMP._Auth.Group
from _TFL.I18N import _, _T, _Tn
_Ancestor_Essence = Auth.Link2
class Account_in_Group (_Ancestor_Essence) :
    """Model association Account_in_Group.

    MOM Link2 association recording that an `Auth.Account` (left role)
    is a member of an `Auth.Group` (right role).
    """

    class _Attributes (_Ancestor_Essence._Attributes) :
        _Ancestor = _Ancestor_Essence._Attributes

        class left (_Ancestor.left) :
            # Left role: the account holding the membership.
            # `auto_rev_ref` creates a reverse-reference cache on Auth.Account.
            role_type = Auth.Account
            auto_rev_ref = True
        # end class left

        class right (_Ancestor.right) :
            # Right role: the group the account belongs to.
            role_type = Auth.Group
            auto_rev_ref = True
        # end class right
    # end class _Attributes
# end class Account_in_Group
if __name__ != "__main__" :
GTW.OMP.Auth._Export ("*")
### __END__ GTW.OMP.Auth.Account_in_Group
| [
"_GTW.GTW.OMP.Auth._Export"
] | [((1610, 1635), '_GTW.GTW.OMP.Auth._Export', 'GTW.OMP.Auth._Export', (['"""*"""'], {}), "('*')\n", (1630, 1635), False, 'from _GTW import GTW\n')] |
from mtga.set_data import all_mtga_cards
from mtga.models.card import Card
from getpass import getuser
import json
from prettytable import PrettyTable
debug = False
class CardOwned:
    # NOTE(review): these are *class* attributes, shared by every instance
    # of CardOwned — presumably intended as per-instance defaults (a card
    # plus how many copies are owned); confirm before relying on
    # independent per-instance state.
    card = Card()
    owned = 0
filePath = "C:/Users/" + getuser() + "/AppData/LocalLow/Wizards Of The Coast/MTGA/output_log.txt"
filePos = 0
try:
log = open(filePath, 'r')
except FileNotFoundError:
print("error : log not found")
logContents = log.read()
def log_parse_json(start, end):
    """Extract and parse the JSON payload following the *last* occurrence
    of marker *start* in the MTGA log, up to the line ending with *end*.
    """
    # Skip past the request marker and its closing ')' before the payload.
    begin = logContents.find(')', logContents.rfind(start)) + 1
    finish = logContents.find(end + '\n', begin) + 1
    payload = logContents[begin:finish]
    if debug:
        print(payload)
    return json.loads(payload)
deckLists = log_parse_json("<== Deck.GetDeckListsV3(", ']')
collection = log_parse_json("<== PlayerInventory.GetPlayerCardsV3(", '}')
validCards = 0
cardCount = 0
sets = {}
rares = {}
setMaxNumber = {"M19": 280, "M20": 280, "DAR": 270, "GRN": 264, "RIX": 196, "RNA": 264, "WAR": 264, "XLN": 279}
def set_sort(dictionary, card, amount):
    """Accumulate per-set counters in *dictionary*.

    Each entry maps card.set -> [unique_card_count, total_copies]; the
    unique count grows by one per call, the copy count by *amount*.
    """
    counters = dictionary.get(card.set)
    if counters is None:
        dictionary[card.set] = [1, amount]
    else:
        dictionary[card.set] = [counters[0] + 1, counters[1] + amount]
def in_booster(card):
    """Return True when the card can appear in boosters of its set.

    "ANA" (Arena-only) cards never appear; otherwise the card's collector
    number must fall below the set's booster cutoff in setMaxNumber.
    """
    if card.set == "ANA":
        return False
    return card.set_number < setMaxNumber[card.set]
playerRares = 0
playerTotalRares = 0
cardList = {}
for key, value in collection.items():
try:
tempCard = all_mtga_cards.find_one(key)
# print(tempCard.card_type)
if tempCard.rarity != "Basic":
# print(tempCard.pretty_name + " " + tempCard.set + " x" + str(value) )
validCards = validCards + 1
cardCount += value
set_sort(sets, tempCard, value)
cardList[key] = value
if tempCard.rarity == "Rare":
set_sort(rares, tempCard, value)
playerRares = playerRares + 1
playerTotalRares = playerTotalRares + value
except ValueError as e:
pass
cards = 0
cardsInSet = {}
raresInSet = {}
mythicsInSet = {}
totalRares = 0
totalMythics = 0
for i in all_mtga_cards.cards:
if i.collectible is True:
cards = cards + 1
if i.rarity != "Basic":
set_sort(cardsInSet, i, 4)
if i.set != 'ANA':
if i.rarity == "Rare" and in_booster(i):
set_sort(raresInSet, i, 4)
totalRares = totalRares + 1
elif i.rarity == "Mythic Rare" and in_booster(i):
set_sort(mythicsInSet, i, 4)
print("###########################", raresInSet)
print("###########################", mythicsInSet)
cards = cards - 50
# print("Total count = " + str(cards-50))
# print("valid cards = " + str(validCards))
# print(raresInSet)
# print(cardsInSet)
# print(cards)
tableTitles = ['Unique', 'ALL']
tableColumns = [["Owned Cards", "Total Cards", "Percentage"], [validCards, cards, "{0:.0%}".format(validCards / cards)]]
# ,[cardCount,cards * 4,"{0:.0%}".format(cardCount/(cards * 4))]
for keys in sets:
tableTitles.append(keys)
# keys[0]
tUnique = PrettyTable()
iterator = 0
for i in tableTitles:
if i == tableTitles[0] or i == tableTitles[1]:
pass
else:
tableColumns.append([sets[i][0], cardsInSet[i][0], "{0:.0%}".format(sets[i][0] / cardsInSet[i][0])])
# print(tableColumns[iterator])
tUnique.add_column(i, tableColumns[iterator])
iterator = iterator + 1
print(tUnique)
tTotal = PrettyTable()
tableTitles[0] = 'Total'
tableColumns = [["Owned Cards", "Total Cards", "Percentage"],
[cardCount, cards * 4, "{0:.0%}".format(cardCount / (cards * 4))]]
iterator = 0
for i in tableTitles:
if i == tableTitles[0] or i == tableTitles[1]:
pass
else:
tableColumns.append([sets[i][1], cardsInSet[i][1], "{0:.0%}".format(sets[i][1] / cardsInSet[i][1])])
tTotal.add_column(i, tableColumns[iterator])
iterator = iterator + 1
print(tTotal)
def format_percentage(num):
    """Format fraction *num* as a whole-number percentage, e.g. 0.5 -> '50%'."""
    return format(num, ".0%")
def get_percentage(num1, num2):
    """Return num1/num2 as a whole-number percentage string, e.g. (1, 2) -> '50%'."""
    return format(num1 / num2, ".0%")
def make_table(table_name, column1, player_cards, complete_cards, index):
    """Print a PrettyTable of per-set collection completion.

    table_name     -- header for the row-label column
    column1        -- [owned, total, percentage] summary for the "ALL" column
    player_cards   -- {set: [unique, total]} counts the player owns
    complete_cards -- {set: [unique, total]} counts for a full collection
    index          -- which list element to compare (0 = unique, 1 = total)

    NOTE(review): mutates the module-level ``tableTitles`` list in place
    (its first entry becomes *table_name*) and prints directly.
    """
    newTable = PrettyTable()
    tableTitles[0] = table_name
    print(tableTitles)
    _tableColumns = [["Owned Cards", "Total Cards", "Percentage"], column1]
    _iterator = 0
    for i in tableTitles:
        try:
            # The first two titles are the label and "ALL" columns, whose
            # data is already in _tableColumns; every other title is a set.
            if i == tableTitles[0] or i == tableTitles[1]:
                pass
            else:
                num1 = player_cards[i][index]
                num2 = complete_cards[i][index]
                _tableColumns.append([num1, num2, get_percentage(num1, num2)])
                #print("flag")
            # print(_tableColumns[_iterator])
            newTable.add_column(i, _tableColumns[_iterator])
        except KeyError as error:
            # A set missing from one of the dicts: report it and move on.
            print(error)
        _iterator = _iterator + 1
    print(newTable)
def make_table_with_title(table_title, column0, column1, dict1, dict2):
    """Print a PrettyTable comparing two per-set dictionaries.

    table_title -- iterable of column titles; entries 0 and 1 are the
                   label and summary columns supplied via *column0*/*column1*
    column0     -- row labels for the first column
    column1     -- summary values for the second column
    dict1/dict2 -- {set: [_, value]} dicts; element [1] of each is compared

    NOTE(review): prints directly; missing sets are reported via KeyError.
    """
    newTable = PrettyTable()
    _tableColumns = [column0, column1]
    _iterator = 0
    for i in table_title:
        # print("iterator is = " + str(_iterator))
        try:
            # Titles 0 and 1 already have their columns; the rest are sets.
            if i == table_title[0] or i == table_title[1]:
                pass
            else:
                num1 = dict1[i][1]
                num2 = dict2[i][1]
                _tableColumns.append([num1, num2, get_percentage(num1, num2)])
            # print("flag_make")
            #print(_tableColumns)
            # print(_tableColumns[_iterator])
            newTable.add_column(i, _tableColumns[_iterator])
        except KeyError as error:
            print(error)
        _iterator = _iterator + 1
    print(newTable)
tableTitles.remove('ANA')
make_table("Rares Unique", [playerRares, totalRares, get_percentage(playerRares, totalRares)], rares, raresInSet,
0)
make_table("Rares Total ", [playerTotalRares, totalRares * 4, get_percentage(playerTotalRares, totalRares * 4)], rares,
raresInSet, 1)
# Gets The wishlist deck
wishList = {}
for i in deckLists:
if i["name"] == "$WishList":
# print(i["mainDeck"])
iterator = 0
while iterator < len(i["mainDeck"]):
wishList[str(i["mainDeck"][iterator])] = i["mainDeck"][iterator+1]
iterator = iterator + 2
# var = input("Press enter to quit")
# print(wishList)
wishListDuds = []
wishCards = 0
for key in wishList:
if key in cardList:
if wishList[key] <= cardList[key]:
wishListDuds.append(key)
else:
wishList[key] = wishList[key] - cardList[key]
wishCards = wishCards + wishList[key]
for i in wishListDuds:
print(all_mtga_cards.find_one(i))
wishList.pop(i)
#print(wishList)
wishListSet = {}
playerRareMythics = {}
for key, value in collection.items():
try:
tempCard = all_mtga_cards.find_one(key)
if tempCard.rarity != "Basic":
# validCards = validCards + 1
# cardCount += value
# set_sort(sets, tempCard, value)
# cardList[key] = value
if tempCard.rarity == "Rare" or tempCard.rarity == "Mythic Rare" and in_booster(tempCard):
# playerRares = playerRares + 1
# playerTotalRares = playerTotalRares + value
playerRareMythics[tempCard.mtga_id] = value
except ValueError as e:
pass
rareMythicsList = {}
for i in all_mtga_cards.cards:
if i.collectible is True and in_booster(i) is True:
cards = cards + 1
if i.set != 'ANA':
if i.rarity == "Rare" or i.rarity == "Mythic Rare" and in_booster(i):
rareMythicsList[i.mtga_id] = 4
print(rareMythicsList)
class CCardListData:
    """Split a {mtga_id: count} card list into rare/mythic totals per set."""

    def __init__(self, wish_list):
        self.raresDict = {}
        self.mythicsDict = {}
        self.rares = 0
        self.mythics = 0
        self.wishList = wish_list
        self.sort_data()

    def set_arrange(self, card, dictionary, amount):
        """Add *amount* to the running total for the card's set."""
        dictionary[card.set] = dictionary.get(card.set, 0) + amount

    def sort_data(self):
        """Walk the card list and accumulate rare/mythic counts by set."""
        for card_id in self.wishList:
            card = all_mtga_cards.find_one(card_id)
            count = self.wishList[card_id]
            if card.rarity == "Rare":
                self.rares += count
                self.set_arrange(card, self.raresDict, count)
            elif card.rarity == "Mythic Rare":
                self.mythics += count
                self.set_arrange(card, self.mythicsDict, count)
WishListData = CCardListData(wishList)
CardData = CCardListData(rareMythicsList)
PlayerCardData = CCardListData(playerRareMythics)
for i in rareMythicsList:
if i in playerRareMythics:
rareMythicsList[i] = rareMythicsList[i] - playerRareMythics[i]
MissingCardData = CCardListData(rareMythicsList)
print("################################################")
print(WishListData.raresDict)
print(CardData.raresDict)
for i in wishList:
set_sort(wishListSet, all_mtga_cards.find_one(i), wishList[i])
card = all_mtga_cards.find_one(i)
#print(wishListSet)
tableTitlesRares = ["Rare Boosters", "All"]
tableTitlesWish = []
for i in tableTitles:
if i in wishListSet:
tableTitlesRares.append(i)
tableTitlesWish.append(i)
#print(tableTitles)
#print(tableTitlesRares)
missingRaresSet = {}
for i in wishListSet:
missingRaresSet[i] = [0, raresInSet[i][1] - rares[i][1]]
missingRares = totalRares * 4 - playerTotalRares
col0 = ["Wanted", "Total missing", "Percentage"]
col1 = [wishCards, missingRares, get_percentage(wishCards, missingRares)]
make_table_with_title(tableTitlesRares, col0, col1, wishListSet, missingRaresSet)
class Column:
    """One column of the booster-completion table.

    Holds wanted/missing rare and mythic counts plus a combined completion
    percentage weighted 88% rares / 12% mythics.  Header columns pass
    strings, in which case the percentage stays at the placeholder 0.
    """

    def __init__(self, _title, _wanted_rares, _total_rares, _wanted_mythics, _total_mythics):
        self.title = _title
        self.wantedRares = _wanted_rares
        self.totalRares = _total_rares
        self.wantedMythics = _wanted_mythics
        self.totalMythics = _total_mythics
        try:
            rare_part = (_wanted_rares / _total_rares) * 0.88
            mythic_part = (_wanted_mythics / _total_mythics) * 0.12
            self.percentage = format_percentage(rare_part + mythic_part)
        except TypeError:
            # Non-numeric (header) values: keep the placeholder.
            self.percentage = 0

    def get_title(self):
        return self.title

    def get_column(self):
        return [self.wantedRares, self.totalRares,
                self.wantedMythics, self.totalMythics, self.percentage]
class Table:
    """Booster-completion table: one column per set plus a summary column.

    Fixes over the original:
    * the PrettyTable, columns, and ``prepared`` flag used to be *class*
      attributes shared by every instance (and built at class-definition
      time); they are now per-instance state created in __init__;
    * the "All" column was constructed with the "Inserted Later"
      placeholder strings, so its percentage silently stayed 0; it is now
      built with the real mythic counts so the percentage is computed.
    """

    def __init__(self, set_list, wish_list_data, missing_data):
        self.table = PrettyTable()
        # Row-label column; its percentage slot shows the header text.
        self.TitleColumn = Column("Booster %", "Wanted Rares", "Missing Rares",
                                  "Wanted Mythics", "Missing Mythics")
        self.TitleColumn.percentage = "Percent"
        # Aggregate column over all sets, built with real mythic counts.
        self.TotalColumn = Column("All", wishCards, missingRares,
                                  wish_list_data.mythics, missing_data.mythics)
        # One column per set in set_list.
        self.SetColumns = [
            Column(i, wish_list_data.raresDict[i], missing_data.raresDict[i],
                   wish_list_data.mythicsDict[i], missing_data.mythicsDict[i])
            for i in set_list
        ]
        self.prepared = False

    def prepare_table(self):
        """Populate the PrettyTable; may only be called once."""
        if not self.prepared:
            self.prepared = True
            self.table.add_column(self.TitleColumn.get_title(), self.TitleColumn.get_column())
            self.table.add_column(self.TotalColumn.get_title(), self.TotalColumn.get_column())
            for column in self.SetColumns:
                self.table.add_column(column.get_title(), column.get_column())
        else:
            print("ERROR - already Prepared")

    def print_table(self):
        """Print the table; prepare_table() must have run first."""
        if self.prepared:
            print(self.table)
        else:
            print("ERROR table not prepared")
BoosterTable = Table(tableTitlesWish, WishListData, MissingCardData)
BoosterTable.prepare_table()
BoosterTable.print_table()
tableColumns = []
for i in tableTitlesRares:
tableColumns.append(Column(i, 1, 2, 3, 4))
for i in wishList:
card = all_mtga_cards.find_one(i)
wishList[i] = [all_mtga_cards.find_one(i), wishList[i]]
#newTable = PrettyTable()
#for i in tableColumns:
# newTable.add_column(i.get_title(), i.get_column())
#print(newTable)
# TODO: use statistics to determine percentage
# TODO: fix makeTable
# TODO: Autoupdate for cards in set?
| [
"prettytable.PrettyTable",
"json.loads",
"mtga.models.card.Card",
"mtga.set_data.all_mtga_cards.find_one",
"getpass.getuser"
] | [((3339, 3352), 'prettytable.PrettyTable', 'PrettyTable', ([], {}), '()\n', (3350, 3352), False, 'from prettytable import PrettyTable\n'), ((3711, 3724), 'prettytable.PrettyTable', 'PrettyTable', ([], {}), '()\n', (3722, 3724), False, 'from prettytable import PrettyTable\n'), ((195, 201), 'mtga.models.card.Card', 'Card', ([], {}), '()\n', (199, 201), False, 'from mtga.models.card import Card\n'), ((758, 812), 'json.loads', 'json.loads', (['logContents[collectionStart:collectionEnd]'], {}), '(logContents[collectionStart:collectionEnd])\n', (768, 812), False, 'import json\n'), ((4431, 4444), 'prettytable.PrettyTable', 'PrettyTable', ([], {}), '()\n', (4442, 4444), False, 'from prettytable import PrettyTable\n'), ((5240, 5253), 'prettytable.PrettyTable', 'PrettyTable', ([], {}), '()\n', (5251, 5253), False, 'from prettytable import PrettyTable\n'), ((9360, 9386), 'mtga.set_data.all_mtga_cards.find_one', 'all_mtga_cards.find_one', (['i'], {}), '(i)\n', (9383, 9386), False, 'from mtga.set_data import all_mtga_cards\n'), ((10772, 10785), 'prettytable.PrettyTable', 'PrettyTable', ([], {}), '()\n', (10783, 10785), False, 'from prettytable import PrettyTable\n'), ((12546, 12572), 'mtga.set_data.all_mtga_cards.find_one', 'all_mtga_cards.find_one', (['i'], {}), '(i)\n', (12569, 12572), False, 'from mtga.set_data import all_mtga_cards\n'), ((243, 252), 'getpass.getuser', 'getuser', ([], {}), '()\n', (250, 252), False, 'from getpass import getuser\n'), ((1688, 1716), 'mtga.set_data.all_mtga_cards.find_one', 'all_mtga_cards.find_one', (['key'], {}), '(key)\n', (1711, 1716), False, 'from mtga.set_data import all_mtga_cards\n'), ((6908, 6934), 'mtga.set_data.all_mtga_cards.find_one', 'all_mtga_cards.find_one', (['i'], {}), '(i)\n', (6931, 6934), False, 'from mtga.set_data import all_mtga_cards\n'), ((7081, 7109), 'mtga.set_data.all_mtga_cards.find_one', 'all_mtga_cards.find_one', (['key'], {}), '(key)\n', (7104, 7109), False, 'from mtga.set_data import all_mtga_cards\n'), 
((9308, 9334), 'mtga.set_data.all_mtga_cards.find_one', 'all_mtga_cards.find_one', (['i'], {}), '(i)\n', (9331, 9334), False, 'from mtga.set_data import all_mtga_cards\n'), ((12592, 12618), 'mtga.set_data.all_mtga_cards.find_one', 'all_mtga_cards.find_one', (['i'], {}), '(i)\n', (12615, 12618), False, 'from mtga.set_data import all_mtga_cards\n'), ((8247, 8273), 'mtga.set_data.all_mtga_cards.find_one', 'all_mtga_cards.find_one', (['i'], {}), '(i)\n', (8270, 8273), False, 'from mtga.set_data import all_mtga_cards\n')] |
#!/usr/bin/python3
import pygame
from pygame import Color
from enum import Enum
from random import randint
import re
import sys
# Window geometry; `width` and `height` are read throughout for layout math.
size = width, height = [1024, 640]
# Create the display window (must happen before any drawing calls).
screen = pygame.display.set_mode(size)
pygame.display.set_caption("Hanoch")
# Clock used to throttle the idle loop at the end of run().
clock = pygame.time.Clock()
class Colors(Enum):
    """Fixed palette used for the background and default shapes (RGB)."""
    WHITE = (255, 255, 255)
    BLACK = (0, 0, 0)
def gera_cor_aleatoria():
    """Return a random color as a 4-tuple of ints in [0, 255] (RGBA).

    Idiom fix: feed ``tuple()`` a generator instead of materializing an
    intermediate list, and use ``_`` for the unused loop variable.
    """
    return tuple(randint(0, 255) for _ in range(4))
class Rect(pygame.Rect):
    """A pygame.Rect that carries a fill color and knows how to draw itself."""

    def __init__(self, left, top, width, height, color=None):
        # Default to black when no explicit color is given.
        self.color = color or Colors.BLACK.value
        super().__init__(left, top, width, height)

    def arrasta(self, left, top):
        """Drag the rect so its top-left corner lands on (left, top)."""
        dx = left - self.left
        dy = top - self.top
        self.move_ip(dx, dy)

    def desenha(self):
        """Paint this rect onto the active display surface."""
        surface = pygame.display.get_surface()
        pygame.draw.rect(surface, self.color, self)
class Torre(Rect):
    """One of the three pegs; keeps its stack of discs, bottom first."""

    def __init__(self, n):
        self.discos = []
        self.n = n  # peg index (1..3); determines horizontal position
        self.width = 5
        self.top = 40
        self.height = height - self.top
        # Peg n is centered at n quarters of the (fixed) 1024-pixel width.
        self.left = ((1024 / 4) * self.n) - (self.width / 2)
        super().__init__(self.left, self.top, self.width, self.height)

    def empilha_disco(self, d):
        """Place disc *d* on top of this peg and reposition it visually."""
        d.move_para_torre(self)
        self.discos.append(d)

    def desempilha_disco(self):
        """Remove and return the top disc; None when the peg is empty."""
        if not self.discos:
            return None
        return self.discos.pop()
class Disco(Rect):
    """A disc whose width grows with its weight (peso)."""

    def __init__(self, peso):
        self.peso = peso
        # Width scales linearly with the weight; height is fixed.
        self.width = (200 * self.peso) / 16
        self.height = 20
        self.color = gera_cor_aleatoria()
        super().__init__(0, 0, self.width, self.height, self.color)

    def move_para_torre(self, torre):
        """Snap this disc onto *torre*, resting on top of its current stack."""
        # Horizontally centered on the peg, vertically stacked from the bottom.
        centro_x = (width / 4) * torre.n
        pos_x = centro_x - (self.width / 2)
        pos_y = height - (self.height * (len(torre.discos) + 1))
        self.arrasta(pos_x, pos_y)
        self.desenha()
def coloca_ndiscos(torre, ndiscos):
    """Seed *torre* with *ndiscos* discs, widest (heaviest) at the bottom."""
    for peso in range(ndiscos, 0, -1):
        torre.empilha_disco(Disco(peso))
def move_disco(origem, destino):
    """Transfer the topmost disc from *origem* to *destino*, if one exists."""
    topo = origem.desempilha_disco()
    if topo:
        destino.empilha_disco(topo)
def quit_on_close():
    """Drain pending events; shut pygame down and exit on window close."""
    for ev in pygame.event.get():
        if ev.type != pygame.QUIT:
            continue
        pygame.quit()
        quit()
def traduz_comandos(comando):
    """Translate a textual command token into its numeric value.

    Peg labels 'A', 'B', 'C' map to indices 0, 1, 2; the arrows '->' and
    '<-' map to the direction flags 1 and -1. Any other token yields None.
    """
    pegs = ('A', 'B', 'C')
    if comando in pegs:
        return pegs.index(comando)
    setas = {'->': 1, '<-': -1}
    return setas.get(comando)
def run():
    """Read Hanoi commands from stdin and animate them.

    Expected input: a header line containing 'DISCOS: <n>' that seeds peg A
    with n discs, followed by move lines matching '<peg> -> <peg>' or
    '<peg> <- <peg>'. The scene is redrawn after every input line; once
    stdin is exhausted the window stays open until the user closes it.
    """
    torres = [Torre(i) for i in range(1, 3 + 1)]
    move_pattern = re.compile("(A|B|C) (->|<-) (A|B|C)")
    for line in sys.stdin:
        quit_on_close()
        if 'DISCOS:' in line:
            # Setup line: put the requested number of discs on the first peg.
            ndiscos = int(line.split()[1])
            coloca_ndiscos(torres[0], ndiscos)
        else:
            m = move_pattern.search(line)
            if m:
                origem = direcao = destino = 0
                origem = traduz_comandos(m.group(1))
                direcao = traduz_comandos(m.group(2))
                destino = traduz_comandos(m.group(3))
                # '->' moves origem -> destino; '<-' reverses the roles.
                if direcao == 1:
                    move_disco(torres[origem],
                               torres[destino])
                else:
                    move_disco(torres[destino],
                               torres[origem])
        # Redraw the whole scene: background, pegs, then their discs.
        screen.fill(Colors.WHITE.value)
        for torre in torres:
            torre.desenha()
            for disco in torre.discos:
                disco.desenha()
        pygame.display.update()
        pygame.display.flip()
        #clock.tick(2)
    # Input finished: keep the window responsive until the user closes it.
    while True:
        quit_on_close()
        clock.tick(1)
# Entry point: start the visualiser when executed as a script.
if __name__ == '__main__':
    run()
| [
"pygame.display.set_caption",
"pygame.quit",
"re.compile",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.display.flip",
"pygame.display.get_surface",
"pygame.time.Clock",
"pygame.display.update",
"random.randint"
] | [((174, 203), 'pygame.display.set_mode', 'pygame.display.set_mode', (['size'], {}), '(size)\n', (197, 203), False, 'import pygame\n'), ((204, 240), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Hanoch"""'], {}), "('Hanoch')\n", (230, 240), False, 'import pygame\n'), ((250, 269), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (267, 269), False, 'import pygame\n'), ((2191, 2209), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (2207, 2209), False, 'import pygame\n'), ((2594, 2631), 're.compile', 're.compile', (['"""(A|B|C) (->|<-) (A|B|C)"""'], {}), "('(A|B|C) (->|<-) (A|B|C)')\n", (2604, 2631), False, 'import re\n'), ((3557, 3580), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (3578, 3580), False, 'import pygame\n'), ((3589, 3610), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (3608, 3610), False, 'import pygame\n'), ((384, 399), 'random.randint', 'randint', (['(0)', '(255)'], {}), '(0, 255)\n', (391, 399), False, 'from random import randint\n'), ((798, 826), 'pygame.display.get_surface', 'pygame.display.get_surface', ([], {}), '()\n', (824, 826), False, 'import pygame\n'), ((2261, 2274), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (2272, 2274), False, 'import pygame\n')] |
# Copyright 2020 Amazon.com, Inc. or its affiliates.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file.
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
import sys
import boto3
import requests
from neptune_python_utils.endpoints import Endpoints
class GlueNeptuneConnectionInfo:
    """Resolves Amazon Neptune endpoint information stored in the AWS Glue Data Catalog."""

    def __init__(self, region, role_arn):
        """
        :param region: AWS region name the Glue Data Catalog lives in.
        :param role_arn: IAM role ARN to embed in the returned Endpoints object.
        """
        self.region = region
        self.role_arn = role_arn

    def neptune_endpoints(self, connection_name):
        """Gets Neptune endpoint information from the AWS Glue Data Catalog.

        You may need to install a Glue VPC Endpoint in your VPC for this method to work.
        You can either create a Glue Connection type of 'JDBC' or 'NETWORK'.
        When you use Glue Connection Type of 'JDBC' store the Amazon Neptune endpoint in the
        'JDBC_CONNECTION_URL' field, e.g. 'jdbc:wss://my-neptune-cluster.us-east-1.neptune.amazonaws.com:8182/gremlin'.
        When you use Glue Connection Type of 'NETWORK' store the Amazon Neptune endpoint in the
        'Description' field, e.g. 'wss://my-neptune-cluster.us-east-1.neptune.amazonaws.com:8182/gremlin'.

        :param connection_name: Name of the Glue connection to look up.
        :returns: an ``Endpoints`` object for the resolved host/port.
        :raises ValueError: if the connection type is neither 'JDBC' nor 'NETWORK'.

        Example:
        >>> gremlin_endpoint = GlueNeptuneConnectionInfo(glueContext).neptune_endpoint('neptune')
        """
        glue = boto3.client('glue', region_name=self.region)
        connection = glue.get_connection(Name=connection_name)['Connection']
        connection_type = connection['ConnectionType']
        if connection_type == "JDBC":
            # Strip the leading 'jdbc:' prefix from the stored URL.
            neptune_uri = connection['ConnectionProperties']['JDBC_CONNECTION_URL'][5:]
        elif connection_type == "NETWORK":
            neptune_uri = connection['Description']
        else:
            # Fix: previously any other connection type crashed later with an
            # UnboundLocalError on neptune_uri; fail fast with a clear message.
            raise ValueError(
                "Unsupported Glue connection type '{}' for connection '{}': "
                "expected 'JDBC' or 'NETWORK'".format(connection_type, connection_name))
        parse_result = requests.utils.urlparse(neptune_uri)
        netloc_parts = parse_result.netloc.split(':')
        host = netloc_parts[0]
        port = netloc_parts[1]
        return Endpoints(neptune_endpoint=host, neptune_port=port, region_name=self.region, role_arn=self.role_arn)
"neptune_python_utils.endpoints.Endpoints",
"boto3.client",
"requests.utils.urlparse"
] | [((1841, 1886), 'boto3.client', 'boto3.client', (['"""glue"""'], {'region_name': 'self.region'}), "('glue', region_name=self.region)\n", (1853, 1886), False, 'import boto3\n'), ((2235, 2271), 'requests.utils.urlparse', 'requests.utils.urlparse', (['neptune_uri'], {}), '(neptune_uri)\n', (2258, 2271), False, 'import requests\n'), ((2412, 2516), 'neptune_python_utils.endpoints.Endpoints', 'Endpoints', ([], {'neptune_endpoint': 'host', 'neptune_port': 'port', 'region_name': 'self.region', 'role_arn': 'self.role_arn'}), '(neptune_endpoint=host, neptune_port=port, region_name=self.region,\n role_arn=self.role_arn)\n', (2421, 2516), False, 'from neptune_python_utils.endpoints import Endpoints\n')] |
import os
import io
import sys
import numpy as np
from array import array
import cv2
from PIL import Image, ImageDraw
import streamlit as st
from image_processing import *
from azure_api import *
# Emoji image used as the browser-tab favicon.
MAGE_EMOJI_URL = "https://emojipedia-us.s3.dualstack.us-west-1.amazonaws.com/thumbs/240/twitter/259/mage_1f9d9.png"
# Set page title and favicon.
st.set_page_config(
    page_title="OCR Generator", page_icon=MAGE_EMOJI_URL,
)
st.title('A web app for OCR on images')
# File-upload widget; evaluates to None until the user provides an image.
image = st.file_uploader('Upload an image', type=['png', 'jpg'])
# Azure credentials come from the environment (None when unset).
subscription_key = os.environ.get('subscription_key')
endpoint = os.environ.get('endpoint')
if image is not None:
    # Show the raw upload first.
    st.image(image)
    # get_ocr/encode_bboxes come from azure_api/image_processing (star imports);
    # presumably boxes are OCR bounding boxes and texts the recognized strings -- confirm there.
    boxes, texts = get_ocr(image, subscription_key, endpoint)
    polygons = encode_bboxes(boxes)
    # Decode the uploaded file into an array, then draw boxes and text onto it.
    image = np.array(Image.open(image))
    image = overlay_rect(image, polygons)
    image = overlay_text(image, boxes, texts)
    st.image(image)
| [
"streamlit.image",
"PIL.Image.open",
"streamlit.file_uploader",
"os.environ.get",
"streamlit.set_page_config",
"streamlit.title"
] | [((345, 417), 'streamlit.set_page_config', 'st.set_page_config', ([], {'page_title': '"""OCR Generator"""', 'page_icon': 'MAGE_EMOJI_URL'}), "(page_title='OCR Generator', page_icon=MAGE_EMOJI_URL)\n", (363, 417), True, 'import streamlit as st\n'), ((426, 465), 'streamlit.title', 'st.title', (['"""A web app for OCR on images"""'], {}), "('A web app for OCR on images')\n", (434, 465), True, 'import streamlit as st\n'), ((475, 531), 'streamlit.file_uploader', 'st.file_uploader', (['"""Upload an image"""'], {'type': "['png', 'jpg']"}), "('Upload an image', type=['png', 'jpg'])\n", (491, 531), True, 'import streamlit as st\n'), ((552, 586), 'os.environ.get', 'os.environ.get', (['"""subscription_key"""'], {}), "('subscription_key')\n", (566, 586), False, 'import os\n'), ((598, 624), 'os.environ.get', 'os.environ.get', (['"""endpoint"""'], {}), "('endpoint')\n", (612, 624), False, 'import os\n'), ((652, 667), 'streamlit.image', 'st.image', (['image'], {}), '(image)\n', (660, 667), True, 'import streamlit as st\n'), ((903, 918), 'streamlit.image', 'st.image', (['image'], {}), '(image)\n', (911, 918), True, 'import streamlit as st\n'), ((789, 806), 'PIL.Image.open', 'Image.open', (['image'], {}), '(image)\n', (799, 806), False, 'from PIL import Image, ImageDraw\n')] |
import datetime
from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, \
StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, \
S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream
from bitmovin.errors import BitmovinError
# Bitmovin account API key.
API_KEY = '<YOUR_API_KEY>'
# Source S3 bucket credentials and object path.
# NOTE(review): the placeholder text says "OUTPUT" for the input credentials --
# presumably a copy-paste slip in the placeholders; double-check when filling in.
S3_INPUT_ACCESSKEY = '<YOUR_S3_OUTPUT_ACCESSKEY>'
S3_INPUT_SECRETKEY = '<YOUR_S3_OUTPUT_SECRETKEY>'
S3_INPUT_BUCKETNAME = '<YOUR_S3_OUTPUT_BUCKETNAME>'
S3_INPUT_PATH = '<YOUR_S3_INPUT_PATH>'
# Destination S3 bucket credentials.
S3_OUTPUT_ACCESSKEY = '<YOUR_S3_OUTPUT_ACCESSKEY>'
S3_OUTPUT_SECRETKEY = '<YOUR_S3_OUTPUT_SECRETKEY>'
S3_OUTPUT_BUCKETNAME = '<YOUR_S3_OUTPUT_BUCKETNAME>'
# FairPlay DRM parameters: content key, initialization vector, license-server URI.
FAIRPLAY_KEY = '<YOUR_FAIRPLAY_KEY>'
FAIRPLAY_IV = '<YOUR_FAIRPLAY_IV>'
FAIRPLAY_URI = '<YOUR_FAIRPLAY_LICENSING_URL>'
# Timestamp like '2021-03-04__05-06-07' used to namespace the output folder.
# Replaces an opaque str()/replace()/split() chain with the equivalent strftime.
date_component = datetime.datetime.now().strftime('%Y-%m-%d__%H-%M-%S')
OUTPUT_BASE_PATH = 'your/output/base/path/{}/'.format(date_component)
#: Video rendition ladder: (label, frame height, bitrate in bit/s).
_VIDEO_RENDITIONS = (
    ('480p', 480, 1200000),
    ('360p', 360, 800000),
    ('240p', 240, 400000),
)


def main():
    """Encode an S3 source into a FairPlay-protected HLS ladder and write the manifest.

    Steps: create input/output/encoding resources, one H264 stream per entry
    in ``_VIDEO_RENDITIONS`` plus one stereo AAC stream, wrap each in a TS
    muxing protected with FairPlay DRM, run the encoding, then generate the
    HLS master manifest referencing every rendition. The three identical
    per-rendition pipelines of the original are factored into helpers.
    """
    bitmovin = Bitmovin(api_key=API_KEY)

    s3_input = S3Input(access_key=S3_INPUT_ACCESSKEY,
                        secret_key=S3_INPUT_SECRETKEY,
                        bucket_name=S3_INPUT_BUCKETNAME,
                        name='Sample S3 Input')  # fix: was mislabelled 'Sample S3 Output'
    s3_input = bitmovin.inputs.S3.create(s3_input).resource

    s3_output = S3Output(access_key=S3_OUTPUT_ACCESSKEY,
                          secret_key=S3_OUTPUT_SECRETKEY,
                          bucket_name=S3_OUTPUT_BUCKETNAME,
                          name='Sample S3 Output')
    s3_output = bitmovin.outputs.S3.create(s3_output).resource

    encoding = Encoding(name='hls fairplay example encoding - {}'.format(date_component))
    encoding = bitmovin.encodings.Encoding.create(encoding).resource

    # Both stream inputs read the same source object; the service picks the
    # video/audio tracks automatically (SelectionMode.AUTO).
    video_input_stream = StreamInput(input_id=s3_input.id,
                                      input_path=S3_INPUT_PATH,
                                      selection_mode=SelectionMode.AUTO)
    audio_input_stream = StreamInput(input_id=s3_input.id,
                                      input_path=S3_INPUT_PATH,
                                      selection_mode=SelectionMode.AUTO)

    acl_entry = ACLEntry(permission=ACLPermission.PUBLIC_READ)

    video_renditions = [
        _create_video_rendition(bitmovin, encoding, s3_output, acl_entry,
                                video_input_stream, label, height, bitrate)
        for label, height, bitrate in _VIDEO_RENDITIONS
    ]
    audio_rendition = _create_audio_rendition(bitmovin, encoding, s3_output,
                                              acl_entry, audio_input_stream)

    bitmovin.encodings.Encoding.start(encoding_id=encoding.id)
    try:
        bitmovin.encodings.Encoding.wait_until_finished(encoding_id=encoding.id)
    except BitmovinError as bitmovin_error:
        print("Exception occurred while waiting for encoding to finish: {}".format(bitmovin_error))

    _create_hls_manifest(bitmovin, encoding, s3_output, acl_entry,
                         video_renditions, audio_rendition)


def _create_video_rendition(bitmovin, encoding, s3_output, acl_entry,
                            video_input_stream, label, height, bitrate):
    """Create codec config, stream, TS muxing and FairPlay DRM for one video rendition.

    Returns a dict with keys ``label``, ``stream``, ``muxing``, ``output`` and ``drm``.
    """
    codec_configuration = H264CodecConfiguration(
        name='example_video_codec_configuration_{}'.format(label),
        bitrate=bitrate,
        rate=None,
        height=height,
        profile=H264Profile.HIGH)
    codec_configuration = bitmovin.codecConfigurations.H264.create(codec_configuration).resource

    stream = Stream(codec_configuration_id=codec_configuration.id,
                    input_streams=[video_input_stream],
                    name='Sample Stream {}'.format(label))
    stream = bitmovin.encodings.Stream.create(object_=stream, encoding_id=encoding.id).resource

    muxing_output = EncodingOutput(output_id=s3_output.id,
                                   output_path=OUTPUT_BASE_PATH + 'video/hls/{}'.format(label),
                                   acl=[acl_entry])
    muxing = TSMuxing(segment_length=4,
                      segment_naming='seg_%number%.ts',
                      streams=[MuxingStream(stream.id)],
                      name='Sample Muxing {}'.format(label))
    muxing = bitmovin.encodings.Muxing.TS.create(object_=muxing, encoding_id=encoding.id).resource

    fair_play = FairPlayDRM(key=FAIRPLAY_KEY,
                            iv=FAIRPLAY_IV,
                            uri=FAIRPLAY_URI,
                            outputs=[muxing_output],
                            name='FairPlay {}'.format(label))
    fair_play = bitmovin.encodings.Muxing.TS.DRM.FairPlay.create(object_=fair_play,
                                                                 encoding_id=encoding.id,
                                                                 muxing_id=muxing.id).resource

    return {'label': label, 'stream': stream, 'muxing': muxing,
            'output': muxing_output, 'drm': fair_play}


def _create_audio_rendition(bitmovin, encoding, s3_output, acl_entry, audio_input_stream):
    """Create the stereo AAC codec config, stream, TS muxing and FairPlay DRM.

    Returns a dict with keys ``stream``, ``muxing``, ``output`` and ``drm``.
    """
    codec_configuration = AACCodecConfiguration(name='example_audio_codec_configuration_stereo',
                                                bitrate=128000,
                                                rate=48000)
    codec_configuration = bitmovin.codecConfigurations.AAC.create(codec_configuration).resource

    stream = Stream(codec_configuration_id=codec_configuration.id,
                    input_streams=[audio_input_stream],
                    name='Sample Audio Stream EN Stereo')
    stream = bitmovin.encodings.Stream.create(object_=stream, encoding_id=encoding.id).resource

    muxing_output = EncodingOutput(output_id=s3_output.id,
                                   output_path=OUTPUT_BASE_PATH + 'audio/hls/en_2_0',
                                   acl=[acl_entry])
    muxing = TSMuxing(segment_length=4,
                      segment_naming='seg_%number%.ts',
                      streams=[MuxingStream(stream.id)],
                      name='Sample Audio Muxing EN Stereo')
    muxing = bitmovin.encodings.Muxing.TS.create(object_=muxing, encoding_id=encoding.id).resource

    fair_play = FairPlayDRM(key=FAIRPLAY_KEY,
                            iv=FAIRPLAY_IV,
                            uri=FAIRPLAY_URI,
                            outputs=[muxing_output],
                            name='FairPlay Audio')
    fair_play = bitmovin.encodings.Muxing.TS.DRM.FairPlay.create(object_=fair_play,
                                                                 encoding_id=encoding.id,
                                                                 muxing_id=muxing.id).resource

    return {'stream': stream, 'muxing': muxing, 'output': muxing_output, 'drm': fair_play}


def _create_hls_manifest(bitmovin, encoding, s3_output, acl_entry,
                         video_renditions, audio_rendition):
    """Build the HLS master manifest: one audio media entry plus one variant per rendition."""
    manifest_output = EncodingOutput(output_id=s3_output.id,
                                     output_path=OUTPUT_BASE_PATH,
                                     acl=[acl_entry])
    hls_manifest = HlsManifest(manifest_name='example_manifest_hls.m3u8',
                              outputs=[manifest_output],
                              name='Sample HLS FairPlay Manifest')
    hls_manifest = bitmovin.manifests.HLS.create(hls_manifest).resource

    audio_media = AudioMedia(name='Sample Audio Media',
                            group_id='audio_group',
                            segment_path=audio_rendition['output'].outputPath,
                            encoding_id=encoding.id,
                            stream_id=audio_rendition['stream'].id,
                            muxing_id=audio_rendition['muxing'].id,
                            drm_id=audio_rendition['drm'].id,
                            language='en',
                            uri='audiomedia.m3u8')
    audio_media = bitmovin.manifests.HLS.AudioMedia.create(manifest_id=hls_manifest.id,
                                                           object_=audio_media).resource

    for rendition in video_renditions:
        variant_stream = VariantStream(audio=audio_media.groupId,
                                      closed_captions='NONE',
                                      segment_path=rendition['output'].outputPath,
                                      uri='video_{}.m3u8'.format(rendition['label']),
                                      encoding_id=encoding.id,
                                      stream_id=rendition['stream'].id,
                                      muxing_id=rendition['muxing'].id,
                                      drm_id=rendition['drm'].id)
        bitmovin.manifests.HLS.VariantStream.create(manifest_id=hls_manifest.id,
                                                    object_=variant_stream)

    bitmovin.manifests.HLS.start(manifest_id=hls_manifest.id)
    try:
        bitmovin.manifests.HLS.wait_until_finished(manifest_id=hls_manifest.id)
    except BitmovinError as bitmovin_error:
        print("Exception occurred while waiting for HLS manifest creation to finish: {}".format(bitmovin_error))
# Entry point: run the full encoding + manifest example when executed directly.
if __name__ == '__main__':
    main()
| [
"bitmovin.HlsManifest",
"bitmovin.VariantStream",
"bitmovin.H264CodecConfiguration",
"bitmovin.Bitmovin",
"bitmovin.FairPlayDRM",
"bitmovin.AACCodecConfiguration",
"bitmovin.MuxingStream",
"bitmovin.AudioMedia",
"bitmovin.S3Input",
"datetime.datetime.now",
"bitmovin.Stream",
"bitmovin.ACLEntry... | [((1054, 1079), 'bitmovin.Bitmovin', 'Bitmovin', ([], {'api_key': 'API_KEY'}), '(api_key=API_KEY)\n', (1062, 1079), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((1096, 1227), 'bitmovin.S3Input', 'S3Input', ([], {'access_key': 'S3_INPUT_ACCESSKEY', 'secret_key': 'S3_INPUT_SECRETKEY', 'bucket_name': 'S3_INPUT_BUCKETNAME', 'name': '"""Sample S3 Output"""'}), "(access_key=S3_INPUT_ACCESSKEY, secret_key=S3_INPUT_SECRETKEY,\n bucket_name=S3_INPUT_BUCKETNAME, name='Sample S3 Output')\n", (1103, 1227), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((1370, 1505), 'bitmovin.S3Output', 'S3Output', ([], {'access_key': 'S3_OUTPUT_ACCESSKEY', 'secret_key': 'S3_OUTPUT_SECRETKEY', 'bucket_name': 'S3_OUTPUT_BUCKETNAME', 'name': '"""Sample S3 Output"""'}), "(access_key=S3_OUTPUT_ACCESSKEY, secret_key=S3_OUTPUT_SECRETKEY,\n bucket_name=S3_OUTPUT_BUCKETNAME, name='Sample S3 Output')\n", (1378, 1505), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((1838, 1977), 'bitmovin.H264CodecConfiguration', 'H264CodecConfiguration', ([], {'name': '"""example_video_codec_configuration_480p"""', 'bitrate': '(1200000)', 'rate': 'None', 'height': '(480)', 'profile': 'H264Profile.HIGH'}), "(name='example_video_codec_configuration_480p',\n bitrate=1200000, rate=None, 
height=480, profile=H264Profile.HIGH)\n", (1860, 1977), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((2371, 2509), 'bitmovin.H264CodecConfiguration', 'H264CodecConfiguration', ([], {'name': '"""example_video_codec_configuration_360p"""', 'bitrate': '(800000)', 'rate': 'None', 'height': '(360)', 'profile': 'H264Profile.HIGH'}), "(name='example_video_codec_configuration_360p',\n bitrate=800000, rate=None, height=360, profile=H264Profile.HIGH)\n", (2393, 2509), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((2903, 3041), 'bitmovin.H264CodecConfiguration', 'H264CodecConfiguration', ([], {'name': '"""example_video_codec_configuration_240p"""', 'bitrate': '(400000)', 'rate': 'None', 'height': '(240)', 'profile': 'H264Profile.HIGH'}), "(name='example_video_codec_configuration_240p',\n bitrate=400000, rate=None, height=240, profile=H264Profile.HIGH)\n", (2925, 3041), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((3437, 3539), 'bitmovin.AACCodecConfiguration', 'AACCodecConfiguration', ([], {'name': '"""example_audio_codec_configuration_stereo"""', 'bitrate': '(128000)', 'rate': '(48000)'}), "(name='example_audio_codec_configuration_stereo',\n bitrate=128000, rate=48000)\n", (3458, 3539), False, 'from bitmovin import Bitmovin, Encoding, S3Output, 
H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((3815, 3914), 'bitmovin.StreamInput', 'StreamInput', ([], {'input_id': 's3_input.id', 'input_path': 'S3_INPUT_PATH', 'selection_mode': 'SelectionMode.AUTO'}), '(input_id=s3_input.id, input_path=S3_INPUT_PATH, selection_mode=\n SelectionMode.AUTO)\n', (3826, 3914), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((4020, 4119), 'bitmovin.StreamInput', 'StreamInput', ([], {'input_id': 's3_input.id', 'input_path': 'S3_INPUT_PATH', 'selection_mode': 'SelectionMode.AUTO'}), '(input_id=s3_input.id, input_path=S3_INPUT_PATH, selection_mode=\n SelectionMode.AUTO)\n', (4031, 4119), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((4234, 4365), 'bitmovin.Stream', 'Stream', ([], {'codec_configuration_id': 'video_codec_configuration_480p.id', 'input_streams': '[video_input_stream]', 'name': '"""Sample Stream 480p"""'}), "(codec_configuration_id=video_codec_configuration_480p.id,\n input_streams=[video_input_stream], name='Sample Stream 480p')\n", (4240, 4365), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((4624, 4755), 'bitmovin.Stream', 'Stream', 
([], {'codec_configuration_id': 'video_codec_configuration_360p.id', 'input_streams': '[video_input_stream]', 'name': '"""Sample Stream 360p"""'}), "(codec_configuration_id=video_codec_configuration_360p.id,\n input_streams=[video_input_stream], name='Sample Stream 360p')\n", (4630, 4755), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((5014, 5145), 'bitmovin.Stream', 'Stream', ([], {'codec_configuration_id': 'video_codec_configuration_240p.id', 'input_streams': '[video_input_stream]', 'name': '"""Sample Stream 240p"""'}), "(codec_configuration_id=video_codec_configuration_240p.id,\n input_streams=[video_input_stream], name='Sample Stream 240p')\n", (5020, 5145), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((5409, 5568), 'bitmovin.Stream', 'Stream', ([], {'codec_configuration_id': 'audio_codec_configuration_stereo.id', 'input_streams': '[audio_input_stream_en_stereo]', 'name': '"""Sample Audio Stream EN Stereo"""'}), "(codec_configuration_id=audio_codec_configuration_stereo.id,\n input_streams=[audio_input_stream_en_stereo], name=\n 'Sample Audio Stream EN Stereo')\n", (5415, 5568), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((5839, 5885), 'bitmovin.ACLEntry', 'ACLEntry', ([], {'permission': 'ACLPermission.PUBLIC_READ'}), 
'(permission=ACLPermission.PUBLIC_READ)\n', (5847, 5885), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((5918, 5952), 'bitmovin.MuxingStream', 'MuxingStream', (['video_stream_480p.id'], {}), '(video_stream_480p.id)\n', (5930, 5952), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((5984, 6018), 'bitmovin.MuxingStream', 'MuxingStream', (['video_stream_360p.id'], {}), '(video_stream_360p.id)\n', (5996, 6018), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((6050, 6084), 'bitmovin.MuxingStream', 'MuxingStream', (['video_stream_240p.id'], {}), '(video_stream_240p.id)\n', (6062, 6084), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((6122, 6161), 'bitmovin.MuxingStream', 'MuxingStream', (['audio_stream_en_stereo.id'], {}), '(audio_stream_en_stereo.id)\n', (6134, 6161), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, 
VariantStream\n'), ((6194, 6302), 'bitmovin.EncodingOutput', 'EncodingOutput', ([], {'output_id': 's3_output.id', 'output_path': "(OUTPUT_BASE_PATH + 'video/hls/480p')", 'acl': '[acl_entry]'}), "(output_id=s3_output.id, output_path=OUTPUT_BASE_PATH +\n 'video/hls/480p', acl=[acl_entry])\n", (6208, 6302), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((6415, 6543), 'bitmovin.TSMuxing', 'TSMuxing', ([], {'segment_length': '(4)', 'segment_naming': '"""seg_%number%.ts"""', 'streams': '[video_muxing_stream_480p]', 'name': '"""Sample Muxing 480p"""'}), "(segment_length=4, segment_naming='seg_%number%.ts', streams=[\n video_muxing_stream_480p], name='Sample Muxing 480p')\n", (6423, 6543), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((6841, 6967), 'bitmovin.FairPlayDRM', 'FairPlayDRM', ([], {'key': 'FAIRPLAY_KEY', 'iv': 'FAIRPLAY_IV', 'uri': 'FAIRPLAY_URI', 'outputs': '[video_muxing_480p_output]', 'name': '"""FairPlay 480p"""'}), "(key=FAIRPLAY_KEY, iv=FAIRPLAY_IV, uri=FAIRPLAY_URI, outputs=[\n video_muxing_480p_output], name='FairPlay 480p')\n", (6852, 6967), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((7427, 7535), 'bitmovin.EncodingOutput', 'EncodingOutput', ([], {'output_id': 's3_output.id', 'output_path': "(OUTPUT_BASE_PATH + 'video/hls/360p')", 'acl': 
'[acl_entry]'}), "(output_id=s3_output.id, output_path=OUTPUT_BASE_PATH +\n 'video/hls/360p', acl=[acl_entry])\n", (7441, 7535), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((7648, 7776), 'bitmovin.TSMuxing', 'TSMuxing', ([], {'segment_length': '(4)', 'segment_naming': '"""seg_%number%.ts"""', 'streams': '[video_muxing_stream_360p]', 'name': '"""Sample Muxing 360p"""'}), "(segment_length=4, segment_naming='seg_%number%.ts', streams=[\n video_muxing_stream_360p], name='Sample Muxing 360p')\n", (7656, 7776), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((8073, 8199), 'bitmovin.FairPlayDRM', 'FairPlayDRM', ([], {'key': 'FAIRPLAY_KEY', 'iv': 'FAIRPLAY_IV', 'uri': 'FAIRPLAY_URI', 'outputs': '[video_muxing_360p_output]', 'name': '"""FairPlay 360p"""'}), "(key=FAIRPLAY_KEY, iv=FAIRPLAY_IV, uri=FAIRPLAY_URI, outputs=[\n video_muxing_360p_output], name='FairPlay 360p')\n", (8084, 8199), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((8659, 8767), 'bitmovin.EncodingOutput', 'EncodingOutput', ([], {'output_id': 's3_output.id', 'output_path': "(OUTPUT_BASE_PATH + 'video/hls/240p')", 'acl': '[acl_entry]'}), "(output_id=s3_output.id, output_path=OUTPUT_BASE_PATH +\n 'video/hls/240p', acl=[acl_entry])\n", (8673, 8767), False, 'from bitmovin import Bitmovin, Encoding, 
S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((8880, 9008), 'bitmovin.TSMuxing', 'TSMuxing', ([], {'segment_length': '(4)', 'segment_naming': '"""seg_%number%.ts"""', 'streams': '[video_muxing_stream_240p]', 'name': '"""Sample Muxing 240p"""'}), "(segment_length=4, segment_naming='seg_%number%.ts', streams=[\n video_muxing_stream_240p], name='Sample Muxing 240p')\n", (8888, 9008), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((9306, 9432), 'bitmovin.FairPlayDRM', 'FairPlayDRM', ([], {'key': 'FAIRPLAY_KEY', 'iv': 'FAIRPLAY_IV', 'uri': 'FAIRPLAY_URI', 'outputs': '[video_muxing_240p_output]', 'name': '"""FairPlay 240p"""'}), "(key=FAIRPLAY_KEY, iv=FAIRPLAY_IV, uri=FAIRPLAY_URI, outputs=[\n video_muxing_240p_output], name='FairPlay 240p')\n", (9317, 9432), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((9897, 10007), 'bitmovin.EncodingOutput', 'EncodingOutput', ([], {'output_id': 's3_output.id', 'output_path': "(OUTPUT_BASE_PATH + 'audio/hls/en_2_0')", 'acl': '[acl_entry]'}), "(output_id=s3_output.id, output_path=OUTPUT_BASE_PATH +\n 'audio/hls/en_2_0', acl=[acl_entry])\n", (9911, 10007), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, 
FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((10135, 10279), 'bitmovin.TSMuxing', 'TSMuxing', ([], {'segment_length': '(4)', 'segment_naming': '"""seg_%number%.ts"""', 'streams': '[audio_muxing_stream_en_stereo]', 'name': '"""Sample Audio Muxing EN Stereo"""'}), "(segment_length=4, segment_naming='seg_%number%.ts', streams=[\n audio_muxing_stream_en_stereo], name='Sample Audio Muxing EN Stereo')\n", (10143, 10279), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((10607, 10739), 'bitmovin.FairPlayDRM', 'FairPlayDRM', ([], {'key': 'FAIRPLAY_KEY', 'iv': 'FAIRPLAY_IV', 'uri': 'FAIRPLAY_URI', 'outputs': '[audio_muxing_output_en_stereo]', 'name': '"""FairPlay Audio"""'}), "(key=FAIRPLAY_KEY, iv=FAIRPLAY_IV, uri=FAIRPLAY_URI, outputs=[\n audio_muxing_output_en_stereo], name='FairPlay Audio')\n", (10618, 10739), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((11522, 11612), 'bitmovin.EncodingOutput', 'EncodingOutput', ([], {'output_id': 's3_output.id', 'output_path': 'OUTPUT_BASE_PATH', 'acl': '[acl_entry]'}), '(output_id=s3_output.id, output_path=OUTPUT_BASE_PATH, acl=[\n acl_entry])\n', (11536, 11612), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((11701, 11824), 'bitmovin.HlsManifest', 'HlsManifest', ([], {'manifest_name': 
'"""example_manifest_hls.m3u8"""', 'outputs': '[manifest_output]', 'name': '"""Sample HLS FairPlay Manifest"""'}), "(manifest_name='example_manifest_hls.m3u8', outputs=[\n manifest_output], name='Sample HLS FairPlay Manifest')\n", (11712, 11824), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((11973, 12267), 'bitmovin.AudioMedia', 'AudioMedia', ([], {'name': '"""Sample Audio Media"""', 'group_id': '"""audio_group"""', 'segment_path': 'audio_muxing_output_en_stereo.outputPath', 'encoding_id': 'encoding.id', 'stream_id': 'audio_stream_en_stereo.id', 'muxing_id': 'audio_muxing_en_stereo.id', 'drm_id': 'fair_play_audio.id', 'language': '"""en"""', 'uri': '"""audiomedia.m3u8"""'}), "(name='Sample Audio Media', group_id='audio_group', segment_path=\n audio_muxing_output_en_stereo.outputPath, encoding_id=encoding.id,\n stream_id=audio_stream_en_stereo.id, muxing_id=audio_muxing_en_stereo.\n id, drm_id=fair_play_audio.id, language='en', uri='audiomedia.m3u8')\n", (11983, 12267), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((12632, 12897), 'bitmovin.VariantStream', 'VariantStream', ([], {'audio': 'audio_media.groupId', 'closed_captions': '"""NONE"""', 'segment_path': 'video_muxing_480p_output.outputPath', 'uri': '"""video_480p.m3u8"""', 'encoding_id': 'encoding.id', 'stream_id': 'video_stream_480p.id', 'muxing_id': 'video_muxing_480p.id', 'drm_id': 'fair_play_480p.id'}), "(audio=audio_media.groupId, closed_captions='NONE',\n segment_path=video_muxing_480p_output.outputPath, uri='video_480p.m3u8',\n 
encoding_id=encoding.id, stream_id=video_stream_480p.id, muxing_id=\n video_muxing_480p.id, drm_id=fair_play_480p.id)\n", (12645, 12897), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((13347, 13612), 'bitmovin.VariantStream', 'VariantStream', ([], {'audio': 'audio_media.groupId', 'closed_captions': '"""NONE"""', 'segment_path': 'video_muxing_360p_output.outputPath', 'uri': '"""video_360p.m3u8"""', 'encoding_id': 'encoding.id', 'stream_id': 'video_stream_360p.id', 'muxing_id': 'video_muxing_360p.id', 'drm_id': 'fair_play_360p.id'}), "(audio=audio_media.groupId, closed_captions='NONE',\n segment_path=video_muxing_360p_output.outputPath, uri='video_360p.m3u8',\n encoding_id=encoding.id, stream_id=video_stream_360p.id, muxing_id=\n video_muxing_360p.id, drm_id=fair_play_360p.id)\n", (13360, 13612), False, 'from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((14062, 14327), 'bitmovin.VariantStream', 'VariantStream', ([], {'audio': 'audio_media.groupId', 'closed_captions': '"""NONE"""', 'segment_path': 'video_muxing_240p_output.outputPath', 'uri': '"""video_240p.m3u8"""', 'encoding_id': 'encoding.id', 'stream_id': 'video_stream_240p.id', 'muxing_id': 'video_muxing_240p.id', 'drm_id': 'fair_play_240p.id'}), "(audio=audio_media.groupId, closed_captions='NONE',\n segment_path=video_muxing_240p_output.outputPath, uri='video_240p.m3u8',\n encoding_id=encoding.id, stream_id=video_stream_240p.id, muxing_id=\n video_muxing_240p.id, drm_id=fair_play_240p.id)\n", (14075, 14327), False, 'from bitmovin import Bitmovin, 
Encoding, S3Output, H264CodecConfiguration, AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, ACLPermission, MuxingStream, S3Input, FairPlayDRM, TSMuxing, HlsManifest, AudioMedia, VariantStream\n'), ((861, 884), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (882, 884), False, 'import datetime\n')] |
#!/usr/bin/env python3
# Based on pcgod's mumble-ping script found at http://0xy.org/mumble-ping.py.
# Sends a single UDP ping packet to a Mumble server and prints the server's
# version, user counts, latency and bandwidth from the reply.
import socket
import sys
import time
import datetime
from struct import pack, unpack

if len(sys.argv) < 3:
    print("Usage: %s <host> <port>" % sys.argv[0])
    sys.exit()
host = sys.argv[1]
port = int(sys.argv[2])
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Give up after one second so a dead server does not hang the script.
s.settimeout(1)
# Ping request: 4-byte type (0) followed by an 8-byte timestamp; only the
# microsecond component is sent, which is enough to measure sub-second latency.
buf = pack(">iQ", 0, datetime.datetime.now().microsecond)
s.sendto(buf, (host, port))
try:
    data, addr = s.recvfrom(1024)
except socket.timeout:
    # Emit NaN latency in a machine-readable "timestamp:NaN:NaN" form.
    print("%d:NaN:NaN" % (time.time()))
    sys.exit()
print("recvd %d bytes" % len(data))
r = unpack(">bbbbQiii", data)
version = r[1:4]
# r[0,1,2,3] = version
# r[4] = ts
# r[5] = users
# r[6] = max users
# r[7] = bandwidth
# Latency in ms; only microseconds were sent, so the difference can wrap
# around once per second and go negative -- corrected just below.
ping = (datetime.datetime.now().microsecond - r[4]) / 1000.0
if ping < 0:
    ping = ping + 1000
print(
    "Version %d.%d.%d, %d/%d Users, %.1fms, %dkbit/s"
    % (version + (r[5], r[6], ping, r[7] / 1000))
)
| [
"socket.socket",
"datetime.datetime.now",
"struct.unpack",
"sys.exit",
"time.time"
] | [((325, 373), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (338, 373), False, 'import socket\n'), ((637, 662), 'struct.unpack', 'unpack', (['""">bbbbQiii"""', 'data'], {}), "('>bbbbQiii', data)\n", (643, 662), False, 'from struct import pack, unpack\n'), ((265, 275), 'sys.exit', 'sys.exit', ([], {}), '()\n', (273, 275), False, 'import sys\n'), ((412, 435), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (433, 435), False, 'import datetime\n'), ((584, 594), 'sys.exit', 'sys.exit', ([], {}), '()\n', (592, 594), False, 'import sys\n'), ((778, 801), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (799, 801), False, 'import datetime\n'), ((566, 577), 'time.time', 'time.time', ([], {}), '()\n', (575, 577), False, 'import time\n')] |
import random
import numpy as np
from mesa import Agent
# Probability of each strategy being assigned to a newborn SmartVampire,
# in the order of SmartVampire.STRATEGIES:
# [STRATEGY_CHEATERS, STRATEGY_FAIR, STRATEGY_GENEROUS, STRATEGY_MARTYRS, STRATEGY_PRUDENT]
SMART_VAMPIRE_STRATEGIES_PROB = [0.25, 0.25, 0.125, 0.25, 0.125]
class Vampire(Agent):
    """Base mesa agent for a vampire-bat food-sharing model.

    Each step a vampire hunts, may beg food from a member of some roost
    ("root"), may reproduce after a successful share, and burns 12 hours
    of survival time.
    """
    def __init__(self, id, model, root_id):
        super().__init__(id, model)
        # Index of the roost ("root") this vampire belongs to.
        self.root_id = root_id
        # Hours of food reserve left; a successful hunt resets it to 60.
        self.survival_time = 60
    def step(self):
        """Advance the agent by one 12-hour tick."""
        self.perform_hunt()
        shared_food = self.perform_food_sharing()
        self.perform_reproduction(shared_food)
        self.survival_time -= 12
    def get_root(self, root):
        """Return every scheduled agent belonging to roost `root`."""
        return [agent for agent in self.model.schedule.agents if agent.root_id == root]
    def perform_hunt(self):
        # Hunt succeeds with model.hunt_probability and refills the reserve.
        # NOTE(review): a failed hunt subtracts 12 here and step() always
        # subtracts another 12, so a miss costs 24 hours total -- confirm
        # this double charge is intended.
        if random.random() < self.model.hunt_probability:
            self.survival_time = 60
        else:
            self.survival_time -= 12
    def perform_food_sharing(self):
        """Beg food from another vampire when starving (<= 24 hours left).

        Returns True when a donor actually transferred food.
        """
        if self.model.food_sharing:
            if self.survival_time <= 24:
                group = range(self.model.n_roots)
                # Weight the own roost 9x as heavy as all other roosts
                # combined, then normalise into a probability vector.
                prob = np.ones(self.model.n_roots)
                prob[self.root_id] = prob[self.root_id] * (self.model.n_roots - 1) * 9
                prob = prob / np.sum(prob)
                group_id = np.random.choice(group, p=prob)
                group_member = self.get_root(group_id)
                if len(group_member) > 0:
                    other = random.choice(group_member)
                    return self.share_food(other)
        return False
    def perform_reproduction(self, shared_food):
        """Possibly spawn a new vampire after a successful food share."""
        if self.model.reproduction and shared_food:
            if random.random() < self.model.reproduction_probability:
                # unique_id is a (root_id, n) tuple; pick the next free n
                # within this roost for the newborn.
                id = max([agent.unique_id[1] for agent in self.get_root(self.root_id)]) + 1
                baby_vampire = self.model.vampire_type((self.root_id, id), self.model,
                                                       random.choice(range(self.model.n_roots)))
                self.model.schedule.add(baby_vampire)
    def is_dead(self):
        """True once the survival clock has run out."""
        return self.survival_time <= 0
    def share_food(self, other):
        # Donation policy is subclass-specific.
        raise NotImplementedError
class SimpleVampire(Vampire):
    """Vampire that always donates 6 hours when it can spare them."""

    def share_food(self, other):
        """Transfer 6 hours from `other` to self if the donor has >= 48 left."""
        donor_can_afford = other.survival_time >= 48
        if not donor_can_afford:
            return False
        other.survival_time -= 6
        self.survival_time += 6
        return True
class SmartVampire(Vampire):
    """Vampire whose donation size is fixed by a strategy drawn at birth."""
    STRATEGY_CHEATERS = 'Cheater'
    STRATEGY_FAIR = 'Fair'
    STRATEGY_MARTYRS = 'Martyrs'
    STRATEGY_GENEROUS = 'Generous'
    STRATEGY_PRUDENT = 'Prudent'
    STRATEGIES = [STRATEGY_CHEATERS, STRATEGY_FAIR, STRATEGY_GENEROUS, STRATEGY_MARTYRS, STRATEGY_PRUDENT]

    def __init__(self, id, model, root_id):
        super().__init__(id, model, root_id)
        # The strategy is sampled once, using the model-level probabilities.
        self.motivation = np.random.choice(self.STRATEGIES, p=self.model.smart_vampire_strategies_prob)

    def share_food(self, other):
        """Ask `other` for food; its strategy decides how many hours it gives.

        Returns True when a donation actually took place.
        """
        strategy = other.motivation
        reserve = other.survival_time
        if strategy == other.STRATEGY_CHEATERS:
            # Cheaters never give anything.
            return False
        if strategy == other.STRATEGY_MARTYRS:
            donation = 12
        elif strategy == other.STRATEGY_FAIR:
            donation = 12 if reserve >= 48 else 6 if reserve >= 24 else 0
        elif strategy == other.STRATEGY_GENEROUS:
            donation = 24 if reserve >= 48 else 12 if reserve >= 24 else 0
        elif strategy == other.STRATEGY_PRUDENT:
            donation = 6 if reserve >= 48 else 0
        else:
            donation = 0
        if donation:
            other.survival_time -= donation
            self.survival_time += donation
            return True
        return False
class SmartDynamicVampire(Vampire):
    """Vampire whose generosity is an integer "motivation" score that drifts
    with experience: completing a donation raises it, refusing lowers it.
    """
    def __init__(self, id, model, root_id, motivation=None):
        super().__init__(id, model, root_id)
        # NOTE(review): `not motivation` also treats motivation=0 as "unset",
        # so an explicit neutral score of 0 is replaced by a random draw --
        # confirm whether `motivation is None` was intended.
        self.motivation = np.random.randint(-4, 7) if not motivation else motivation
    def step(self):
        # Same as Vampire.step, plus clamping motivation to the model bounds.
        self.perform_hunt()
        shared_food = self.perform_food_sharing()
        self.motivation = max(min(self.motivation, self.model.max_motivation), self.model.min_motivation)
        self.perform_reproduction(shared_food)
        self.survival_time -= 12
    def share_food(self, other):
        """Ask `other` for food; its motivation band picks the policy.

        Bands: < -2 cheater, [-2, 0) prudent, [0, 1] fair, (1, 4] generous,
        > 4 martyr.  A completed donation raises the donor's motivation by 1;
        a refusal lowers the motivation of whoever denied it by 1.
        """
        if other.motivation < -2:  # Cheater
            self.motivation -= 1
            return False
        elif -2 <= other.motivation < 0:  # Prudent
            if other.survival_time >= 48:
                other.survival_time -= 6
                self.survival_time += 6
                self.motivation += 1
                return True
        elif 0 <= other.motivation <= 1:  # Fair
            if other.survival_time >= 48:
                other.survival_time -= 12
                self.survival_time += 12
                self.motivation += 1
                return True
            elif other.survival_time >= 24:
                other.survival_time -= 6
                self.survival_time += 6
                self.motivation += 1
                return True
        elif 1 < other.motivation <= 4:  # Generous
            if other.survival_time >= 48:
                other.survival_time -= 24
                self.survival_time += 24
                self.motivation += 1
                return True
            elif other.survival_time >= 24:
                other.survival_time -= 12
                self.survival_time += 12
                self.motivation += 1
                return True
        elif other.motivation > 4:  # Martyr
            other.survival_time -= 12
            self.survival_time += 12
            self.motivation += 1
            return True
        # Donor was willing but too low on reserves: the requester's
        # motivation drops.
        self.motivation -= 1
        return False
    def perform_reproduction(self, shared_food):
        # Same as the base-class version, but newborns start with a fixed
        # motivation of -2 (prudent) rather than a random score.
        if self.model.reproduction and shared_food:
            if random.random() < self.model.reproduction_probability:
                id = max([agent.unique_id[1] for agent in self.get_root(self.root_id)]) + 1
                baby_vampire = self.model.vampire_type((self.root_id, id), self.model,
                                                       random.choice(range(self.model.n_roots)), -2)
                self.model.schedule.add(baby_vampire)
| [
"random.choice",
"numpy.ones",
"numpy.random.choice",
"numpy.sum",
"numpy.random.randint",
"random.random"
] | [((2752, 2829), 'numpy.random.choice', 'np.random.choice', (['self.STRATEGIES'], {'p': 'self.model.smart_vampire_strategies_prob'}), '(self.STRATEGIES, p=self.model.smart_vampire_strategies_prob)\n', (2768, 2829), True, 'import numpy as np\n'), ((720, 735), 'random.random', 'random.random', ([], {}), '()\n', (733, 735), False, 'import random\n'), ((4226, 4250), 'numpy.random.randint', 'np.random.randint', (['(-4)', '(7)'], {}), '(-4, 7)\n', (4243, 4250), True, 'import numpy as np\n'), ((1041, 1068), 'numpy.ones', 'np.ones', (['self.model.n_roots'], {}), '(self.model.n_roots)\n', (1048, 1068), True, 'import numpy as np\n'), ((1226, 1257), 'numpy.random.choice', 'np.random.choice', (['group'], {'p': 'prob'}), '(group, p=prob)\n', (1242, 1257), True, 'import numpy as np\n'), ((1599, 1614), 'random.random', 'random.random', ([], {}), '()\n', (1612, 1614), False, 'import random\n'), ((6154, 6169), 'random.random', 'random.random', ([], {}), '()\n', (6167, 6169), False, 'import random\n'), ((1186, 1198), 'numpy.sum', 'np.sum', (['prob'], {}), '(prob)\n', (1192, 1198), True, 'import numpy as np\n'), ((1383, 1410), 'random.choice', 'random.choice', (['group_member'], {}), '(group_member)\n', (1396, 1410), False, 'import random\n')] |
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from django.shortcuts import render
from .models import Pag1juegos,Aventura,Lucha,Rol,MundoAbierto
from django.http import HttpResponse
# Create your views here.
def home(request):
    """Landing page: renders every Pag1juegos entry."""
    context = {'juegos': Pag1juegos.objects.all()}
    return render(request, 'core/home.html', context)
def Adventures(request):
    """Adventure games listing: renders every Aventura entry."""
    context = {'juegosad': Aventura.objects.all()}
    return render(request, 'core/Adventures.html', context)
def Luchas(request):
    """Fighting games listing: renders every Lucha entry."""
    context = {'juegoslu': Lucha.objects.all()}
    return render(request, 'core/Luchas.html', context)
def rol(request):
    """Role-playing games listing: renders every Rol entry."""
    context = {'juegosro': Rol.objects.all()}
    return render(request, 'core/rol.html', context)
def openWorld(request):
    """Open-world games listing: renders every MundoAbierto entry."""
    context = {'juegosmu': MundoAbierto.objects.all()}
    return render(request, 'core/openWorld.html', context)
# NOTE(review): every view below calls render(request) without a template
# name.  Django's render(request, template_name, ...) requires one, so these
# placeholder views raise TypeError when hit -- TODO: wire up the intended
# templates (and context) before routing URLs to them.
def principal(request):
    return render(request)
def Lista(request):
    return render(request)
def Modificar(request):
    return render(request)
def Borrar(request):
    return render(request)
def CAT(request):
    return render(request)
def ModificarCAT(request):
    return render(request)
def ListaCAT(request):
    return render(request)
| [
"django.shortcuts.render"
] | [((370, 409), 'django.shortcuts.render', 'render', (['request', '"""core/home.html"""', 'data'], {}), "(request, 'core/home.html', data)\n", (376, 409), False, 'from django.shortcuts import render\n'), ((531, 576), 'django.shortcuts.render', 'render', (['request', '"""core/Adventures.html"""', 'data'], {}), "(request, 'core/Adventures.html', data)\n", (537, 576), False, 'from django.shortcuts import render\n'), ((691, 732), 'django.shortcuts.render', 'render', (['request', '"""core/Luchas.html"""', 'data'], {}), "(request, 'core/Luchas.html', data)\n", (697, 732), False, 'from django.shortcuts import render\n'), ((849, 887), 'django.shortcuts.render', 'render', (['request', '"""core/rol.html"""', 'data'], {}), "(request, 'core/rol.html', data)\n", (855, 887), False, 'from django.shortcuts import render\n'), ((1016, 1060), 'django.shortcuts.render', 'render', (['request', '"""core/openWorld.html"""', 'data'], {}), "(request, 'core/openWorld.html', data)\n", (1022, 1060), False, 'from django.shortcuts import render\n'), ((1099, 1114), 'django.shortcuts.render', 'render', (['request'], {}), '(request)\n', (1105, 1114), False, 'from django.shortcuts import render\n'), ((1153, 1168), 'django.shortcuts.render', 'render', (['request'], {}), '(request)\n', (1159, 1168), False, 'from django.shortcuts import render\n'), ((1216, 1231), 'django.shortcuts.render', 'render', (['request'], {}), '(request)\n', (1222, 1231), False, 'from django.shortcuts import render\n'), ((1270, 1285), 'django.shortcuts.render', 'render', (['request'], {}), '(request)\n', (1276, 1285), False, 'from django.shortcuts import render\n'), ((1320, 1335), 'django.shortcuts.render', 'render', (['request'], {}), '(request)\n', (1326, 1335), False, 'from django.shortcuts import render\n'), ((1383, 1398), 'django.shortcuts.render', 'render', (['request'], {}), '(request)\n', (1389, 1398), False, 'from django.shortcuts import render\n'), ((1454, 1469), 'django.shortcuts.render', 'render', (['request'], 
{}), '(request)\n', (1460, 1469), False, 'from django.shortcuts import render\n')] |
from numpy import random

# Draw one multinomial sample: six rolls of a fair six-sided die.
_FACES = 6
_ROLLS = 6
x = random.multinomial(n=_ROLLS, pvals=[1 / _FACES] * _FACES)
print(x)
| [
"numpy.random.multinomial"
] | [((30, 103), 'numpy.random.multinomial', 'random.multinomial', ([], {'n': '(6)', 'pvals': '[1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6]'}), '(n=6, pvals=[1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6])\n', (48, 103), False, 'from numpy import random\n')] |
import click
from marinetrafficapi import constants
from marinetrafficapi.bind import bind_request
from marinetrafficapi.vessels_positions.\
PS01_vessel_historical_track.models import VesselHistoricalPosition
from marinetrafficapi.vessels_positions.\
PS01_vessel_historical_track.query_params import PS01QueryParams
from marinetrafficapi.vessels_positions.\
PS02_PS06_vessel_positions.models import FleetVesselPosition
from marinetrafficapi.vessels_positions.\
PS02_PS06_vessel_positions.query_params import PS02PS06QueryParams
from marinetrafficapi.vessels_positions.\
PS07_single_vessel_positions.models import SingleVesselPosition
from marinetrafficapi.vessels_positions.\
PS07_single_vessel_positions.query_params import PS07QueryParams
class VesselPositions:
    """Retrieve forecasted information for any vessel.
    Get ETA and voyage related information using one of these APIs."""
    # PS01: historical positions for one or more vessels over a time window.
    vessel_historical_track = bind_request(
        api_path='/exportvesseltrack',
        model=VesselHistoricalPosition,
        query_parameters=PS01QueryParams,
        default_parameters={
            'v': '2',
            constants.ClientConst.MSG_TYPE: constants.ClientConst.SIMPLE,
            constants.RequestConst.PROTOCOL: constants.FormatterConst.JSONO
        },
        description='{}: \nGet all historical positions \n'
                    'for one or more vessels over a period of time'
        .format(click.style("API CALL PS01", fg="red"))
    )
    # PS02-PS06: positions for predefined fleets, ports, or custom areas;
    # the same endpoint serves all five call variants.
    fleet_vessel_positions = bind_request(
        api_path='/exportvessels',
        model=FleetVesselPosition,
        query_parameters=PS02PS06QueryParams,
        default_parameters={
            'v': '8',
            constants.ClientConst.MSG_TYPE: constants.ClientConst.SIMPLE,
            constants.RequestConst.PROTOCOL: constants.FormatterConst.JSONO
        },
        description='{}:\nGet positional information for a set of predefined vessels \n'
                    '{}:\nMonitor vessel activity for your MarineTraffic fleet(s)\n'
                    '{}:\nMonitor vessel activity in one or more ports of your interest\n'
                    '{}:\nMonitor vessel activity in an area of your interest\n'
                    '{}:\nRetrieve positions for vessels sailing in an area that \n'
                    'you define each time you call the service'
        .format(click.style("API CALL PS02", fg="red"),
                click.style("API CALL PS03", fg="red"),
                click.style("API CALL PS04", fg="red"),
                click.style("API CALL PS05", fg="red"),
                click.style("API CALL PS06", fg="red"))
    )
    # PS07: latest position / voyage information for a single vessel.
    single_vessel_positions = bind_request(
        api_path='/exportvessel',
        model=SingleVesselPosition,
        query_parameters=PS07QueryParams,
        default_parameters={
            'v': '5',
            constants.ClientConst.MSG_TYPE: constants.ClientConst.SIMPLE,
            constants.RequestConst.PROTOCOL: constants.FormatterConst.JSONO
        },
        description='{}:\nGet the latest available position or voyage \n'
                    'information for a particular vessel'
        .format(click.style("API CALL PS07", fg="red"))
    )
| [
"click.style"
] | [((1446, 1484), 'click.style', 'click.style', (['"""API CALL PS01"""'], {'fg': '"""red"""'}), "('API CALL PS01', fg='red')\n", (1457, 1484), False, 'import click\n'), ((2379, 2417), 'click.style', 'click.style', (['"""API CALL PS02"""'], {'fg': '"""red"""'}), "('API CALL PS02', fg='red')\n", (2390, 2417), False, 'import click\n'), ((2439, 2477), 'click.style', 'click.style', (['"""API CALL PS03"""'], {'fg': '"""red"""'}), "('API CALL PS03', fg='red')\n", (2450, 2477), False, 'import click\n'), ((2499, 2537), 'click.style', 'click.style', (['"""API CALL PS04"""'], {'fg': '"""red"""'}), "('API CALL PS04', fg='red')\n", (2510, 2537), False, 'import click\n'), ((2559, 2597), 'click.style', 'click.style', (['"""API CALL PS05"""'], {'fg': '"""red"""'}), "('API CALL PS05', fg='red')\n", (2570, 2597), False, 'import click\n'), ((2619, 2657), 'click.style', 'click.style', (['"""API CALL PS06"""'], {'fg': '"""red"""'}), "('API CALL PS06', fg='red')\n", (2630, 2657), False, 'import click\n'), ((3186, 3224), 'click.style', 'click.style', (['"""API CALL PS07"""'], {'fg': '"""red"""'}), "('API CALL PS07', fg='red')\n", (3197, 3224), False, 'import click\n')] |
#!/usr/bin/env python3
import sys
import os
import types
import subprocess
import json
import yaml
import shutil
def run_dialog(parameters):
    """Run the `dialog` utility and return whatever it wrote to stderr.

    Exits the program on Cancel/ESC; raises CalledProcessError on any
    other non-zero exit code.
    """
    command = ["dialog"] + parameters
    env = os.environ.copy()
    # dialog reports ESC as 255 by default, which collides with error code
    # -1 cast to unsigned 8-bit.  Map ESC to the same code as Cancel (1),
    # since the two never need to be distinguished here.
    env["DIALOG_ESC"] = "1"
    process = subprocess.Popen(command, stderr=subprocess.PIPE, env=env)
    _, stderr = process.communicate()
    if process.returncode == 1:
        sys.exit("Cancelled by user")
    if process.returncode != 0:
        print(stderr.decode("utf-8"))
        raise subprocess.CalledProcessError(process.returncode, command, output=stderr)
    return stderr.decode("utf-8")
def load_roles():
    """Collect metadata for every role directory under /opt/ansible-playbooks."""
    playbooks_dir = "/opt/ansible-playbooks"
    discovered = []
    for entry in os.listdir(playbooks_dir):
        entry_path = os.path.join(playbooks_dir, entry)
        if not os.path.isdir(entry_path):
            continue
        info = read_role_info(entry, entry_path)
        if info is not None:
            discovered.append(info)
    return discovered
def read_role_info(role_name, role_path):
    """Build a SimpleNamespace describing one Ansible role.

    Returns None when the directory has no tasks/main.yml (i.e. it is not
    a role).  `default_vars` holds the parsed defaults/main.yml, or None
    when that file is absent.
    """
    if not os.path.isfile(os.path.join(role_path, "tasks", "main.yml")):
        return None
    info = types.SimpleNamespace()
    info.name = role_name
    info.default_vars = None
    defaults_file = os.path.join(role_path, "defaults", "main.yml")
    if os.path.isfile(defaults_file):
        with open(defaults_file, "r") as f:
            info.default_vars = yaml.safe_load(f)
    return info
def save_config():
    """Persist the module-level `config` dict as pretty-printed JSON."""
    os.makedirs(os.path.dirname(config_file_name), exist_ok=True)
    with open(config_file_name, "w", encoding="utf-8") as config_file:
        json.dump(config, config_file, ensure_ascii=False, indent=4)
if shutil.which("dialog") is None:
    sys.exit("ERROR: Command 'dialog' is not found. Please install corresponding package")
# Persisted UI state: last role/group/hosts chosen and per-role variable
# overrides, so repeated runs default to the previous selection.
config_file_name = os.path.expanduser("~/.cache/cheretbe/ansible-playbooks/run_role_cfg.json")
if os.path.isfile(config_file_name):
    with open(config_file_name) as f:
        config = json.load(f)
else:
    config = {}
# --- Role selection -------------------------------------------------------
roles = load_roles()
roles.sort(key=lambda x: x.name)
last_used_role = config.get("last_used_role", None)
last_used_role_idx = None
dialog_list = []
for idx, i in enumerate(roles):
    dialog_list += [str(idx), i.name]
    if last_used_role and (i.name == last_used_role):
        last_used_role_idx = idx
dialog_params = ["--keep-tite", "--no-tags", "--menu", "Select a role:",
                 "0", "0", "0"] + dialog_list
if not last_used_role_idx is None:
    dialog_params = ["--default-item", str(last_used_role_idx)] + dialog_params
selection = run_dialog(dialog_params)
current_role = roles[int(selection)].name
config["last_used_role"] = current_role
save_config()
print(f"Using role '{current_role}'")
# --- Per-role variable overrides -----------------------------------------
role_default_vars = roles[int(selection)].default_vars
last_used_custom_vars = None
if config.get("custom_vars", None):
    last_used_custom_vars = config["custom_vars"].get(current_role, None)
current_role_vars = {}
if not role_default_vars is None:
    # Widest variable name decides the form's field column offset.
    caption_length = 0
    for var_name in role_default_vars:
        if len(var_name) > caption_length:
            caption_length = len(var_name)
    dialog_list = []
    for idx, key in enumerate(role_default_vars):
        var_value = ""
        if last_used_custom_vars:
            if key in last_used_custom_vars:
                var_value = last_used_custom_vars[key]
        # --form item: label, y, x, value, y, x, field width, input length.
        dialog_list += [key + ":", str(idx + 1), "2", var_value, str(idx + 1),
                        str(caption_length + 4), "100", "0"]
    selection = run_dialog(["--keep-tite", "--no-tags",
        "--form", f"Override variable values for role '{current_role}':", "0", "0", "0"] +
        dialog_list)
    # dialog returns one form field per line; empty fields are skipped.
    dialog_vars = selection.split("\n")
    for idx, key in enumerate(role_default_vars):
        if dialog_vars[idx]:
            current_role_vars[key] = dialog_vars[idx]
    if not config.get("custom_vars", None):
        config["custom_vars"] = {}
    config["custom_vars"][current_role] = current_role_vars
    save_config()
# --- Group selection ------------------------------------------------------
inventory = json.loads(subprocess.check_output(["ansible-inventory",
    "--list", "--export"]))
inventory_groups = []
for group in inventory["all"]["children"]:
    if group in inventory:
        inventory_groups.append(group)
if len(inventory_groups) == 0:
    # Re-run without --export so the user sees the full inventory output.
    subprocess.run(["ansible-inventory", "--list"])
    sys.exit("ERROR: No groups were found in the inventory. Check inventory configuration")
last_used_group = config.get("last_used_group", None)
if last_used_group == "all":
    last_used_group_idx = "all"
else:
    last_used_group_idx = None
dialog_list = ["all", "All hosts"]
for idx, i in enumerate(inventory_groups):
    dialog_list += [str(idx), i]
    if last_used_group and i == last_used_group:
        last_used_group_idx = idx
# --menu <text> <height> <width> <menu height> <tag1> <item1> ...
dialog_params = ["--keep-tite", "--no-tags", "--menu", "Select a group:",
                 "0", "0", "0"] + dialog_list
if not last_used_group_idx is None:
    dialog_params = ["--default-item", str(last_used_group_idx)] + dialog_params
selection = run_dialog(dialog_params)
if selection == "all":
    print("Using all hosts")
    current_group = None
    config["last_used_group"] = "all"
else:
    current_group = inventory_groups[int(selection)]
    print(f"Using group '{current_group}'")
    config["last_used_group"] = current_group
save_config()
# --- Host selection within the chosen group -------------------------------
hosts_subset = []
if current_group:
    inventory_hosts = []
    for host in inventory[current_group]["hosts"]:
        inventory_hosts.append(host)
    last_used_hosts = inventory_hosts
    if not config.get("hosts", None) is None:
        last_used_hosts = config["hosts"].get(current_group, inventory_hosts)
    dialog_list = []
    for idx, i in enumerate(inventory_hosts):
        # Pre-check hosts that were selected last time for this group.
        item_state = "on" if i in last_used_hosts else "off"
        dialog_list += [str(idx), i, item_state]
    selection = run_dialog(["--keep-tite", "--no-tags",
        "--checklist", f"Select hosts ({current_group}):", "0", "0", "0"] +
        dialog_list)
    current_hosts = []
    for host_idx in selection.split():
        current_hosts.append(inventory_hosts[int(host_idx)])
    if len(current_hosts) == 0:
        sys.exit("No hosts were selected. Exiting")
    print("Using hosts", current_hosts)
    hosts_subset = ["-l", ",".join(current_hosts)]
    if not config.get("hosts", None):
        config["hosts"] = {}
    config["hosts"][current_group] = current_hosts
    save_config()
# --- Build the ansible-playbook command -----------------------------------
ansible_playbook_cmd = ["ansible-playbook", "/opt/ansible-playbooks/run_role.yml",
    "--extra-vars", f"role_name={current_role}"] + hosts_subset
if len(current_role_vars) != 0:
    ansible_playbook_cmd += ["--extra-vars", json.dumps(current_role_vars)]
subprocess.check_call(ansible_playbook_cmd) | [
"sys.exit",
"os.listdir",
"subprocess.Popen",
"subprocess.CalledProcessError",
"subprocess.run",
"json.dumps",
"os.path.isdir",
"os.path.expanduser",
"subprocess.check_output",
"subprocess.check_call",
"types.SimpleNamespace",
"shutil.which",
"os.path.isfile",
"os.path.dirname",
"os.path... | [((2081, 2156), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.cache/cheretbe/ansible-playbooks/run_role_cfg.json"""'], {}), "('~/.cache/cheretbe/ansible-playbooks/run_role_cfg.json')\n", (2099, 2156), False, 'import os\n'), ((2160, 2192), 'os.path.isfile', 'os.path.isfile', (['config_file_name'], {}), '(config_file_name)\n', (2174, 2192), False, 'import os\n'), ((6920, 6963), 'subprocess.check_call', 'subprocess.check_call', (['ansible_playbook_cmd'], {}), '(ansible_playbook_cmd)\n', (6941, 6963), False, 'import subprocess\n'), ((201, 218), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (216, 218), False, 'import os\n'), ((495, 563), 'subprocess.Popen', 'subprocess.Popen', (['dialog_cmd'], {'stderr': 'subprocess.PIPE', 'env': 'dialog_env'}), '(dialog_cmd, stderr=subprocess.PIPE, env=dialog_env)\n', (511, 563), False, 'import subprocess\n'), ((928, 964), 'os.listdir', 'os.listdir', (['"""/opt/ansible-playbooks"""'], {}), "('/opt/ansible-playbooks')\n", (938, 964), False, 'import os\n'), ((1938, 1960), 'shutil.which', 'shutil.which', (['"""dialog"""'], {}), "('dialog')\n", (1950, 1960), False, 'import shutil\n'), ((1974, 2070), 'sys.exit', 'sys.exit', (['"""ERROR: Command \'dialog\' is not found. Please install corresponding package"""'], {}), '(\n "ERROR: Command \'dialog\' is not found. Please install corresponding package"\n )\n', (1982, 2070), False, 'import sys\n'), ((4249, 4317), 'subprocess.check_output', 'subprocess.check_output', (["['ansible-inventory', '--list', '--export']"], {}), "(['ansible-inventory', '--list', '--export'])\n", (4272, 4317), False, 'import subprocess\n'), ((4491, 4538), 'subprocess.run', 'subprocess.run', (["['ansible-inventory', '--list']"], {}), "(['ansible-inventory', '--list'])\n", (4505, 4538), False, 'import subprocess\n'), ((4543, 4640), 'sys.exit', 'sys.exit', (['"""ERROR: No groups were found in the inventory. 
Check inventory configuration"""'], {}), "(\n 'ERROR: No groups were found in the inventory. Check inventory configuration'\n )\n", (4551, 4640), False, 'import sys\n'), ((638, 667), 'sys.exit', 'sys.exit', (['"""Cancelled by user"""'], {}), "('Cancelled by user')\n", (646, 667), False, 'import sys\n'), ((986, 1035), 'os.path.join', 'os.path.join', (['"""/opt/ansible-playbooks"""', 'dir_entry'], {}), "('/opt/ansible-playbooks', dir_entry)\n", (998, 1035), False, 'import os\n'), ((1047, 1071), 'os.path.isdir', 'os.path.isdir', (['full_path'], {}), '(full_path)\n', (1060, 1071), False, 'import os\n'), ((1315, 1359), 'os.path.join', 'os.path.join', (['role_path', '"""tasks"""', '"""main.yml"""'], {}), "(role_path, 'tasks', 'main.yml')\n", (1327, 1359), False, 'import os\n'), ((1382, 1405), 'types.SimpleNamespace', 'types.SimpleNamespace', ([], {}), '()\n', (1403, 1405), False, 'import types\n'), ((1461, 1508), 'os.path.join', 'os.path.join', (['role_path', '"""defaults"""', '"""main.yml"""'], {}), "(role_path, 'defaults', 'main.yml')\n", (1473, 1508), False, 'import os\n'), ((1520, 1545), 'os.path.isfile', 'os.path.isfile', (['vars_file'], {}), '(vars_file)\n', (1534, 1545), False, 'import os\n'), ((1763, 1796), 'os.path.dirname', 'os.path.dirname', (['config_file_name'], {}), '(config_file_name)\n', (1778, 1796), False, 'import os\n'), ((1882, 1932), 'json.dump', 'json.dump', (['config', 'f'], {'ensure_ascii': '(False)', 'indent': '(4)'}), '(config, f, ensure_ascii=False, indent=4)\n', (1891, 1932), False, 'import json\n'), ((2249, 2261), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2258, 2261), False, 'import json\n'), ((6392, 6435), 'sys.exit', 'sys.exit', (['"""No hosts were selected. Exiting"""'], {}), "('No hosts were selected. 
Exiting')\n", (6400, 6435), False, 'import sys\n'), ((6889, 6918), 'json.dumps', 'json.dumps', (['current_role_vars'], {}), '(current_role_vars)\n', (6899, 6918), False, 'import json\n'), ((751, 824), 'subprocess.CalledProcessError', 'subprocess.CalledProcessError', (['proc.returncode', 'dialog_cmd'], {'output': 'stderr'}), '(proc.returncode, dialog_cmd, output=stderr)\n', (780, 824), False, 'import subprocess\n'), ((1632, 1649), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (1646, 1649), False, 'import yaml\n')] |
#getLog(mt) : Returns the natural log of the matrix.
import numpy as np
from .isPSDMd import isPSD
__all__ = ['getLog']
def getLog(M, eps=1e-15):
    r"""Takes as input a hermitian PSD matrix M and returns its natural log.

    Parameters
    ----------
    M : numpy.ndarray
        2-d array representing a hermitian matrix
    eps : float
        Optional with default 1e-15, sets tolerance for the smallest eigenvalue

    Returns
    ----------
    lgMt : numpy.ndarray
        log of the input array

    Raises
    ----------
    ValueError
        If M is not square and hermitian, or if its eigenvalues are not
        sufficiently positive.

    Notes
    ----------
    Scales by eps, all eigenvalues between their actual value and 1.0,
    if any of the eigenvalue is smaller than eps
    """
    try:
        # isPSD with flag=True returns (is_psd, eigenvalues, eigenvectors).
        (psd, val, vec) = isPSD(M, eps, flag=True)
    except Exception:
        # Narrowed from a bare `except` so KeyboardInterrupt/SystemExit
        # are not swallowed and re-labelled as a ValueError.
        raise ValueError('Input matrix is not square and hermitian')
    if psd == False:
        raise ValueError('Eigenvalues of input matrix not sufficiently positive')
    # If any of the eigenvalues is smaller than eps, then rescale the spectrum
    # to make all eigenvalues at least eps; this prevents log from complaining.
    # (The unused local `n = len(val)` from the original was removed.)
    if np.any(val < eps):
        val = (1 - eps) * val + eps * 1.
    # M = V diag(val) V^H  =>  log(M) = V diag(log(val)) V^H; broadcasting
    # log(val) * vec scales each eigenvector column by its log-eigenvalue.
    lgMt = np.dot(np.log(val) * vec, vec.conj().T)
    return lgMt
| [
"numpy.log",
"numpy.any"
] | [((1135, 1152), 'numpy.any', 'np.any', (['(val < eps)'], {}), '(val < eps)\n', (1141, 1152), True, 'import numpy as np\n'), ((1211, 1222), 'numpy.log', 'np.log', (['val'], {}), '(val)\n', (1217, 1222), True, 'import numpy as np\n')] |
# Copyright 2020 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import cv2
import numpy as np
###############################################################################################################
# Get the environment variables (calibration dataset, image names)
calib_image_dir = os.environ['CALIB_DATASET']
calib_image_list = os.environ['CALIB_IMAGE_LIST']
calib_batch_size = int(os.environ['BATCH_SIZE'])
input_node = os.environ['INPUT_NODE_NAME']
input_width = int(os.environ['INPUT_WIDTH'])
input_height = int(os.environ['INPUT_HEIGHT'])
# cv2.resize expects dsize as (width, height).  Bug fix: the original built
# the tuple as (input_width, input_width), leaving INPUT_HEIGHT unused.
size = (input_width, input_height)
###############################################################################################################
def preprocess(image, target_size=None):
    """
    Resize the image to fit the model input size and scale pixel values.

    Parameters
    ----------
    image : numpy.ndarray
        Image as loaded by cv2.imread.
    target_size : tuple of int, optional
        (width, height) passed to cv2.resize; defaults to the module-level
        `size` built from the INPUT_WIDTH/INPUT_HEIGHT environment variables.

    Returns
    -------
    numpy.ndarray
        float32 image normalized from [0:255] pixel values to [0:1].
    """
    if target_size is None:
        target_size = size
    # Resize the image to match the model requirements.
    # TODO(review): the original comment ("vraiment resize ?") questioned
    # whether resizing is really wanted here — confirm with the model spec.
    image = cv2.resize(image, target_size, interpolation=cv2.INTER_NEAREST)
    # Convert to float32 before scaling.
    image = np.asarray(image, dtype=np.float32)
    # Scale to [0, 1].
    return image / 255.0
###############################################################################################################
def calib_input(iter):
    """
    Build one batch of preprocessed images for quantization calibration.

    Parameters
    ----------
    iter : int
        Zero-based batch index; selects which slice of the calibration
        image list to load.

    Returns
    -------
    dict
        Maps the input node name to the list of preprocessed images.

    Raises
    ------
    TypeError
        If one of the listed image files cannot be read.
    """
    images = []
    # Read the calibration image list; the original leaked the file handle
    # (open(...).readlines() without a close), fixed with a context manager.
    with open(calib_image_list) as list_file:
        line = list_file.readlines()
    # Run a batch
    for index in range(0, calib_batch_size):
        # Get the image name to process
        curline = line[iter * calib_batch_size + index]
        calib_image_name = curline.strip()
        # Open the corresponding image file
        filename = os.path.join(calib_image_dir, calib_image_name)
        image = cv2.imread(filename)
        # cv2.imread returns None (instead of raising) on failure.
        if image is None:
            raise TypeError("Image {} is empty.".format(filename))
        # Resize and normalize image
        image = preprocess(image)
        # Append image to list of inputs
        images.append(image)
        print("Iteration number : {} and index number {} and file name {} ".format(iter, index, filename))
    # Link input images to the input node name
    return {input_node: images}
| [
"os.path.join",
"cv2.resize",
"numpy.asarray",
"cv2.imread"
] | [((1452, 1508), 'cv2.resize', 'cv2.resize', (['image', 'size'], {'interpolation': 'cv2.INTER_NEAREST'}), '(image, size, interpolation=cv2.INTER_NEAREST)\n', (1462, 1508), False, 'import cv2\n'), ((1551, 1568), 'numpy.asarray', 'np.asarray', (['image'], {}), '(image)\n', (1561, 1568), True, 'import numpy as np\n'), ((2223, 2270), 'os.path.join', 'os.path.join', (['calib_image_dir', 'calib_image_name'], {}), '(calib_image_dir, calib_image_name)\n', (2235, 2270), False, 'import os\n'), ((2281, 2301), 'cv2.imread', 'cv2.imread', (['filename'], {}), '(filename)\n', (2291, 2301), False, 'import cv2\n')] |
import os
class Config(object):
    """Base configuration inherited by every environment-specific config.

    The secret key and the default database URI are read from the
    environment once, at import time.
    """
    CSRF_ENABLED = True
    DEBUG = False
    SECRET = os.getenv('SECRET')
    SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
    """Development environment: debug on, local SQLite database."""
    SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/.usrmgrdev.db'
    DEBUG = True
class TestingConfig(Config):
    """Testing environment: a separate SQLite database so test runs
    never touch development data."""
    DEBUG = True
    TESTING = True
    SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/.usrmgrtest.db'
class StagingConfig(Config):
    """Staging environment: its own SQLite database, debug still on."""
    SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/.usrmgrstaging.db'
    DEBUG = True
class ProductionConfig(Config):
    """Production environment: debugging and testing both disabled."""
    SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/.usrmgrprod.db'
    TESTING = False
    DEBUG = False
# Lookup table used at application-creation time to select the
# configuration class for a given environment name.
app_config = {
    'development': DevelopmentConfig,
    'testing': TestingConfig,
    'staging': StagingConfig,
    'production': ProductionConfig,
}
| [
"os.getenv"
] | [((136, 155), 'os.getenv', 'os.getenv', (['"""SECRET"""'], {}), "('SECRET')\n", (145, 155), False, 'import os\n'), ((186, 211), 'os.getenv', 'os.getenv', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (195, 211), False, 'import os\n')] |
# -*- coding: utf-8 -*-
"""
TSDB DAO
"""
# This code is a part of coolisf library: https://github.com/letuananh/intsem.fx
# :copyright: (c) 2014 <NAME> <<EMAIL>>
# :license: MIT, see LICENSE for more details.
import os
import os.path
import logging
from delphin import itsdb
from coolisf.model import Document, Sentence
# ----------------------------------------------------------------------
# Configuration
# ----------------------------------------------------------------------
logger = logging.getLogger(__name__)
MY_DIR = os.path.dirname(os.path.realpath(__file__))
# ----------------------------------------------------------------------
# Configuration
# ----------------------------------------------------------------------
def read_tsdb(path, name=None, title=None):
    """ Read a TSDB profile in ISF format.

    Arguments:
        path: location of the [incr tsdb()] profile on disk
        name: document name (defaults to the profile directory's basename)
        title: document title (defaults to name)

    Returns:
        a Document with one Sentence per `item` row and the MRS string of
        each `result` row attached to its sentence.

    Raises:
        ValueError: when a parse-id has no matching item, or when an MRS
            string is empty.
    """
    prof = itsdb.ItsdbProfile(path)
    if name is None:
        name = os.path.basename(path)
    if title is None:
        title = name
    doc = Document(name=name, title=title)
    # Read all sentences
    tbl_item = prof.read_table('item')
    for row in tbl_item:
        iid = row.get('i-id')
        raw_text = row.get('i-input').strip()
        sent = Sentence(text=raw_text, ident=iid)
        doc.add(sent)
    # Read all parses and attach each MRS to its sentence
    tbl_result = prof.read_table('result')
    for row in tbl_result:
        pid = row.get('parse-id')
        mrs = row.get('mrs')
        if pid not in doc.ident_map:
            # ValueError (subclass of Exception) keeps existing handlers
            # working while being more specific than a bare Exception.
            raise ValueError('pid {} cannot be found in provided TSDB profile'.format(pid))
        elif mrs:
            doc.ident_map[pid].add(mrs)
        else:
            raise ValueError("Invalid MRS string in provided TSDB profile")
    return doc
| [
"logging.getLogger",
"coolisf.model.Document",
"delphin.itsdb.ItsdbProfile",
"os.path.realpath",
"os.path.basename",
"coolisf.model.Sentence"
] | [((499, 526), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (516, 526), False, 'import logging\n'), ((552, 578), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (568, 578), False, 'import os\n'), ((846, 870), 'delphin.itsdb.ItsdbProfile', 'itsdb.ItsdbProfile', (['path'], {}), '(path)\n', (864, 870), False, 'from delphin import itsdb\n'), ((983, 1015), 'coolisf.model.Document', 'Document', ([], {'name': 'name', 'title': 'title'}), '(name=name, title=title)\n', (991, 1015), False, 'from coolisf.model import Document, Sentence\n'), ((907, 929), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (923, 929), False, 'import os\n'), ((1196, 1230), 'coolisf.model.Sentence', 'Sentence', ([], {'text': 'raw_text', 'ident': 'iid'}), '(text=raw_text, ident=iid)\n', (1204, 1230), False, 'from coolisf.model import Document, Sentence\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Advent of Code 2021, day seventeen."""
import re
from typing import Optional
MAX_ITER = 250
INPUT_FILE = 'data/day_17.txt'
INPUT_RE = re.compile(r'target area:\sx=(\d+)\.\.(\d+),\s'
r'y=(-?\d+)\.\.(-?\d+)')
def simulate(x: int, y: int,
             target_x_range: range, target_y_range: range) -> Optional[int]:
    """Step a probe launched from the origin with velocity (x, y).

    Returns the highest y position reached if the probe is inside the
    target area at the start of any of the first MAX_ITER steps,
    otherwise None.
    """
    pos_x = pos_y = 0
    peak = 0
    for _ in range(MAX_ITER):
        if pos_x in target_x_range and pos_y in target_y_range:
            return peak
        # Advance one step: drag pulls the horizontal velocity toward zero
        # (never below it), gravity decreases the vertical velocity by one.
        pos_x += x
        x = max(0, x - 1)
        pos_y += y
        y -= 1
        peak = max(peak, pos_y)
    return None
def main() -> None:
    """Shooting: solve both parts of the puzzle and print the answers."""
    # Parse the target area; only the first line of the input is needed.
    with open(INPUT_FILE, encoding='utf-8') as input_file:
        match = INPUT_RE.match(input_file.readline().strip())
        target_y_0 = int(match.group(3))
        target_y_1 = int(match.group(4))
        target_x_range = range(int(match.group(1)),
                               int(match.group(2)) + 1)
        target_y_range = range(int(match.group(3)),
                               int(match.group(4)) + 1)
    # Part One: a shot rising with some initial speed peaks at a triangular
    # number; it can land in the target iff a falling triangular number puts
    # it inside the target's vertical band.
    max_height = 0
    for n in range(0, MAX_ITER):
        height = n * (n + 1) // 2  # Rising
        height_prime_min = height - target_y_1
        height_prime_max = height - target_y_0
        for m in range(0, MAX_ITER):
            height_prime = m * (m + 1) // 2  # Falling
            if height_prime_min <= height_prime <= height_prime_max:
                max_height = height
    print(f'Part One: Maximum height: {max_height}')
    # Part Two: brute-force every initial velocity and count the hits.
    # (The original also re-tracked max_height here but never used it;
    # that dead bookkeeping was removed.)
    solutions = []
    for x in range(-MAX_ITER, MAX_ITER):
        for y in range(-MAX_ITER, MAX_ITER):
            if simulate(x, y, target_x_range, target_y_range) is not None:
                solutions.append((x, y))
    print(f'Part Two: Permutations of velocities that reach target: '
          f'{len(solutions)}')


# Guard the entry point so importing this module no longer runs the solver.
if __name__ == '__main__':
    main()
| [
"re.compile"
] | [((186, 263), 're.compile', 're.compile', (['"""target area:\\\\sx=(\\\\d+)\\\\.\\\\.(\\\\d+),\\\\sy=(-?\\\\d+)\\\\.\\\\.(-?\\\\d+)"""'], {}), "('target area:\\\\sx=(\\\\d+)\\\\.\\\\.(\\\\d+),\\\\sy=(-?\\\\d+)\\\\.\\\\.(-?\\\\d+)')\n", (196, 263), False, 'import re\n')] |
import numpy as np
import torch
class AverageMeter(object):
    """Tracks the latest value and the running average of a series."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.sum = 0
        self.count = 0
        self.avg = 0

    def update(self, val, num):
        """Record `val` observed `num` times and refresh the average."""
        self.val = val
        self.sum += val * num
        self.count += num
        self.avg = self.sum / self.count
def convert_to_unit_vector(angles):
    """Convert angle pairs to 3-d unit gaze vectors.

    Args:
        angles: tensor of shape (N, 2) — column 0 and column 1 are treated
            as pitch and yaw respectively (assumed from the indexing; the
            units are presumably radians — confirm against the caller).

    Returns:
        Three tensors (x, y, z), each of shape (N,), forming unit vectors.
    """
    pitch = angles[:, 0]
    yaw = angles[:, 1]
    x = -torch.cos(pitch) * torch.sin(yaw)
    y = -torch.sin(pitch)
    # Bug fix: the original computed z = -cos(yaw) * cos(yaw), never using
    # column 0; the spherical conversion requires -cos(pitch) * cos(yaw).
    z = -torch.cos(pitch) * torch.cos(yaw)
    norm = torch.sqrt(x**2 + y**2 + z**2)
    x /= norm
    y /= norm
    z /= norm
    return x, y, z
def compute_angle_error(preds, labels):
    """Angular error, in degrees, between predicted and ground-truth gaze.

    Args:
        preds: (N, 2) tensor of predicted angle pairs.
        labels: (N, 2) tensor of ground-truth angle pairs.

    Returns:
        (N,) tensor of per-sample angular errors in degrees.
    """
    pred_x, pred_y, pred_z = convert_to_unit_vector(preds)
    label_x, label_y, label_z = convert_to_unit_vector(labels)
    angles = pred_x * label_x + pred_y * label_y + pred_z * label_z
    # Clamp to the valid acos domain: floating-point error can push the dot
    # product of two unit vectors slightly outside [-1, 1], yielding NaN.
    angles = torch.clamp(angles, -1.0, 1.0)
    return torch.acos(angles) * 180 / np.pi
| [
"torch.sin",
"torch.cos",
"torch.sqrt",
"torch.acos"
] | [((566, 602), 'torch.sqrt', 'torch.sqrt', (['(x ** 2 + y ** 2 + z ** 2)'], {}), '(x ** 2 + y ** 2 + z ** 2)\n', (576, 602), False, 'import torch\n'), ((439, 462), 'torch.sin', 'torch.sin', (['angles[:, 1]'], {}), '(angles[:, 1])\n', (448, 462), False, 'import torch\n'), ((472, 495), 'torch.sin', 'torch.sin', (['angles[:, 0]'], {}), '(angles[:, 0])\n', (481, 495), False, 'import torch\n'), ((531, 554), 'torch.cos', 'torch.cos', (['angles[:, 1]'], {}), '(angles[:, 1])\n', (540, 554), False, 'import torch\n'), ((413, 436), 'torch.cos', 'torch.cos', (['angles[:, 0]'], {}), '(angles[:, 0])\n', (422, 436), False, 'import torch\n'), ((505, 528), 'torch.cos', 'torch.cos', (['angles[:, 1]'], {}), '(angles[:, 1])\n', (514, 528), False, 'import torch\n'), ((900, 918), 'torch.acos', 'torch.acos', (['angles'], {}), '(angles)\n', (910, 918), False, 'import torch\n')] |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.9.6)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x08\x75\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x80\x00\x00\x00\x80\x08\x06\x00\x00\x00\xc3\x3e\x61\xcb\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x03\xc9\x00\x00\x03\xc9\
\x01\xf5\x14\x91\x1b\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x07\xf2\x49\x44\
\x41\x54\x78\x9c\xed\x9d\x59\x4c\x54\x57\x18\xc7\xff\x77\x76\x61\
\x04\x06\x1d\x56\x57\xdc\x40\x11\x10\x15\x10\xd3\x2d\xd1\xb4\xf1\
\xc5\xb6\x69\x4d\x7d\xa8\x35\xc6\x36\xb1\x69\x6b\x35\x69\xfa\x50\
\x13\x63\x13\x93\xa2\x36\x3e\x74\x49\x9b\x36\x8d\xb5\x69\x6c\x7c\
\x30\x4d\x9a\xb6\x31\x36\x6d\xa8\xa0\x25\x15\x94\x45\x71\x47\x71\
\x65\x15\x04\x64\x66\x18\xe6\xf6\x01\xb0\x38\x2c\x33\x30\x33\xf7\
\xbb\x73\xbe\xf3\x7b\xe3\xce\xbd\xf7\xfb\xe6\xcc\x8f\x73\xce\x3d\
\x67\xce\x19\x45\x55\x55\x88\x82\xb2\x67\x8f\xa1\xb8\xae\x79\x07\
\x14\xbc\x09\x60\x21\x00\x25\x4c\xb7\x56\x01\x5c\x86\x8a\x6f\x4e\
\x65\x27\x1d\x54\x77\xef\xf6\x85\xe9\xbe\xe4\x98\xa8\x13\x08\x27\
\xc5\xe7\x9b\x3f\x86\x82\x8f\x22\x70\x6b\x05\xc0\x22\x28\x38\x50\
\x7c\xbe\xd9\x01\x60\x57\x04\x62\x90\xa0\x88\x52\x03\x64\x6f\xd8\
\x60\x89\x87\xb3\x13\x80\x2d\xc2\xa1\x5c\x9d\x68\x89\xaf\x3b\x7a\
\xd4\x13\xe1\x38\x9a\x60\xa0\x4e\x20\x5c\x24\xf8\x92\x16\x20\xf2\
\x1f\x3e\x00\xd8\x06\x63\x09\x81\x30\x02\xa8\x06\xd5\x2c\x62\xac\
\x48\x23\x8c\x00\x92\xc9\x21\x05\x60\x8e\x14\x80\x39\x52\x00\xe6\
\x90\x3f\x06\xae\x2d\x3f\x9c\x0a\x60\x15\x80\xc5\x08\x61\x5c\xa2\
\xbf\xad\xd7\xee\xad\xb8\xbd\x22\x6c\x89\x8d\x83\xa9\x70\xc6\x19\
\xe3\xb4\x29\xdd\x21\xdc\xc2\x0b\xe0\x02\x80\xd3\x27\x56\x6f\xba\
\x17\xa6\xb4\x26\x05\x99\x00\x6b\xcb\x0f\x1b\x01\x7c\x08\x60\x37\
\x00\x0b\x49\x12\xf4\x78\x00\xec\x01\x50\x72\x62\xf5\xa6\x7e\x8a\
\x04\x28\x9b\x80\x3d\x00\xf6\x82\xef\x87\x0f\x0c\xbc\xf7\xbd\x18\
\x28\x0b\x12\x48\x6a\x80\xb5\xe5\x87\x33\x01\xd4\x01\x30\x6a\x1e\
\x5c\x9f\xf4\x03\xc8\x3e\xb1\x7a\xd3\x45\xad\x03\x53\xd5\x00\xcf\
\x42\x7e\xf8\xc3\x31\x62\xa0\x4c\x34\x87\x4a\x80\xc5\x44\x71\xf5\
\x0c\x49\x99\x50\x09\xc0\xb9\xdd\x1f\x0b\x92\x32\x91\xe3\x00\xcc\
\x91\x02\x30\x47\x0a\xc0\x1c\x29\x00\x73\xa4\x00\xcc\x91\x02\x30\
\x47\x0a\xc0\x1c\x29\x00\x73\xa4\x00\xcc\x91\x02\x30\x47\x0a\xc0\
\x1c\x29\x00\x73\xa4\x00\xcc\x91\x02\x30\x47\x0a\xc0\x1c\x29\x00\
\x73\xa4\x00\xcc\x91\x02\x30\x47\x0a\xc0\x1c\x29\x00\x73\xa4\x00\
\xcc\x91\x02\x30\x47\x0a\xc0\x1c\x92\x5d\xc2\xbc\x6d\xde\x02\xd5\
\x2d\xc6\xe6\x54\xe1\x42\xb1\x2a\x05\x14\x71\x69\xb6\x89\xf3\xc1\
\xaa\x7a\xa5\x00\xc3\x51\xcc\x8a\x95\x22\xae\x6c\x02\x98\x23\x05\
\x60\x8e\x14\x80\x39\x52\x00\xe6\x48\x01\x98\x23\x05\x60\x8e\x14\
\x80\x39\x52\x00\xe6\x48\x01\x02\x60\x50\xc2\xf5\x9b\x13\xfa\x44\
\x0a\x30\x0e\x39\xd3\x92\x11\x6b\x16\x7b\x37\x1b\x29\xc0\x18\xc4\
\x59\xac\x58\x9f\x91\x89\x2e\x8f\x9b\x3a\x95\x88\x22\x05\x18\x83\
\x77\x72\x8a\xd0\xd8\xdd\x49\x9d\x46\xc4\x91\x02\x8c\xc2\xba\x39\
\x0b\xb1\x3c\x29\x15\xd5\xad\x4d\xd4\xa9\x44\x1c\x29\x80\x1f\x73\
\xa6\x26\xe0\x8d\xcc\x5c\xf4\xf9\x7c\xa8\x6f\x6b\xa1\x4e\x27\xe2\
\x48\x01\x86\x61\x35\x98\xb0\x63\x59\x31\x4c\x06\x23\x2e\x77\xb4\
\xc2\xed\xf3\x52\xa7\x14\x71\xa4\x00\xc3\xd8\xb2\x64\x19\x66\xd8\
\xe3\x00\x00\xd5\xad\xf7\x89\xb3\xd1\x06\x29\xc0\x20\x45\x29\x33\
\xb1\x66\xe6\xbc\xc7\x7f\xd7\x32\x68\xff\x01\x29\x00\x00\xc0\x69\
\x8b\xc5\xb6\xa5\x2b\x1f\xff\xdd\xd3\xd7\x87\xab\x9d\xed\x84\x19\
\x69\x07\x7b\x01\x0c\x8a\x82\xed\xcb\x8a\x60\x1f\x36\xe0\x53\xd7\
\xde\x04\x9f\x20\x3f\xa8\x19\x08\xf6\x02\xbc\x32\x7f\x09\xb2\x1c\
\xce\x27\x8e\xd5\x30\xa9\xfe\x01\xe6\x02\x64\x3a\x9c\x78\x75\xfe\
\x92\x11\xc7\x6b\x98\x74\x00\x01\xc6\x02\xc4\x9a\xcd\x78\x7f\x59\
\xd1\x88\xc9\x9e\x16\x57\x0f\xee\xf6\x74\x11\x65\xa5\x3d\x6c\x05\
\xd8\x96\x5d\x00\xa7\x2d\x76\xc4\xf1\xda\xd6\x66\x82\x6c\xe8\x60\
\x29\xc0\x9a\x99\x19\x58\x95\x3a\x73\xd4\xd7\xb8\x3c\xff\x0f\xc1\
\x4e\x80\x74\x7b\x1c\xb6\x2c\xc9\x1f\xf5\x35\x15\x40\x6d\x1b\x9f\
\x0e\x20\xc0\x4c\x00\x93\xc1\x88\x9d\x79\xab\x60\x35\x8c\xbe\x20\
\xaa\xb1\xab\x03\x9d\x6e\x97\xc6\x59\xd1\xc2\x4a\x80\x4d\x99\x39\
\x98\x13\xe7\x18\xf3\x75\x6e\xd5\x3f\x40\x24\x80\xea\x53\x35\x9f\
\x68\xcf\x4f\x4a\xc3\xba\x39\x8b\xc6\x3d\x87\x72\xfa\x97\xa2\x4c\
\x00\x2a\x01\xdc\x6a\x8d\x96\xf1\x12\xac\x36\xbc\x9b\x53\x88\xf1\
\xbe\xdd\x47\x3d\xfd\xab\x75\x99\x0c\x21\x7c\x13\xa0\x00\x78\x2f\
\xb7\x08\x71\x96\xf1\x17\xdf\x72\x99\xfe\xf5\x47\x78\x01\xd6\x67\
\x64\x22\x77\x7a\x4a\xc0\xf3\x38\xb6\xff\x80\xe0\x02\xcc\x8b\x4f\
\xc4\xc6\x85\x39\x41\x9d\xcb\x65\xfa\xd7\x1f\x61\x05\xb0\x19\x4d\
\xd8\x99\x57\x0c\x93\x21\xf0\x5b\x7c\xe4\xe5\x33\xfd\xeb\x8f\xb0\
\x02\x6c\xcd\x5e\x8e\x94\x58\x7b\x50\xe7\xd6\xb6\x35\xb3\x99\xfe\
\xf5\x47\x48\x01\x9e\x4a\x9b\x8d\xe7\xd2\xe7\x06\x7d\x7e\x2d\xd3\
\xf6\x1f\x10\x50\x80\xa4\x98\x58\xbc\x95\xbd\x32\xf0\x89\xc3\xe0\
\xda\x01\x04\x04\x13\xc0\xa8\x18\xb0\x23\xb7\x18\x31\xa6\xe0\xf7\
\xbe\x6a\x75\x3d\x62\x35\xfd\xeb\x8f\x50\x02\xbc\xb6\x20\x1b\x0b\
\x1d\xd3\x26\x74\x0d\xd7\xde\xff\x10\x34\xdb\xc4\x45\x80\xec\x69\
\x49\x78\x71\x5e\xd6\x84\xaf\x8b\x35\x5b\xf0\xf2\x24\xae\x1b\x8d\
\xeb\x0f\x3b\x70\xae\xe5\x5e\x58\xee\xa5\x15\x42\x08\x30\xd5\x62\
\xc1\xf6\xbc\x91\xdf\xee\x09\x86\x82\xe4\x74\x14\x24\xa7\x87\x9c\
\x43\x87\xdb\x85\x0f\xca\x8e\x87\x7c\x1f\xad\x11\xa2\x09\x78\x7b\
\x69\x21\x12\xad\x31\x64\xf1\xbd\x3e\x1f\xf6\x9f\x2d\x43\xbb\xbb\
\x97\x2c\x87\xc9\x12\xf5\x02\xbc\x30\x7b\x41\x58\xfe\x83\x43\xe1\
\xbb\xfa\x2a\x5c\x6c\x6f\x25\xcd\x61\xb2\x44\xb5\x00\xb3\xec\xf1\
\xd8\x9c\x99\x47\x9a\xc3\x9f\xb7\x1b\x70\xfc\xe6\x55\xd2\x1c\x42\
\x21\x6a\x05\xb0\x18\x4c\xd8\x91\x5f\x0c\xb3\xd1\x48\x96\xc3\x95\
\x8e\x76\x7c\x5d\x77\x86\x2c\x7e\x38\x88\x5a\x01\x36\x67\xe5\x61\
\x96\x3d\x9e\x2c\x7e\xa7\xdb\x85\xfd\x55\x65\xf0\xfa\xfa\xc9\x72\
\x08\x07\x51\x29\x40\x61\x72\x3a\x9e\x9f\x3d\x9f\x2c\x7e\xbf\xea\
\xc3\xfe\xaa\x72\xb4\xb9\x1e\x91\xe5\x10\x2e\xa2\x4e\x80\x44\x6b\
\x0c\xb6\xe5\x14\x92\xe6\x70\xa8\xfe\x2c\xea\x1f\x88\xb1\x79\x44\
\xd4\x8d\x03\x2c\x48\x70\xe0\x8f\xc6\x6b\x21\xdf\x67\x7d\x46\xe6\
\xa4\xc6\x0d\xfe\xba\xd3\x80\xdf\x6e\x5c\x09\x39\xbe\x5e\x88\x3a\
\x01\x2a\x9a\xee\xa0\xa2\xe9\x4e\x48\xf7\x98\x1b\xe7\xc0\x4b\x93\
\x18\xfd\xbb\xd6\x19\xfd\x9d\x3e\x7f\xa2\xae\x09\x08\x07\xcb\x93\
\xd2\x26\x7c\xcd\x43\x8f\x1b\xfb\x2a\xcb\xd0\xd7\x1f\xdd\x9d\x3e\
\x7f\x78\x0a\xe0\x9c\x98\x00\xfd\xaa\x0f\x07\xaa\xca\xd1\x2a\x40\
\xa7\xcf\x1f\x76\x02\xc4\x59\xac\x98\x9f\x90\x38\xa1\x6b\xbe\xaf\
\xaf\xc6\xf9\x76\x31\x17\x8d\xb2\x13\x20\xdf\x99\x36\xa1\xce\x5f\
\xe9\x9d\x1b\xf8\xf5\xc6\xa5\x08\x66\x44\x0b\x3b\x01\x56\x4c\xa0\
\xfd\x6f\x78\xf8\x00\x5f\xd5\x8a\xd5\xe9\xf3\x87\x95\x00\x46\xc5\
\x80\x5c\x67\xe0\x35\x02\x00\xd0\xe5\x71\xa3\xa4\xb2\x0c\x1e\xc1\
\x17\x8b\xb0\x12\x20\x2b\x71\x3a\x62\x4c\xe6\x80\xe7\xf9\x54\x15\
\x9f\x9e\x3d\x85\x96\xde\x1e\x0d\xb2\xa2\x85\x95\x00\xc1\x3e\xfe\
\x1d\xbe\x58\xcd\x66\x9f\x00\x5e\x02\x38\x03\x7f\x6f\xa0\xec\xee\
\x4d\xfc\xd2\x70\x51\x83\x6c\xf4\x01\x1b\x01\x92\x63\xec\x48\xb7\
\x4f\x1d\xf7\x9c\x1b\x5d\x1d\xf8\xb2\xe6\x5f\x8d\x32\xd2\x07\x6c\
\x04\x08\x54\xfd\x77\x79\x3c\x28\xa9\x3c\xc9\x6e\x85\x30\x1b\x01\
\xc6\x7b\xfc\xf3\xa9\x2a\x0e\x9e\x3b\x85\xe6\x47\xe2\x77\xfa\xfc\
\x61\x21\x80\xcd\x68\xc2\xe2\xc4\xa4\x31\x5f\xff\xf1\x52\x35\xdb\
\xd5\x41\x2c\x04\xc8\x9d\x9e\x02\xf3\x18\xab\x84\xcb\xef\x35\xe2\
\xe7\xeb\x7c\x3a\x7d\xfe\xb0\x10\x60\xac\xf6\xbf\xb1\xab\x03\x5f\
\xd4\x54\x68\x9c\x8d\xbe\x10\x5e\x00\x05\x40\xbe\x33\x75\xc4\xf1\
\xee\x3e\x0f\x3e\xa9\x3c\x09\xb7\x60\xd3\xbb\x13\x45\x78\x01\x32\
\xe2\x13\xe1\xb0\x4d\x79\xe2\xd8\x40\xa7\xef\x34\x9a\x18\x76\xfa\
\xfc\x11\x5e\x80\xd1\xaa\xff\x23\x97\x6b\xa2\x6e\x0d\x5f\xa4\x60\
\x20\xc0\x93\xd5\xff\xe9\xfb\xb7\x70\xec\x5a\x3d\x51\x36\xfa\x43\
\x68\x01\xe2\xad\x36\xcc\x8b\xff\x7f\xb9\x78\x63\x77\x27\x3e\xaf\
\xe6\xdd\xe9\xf3\x47\x68\x01\xf2\x9d\xa9\x8f\x37\x87\xec\xe9\xeb\
\x43\x49\xe5\x49\xb8\xfa\x79\x8d\xf4\x05\x42\x68\x01\x86\x46\xff\
\x86\x3a\x7d\xf7\x7b\xba\x89\x33\xd2\x1f\xc2\x0a\x60\x32\x18\x90\
\x3b\x7d\xa0\xfd\xff\xe9\x4a\x2d\xce\xb6\xdc\x25\xce\x48\x9f\x08\
\x2b\xc0\x62\x87\x13\x53\x4c\x26\x54\x34\xdd\xc6\xb1\xab\x17\xa8\
\xd3\xd1\x2d\xc2\x0a\xb0\x3c\x39\x0d\xb7\xba\x1e\xe2\xb3\xea\x7f\
\xc0\x73\x07\xc0\xe0\x10\x56\x80\x2c\x87\x13\xfb\xaa\x4e\xa2\xd7\
\x2b\x3b\x7d\xe3\x21\xa4\x00\x29\xb1\x76\x1c\xbd\x5a\xc7\x7a\xfb\
\xb7\x60\x89\xba\xb5\x81\xc1\xd0\xde\xeb\x92\x3d\xfe\x20\x11\xb2\
\x06\x10\xfd\xab\xdc\xe1\x44\x48\x01\x24\xc1\x43\x25\x00\xef\x39\
\xd8\xd1\x21\x29\x13\x2a\x01\x6e\x13\xc5\xd5\x33\x24\x65\x42\x25\
\x40\x03\x51\x5c\x3d\x43\x52\x26\x54\x02\xd4\x12\xc5\xd5\x33\x24\
\x65\x42\x22\x40\xe9\xc6\xad\xe7\x01\x94\x52\xc4\xd6\x29\xa5\x83\
\x65\xa2\x39\x94\x4f\x01\x25\x84\xb1\xf5\x06\x59\x59\x90\x09\x50\
\xba\x71\xeb\xef\x00\x76\x51\xc5\xd7\x11\xbb\x06\xcb\x82\x04\x45\
\x25\xfe\xb1\xa4\x67\x8e\x7c\xbb\x19\xc0\xeb\x00\x9e\x86\xa0\x23\
\x93\xa3\xe0\x05\xf0\x37\x80\x1f\x4a\x37\x6e\x3d\x44\x99\xc8\x7f\
\xe8\x1b\x19\xd2\x34\x5d\xba\x11\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
\x00\x00\x24\xa4\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x80\x00\x00\x00\x80\x08\x06\x00\x00\x00\xc3\x3e\x61\xcb\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x02\xf4\x00\x00\x02\xf4\
\x01\x80\xad\xac\x55\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x20\x00\x49\x44\
\x41\x54\x78\x9c\xed\x9d\x77\x78\x54\xc5\xda\xc0\x7f\xb3\xbb\xa9\
\x24\x90\x04\x42\x02\x29\x84\x10\x3a\x84\xa2\x52\x44\x4a\xa8\x22\
\x45\x04\x44\x10\x14\x91\x2b\x88\x8a\x8a\xf7\x7a\xc1\x2b\x0a\x72\
\xf5\x8a\xde\x2b\x7e\x08\x76\x10\xa9\x82\x60\x03\x69\xd2\x3b\x04\
\x81\xd0\x43\x28\x81\x10\x52\x68\x21\x10\x12\x20\xc9\xce\xf7\xc7\
\x49\x76\xf7\x9c\x2d\x69\x9b\x10\x21\xbf\xe7\x39\xcf\x93\x9d\x33\
\x33\x67\x4e\xe6\x3d\x53\xde\x79\xe7\x1d\x21\xa5\xa4\xb8\x08\x21\
\x0c\xc0\xb7\xc0\x93\x40\x25\x8b\x5b\xfb\x80\xd1\x52\xca\x7d\xc5\
\xce\xbc\x02\x15\x42\x88\xc1\xc0\x0f\xce\xce\x57\x57\xc2\xf4\x7f\
\x07\x9e\x43\x5d\xf9\x00\x0f\x00\x0b\x84\x10\xa2\x84\xf9\x57\x50\
\xca\x94\x54\x00\x9e\x73\x70\xaf\x01\xd0\xba\x84\xf9\x57\x50\xca\
\x14\x28\x00\x42\x88\xce\x42\x88\x9f\x84\x10\x5f\x09\x21\x1e\xb4\
\x08\x6f\x8a\x52\xc9\x8e\x78\xd2\x22\xbe\x8f\x10\xe2\x5f\x42\x88\
\x95\x42\x88\x89\x42\x08\x8f\xe2\x17\xbb\x02\x67\x21\x1c\x8d\x01\
\x84\x10\xc3\x81\xef\x50\x0b\xca\x4a\x60\x32\xd0\x17\x78\x27\x3f\
\xd0\x60\x70\xa1\x6e\xdd\x7a\x1c\x3f\x7e\xd4\x32\x8b\x04\xa0\x39\
\xf0\x1a\xf0\x3a\x50\xc5\xe2\xde\x06\xa0\x8f\x94\x32\xab\x64\xaf\
\x70\x7f\x20\x84\x08\x44\xf9\x5f\x3a\x17\x29\xa5\xcd\x0b\x18\x06\
\xe4\x02\xd2\xce\x95\x65\xf9\xbb\x49\xe3\x48\x39\x72\xe4\x8b\xb6\
\xe2\xdd\x74\x90\xc7\x1f\x80\xbb\xbd\x32\x54\x5c\xa5\x7f\x19\xec\
\x48\x5b\x4b\xe0\x7b\x1c\x77\x11\xee\x96\x3f\x22\x23\x9b\x13\x51\
\xa7\x1e\x9e\x9e\x95\xc8\xcc\xbc\x69\x79\xcb\xd3\x41\x1e\xdd\x80\
\x69\xc0\x4b\x0e\xe2\x54\x00\xe4\x75\xbf\x7f\x2b\x64\xf4\x59\x52\
\xca\x3f\x85\x10\xfd\x80\x47\x1d\x45\xb4\x29\x00\xc0\xb3\x80\x5e\
\x53\x80\xfc\x96\xc1\x8a\x6a\xd5\xfc\x69\xd8\xb0\x09\x3a\x9d\x8e\
\x76\xed\x3a\xb0\x6e\xdd\x6a\xbb\x0f\xb4\x91\xcf\x08\x21\xc4\xcb\
\xb2\x24\xf3\xd1\xfb\x83\x08\x60\x74\x21\xe3\x6e\x06\xfe\x04\xda\
\x14\x94\xc6\xde\x17\x7e\x46\x1b\xf0\xe4\x93\x4f\xf3\x48\xbb\x8e\
\x18\x0c\x2e\xaa\xf0\xe0\xe0\x10\xc6\xbc\xf8\x2a\xae\xae\xae\x00\
\x74\xed\xd2\x83\xce\x51\xdd\xd0\xe9\xd4\x59\x87\x85\x85\x33\x72\
\xe4\x8b\x54\xad\x5a\x4d\x9b\x75\x7c\x45\xe5\xdf\x3d\xec\xb5\x00\
\xf3\x80\x29\x58\x0c\xda\xa2\xa3\x77\x31\xe6\xc5\x57\xe9\xd4\xa9\
\x0b\x47\x8e\x1c\x02\xc0\xdf\xbf\x3a\x11\x11\xf5\xac\x12\xf7\xe8\
\xd1\x8b\xd6\x6d\xda\x71\xe2\xc4\x31\x72\x72\x72\x08\x0c\xa8\x41\
\x9d\x3a\x75\x89\x39\xb8\x9f\xcb\x97\x2f\x69\xa3\xcf\x70\xca\x9b\
\x54\x50\x2c\x6c\x0a\x80\x94\xf2\x9a\x10\xe2\x33\x2c\x46\xf9\x67\
\xcf\x9e\xe1\xe4\xa9\x38\xea\x46\xd4\xa3\x6d\xdb\x47\x54\xf1\x73\
\x73\x73\x49\x4e\x4e\x22\x33\xeb\x26\x41\x35\x83\xa9\x54\xc9\x0b\
\x9f\x2a\x3e\xb4\x6e\xf5\xb0\x29\x8e\xd1\x68\x64\xfd\xfa\x35\xda\
\x47\x25\x02\xb3\x9d\xf5\x32\xf7\x38\xe7\x80\x25\x45\x88\x0b\x70\
\xb0\xa0\x34\xf6\x5a\x00\x80\x4f\x51\xa6\x6f\x95\xf3\x03\x4e\x9c\
\x38\x46\x5d\x8b\x2f\xfe\xc6\x8d\xeb\xfc\xfa\xdb\x32\x8e\x1f\x3f\
\x4a\x6e\x6e\xae\x29\xbc\x6a\xd5\x6a\xf4\xed\xd3\x9f\x06\x0d\x1a\
\x99\xc2\xae\x5c\xb9\xcc\xa5\x4b\x17\xb5\xcf\x98\x2a\xa5\xbc\xe3\
\xf8\x5d\x2a\x00\x90\x52\xee\x02\x76\x15\x31\xcd\x0f\x14\xa0\x3e\
\xb6\x12\x00\x21\x84\x1e\xe8\x08\x0c\x02\x54\xaa\x5c\x9d\x30\xf7\
\xeb\x67\xce\x9c\x62\xfe\x82\x39\xda\x11\x3f\xa0\x54\xf6\x9c\xef\
\xbf\xa1\x6d\xdb\x47\xe8\xf7\xf8\xc0\xfc\xe2\xd8\x7a\xfe\x40\x21\
\xc4\x1d\xe0\x17\x29\xe5\x65\xc7\xaf\x73\x7f\x23\x84\xf0\x01\xc2\
\x9c\x9d\xaf\x49\x00\x84\x10\xae\xc0\x24\x94\xa9\x46\x75\x5b\x91\
\x83\x82\x43\x00\xc8\xca\xca\x64\xd1\x0f\xf3\x6c\x56\xbe\x25\xbb\
\x76\x6d\x27\x24\x38\x94\x07\x1e\x68\x85\xaf\xaf\x9f\xad\x29\x62\
\xa7\xbc\xeb\x0b\x21\xc4\x06\x60\x9c\x94\xf2\xb8\x45\x99\x5c\x80\
\x56\x40\x08\xb0\x55\x4a\x99\x54\xe4\x37\xbc\x77\x78\x94\x52\x5e\
\x0c\xfa\x2f\xf0\x2f\xec\x54\x7e\xf5\xea\x01\x34\x69\x1c\x09\xc0\
\xea\xd5\xbf\x73\xe3\xc6\x75\x87\x99\xe5\xb3\x7c\xc5\xcf\x64\x65\
\x65\x62\x30\xb8\xd0\xa9\x53\x17\x7b\xe5\x30\x00\x3d\x80\x5d\x42\
\x08\x4b\xfd\xc2\xc7\xc0\x76\x94\x17\x4f\x14\x42\xec\xc9\x53\x27\
\x37\x76\xfc\x5a\x15\x14\x16\xcb\x3a\xb3\xab\x30\x08\x0c\xa8\xc1\
\xa8\x17\x5e\x46\xaf\x57\x54\x03\x71\x27\x63\x55\xf7\x1b\xe1\xce\
\x62\x42\xd9\x46\x04\x23\xf1\x53\xdd\xbb\x75\xeb\x16\xe7\xcf\x27\
\x00\xd0\xb1\x43\x67\x3a\x47\x75\xc3\xc1\x22\x61\x95\xdf\xbe\x69\
\xb7\x8f\xf8\x41\x8b\xb3\xe3\x06\xfe\x57\xa7\x13\x63\x2d\xee\x09\
\x94\xd6\xe0\x03\xe0\x88\x10\xe2\x84\x10\x62\x54\x5e\xcb\x55\x41\
\x31\xb1\x1c\x03\xb8\x59\xde\xf0\xf6\xae\x4c\x64\x64\x73\x22\x23\
\x5b\x50\x2b\x34\xcc\x54\x69\x99\x99\x37\x49\x4b\xbb\xaa\xca\xe4\
\x55\xaa\xd1\x1c\x65\x6d\xe7\x0d\xfc\x59\xcd\x0d\x92\xc8\x36\xdd\
\x4f\x4c\x3c\x4f\xbd\x7a\xca\xba\x51\x8f\x1e\xbd\x68\xd3\xa6\x1d\
\x87\x0e\xc5\x10\x73\x70\x1f\x89\x89\xe7\x55\x79\xb9\xba\xe8\x1a\
\x01\x8d\x5c\x5c\x74\x34\xad\x5f\x85\x83\xc7\xaf\xd9\x2b\x7b\x3d\
\xe0\x6b\x60\xa2\x10\xe2\x23\x14\xed\xd7\xed\x22\xbe\xff\x7d\x8f\
\x5d\x55\xaf\xa7\xa7\x27\xbe\xbe\x7e\xf8\xfa\xf8\xaa\xbe\x58\xcb\
\xd1\x7e\x3e\x95\x2c\xc6\x8a\x7a\xc0\x43\x3d\x76\x24\x37\x37\x47\
\xf5\xdb\xcb\xcb\x1b\xff\xea\xd5\xf1\xf7\x0f\x70\x58\xb8\xb5\x73\
\xdb\x33\xe0\xd1\x60\x74\x3a\x87\x66\x05\x21\xc0\x4c\xe0\x8c\x10\
\x62\xcc\x3d\x6c\x83\xf0\x13\xe0\xeb\xec\xcb\xb2\x05\x50\xd5\x52\
\x6a\x6a\x0a\xbf\xff\xfe\x2b\x2b\x57\xfe\x46\xfd\x7a\x0d\x19\x3a\
\x74\x38\xae\xae\x6e\x78\x7b\x57\xc6\xdb\xbb\xb2\x6a\x0c\xf0\x05\
\x57\xf8\x0f\xae\x54\x43\xcf\x8f\x5c\xe3\x34\xea\x99\x5d\x50\x50\
\x88\xe9\xef\x7d\xfb\xa2\xf9\x7d\xe5\x6f\x76\x07\x90\x06\x83\xb9\
\xfe\x02\xaa\xb9\xb3\xec\x8b\xb6\x24\x5f\xbc\xc5\xa2\xe5\x09\xcc\
\xff\xe5\x9c\xa3\x16\xa1\x26\xf0\x05\xf0\x84\x10\xe2\xb9\x7b\x6d\
\xc0\x28\xa5\xcc\x06\xec\xbe\x7c\x71\x31\x2d\x07\x0b\x21\xbe\x03\
\x46\xd8\x8b\x58\xbb\x76\x38\xa3\x47\x8d\x45\x08\xc1\xa2\x1f\xe6\
\x71\xf0\xe0\x7e\xd5\x7d\x3d\xe0\x85\x9e\x74\xd4\x2d\x84\xc1\x60\
\xe0\xad\x09\x93\xf0\xf2\xf2\x26\xe6\xe0\x7e\x16\x2f\x9e\x6f\x77\
\x4d\xc1\xcd\x55\xc7\xc5\x3f\xfb\x52\xd9\xcb\xc5\xe6\x7d\x80\xc3\
\x27\xd2\x99\xff\xcb\x39\x16\x2d\x4f\xe0\x42\x8a\xdd\x95\xe4\xab\
\x28\x26\x69\xcb\xec\x66\x74\x0f\x22\x84\x58\x02\xd4\xb7\x08\x9a\
\x2e\xa5\x9c\xe3\x28\x8d\x65\x17\xf0\x77\x60\x11\x58\x74\xde\x16\
\xc4\xc7\x9f\xe1\x44\x9c\x32\x43\xeb\xf5\x58\x5f\xdc\xdd\x55\x8b\
\x81\xe4\x82\x55\xe5\x03\x74\xef\xfe\x18\x5e\x5e\xde\x18\x8d\x46\
\xd6\xac\xf9\xdd\x6e\xe5\xd7\x0f\xf7\x66\xf5\x9c\xf6\x0e\x2b\x1f\
\xa0\x69\xfd\x2a\x7c\x3c\x21\x92\x84\xed\xbd\x58\xf3\x7d\x7b\xea\
\x87\x7b\xdb\x8a\xe6\x07\x2c\x15\x42\x7c\xaf\x99\x55\xdc\xeb\xd4\
\x07\x9a\x59\x5c\x8e\xfb\x58\x2c\x04\x40\x4a\x99\x26\xa5\x1c\x0a\
\x04\x02\x23\x81\xb5\x68\xba\x85\x33\x67\x4e\x03\x50\xa5\x8a\x0f\
\x03\xfa\x3f\x85\xc1\xe0\x48\x91\x08\x0d\x1a\x34\xa2\x43\xfb\x28\
\x40\x51\x0e\x69\x07\x8f\x75\xc3\xbc\xf8\xd7\x4b\x0d\x89\x59\xd9\
\x8d\xd8\xf5\x8f\x12\xd5\xd6\xe6\x0c\xd4\x76\xc1\x75\x82\x1e\x1d\
\x02\x39\xf0\x7b\x37\xc6\x0e\x8f\xc0\x4e\xcf\x3f\x1c\x58\x21\x84\
\x30\x2d\x49\x0b\x85\xe7\x84\x10\xf3\x85\x10\x8b\xf3\xae\xf9\x79\
\x61\xf7\xea\xf8\xc1\x2e\x56\x35\x28\xa5\xbc\x0a\x7c\x27\x84\xf8\
\x01\x65\x55\x30\x30\xff\x9e\xd1\x68\xfe\xc2\x23\x23\x5b\x10\x10\
\x50\x83\x25\x3f\x2e\xe4\xc2\x05\xcd\x48\xde\xd5\x95\xc7\x7a\xf6\
\xa5\x4d\x9b\x76\x16\x03\x48\xeb\x2f\x7f\xd2\x6b\x8d\x19\xfa\x78\
\x68\x89\x5e\xc0\xc3\x5d\xcf\x67\x93\x5a\xd0\xb7\x6b\x4d\x46\xbc\
\xb9\x97\x44\xeb\x6e\xa1\x2b\xb0\x52\x08\xd1\x5b\x4a\x79\x13\x78\
\x13\xf8\xc8\x46\x56\xc3\x80\xaa\xc0\x27\x25\x2a\xd0\x5f\x0c\xbb\
\x26\x61\x42\x88\xd7\x51\xd6\x03\x4c\x0c\x1b\x36\x82\xa6\x4d\x9a\
\x59\xc5\x4d\x4b\xbb\x4a\x62\xe2\xf9\xbc\xc5\xa0\x10\x02\x03\x6b\
\x58\xb5\x0e\x39\x39\xd9\x4c\xfd\xe8\xdf\xaa\xc1\x63\x83\x3a\xde\
\x1c\x5d\xdb\xa3\xa0\x51\x7e\xa1\xb9\x76\x3d\x9b\x57\x26\xed\x67\
\xe1\x6f\x09\xb6\x6e\xef\x00\x7a\x03\xb1\xd8\x6f\x1a\x53\x81\x9a\
\x52\x4a\xa3\x53\x0a\x54\xc6\x08\x21\x62\x50\x9a\xfe\x7c\xde\x92\
\x52\x4e\x75\x94\xc6\xe6\x34\x30\xcf\x60\x73\xbc\x65\x58\x60\x60\
\x0d\x9a\x34\x8e\x24\x37\x37\x97\x93\xa7\xe2\x38\x11\x77\xdc\xb4\
\xb4\xeb\xeb\xeb\x47\xd3\xa6\xcd\x68\xdd\xea\x61\x82\x83\x43\x30\
\x18\x0c\xe4\xe4\x64\x13\x1f\x7f\x9a\x93\x27\x4f\x90\x96\x76\x15\
\x83\xc1\x85\xa8\x4e\x5d\x55\xcf\x89\x3d\x7d\x83\x25\x2b\xd5\xad\
\x47\x49\xf0\xa9\xec\xc2\x82\x4f\x5b\xf3\xc9\xdb\xd6\x42\x0a\xb4\
\x03\x0e\xe0\xb8\x5f\x0c\x00\x3a\x58\x06\x08\x21\xda\x0a\x21\x46\
\x08\x21\xfc\x9d\x56\xd0\x72\x84\xbd\x4e\x3c\x7f\x2c\x60\xa2\x53\
\xc7\x2e\xec\xda\xb5\x9d\xcd\x5b\x36\x90\x9e\xae\xcc\x46\x84\x10\
\x34\x6c\xd8\x84\xc1\x4f\x0d\xc3\xcd\xcd\xac\x47\x8a\xde\xbb\x9b\
\xd5\xab\x97\x93\x99\x99\x09\x80\x5e\xaf\xa7\x65\xcb\x87\xe8\xd0\
\x3e\x8a\x2a\x95\xab\x90\x7e\x3d\xdd\x14\xf7\x93\x6f\xe3\x18\xd2\
\xa7\x64\xdd\x80\x25\x6b\xb6\xa4\xf0\xce\xb4\x23\xf6\x6e\x87\x59\
\xfe\xf0\xf0\xf0\x40\xa7\xd3\x71\xf3\xa6\x6a\x4a\xfa\x24\xb0\x59\
\x08\xf1\x18\xf0\x2e\x66\xd3\xf6\x1b\x42\x88\x7e\x52\xca\x8d\x4e\
\x2b\x6c\x39\xc0\x66\x17\x20\x84\x98\x0e\xbc\x6a\x19\xe6\xee\xe6\
\xce\xad\xdb\xb7\x6c\x66\x12\x1a\x1a\xc6\x8b\xa3\xc7\xa2\xd7\xeb\
\xd9\xfb\xe7\x1e\x7e\xfa\x69\xb1\xcd\xd1\xbe\x5e\xaf\xc7\x60\x30\
\x70\xfb\xb6\x59\x61\xe7\xe6\xaa\x23\xeb\xf8\x00\x7b\x83\xb8\x22\
\xb1\x66\x4b\x0a\xfd\x46\xef\xe0\xf6\x9d\xc2\xb5\xe0\xad\x5b\x3f\
\x8c\x41\x6f\x60\xc7\xce\xad\x96\xc1\x97\x81\x78\xe0\x21\x1b\x49\
\x32\x51\x2c\x99\xcb\xa5\x10\x14\xa7\x0b\xb0\xd7\x02\x2c\x47\x23\
\x00\xf6\x2a\x1f\x20\x21\xe1\x2c\x71\x71\xb1\x34\x68\xd0\x88\xf5\
\xeb\x56\xdb\x9d\xea\xe5\xe6\xe6\x5a\x69\x12\xfb\x74\xa9\xe9\x94\
\xca\xdf\x7d\xe0\x4a\x91\x2a\x1f\xa0\x79\xb3\x96\x08\xa1\xd3\x0a\
\x40\xb5\xbc\xcb\x16\x9e\x28\xb3\x8a\x0e\xd2\xc6\xb6\x37\x21\xc4\
\xc3\x58\xd8\x4f\xdc\x05\x6c\xce\x89\x1d\x61\xcf\x22\x68\x83\x10\
\xe2\x3d\x94\xe5\xe1\x42\x71\xe8\x70\x0c\x9e\x95\x2a\x71\x2d\xbd\
\xf0\xca\xaa\xa6\xf5\xab\xf0\xc5\xbf\x5b\x16\x3a\xbe\x23\xde\xfb\
\xec\x58\x91\x2a\xdf\xa7\x8a\x0f\xb5\x6b\xd7\x51\xfe\xf6\xf1\xe5\
\xda\xb5\xb4\xc2\x26\xf5\x04\xde\x07\x7a\xda\xb8\xf7\x05\xea\x2f\
\xb0\xdc\x63\x77\x2d\x40\x4a\x39\x79\x58\xbf\x5a\x7b\xb4\xe1\xf5\
\xc3\xbd\x99\x3f\xad\x15\xaf\x8d\xa8\xab\x0a\x3f\x76\xec\x30\x7b\
\xf7\xee\x56\x85\x55\xaf\xea\x46\xec\xfa\x47\x19\x3e\x20\x0c\xbd\
\x5e\xfd\x99\x37\xad\x5f\x85\x0d\x0b\x3b\xe2\xef\xa7\x5a\x83\x2a\
\x36\x7a\xcd\x4c\x42\xaf\xd7\xd3\xa9\x63\x17\x2b\x85\x15\x28\x6b\
\x11\xcf\x3f\xff\x22\x42\x08\x84\x10\x3c\x3f\x62\x34\x5e\x5e\xd6\
\x1f\x8f\xbb\xbb\x3b\x51\x51\x5d\xad\x0c\x61\xd1\xe8\x47\xca\x31\
\x05\x1a\xdb\xda\xdf\x19\x14\x3f\xa8\x15\xb0\xeb\xbb\xa5\xf1\xba\
\x69\xb3\xe2\xa8\xe6\xe7\xc6\xdf\x9e\xaa\xcd\x90\x3e\xa1\xe8\xf5\
\x82\xdd\x07\xae\xd0\x76\x80\xe3\xae\x70\xf4\xd3\xe1\x7c\xf5\xfe\
\x03\x00\x9c\x3a\x97\xc1\xc7\x5f\x9f\x60\xe7\xbe\xcb\xb4\x6a\xe6\
\xc7\xc7\x6f\x45\x52\xcd\xd7\x39\x95\x0f\xb0\xe0\xd7\x73\x3c\xf3\
\x46\xb4\x2a\x6c\x40\xff\xa7\x88\x8c\x6c\xc1\x8e\x1d\x5b\x38\x75\
\xfa\x24\x00\xb5\x6a\x85\xf1\x70\xdb\xf6\x54\xae\x5c\x45\x15\xf7\
\xfa\xf5\x74\x76\xee\xda\xc6\xb9\x73\x67\x01\x88\xa8\x53\x97\x76\
\xed\x3a\x72\xe4\xe8\x21\x96\x2e\x5d\xa4\x7d\xdc\x33\x52\xca\x05\
\xda\x40\x1b\x7d\xf0\xdd\xa6\x95\x94\x72\xaf\xa3\x08\x2a\x01\x10\
\x42\x54\x47\x31\x22\x0c\x7b\x73\x54\x7d\x8f\x8f\x27\x44\xda\x9d\
\x32\x49\x09\x61\xed\x57\x92\x90\x94\x69\x37\xf3\x0d\x0b\x3b\xd2\
\xb9\x08\xda\xbd\x92\x90\x9b\x2b\x69\xdc\x63\x2d\x27\xce\xdc\x30\
\x85\xf9\xf9\x55\xe5\xcd\x7f\xbc\x6d\x65\xa2\x5e\x58\x8c\x46\x23\
\xff\xfb\xe4\x3f\x5c\xb9\xa2\xb2\x56\x3b\x01\x34\x96\x52\x5a\xe9\
\xbd\xcb\x91\x00\x5c\x04\x3e\x97\x52\x4e\x29\x28\xa2\x76\x0c\xf0\
\x09\x8a\x89\x16\xff\xfd\xe6\x04\x03\x7b\x06\xd3\xaa\x99\x9f\x55\
\x22\x00\x21\xe0\xe9\xbe\xa1\x4c\xfd\x2a\xd6\xe6\xfd\xe0\x40\x0f\
\x3a\xb6\x2a\xbb\xa9\xb3\x5e\x2f\x98\xf8\x4a\x43\x55\x2b\x70\xf5\
\xea\x15\x4e\x9c\x38\x4e\xc3\x86\x66\x03\x22\xa3\xd1\xc8\xce\x9d\
\xdb\x38\x75\x3a\xce\x64\x8b\x10\x1c\x1c\x42\x44\x9d\x7a\x3c\xfc\
\x70\x7b\x95\xb0\xc4\xc5\xc5\x6a\x2b\x1f\xe0\x7d\x5b\x95\x0f\x20\
\xa5\x74\xfe\xde\xbd\x52\x46\x2b\x00\xbe\x96\x3f\xee\x64\x3b\x1e\
\x54\xbd\x33\xb6\x11\xbb\x63\xae\xb0\x79\xb7\xda\xd6\x3f\xd0\xdf\
\x9d\xb9\x9f\xb4\xb2\xea\xf7\x4b\x93\x5b\xb7\x73\xf1\x70\xd7\x63\
\xd0\x0b\x72\x72\xcd\xad\xda\x9d\x3b\xe6\x29\x67\xea\xc5\x14\x96\
\x2c\x59\xc0\x85\x0b\x89\xaa\xb4\xc7\x8f\x1f\xe5\xf8\xf1\xa3\xec\
\x3f\xb0\x97\xa7\x9e\x1a\x46\x40\x75\x45\x05\x72\xdb\x7a\xe6\x93\
\x03\x64\x09\x21\xdc\xa5\x94\xf6\xa7\x45\x7f\x21\xb4\x02\x30\xde\
\xa0\x17\x1d\x73\x72\xa5\x57\xd7\x76\x01\x76\xbf\xfe\x7c\x3c\x3d\
\xf4\x6c\x5a\xd4\x89\x4b\x57\x6f\x73\x33\x53\x19\x17\xe9\x74\x82\
\xe0\x40\x0f\xa7\xa9\x77\x0b\x62\xf5\xe6\x14\x16\xfc\x7a\x8e\x15\
\x1b\x92\xb8\x71\xd3\x7a\x6c\x96\x3f\xb8\xcb\xca\xca\x62\xf6\xac\
\x2f\x55\x4a\x28\x2d\x17\x2e\x24\x32\x7b\xd6\x97\x8c\x1b\x37\x01\
\x0f\x0f\x0f\x5b\x03\x43\x03\xb0\x0c\x45\x29\xb4\x02\x58\x20\xa5\
\xb4\xbf\x0f\xee\x2f\x80\x4a\x00\xa4\x94\x47\xd3\x62\xfa\x6d\xbe\
\x9a\x7e\xa7\x77\x9d\x50\xaf\x42\x67\xe2\xef\xe7\xe6\xb4\xd1\x7c\
\x51\xf8\xfb\x07\x07\x99\x36\x3b\xce\xee\xfd\x6a\xd5\xfc\x09\x0f\
\x8f\x00\x60\xc5\x8a\x9f\x1d\x56\x7e\x3e\xe9\xd7\xd3\x59\xb1\xe2\
\x67\x06\x0d\x1a\x4a\x78\x78\x04\xd5\xaa\xf9\xdb\xda\xcd\xe4\x0d\
\x3c\x0d\x3c\x2d\x84\xf8\x44\x4a\xf9\x8f\x62\xbf\xc4\x5d\x46\x3d\
\x0b\x88\x1f\x54\x0d\x65\x41\xa4\x58\xa3\xa6\x0d\x3b\x2f\xf2\xeb\
\x1f\x17\x38\x7e\xea\xba\xaa\x19\x2e\x2d\x76\xfc\x79\xd9\xee\x73\
\xbc\xbc\xbc\x18\x3d\x6a\x2c\xd5\xab\x07\x90\x9d\x7d\x87\x77\x27\
\x4d\xc0\x68\x34\x77\x69\xfe\x18\x18\x82\x0f\x00\x3f\x70\x8d\x4b\
\x16\x33\x3b\x9d\x4e\xc7\x94\xf7\xa6\xe2\xe2\xe2\xca\xa5\x4b\xa9\
\x7c\xfd\xcd\xe7\x36\xad\xa0\xf3\xc8\x06\xca\x42\x33\x98\x0b\xc4\
\x01\xcb\xa5\x94\x9b\x9c\x95\xa9\xb6\x0b\xe8\x4e\x31\x2b\xff\x95\
\x49\x07\xf8\x7c\xfe\xa9\x92\x97\xa8\x84\xf8\xfb\x07\x10\x19\xd9\
\x9c\x87\x1e\x6c\x8d\xaf\xaf\xd2\x85\x25\x25\x27\xa9\x2a\x1f\xe0\
\x13\x6a\xd0\x2a\x6f\xe7\xfa\x83\x78\xf0\x2c\xe6\x45\x29\xa3\xd1\
\x48\x52\x72\x12\xb5\x42\xc3\xf0\xf7\x0f\xe0\x95\x97\xc7\xb1\xf7\
\xcf\x3d\x1c\x3a\x74\x80\x8b\x17\x53\xb5\x8f\x74\x41\x31\x69\x2f\
\x0b\x1e\x03\x5e\x17\x42\x7c\x2a\xa5\x7c\xc3\x19\x19\x6a\x05\xa0\
\x58\x2f\xb2\xe4\xf7\xf3\xe5\xa2\xf2\x1f\x7e\xb8\x3d\x3d\x7a\xf4\
\xc2\xdd\x4d\xad\xfc\x49\xbb\x7a\x45\xf5\xdb\x1d\x41\x4b\xcc\x1e\
\x6a\x5a\xe2\x81\x3b\x82\x5b\x16\x7a\x93\xb4\xab\x57\xa8\x15\x1a\
\x06\x28\x9a\xc2\x6e\x5d\x1f\xa5\x7d\xfb\x4e\xac\x5d\xbb\x92\x9d\
\x3b\xb7\x95\xde\x4b\x14\x8e\x71\x42\x88\xdd\x52\xca\x1f\x4b\x9a\
\x91\x01\x40\x08\xf1\x0f\x60\xd0\xb3\xfd\x6b\x35\x99\xfe\x6e\x0b\
\x7c\x2a\x3b\x36\xcb\xd2\xf2\xdd\xd2\xf8\x92\x96\xc3\x29\xec\xdc\
\xb9\x8d\xe8\xe8\x5d\xd4\xab\xd7\x90\x0e\xed\xa3\xa8\x5d\x3b\x1c\
\x80\x80\x00\xd5\xc2\x26\xb7\x90\xac\x23\x83\x9e\x79\xaa\xf3\x75\
\x64\xa8\x2a\x1f\xa0\x7a\x75\x73\x9a\xf8\xf8\x33\x6c\xdd\xb6\x89\
\xb8\xb8\xe3\xe4\xe4\x94\x1b\x25\xe0\x48\xa0\xe4\x02\x20\x84\xe8\
\x80\xb2\x2b\x88\x79\x3f\x9f\x23\x38\xd0\x93\x0f\xfe\xd1\xa4\x48\
\x99\x1c\x3c\xae\x1e\x5c\xf5\xed\x5a\x93\x86\x11\xa5\xbf\x26\xf2\
\xe5\x82\xd3\x5c\xcf\x50\x9b\x30\xe6\xe4\xe4\x70\xec\xd8\x61\xe2\
\xe2\x8e\x33\xfc\xd9\xbf\x51\xaf\x5e\x03\x02\x02\x6a\x60\x30\xb8\
\x90\x93\x63\x8e\xfb\x0f\x92\x59\x85\xa2\x34\xda\x48\x86\x2a\x0f\
\x83\xc1\x85\xc0\xc0\x1a\x80\xa2\x0b\x98\x3b\x6f\x96\xfd\x8a\xf7\
\xf6\x86\xe1\x23\x9d\xf8\x56\x76\x38\x79\x02\xd6\xaa\x26\x1c\x4e\
\x51\x38\x19\x80\xce\x96\x01\x0e\xcc\xae\xed\x72\xeb\xb6\x5a\x2f\
\x32\xa4\x4f\x28\x83\xfb\x84\xd8\x89\xed\x3c\xea\x84\x56\xe2\xe5\
\x77\x0f\x90\x9d\x63\xad\xaf\xc8\xc9\xc9\x61\xf1\x92\x05\xfc\xeb\
\xad\xc9\x18\x0c\x06\x3a\x75\xec\xcc\xfa\x0d\x6b\x4d\xf7\x73\x91\
\xac\xe3\x86\x55\x3a\x80\x4e\x1d\x3b\xa3\xd3\xe9\x4c\x79\xd8\xad\
\x7c\x17\x17\x98\xf4\x3e\x0c\x1d\xee\x94\xf7\x71\xc8\xaf\x3f\x69\
\x05\xc0\x29\xc6\xae\x06\x34\x86\x1f\x95\x3c\x1d\x1b\x7a\x96\x27\
\x5e\x18\x1c\x4e\xf7\xf6\x81\xfc\xb8\xf2\x3c\x4b\x57\x25\xb2\xf7\
\x90\xda\xe8\xf4\xe6\xcd\x0c\x4e\x9e\x3c\x41\xc3\x86\x8d\xe9\xdc\
\xb9\x3b\xc7\x8e\x1f\x25\x29\x29\xd1\x4e\x6e\x0a\x35\x6b\x06\xd3\
\xb9\x73\x77\x00\x4e\x9e\x3c\xc1\xcd\x9b\x19\xd6\x91\x9a\xb7\x84\
\x3e\xfd\xa0\xef\x13\x10\x5c\x08\x41\x3f\x1b\x0f\x31\x16\x66\xf4\
\x9e\x95\xa0\x7b\xde\x4e\xbc\x5f\x7f\xb2\x9d\xc6\xd7\x0f\xda\x77\
\x84\x62\xaa\xb1\x0b\x8b\x01\x8d\x89\x54\x8d\xea\x7f\x2d\x2b\xea\
\x5a\x41\x9e\xbc\x39\xaa\x3e\x6f\x8e\xaa\xcf\x91\xb8\x74\x9a\xf7\
\x5a\x47\xae\x0d\x4d\xa0\x5e\xaf\x67\xd4\x0b\x2f\xb3\x7c\xc5\xcf\
\xec\xdf\x6f\x7b\x7d\xa4\x65\xcb\x87\xe8\xdb\xa7\xbf\x69\x0f\xa4\
\xa5\x16\x11\x50\x2a\xe3\x8f\x2d\xd0\xb8\x69\xd1\x0a\xb9\x75\x13\
\x8c\xb7\x18\xb4\x07\x87\x98\x05\x60\x8c\x83\xee\x23\xb4\x16\x6c\
\xdc\x09\x95\xb4\x8e\x58\x9d\x87\x95\x00\x04\xfa\x97\xad\x00\xc4\
\x1c\x2b\xde\x66\x17\xbd\x5e\xd0\xb4\xbe\x7a\x45\x6f\xd5\xa6\x14\
\x55\xe5\x03\xaa\x55\x3f\x0f\x0f\x0f\x9e\x1a\x34\x94\x87\x1e\x6c\
\xcd\xa9\x53\x71\x24\xe6\x59\x33\x07\x07\x85\x10\x11\x51\xcf\xa4\
\x34\xb2\x95\x16\x00\xa3\x11\x56\xaf\x2c\xba\x00\x14\x97\x84\x73\
\xf0\xeb\xb2\x52\xed\x62\xac\xba\x80\xc0\x6a\x65\x2b\x00\x2d\x7a\
\xaf\x2b\x56\xba\x2a\xde\x2e\x5c\x3b\xd8\xcf\xf4\xfb\x66\x66\x0e\
\xff\xfb\xf6\x84\x2a\x4e\xad\xd0\x30\x93\xd1\x87\x25\xe1\xe1\x11\
\x56\x95\x6d\x8b\xda\xb5\xeb\x50\x2b\x34\x8c\x73\x09\x67\xcd\x81\
\xdf\x7e\x01\xa3\xc6\x80\x56\x38\x4a\x8b\xb3\xa5\x3b\xc3\xb2\x6a\
\x01\xaa\xfa\xfe\x35\x77\x5b\x7f\xb5\xe8\x0c\x97\xae\xaa\x9b\xec\
\x6e\xdd\x7a\x92\x98\x78\x9e\x75\xeb\x57\x73\xfa\xf4\x49\xf4\x3a\
\x3d\xa1\xa1\x61\x44\x45\x75\xb5\x12\x80\x33\x67\x4e\xb1\x69\xd3\
\x7a\x12\x12\xce\x92\x6b\xcc\xa5\x4e\x9d\xba\x74\xeb\xda\x93\x6e\
\xdd\x7a\x32\x6b\xf6\x97\xe6\x88\xd7\xaf\xc3\xac\xaf\xe0\x0d\x95\
\xd1\xb4\x63\x5c\xdd\xa0\xb2\xc5\xac\xc8\xdb\x62\x8d\xa1\xb2\x66\
\xb6\x94\x99\x09\x65\x38\xd5\x34\xa0\x71\x03\x73\x33\xd3\xe6\x4a\
\x67\xb9\x67\xf5\xe6\x64\xd5\x6f\x17\x17\x17\xb6\xef\xd8\x42\x6c\
\xec\x31\x53\x58\x36\xd9\xc4\x9d\x8c\xe5\xf4\x99\x93\x0c\x1b\x3a\
\x82\x46\x8d\x94\xe9\xee\xb1\x63\x47\x58\xb0\x70\x8e\xca\x5e\x31\
\x36\xf6\x18\xb1\xb1\xc7\x68\xd0\xa0\x11\x2e\x2e\x2e\x64\x67\x5b\
\x4c\x37\xb7\x6d\x29\x9a\x00\x0c\x1e\xaa\x5c\xb6\x38\xa1\xd9\xc3\
\x30\x64\x00\x6c\xde\x50\xf8\xbc\x4b\x88\x0e\x50\x19\xc3\x5d\xbb\
\xfe\xd7\xf4\xd9\x54\x33\x40\xed\x7b\x3a\x3b\x3b\x5b\x55\xf9\x96\
\xe4\xe6\xe6\xb2\x24\x6f\x7a\x97\x93\x93\xc3\x92\x25\x0b\x6c\x6e\
\x7b\x07\x45\x10\x54\x95\x0f\x50\x33\xc8\x29\x65\x2e\x0f\x18\x50\
\x04\xa0\x66\x7e\xc0\xb5\x1b\x36\xf7\x86\x96\x1a\x69\x31\xfd\x0a\
\x8e\x64\x03\xad\x25\xf1\x87\x6f\x36\x65\x6b\xf4\x25\xce\x5d\xb0\
\x6f\xa1\x64\xc9\xad\xdb\xb7\x88\x8b\x8b\x35\xfd\x5d\x68\x6a\xd4\
\x84\xb7\xde\x2d\x7c\xfc\x72\x8e\x01\xcd\x9e\xf3\x6b\xd7\xcb\x56\
\x00\x8a\xaa\x76\xb6\x47\x50\xa0\x07\x9b\x16\x75\xa2\xe3\xe0\xcd\
\x9c\x4f\x56\x0b\x81\xbb\x9b\x9e\xbf\x3d\x55\x9b\x55\x9b\x93\x39\
\x93\x60\xde\x04\x72\xe8\xf0\x01\xeb\x8c\x6a\x85\x29\x53\xb4\x79\
\xdf\x83\x56\x30\x6a\xd4\x84\x9f\x7e\x2f\xdc\xdc\xff\x2f\x82\x01\
\xb8\x60\x19\x90\x72\xa9\x6c\x0d\x5d\x9a\xf7\xb2\x3d\x0b\x98\xf8\
\x4a\x43\x06\xf6\x0c\x66\xce\xb2\xb3\x4c\x9f\x73\xd2\xea\xbe\x77\
\x25\x03\xdb\x7e\x8c\x52\x85\xd5\x0e\xa9\xc4\xa6\x1f\x3a\xd2\xf7\
\x85\x1d\x1c\x3b\x79\x1d\x77\x37\x3d\xa3\x86\x84\x33\x7e\x74\x7d\
\x6a\x06\x78\x50\xf9\x7f\x2e\xfc\xe7\x0b\x93\x13\x32\x0e\x1f\x3e\
\x68\xfd\xe0\xc7\xfb\x2b\x5f\xf8\xcb\xaf\xc3\xe7\xff\x67\x16\x84\
\x46\x8d\x61\xd6\x7c\xc8\x5b\x5f\x28\x35\xa6\x7e\x02\x19\x16\x1a\
\xca\x6a\xa5\x6b\x56\x67\x40\xf1\x26\x39\x28\x3f\x60\xd7\xfe\x2b\
\xf6\x63\x97\x02\xf6\x54\xcf\x97\xf3\x46\xf4\xa9\x97\x6f\xd9\x8c\
\x53\xc5\xdb\x76\xcb\x51\x27\xd4\x8b\xc3\xab\xbb\x73\xf8\x44\x3a\
\x11\xb5\xbc\x54\x9a\xcd\x27\x1f\x0b\x56\x09\x80\x4d\x15\x6f\x9f\
\xbc\x2e\x29\x20\x10\xa6\x4c\x85\xb7\x26\x41\x62\x02\x44\xd4\xb3\
\xee\x77\x4a\x83\x5a\x61\xa5\xff\x0c\x0b\xf2\x05\xc0\x44\x5c\xfc\
\x0d\x52\x2f\xdf\x22\xa0\x8c\xf5\x01\xce\x44\xa7\x13\x34\x6b\xe8\
\x63\x15\xde\xbc\x91\x0f\x75\xc3\xbc\x38\x79\xd6\x86\x7a\x17\x20\
\xac\x36\x34\x89\x54\x87\x79\x78\x40\xdd\xfa\xb6\xe3\x97\x06\x33\
\x3e\x85\xa3\x87\xcd\xbf\x7b\x3c\x06\x4f\x0c\xb4\x1f\xbf\x84\xe8\
\x80\x18\x6d\xe0\xb6\xbd\xf7\xae\xd3\xce\xa9\xe3\x23\xed\xdf\x7c\
\xf6\xf9\xb2\x2b\x88\x3d\x76\x6e\x87\xdf\x7e\x36\x5f\xc7\xec\x6e\
\x74\x75\x0a\x06\x29\xe5\x05\x21\xc4\x05\xc0\x34\xb7\xd9\xb6\xf7\
\x32\x03\x7b\x06\x97\xea\x83\xf3\xf9\xf0\x9f\xb6\xd5\xaa\x6d\x5a\
\x54\x05\xa0\x53\x6b\x7f\x9b\x71\xdc\x5d\xf5\x56\x61\x85\xa1\x7f\
\x8f\x20\xf6\xaf\xe8\xc6\x8c\xb9\x27\x99\xb3\xec\xac\xf9\x86\x4e\
\x07\xfd\x9f\xb4\x9b\x8e\x63\x47\xe1\x97\xa5\xc5\x7a\xa6\x43\xde\
\x9e\xec\xfc\x3c\x8b\x40\x7e\x07\xf9\x0b\xf0\x4a\x7e\xe0\xaa\x4d\
\xc9\xfc\xdf\x3b\xcd\xcb\xa4\xcb\x9b\xf0\xa2\xe3\x73\xa7\xda\xb4\
\xa8\x6a\x12\x06\x67\xd1\xa2\xb1\x75\xf7\xc0\x83\xad\x94\x7e\xdf\
\x1e\x71\xb1\x30\xf3\xff\x9c\x5a\x0e\xa0\xdc\x08\xc0\x52\x2c\x04\
\xe0\xd4\xb9\x0c\xd6\x6d\x4f\xa5\x7b\x7b\xfb\xbe\x14\xce\x27\x67\
\xb2\x7e\xc7\x45\x6e\xdf\xce\x2d\x70\xff\x80\x23\xbe\x5a\x78\xba\
\x58\xe9\xdc\xdc\xf4\x8c\x18\x18\x56\xac\xb4\x37\x6e\xe6\xf0\xe3\
\x2a\xcd\xb2\x70\xef\xc7\x8b\x95\x97\xd3\xe9\xda\x1d\x42\x2c\xfc\
\x25\x3c\x60\x6b\x97\xba\xf3\xc8\x17\x80\xed\x40\x0a\x16\x0b\x43\
\x33\xe7\x9d\xb2\x2b\x00\x3b\xf7\x5f\xa1\xe3\x53\x9b\x9c\x62\xf9\
\x3b\xe6\x9d\xfd\x05\x47\xb2\x41\x15\x6f\x97\x62\x0b\xc0\xe2\x15\
\x09\xa6\x7d\x0c\x80\x32\xba\xef\xd5\xb7\x58\x79\x39\x9d\x91\x85\
\x3d\x15\xc6\x39\x18\x00\xa4\x94\xc6\x3c\x1f\x73\xaf\xe5\xdf\x58\
\xb9\x29\x99\xf8\xf3\x37\xa9\x1d\x62\xbd\x16\xfd\xe6\x7f\x0e\x96\
\x89\xd9\x77\x69\x31\x6b\x89\x66\x85\xad\x79\xcb\x82\xd5\xbb\x01\
\x81\x60\xdf\xd9\xf5\x5f\x16\x4b\xf3\x9f\x4f\x81\x97\xf3\xc3\x8c\
\x46\xc9\xb4\xd9\x71\x4c\x9b\xd8\x0c\x17\x83\xd9\x2a\x25\x31\x25\
\x8b\x5d\x07\x1c\xeb\x0a\xa4\xc6\xc0\x52\x4a\x45\xc1\x54\x1e\x8c\
\x4d\xd6\x6c\x49\x21\xfa\xa0\xda\x72\x88\x41\x43\x0a\x4e\xd8\xb6\
\x9d\x72\xdd\x63\x98\x04\x40\x4a\x79\x2e\xcf\x35\xdc\x33\xf9\x61\
\x33\xe7\x9d\xe2\xfb\x65\x67\x79\xe9\x99\x3a\xfc\xe3\x85\xfa\xf8\
\xfb\xb9\xb1\x6c\x55\x22\x96\x7b\x49\x84\x10\xe8\x74\x3a\xd5\x62\
\xca\xee\x03\x57\x19\xd2\x27\x94\x1b\x37\x73\x98\x39\xef\x14\xd3\
\x66\xc5\x71\x39\xed\x36\xad\x9b\xfb\xb1\xf8\xb3\x36\x84\x05\x9b\
\x5b\x95\x5a\x41\x8e\x4e\x95\xb3\x4f\x41\x0e\x25\x6d\x71\x27\xdb\
\xc8\x6b\x53\x34\xb3\xde\xd0\x5a\x30\xf4\xd9\x82\x13\x67\x66\x82\
\xf5\x46\x51\xdb\x54\xad\x06\x9e\x9e\x90\x7e\x4d\x59\x3e\x76\x44\
\x88\xf3\xfc\x23\x15\x07\xed\xf6\xf0\xc6\xc0\x61\x34\x4b\xc4\xa0\
\xd8\x0a\x8e\x19\x5a\x87\x4d\xbb\x2e\xb2\xef\x88\x79\x01\xb1\x6e\
\x44\x3d\x7c\xfd\xaa\x12\x1d\x6d\x3e\xcd\x24\x28\xd0\x83\x31\x43\
\xeb\x30\x6d\x76\x1c\x57\xaf\xa9\x57\x17\x43\x6b\x7a\xb2\x69\x51\
\x27\xc2\x43\x4b\xcf\xcc\xc9\x1e\x1f\x7f\x7d\x82\xf1\x1f\x1d\x52\
\x07\x7e\xfe\x0d\xf4\x1f\x64\x3b\x81\x25\xbf\xfe\xe4\xd8\x7c\xcb\
\x92\x2f\x67\x43\xbf\x01\xf0\xc1\xe4\x82\x67\x0e\xc9\x85\xb4\x88\
\xb2\x7e\x7e\xba\x94\xd2\xc6\x74\xa6\x68\x68\x2d\x0e\x93\x00\xab\
\x8d\x70\x60\xb6\xb8\xb1\xac\x7c\x80\x26\x4d\x9b\xd1\x2c\xb2\x85\
\x2a\xec\x42\x4a\x16\x13\x3f\x39\x62\x55\xf9\x00\x09\x49\x99\x74\
\x1a\xb2\x99\xb3\x89\x8e\x4f\x1b\x71\x36\x49\xa9\x59\xfc\x7b\x86\
\x66\x79\xb8\x71\x53\x78\xc2\xc1\xdc\xff\x3e\x40\x2b\x00\xef\x60\
\xe7\xc4\x10\x5b\x08\x21\x68\xd2\x38\x92\xf0\xf0\x08\x2a\x55\x2a\
\xfc\x66\xd2\xf3\xc9\x99\xbc\xf1\x81\x8d\x85\x98\x52\x22\x37\x57\
\xf2\xdc\x9b\x7b\xc9\xc8\xd4\xe8\xfe\x27\x4e\x2e\x1b\xfd\x7e\x39\
\x46\x2b\x00\xb5\x2d\x7f\x08\x21\xa8\x53\xa7\x2e\xb6\x10\x42\xf0\
\x78\xdf\x01\x78\x79\x79\xa3\xd3\xe9\x18\x38\x60\xb0\xc9\x9a\x56\
\x4b\x70\x70\x08\x1e\x1e\x6a\x83\x8d\xf8\xf3\x65\xd7\x02\x8c\x7b\
\x3f\x86\x75\xdb\x35\x7b\xfa\x3a\x76\xbe\x27\x47\xf5\x45\x45\xbb\
\x09\x60\x1d\x60\xb2\xd0\x90\x52\xd2\xba\x55\x5b\x1e\xed\xd1\x8b\
\x0d\x1b\xfe\x20\xf6\x84\xd2\x84\x06\x05\x85\xd0\xa5\x73\x77\x1a\
\x5b\x58\xc7\x36\x6a\xd4\x84\xe7\x47\x8c\x66\xfd\x86\x35\xc4\xc7\
\x2b\x07\x8f\x06\x07\x87\xd0\xb5\xcb\xa3\xf8\xf8\xf8\x32\xfd\xb3\
\xff\xaa\x1e\xd4\xed\x91\x02\x1d\x59\x3b\x85\xaf\x17\x9d\x61\xc6\
\x5c\xcd\xbe\xc5\xc0\x40\xf8\xec\x4b\xdb\x09\xec\xd1\xbd\x27\x44\
\x1f\x2a\x38\x1e\x28\x83\x40\x80\x57\x5e\x2f\x1f\xeb\x0b\x0e\xd0\
\x0e\x02\xdd\x50\x1c\x44\x9b\x2c\x84\x02\xaa\x07\x32\x6e\xdc\x78\
\x47\xe7\xfc\x58\x21\xa5\xc4\x68\x34\x9a\x5a\x84\x05\x0b\xe6\x70\
\xf8\x88\xb9\xc9\xf7\x70\xd7\x13\xbf\xf5\xb1\x52\x5f\x71\xdc\xb0\
\xf3\x22\x8f\x0e\xdf\xaa\xd6\x59\xb8\xb8\xc0\xcf\x2b\x15\xd5\xef\
\x5f\x89\x52\x1a\x04\x6a\x1d\x44\xdc\xce\x3b\x7f\x67\x7a\x7e\x58\
\xea\xc5\x14\x8e\x1e\x3b\x6c\x3a\x31\x2c\x9f\xcb\x97\x2f\x71\xfa\
\xf4\x49\x12\x13\x13\xc8\xcd\xcd\x25\x28\x28\x84\xda\xb5\xeb\x50\
\xb3\x66\x10\x42\x08\x53\xe5\x5f\xba\x94\xca\x91\xa3\xea\x2f\xe7\
\xc5\xa7\xeb\x94\x7a\xe5\xff\xbc\xf6\x02\x43\x5f\xdf\x63\xad\xb0\
\x9a\xf2\x61\xf1\x2a\x7f\xd7\x0e\xf8\x6c\x9a\x73\x0a\x67\xc9\x0f\
\x76\x76\x06\x95\x11\xb6\xf6\x81\x2d\x05\xa6\x82\x79\xff\xf4\xf5\
\x74\xf3\xe6\x4f\xa3\xd1\xc8\xc6\x8d\x7f\xb0\x61\xe3\x1f\xaa\x3d\
\xf7\xfb\xf6\xef\x45\x08\x41\x9b\x36\xed\x78\xac\x67\x5f\xd3\x61\
\xd2\x57\xd3\xae\x5a\x79\x0e\x7d\xb6\x7f\x2d\xa7\xbe\x84\x96\xcf\
\xbe\x3f\xc9\xb8\xf7\x0f\x62\x34\x6a\x2a\x7f\xd0\x10\x78\xae\xb0\
\x27\xb0\x6b\x48\x4d\x29\x53\x6b\xdd\xb2\x42\x07\xca\xa9\x94\x79\
\x07\x26\xac\x44\x39\x77\x56\x35\x62\xcb\xaf\x4c\x80\xf9\x0b\xe6\
\xb0\x6e\xfd\x1a\x2b\x87\x0b\xa0\x34\xfd\xbb\x76\x6d\xe7\xeb\x6f\
\x66\x98\x14\x43\x06\xbd\xb5\x8c\x3d\xd0\x77\x3d\x1d\x07\x6f\x66\
\xe6\xbc\x53\x24\x5f\x74\x9e\x09\x9a\x94\x8a\xdb\x98\xd7\xa6\xc4\
\x58\x57\xfe\x23\x1d\xe0\xe3\x4f\x6d\x27\xbc\x8f\xd1\x09\x21\x7a\
\xa2\xf4\xfb\x73\x50\x3c\x50\xa8\x54\x6c\x06\x83\x81\x86\x0d\x15\
\xfb\xf9\x3f\xf7\x45\x73\xec\xd8\x61\xab\x4c\xb4\x24\x26\x9e\x67\
\xe3\x26\xc5\xd6\x2f\x2c\x2c\x9c\x2a\x9a\x5d\x34\x46\xa3\x64\x6b\
\xf4\x25\xc6\x4e\x3e\x40\xed\x0e\x2b\x9d\xe6\x5f\xe0\x8b\x05\xa7\
\x6c\xfb\x0c\xea\x37\x00\x16\x2d\x03\xb7\xbb\xaf\x8a\x2e\x6f\x18\
\x50\x7c\x03\xfa\xda\xba\x29\x84\xa0\x4f\x9f\x27\xa8\x94\xb7\x39\
\x71\xcd\x9a\xdf\xad\xe2\xb4\xc4\x83\x4a\xe8\xd8\xce\x4d\xd5\x0a\
\xc0\xa6\x4d\xeb\xe8\xd8\xa1\x33\xae\xae\xae\x0c\x18\x30\x98\xb9\
\xf3\x66\xd9\xb4\xbd\xbf\x7d\xc7\xc8\x1b\xef\x1f\x64\x58\xbf\x5a\
\xb8\xba\x94\x6c\x27\xec\x67\xdf\xdb\xf0\x52\x32\xea\x25\x98\xfc\
\x41\xc9\xe7\xfb\xf5\x1a\x28\xa3\xfa\x7b\x0c\x03\xd0\xd0\xd6\x8d\
\xd0\x90\x5a\x3c\xf2\x48\x47\x9a\x35\x53\x9c\x39\xa7\xa7\x5f\xb3\
\x72\x94\x34\x81\xea\x3c\x97\x27\x3b\xeb\xc8\x60\xac\x85\x81\x71\
\x6e\x6e\x2e\x49\x49\x89\x84\x85\x85\x53\xbf\x7e\x43\x9e\x7d\x76\
\x24\x1b\x37\xae\x23\x21\xe1\xac\xd5\x98\x20\xfd\x46\x36\x49\xa9\
\x59\xaa\x35\x82\xe2\x50\xa3\x96\xef\x95\xb8\xf8\x1b\x8a\xf5\x88\
\x10\xf0\xce\x14\x18\x33\xb6\x80\x54\x85\xa4\x51\x63\xe5\xba\xc7\
\xb0\xea\xa0\x9b\x37\x7f\x80\x47\x7b\xf4\x32\x39\x58\xca\x27\x25\
\x45\xbd\xf5\x4a\x07\x0c\xc0\xdc\xb4\x77\xc1\x0b\x3f\xf4\x5c\xb5\
\x38\x39\x2c\x25\x25\x99\xb0\x30\xc5\x8c\xba\x41\xfd\x46\x34\xa8\
\xdf\x88\xf4\xf4\x6b\xec\xd8\xb1\x95\x2d\x5b\x8b\xe7\x58\x6b\xdf\
\x91\x34\x42\x6b\x7a\xaa\xdc\xd2\x19\x11\x57\x26\xba\xf4\x8b\xdd\
\x32\x7b\x7a\x3b\xbe\x9f\x05\x07\xf6\x29\x03\xbe\x6e\x76\x4f\xc3\
\xad\x20\x0f\x2b\x01\x88\x89\xd9\xc7\xc5\x8b\xa9\x34\x8b\x6c\x4e\
\x8b\x16\x0f\x52\xa5\x8a\x32\xd5\xac\xe2\xa3\x9e\x72\x1a\x81\x18\
\xb2\x68\x8f\xf2\xd5\x9e\xe5\x8e\xaa\xf2\x01\x53\x5a\x50\x1c\x35\
\x1e\x3e\x1c\xc3\xa1\xc3\x31\x9c\x3e\x6d\x6d\xe7\x5f\x18\xba\x3f\
\xbb\x95\x75\xdb\x53\x71\x73\xd5\x31\xe7\xbf\x0f\x31\xb0\x4f\xed\
\xf3\x33\x0c\x9d\xe3\x27\xba\xf4\x7b\x28\x0b\xd7\x76\x18\x80\x97\
\x5e\x2d\x30\x9f\x0a\xcc\x18\x80\x2b\x28\xa7\x65\x99\x48\x4a\x4a\
\x24\x29\x29\x91\xad\xdb\x36\x33\x7a\xf4\x2b\x04\x54\x0f\xa4\xba\
\x7f\x00\x2e\x2e\xae\x64\x67\x9b\x17\x78\xde\x23\x95\x11\xf8\x51\
\x09\xc1\x77\x58\xfb\xdb\x0f\xce\xdb\x41\x93\x91\x91\xc1\xd7\xdf\
\xcc\xb0\xe5\x62\x0d\x50\xf6\xfa\xfb\x54\x76\xbc\x2b\x79\xdf\x91\
\x34\x93\x3a\xf7\xf6\x1d\x23\x63\x3f\xbf\x90\xf1\xcc\xa0\xd5\x35\
\x73\xd1\xdd\x3b\xdb\x74\xee\x02\x3a\xc0\xae\x4e\xf4\xe6\xcd\x0c\
\xe6\xcc\xf9\x06\xa3\xd1\x88\x4e\xa7\xa3\x45\x8b\x07\x54\xf7\x13\
\xc9\xe6\xdf\xa4\x32\x81\x14\xe2\x50\x6f\xcd\xae\x5b\xb7\x3e\xde\
\xde\xca\xd6\xe7\x45\x3f\xcc\xb5\x5b\xf9\xa0\xf8\x14\x72\xb4\x45\
\x4c\x42\x86\xac\x1e\x78\x40\xef\xa2\x37\xcd\x3d\xaf\x34\x6e\xe3\
\x95\x8b\xae\x78\xa6\xc1\x15\x98\x30\x48\x29\xdf\x11\x42\xfc\x89\
\x72\x6c\x6c\x0f\x34\xce\x87\xd2\xd2\xae\x12\x1b\x7b\x8c\x46\x8d\
\x9a\xd0\xab\xd7\xe3\xc4\xc5\xc5\x16\x78\xba\x86\xbb\x9b\x3b\x03\
\x07\x2a\x56\x36\x29\xa9\xc9\x36\x9b\x7c\x9d\x5e\x27\x23\x9b\x55\
\x4f\x1f\xf2\x78\xd8\x8d\xd7\x87\x85\x61\x44\xba\xdd\xc2\x70\xed\
\xba\xf0\xb8\x71\x59\x78\xdd\x4a\xc2\x27\xe7\xac\xae\x2a\xbf\xe8\
\x5a\xf8\xad\xd7\x37\x6a\x98\x53\x4b\xd7\x82\xe9\x9d\xe0\xeb\xcf\
\x21\xbc\x0e\xfc\xf3\x6d\xfb\x05\xc8\xca\x82\x6c\x27\xef\x72\x76\
\x71\x55\x36\x89\xdc\x63\xe4\xdb\x04\xfe\x06\xfc\x26\x84\xf0\x06\
\xfa\x00\xdf\x02\x26\x53\x9d\xfc\x53\xc2\xdc\xdd\xdc\x19\x31\x62\
\x14\x8b\x17\xcf\x27\x39\xd9\xf6\xd9\xcc\xbe\xbe\x7e\x3c\x35\x68\
\x28\x3e\x79\xfd\x7f\xba\xad\x23\x64\x3e\xf8\x18\x63\xbf\x01\x22\
\xc6\xaf\xaa\x4f\x0c\xf8\x58\xec\xb4\x77\xbc\x14\xfd\xc4\xc0\xc2\
\xed\x92\x19\xf7\xb2\xb2\xa9\xc2\x99\x3c\xde\x1f\xbe\xfa\xce\xb9\
\x79\x96\x03\xb4\x6b\x01\x37\x84\x10\x17\xb1\xa8\x7c\x00\x9d\xde\
\x3c\x3f\x0f\x0c\xa8\xc1\xd8\x57\xfe\xce\xd6\xad\x9b\x88\x3b\x19\
\x4b\xd2\x85\x44\x72\x8d\xb9\xd4\xac\x11\x44\xed\xf0\x08\x3a\x47\
\x75\x53\x1d\x21\xe7\x62\x7d\xdc\x8a\x32\x45\xf3\x73\xae\xad\x7f\
\x05\xc5\xc3\xd6\x5a\x80\xea\xa0\x28\x77\x77\x77\x22\x9b\xaa\x2d\
\x7e\xf4\x7a\x3d\x51\x51\x5d\x89\x8a\xea\x8a\x94\x12\x29\xa5\xdd\
\x53\x39\xc2\xc2\xc2\xf1\xf7\xaf\xce\xa5\x4b\x17\xcd\x81\x33\x3e\
\x85\x61\xc3\x95\x66\xb5\x82\xbb\x8a\x4a\x00\x84\x10\x5d\x81\x47\
\x2c\xc3\x1e\x79\xa4\x13\xae\xae\xae\x44\xef\xdd\x4d\x6c\xec\x51\
\xa4\x54\x46\xf7\xad\x5b\x3d\x8c\x97\x97\x97\xe9\xe0\x25\x80\xec\
\xec\x3b\xec\xdf\xff\x27\xa7\xcf\x9c\x02\x29\xa9\x15\x56\x9b\xd6\
\xad\xda\xd2\xa5\x73\x77\x16\x2f\xb1\x38\x62\x27\x39\x09\x7e\x58\
\x50\xee\xd7\xca\xef\x07\xb4\xf6\x00\xdf\xa1\x0c\x06\xf3\x7f\xd3\
\xb3\x67\x1f\x76\xef\xde\xc1\x55\x8d\xc3\x65\x2f\x2f\x6f\x46\xbd\
\xf0\xb2\xc9\x0f\x6f\x46\xc6\x0d\xbe\xf9\xf6\x73\x52\x53\x53\x54\
\xf1\xbc\xbd\x2b\xd3\xbe\x7d\x27\x36\x6d\x5a\x4f\x56\x96\x85\xe3\
\x86\x87\x5a\xc3\xf2\xb5\x94\x0a\x89\xe7\x15\x8b\x5c\x67\x52\xc5\
\xe7\xee\x3a\x86\x28\x25\x7b\x00\xad\x00\x2c\x44\x39\x08\xa1\x50\
\x78\x7b\x57\xe6\xad\x09\x93\xd0\xeb\xf5\x7c\xfe\xc5\xff\x91\x60\
\xe9\x4e\xad\x20\x9a\x36\x53\x9c\x2e\x56\x50\x38\xca\xc2\x20\x04\
\xe5\x58\xf5\x7e\x68\x06\x81\xf6\xb8\x71\xe3\x3a\xa7\x4e\xc7\x51\
\xd5\xaf\x5a\xd1\x2a\x5f\x08\xf8\xfb\x84\xc2\xc7\x2f\x2a\xb3\xbe\
\x82\x3f\xa3\x0b\x8e\x07\xe6\x91\xfd\xdb\xff\x74\x6c\xf7\xff\x60\
\x2b\xf8\xdb\x8b\x25\x2f\x5b\x39\x43\x3b\x0b\x38\x24\x84\xe8\x03\
\xac\xc0\xb6\x10\xc4\x03\x5e\x80\xc9\x6f\xc9\xe1\x43\x31\xf8\x6a\
\x47\xf4\x6e\xee\x10\x51\x57\xed\xe8\x20\x1f\x21\xe0\x93\x19\xd0\
\xc3\xd6\xc1\x9b\x4e\xe2\xcf\xe8\xc2\x4f\x03\xf3\x05\xe0\x8f\xd5\
\x4a\xd7\xe1\x88\x7b\x5d\x00\x00\xa4\x94\x1b\xf3\x84\x60\x39\x90\
\xbf\x3c\x77\x1a\xf8\x0f\x30\x0f\xa5\x95\x30\x39\xbe\x3d\x72\xf4\
\x10\xee\xee\x1a\x05\x49\xe7\xae\x30\x7b\x3e\xac\x5d\x05\xff\x9b\
\x6a\x16\x04\x9d\x0e\xfe\xf7\x19\x0c\x19\x56\x0a\xaf\x72\x8f\xb0\
\x78\x21\x58\x2a\xce\x1e\x7e\x04\xa2\xba\x96\xda\xe3\xec\x9d\x1d\
\xbc\x51\x08\x51\x0f\xc5\x77\xd0\x19\x60\x95\x94\x32\x07\x40\x08\
\xb1\x14\x0b\x01\xc8\xca\xca\x22\x2b\x2b\x4b\x9d\x41\xd7\x1e\xca\
\x97\xfe\x68\x2f\xc5\xc5\xc9\xd6\xcd\x70\xf8\xa0\xb2\x3a\x57\xdf\
\xb1\x3f\x80\xfb\x9e\xdf\x7e\xb6\x36\x3d\x2b\x6b\x01\x00\x90\x52\
\x26\x01\xb6\xf6\x35\xed\x01\x12\x00\xfb\x9b\xda\x3c\x2d\x7a\x0f\
\x21\xa0\x63\x94\x72\x95\x15\x1d\xa2\x8a\xee\xcb\x77\xc0\x20\xb8\
\x7a\xd5\xfe\xfd\xe6\xce\x39\xe4\xba\xbc\x51\xe4\xc3\x01\xa4\x94\
\x52\x08\x31\x1d\xc5\x92\xa8\x7c\xf2\xf4\x33\xca\x55\x14\x26\xbc\
\x53\x3a\x65\x29\xe7\x14\xf7\x74\x88\x4f\x51\x8e\x31\xeb\x0c\xb8\
\x01\x5d\x4a\x90\x97\xf3\x39\x7a\x04\x2e\xa6\x14\x1c\xaf\x28\x54\
\x0f\x84\xc6\x45\x3b\x4a\xa7\x58\x34\x53\x6b\x5d\xb1\xb3\x33\xcb\
\x59\x14\xab\xd2\xa4\xa2\x3c\x98\x9e\x77\x21\x84\xb8\x06\x94\x91\
\xff\xf4\x42\x30\x63\xda\x5f\x77\x31\x68\xc2\xc4\xd2\x7f\x86\x05\
\xe5\xe7\xab\xbd\x9f\xc8\xc9\x51\xd4\xe1\x96\xd4\xa8\x09\x06\x03\
\x5c\xba\x08\xb7\x2c\x4c\xe5\x2b\x57\x56\xb4\x90\xa5\x44\x85\x00\
\xdc\x0d\x92\x93\xa0\x95\xc6\x5f\x61\xf4\x21\xc5\x59\xc4\xab\x63\
\xd4\xb3\x80\x57\x5e\x2f\x55\x4f\x62\xa5\x7b\x22\x51\x05\xe5\x9e\
\x7b\xb3\x05\xf8\xea\xbb\x7b\xd2\x78\xa3\x34\xa8\x68\x01\xee\x73\
\xee\xcd\x16\xa0\xbc\x53\xa3\xa6\xb5\xaf\x81\x1a\x79\x3b\xf2\x3f\
\xfb\xd2\x7a\x10\x58\x8a\x54\x08\xc0\xdd\xc0\x60\xb0\xef\x1d\xcc\
\xbf\xd0\x1e\x7a\x9c\x82\xd0\x6e\xd3\x2a\x56\x26\x5a\x3d\x40\xe3\
\x26\xe0\x5f\x36\x1e\x40\xee\x1b\x2e\xa5\x2a\x0a\x2e\x33\xce\x37\
\x08\x29\x56\x06\x42\xe8\x51\x8e\x9d\x29\xbc\x97\xa8\x0a\x9c\x41\
\x06\xe0\x23\xa5\x2c\xd1\x31\x6f\xce\x18\x04\xbe\x4b\x45\xe5\xdf\
\x0d\xbc\x50\xfe\xf7\x25\xa2\x44\x2d\x40\xde\xd1\xf3\x1b\x81\x8a\
\x1d\x3a\x77\x87\x5c\xa0\xb3\x94\x72\x6b\x71\x33\x28\xb6\x00\x08\
\x21\xfc\x50\x8e\x9b\x51\x9d\x2c\xf1\xcc\x33\xcf\xe2\xe5\x55\xf6\
\x5e\x40\xef\x07\x32\x32\x6e\x32\x7f\xfe\x3c\x6d\x70\x22\xd0\x4c\
\x4a\xe9\x60\x2d\xdb\x3e\x25\x99\x05\xcc\x46\x53\xf9\xc3\x87\x3f\
\xc7\x7b\xef\x4d\x29\x41\x96\x15\x14\x84\x4e\xa7\x63\xee\xdc\xef\
\x2d\x83\x82\x51\xea\xe2\x89\xe2\xe4\x57\xac\x16\x40\x08\xf1\x12\
\xf0\xb9\x65\x58\x83\x06\x0d\x59\xbe\x7c\x05\xae\xae\xae\xec\xdf\
\xbf\x8f\x0b\x17\x2e\xd8\x49\x0d\x1e\x1e\x9e\x74\xed\xaa\xb6\x72\
\x59\xbf\x5e\x63\x36\xae\x21\x28\x28\x88\x96\x2d\xcd\x9b\x53\x73\
\x72\x72\x58\xbd\x7a\x95\xc3\x72\x36\x6c\xd8\x88\x88\x08\xf3\x19\
\xc1\x97\x2e\x5d\x62\xf7\xee\x5d\x0e\x52\x40\x9b\x36\x6d\xf1\xf7\
\x37\x1f\xd5\x76\xea\xd4\x29\x8e\x1f\xb7\x7d\x02\x69\x3e\x3d\x7b\
\x3e\x86\xc1\x60\xfe\x96\x4a\xf3\xfd\xef\xdc\xb9\x43\xdf\xbe\x7d\
\x88\x8d\x3d\xae\x8d\xf2\xb2\x94\xf2\x0b\x87\x05\xb5\x45\xfe\xce\
\x9e\xc2\x5e\x40\x13\x20\x0b\x90\xf9\x97\x87\x87\x87\x5c\xbf\x7e\
\xa3\x3c\x7b\x36\x41\x9e\x3d\x9b\x20\x7b\xf7\xee\x23\x2d\xef\x6b\
\xaf\xa0\xa0\x20\x53\xdc\xfc\x2b\x28\x28\xc8\x61\x9a\xde\xbd\xfb\
\xa8\xe2\x1f\x3a\x74\xc4\x61\x7c\x40\x8e\x1f\x3f\x41\x95\x66\xee\
\xdc\x79\x05\xa6\x99\x3b\x77\x9e\x2a\xcd\xf8\xf1\x13\x0a\x4c\x73\
\xe8\xd0\x11\x55\x9a\xd2\x7e\xff\xf5\xeb\x37\x4a\x0f\x0f\x0f\x6d\
\x9c\x2c\xa0\x49\x51\xeb\xb3\x48\xb3\x00\x21\x84\x07\xb0\x18\xcd\
\x0e\xe2\x49\x93\x26\xab\xbe\xb4\x0a\x4a\x97\x88\x88\x08\x26\x4d\
\x9a\xac\x0d\x76\x07\x16\xe7\xd5\x51\xa1\x29\xea\x34\xf0\x53\x40\
\xe5\x28\xa7\x77\xef\xde\x0c\x1e\x5c\x88\x03\x17\x2a\x70\x2a\x83\
\x07\x0f\xa1\x77\xef\xde\xda\xe0\xc6\x28\x75\x54\x68\x0a\x3d\x08\
\x14\x42\x44\x31\x69\x7a\x00\x00\x04\x15\x49\x44\x41\x54\xf4\x07\
\x54\x07\xda\x04\x07\x07\xf3\xe1\x87\x1f\x59\xc5\xf5\xf4\xf4\xc4\
\xdb\xdb\xdb\x6e\x5e\xb6\xee\x79\x7b\x7b\x3b\x4c\xe3\xe9\xa9\xde\
\xa6\x20\x84\x70\x18\x1f\x50\xed\x52\x06\x30\x18\x5c\x0a\x4c\x63\
\xd0\xec\x66\x76\x73\x73\x2b\x30\x8d\xd6\x8d\x6e\x59\xbc\x3f\xc0\
\x87\x1f\x7e\x44\x4c\x4c\x0c\x89\x89\xaa\x03\xb0\x46\x0b\x21\xfe\
\x90\x52\x16\xca\x24\xaa\x50\x83\x40\x21\x44\x28\xca\x01\x93\x26\
\x77\x72\x06\x83\x81\x1f\x7f\x5c\xaa\x1a\x98\x55\x50\xf6\xec\xdf\
\xbf\x8f\x41\x83\x9e\xd4\x1e\x83\x9b\x06\x34\x97\x52\x26\x14\x94\
\xbe\xc0\x2e\x20\x4f\xd5\xbb\x10\x8d\x2f\xc1\x71\xe3\xde\xa8\xa8\
\xfc\x72\x40\xcb\x96\x0f\x30\x6e\xdc\x1b\xda\x60\x5f\x60\x61\x5e\
\xdd\x39\xa4\xc0\x16\x40\x08\xf1\x1e\x1a\x95\x63\xdb\xb6\x0f\xb3\
\x70\xe1\x22\xbb\x3e\x01\xee\x36\x39\x39\x39\xbc\xfb\xee\x3b\xac\
\x5b\xf7\x07\xb7\x6e\x95\xcc\x15\xad\xbb\xbb\x3b\xdd\xba\x75\x67\
\xca\x94\x7f\xab\xa6\x7a\xe5\x09\xa3\xd1\xc8\xd0\xa1\x4f\xb3\x6b\
\xd7\x4e\xed\xad\x29\x52\xca\x49\xb6\xd2\xe4\xe3\x50\x00\x6c\xa9\
\x7a\xfd\xfc\xfc\x58\xbd\x7a\x2d\x01\x01\xe5\x77\xb5\xef\xd3\x4f\
\xa7\x31\x7d\xba\x73\x4f\xf9\x7c\xed\xb5\xd7\x6d\x7d\x69\xe5\x86\
\xd4\xd4\x54\x7a\xf6\xec\xc1\x55\xf5\xe6\x96\x02\x55\xc5\x76\x05\
\xc0\x9e\xaa\x77\xf6\xec\x39\x74\xe9\xd2\x85\x9b\x37\x33\x18\x38\
\xd0\xb1\xbf\x9e\xe7\x9f\x7f\x9e\x27\x9f\x34\x1f\xc8\xb4\x6f\xdf\
\x3e\x26\x4e\x74\xe0\xdc\x09\x78\xff\xfd\x0f\x78\xe0\x01\x73\xd7\
\xb2\x74\xe9\x8f\x7c\xf7\x9d\x63\xf3\xae\x65\xcb\x96\x99\x8e\xac\
\xc9\xce\xce\xa6\x75\xeb\x56\x56\xfe\x0c\x4a\x8a\x9f\x5f\x55\xf6\
\xec\x89\xc6\xc5\x45\x19\x24\x96\xc7\xf7\xdf\xb0\x61\x03\x23\x47\
\x8e\xd0\xde\x76\xa8\x2a\x76\xd4\xa6\x59\xa9\x7a\x47\x8c\x18\x41\
\x97\x2e\xca\x31\x2b\xb9\xb9\xc6\x02\x35\x64\x97\x2f\xab\xb7\x5b\
\x67\x64\xdc\x28\x30\x4d\x46\xc6\x0d\xab\x3c\x0a\x4a\x93\x9b\x6b\
\xf6\x5c\xbe\x76\xed\x1a\xa7\x57\x3e\xc0\xd5\xab\x57\x58\xbb\x76\
\x0d\xbd\x7b\xf7\x31\x3d\xb3\xbc\xbd\x7f\x97\x2e\x5d\x18\x31\x62\
\x04\x73\xe6\xcc\xb1\xbc\xed\x50\x55\x6c\x53\x00\x84\x10\x63\xb0\
\x38\x3a\x06\xa0\x51\xa3\xc6\xbc\xf5\x96\x63\xe9\x2d\x0f\x2c\x5c\
\xb8\x50\xf5\xdb\xcd\xcd\x8d\xea\xd5\x1d\x1c\x0a\xed\x80\x8b\x17\
\x53\xb8\x7d\xdb\xec\xff\x70\xe1\xc2\x85\x26\x01\x28\xaf\xbc\xf5\
\xd6\xdb\xec\xd9\x13\xcd\xb1\x63\x47\x2d\x83\xfb\x09\x21\xc6\x48\
\x29\xad\x7c\x42\x5a\x09\x80\x10\xa2\x09\xa0\x3a\x1a\xc3\xd3\xd3\
\x93\x99\x33\x67\xaa\xce\x0d\x28\x8f\xc4\xc7\x9f\xb1\x1a\x08\xf9\
\xf9\x55\xc3\xc7\xc7\xa6\x33\xf4\x02\xc9\xc9\xc9\x21\x39\xd9\xac\
\xd3\xdf\xb5\x6b\x27\xf1\xf1\x67\xa8\x5d\x3b\xbc\x44\xe5\x2c\x4d\
\x5c\x5d\x5d\x99\x39\x73\x26\xbd\x7b\xf7\x22\x33\x53\xb5\xb6\x30\
\x4d\x08\xb1\x4d\x4a\xa9\x32\x2b\xd2\x3a\x89\xb2\xa9\xea\x9d\x3c\
\xf9\x3d\xc2\xc3\xeb\xa8\x1e\xe4\xee\xee\xce\xf8\xf1\x8e\xbd\x7c\
\xb4\x69\xd3\x46\xf5\x3b\x3c\xbc\x4e\x81\x69\xb4\xcf\x69\xd3\xa6\
\x4d\x81\x69\xdc\xdd\x95\xe2\x6a\xbf\x7e\x21\x74\x56\x4e\xaf\x8b\
\x82\xaf\xaf\x1f\x29\x29\xc9\x48\x69\xee\x62\x16\x2e\x5c\xc8\xc4\
\x89\xef\x94\xcb\xf7\xb7\xcc\x63\xf2\xe4\xf7\xf8\xe7\x3f\xdf\x54\
\x45\x43\x51\x15\x3f\x24\xa5\x34\xed\xe7\xd7\xfa\x08\xfa\x0a\x8d\
\xb6\xaf\x4f\x9f\xbe\xcc\x98\x31\xd3\x61\x01\xca\x03\x77\xee\xdc\
\xa1\x75\xeb\x87\x48\x4b\x33\x7b\x31\xf5\xf1\xf1\x25\x24\xa4\x64\
\xc7\xd3\x9c\x3f\x7f\x4e\xe5\x19\xd5\xd7\xd7\x97\x3d\x7b\xf6\x96\
\xfb\xd6\x10\x60\xec\xd8\x57\x58\xb1\x62\xb9\x36\xf8\x6b\x29\xa5\
\xc9\xd5\x89\x69\x22\x6f\x4b\xd5\x1b\x12\x12\xc2\x87\x1f\x4e\x2d\
\xd5\x42\x3a\x8b\x55\xab\x56\xaa\x2a\x1f\x94\xe6\xbf\xa4\x68\xf3\
\x48\x4b\x4b\x63\xd5\xaa\x95\x25\xce\xb7\x2c\xf8\xf0\xc3\xa9\x84\
\x84\x58\x79\x36\x1b\x9d\x57\xd7\x80\xf9\xcc\xa0\x10\x60\x96\x65\
\x2c\x83\xc1\xc0\x8c\x19\x33\xf1\xf2\xfa\x6b\x98\xfb\x2d\x5a\xa4\
\x1d\xfc\xb9\x9b\x4e\x3a\x29\x09\x95\x2a\x55\xc2\x4d\x73\xd4\x8c\
\xf6\x59\xe5\x15\x2f\x2f\x2f\x66\xcc\x98\x69\x4b\x81\x35\x2b\xaf\
\xce\x31\xe4\xa9\x0b\x17\xa1\x51\xf5\x46\x45\x45\x91\x9e\x9e\xce\
\x96\x2d\x9b\xcb\xa4\xb0\x25\x21\x2d\x2d\x8d\xe8\x68\xb5\x57\x30\
\x4f\x4f\x4f\xab\x13\x4e\x8a\x8b\xa7\xa7\x27\xb7\x6f\x9b\x35\x8a\
\xd1\xd1\xd1\xfc\xfa\xeb\x2f\xf8\xfa\x16\x6f\x70\x59\xd6\x44\x45\
\x45\xb1\x6e\xdd\x3a\xcb\x20\x5f\x60\x91\x10\xa2\x93\x00\xfe\x05\
\x7c\x70\x57\x4a\x56\xc1\xdd\xe6\x6d\x81\x72\x62\x78\x8d\xbb\x5d\
\x92\x0a\xee\x0a\xc9\x3a\x94\x4d\x1d\x15\xdc\x9f\x5c\xd3\xa3\x78\
\xfd\x6a\x0b\x94\xdf\xd5\x9d\x0a\x4a\x83\xc3\xc0\x73\xff\x0f\x85\
\x99\x7b\xdc\x47\x68\xa9\xad\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
\x00\x00\x76\x41\
\xff\
\xd8\xff\xe0\x00\x10\x4a\x46\x49\x46\x00\x01\x01\x01\x01\x2c\x01\
\x2c\x00\x00\xff\xe1\x26\xc4\x45\x78\x69\x66\x00\x00\x4d\x4d\x00\
\x2a\x00\x00\x00\x08\x00\x0a\x00\x0b\x00\x02\x00\x00\x00\x26\x00\
\x00\x08\x92\x01\x12\x00\x03\x00\x00\x00\x01\x00\x01\x00\x00\x01\
\x1a\x00\x05\x00\x00\x00\x01\x00\x00\x08\xb8\x01\x1b\x00\x05\x00\
\x00\x00\x01\x00\x00\x08\xc0\x01\x28\x00\x03\x00\x00\x00\x01\x00\
\x02\x00\x00\x01\x31\x00\x02\x00\x00\x00\x26\x00\x00\x08\xc8\x01\
\x32\x00\x02\x00\x00\x00\x14\x00\x00\x08\xee\x02\x13\x00\x03\x00\
\x00\x00\x01\x00\x01\x00\x00\x87\x69\x00\x04\x00\x00\x00\x01\x00\
\x00\x09\x02\xea\x1c\x00\x07\x00\x00\x08\x0c\x00\x00\x00\x86\x00\
\x00\x11\x86\x1c\xea\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x57\
\x69\x6e\x64\x6f\x77\x73\x20\x50\x68\x6f\x74\x6f\x20\x45\x64\x69\
\x74\x6f\x72\x20\x31\x30\x2e\x30\x2e\x31\x30\x30\x31\x31\x2e\x31\
\x36\x33\x38\x34\x00\x00\x00\x01\x2c\x00\x00\x00\x01\x00\x00\x01\
\x2c\x00\x00\x00\x01\x57\x69\x6e\x64\x6f\x77\x73\x20\x50\x68\x6f\
\x74\x6f\x20\x45\x64\x69\x74\x6f\x72\x20\x31\x30\x2e\x30\x2e\x31\
\x30\x30\x31\x31\x2e\x31\x36\x33\x38\x34\x00\x32\x30\x32\x31\x3a\
\x30\x37\x3a\x32\x30\x20\x31\x35\x3a\x35\x34\x3a\x32\x35\x00\x00\
\x06\x90\x03\x00\x02\x00\x00\x00\x14\x00\x00\x11\x5c\x90\x04\x00\
\x02\x00\x00\x00\x14\x00\x00\x11\x70\x92\x91\x00\x02\x00\x00\x00\
\x03\x35\x39\x00\x00\x92\x92\x00\x02\x00\x00\x00\x03\x35\x39\x00\
\x00\xa0\x01\x00\x03\x00\x00\x00\x01\x00\x01\x00\x00\xea\x1c\x00\
\x07\x00\x00\x08\x0c\x00\x00\x09\x50\x00\x00\x00\x00\x1c\xea\x00\
\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x32\x30\x32\x31\x3a\x30\x37\
\x3a\x32\x30\x20\x31\x35\x3a\x35\x33\x3a\x34\x32\x00\x32\x30\x32\
\x31\x3a\x30\x37\x3a\x32\x30\x20\x31\x35\x3a\x35\x33\x3a\x34\x32\
\x00\x00\x00\x00\x06\x01\x03\x00\x03\x00\x00\x00\x01\x00\x06\x00\
\x00\x01\x1a\x00\x05\x00\x00\x00\x01\x00\x00\x11\xd4\x01\x1b\x00\
\x05\x00\x00\x00\x01\x00\x00\x11\xdc\x01\x28\x00\x03\x00\x00\x00\
\x01\x00\x02\x00\x00\x02\x01\x00\x04\x00\x00\x00\x01\x00\x00\x11\
\xe4\x02\x02\x00\x04\x00\x00\x00\x01\x00\x00\x14\xd7\x00\x00\x00\
\x00\x00\x00\x00\x60\x00\x00\x00\x01\x00\x00\x00\x60\x00\x00\x00\
\x01\xff\xd8\xff\xdb\x00\x43\x00\x08\x06\x06\x07\x06\x05\x08\x07\
\x07\x07\x09\x09\x08\x0a\x0c\x14\x0d\x0c\x0b\x0b\x0c\x19\x12\x13\
\x0f\x14\x1d\x1a\x1f\x1e\x1d\x1a\x1c\x1c\x20\x24\x2e\x27\x20\x22\
\x2c\x23\x1c\x1c\x28\x37\x29\x2c\x30\x31\x34\x34\x34\x1f\x27\x39\
\x3d\x38\x32\x3c\x2e\x33\x34\x32\xff\xdb\x00\x43\x01\x09\x09\x09\
\x0c\x0b\x0c\x18\x0d\x0d\x18\x32\x21\x1c\x21\x32\x32\x32\x32\x32\
\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\
\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\
\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\x32\xff\xc0\x00\
\x11\x08\x00\x6d\x00\x86\x03\x01\x21\x00\x02\x11\x01\x03\x11\x01\
\xff\xc4\x00\x1f\x00\x00\x01\x05\x01\x01\x01\x01\x01\x01\x00\x00\
\x00\x00\x00\x00\x00\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\
\x0b\xff\xc4\x00\xb5\x10\x00\x02\x01\x03\x03\x02\x04\x03\x05\x05\
\x04\x04\x00\x00\x01\x7d\x01\x02\x03\x00\x04\x11\x05\x12\x21\x31\
\x41\x06\x13\x51\x61\x07\x22\x71\x14\x32\x81\x91\xa1\x08\x23\x42\
\xb1\xc1\x15\x52\xd1\xf0\x24\x33\x62\x72\x82\x09\x0a\x16\x17\x18\
\x19\x1a\x25\x26\x27\x28\x29\x2a\x34\x35\x36\x37\x38\x39\x3a\x43\
\x44\x45\x46\x47\x48\x49\x4a\x53\x54\x55\x56\x57\x58\x59\x5a\x63\
\x64\x65\x66\x67\x68\x69\x6a\x73\x74\x75\x76\x77\x78\x79\x7a\x83\
\x84\x85\x86\x87\x88\x89\x8a\x92\x93\x94\x95\x96\x97\x98\x99\x9a\
\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xb2\xb3\xb4\xb5\xb6\xb7\xb8\
\xb9\xba\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xd2\xd3\xd4\xd5\xd6\
\xd7\xd8\xd9\xda\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xf1\xf2\
\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xff\xc4\x00\x1f\x01\x00\x03\x01\
\x01\x01\x01\x01\x01\x01\x01\x01\x00\x00\x00\x00\x00\x00\x01\x02\
\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\xff\xc4\x00\xb5\x11\x00\x02\
\x01\x02\x04\x04\x03\x04\x07\x05\x04\x04\x00\x01\x02\x77\x00\x01\
\x02\x03\x11\x04\x05\x21\x31\x06\x12\x41\x51\x07\x61\x71\x13\x22\
\x32\x81\x08\x14\x42\x91\xa1\xb1\xc1\x09\x23\x33\x52\xf0\x15\x62\
\x72\xd1\x0a\x16\x24\x34\xe1\x25\xf1\x17\x18\x19\x1a\x26\x27\x28\
\x29\x2a\x35\x36\x37\x38\x39\x3a\x43\x44\x45\x46\x47\x48\x49\x4a\
\x53\x54\x55\x56\x57\x58\x59\x5a\x63\x64\x65\x66\x67\x68\x69\x6a\
\x73\x74\x75\x76\x77\x78\x79\x7a\x82\x83\x84\x85\x86\x87\x88\x89\
\x8a\x92\x93\x94\x95\x96\x97\x98\x99\x9a\xa2\xa3\xa4\xa5\xa6\xa7\
\xa8\xa9\xaa\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xc2\xc3\xc4\xc5\
\xc6\xc7\xc8\xc9\xca\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xe2\xe3\
\xe4\xe5\xe6\xe7\xe8\xe9\xea\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\
\xff\xda\x00\x0c\x03\x01\x00\x02\x11\x03\x11\x00\x3f\x00\xf7\x2a\
\x28\x00\xa2\x80\x0a\x28\x00\xac\x0f\x19\xf8\x91\x7c\x27\xe1\x8b\
\xad\x54\xc4\x25\x91\x31\x1c\x31\x93\xc3\x3b\x1c\x0c\xfb\x75\x27\
\xe9\x41\x51\x57\x69\x1c\x47\xf6\x77\xc5\x0f\xec\xaf\xed\xff\x00\
\xf8\x48\xa1\xfb\x47\x97\xe7\xff\x00\x66\x79\x43\x6e\xcc\x67\x6f\
\x4c\x67\x1d\xbf\x0c\xe6\xbb\x7f\x06\x78\x91\x7c\x57\xe1\x7b\x5d\
\x58\x44\x22\x91\xf2\x93\x46\x0f\x0a\xeb\xc1\xc7\xb7\x42\x3e\xb4\
\x91\xa4\xd4\x5a\xbc\x74\x37\xe8\xa6\x62\x14\x50\x01\x45\x00\x14\
\x50\x01\x4d\x69\x63\x43\x86\x91\x14\xfa\x16\x02\xa6\x53\x8c\x55\
\xe4\xec\x34\x9b\xd8\xad\x79\xa9\x5a\x58\xd9\x4f\x77\x34\xa0\xc5\
\x02\x19\x18\x27\xcc\xd8\x03\xb0\x1c\x93\xec\x2b\x82\x3f\x19\x34\
\xdc\xf1\xe1\xed\x74\x8e\xdf\xb8\x51\xfd\x6b\x37\x89\xa5\xfc\xcb\
\xef\x46\xb0\xa1\x39\x74\x13\xfe\x17\x26\x9b\xff\x00\x42\xee\xbb\
\xff\x00\x7e\x57\xfc\x68\xff\x00\x85\xc9\xa6\xff\x00\xd0\xbb\xae\
\xff\x00\xdf\x95\xff\x00\x1a\x5f\x59\xa5\xfc\xcb\xef\x45\xfd\x52\
\xa7\xf5\x73\x6f\xc3\x5f\x10\xb4\xef\x12\xdf\xbd\x92\x58\x6a\x16\
\x33\x2a\x17\x5f\xb5\xc4\x15\x5c\x0e\xa0\x30\xe3\x3d\x38\xa4\xf8\
\x91\x61\x6f\xad\x78\x0b\x54\x80\xca\x0c\x90\xc7\xf6\x98\x82\x10\
\xc4\xba\x72\x06\x3d\xf9\x1f\x8d\x1f\x58\xa4\xd7\xc6\xbe\xf4\x4f\
\xb2\x9c\x26\xb4\x3c\xdc\xfc\x57\xf1\x2f\xfc\x23\x7f\xd9\xbf\xd8\
\x8d\xf6\xdf\x27\xc9\xfb\x76\xc7\xce\x31\x8d\xdb\x36\xe3\x76\x3d\
\xf1\x9e\x71\xda\xbd\x23\xe1\xbe\x9f\x6f\xa2\xf8\x0b\x4b\x80\x4a\
\x04\x92\xc7\xf6\x89\x43\x90\xa4\x3b\xf2\x46\x3d\xb8\x1f\x85\x0b\
\x11\x4a\xff\x00\x1a\xfb\xd1\x75\x28\xb8\xc6\xd1\x5b\x9d\x5f\x9f\
\x0f\xfc\xf6\x8f\xfe\xfb\x14\x79\xf0\xff\x00\xcf\x68\xff\x00\xef\
\xb1\x4f\xeb\x14\xbf\x9d\x7d\xe8\xc3\xd9\xcf\xb3\x39\xff\x00\x14\
\xf8\xce\xcb\xc2\xd1\x5a\xbc\xb6\x77\x77\xc6\xe1\x98\x2a\xd9\xa8\
\x7d\xb8\xc6\x4b\x73\xc7\x51\x5c\xd7\xfc\x2e\x4d\x3b\xfe\x85\xdd\
\x77\xfe\xfc\xaf\xf8\xd2\xfa\xcd\x2f\xe6\x5f\x7a\x34\x8e\x1e\x72\
\x57\xff\x00\x33\xaf\xf0\xe7\x89\xed\x3c\x49\xa4\xfd\xbe\x28\x67\
\xb4\x02\x46\x8c\xc5\x76\x02\x38\x23\x1d\xbd\x39\xeb\x5b\x0b\x2c\
\x6e\x70\x92\x23\x1f\x40\xc0\xd5\x46\xbd\x39\x3b\x29\x2f\xbc\xce\
\x54\xe7\x17\xaa\x1d\x45\x6a\x40\x74\xe6\xb8\x09\x1d\xa5\x91\x9d\
\xce\xe6\x63\x92\x4d\x7c\xaf\x13\xc9\xf2\xd3\x8f\x4d\x7f\x43\xd5\
\xca\xd2\xbc\x9f\xa0\xde\x3b\x52\xee\x3e\xa7\xf3\xaf\x91\x3d\x70\
\xdc\x7d\x4f\xe7\x46\xe3\xfd\xe3\xf9\xd2\xb0\x01\x24\xf5\x24\xd2\
\x71\x9a\x60\x2e\x4f\xa9\xfc\xe9\x38\xa0\x03\x03\xd2\x8e\x28\x00\
\x1c\x74\xfd\x29\x77\x1f\x53\xf9\xd0\x02\x1e\x79\x3c\xd3\xa2\x76\
\x8e\x55\x74\x3b\x59\x4e\x41\x15\x50\x93\x8c\x94\x97\x41\x34\x9a\
\xb3\x3b\xa8\xe6\x0c\xbf\x37\x0d\xde\x8a\xfd\x58\xf9\x43\x9e\xd4\
\xfc\x71\xa6\xe9\xda\xb4\xfa\x5a\x5a\x6a\x97\xd7\x56\xe1\x7c\xf1\
\x63\x66\xd3\x08\xb7\x0c\x80\xc4\x74\x38\xe6\xb1\x3a\xf3\xcf\x3e\
\xd5\xf2\x7c\x4e\xff\x00\x85\xff\x00\x6f\x7e\x87\xb1\x96\x45\xa5\
\x2b\xf9\x7e\xa7\x37\xad\xcd\x7e\x9a\x93\x0b\x79\xf5\xc4\x8f\x62\
\xe1\x6c\xac\x92\x58\xff\x00\x06\x6e\x73\x59\xdf\x68\xd5\xbf\xe7\
\xeb\xc5\x3f\xf8\x2c\x8b\xfc\x2b\xce\xa1\x0a\x0e\x9c\x5c\xa3\x4e\
\xf6\xeb\x39\x27\xf3\x5d\xce\xb9\x4a\x49\xbb\x36\x1f\x68\xd5\xbf\
\xe7\xeb\xc5\x5f\xf8\x2d\x87\xfc\x28\xfb\x46\xad\xff\x00\x3f\x5e\
\x2a\xff\x00\xc1\x6c\x3f\xe1\x5a\xfb\x3c\x37\xf2\xd2\xff\x00\xc0\
\xe4\x2e\x69\xf7\x7f\x72\x0f\xb4\x6a\xdf\xf3\xf5\xe2\xaf\xfc\x16\
\xc3\xfe\x14\x7d\xa3\x55\xff\x00\x9f\x9f\x14\xff\x00\xe0\xb2\x2a\
\x3d\x9e\x1b\xf9\x69\x7f\xe0\x72\x17\x34\xfb\xbf\xb9\x1d\x36\x8e\
\xd3\x3e\x99\x19\xb8\x7b\xc7\x93\x73\x64\xde\x44\xb1\xc9\xd7\xba\
\x8e\x31\xe9\x57\x8b\x05\x52\xcc\x40\x50\x32\x49\x3c\x01\x5e\x25\
\x54\xbd\xab\x51\xb6\xfd\x35\x5f\x26\x75\x47\xe1\xd4\xcf\xb2\x7d\
\x77\x5d\x87\xed\x5a\x2e\x99\x6d\xf6\x03\x9f\x2e\xea\xfe\x73\x18\
\x9c\x7a\xa2\x80\x4e\xdf\x73\x8c\xf6\xa7\x43\x7d\x75\x0e\xa5\xfd\
\x97\xab\xd9\x1b\x2b\xe2\x85\xe3\x0b\x27\x99\x14\xea\x3a\x94\x7e\
\xe4\x77\x07\x91\x5e\xde\x27\x21\x9d\x1c\x2f\xb6\xe6\xbc\x96\xad\
\x7f\x5d\x8e\x4a\x78\xc8\xce\xa7\x21\x7e\xb3\xc6\xb1\x04\x91\xbc\
\xd0\x5b\x5f\x5c\x5a\xa1\x21\xae\x60\xb4\x79\x22\x18\xeb\xf3\x01\
\xc8\x1e\xd9\x15\xe5\x61\x70\x55\xf1\x6d\xaa\x2a\xf6\xf9\x1d\x15\
\x2a\xc2\x9a\x5c\xcc\xbb\x0c\xd1\x5c\x42\x93\xc3\x22\xc9\x14\x8a\
\x19\x1d\x4e\x43\x03\xdc\x1a\x7f\x4e\x7d\x2b\x95\xa7\x19\x59\x9a\
\x5e\xea\xe4\xa7\xe2\x3e\x87\x6d\x28\x4d\x46\x3d\x4b\x4b\xdc\xa4\
\xa3\xde\xd9\x34\x6a\xf8\xc6\x40\xf5\xeb\x45\x7e\xac\x9e\x87\xcc\
\x7b\x29\xf4\x2f\x78\x5b\xfe\x47\x1f\x1b\x7f\xd7\xfc\x3d\x3f\xeb\
\x80\xac\xd3\xd4\xd7\xca\x71\x3f\xfc\xba\xff\x00\xb7\xbf\x43\xd3\
\xcb\xb7\x97\xc8\xe4\x3c\x43\x0c\x72\x6a\xcc\xcd\x6b\x0c\x87\xcb\
\x5f\x99\xf5\x93\x6c\x7f\xef\x81\xfc\xfb\xd6\x57\xd9\xa2\xff\x00\
\x9f\x1b\x6f\xfc\x29\x1a\xb9\x70\xf3\x92\xa5\x1f\x7d\xed\xff\x00\
\x3e\x93\xfc\x7a\xfa\x9d\x33\xb7\x33\xd3\xf1\x0f\xb3\x45\xff\x00\
\x3e\x36\xbf\xf8\x52\x35\x1f\x66\x8b\xfe\x7c\x6d\x7f\xf0\xa4\x6a\
\xdf\xda\x4b\xf9\xdf\xfe\x09\x44\xe9\xdb\xff\x00\x26\x0f\xb3\x45\
\xff\x00\x3e\x36\xbf\xf8\x52\x35\x1f\x66\x8b\xfe\x7c\xad\xbf\xf0\
\xa4\x6a\x3d\xa4\xbf\x9d\xff\x00\xe0\x94\x1a\x76\xff\x00\xc9\x8e\
\xc3\x41\x50\x9a\x44\x4a\xb1\xac\x63\x73\x7c\xa9\x75\xf6\x80\x39\
\xfe\xff\x00\x7a\x8f\xc5\x2b\x2b\x78\x57\x53\x10\x86\x2d\xe4\x1c\
\x85\xea\x57\x8d\xd8\xff\x00\x80\xe6\xbc\x28\xb5\xf5\xc4\xe5\xb7\
\x32\xe9\x6e\xbd\xba\x7a\x1d\x2f\xf8\x7a\x76\x3b\xd6\x12\x5c\xe8\
\x50\x0d\x0e\x7b\x78\x56\x48\xa3\xf2\x25\x2a\x4a\xac\x44\x0e\x57\
\x1d\xf6\xf4\xed\x9c\x67\x35\xcd\x78\xe2\x0b\x7b\x5b\x4f\x0d\xda\
\x5b\xe7\xed\x09\xa9\x47\xe4\x65\xb7\x3e\xc0\xad\xe6\x92\x7a\x9c\
\xaf\xde\x3d\xc9\xfa\x57\xe8\x78\xbb\x7d\x5e\x7c\xdb\x59\xfe\x47\
\xcf\xd0\xbf\xb5\x8d\xbb\x99\xba\xea\xca\xda\x05\xfa\xc2\x1c\xb9\
\x81\xb0\x13\xef\x11\xfc\x40\x7b\xe3\x38\xae\xe6\x09\x5d\xec\xac\
\x66\xd1\x56\xd6\x6d\x3b\xca\x05\x14\x31\x5c\xc7\x81\xb3\x63\x0e\
\x06\x07\x63\xf4\xc8\xaf\x07\x86\x5a\xf6\x33\xef\x7f\xd3\xfe\x1c\
\xee\xcc\xaf\xcd\x1b\xec\x71\x1a\x7b\x40\xf7\x7a\xb4\xb6\x78\xfb\
\x04\x97\xf2\xb5\xb6\xdf\xba\x47\x1b\x8a\xff\x00\xb2\x5f\x79\x18\
\xe3\xd2\xb4\x07\x51\x5f\x39\x99\x72\xfd\x76\xa7\x2f\x76\x7a\x34\
\x2f\xec\xa3\x7e\xc7\x2d\xf1\xe0\x9f\xec\x1d\x1b\x19\x3f\xe9\x2f\
\xff\x00\xa0\x8a\x2b\xf4\x76\x78\xf4\x3e\x04\x74\xd6\xbe\x24\xd2\
\xbc\x31\xe3\x7f\x15\xc5\xad\xdd\x1b\x23\x79\x71\x04\xf6\xed\x24\
\x4e\x56\x54\xf2\x82\x92\x08\x07\xb8\xa7\x93\xce\x7b\x57\xcb\x71\
\x3f\xfc\xba\xff\x00\xb7\xbf\x43\xab\x2e\x4f\xde\x7e\x87\x1b\xe2\
\x39\x6d\x93\x58\x65\x96\x7d\x35\x1f\xcb\x5e\x2e\x74\x97\xb8\x7f\
\xfb\xec\x71\x8f\x6a\xc9\xfb\x45\x8f\xfc\xfd\xe8\x9f\xf8\x4f\xc9\
\x5c\xf8\x68\x54\x74\x63\x68\xcf\x6e\x95\x12\x5f\x77\x4f\x43\xa2\
\x4d\x73\x3d\x57\xdc\x1f\x68\xb2\xff\x00\x9f\xbd\x13\xff\x00\x09\
\xf9\x28\xfb\x45\x97\xfc\xfd\xe8\x9f\xf8\x4f\xc9\x5b\xf2\x55\xfe\
\x59\xff\x00\xe0\xd4\x4d\xe3\xdd\x7d\xc1\xf6\x8b\x2f\xf9\xfb\xd1\
\x3f\xf0\x9f\x92\x8f\xb4\x59\x7f\xcf\xde\x89\xff\x00\x84\xfc\x94\
\x7b\x3a\xbf\xcb\x3f\xfc\x1a\x82\xf1\xee\xbe\xe3\xb3\xf0\xf3\x46\
\xda\x34\x26\x27\xb7\x74\xdc\xd8\x36\xf6\xc6\x04\xeb\xd9\x0f\x4f\
\xaf\x7a\xd4\xe0\xf0\x40\x39\xf5\xaf\x9a\xc4\x5d\x56\x95\xfb\xbd\
\xdd\xdf\xdf\xd7\xd4\xeb\x87\xc2\x8c\xdb\x5b\x3d\x5b\x46\x56\x87\
\x42\xd6\x4d\xa5\x99\x24\x8b\x4b\x8b\x71\x3c\x71\x93\xd7\x66\x48\
\x2a\x3d\xb2\x45\x3e\xdf\x4f\x97\xed\xed\xa9\x6a\x37\xd2\xea\x1a\
\x81\x4f\x2c\x4d\x2a\x85\x11\x27\x52\xb1\xa0\xe1\x47\xae\x39\x35\
\xeb\x62\x33\xca\xd5\xf0\xde\xc1\xad\x5e\xef\xbf\xfc\x3f\x53\x9e\
\x18\x38\x42\xa7\xb4\x46\x87\x4f\x6f\xc6\xb3\x5f\x42\xd3\x98\xc8\
\x3c\x99\x12\x39\x09\x69\x21\x8e\x67\x48\x9c\x9e\xb9\x45\x21\x4f\
\xbf\x1c\xd7\x99\x87\xc5\xd6\xc3\xc9\xba\x32\xb3\x66\xf3\xa7\x19\
\xab\x49\x1a\x08\x89\x14\x6b\x1c\x68\xa8\x8a\x00\x55\x51\x80\xa3\
\xb0\x02\x97\xeb\xd2\xb9\xef\x77\x76\x5d\x8e\x23\xe2\x9e\xb1\x65\
\xe3\x1b\x0d\x3a\xcb\xc3\xaf\x2e\xa5\x71\x04\xaf\x2c\xab\x04\x2f\
\xf2\x2e\x00\xc9\xc8\xf5\xa2\xbf\x54\x3c\x2a\x56\x8c\x6d\x26\x7b\
\xb9\x00\x8e\x40\x38\xe9\xc7\x4f\xce\xb8\x03\xd4\xd7\xcb\x71\x47\
\xfc\xba\xff\x00\xb7\xbf\x43\x7c\xaf\xed\x7c\xbf\x53\x9a\xd6\x96\
\xfc\xea\x4d\xf6\x75\xf1\x01\x8f\x62\xe3\xec\x12\xc4\xb1\x67\xe8\
\xdc\xe7\xd6\xb3\xb6\xea\xdf\xdc\xf1\x7f\xfd\xff\x00\x82\xbc\xea\
\x0e\x87\xb3\x8f\x33\xa5\x7b\x75\x52\xbf\xcf\xcc\xed\x92\x7c\xcf\
\x7f\xc0\x36\xea\xdf\xdc\xf1\x7f\xfd\xff\x00\x82\x8d\xba\xb7\xf7\
\x3c\x5f\xff\x00\x7f\xe0\xad\x6f\x87\xef\x47\xee\x91\x36\x7e\x7f\
\x80\x6d\xd5\xbf\xb9\xe2\xff\x00\xfb\xff\x00\x05\x1b\x35\x6f\xee\
\x78\xbf\xfe\xff\x00\x41\x45\xf0\xdd\xe8\xfd\xd2\x0b\x3f\x3f\xc0\
\xe9\xb4\x71\x30\xd3\x23\x17\x02\xf4\x49\xb9\xb2\x2f\x99\x5a\x5e\
\xbc\x64\xaf\x1f\x4a\x8f\xc4\x33\xcb\x6f\xa1\x5c\xcb\x0c\x8d\x1c\
\x8b\xb3\x0c\xa7\x04\x65\xd4\x1f\xd0\xd7\x93\x15\x19\x62\x92\xd2\
\xce\x5d\x36\xdf\xa5\xfa\x1d\x1b\x40\x77\x9d\x2f\xfc\x25\x82\xdf\
\xcc\x6f\x27\xec\x5b\xf6\x67\x8c\xf9\xb8\xce\x3d\x71\xc5\x65\xdb\
\xde\xdd\x37\x87\xf4\x89\x4d\xc4\x86\x49\x2f\xd1\x1d\xb7\x72\xca\
\x65\x61\x83\xed\x8c\x56\xf4\xe9\xc1\xc5\x5d\x7f\x2f\xe5\x22\x1b\
\x7f\xd7\xc8\xbc\xb7\x13\x7f\x6a\x6b\x51\xf9\xaf\xb2\x2b\x68\x9a\
\x35\xcf\xdc\x25\x1f\x24\x7a\x74\x15\x5e\xde\xea\xe1\xad\xfc\x30\
\x4c\xef\x9b\x85\xfd\xf1\xcf\xfa\xcf\xdc\x16\xe7\xd7\x9e\x68\x54\
\xe1\x6d\xba\x2f\xfd\x21\xbf\xcf\x51\xdd\xff\x00\x5e\xa5\xdd\x2e\
\x69\x25\xbd\xd5\xd6\x49\x19\x96\x2b\xcd\x88\x09\xe1\x47\x96\x87\
\x03\xf1\x24\xfe\x35\xa6\x3a\xfe\x35\xc9\x59\x25\x52\xcb\xb2\xfc\
\x91\x71\xd8\xeb\xd4\x00\x38\xe3\xe8\x28\xaf\xd4\x91\xf2\xd7\x66\
\x8f\x6a\xf3\xf3\xd4\xd7\xc9\xf1\x3f\xfc\xba\xff\x00\xb7\xbf\x43\
\xd5\xca\xfe\xdf\xcb\xf5\x31\xf5\x4d\x1f\x43\xba\xb8\x5b\xad\x4e\
\x18\xbc\xd9\x08\x8c\x3c\x92\xb2\xe4\xf3\x81\xf7\x80\xcf\x5a\x8f\
\xfe\x10\xfd\x03\x1f\xf2\x0e\x5f\xfb\xfa\xff\x00\xfc\x55\x78\x50\
\xcc\x31\x94\xe0\x94\x65\x68\xf4\xd1\x7f\x91\xe9\x3a\x50\x6f\x50\
\xff\x00\x84\x3f\x40\xff\x00\xa0\x72\x7f\xdf\xc7\xff\x00\xe2\xab\
\x9d\xd5\xb4\xcd\x22\xc3\x50\x6b\x78\xad\x74\x24\x50\xaa\x76\xdd\
\xea\x32\x47\x27\x23\x3c\xae\x7a\x57\x66\x13\x1f\x8b\xad\x53\x96\
\x53\x96\xdf\x66\x29\xbf\xba\xc6\x75\x29\x42\x2a\xfa\x7c\xca\x3e\
\x46\x99\xff\x00\x3c\x3c\x2f\xff\x00\x83\x69\x68\xf2\x34\xcf\xf9\
\xe1\xe1\x7f\xfc\x1b\x4b\x5e\x97\x3e\x23\xf9\xaa\xff\x00\xe0\x08\
\xc7\x96\x3e\x5f\x79\xdb\x78\x75\x62\x5d\x16\x21\x02\xda\x2c\x7b\
\xdf\x02\xd2\x63\x2c\x7f\x78\xf4\x63\xc9\x3e\xb4\xdf\x14\x7f\xc8\
\xbb\x77\xff\x00\x6c\xff\x00\xf4\x62\xd7\xcf\xab\xfd\x75\x5e\xf7\
\xe6\xea\xac\xf7\xea\xba\x33\xab\xfe\x5d\xfc\x87\xff\x00\xcc\xe9\
\xff\x00\x6e\x1f\xfb\x5a\xb1\xed\xbf\xe4\x59\xd1\x3f\xec\x25\x1f\
\xfe\x8e\x7a\xde\x9f\xc3\x1f\xfb\x77\xf2\x91\x0f\x77\xf3\xfd\x0d\
\x04\xff\x00\x90\xce\xbd\xff\x00\x5e\xb0\xff\x00\xe8\x12\x55\x6b\
\x6f\xf8\xf5\xf0\x87\xfb\x9f\xfb\x6c\x68\x5b\x3f\x45\xff\x00\xa6\
\xd8\xfa\xff\x00\x5d\xcb\xfa\x47\xfc\x7f\xeb\x7f\xf5\xfd\xff\x00\
\xb4\xa3\xad\x71\xd4\x57\x15\x7f\xe2\x7c\x97\xe4\x8d\x23\xb1\xd7\
\x8e\x94\x57\xea\x27\xca\x9a\x3d\xab\xcf\xcf\x53\x5f\x27\xc5\x1f\
\xf2\xeb\xfe\xde\xfd\x0f\x5b\x2b\xfb\x7f\x2f\xd4\xe7\xfc\x6b\x67\
\xf6\xdf\x08\xdf\xa6\xdc\xb4\x6a\x26\x5e\x3f\xba\x73\xfc\xb3\x59\
\x7a\x47\x8c\x74\xed\x3f\xc1\xb6\x13\x5e\xcf\xba\xe1\x53\xc9\xf2\
\x93\x97\x62\xa7\x19\xf6\x18\xc1\xc9\xaf\x33\x0d\x87\x9e\x2b\x04\
\xa9\xd3\x5a\xa9\xfe\x0d\x7f\xc0\x3b\xe7\x35\x0a\x97\x7d\x8e\xcc\
\x10\x46\x47\x43\xd2\xb9\x4d\x7a\x67\x4d\x55\xd4\x5c\xcd\x18\xd8\
\xbf\x2a\x68\xdf\x69\x1d\x3f\xbf\x8e\x7e\x9d\xab\x93\x01\x14\xea\
\xb4\xd5\xf4\xfe\x6e\x5f\xc7\xf4\x2e\xab\xf7\x7f\xa6\x66\x7d\xa6\
\x5f\xf9\xfe\xb9\xff\x00\xc2\x6f\xff\x00\xad\x47\xda\x64\xff\x00\
\x9f\xdb\x9f\xfc\x26\xff\x00\xfa\xd5\xec\x7b\x38\xff\x00\x22\xff\
\x00\xc1\xc7\x3e\xbd\xff\x00\x03\xad\xd0\x98\xbe\x93\x13\x34\x8d\
\x21\xdc\xdf\x33\x5a\xfd\x9c\x9e\x7f\xb9\xdb\xfa\xd4\x3e\x28\xff\
\x00\x91\x72\xef\xfe\xd9\xff\x00\xe8\xc5\xaf\x16\x2a\xd8\xc4\xbf\
\xbc\xba\xdf\xaf\x7e\xbe\xa7\x43\xfe\x1f\xc8\x7f\xfc\xce\x9f\xf6\
\xe1\xff\x00\xb5\xab\x1a\xdb\xfe\x45\x8d\x13\xfe\xc2\x51\xff\x00\
\xe8\xe7\xae\x8a\x7f\x0c\x7f\xed\xdf\xca\x44\x3d\xdf\xcf\xf4\x34\
\x53\xfe\x43\x3a\xff\x00\xfd\x7a\x43\xff\x00\xa0\x49\x55\xad\xbf\
\xe3\xd7\xc2\x1f\xee\x7f\xed\xb1\xa1\x6c\xfd\x17\xfe\x9b\x63\xeb\
\xfd\x77\x2f\xe9\x1f\xf1\xff\x00\xad\xff\x00\xd7\xf7\xfe\xd2\x8e\
\xb5\xc7\x51\x5c\x55\xff\x00\x89\xf2\x5f\x92\x34\x8e\xc7\x5e\x3a\
\x51\x5f\xa8\x9f\x2a\x68\xf6\xaf\x3f\x3d\x4d\x7c\x9f\x14\x7f\xcb\
\xaf\xfb\x7b\xf4\x3d\x6c\xaf\xed\xfc\xbf\x52\x39\xa1\x5b\x88\x24\
\x81\xfe\xec\x88\x51\xbe\x84\x62\xbc\x66\xe3\xc1\xba\x85\x8e\x81\
\x73\xa9\xde\x7e\xe5\x62\x65\x55\x88\x8f\x9d\xc6\xed\xbb\x8f\xa0\
\xf4\xcd\x72\x64\x58\xc8\x50\xe6\x84\xb7\x93\x8a\x5f\x8f\xe4\x76\
\x62\x69\xb9\x6a\xba\x1e\xab\xe1\xbb\xbf\xb7\x78\x6b\x4d\xb8\x63\
\xf3\x3c\x0a\xa7\xfd\xe1\xf2\x9f\xe5\x59\x7a\xe4\x37\x6f\xaa\x3b\
\x43\x6f\xaa\x3a\x6c\x5c\x35\xbe\xaa\xb0\x27\x4f\xee\x13\x91\xf5\
\xef\x5c\x34\x94\x69\xe2\xa7\x19\x34\x92\xba\xd6\x3c\xcb\x7e\xdf\
\xa9\xa4\x9b\x70\x56\x33\xbe\xcd\x7f\xff\x00\x3e\x9a\xe7\xfe\x0f\
\xa3\xff\x00\x1a\x3e\xcf\x7f\xff\x00\x3e\x9a\xe7\xfe\x0f\xa3\xff\
\x00\x1a\xf4\x3d\xa5\x1f\xe6\x87\xfe\x0a\x66\x56\x9f\x9f\xde\x75\
\x3a\x2a\xc8\x9a\x5c\x6b\x2a\x5c\x23\xee\x6c\x8b\x8b\x81\x3b\xf5\
\xee\xe3\x83\xfd\x2a\xec\xb1\x47\x3c\x4d\x1c\xb1\xac\x88\x7a\xab\
\x0c\x83\xe9\xfa\xd7\x85\x56\x56\xac\xe5\x17\xd7\xa2\xb7\xdc\xba\
\x1d\x31\x5e\xee\xa5\x03\xab\xe9\x1f\x6c\xdf\xf6\xa8\x3c\xe0\x7c\
\x9f\x3b\x07\x6e\x73\xf7\x77\xe3\x6f\x5e\xd9\xa9\xae\x4e\x9f\x63\
\x68\x9f\x68\x10\x43\x02\x38\x28\x19\x46\x03\xe7\x23\x03\xbb\x67\
\x3d\x39\xad\x65\x87\xc4\xd3\x94\x61\x28\xb4\xe5\x6b\x79\xf6\xfc\
\xff\x00\x12\x54\xe9\xb4\xdf\x60\xb5\xba\xd3\xef\x64\x9d\xad\xda\
\x27\x94\x80\xb3\x02\x9b\x5f\x1c\xe3\x70\x20\x1c\x72\x71\x50\xcd\
\xa8\x68\xf6\x92\x24\x32\xc9\x02\x35\xaf\xdd\x0a\x84\x88\x38\xc7\
\x50\x30\x9c\x7a\x91\xc5\x54\x70\xd8\xb9\x55\x74\x54\x5f\x32\xe9\
\xf2\xb7\xe4\x0e\x74\xd4\x79\x9b\x2f\x42\xb0\xe1\xa5\x84\x47\x89\
\x8f\x98\xce\x98\xc3\x92\x07\xcd\x91\xd7\x80\x39\xfa\x54\xa3\xa8\
\xae\x46\xdf\x36\xa6\x9d\x0e\xbc\x74\xa2\xbf\x56\x3e\x4c\xd1\xed\
\x5e\x7e\x7a\x9a\xf9\x3e\x28\xff\x00\x97\x5f\xf6\xf7\xe8\x7a\xd9\
\x5f\xdb\xf9\x7e\xa2\x57\x31\xe3\x8b\x6d\x62\xef\x45\x78\xb4\xd5\
\x8e\x48\x08\xff\x00\x48\x88\x2e\x64\x61\x9c\xfc\xbe\xde\xdd\x6b\
\xe7\xf2\xe9\x52\x8e\x26\x12\xad\xb2\x7f\xf0\xdf\x2b\x9e\x9d\x54\
\xdc\x1d\x8c\x2f\x86\xda\x43\xb0\x9b\x55\xb8\xdc\x55\x33\x05\xba\
\xb1\x38\x1f\xdf\x20\x76\xf4\xfc\xea\xf7\x89\x4e\x9f\xfd\xb7\x27\
\xda\x4f\x87\x7c\xcd\x8b\x9f\xb7\x47\x21\x97\xa7\x72\xbc\x63\xd2\
\xbd\xaa\xd5\x67\x53\x32\x9f\xb3\xe6\xd1\x5b\xdd\xb5\xfa\x5f\x7e\
\x97\x39\xd2\xb5\x15\x7f\xc4\xc8\xdd\xa4\x7a\xf8\x3f\xfe\xfd\x4d\
\x46\xed\x1f\xd7\xc1\xff\x00\xf7\xea\x6a\xe9\xb6\x27\xfe\x9f\x7d\
\xf1\x33\xbc\x7c\x8e\xdf\xc3\x66\x03\xa1\xc3\xf6\x7f\xb0\xf9\x5b\
\x9f\x1f\x61\x56\x11\x75\x39\xc0\x6e\x7e\xb5\x36\xb8\x64\x1a\x25\
\xe1\x8c\xb8\xf9\x3e\x6d\x9f\x7b\x66\x46\xfc\x7b\xed\xdd\x5f\x3b\
\x6f\xf6\xdb\x54\xbf\xc5\xad\xf7\xdf\x5b\xfe\xa7\x5e\xbe\xcf\x4e\
\xc7\x56\x3f\xe3\xd3\x72\x59\xc3\xfd\x83\x8f\xb3\x88\xbe\xd1\x1f\
\xd9\xbe\xc9\x8d\xde\x7f\x4f\x4f\x97\x6e\x7a\x73\x5c\x77\x84\x64\
\x78\xf5\x08\xa4\xfb\x31\xb9\xba\x16\x23\xfb\x32\x39\xa4\x08\xcc\
\x86\x56\x0e\x43\x37\x47\x11\xf9\x47\xa6\x70\x31\xdc\xd7\xdf\x57\
\xe4\xf6\xf4\xb9\xb7\xbb\xb7\xfe\x02\xcf\x0a\x9f\x37\xb3\x9d\xbc\
\xaf\xf7\x9a\x9e\x28\x59\xe2\xb8\xd3\x21\x65\x66\xd5\x61\x9d\xa3\
\xb6\xb8\x92\x45\x69\x6e\x6d\xfc\xa2\x5d\xdc\x28\x1b\x46\xfd\xa3\
\x07\x8c\x81\x8e\xb5\x73\xc3\x6d\x72\x3c\x33\x62\xfa\x3d\xab\x49\
\x0c\x36\xe9\x21\x54\xb8\x44\xfb\x5d\xc3\x02\xb3\x24\xb9\x52\x55\
\x95\xb2\xc4\x93\xc9\xe3\xa5\x10\x50\xfa\xdc\xed\xf1\x72\xc7\xf3\
\x97\xf5\xf7\x04\xaf\xec\x23\x7d\xae\xcc\x9b\x2f\xb3\x8b\xad\x50\
\x58\xed\xfe\xcf\x17\xd2\x7d\x9b\x67\xdc\xc6\x06\xed\xbd\xb6\xef\
\xdf\x8c\x71\xe9\x57\x47\x51\x5f\x05\x99\x72\xfd\x72\xa7\x2e\xd7\
\x67\xb9\x42\xfe\xca\x37\xec\x76\x03\xa0\xa2\xbf\x4a\x3e\x60\xd0\
\xec\x6b\xcf\xcf\x53\x5f\x27\xc5\x1f\xf2\xeb\xfe\xde\xfd\x0f\x5b\
\x2b\xfb\x7f\x2f\xd4\x4a\x3f\xcf\x15\xf2\x68\xf5\xc4\x0a\xaa\x30\
\xaa\x00\xce\x70\x06\x39\xac\xcb\xdd\x32\xf6\xea\xe9\xa5\x83\x59\
\xb8\xb5\x8c\x80\x04\x51\xc3\x1b\x01\xef\x96\x19\xe6\xb7\xa3\x56\
\x30\x97\x34\xe3\xcd\xe4\xdb\xfd\x08\x94\x5b\x56\x4e\xc5\x7f\xec\
\x4d\x4f\xfe\x86\x4b\xcf\xfc\x06\x87\xff\x00\x89\xa3\xfb\x13\x53\
\xff\x00\xa1\x92\xf3\xff\x00\x01\xa1\xff\x00\xe2\x6b\xab\xeb\x98\
\x7f\xf9\xf0\xbe\xf9\x7f\x99\x1e\xce\x7f\xcd\xf9\x1a\x96\x56\xf3\
\x5a\xda\xac\x33\xdd\x3d\xd4\x8a\x4e\x65\x75\x55\x27\xf0\x5e\x2a\
\xcf\x4f\x6a\xe0\x9c\x94\xa6\xe4\x95\x97\x63\x54\x9d\xb5\x33\xbf\
\xb0\xf4\xcd\xd9\xfb\x22\xed\xdd\xbf\xca\xde\xde\x56\xee\xb9\xd9\
\x9d\xb9\xfc\x2a\xd5\xd5\x9d\xbd\xec\x42\x3b\x98\x96\x45\x0d\xb9\
\x73\x9c\xa9\xf5\x04\x72\x0f\xd2\xba\x25\x8d\xc4\x4e\x51\x9c\xa6\
\xef\x1d\xbc\x88\x54\xa2\x93\x49\x6e\x32\xd7\x4f\xb5\xb3\x77\x78\
\x62\xc4\x92\x60\x3c\x8e\xec\xee\xc0\x74\x05\x98\x92\x47\xb6\x6a\
\x29\xb4\x7b\x0b\x89\x64\x96\x4b\x7f\x9a\x5f\xf5\xbb\x64\x64\x12\
\x7f\xbc\x14\x80\xdf\x88\xa7\x1c\x76\x22\x35\x5d\x65\x37\xcc\xfa\
\x87\xb2\x87\x2f\x2d\xb4\x2e\x24\x69\x14\x6b\x1c\x68\xa8\x8a\x30\
\xaa\xa0\x00\xa3\xd0\x01\xda\x9e\x3a\x8a\xe5\xbb\x6e\xec\xbb\x1d\
\x80\xe9\x45\x7e\xae\x7c\x99\xa1\x5c\x3d\xe5\x8d\xd5\x8d\xb4\xd7\
\x12\xc1\x23\x24\x4a\x59\x84\x43\x7b\x10\x3a\xe1\x47\x26\xbe\x7f\
\x3c\xc0\x57\xc5\xf2\x7b\x15\x7b\x5e\xff\x00\x3b\x1e\x86\x02\xbd\
\x3a\x5c\xdc\xef\x7b\x1c\x87\xfc\x27\xfe\x1c\xff\x00\x9f\xc9\x3f\
\xf0\x1d\xff\x00\xc2\x8f\xf8\x4f\xfc\x39\xff\x00\x3f\x92\xff\x00\
\xdf\x87\xff\x00\x0a\xf9\xef\xec\x2c\x77\xf2\xaf\xbd\x1e\xaf\xd6\
\x69\xf7\x0f\xf8\x4f\xfc\x39\xff\x00\x3f\x92\xff\x00\xdf\x87\xff\
\x00\x0a\x3f\xe1\x3f\xf0\xe7\xfc\xfe\x4b\xff\x00\x80\xef\xfe\x14\
\x7f\x61\x63\xbf\x95\x7d\xe8\x3e\xb1\x4c\x4f\xf8\x58\x3e\x1a\xff\
\x00\x9f\xf7\xff\x00\xbf\x2f\x4b\xff\x00\x0b\x07\xc3\x3f\xf4\x10\
\x6f\xfb\xf2\xdf\xe1\x47\xf6\x16\x3b\xf9\x57\xde\x83\xeb\x14\xc3\
\xfe\x16\x0f\x86\x7f\xe8\x20\xdf\xf7\xe5\xbf\xc2\x8f\xf8\x58\x3e\
\x19\xff\x00\xa0\x83\x7f\xdf\x96\xff\x00\x0a\x3f\xb0\xb1\xdf\xca\
\xbe\xf4\x1f\x58\xa6\x27\xfc\x2c\x1f\x0d\x7f\xcf\xfb\xff\x00\xdf\
\x97\xa5\xff\x00\x84\xff\x00\xc3\x9f\xf3\xf9\x2f\xfe\x03\xbf\xf8\
\x51\xfd\x85\x8e\xfe\x55\xf7\xa0\xfa\xc5\x30\xff\x00\x84\xff\x00\
\xc3\x9f\xf3\xf9\x2f\xfd\xf8\x7f\xf0\xa3\xfe\x13\xff\x00\x0e\x7f\
\xcf\xec\xbf\xf7\xe1\xff\x00\xc2\x8f\xec\x2c\x77\xf2\xaf\xbd\x07\
\xd6\x69\xf7\x0f\xf8\x4f\xfc\x39\x8f\xf8\xfd\x93\xff\x00\x01\xdf\
\xfc\x2b\xae\xb3\xb2\xb9\xbd\xb4\x86\xea\x28\x5d\x63\x95\x43\xa8\
\x94\x6c\x6c\x1e\x46\x54\xf2\x3f\xfa\xf4\xd6\x45\x8e\xfe\x55\xf7\
\xa2\x65\x8b\xa3\x1d\xd9\xd5\x62\x8a\xfd\x00\xf9\xc2\xfd\x45\x34\
\x61\x90\xb7\x46\x1c\xe6\x80\x3c\xcb\xc4\x9f\x09\x74\xdd\x73\x54\
\x7d\x42\xd2\xf1\xb4\xe6\x94\x93\x2c\x51\xc2\x1d\x19\xbf\xbc\x06\
\x46\x33\xde\xb1\xff\x00\xe1\x47\xc5\xff\x00\x43\x04\x9f\xf8\x0a\
\x3f\xf8\xaa\x9e\x53\xa2\x38\x86\x95\xac\x1f\xf0\xa3\xe2\xe7\xfe\
\x2a\x09\x3a\x7f\xcf\xa0\xff\x00\xe2\xab\x77\x47\xf8\x4b\xe1\xed\
\x29\x95\xef\x04\x9a\x9c\xc0\x67\x33\x9d\xb1\xff\x00\xdf\x03\x8f\
\xcc\x9a\x14\x42\x58\x86\xd6\x8a\xc7\x5c\xba\x56\x9d\x1a\x2a\x2e\
\x9d\x66\x15\x46\x00\x16\xe9\xc0\xfc\xa8\xfe\xcd\xd3\xff\x00\xe7\
\xc2\xd3\xfe\xfc\x27\xf8\x55\x18\x73\x30\xfe\xcd\xd3\xff\x00\xe7\
\xc2\xd3\xfe\xfc\x27\xf8\x51\xfd\x9b\xa7\xff\x00\xcf\x85\xa7\xfd\
\xf8\x4f\xf0\xa0\x39\x98\x36\x95\xa7\x48\x8c\x8f\xa7\x59\xb2\xb0\
\xc1\x06\xdd\x30\x47\xe5\x5c\x8e\xb1\xf0\x97\xc3\xda\xa9\x67\xb3\
\x12\x69\x93\x11\x9c\xc0\x77\x47\xff\x00\x7c\x1e\x3f\x22\x29\x34\
\x99\x50\xab\x28\xb3\x0b\xfe\x14\x7c\x5f\xf4\x30\x49\xff\x00\x80\
\x83\xff\x00\x8a\xa3\xfe\x14\x74\x5f\xf4\x30\x49\xff\x00\x80\x83\
\xff\x00\x8a\xa9\xe5\x37\xfa\xcb\xec\x6b\x78\x77\xe1\x16\x9d\xa3\
\x6a\xd1\xdf\xdd\xdf\x36\xa1\xe4\xfc\xd1\x42\xf0\x84\x50\xfd\x98\
\xf2\x73\x8e\xc3\xd6\xbd\x10\x1c\x8c\xfa\xf3\x54\x95\x8c\x6a\x54\
\x73\x77\x1d\x45\x33\x33\xff\xd9\x00\xff\xed\x00\x64\x50\x68\x6f\
\x74\x6f\x73\x68\x6f\x70\x20\x33\x2e\x30\x00\x38\x42\x49\x4d\x04\
\x04\x00\x00\x00\x00\x00\x2c\x1c\x01\x5a\x00\x03\x1b\x25\x47\x1c\
\x01\x00\x00\x02\x00\x04\x1c\x02\x37\x00\x08\x32\x30\x32\x31\x30\
\x37\x32\x30\x1c\x02\x3c\x00\x0b\x31\x35\x35\x33\x34\x32\x2b\x30\
\x30\x30\x30\x38\x42\x49\x4d\x04\x25\x00\x00\x00\x00\x00\x10\x12\
\xee\x45\x2c\x12\x7e\x8e\xc1\xe8\x81\x01\xfa\x16\x76\xe5\xbc\xff\
\xe1\x33\xe9\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\
\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x00\
\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\x69\x6e\x3d\
\x27\xef\xbb\xbf\x27\x20\x69\x64\x3d\x27\x57\x35\x4d\x30\x4d\x70\
\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\x7a\x6b\x63\
\x39\x64\x27\x3f\x3e\x0d\x0a\x3c\x78\x3a\x78\x6d\x70\x6d\x65\x74\
\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\x62\x65\
\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\x6d\x70\
\x74\x6b\x3d\x22\x49\x6d\x61\x67\x65\x3a\x3a\x45\x78\x69\x66\x54\
\x6f\x6f\x6c\x20\x31\x30\x2e\x31\x30\x22\x3e\x0d\x0a\x09\x3c\x72\
\x64\x66\x3a\x52\x44\x46\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\
\x6f\x72\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\
\x64\x66\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x3e\x0d\
\x0a\x09\x09\x3c\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\
\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\x22\x22\
\x20\x78\x6d\x6c\x6e\x73\x3a\x74\x69\x66\x66\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x74\x69\x66\x66\x2f\x31\x2e\x30\x2f\x22\x3e\x0d\x0a\x09\x09\
\x09\x3c\x74\x69\x66\x66\x3a\x52\x65\x73\x6f\x6c\x75\x74\x69\x6f\
\x6e\x55\x6e\x69\x74\x3e\x32\x3c\x2f\x74\x69\x66\x66\x3a\x52\x65\
\x73\x6f\x6c\x75\x74\x69\x6f\x6e\x55\x6e\x69\x74\x3e\x0d\x0a\x09\
\x09\x09\x3c\x74\x69\x66\x66\x3a\x58\x52\x65\x73\x6f\x6c\x75\x74\
\x69\x6f\x6e\x3e\x33\x30\x30\x2f\x31\x3c\x2f\x74\x69\x66\x66\x3a\
\x58\x52\x65\x73\x6f\x6c\x75\x74\x69\x6f\x6e\x3e\x0d\x0a\x09\x09\
\x09\x3c\x74\x69\x66\x66\x3a\x59\x52\x65\x73\x6f\x6c\x75\x74\x69\
\x6f\x6e\x3e\x33\x30\x30\x2f\x31\x3c\x2f\x74\x69\x66\x66\x3a\x59\
\x52\x65\x73\x6f\x6c\x75\x74\x69\x6f\x6e\x3e\x0d\x0a\x09\x09\x3c\
\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\
\x3e\x0d\x0a\x09\x09\x3c\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\
\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\
\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\
\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\
\x3e\x0d\x0a\x09\x09\x09\x3c\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\
\x75\x6d\x65\x6e\x74\x49\x44\x3e\x61\x64\x6f\x62\x65\x3a\x64\x6f\
\x63\x69\x64\x3a\x73\x74\x6f\x63\x6b\x3a\x64\x65\x62\x64\x35\x30\
\x62\x66\x2d\x64\x32\x39\x39\x2d\x34\x36\x39\x38\x2d\x39\x63\x30\
\x66\x2d\x30\x35\x39\x33\x31\x36\x30\x65\x61\x66\x66\x33\x3c\x2f\
\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\
\x3e\x0d\x0a\x09\x09\x09\x3c\x78\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\
\x74\x61\x6e\x63\x65\x49\x44\x3e\x78\x6d\x70\x2e\x69\x69\x64\x3a\
\x65\x38\x30\x32\x37\x61\x65\x33\x2d\x38\x64\x37\x31\x2d\x34\x64\
\x65\x66\x2d\x38\x35\x39\x34\x2d\x32\x36\x66\x30\x61\x64\x61\x63\
\x30\x36\x30\x35\x3c\x2f\x78\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\
\x61\x6e\x63\x65\x49\x44\x3e\x0d\x0a\x09\x09\x3c\x2f\x72\x64\x66\
\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x0d\x0a\x09\
\x09\x3c\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x3e\x3c\x78\x6d\x70\x3a\
\x43\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3e\x57\x69\x6e\x64\
\x6f\x77\x73\x20\x50\x68\x6f\x74\x6f\x20\x45\x64\x69\x74\x6f\x72\
\x20\x31\x30\x2e\x30\x2e\x31\x30\x30\x31\x31\x2e\x31\x36\x33\x38\
\x34\x3c\x2f\x78\x6d\x70\x3a\x43\x72\x65\x61\x74\x6f\x72\x54\x6f\
\x6f\x6c\x3e\x3c\x78\x6d\x70\x3a\x43\x72\x65\x61\x74\x65\x44\x61\
\x74\x65\x3e\x32\x30\x32\x31\x2d\x30\x37\x2d\x32\x30\x54\x31\x35\
\x3a\x35\x33\x3a\x34\x32\x2e\x35\x39\x34\x3c\x2f\x78\x6d\x70\x3a\
\x43\x72\x65\x61\x74\x65\x44\x61\x74\x65\x3e\x3c\x2f\x72\x64\x66\
\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x3c\x2f\x72\
\x64\x66\x3a\x52\x44\x46\x3e\x0d\x0a\x3c\x2f\x78\x3a\x78\x6d\x70\
\x6d\x65\x74\x61\x3e\x0d\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\
\x20\x65\x6e\x64\x3d\x27\x77\x27\x3f\x3e\xff\xdb\x00\x43\x00\x03\
\x02\x02\x03\x02\x02\x03\x03\x03\x03\x04\x03\x03\x04\x05\x08\x05\
\x05\x04\x04\x05\x0a\x07\x07\x06\x08\x0c\x0a\x0c\x0c\x0b\x0a\x0b\
\x0b\x0d\x0e\x12\x10\x0d\x0e\x11\x0e\x0b\x0b\x10\x16\x10\x11\x13\
\x14\x15\x15\x15\x0c\x0f\x17\x18\x16\x14\x18\x12\x14\x15\x14\xff\
\xdb\x00\x43\x01\x03\x04\x04\x05\x04\x05\x09\x05\x05\x09\x14\x0d\
\x0b\x0d\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\
\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\
\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\
\x14\x14\x14\x14\xff\xc0\x00\x11\x08\x00\x6d\x00\x86\x03\x01\x22\
\x00\x02\x11\x01\x03\x11\x01\xff\xc4\x00\x1f\x00\x00\x01\x05\x01\
\x01\x01\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x01\x02\x03\
\x04\x05\x06\x07\x08\x09\x0a\x0b\xff\xc4\x00\xb5\x10\x00\x02\x01\
\x03\x03\x02\x04\x03\x05\x05\x04\x04\x00\x00\x01\x7d\x01\x02\x03\
\x00\x04\x11\x05\x12\x21\x31\x41\x06\x13\x51\x61\x07\x22\x71\x14\
\x32\x81\x91\xa1\x08\x23\x42\xb1\xc1\x15\x52\xd1\xf0\x24\x33\x62\
\x72\x82\x09\x0a\x16\x17\x18\x19\x1a\x25\x26\x27\x28\x29\x2a\x34\
\x35\x36\x37\x38\x39\x3a\x43\x44\x45\x46\x47\x48\x49\x4a\x53\x54\
\x55\x56\x57\x58\x59\x5a\x63\x64\x65\x66\x67\x68\x69\x6a\x73\x74\
\x75\x76\x77\x78\x79\x7a\x83\x84\x85\x86\x87\x88\x89\x8a\x92\x93\
\x94\x95\x96\x97\x98\x99\x9a\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\
\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xc2\xc3\xc4\xc5\xc6\xc7\xc8\
\xc9\xca\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xe1\xe2\xe3\xe4\xe5\
\xe6\xe7\xe8\xe9\xea\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xff\
\xc4\x00\x1f\x01\x00\x03\x01\x01\x01\x01\x01\x01\x01\x01\x01\x00\
\x00\x00\x00\x00\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\
\xff\xc4\x00\xb5\x11\x00\x02\x01\x02\x04\x04\x03\x04\x07\x05\x04\
\x04\x00\x01\x02\x77\x00\x01\x02\x03\x11\x04\x05\x21\x31\x06\x12\
\x41\x51\x07\x61\x71\x13\x22\x32\x81\x08\x14\x42\x91\xa1\xb1\xc1\
\x09\x23\x33\x52\xf0\x15\x62\x72\xd1\x0a\x16\x24\x34\xe1\x25\xf1\
\x17\x18\x19\x1a\x26\x27\x28\x29\x2a\x35\x36\x37\x38\x39\x3a\x43\
\x44\x45\x46\x47\x48\x49\x4a\x53\x54\x55\x56\x57\x58\x59\x5a\x63\
\x64\x65\x66\x67\x68\x69\x6a\x73\x74\x75\x76\x77\x78\x79\x7a\x82\
\x83\x84\x85\x86\x87\x88\x89\x8a\x92\x93\x94\x95\x96\x97\x98\x99\
\x9a\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xb2\xb3\xb4\xb5\xb6\xb7\
\xb8\xb9\xba\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xd2\xd3\xd4\xd5\
\xd6\xd7\xd8\xd9\xda\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xf2\xf3\
\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xff\xda\x00\x0c\x03\x01\x00\x02\x11\
\x03\x11\x00\x3f\x00\xfd\x30\xa2\x8a\x82\x6b\xfb\x6b\x59\x0a\x4d\
\x73\x0c\x2f\xfd\xd9\x24\x0a\x7f\x22\x6a\x25\x38\xc1\x5e\x4e\xc8\
\xa5\x17\x27\x64\x89\xe8\xac\x3f\x12\xf8\xdb\x46\xf0\xaf\x87\xb5\
\x2d\x6a\xfe\xf5\x5a\xcb\x4f\xb7\x7b\xa9\x96\xd7\x13\x4a\x55\x54\
\x92\x11\x14\xee\x66\x3d\x02\x8e\x49\x38\xaf\x96\xdb\xfe\x0a\x59\
\xe1\x5d\xc7\x6f\xc3\x2f\x88\x4e\x99\xca\x9f\xec\xf8\xc6\x47\xae\
\x37\xf1\xc6\x2b\x0f\xad\x50\x5b\xcd\x7d\xe8\xde\x18\x5a\xd5\x3e\
\x08\x36\x7d\x81\x45\x7c\x7b\xff\x00\x0f\x2c\xf0\xb7\xfd\x12\xff\
\x00\x88\x3f\xf8\x01\x1f\xff\x00\x17\x47\xfc\x3c\xb3\xc2\xdf\xf4\
\x4b\xfe\x20\xe3\xfe\xbc\x23\xff\x00\xe2\xe8\xfa\xd6\x1f\xf9\xd7\
\xde\x6b\xf5\x1c\x4f\xf2\x33\xec\x2a\xf2\xcf\xda\x63\xe3\x6c\x5f\
\xb3\xe7\xc1\xdd\x6b\xc6\x46\xcd\x35\x0b\xc8\x4c\x76\xb6\x36\xb2\
\x12\x16\x5b\x99\x4e\xd8\xc3\x11\xce\xc1\x82\xcd\x8e\x48\x52\x07\
\x5a\xe5\xbe\x07\xfe\xd8\xde\x17\xf8\xe1\xe2\x4b\x8d\x0a\x0f\x0e\
\xf8\x93\xc2\xd7\xf1\xdb\xb5\xcc\x67\x5e\xb3\x58\xa1\x9d\x54\x80\
\xca\xb2\x29\x20\x30\xc8\x38\x24\x13\xdb\x38\x34\xdf\xdb\x6b\xc2\
\x3a\x67\xc4\xdf\xd9\x9f\xc6\xba\x79\xbb\x59\x2e\xec\x6d\xc6\xaf\
\x68\xb6\xce\x24\x91\xe7\x80\xee\x54\x0a\x0e\x4e\xe1\xb9\x78\xe4\
\x6e\xcf\x6a\x1e\x2a\x8d\x9b\x53\x5f\x7a\x26\x18\x79\xc6\xac\x63\
\x56\x2e\xc7\x8c\x1f\x06\x7e\xd7\x8b\xe0\xaf\xf8\x59\x1f\xf0\xb3\
\xec\x8e\xaf\xf6\x6f\xed\x23\xe0\xbf\xb1\x27\x91\xe4\x6d\xdf\xe4\
\xe3\x66\xcd\xfb\x7f\x83\xfe\x03\xe6\x66\xbe\x93\xfd\x99\xfe\x36\
\x45\xfb\x41\x7c\x1c\xd1\x3c\x64\x2d\x13\x4f\xbc\xb8\xdf\x6d\x7b\
\x69\x19\x25\x61\xb9\x88\xed\x70\xa4\xf3\xb4\xf0\xc3\x3c\x80\xc0\
\x1c\xe3\x35\xf0\xa3\x7f\xc1\x41\xbe\x2b\xb7\xc2\x4f\xf8\x45\x7f\
\xe1\x01\x93\xfe\x12\x2f\xb0\xfd\x83\xfe\x12\x5f\x22\xe7\x76\xcf\
\x2f\xcb\xf3\x7c\x8f\x2f\x6f\x9b\xb7\xbe\xed\xbb\xbe\x6d\xbf\xc3\
\x5f\x60\xfe\xc4\xfe\x0f\xd3\x3e\x19\x7e\xcc\xfe\x09\xd3\x85\xda\
\xc7\x75\x7b\x6d\xfd\xad\x76\x97\x2e\xb1\xc8\x93\xcf\xf3\x32\x15\
\x27\x8d\xa0\x2a\xff\x00\xc0\x49\xef\x51\x1c\x4d\x0b\xe9\x35\xf7\
\xa3\xb3\x11\x42\xa4\x69\xde\xa4\x2c\xef\xa5\x97\x4f\x91\xef\x94\
\x55\x41\xab\x58\xff\x00\xcf\xf5\xb7\xfd\xff\x00\x5f\xf1\xa5\xfe\
\xd6\xb1\xff\x00\x9f\xeb\x5f\xfb\xfe\xbf\xe3\x5a\xfd\x66\x8f\xf3\
\xaf\xbd\x1e\x5f\xb3\x9f\xf2\xbf\xb8\xb5\x45\x78\xf7\xc7\xbf\xda\
\x6b\x42\xf8\x07\x67\xa3\x4d\x79\xa2\x6b\x5e\x29\x93\x54\x92\x54\
\x8e\x2f\x0f\x44\x93\x98\x44\x61\x49\x69\x09\x61\xb4\x1d\xca\x07\
\xaf\x3e\x95\xe3\xdf\xf0\xf2\xbf\x0b\x64\xff\x00\xc5\xb1\xf8\x83\
\x8c\xff\x00\xcf\x84\x5d\xcf\xfb\xdf\xe7\x15\x3f\x5a\xa1\xfc\xeb\
\xef\x37\x8e\x12\xbc\xd7\x34\x60\xec\x7d\x85\x45\x79\xc7\xc1\x5f\
\x8e\x9a\x2f\xc6\xcf\x04\x8f\x11\xd9\x58\xea\x1e\x1d\x8c\x5c\xc9\
\x68\xd6\x1a\xea\x2c\x17\x0a\xc9\x8e\x40\xce\x0a\x9d\xc0\x86\x1e\
\xfe\x95\xe8\x10\xdf\xdb\x5d\x49\xb2\x1b\x98\x66\x7e\xbb\x63\x90\
\x31\xfd\x2a\xe3\x88\xa5\x27\x68\xcd\x37\xea\x8c\x65\x4a\xa4\x1b\
\x52\x8e\xc4\xf4\x51\x45\x6e\x64\x23\x31\x5e\x47\x6a\xf9\x66\xf6\
\xe2\x4b\xeb\xb9\xa7\xb8\x63\x2c\xd2\x39\x66\x66\x1c\x9a\xf4\x1f\
\x1c\xfe\xd5\x3e\x15\xf0\x6f\x8e\x35\x2f\x08\xc1\xa2\xf8\xb3\xc5\
\x5a\xde\x98\x91\x1d\x42\x3f\x0c\xe8\x52\x5f\x25\x97\x9a\xbb\xd1\
\x25\x75\x20\x2b\x15\x21\xb0\x32\x00\x3e\xb9\xaf\x39\x2c\x18\xee\
\xf9\x80\x6e\x79\x1f\xd3\xd6\xbf\x9f\xbc\x58\x9b\xe5\xc1\x41\x3d\
\x1f\x3f\xfe\xd8\x7e\x89\xc2\x74\xdc\x7d\xb4\xa4\xbf\x97\xf5\x1a\
\xbb\x47\x2b\x81\xee\x29\xfe\x73\xf5\x2e\xd9\x3d\x7e\x63\xfe\x35\
\xe3\x9f\x14\x35\x2f\x11\xdb\xf8\xb2\x44\xd3\x75\x0f\x88\x16\xf6\
\xbe\x44\x64\x47\xe1\xcd\x06\xda\xf2\xcc\x1c\x1c\xe2\x49\x06\xed\
\xde\xa3\xa0\xe2\xb9\x3f\xed\xaf\x19\xff\x00\xd0\x5f\xe2\xef\xfe\
\x12\x76\x3f\xfc\x4d\x7e\x43\x43\x20\x9e\x22\x94\x6a\xfb\x78\x2e\
\x65\x7b\x3e\x7b\xfe\x11\x6b\xf1\x3e\xce\x58\xa8\xc6\x4e\x2e\x2f\
\x4f\x4f\xf3\x3e\x8f\xf3\x9f\xfb\xed\xff\x00\x7d\x1f\xf1\xa5\xf3\
\x9f\xfe\x7a\x37\xfd\xf4\x7f\xc6\xbe\x6f\xfe\xda\xf1\x9f\xfd\x05\
\xfe\x2f\x7f\xe1\x27\x63\xff\x00\xc4\xd1\xfd\xb5\xe3\x3f\xfa\x0b\
\xfc\x5e\xff\x00\xc2\x4e\xc7\xff\x00\x89\xad\xbf\xd5\x99\xff\x00\
\xd0\x44\x3f\xf2\x7f\xfe\x40\x8f\xae\x47\xf9\x5f\xe1\xfe\x67\xd1\
\xcd\x21\x7e\x19\x8b\x0f\x72\x4f\xf3\x34\xdf\x94\x37\xa7\x6f\xc3\
\xfc\xf6\xaf\x9c\xff\x00\xb6\xbc\x67\xff\x00\x41\x7f\x8b\xdf\xf8\
\x49\xd8\xff\x00\xf1\x34\x87\x59\xf1\x97\x7d\x5b\xe2\xef\xfe\x12\
\x76\x3f\xe1\x54\xb8\x6e\xa7\xfd\x04\x43\xff\x00\x27\xff\x00\xe4\
\x03\xeb\xb1\xfe\x57\xf8\x7f\x99\xf4\x7f\x98\xff\x00\xdf\x7c\xe7\
\xd4\xf5\xfc\xfd\x69\x18\x02\x72\x46\x7b\x03\xed\xda\xb9\x7f\x86\
\xb7\x17\xd7\x1e\x0f\xb4\x7d\x4a\x6d\x6a\xe2\xf0\xc9\x28\x67\xf1\
\x05\x9c\x76\x97\x98\xde\x70\x1a\x38\xfe\x50\xa0\x74\xc7\x51\xcf\
\x7a\xe9\xa4\x99\x2d\xe3\x79\x65\x75\x8e\x38\xd4\xb3\xbb\x9c\x2a\
\xa8\xe4\x93\xed\xef\xdb\xad\x7c\xad\x6a\x2e\x8d\x69\x51\xbd\xda\
\x76\xd2\xfa\xfe\xa7\x74\x65\xcd\x15\x21\x76\xaf\xf7\x47\xe5\x46\
\xd5\x1d\x80\xfc\x2b\x93\xf0\xbd\xd7\xc4\x5f\x8a\xd6\x1f\xda\xfe\
\x02\xf0\xa6\x98\x3c\x2e\xe5\xbe\xcb\xad\xf8\xa7\x50\x92\xd1\x35\
\x05\x04\x82\xf0\x42\x88\xce\x63\x24\x1c\x3b\xed\x0d\xc1\x02\x9f\
\xa7\x78\xab\x58\xd2\xfc\x58\x7c\x21\xe3\x6d\x01\xbc\x2f\xe2\x67\
\x85\xe7\xb5\x48\xee\x85\xcd\x96\xa3\x12\x1c\x3b\x5b\xce\x00\xdc\
\x57\x8d\xd1\xb0\x0e\xb9\x19\x15\xf5\x38\xae\x11\xce\xf0\x78\x3f\
\xaf\x57\xc3\xb5\x4e\xd7\x7a\xab\xa5\xdd\xc5\x3b\xaf\x9a\xd3\xad\
\x8f\x3e\x9e\x65\x84\xad\x57\xd8\xc2\xa2\x72\x3a\xb5\x62\xb9\xda\
\x48\xff\x00\x74\x91\x9a\x5f\x39\xcf\xf1\xb1\xcf\x5f\x98\xd3\x4e\
\x49\xf5\x27\xf2\xf4\xe9\xf8\x74\xae\x52\x3f\x89\x3a\x7d\xe5\xac\
\xf7\xda\x7e\x97\xe2\x0d\x63\x44\xb7\x2c\xb3\x6b\x5a\x5e\x8b\x73\
\x75\x60\x85\x4e\x18\x89\x91\x48\x75\x5e\xe5\x32\x07\xaf\x15\xf3\
\xf8\x3c\xbb\x17\x98\x4a\x51\xc2\x52\x95\x46\xb5\x7c\xa9\xbb\x7d\
\xc7\x65\x4a\xd4\xe8\xa4\xea\xc9\x2b\xf7\x3a\xc6\xf9\xf9\x7f\x98\
\x9e\xe7\x27\xf5\xa9\x6c\x6e\x24\xb1\xbc\x86\x7b\x77\x31\x4d\x1b\
\x06\x57\x5e\x08\x39\xaa\x1a\x6e\xa5\x69\xac\xe9\xf6\xd7\xf6\x17\
\x31\x5e\x59\x5d\x46\xb2\xc3\x71\x03\x87\x49\x10\x8c\x86\x56\x1d\
\x45\x59\x0d\xb7\xe6\xf4\xe7\x8e\xbe\xb5\xc9\x19\x4e\x8d\x55\xd2\
\x49\xfc\xd3\x5f\xaa\x35\x76\x94\x6c\xf5\x4c\xfa\x82\xcb\x52\x5b\
\x88\xf3\x21\xdb\x27\x04\xf6\x07\x8e\xa2\x8a\xf9\xad\xbf\x6d\x6f\
\x87\xba\x1d\xe4\x70\x78\x9e\xdb\xc5\x1e\x04\x12\x46\xcd\x04\xfe\
\x25\xd0\x25\xb5\x8e\x7d\xa5\x43\x2c\x64\x93\xb8\x8c\x83\xf4\xa2\
\xbf\xd0\x15\x25\x6d\xcf\xc0\xfd\x85\x5e\x91\x67\x4d\xf0\x17\x3f\
\xf0\xbe\xbf\x69\x1c\x71\x9f\x11\xe9\xe0\xed\xef\xff\x00\x12\xd4\
\x38\xae\x3e\x4e\x59\xb9\xc7\x27\x24\xfd\x7d\x6a\xb6\x83\xf1\xb3\
\xc1\xdf\x02\x7f\x68\xaf\x8e\x76\x7e\x3f\xd5\xdf\xc3\x2f\xaf\x6a\
\x9a\x76\xa5\xa6\x4d\x77\x69\x3b\xc7\x79\x00\xb1\x48\xcb\x23\x22\
\x10\x70\xc0\x82\x33\xd7\xe8\x6a\xc3\x36\xe6\xdd\x9f\x94\xf2\x33\
\xcf\xd3\x3f\xe7\x35\xfc\xf9\xe2\xbf\xfc\xc1\x7f\xdc\x4f\xfd\xb0\
\xfd\x13\x86\x53\xbd\x66\xf6\x6a\x1f\x93\x3c\x13\xe3\x26\x99\x6f\
\x77\xe3\x89\x64\x93\x48\xb1\xbc\x63\x6d\x08\xf3\x6e\x3c\x76\xfa\
\x3b\x63\x07\x8f\xb3\xa9\xc0\x03\xfb\xdf\xc5\x5c\x3f\xf6\x1d\xa7\
\xfd\x0b\xda\x57\xfe\x1d\x79\x7f\xc6\xbb\x0f\x8d\x5a\x86\x93\x6d\
\xe3\xb9\x63\xbc\xd4\x3c\x2d\x6b\x3f\xd9\x61\x26\x3d\x63\xc1\x73\
\xea\xb7\x18\xc1\xe4\xce\x83\x69\x1e\x8b\xda\xb8\x3f\xed\x7f\x0f\
\xff\x00\xd0\x67\xc0\x43\xfe\xe9\x9d\xcf\xf8\x57\x8d\x95\xaa\xdf\
\x52\xa3\x65\x3f\x85\x6d\xf5\x9e\xdf\xdd\x7c\xbf\x76\x9d\x8f\x62\
\xb5\xbd\xa4\xb6\xdf\xfb\x85\xcf\xec\x4b\x4f\xfa\x17\xf4\x9f\xfc\
\x3a\xf2\xff\x00\x8d\x1f\xd8\x96\x9f\xf4\x2f\xe9\x3f\xf8\x75\xe5\
\xff\x00\x1a\xa7\xfd\xaf\xa0\x7f\xd0\x6b\xc0\x5f\xf8\x6c\xee\x7f\
\xc2\x8f\xed\x7d\x03\xfe\x83\x5e\x02\xff\x00\xc3\x67\x73\xfe\x15\
\xe9\xda\xbf\x69\xfd\xd8\xaf\xf3\x31\xd3\xcb\xff\x00\x24\x2e\x7f\
\x62\x5a\x7f\xd0\xbf\xa4\xff\x00\xe1\xd7\x97\xfc\x69\x0e\x87\x69\
\xd7\xfb\x03\x4a\x1f\x4f\x8a\xf2\xe7\xf9\xd5\x4f\xed\x7d\x03\xfe\
\x83\x5e\x02\xff\x00\xc3\x67\x73\xfe\x14\x7f\x6c\x68\x1f\xf4\x1a\
\xf0\x16\x7a\x7f\xc9\x33\xba\xff\x00\x0a\x2d\x5f\xb4\xfe\xec\x57\
\xf9\x8f\x4f\x2f\xfc\x90\xfa\x33\xe1\x1d\xba\x5a\xf8\x16\xca\x38\
\xad\xa1\xb3\x41\x2c\xc4\x43\x06\xb0\x75\x64\x5c\xb9\x3c\x5c\x9e\
\x5f\xe9\xdb\xa5\x53\xf8\xf9\x15\xdc\xdf\x05\xfc\x66\x96\x4b\x23\
\xcc\x74\xd9\x37\x2c\x39\xde\xd1\x64\x79\xa1\x7d\xcc\x7b\xea\xc7\
\xc1\xd9\xed\xae\x3c\x01\x60\xd6\x73\xe9\xd7\x16\xfe\x6c\xd8\x93\
\x4a\xd2\x9b\x4c\xb7\x27\x79\xce\x2d\xdb\x95\x23\xa1\x3f\xc4\x46\
\x6b\xb5\x2a\xac\x30\xc0\x30\x3c\x61\x86\x47\xe5\xdf\x8e\xc7\x8a\
\xfc\x7a\xb5\x77\x84\xcd\x9d\x76\xae\xe1\x52\xf6\x77\xd6\xd2\xbd\
\x9f\x35\xe5\xeb\x7d\x7b\xea\x7b\xd1\x8f\xb4\xa0\xa3\xdd\x7f\x5b\
\x1e\xcd\x3a\x5d\x6b\x5f\x0d\x74\xc1\xf0\xfb\x51\xd3\xb4\xe8\xae\
\x6d\x2d\x3f\xb3\xaf\xa4\x88\xbc\x31\xd9\x90\x83\x74\x61\x0f\x0e\
\x22\xce\xc2\x72\xa1\x82\xee\x04\x64\x1f\x1c\xfd\xa9\xf4\xbd\x33\
\x41\xd0\xfe\x10\x68\xba\x6e\x7f\xb5\xed\xfc\x59\x6d\xfd\x9b\xe6\
\x48\x64\x9c\xc2\x90\xca\x6f\x1d\xd8\xfc\xcd\xba\x32\x3c\xc6\x3c\
\x12\xfc\xf6\xae\x33\xc3\xfe\x1b\xf1\x9f\xc3\x48\x65\xb1\xf8\x77\
\xe3\xb6\xf0\xef\x87\xdd\xda\x44\xd0\x75\x6d\x32\x3d\x4e\xd6\xd1\
\x9b\x92\x2d\x8b\x32\xbc\x4b\x93\x9d\x9b\x8a\x8e\x70\x05\x58\xd1\
\xbc\x1b\x76\xbe\x25\x9b\xc5\x3e\x28\xf1\x05\xe7\x8b\xfc\x56\xd0\
\x9b\x55\xd4\xaf\x63\x48\xa3\xb3\x87\x39\x31\x5b\x40\x9f\x24\x28\
\x4f\x5d\xb9\x2d\xdc\xd7\xf4\x26\x6b\xe2\x16\x4d\x5b\x2b\xa9\x1a\
\x0d\xca\xa4\xe2\xd7\x23\x8b\xd1\xb5\x6d\x5f\xc3\x65\xd6\xcd\x9f\
\x05\x84\xc8\x31\x74\xb1\x51\x94\xda\xe5\x8b\xbd\xfb\x8c\xf8\xb1\
\x0d\xdc\xdf\x0c\x7c\x4f\x15\x82\xcc\xf7\x0d\xa7\xc8\x04\x76\xf9\
\xf3\x5e\x3c\x7e\xf1\x53\x1c\xee\x29\xbc\x0c\x73\x9c\x63\x9c\x57\
\xd2\xda\x5d\xf4\xd7\x5e\x1e\xf0\xd5\xf7\x80\xa2\xd2\x75\x1f\x09\
\xfd\x8d\x4c\x30\xc7\x23\x45\xba\xd8\x04\xf2\x0d\xbc\xaa\x0a\x28\
\x45\x07\x2a\xe0\x64\xe0\x65\x08\x24\xf8\xb6\x76\x93\x83\xb7\xf1\
\xfc\x73\xfe\x7f\xa5\x72\x17\x5f\x09\xfc\x31\x70\xd7\x48\x2c\xae\
\x2d\x6d\x2e\xdd\xa5\xb9\xd3\xad\x2f\xee\x6d\xec\xa7\x73\x9d\xc5\
\xed\xe3\x91\x62\x62\x4e\x73\xf2\xfc\xc4\x9c\xe7\x9a\xfc\xdb\x82\
\xf8\xc3\x0d\xc3\x74\xab\x50\xc4\xd3\x72\x53\x69\xa7\x1b\x5e\xf6\
\xb5\x9d\xda\xd3\xe7\xa6\xba\x6a\x7d\x06\x71\x94\xcf\x31\x70\x94\
\x25\x6b\x74\x64\xfe\x0e\x9b\x4f\xba\xd6\xbc\x75\x77\xa1\x6c\xff\
\x00\x84\x62\xe7\xc4\xb7\x92\xe9\x46\x1c\x79\x4c\x84\x20\x99\xa2\
\xc7\x1e\x5b\x5c\x0b\x82\x08\xc8\x39\x24\x70\x45\x75\x0b\x9d\xcb\
\x8e\xb9\xe3\x1e\xb5\x15\xbd\xac\x16\x36\xd1\x5b\x5b\x43\x1d\xb5\
\xbc\x28\x12\x28\x62\x50\xa9\x1a\x01\x85\x55\x03\x80\x00\x1c\x01\
\x52\x02\x17\x96\xfb\xbd\x4f\xd3\xbd\x7e\x7d\x9a\x63\xbf\xb4\xf3\
\x1a\xd8\xde\x5e\x5f\x69\x26\xed\xda\xff\x00\xd7\xcc\xf7\x70\xf4\
\x7e\xaf\x46\x14\xaf\x7e\x54\x91\xe2\x7f\xf0\x56\x86\x6f\xf8\x56\
\x7f\x0f\x48\xdc\xd8\xd5\xae\x40\x23\x92\x07\x90\x9c\x7d\x28\xae\
\x5f\xf6\xfa\xf8\x91\xa1\x7e\xd2\x7e\x19\xf0\x8e\x81\xf0\xc6\x6b\
\xbf\x1a\xea\x9a\x6d\xed\xc5\xed\xe4\x5a\x5d\x8d\xc1\xf2\x22\x31\
\xa4\x61\x98\x98\xc7\x56\xe3\x8c\xf4\xa2\xbf\xba\x59\xf9\x86\x07\
\xdc\xa0\x94\x9d\x99\xfa\x7f\x22\x2c\x8a\x77\x2a\xb6\x01\xc6\x46\
\x76\xfd\x33\x9f\xf0\xaf\x95\x9f\xef\x37\xd7\xa0\xfe\x95\xf5\x53\
\x7d\xd3\xf4\xaf\x95\x18\x02\xcd\x9e\x84\xe0\xd7\xe0\xbe\x2c\xff\
\x00\xcc\x17\xfd\xc4\xff\x00\xdb\x0f\x5b\x84\xbf\xe5\xff\x00\xfd\
\xbb\xff\x00\xb7\x1e\x3b\xf1\x3e\x1f\x11\xbf\x8b\x64\x3a\x5c\x5f\
\x12\x5e\xd7\xc8\x88\x03\xe1\x7b\xbb\x28\xec\xf7\x60\xe7\x0b\x2f\
\xcd\xbb\xd6\xb9\x3f\x23\xc6\x7f\xf3\xc3\xe3\x57\xfe\x0c\x34\xda\
\xf4\xef\x1e\x7c\x36\xf8\x7f\xaf\x6a\x91\xeb\x1e\x2c\xb1\xb3\x37\
\x97\x2c\x96\x89\x73\x79\x79\x2c\x21\xdb\x9d\x88\x31\x20\x5c\xfd\
\xec\x01\xc9\xaa\x9f\xf0\xcd\xff\x00\x0d\xb6\x8f\xf8\xa5\xe1\xc7\
\x6f\xf4\xbb\x9c\x7f\xe8\xda\xfc\xeb\x0b\x9b\xe0\x68\x61\xa9\xc6\
\xac\x1d\xd2\xb5\xfd\x9a\x7b\x6f\xaf\xb5\x57\xfb\x97\xa1\xf5\xb5\
\x30\xf3\x94\xdb\x4f\xf1\xff\x00\x80\x79\xe7\x91\xe3\x3f\xf9\xe1\
\xf1\xab\xff\x00\x06\x1a\x6d\x1f\x67\xf1\x9f\xfc\xf0\xf8\xd5\xff\
\x00\x83\x0d\x36\xbd\x13\xfe\x19\xbf\xe1\xbf\x4f\xf8\x45\xe1\x1f\
\xf6\xf7\x73\x9f\xfd\x1b\x5e\x4d\xf1\x13\xc0\xfe\x0a\xf0\x9f\x8a\
\x26\xd3\x6d\x34\x8f\x87\xd6\xd0\xa4\x51\xc8\x23\xd7\xbc\x53\x77\
\x69\x75\x96\x5c\x92\xd1\x82\x70\x3d\x0e\x79\x15\xec\xe0\xb3\x0c\
\x1e\x3e\xa7\xb2\xa1\x0d\x6d\x7f\xe1\x25\xff\x00\xb9\xce\x6a\x94\
\x67\x49\x5e\x5f\x9f\xff\x00\x6a\x6a\xfd\x9f\xc6\x7f\xf3\xc3\xe3\
\x57\xfe\x0c\x34\xda\x3e\xcf\xe3\x3f\xf9\xe1\xf1\xab\xff\x00\x03\
\xf4\xd3\x5c\x2f\xf6\x5f\x84\xbf\xe8\x1f\xf0\x97\xff\x00\x0b\x5b\
\xca\x0e\x97\xe1\x2e\x7f\xe2\x5f\xf0\x97\xd3\xfe\x47\x5b\xca\xf6\
\xfe\xaf\x1f\xf9\xf6\xbf\xf0\x5c\x7f\xf9\x79\xcf\xcd\xe7\xf8\xff\
\x00\xf6\xa7\xd3\xff\x00\x0d\x56\xfd\x3c\x1f\x68\x35\x25\xd7\x92\
\xef\xcc\x93\x72\xf8\x96\x58\xa4\xbe\xc6\xe3\x8d\xed\x17\xc8\x46\
\x31\x8c\x76\xc6\x6a\x9f\xc6\x4d\x52\xf3\x45\xf8\x6d\xac\x5e\x58\
\x5c\xc9\x69\x75\x19\xb7\xd9\x34\x2d\xb5\x86\x6e\x62\x53\xcf\xb8\
\x24\x1f\x62\x6a\x3f\x83\x30\xd9\x41\xf0\xfe\xc5\x34\xf8\xf4\x68\
\xad\x7c\xe9\xca\xae\x81\x7c\xf7\xb6\x99\x32\x1c\xed\x95\xfe\x66\
\x39\xeb\x9e\x87\x35\x0f\xc7\x7f\xf9\x25\x3a\xe7\xd6\xdb\xff\x00\
\x4a\xa1\xaf\xcb\xa9\xc1\x7f\x6e\x42\x12\x5a\x7b\x54\xad\x6b\x7d\
\xbe\xd7\x7f\x75\xdf\xab\x3d\xbd\xb0\xd7\x5d\xbf\x43\x47\xfb\x4a\
\xef\xfe\x17\x72\xe9\xbf\x69\x93\xfb\x3f\xfb\x00\xcf\xf6\x6d\xc7\
\xcb\xf3\x3e\xdb\xb7\x7e\x3f\xbd\xb7\x8c\xfa\x71\x5c\x46\x8f\xe2\
\x6d\x5a\x6f\x85\xfe\x02\xbd\x7d\x4a\xe5\xae\xee\xfc\x49\x6f\x6d\
\x71\x3f\x9a\x77\xc9\x11\xbd\x99\x0a\x13\x8e\x57\x68\x03\x1e\x82\
\xba\xf6\xff\x00\x93\x82\x1f\xf6\x2d\x9f\xfd\x2f\xaf\x3d\xd1\x3f\
\xe4\x8f\xfc\x37\xff\x00\xb1\xae\xd7\xff\x00\x4e\x13\xd7\xa9\x86\
\xa7\x4d\xd3\xa7\x78\xad\xe9\x7e\x31\xab\x7f\xc9\x7d\xc7\x34\x9b\
\xbb\xd7\xbf\xe6\x8e\xe2\x2d\x62\xff\x00\xfe\x13\x3f\x88\x96\xe6\
\xf2\x63\x05\x8e\x95\x65\x35\xb4\x41\xf8\x81\xda\x0b\x92\xcc\x9e\
\x84\x95\x52\x7d\xc5\x65\x69\x1a\xfe\xa5\x2e\x95\xf0\x59\xda\xfe\
\x72\xfa\xa4\x40\xdf\x31\x7f\xf8\xf9\x3f\xd9\x8f\x26\x5f\xfb\xdf\
\x38\x0d\xcf\x71\x56\x6d\xff\x00\xe4\x7e\xf8\xa3\xff\x00\x60\x6d\
\x3f\xff\x00\x44\x5d\x56\x36\x87\xff\x00\x20\x7f\x80\x5f\xf5\xc4\
\x7f\xe9\xa6\x4a\x71\xa7\x0b\x3f\x75\x7c\x11\xff\x00\xd4\x69\x3f\
\xcf\x5f\x52\xae\xee\x95\xfa\xff\x00\xed\xe7\x67\xe0\x4d\x4a\xea\
\xff\x00\xc4\x1e\x3d\x86\xe2\xe6\x59\xe2\xb3\xd7\x7c\x8b\x74\x91\
\xb2\xb0\xc7\xf6\x5b\x77\xda\xa3\xb0\xdc\xec\x7e\xac\x6b\xb2\x5e\
\x5c\x67\xa6\x6b\x84\xf8\x73\xff\x00\x23\x37\xc4\x8f\xfb\x18\xbf\
\xf6\xce\xd6\xbb\xb5\xfb\xcb\xf5\xaf\x9a\xc7\xc5\x47\x12\x92\x56\
\xf7\x61\xff\x00\xa4\x44\xec\xa7\xac\x3e\xff\x00\xcc\xf7\xe8\x55\
\x51\x72\xbf\xbb\x24\x0e\x50\x00\x7f\x4c\x51\x4b\x1f\xdd\x1f\x41\
\x45\x7f\x7a\x2d\x8f\xc0\x6e\xce\xb1\xbe\xe9\xfa\x57\xca\x8f\xf7\
\x9b\xeb\x5f\x55\xb7\xdd\x3f\x4a\xf9\x51\xfe\xf3\x7d\x6b\xf9\xf3\
\xc5\xad\xb0\x5f\xf7\x13\xff\x00\x6c\x3f\x42\xe1\x2f\xf9\x7f\xff\
\x00\x6e\xff\x00\xed\xc7\x95\xfe\xd3\xde\x1b\x1e\x26\xf8\x1b\xe2\
\x98\x7c\xbf\x32\x4b\x48\x56\xfe\x31\x8c\xf3\x13\x86\x3f\xf8\xee\
\xe1\xf8\xd7\x11\xf0\xeb\xf6\x92\xf0\xc7\x83\xfe\x01\xf8\x5a\xfb\
\x5f\xd4\x0c\xba\xac\x70\x9d\x38\x58\xdb\xe1\xee\x25\x68\x5b\xcb\
\x0d\x8c\xfc\xab\xb4\x21\x2c\x71\xd4\xe3\x26\xbe\x81\xd4\xb4\xd8\
\xb5\x9d\x36\xf2\xc2\x70\x0c\x37\x50\xbd\xbc\x99\xe9\xb5\xd4\xa9\
\xcf\xb7\x35\xf9\xe1\xac\x7e\xcd\x3e\x26\xf0\xaf\xc3\x1d\x63\xc5\
\x9a\xe0\x1a\x64\x16\x72\x47\x0c\x56\x2e\xbf\xbf\x9e\x33\x28\x8c\
\xc8\xc3\x3f\x22\xf3\x90\x1b\x24\xe7\x38\x19\xe7\xe4\xf8\x56\x8e\
\x5b\x9b\x65\xff\x00\xd9\xb9\x9d\x5e\x45\x1a\xb1\x94\x75\xd6\x4e\
\x69\xc7\x95\x79\x5d\x26\xed\xf8\x6e\xbe\xa3\x1b\x2a\xd4\x2a\x7b\
\x5a\x4a\xfa\x3b\xfc\xb5\xb9\xfa\x30\xac\xb2\x28\x64\x20\xa3\x72\
\xa4\x1e\x0e\x79\x18\xfe\x75\xe1\x7f\x16\xb5\x4b\x8b\x5f\x1a\xdc\
\x46\x9a\xb5\xf5\x9a\x08\x62\x3e\x4d\xbf\x80\x86\xae\x83\xe5\xeb\
\xf6\x9c\x1d\xc4\xff\x00\x77\xf8\x7a\x57\x7d\xf0\x53\xc4\x47\xc5\
\x5f\x08\xfc\x21\xa9\xc8\xc0\xcb\x3e\x9d\x14\x4e\x73\x8c\xc9\x18\
\x31\xb8\xf7\xe6\x33\xc5\x71\x5f\x14\xf4\xdd\x66\xeb\xc6\x57\x12\
\x59\x69\xbe\x2c\xb9\xb7\xf2\x62\x02\x4d\x23\xc6\x50\xe9\xb6\xe7\
\xe5\xe8\x20\x76\xc8\x3d\x89\xee\x79\x15\xf3\x19\x45\x05\x83\xcc\
\xea\xd0\xad\x64\xe1\xcd\x17\x7e\x4d\xd4\x92\xff\x00\x97\x89\xc7\
\xa7\xaf\xe2\x75\x57\x97\xb4\xa3\x19\x47\xad\xbb\xfe\x87\x07\xfd\
\xb9\x77\xff\x00\x43\x06\xa9\xff\x00\x86\xa4\x7f\xf1\x34\xbf\xdb\
\x97\x5d\xf5\xfd\x54\xff\x00\xdd\x29\x1f\xfc\x4d\x5d\x1a\x2f\x89\
\x3f\xe8\x0d\xf1\x03\xff\x00\x0e\x45\xb7\xff\x00\x15\x43\x68\xbe\
\x23\xef\xa3\x7c\x40\x3f\xf7\x51\xed\xbf\xf8\xba\xfb\xde\x7c\x3f\
\xf3\x43\xef\xc2\xff\x00\xf2\x27\x99\xcb\x2e\xcf\xff\x00\x26\xff\
\x00\x33\xda\xbe\x14\xdc\x3d\xcf\x81\xec\xa4\x92\xe6\x5b\xc7\x32\
\x4b\x99\xa6\xd1\xbf\xb2\x5d\xb0\xe4\x73\x6d\x8f\x97\xeb\xdf\xad\
\x50\xf8\xef\xff\x00\x24\xa7\x5d\xfa\xdb\x7f\xe9\x54\x35\xa7\xf0\
\xc6\xde\xea\xdb\xc1\xb6\x91\xde\x41\xa9\x5b\x5c\x09\x65\x26\x3d\
\x5b\x54\x4d\x46\xe0\x0d\xe7\x05\xa7\x4e\x18\x1e\xa0\x76\x07\x06\
\xba\x3b\xed\x3e\xd7\x54\xb3\x92\xd6\xf2\xda\x3b\xbb\x79\x31\xba\
\x19\x90\x3a\xb6\x08\x23\x83\xc6\x72\x01\xfc\x2b\xf2\xca\x95\xe1\
\x87\xcd\xbe\xb1\x6b\xa8\xd4\xe6\xd2\xdd\x25\x7d\x39\x7d\xdf\xbb\
\x4e\xda\x1e\xd2\x8b\x95\x0e\x5e\xe8\xe4\x9b\xfe\x4e\x08\x7f\xd8\
\xb4\x4f\xfe\x4f\xd7\x9e\xe8\x7f\xf2\x47\xfe\x1b\x9e\xdf\xf0\x95\
\xda\xff\x00\xe9\xc2\x7a\xf4\x76\xf8\x8b\xe0\x95\xd7\x8c\xe7\x58\
\xb0\x1a\x82\x9f\xb0\x1d\x43\x6b\x79\x41\xb7\xe7\xc9\xfb\x46\xdf\
\x2f\x3b\xff\x00\x87\x77\x5e\xd9\xe2\xb4\x35\xc6\xf0\xcf\x84\xf4\
\x5b\x73\xaa\x26\x9f\xa6\xe9\xb6\xf7\x0a\xf6\xf1\xcb\x1a\x85\x59\
\xf7\x16\x5f\x2d\x07\x26\x4d\xc5\x88\xda\x33\xc9\xf7\xaf\x52\x15\
\xab\xe1\xfd\x95\x2a\x94\x26\xa4\xf9\x1a\x4d\x35\xcd\xca\xa6\xb4\
\xd3\x5b\xf3\xab\x58\xe7\xe5\x8c\xb9\xa4\xa4\xba\xfc\xb6\xff\x00\
\x23\x94\xb7\xff\x00\x91\xfb\xe2\x97\xb6\x8d\xa7\xe7\xfe\xfc\x5d\
\x56\x36\x87\xc6\x8f\xf0\x0b\x3f\xf3\xc4\x7f\xe9\xa6\x4a\xf4\x3d\
\x07\x5e\xf0\xd7\x8a\x2e\xb5\x39\x34\xb9\xad\x6e\x6f\x8a\x24\x77\
\xf1\xb4\x26\x3b\x8d\xa0\x30\x41\x2a\x3a\x87\xc6\x0b\x01\x91\x8e\
\x4e\x2b\x3f\x54\xf1\x87\x81\xfc\x3b\x77\x6f\x61\x77\x75\xa7\xdb\
\xc9\xa2\xe0\x47\x1c\x70\x33\xae\x9a\x0c\x65\x41\x2c\x8a\x56\x0f\
\x90\x91\xf3\x15\xf9\x49\xed\x55\x1a\x98\x89\x55\x96\x1e\x38\x79\
\xb9\xa8\xa4\xe3\xca\xee\xad\x45\xd3\xbb\x5b\xee\xef\xe8\x3f\x71\
\x25\x37\x35\x6b\xff\x00\xed\xd7\x2b\x7c\x39\xff\x00\x91\x9b\xe2\
\x47\xfd\x8c\x5f\xfb\x67\x6b\x5d\xda\xfd\xe5\xfa\xd5\x3d\x3a\x0b\
\x10\x92\xdd\xd8\x25\xbe\xcb\xe6\x17\x32\x5c\x5a\xed\x2b\x70\xc5\
\x54\x09\x0b\x2f\x0c\x4a\xaa\x8d\xd9\x3c\x01\xe9\x57\x13\xef\x2f\
\xd6\xbe\x6f\x15\x5b\xdb\x57\xe6\x4a\xda\x45\x7d\xd1\x4b\xf4\x3b\
\x21\x1e\x58\xdb\xfa\xdc\xf7\xf8\xfe\xe8\xfa\x0a\x28\x4f\xba\x3e\
\x94\x57\xf7\xd2\xd8\xfe\x7e\x3a\xc6\xfb\xa7\xe9\x5f\x2a\x3f\xde\
\x6f\xad\x7d\x56\xdf\x75\xbe\x95\xf2\xa3\xfd\xe6\xfa\xd7\xf3\xe7\
\x8b\x5b\x60\xbf\xee\x27\xfe\xd8\x7e\x87\xc2\x5f\xf2\xff\x00\xfe\
\xdd\xff\x00\xdb\x84\xfc\x71\x5e\x2b\xfb\x54\xe8\x7e\x39\xf1\x07\
\xc3\xdb\x8b\x3f\x0a\xc3\x6b\x7b\xa5\xc8\x87\xfb\x52\xc1\x21\x2d\
\x77\x2c\x60\x86\x06\x1c\x9c\x60\x60\x12\xa0\x07\xe3\x82\x6b\xda\
\xa8\x1c\x73\xc8\x3e\xaa\x70\x45\x7e\x1b\x96\x63\xe5\x96\xe2\xe9\
\xe2\xe1\x05\x27\x07\x7b\x49\x5d\x7f\x5d\x9f\x47\xa9\xf7\xb5\xa9\
\x7b\x68\x38\x37\x6b\x9f\x25\x7e\xc4\x7f\x0e\xee\x25\x5d\x47\xc6\
\x3a\x97\x9a\xf1\x40\x5f\x4d\xd3\x21\x99\x9b\x6a\x12\x73\x70\xe1\
\x4f\xdd\xe7\x09\xc7\x39\x2f\x5d\x2f\xc7\x09\x3c\x32\xbf\x10\xee\
\xc6\xa8\xdf\x0c\xfe\xd9\xe4\x42\x4f\xfc\x25\x16\xd7\x52\x5f\x63\
\x6f\x05\x9a\x3f\x97\x6f\xa0\xf4\xaf\xa3\x63\x86\x38\x54\xac\x71\
\xa4\x6a\x58\xb1\x58\xd4\x2f\x24\xe4\x9e\x3d\xc9\x35\xc6\xf8\xa3\
\xc0\xda\xf6\xbd\xac\xc9\x79\xa7\xf8\xef\x52\xd0\x2d\x59\x15\x45\
\x8d\xad\x85\xa4\xc8\xa4\x0c\x16\xdd\x22\x16\x24\xf5\xe4\xf1\x5f\
\x61\xfe\xb1\xfd\x7f\x39\xab\x99\x62\x25\xec\xd4\x95\x92\xe6\x6a\
\xcb\x4b\x2b\xa8\x4d\xf9\xbf\x75\x26\xdd\xee\x8f\x3f\xea\x9e\xcf\
\x0e\xa9\x47\x5b\x7f\x5d\x5a\x3e\x66\xf3\xbc\x11\xfd\xef\x82\x7f\
\xf8\x05\x7d\x49\xe7\x78\x20\x11\xf3\xfc\x14\xff\x00\xc0\x2b\xfa\
\xfa\x0f\xfe\x15\x6f\x8a\xff\x00\xe8\xaa\x6b\x5f\xf8\x2a\xb0\xff\
\x00\xe3\x54\xa3\xe1\x6f\x8b\x3b\x7c\x54\xd6\xff\x00\xf0\x55\x61\
\xff\x00\xc6\xab\xdf\xff\x00\x58\x30\x9f\xf4\x11\xff\x00\x93\xd4\
\xff\x00\xe6\x73\x97\xea\xb5\x3f\x97\xf0\x5f\xfc\x91\xa1\xf0\x4d\
\xb4\xf6\xf8\x73\xa7\x9d\x30\xe8\x26\xcf\xce\x9c\x2f\xfc\x23\x31\
\xc8\x96\x39\xf3\x0e\x76\x09\x3e\x60\x73\xd7\x3d\xeb\x47\xe2\xa3\
\x5c\x27\xc3\xbd\x7d\xad\x8c\xca\x7e\xcf\xfb\xe3\x6d\x9f\x34\x41\
\xbd\x7c\xf2\x98\xe7\x77\x93\xe6\x63\x1c\xe7\x18\xe6\xb5\x7c\x31\
\xa3\xde\xe8\x5a\x2c\x36\x57\xfa\xbc\xfa\xed\xcc\x6c\xc4\xdf\x5c\
\x43\x1c\x4e\xe0\x9c\x81\xb6\x30\x14\x01\xd0\x60\x74\xad\x65\x25\
\x48\x39\xda\x7b\x1e\x6b\xf3\x5a\x98\xb8\xc3\x33\x78\xb8\xae\x74\
\xa7\xcd\xad\xda\x76\x77\xd6\xe9\x3d\x7a\xe8\x9f\x92\x3d\x75\x4d\
\xba\x3e\xcf\x6d\x2c\x7a\x34\x6c\x3f\xb0\xc4\x90\xe8\x76\x27\xe1\
\x70\x51\xa5\xad\x88\xd4\xed\x7f\xb1\xce\x86\x63\x12\x7f\x69\xe0\
\xc7\xd9\x73\x1f\x93\xbb\xee\xfc\xdd\x4e\x6b\xe7\xef\xd9\xd6\xf2\
\x6b\x3f\x14\x58\xdc\x8d\x2d\xb5\xcd\x72\x3f\x0e\xa9\xf0\x8d\x9e\
\xa1\x74\xb6\xf2\x49\x6e\xd7\xd2\xa5\xc3\xa4\x92\x02\x04\xeb\x69\
\xf6\x37\x3c\x6f\x28\x08\xe0\x16\x22\xff\x00\xfc\x2a\xbf\x0a\x34\
\x84\x9d\x1a\x2f\x27\xcd\xf3\xfe\xc2\x66\x97\xec\x7e\x66\x73\xb8\
\xdb\x6e\xf2\x49\xce\x4f\xdc\xeb\xcf\x26\xb6\xb5\xef\x0d\xe9\x7e\
\x28\xb3\x5b\x6d\x56\xce\x3b\xd8\xa3\x90\x4d\x19\x7d\xc1\xe2\x90\
\x0e\x1d\x1d\x48\x65\x61\x93\xca\x90\x7e\x95\xfb\x4e\x2f\xc4\x9c\
\x1d\x6c\x5e\x16\xb4\x30\xce\xd4\xdb\x72\xbd\xae\xaf\x17\x1f\x77\
\xef\xbd\xdd\xaf\x6b\x59\x6e\x7c\x85\x1e\x1e\xa9\x4e\x8d\x5a\x6e\
\xa7\xc5\x6b\x6f\x6d\x1d\xf5\x2d\xfc\x78\x87\x51\xb1\xd5\x3c\x1b\
\x63\x2c\x72\x4f\xe3\x8b\x1d\x46\x6b\x4d\x2f\x56\xbc\xb9\x86\x4b\
\xcd\x5b\x4b\xfb\x13\xbd\xc4\xf3\xa4\x68\x82\x28\xfc\xf3\x0a\xec\
\x61\xb7\x7a\xa1\x1f\x7b\x15\xd0\x7c\x13\x9b\x54\x8f\xe1\x07\x86\
\x66\xf0\x3e\x8f\x35\xe5\x86\x9f\xa5\xc3\x72\xf1\x41\xa9\xc3\x07\
\xf6\xde\xa7\x22\xb4\x77\xf6\xf7\xbe\x64\x6c\xf1\x49\x14\xdb\xa4\
\x67\x24\x16\x7f\x97\xee\x8c\x57\x1b\xa0\xf8\x3f\x48\xf0\xdc\xd7\
\x33\xd8\x59\xec\xbb\xba\x0a\xb3\xdd\xdc\x4d\x25\xc4\xf2\xa8\x3c\
\x2b\xcb\x23\x33\x95\x07\x90\x33\x81\x9c\xd5\x2d\x53\xe1\xbf\x86\
\xf5\x8b\xdb\xab\xbb\x9d\x34\x89\x6f\x70\x6e\xc4\x37\x13\x41\x1d\
\xd1\xc6\x33\x32\x46\xe1\x65\xe0\x63\x2e\xa4\xe3\x83\x91\x4e\x8f\
\x89\x18\x28\xe6\x75\x71\x52\xc3\x4b\x92\x51\x8c\x53\x56\xe7\xf7\
\x5c\x9e\xbd\x2c\xf9\xad\xbe\x96\x5b\xde\xc8\x9f\x0f\xd5\x96\x16\
\x14\x7d\xa6\xa9\xb7\xd6\xda\xdb\xfc\x89\x3c\x2e\xba\x6a\x6b\x5e\
\x35\x4f\x0f\x18\xcf\x85\x17\xc4\x37\x43\x48\xfb\x3e\x3c\x8f\x2f\
\x6c\x7e\x70\x87\x1c\x08\x85\xc7\x9e\x14\x2f\xcb\xd7\x1c\x62\xba\
\x25\xfb\xcb\xf5\xa8\x6d\x6d\x20\xb0\xb5\x8a\xda\xda\x18\xed\xad\
\xe1\x50\x91\xc3\x12\x2a\x24\x6a\x06\x02\xa8\x03\x00\x0e\xc2\xa6\
\x8f\xfd\x62\xfd\x6b\xf1\x4c\xd3\x1c\xb3\x2c\xc6\xb6\x35\x47\x97\
\xda\x49\xbb\x76\xb9\xf5\xf8\x7a\x2f\x0f\x46\x34\x9b\xbf\x2a\x4a\
\xe7\xd0\x0b\xf7\x57\xe9\x45\x39\x7e\xe8\xfa\x51\x5f\xde\x2b\x63\
\xf0\x23\xa9\x6c\x90\x70\x32\x70\x71\xfe\x15\xf3\x57\x89\xbc\x2b\
\xab\xf8\x57\x49\xd4\x75\x3b\xbd\x3e\xe2\x5b\x7b\x18\x9a\x69\x63\
\xb2\x5f\x3e\x62\x8b\xc9\x29\x1a\xe5\x98\xe3\x9c\x01\x9e\x2b\xe9\
\x6a\xa5\xaa\x5a\x24\xf0\xb4\x83\xe4\x91\x3e\x60\xc3\xd8\x67\xfa\
\x57\xc8\x71\x07\x0b\xe0\x78\x93\xd9\x7d\x72\x52\x5e\xce\xf6\xe5\
\x69\x7c\x56\xbd\xee\x9f\x64\x7b\x19\x7e\x69\x5b\x2d\xe6\xf6\x49\
\x3e\x6b\x5e\xfe\x5f\x3f\x33\xf3\xe4\x7e\xd7\xbf\x0b\x70\x31\xae\
\xdd\x63\xdb\x4d\x9f\x1f\xfa\x0d\x2f\xfc\x35\xf7\xc2\xdf\xfa\x0e\
\xdd\x7f\xe0\xb6\x7f\xfe\x26\xbb\x8f\x8d\x9f\xf0\x4f\x1f\x0a\xfc\
\x56\xf1\x9c\xfe\x26\xd1\x75\xc9\x3c\x19\x2d\xf1\x67\xbd\xb3\xb5\
\xb2\x59\xe0\x96\x6c\xe4\xca\xab\xbd\x7c\xb2\x73\xf3\x01\xc1\x3c\
\xf1\xcd\x70\x1f\xf0\xea\xdb\x3f\xfa\x29\x57\x5f\xf8\x26\x4f\xfe\
\x3d\x5f\x15\xff\x00\x10\xbf\x27\xfe\x7a\x9f\xf8\x14\x7f\xf9\x03\
\xea\xe3\xc4\xd7\x4a\xf6\x4f\xd1\x93\xff\x00\xc3\x5f\x7c\x2d\xff\
\x00\xa0\xed\xd7\xfe\x0b\x67\xff\x00\xe2\x68\xff\x00\x86\xbd\xf8\
\x5b\xff\x00\x41\xdb\xaf\xfc\x16\xcf\xff\x00\xc4\xd5\x65\xff\x00\
\x82\x57\x59\xfc\xdf\xf1\x72\xae\x78\x52\x7f\xe4\x0c\x9f\xfc\x7a\
\xbd\x37\xe1\xbf\xfc\x13\xc3\xe1\x8f\x80\x24\x82\x7d\x75\x2e\xbc\
\x71\xa8\xaa\x87\xdd\xaa\x37\x97\x6a\x3d\x85\xbc\x64\x03\xff\x00\
\x02\x2c\x28\x5e\x17\xe4\xff\x00\xcf\x53\xff\x00\x02\x8f\xff\x00\
\x20\x39\x71\x37\x2a\xba\xb3\xf9\x3f\xf3\x3c\xdf\xfe\x1b\x13\xe1\
\x46\x70\x7c\x45\x37\xfe\x00\x4d\xfe\x14\xf1\xfb\x61\xfc\x27\xff\
\x00\xa1\x96\x41\xff\x00\x6e\x13\x7f\xf1\x35\xf5\xfc\x3e\x01\xf0\
\xb5\xa4\x31\xc3\x0f\x86\x34\x48\xe2\x8d\x42\xa2\x2e\x9b\x00\x0a\
\x07\x40\x06\xca\x77\xfc\x21\x3e\x1a\xff\x00\xa1\x6f\x46\xff\x00\
\xc1\x6c\x1f\xfc\x45\x5f\xfc\x42\xec\x97\xfe\x7e\x54\xff\x00\xc0\
\xa3\xff\x00\xc8\x1c\x9f\xeb\x5d\x5f\xf9\xf6\x8f\x8f\xbf\xe1\xb0\
\xfe\x13\xff\x00\xd0\xcb\x27\xfe\x00\x4d\xff\x00\xc4\xd0\x7f\x6c\
\x3f\x84\xff\x00\xf4\x32\xc8\x7f\xed\xc2\x6f\xfe\x26\xbe\xc1\xff\
\x00\x84\x1f\xc3\x5f\xf4\x2e\x68\xff\x00\xf8\x2d\x83\xff\x00\x88\
\xa3\xfe\x10\x9f\x0d\x7f\xd0\xb7\xa3\x7f\xe0\xb6\x0f\xfe\x22\x8f\
\xf8\x85\xd9\x2f\xfc\xfc\xa9\xff\x00\x81\x47\xff\x00\x90\x0f\xf5\
\xaa\xaf\xfc\xfb\x47\xc7\x7f\xf0\xd8\x9f\x0a\x33\x81\xe2\x29\xbf\
\xf0\x02\x6f\xf0\xa9\x3f\xe1\xaf\x7e\x16\xff\x00\xd0\x76\xeb\xff\
\x00\x05\xb3\xff\x00\xf1\x35\xf5\xfc\xde\x01\xf0\xb5\xdc\x12\xc3\
\x37\x86\x34\x49\x61\x91\x4a\xbc\x6f\xa6\x40\x43\x03\xd4\x11\xb3\
\x9a\xf0\x4f\x89\x1f\xf0\x4f\x0f\x86\x3e\x3e\x79\xa6\xd0\x92\xeb\
\xc0\xfa\x8b\x02\xfb\xb4\xb6\xf3\x2d\x4f\xb7\xd9\xe4\x24\x0f\xf8\
\x09\x51\xed\x49\xf8\x5d\x93\x5a\xea\xa5\x4f\xfc\x0a\x3f\xfc\x81\
\xa4\x38\xa6\x72\x76\x94\x12\xfe\xbd\x4f\x3b\xff\x00\x86\xbe\xf8\
\x5b\xff\x00\x41\xdb\xaf\xfc\x16\xcf\xff\x00\xc4\xd1\xff\x00\x0d\
\x7d\xf0\xb7\xfe\x83\xb7\x5f\xf8\x2d\x9f\xff\x00\x89\xaa\xc7\xfe\
\x09\x5d\x67\xc7\xfc\x5c\xab\x9e\x46\x7f\xe4\x0a\x9f\x97\xfa\xea\
\x5f\xf8\x75\x6d\x97\xfd\x14\xab\xaf\xfc\x13\x27\xff\x00\x1e\xa8\
\xff\x00\x88\x5f\x93\xff\x00\x3d\x4f\xfc\x0a\x3f\xfc\x81\xd3\xfe\
\xb2\xa5\xd5\x7d\xcc\x9d\xbf\x6b\xef\x85\x81\x49\x3a\xed\xd0\x18\
\x3c\x9d\x36\x7e\x38\xff\x00\x76\xbd\xef\xc3\x7e\x19\xd5\x7c\x51\
\xa1\xe9\xfa\xbd\xa5\x8c\xd1\x5a\x5e\xc2\xb3\xc4\xb7\x8b\xe4\x4c\
\x11\x86\xe5\xdd\x1b\xe1\x94\x91\x83\x82\x33\xc8\xaf\x36\xf8\x35\
\xff\x00\x04\xe9\xf0\xc7\xc3\x4f\x1c\x5a\xf8\x93\x5a\xf1\x04\xbe\
\x30\xfb\x06\x25\xb4\xb0\xb9\xb0\x58\x21\x49\xc1\x05\x65\x71\xbd\
\xb7\xed\xc6\x42\x9e\x33\x82\x73\x8c\x57\xd6\xe8\xdb\xd4\x37\x39\
\x61\xbb\x93\x9e\xe4\x7f\x4a\xd6\x1e\x17\xe4\xbb\xba\x95\x3f\xf0\
\x28\xff\x00\xf2\x07\x15\x7e\x27\xae\x9a\xf6\x2a\x2f\xe4\xff\x00\
\xcc\x7a\xaf\xaf\x5f\x61\x45\x39\x45\x15\xfb\x09\xf0\xc7\xff\xd9\
\
\x00\x00\x1f\xc1\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x80\x00\x00\x00\x80\x08\x06\x00\x00\x00\xc3\x3e\x61\xcb\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x03\xb1\x00\x00\x03\xb1\
\x01\xf5\x83\xed\x49\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x1f\x3e\x49\x44\
\x41\x54\x78\x9c\xed\x9d\x77\x78\x54\x55\xfa\xc7\x3f\x93\x4e\x12\
\x52\x08\x24\x21\x85\x92\x42\x87\x84\x00\x02\xa2\x54\x11\x04\x29\
\x52\x15\x50\x84\x55\xc4\x42\x51\x2c\x6b\x47\x71\x7f\xeb\xba\x2e\
\x62\x5b\x04\x5d\x41\xa5\x88\x14\x29\x41\x10\x44\x40\xc0\x04\x08\
\x04\x02\x84\x12\x08\x09\x24\x90\x42\x0a\x69\xa4\xe7\xf7\xc7\x25\
\x93\x5b\x66\x32\xf7\x4e\x26\x09\xc1\x7c\x9f\x67\x9e\xe4\xbe\x73\
\xce\xbd\x67\xee\x79\xcf\x7b\xce\x79\xdb\x81\x46\x34\xa2\x11\x8d\
\x10\xc3\x1e\x98\x0b\x44\x02\x79\x40\x45\xe3\xa7\xda\x4f\xde\xed\
\x77\xf5\x02\x60\x67\xc6\xfb\xbe\xa3\xe0\x0b\x9c\xa0\xfe\x5f\x6a\
\x43\xfd\x44\xdf\x7e\x87\x0d\x12\xf6\x34\x76\xbe\x25\x3e\xc7\x69\
\xa0\x92\x60\x2e\xf5\xff\xf2\xee\x96\xcf\xf3\x1a\xdf\x7d\xbd\x41\
\x27\xfa\xff\x30\x70\x4f\xe5\x45\x97\x90\x30\xa6\x4c\x9f\x85\x9b\
\x7b\xb3\xba\x6f\x55\x03\x42\x76\x56\x06\xab\x57\x2e\xe7\x4c\x4c\
\xb4\x98\x1c\x09\xf4\xad\xa7\x26\x69\x82\x95\xe8\xff\x4e\xe2\x2f\
\xa6\x4c\x7f\xba\xb1\xf3\x55\xc0\xcd\xdd\x83\x29\xd3\x67\xc9\xc9\
\x9d\xeb\xa3\x2d\xe6\xc0\xca\x74\x91\x46\xdc\xcd\x10\x33\xc0\x19\
\xf1\x17\xab\x57\x2e\x27\x3b\x2b\xa3\x8e\x9b\xd3\xf0\x90\x95\x99\
\xc1\xea\x95\xcb\xe4\xe4\xd3\xf5\xd1\x96\x9a\x62\x0e\xf5\xbf\x78\
\xba\x5b\x3e\xcf\x69\x7c\xf7\x77\x04\xec\x11\xf6\xb1\xf5\xfd\xf2\
\x1a\xfa\xe7\x18\x0d\x74\x1b\x08\x82\x12\xa3\x91\x09\x6a\xd6\xf9\
\x3e\x9a\xdf\xfa\x1d\x06\x3b\x04\xb5\xe6\x9f\x40\x2e\xf5\xff\x52\
\xef\xf4\x4f\xee\xed\x77\xf5\x1c\x0d\x70\xe4\xeb\x4c\x17\xd1\xa3\
\x42\x7c\x71\x2d\xf5\xaf\xb5\x40\xf4\xf1\xf2\x90\x93\xb4\xbc\xbb\
\x3b\x16\x8d\xdb\xc0\xbf\x38\x1a\x19\xe0\x2f\x8e\x46\x06\x68\x38\
\x68\x0a\x84\x62\xe1\x3e\x6b\x64\x80\x86\x83\xf9\x08\x3b\xb4\x6c\
\x60\x37\xf0\x1a\xd0\x83\x1a\xf6\x61\xe3\x22\x50\x25\xea\x70\x11\
\xe8\x0f\x24\x21\x7d\xdf\x2e\xc0\x65\xc0\x90\x71\x26\x13\x38\x00\
\xec\x05\xf6\x03\x31\x40\xb9\xda\x87\xd9\x98\xdd\xcc\x46\xd4\x14\
\xfe\x08\x06\xb8\xce\xb2\xbf\x2e\xc0\xb3\xc0\x57\xa2\xb2\x73\x31\
\xdc\xf9\xdc\xa6\x8f\xb9\xfd\x81\x2a\x86\xf8\x14\x81\x29\xaa\x45\
\xa3\x04\x50\x89\x1a\x48\x00\x77\x94\x9d\x1c\x02\xb4\xa8\xa6\x4e\
\xfe\xed\x32\x97\xa8\x7e\xf4\x57\x87\xa1\xc0\x6f\x1a\xeb\x54\x8b\
\xfa\x56\xb8\xdc\x69\x9f\xea\x30\x0b\x88\x40\x98\xaf\xcd\xbd\xff\
\x6f\x08\x4c\xf6\x96\x98\xee\xea\xe4\x50\xf1\xca\x63\xf7\x55\x0c\
\x0a\x6b\x5b\xe1\xea\xe4\x60\xac\x6e\x3a\x2a\xa5\x7b\xe3\x14\x50\
\x3b\xb8\x01\xf4\xa9\xe1\x3d\x86\x00\x2f\x03\x2f\x8a\x89\x8f\x0f\
\x0b\xe1\x89\x61\xa1\x3c\x31\x2c\x94\xf2\x8a\x0a\x2e\x5c\xcd\xe0\
\xfd\x95\x7b\x39\x15\x9f\x2a\x2e\xf6\x33\x50\xaa\xe6\x21\x8d\x0c\
\x50\x3b\x08\x07\x32\x00\xc5\xbc\x51\x09\xe7\x26\x76\x04\xf8\x34\
\x23\xc8\xb7\x19\x81\x3e\xcd\x08\xf4\x6d\xc6\x8a\x1d\xc7\x39\x1c\
\x9b\x24\x2e\xf6\x91\xf8\xc2\xd5\xc9\x81\xa9\x43\x43\xf4\xd7\x56\
\x3a\x1d\xc1\xbe\xcd\x48\x4a\xbf\x29\xbf\xfd\x7a\xb5\x0d\x6d\x64\
\x80\xda\x41\x31\xb0\x06\xc1\xc4\x0e\x80\x87\x8b\x23\x33\x47\x84\
\x11\xe4\xd7\x8c\x00\x9f\x66\x78\x37\x73\x56\x54\x6a\xdb\xd2\x8d\
\x71\x6f\xfd\x48\x7e\x61\xb1\xc1\x9b\x3e\x3e\x2c\x04\xe7\x26\x52\
\x73\xc3\xe1\xb3\x49\x64\xe5\x16\x8a\x49\x37\x50\xb1\xf8\xab\x84\
\xd9\x0c\xf0\x6c\x6c\x9e\xb9\x55\x1b\x24\x96\x76\x52\x76\x98\x09\
\x7c\x87\x88\x01\xb2\xf3\x0b\x19\xd1\xb7\x1d\xcd\x5d\x1d\x8d\x56\
\xf0\x69\xee\xc2\x4b\x93\xef\x65\xd1\x77\xfb\x14\xdf\x35\x75\xb4\
\x67\xca\x03\xdd\x14\xf4\x5d\x47\x2f\xca\x49\x9b\x50\x29\xfe\xa1\
\x51\x11\x54\x9b\x38\x86\xb0\x27\x07\xa0\xac\xac\x9c\x5f\x22\x2e\
\x98\xac\x34\x71\x60\x17\xfa\x75\x6d\xa5\xa0\x4f\x1f\x1e\x4a\x53\
\x47\x7b\x09\xad\xac\xac\x9c\xdf\x8f\x5f\x96\x17\x55\x2d\xfe\xa1\
\x91\x01\x6a\x1b\xdf\x8b\x2f\x7e\x3e\x78\xd6\x64\x05\x9d\x0e\xde\
\x79\x72\x10\x4e\x0e\x55\xa2\xde\xd8\xe8\x8f\x8c\x4d\x22\x2b\xf7\
\x96\x98\x74\x03\xd8\xa7\xa5\x81\x8d\x0c\x50\xbb\xf8\x01\x28\xa9\
\xbc\xb8\x98\x94\xc1\xd9\xc4\x74\x93\x95\x7c\x3c\x9a\xf2\xd2\xe4\
\x7b\xf5\xd7\x86\x46\x3f\xd4\x5c\xfc\x43\x23\x03\xd4\x36\xd2\x80\
\x9d\x62\xc2\x96\x83\xe7\x54\x55\xac\x9c\x0a\x8c\x8d\xfe\xb2\xb2\
\x72\xf6\x46\xd7\x4c\xfc\x43\x23\x03\xd4\x05\xbe\x13\x5f\x6c\x8f\
\x38\x4f\x49\x69\x99\xc9\x4a\x95\x53\xc1\xf3\x8f\xdc\x63\x70\xf4\
\x0b\xab\xff\x9a\x89\x7f\xd0\xc6\x00\x25\xe2\x8b\xf2\xd2\x12\x63\
\xe5\xee\x3a\x94\x97\x28\xb6\x65\x86\xf7\x69\x86\xb1\x0d\xa1\x73\
\x00\xc8\xce\x2b\x64\xff\xc9\x04\x55\x15\x7d\x3c\x9a\x4a\xf6\xfd\
\x62\xfc\x7a\x44\x21\xfe\x55\x2b\x7f\xc4\xd0\xc2\x00\x92\x7d\x5f\
\xc9\xad\x02\xad\xcf\x6a\xb0\x28\x29\xc8\x97\x93\x72\x35\x54\x2f\
\x06\xd6\x8a\x09\x5b\x55\x4e\x03\xc6\x60\x29\xf1\x0f\xda\x18\x40\
\xf2\xa3\x4b\xf2\xb5\xbc\x83\x86\x8d\x62\xe5\x6f\xd5\xaa\x04\x91\
\x4c\x03\x7f\xc4\x24\x92\x91\x63\xfe\x00\x32\x22\xfe\x55\x2b\x7f\
\xc4\xd0\xc2\x00\x99\xe2\x8b\xbc\xeb\x49\xc6\xca\xdd\x75\xc8\x4b\
\x49\x96\x93\xb4\x9a\x42\x8f\x01\xa7\x2a\x2f\xca\xca\xca\x09\xff\
\xf3\xbc\xd9\xed\x31\xb0\xfa\xdf\x8c\x19\xe2\x1f\xb4\x31\x80\xe4\
\xa9\xb9\xc9\x89\xe6\x3c\xaf\x41\x22\xe7\xaa\x42\xdc\x2a\x7a\x40\
\x05\x24\x52\x60\xb3\x99\xd3\x80\x11\xe5\xcf\x4f\x66\xdd\x0c\x6d\
\x0c\x10\x27\xbe\xc8\x8a\x37\xad\xd5\xba\x5b\x90\x7d\x39\x4e\x4e\
\x52\x10\x54\x60\x15\xa2\x51\x7a\x31\x29\x83\xd8\x84\x34\xcd\x37\
\xb1\xa4\xf8\x07\x6d\xb6\x00\x89\x1a\x2b\x35\xe6\xa8\xb9\xcf\x34\
\x89\x9b\x89\x97\x48\x3b\x15\x45\x6a\x4c\x14\x19\xe7\x4f\x51\x98\
\x95\x41\x51\x4e\x36\x45\x39\xd9\x00\xd8\xbb\xb8\x61\xef\xe2\x86\
\x83\xbb\x07\xcd\x3b\x74\xc3\xb3\x6b\x0f\x3c\xbb\xf6\xc4\xb5\x75\
\x60\xad\xb4\x27\xf5\xe4\x11\x39\xc9\x9c\xe1\x9b\x0a\xec\x00\x46\
\x55\x12\x26\x2f\x34\x7b\xe0\x8a\x61\xb6\xf8\x07\x6d\x0c\x70\x48\
\x7c\x91\x16\x73\x94\x8a\xb2\x32\x74\xd6\xd6\xe6\x3e\x5b\x82\xac\
\xf8\xf3\xc4\x85\xff\x44\x5c\xf8\x3a\x72\x92\x12\xaa\x2d\x5b\x5a\
\x78\x8b\xfc\xb4\xeb\x00\x5c\x3b\x7a\x50\x4f\x77\xf1\x6f\x4b\xf0\
\xc3\x93\x69\x37\x72\x12\x6e\x01\xed\x2c\xd2\xae\xf2\xb2\x52\xd2\
\x4e\x45\xc9\xc9\x87\x0c\x95\x55\x81\x0d\x88\x18\xc0\x42\xd8\x69\
\xba\x88\x71\x68\x75\x6c\xbc\x0a\xf8\x55\x5e\x8c\xfd\x61\x17\x2d\
\x7b\xdc\x5b\x4d\x71\xd3\x48\x3e\xbc\x9f\x23\x9f\x2f\x22\xe5\x78\
\x64\x8d\xee\x23\x47\xcb\xb0\xbe\xf4\x9a\xf3\x36\xbe\xbd\xfb\xd7\
\xe8\x3e\xd7\x8e\x1c\x60\xcb\x93\x0f\x89\x49\x57\x80\xd6\x66\xdc\
\xca\x06\xf8\x03\xcb\x67\x0e\x89\x00\xfa\x53\x07\x8b\x40\x90\x69\
\x9a\xe2\x77\x6f\x31\xe7\x99\x00\xa4\x9f\x3e\xce\xd6\x19\x23\xd9\
\x3a\x63\xa4\xc5\x3b\x1f\xe0\xfa\xf1\x08\xb6\xce\x18\xc1\xb6\x99\
\x0f\x93\x7e\x26\xda\x74\x05\x23\x30\xf0\x1b\xcd\x9d\x6f\x5f\xa7\
\x76\xd2\xc6\xf4\x05\xde\x30\xb7\xb2\x56\x09\x30\x06\x61\xce\x01\
\xc0\xd9\xdb\x8f\x69\xbb\xcf\x68\x9a\x06\xca\x4b\x4b\x88\x5a\xfa\
\x2f\xa2\x97\x7f\x4c\x79\x99\x61\xa6\x75\xb6\xb3\xe5\xfe\xd6\x3e\
\xf4\x6f\xe3\x4b\x17\x4f\x0f\x82\x3d\xdc\xf0\x74\x72\xc4\xd9\xce\
\x16\x80\xbc\xe2\x12\x52\xf3\x0b\x88\xcb\xc8\xe6\x4c\x5a\x06\xfb\
\x13\x92\x39\x98\x78\x8d\xbc\x62\xc3\xda\x49\x2b\x6b\x1b\xc2\x66\
\xbf\x4a\x8f\xd9\xaf\x62\x65\xad\x7e\xd6\x2b\x2f\x2b\x65\xd5\x03\
\x9d\xc8\x4f\xbd\x26\x26\x8f\x46\xd0\xee\x69\x41\x2f\x84\x69\xc3\
\xb6\x92\x30\xb0\x7b\x30\x9f\xcc\x79\x44\xe3\x6d\x04\xbc\xf4\xc5\
\xcf\xec\x3d\x2e\x59\x87\x96\x02\xf7\x02\x9a\x17\x66\x5a\x19\xc0\
\x0e\x48\x41\xf0\x74\x05\x60\xf8\xe7\x6b\x69\x3b\x44\xdd\xb4\x96\
\x73\xf5\x32\xbb\x16\x4c\x27\xfd\xf4\x71\x65\x43\x74\x30\x2c\xa8\
\x35\x33\xc3\x3a\x31\xaa\x7d\x00\x0e\x36\xda\xd6\x16\x85\xa5\x65\
\x6c\x39\x77\x89\x95\xd1\x67\xf9\xf5\x62\x22\x15\x06\xdc\x36\x3d\
\xbb\xf6\x64\xe8\xc7\x2b\x70\xf1\x6f\xab\xea\x9e\xf1\xbb\xb6\xf0\
\xeb\xfc\xa9\x62\x52\x26\xd0\x12\x6d\xaa\x60\x47\x84\xd4\x71\xed\
\x2b\x09\x1e\x2e\x4e\xac\x5f\x34\x03\xf7\xa6\xc6\x9d\x43\xaa\x43\
\x56\x6e\x01\x13\xdf\x59\x41\xc6\x4d\x89\x86\xf2\x02\x10\x86\xe0\
\x51\xac\x1a\x5a\x57\x70\x65\x40\x00\x42\x44\x0a\x00\x85\x59\x19\
\xb4\x1f\x33\xc5\x64\xc5\xf4\xd8\x13\x6c\x9d\x31\x92\x9b\x89\xca\
\x2d\xf4\xb8\x4e\x81\xac\x9d\xf8\x10\x0b\xfa\x85\xd1\xd9\xd3\x03\
\x1b\x2b\xed\x36\x2a\x1b\x2b\x2b\xba\x78\x7a\x30\x2d\xa4\x03\x63\
\x3a\x06\x90\x9a\x77\x8b\x73\x37\xb2\x24\x65\xf2\xd3\xae\x71\x69\
\xe7\x26\xfc\xfa\x0e\xc2\xb1\xb9\x97\xc9\x7b\xfe\xb1\xe8\x45\x72\
\x93\xaf\x88\x49\xdf\x03\x5a\xe7\xbd\x4f\x81\xe1\x95\x17\x3a\x1d\
\x7c\xf8\xec\x68\xda\xb7\x32\xfd\x7c\x63\x68\x62\x6f\x4b\x80\x4f\
\x73\x76\x1e\x8e\x15\x93\x3d\x10\x5c\xc7\xb7\x6b\xb9\x97\x39\x4b\
\xf8\xcb\x08\x81\x0b\x3a\x80\x9c\xa4\x04\xfc\xfb\x0e\xc6\xb9\xa5\
\x9f\xd1\x0a\xd7\x8e\x1c\x60\xfb\xac\xb1\x14\xdd\x94\x76\x48\x60\
\x33\x57\xd6\x4d\x7a\x88\xd7\xfb\xf7\xc2\xdb\xd9\xbc\xd1\x60\x08\
\xde\xce\x4e\x3c\xda\xb5\x1d\x7d\xfc\xbc\x89\xb8\x9a\x42\x56\x61\
\x91\xfe\xbb\x92\x82\x7c\x2e\xfe\xb2\x1e\xef\xd0\xde\x34\xf5\x55\
\x7a\xde\xe8\xdb\x7c\xf4\x20\x51\x5f\xfe\x9f\x98\x54\x01\x3c\x81\
\xc8\xb0\xa3\x02\x23\x80\xc5\x88\x24\xed\x84\x81\xa1\x4c\x7b\xb0\
\x97\x86\x5b\x18\x46\x2b\x2f\x77\x32\x72\x0a\x88\x4d\x48\x11\x93\
\x7b\x00\x51\x68\xd0\x53\x98\xc3\x00\xe9\x40\x4f\x40\xbf\xcf\xca\
\x4e\xb8\x40\xc7\x71\x4f\x18\x2e\x1c\x7b\x82\xf0\xbf\x8d\xa6\xa4\
\x40\xaa\x3e\x1f\xd7\x29\x90\xed\xd3\x46\xd3\xd9\xd3\xa8\xe3\x6c\
\x8d\x11\xe4\xe1\xc6\xcc\xb0\x4e\x5c\xce\xce\xe1\x74\x5a\x95\xf6\
\xb6\xac\xb8\x88\x4b\x3b\x37\xe2\x7f\xef\x60\x9c\xbc\x0c\x27\xf4\
\xf8\xed\xd5\x19\x72\x15\xf0\x56\xe0\x0b\x0d\x8f\x6f\x8e\xb0\x45\
\x6b\x5a\x49\xf0\xf3\x74\xe3\x3f\x2f\x8c\xc5\x56\xe3\xf4\x66\x0c\
\xbd\x3a\xb6\x62\x4f\xd4\x05\xb2\xf3\xf4\x8a\x21\x1d\x42\x40\xc8\
\xf7\xa8\x9c\x0a\xcc\x6d\xc9\x15\x60\x66\xe5\x45\xde\xf5\x24\xdc\
\x03\xda\xd3\x2c\x58\x92\x6a\x90\xbc\x94\x64\xb6\xcd\x18\x49\xe1\
\x4d\x89\x19\x81\xf7\x07\xf7\xe1\xcb\x87\x07\xe1\x60\x53\xfb\x4e\
\xc9\xf6\x36\xd6\x4c\xe8\x1c\x84\x0e\xd8\x97\x50\xd5\xa1\xe5\xa5\
\xa5\x5c\x39\xb0\x8b\xa0\x11\x13\xb0\x73\x6a\x2a\xa9\x13\xb7\x7d\
\x3d\x31\xdf\x7f\x29\x26\x55\x00\x33\x10\x62\xf6\xd4\x62\x0d\xc2\
\x40\x01\xc0\xc6\xda\x8a\x2f\x5e\x9c\x80\x6f\x73\x37\x33\x7e\x85\
\x61\xd8\x58\x5b\xd3\x35\xa0\x25\x5b\x0f\x9d\xa6\xbc\x6a\xd1\xe3\
\x04\x04\x03\x3f\xaa\xb9\x87\xb9\x0c\x70\x15\x21\xcc\x49\x9f\x10\
\xf1\x7a\xd4\x21\x3a\x3c\xf2\x38\x36\x4d\x04\x51\x5e\x56\x5c\xc4\
\xb6\x99\x0f\x2b\xe6\xfc\xcf\x47\x0e\xe0\x95\xfb\x7a\x50\xd7\x18\
\xd8\xd6\x0f\x8f\x26\x0e\xec\x88\xab\xb2\x61\x94\xe4\xe7\x72\xed\
\xe8\x41\xda\x8f\x9d\xaa\xdf\x1d\xdc\xca\xbc\xc1\x8e\xe7\x27\x52\
\x2a\x35\x77\xaf\x43\x98\xcb\xd5\xe2\x6f\xc0\xab\x62\xc2\xec\xb1\
\xfd\x18\xde\xbb\x93\x91\xe2\xe6\xc3\xd3\xbd\x29\x3a\x1d\x1c\x3d\
\x27\x59\xab\x74\x00\x92\x11\x16\x9f\xd5\xa2\x26\xb2\xe8\x08\x42\
\x08\x94\x2d\x40\xe9\xad\x02\x72\x93\x12\x08\x1c\x3e\x0e\x80\xa8\
\xff\x7e\xc8\xa5\x1d\x1b\x25\x15\x16\x0d\xe9\xcb\xcb\xfd\xc2\x6a\
\xf0\xc8\x9a\xa1\xb7\x9f\xb7\x42\x12\x14\xa4\xa7\xa0\xb3\xb6\xc1\
\xf7\x9e\xfb\xa1\xa2\x82\xdf\xdf\x98\x45\xfa\x69\x89\xde\x20\x1f\
\x18\x0b\xe4\xa8\x7c\x4c\x20\xc2\x56\x59\xef\xd5\xd9\x2d\xd0\x87\
\x85\x33\x1f\xc2\x4a\x57\x3b\x01\xc5\xa1\xc1\xbe\x44\x9e\x49\x20\
\x35\x4b\x62\xb6\x1e\x8c\xe0\x23\x90\x69\xb8\x96\x80\x9a\xb6\xe8\
\x35\xe0\x43\x31\xe1\xde\x57\xff\x89\x7f\xbf\x21\xac\x9f\x70\x9f\
\xc4\x93\xe6\xd1\xae\xed\x58\x3b\x71\xb8\xbc\x3e\x45\xa5\x65\x2c\
\x8b\x3a\xcd\x9a\x98\xf3\xfa\x79\x5a\x58\xcd\xb7\x67\x56\xcf\x2e\
\xd8\x99\xa9\x6a\x3e\x93\x96\x61\x74\x7d\x31\x69\xdd\x0e\xd6\x9f\
\xa9\x5a\x27\x59\xd9\xda\x31\x71\xe3\x21\xae\x1e\xd8\xcd\x9f\xff\
\x56\xe8\x54\x5e\x01\x3e\x56\xf9\x58\x85\xb6\xcf\xd1\xde\x8e\x75\
\xef\x3d\x89\x9f\xa7\x3a\xd1\xdf\x7d\xa6\x24\x18\x88\xe8\x6f\x5f\
\x35\x52\x52\x8a\xa4\xb4\x6c\x26\xbf\xbb\x92\x82\x22\xc9\x0e\xd5\
\xa4\x96\xb0\xa6\xab\x91\x08\x60\x00\xd0\xa6\x92\x90\x1c\xb9\x9f\
\xa4\x88\xdf\xb9\x95\x51\xe5\xfd\x1a\xd8\xcc\x95\xf0\x69\xa3\xb1\
\x97\x2d\x7e\x92\x73\xf2\x18\xb4\x62\x13\x2b\xa3\xcf\x92\x9c\x93\
\x47\x49\x59\x39\x25\x65\xe5\x24\xe7\xe4\xb1\x23\x2e\x91\xf0\xf3\
\x09\x8c\x6a\xdf\x16\x17\x7b\x6d\xc9\xb7\x7e\x3a\x1d\xc7\x83\xdf\
\x09\xfa\xaa\x01\x6d\x94\xe9\xfb\x1f\x0c\x6a\xcd\xba\xd3\x17\xc8\
\xbe\xbd\x3b\xa8\x28\x2f\x23\x39\x62\x2f\x97\x7e\xfd\x99\x8a\x0a\
\x49\x68\xfd\x1e\x84\xec\x5f\xa6\x82\x41\x2b\xf1\x26\xc2\x4e\x41\
\x8f\x37\x9e\x18\xca\x3d\x9d\xd4\x6b\x8e\x97\x6d\x91\x9a\x19\x66\
\x8f\xe9\xa7\xaa\x9e\x8b\x93\x03\x1e\xae\x8e\xec\x3f\x21\x99\x72\
\xfd\x11\xb6\xee\xfb\x8d\xd5\xab\xa9\x53\x68\x39\x30\x0d\x91\x83\
\x44\x79\x59\xa9\xc2\x7c\xfa\xf9\xc8\x81\x8a\x4e\x2c\x2a\x2d\x63\
\xe4\xaa\xad\x9c\x4c\x31\xbe\xab\x3a\x91\x92\xce\xa8\xd5\xdb\x28\
\x2e\x33\xed\x44\x59\x89\x9f\x4e\xc7\x31\x75\xc3\xaf\x94\x96\x97\
\xf3\xee\xef\x91\xbc\xbf\x4f\x61\xc9\xc3\xd5\xc1\x8e\xaf\x46\x0d\
\x96\xd0\xb2\x13\x2e\xca\x35\x93\x59\x08\x0b\x3f\xb5\xc9\x16\x7a\
\x00\x6f\x8b\x09\x03\xbb\x07\x33\xf6\x7e\xa5\x47\x6f\x6d\x61\xec\
\xfd\xdd\x78\xa0\x67\x7b\x39\xf9\x6d\xa0\xb7\xb1\x3a\x96\xf0\x0a\
\x4e\x06\x26\x61\x44\x3b\x36\xae\x53\x20\x0f\x05\x2b\x47\xc0\xb2\
\xa8\xd3\xd5\x76\x7e\x25\xa2\xaf\xa7\xf3\x75\xd4\x19\x93\xe5\x00\
\xd6\xc4\x9c\x67\xca\x86\x9d\x94\x96\x57\xf5\xd9\xa2\x7d\x47\x38\
\x9b\xae\x9c\x06\x1f\x0c\x6a\xc5\x98\x0e\x01\xc6\x6e\x55\x0c\x8c\
\x43\x58\xec\xaa\x81\x1d\x82\xbd\x5f\xaf\xea\xf5\x70\x75\xe2\x9d\
\x27\x87\xa9\xac\x6e\x39\xbc\xf1\xf8\x50\x3c\x5c\x9d\xc4\x24\x1b\
\x04\x67\x14\xa5\x6b\x31\x96\x4d\x73\xf2\x28\xc2\xd6\x47\x7f\x4f\
\x9d\x0e\xa2\x66\x3f\x4a\x58\x4b\x4f\x45\xe1\xde\xcb\xd7\x71\x24\
\xa9\x2a\xa4\x79\x64\xbb\x36\x2c\x1b\x3d\x98\x0a\x60\xd6\x96\x3d\
\x92\xd5\x7a\x1f\x7f\x6f\x22\x9e\x9e\x54\xed\xc3\xc5\x23\xbf\x12\
\xd6\x56\x3a\xbe\x1f\xf7\x20\x53\xba\x29\x46\x05\x20\x48\x98\xb0\
\xa5\x6b\xe5\x6a\xe3\x0a\xe0\x49\x64\x51\x3d\x26\xf0\x3a\xa0\xd7\
\x1a\xe9\x74\xb0\x64\xee\x78\xfa\x87\x98\xf6\x4f\x28\x2e\x2d\x63\
\xe3\xbe\x13\xec\x88\x8c\xe5\x62\xf2\x0d\x6e\x15\xa9\xf3\xb6\xae\
\x6e\x6d\x10\x71\x26\x81\xe7\x17\xff\x24\xff\x5d\xaf\x23\x5b\xaf\
\x81\x65\xe3\x02\x7e\x44\xe6\x99\x3a\x3c\xa8\xb5\xc1\xce\x07\x88\
\x4d\x93\x8e\xca\xaf\x46\x0f\xc6\xd7\xc5\x19\x3f\x17\x67\x96\x8d\
\x96\x8a\xe7\x33\x69\xd5\x2e\x64\xcd\xea\x7c\x80\x50\xef\x16\x0c\
\x0d\x54\x68\x03\xd7\xa3\xad\xf3\xfd\x90\x59\xe3\xc6\x0f\x08\x55\
\xd5\xf9\x69\x59\x79\x3c\xbe\xe8\x07\x3e\x5a\xb3\x87\x53\xf1\xd7\
\x55\x77\xbe\x29\xf4\xed\xdc\x86\xf1\x03\x42\xe5\xe4\x37\x11\x99\
\xf2\x2b\x61\xe9\xc0\x10\xc9\x53\x67\x84\x59\x7e\xdf\x2b\x87\xb9\
\x9d\x5f\x89\x19\xdd\x15\x6d\x34\xec\x88\x6f\x1c\x1f\x00\xfa\xd0\
\xe1\x66\x2e\x8e\xcc\x9d\x30\xc0\x64\xa5\xe2\xd2\x32\xe6\x2c\xd9\
\xc0\x85\xab\xda\xdd\xc2\xd4\x60\xce\xf8\xfe\x72\x63\x93\x33\xb0\
\x48\x5e\xce\x92\x0c\x10\x8c\x48\x3d\xec\x6c\x67\xcb\xa8\xf6\x46\
\xe7\x58\xc5\x16\x6d\xd6\x96\x3d\x24\xe5\xe4\x71\xf5\x66\x2e\xb3\
\xb6\xfe\x2e\xf9\xae\x8b\x91\xed\x5c\x4d\x3b\x1f\x60\x4c\x87\x00\
\x1c\x6d\x25\x1a\xc9\xf6\x08\x7b\x79\x35\xf0\x00\x26\x8b\x09\x73\
\xc6\x0f\x30\x18\xc9\x23\xc7\xc6\x7d\x27\x6a\xad\xf3\x41\xd8\x15\
\xcc\x19\xaf\x70\x86\x79\x14\x59\xae\x21\x4b\xea\x62\x25\xfb\x95\
\xfb\x5b\xfb\x54\x6b\xd2\x9d\xda\xad\x3d\x87\x93\xaa\x0c\x19\x3b\
\xe2\x12\xf1\xff\xf8\x5b\x83\x65\xa7\x85\x28\x3b\xd3\x12\x9d\x0f\
\xd0\xc4\xd6\x86\xfb\x5a\xfb\xb0\xeb\xa2\x44\x93\xd6\x0f\x21\x41\
\x93\x29\x4c\x07\x1c\x2a\x2f\xda\x78\x37\x63\xf4\x7d\x5d\x54\x3d\
\xf7\x97\x08\x89\x25\x8f\xfb\xbb\x05\xf2\xd6\xf4\x61\x78\xba\x1b\
\xce\x43\x20\xd7\x0f\xa8\xc1\x98\xfb\xbb\xb2\x72\xc7\x61\xae\xa4\
\xea\x8d\x70\x0e\xb7\xdb\xfc\x49\x25\xc1\x92\x12\xe0\x1e\xf1\xc5\
\x80\x36\xc6\xad\x83\x00\xb3\x7a\x76\x21\xd4\xbb\xba\x44\x59\x02\
\xc2\x5a\x7a\xf2\x54\x0f\xe9\x11\x3c\x96\xea\xfc\x4a\x0c\x54\xb6\
\xd5\xe8\xb6\x49\x86\xa7\xc4\x17\xe3\x06\x84\xa8\xd6\xf6\xc5\x5f\
\x93\x86\x16\xbc\x39\xfd\x41\xa3\x9d\x6f\x2e\xac\x74\x3a\xc6\x0f\
\x50\xcc\x68\x92\x36\x5b\x52\x02\x48\xbc\x30\x3b\x7b\x56\x9f\xd5\
\xcc\xde\xc6\x9a\xf0\x69\xa3\x78\x78\xd5\x36\x4e\xa4\x18\x0e\x99\
\x0e\x6b\xe9\xc9\xb6\x69\xa3\x24\xda\x40\x4b\x77\xbe\x91\xb6\x06\
\xab\xa8\xd6\x0a\xe8\x58\x79\x61\x67\x63\xcd\xa8\x7e\xea\x46\xbf\
\x39\x50\xab\x11\x94\x63\x54\xbf\x2e\x7c\xb9\xe9\x00\xc5\x55\x01\
\xa9\x9d\x10\x16\x83\x49\x60\x59\x09\x20\x71\xb3\x69\xd7\xdc\xdd\
\x58\x39\x3d\x7c\x5d\x9c\x39\xfc\xcc\x24\x3e\x1f\x39\x80\xbe\xfe\
\x2d\x71\xb6\xb3\xc5\xd9\xce\x96\xbe\xfe\x2d\xf9\xf2\xe1\x81\x44\
\xcc\x9a\x88\x4f\xd3\xaa\x3d\x6d\x6d\x74\xbe\x91\xb6\xaa\x71\x19\
\x92\x4c\x79\xdd\x82\x7c\x71\x73\x6e\xa2\xfa\x99\x81\xbe\xd2\x75\
\xcd\xa2\x95\xbf\xca\x75\xf9\x16\x81\x7b\x53\x47\xba\x06\x2a\x4c\
\xde\xf7\x55\xfe\x63\x49\x09\x20\xf9\x45\xcd\x1d\x1d\x8c\x95\x93\
\xc0\xce\xda\x9a\x17\x7a\x87\xf0\x42\xef\xea\x17\xdf\xb5\xd5\xf9\
\x46\xda\xaa\x26\x29\xe3\x7d\xe2\x8b\xee\xc1\xd5\x4f\x79\x72\x8c\
\xe8\xd3\x99\x53\xf1\xd7\xf5\xd7\x87\x4e\xc5\x33\x7c\xc1\x52\x45\
\xb9\x26\xf6\xb6\x04\xf9\x36\x67\x44\xdf\xce\x8c\x1f\x10\x62\x96\
\x2f\x41\xf7\x60\x3f\x8e\x9d\x97\xe8\xb4\xfa\x71\xdb\x5c\x6c\x49\
\x09\x20\xdd\x73\xd8\xd9\x1a\x2b\xa7\x19\xb5\xd9\xf9\x00\x4d\xed\
\x14\xb6\x06\x35\x93\x71\x57\xf1\x45\x48\x90\xb6\x93\x62\xc6\x0d\
\x0c\xa1\x7d\x2b\xc3\x3a\x12\x31\x6e\x15\x95\x70\x2a\xfe\x3a\xff\
\x5a\xfd\x1b\x8f\x7f\xf0\x03\x69\x59\xda\x93\x73\x85\x04\x29\xec\
\x21\xfa\xb6\xdf\xf1\x09\x22\x6a\xbb\xf3\x6b\x80\xe6\xe2\x0b\x9f\
\xe6\xae\x9a\x2a\xdb\xd9\x58\xf3\xd9\xbc\x09\xaa\x98\xa0\x12\xe7\
\xaf\xa4\x31\xef\xd3\x8d\xaa\x12\x4c\x88\xd1\xd2\xc3\x45\x4e\xd2\
\xb7\xdd\x92\x0c\x20\xf1\xa0\x30\xe6\xa2\xad\x05\x75\xd5\xf9\xb9\
\xc5\x0a\x33\x86\x9a\x61\x26\x99\xf2\x5c\x9d\xd4\xcf\xff\x95\xf0\
\x74\x77\xe6\x87\xb7\x1e\xe7\xb5\xa9\x0f\xd0\x2d\xd0\x07\x47\x15\
\x56\xcf\x73\x57\x52\xd9\xf4\xc7\x49\x4d\xcf\x31\xb0\x36\xd1\xb7\
\xdd\x92\x6b\x80\x0c\x40\x3f\x0c\xd2\xf3\x6f\xd1\xac\x89\xba\x75\
\x80\x21\x18\xeb\xfc\x1f\xc6\x0f\xe3\xb1\xae\x55\x1b\x0e\x4b\xf8\
\x13\xa4\xe7\xdf\x92\x93\xaa\xd7\x3d\x0b\x90\xac\x1c\x5d\x9c\xcc\
\xfb\xad\xb6\x36\xd6\x3c\x3a\x24\x8c\x47\x87\x18\x76\x94\x49\xcd\
\xca\x65\xd1\xca\x5f\x39\x74\x2a\x5e\x4f\xdb\x1e\x11\xcb\xe4\xc1\
\xea\x1d\x6b\x0c\xb4\x4d\xbf\xc6\xb1\xa4\x04\x90\xc4\x2c\xc7\x65\
\x64\x9b\x7d\x23\xb5\x9d\x9f\x9c\x93\x47\xef\xe5\xeb\x98\xf7\xcb\
\x7e\x0e\x27\xa5\x90\x5f\x5c\x42\x7e\x71\x09\x87\x93\x52\x98\xb3\
\x7d\x3f\xbd\x97\xfd\x44\x72\x8e\xe9\xc1\x7c\x41\xd9\xd6\x78\x43\
\xe5\x64\x28\x12\x5f\x94\x95\xa9\x4e\xd1\xaf\x09\x5e\xee\x4d\x79\
\x6b\xba\xd4\xaa\x18\x9f\xac\xc5\x31\x19\x4a\x95\x6d\xd3\xa7\x16\
\xb5\xa4\x04\x38\x8f\x90\xe0\x18\x80\xd3\x69\x19\x3c\xdc\x5e\x5d\
\x00\x86\x18\x5a\x46\xbe\x5a\x7f\x82\xc8\x59\x93\xaa\x95\x04\x67\
\xd2\x14\xf9\x1e\xd4\xb8\x55\x67\x23\x5a\x2c\xe6\xdd\x2a\xc2\xde\
\x4e\xfd\xeb\x34\xd7\xf3\xc7\x1c\xe4\xdd\x2a\x92\x93\xf4\xc9\x85\
\x2d\x29\x01\x24\x9e\x17\xfb\x13\x14\x59\x35\x4c\x42\x6d\xe7\x83\
\x65\xfd\x09\xf6\x5d\x56\xb4\x55\xe9\x45\xa2\x84\x64\x9a\x48\xbe\
\xa1\x48\xd8\x6c\x11\xa4\x64\xe6\xb2\xe8\x3b\x69\x00\x70\xa0\x6f\
\x73\x23\xa5\x0d\x23\x59\x99\x4c\x5a\xcf\xf1\x96\x94\x00\x7f\x8a\
\x2f\x0e\x26\x5e\xa3\xb0\xb4\x4c\x75\x88\x97\x96\xce\x07\x58\x1d\
\x23\x0d\xd1\xaf\xce\x9f\x60\x55\xcc\x39\x9e\xef\x6d\xd8\x33\xa7\
\xa0\xa4\x94\x43\x57\xae\xc9\xc9\x6a\xc2\xbf\x63\x01\xfd\x4d\xe3\
\xaf\x65\xd0\x4d\xa9\x70\x51\x0d\x2d\xba\xfe\x91\x7d\xb5\x9d\x4e\
\x7f\xf9\xba\x42\xc2\xe9\x0d\x11\x96\x94\x00\x71\x08\xd3\x00\x20\
\xec\x02\xb6\x9e\x53\x33\x95\x6a\xef\x7c\xb0\x9c\x3f\xc1\xe6\xb3\
\x97\x28\x28\x91\xb8\x82\x9d\x45\x9d\x21\xe8\x84\xe4\x22\xae\x6e\
\x72\x26\x75\x6c\xed\xc5\x23\xfd\xb5\xb9\x99\xc9\x94\x40\x20\x1c\
\x3e\x05\x58\x5e\x0f\x20\xd9\x9f\xac\x88\x8e\x35\x56\x4e\x0f\x73\
\x3a\xdf\x92\x58\x19\xad\xc8\xdf\x7b\xca\x50\x39\x03\x90\x44\xe2\
\x46\x9e\x49\x10\x07\x67\xd4\x0a\x3a\xb6\xf6\xe2\xd3\x79\xe3\x35\
\x69\x03\xcb\x2b\x2a\x38\x1c\xab\xc8\xe7\xa4\xcf\x78\x61\x49\x06\
\x78\x0c\x98\x28\x26\xec\x8c\x4b\xe4\xd8\x35\xe3\x36\xef\x9a\x74\
\xbe\x25\xfc\x09\xa2\xaf\xa7\xf3\x5b\xfc\x15\x39\x79\x22\x82\xc9\
\xd4\x14\x0e\x20\x5a\x07\xa4\x66\xe5\x1a\x1a\x69\x35\x86\xa3\xbd\
\x1d\xdd\x02\x7d\x78\x7d\xda\x50\xbe\x7b\x73\x1a\x2d\xdc\xb4\x59\
\x0c\x8f\x9e\xbd\xc2\x8d\x9b\x92\x9d\x50\x06\xa0\x4f\xab\x62\xa9\
\x35\xc0\x10\x60\x25\x06\x7c\x0c\x3f\xd8\x7f\x94\x9f\x1f\x1b\xa9\
\xa8\x10\x93\x7a\x83\x29\x1b\x76\x52\x56\x5e\x35\x6a\x6c\xac\xac\
\x58\x3b\x71\x38\x13\x3a\x07\x99\x7c\x60\x4d\xfd\x09\x00\xde\xf9\
\x3d\xd2\x50\x18\xb9\x0e\x58\x8e\x10\xfe\x56\x5d\x32\x88\x12\x84\
\x3c\x01\x7a\x66\xd9\xb8\xef\x04\xbd\x3a\x18\x0f\x38\x15\xc3\xd8\
\xaa\x5f\xbe\x16\x38\xb4\x74\xbe\xaa\xfb\x19\xc3\xc6\xfd\x27\xe4\
\xa4\x2d\x88\xe2\x04\x2c\x21\x01\xfc\x11\xd2\x94\x19\x54\x63\x6d\
\x3e\x7b\x89\x5f\x2f\x2a\x53\xca\x75\xf3\x6a\xce\x5b\x03\xaa\x5c\
\x08\xac\xad\x74\x7c\x3f\xfe\x41\x55\x9d\x0f\x35\xf3\x27\x00\xd8\
\x7e\x21\x81\xf0\xf3\x8a\x74\x6b\x95\xb0\x43\xf0\x0d\x54\x06\x15\
\x48\x21\xf1\x1d\xdc\x1d\x75\x9e\xc4\x14\x35\x3a\xa4\xba\xc1\xe5\
\xeb\x19\xec\x89\x52\x64\x73\x93\xb4\xb9\xa6\x0c\x60\x8d\x90\x12\
\x5d\xaf\x59\xb2\xb1\xb1\x21\x30\x48\xda\x89\xcf\x6d\xdb\xc7\x4d\
\x03\xc7\xa0\x2c\x1c\xd4\x9b\x77\x07\xf5\x36\x6b\xce\xaf\xf4\x27\
\xa8\x8e\x09\x0c\xf9\x13\x00\x64\x17\x16\xf1\x42\xf8\x3e\x09\x2d\
\x30\x28\x08\x1b\x69\xb0\xaa\x07\xc2\xcb\xaa\xee\x1d\xfd\x8e\x68\
\x3e\x2d\x2f\xaf\xe0\x93\x9f\xf6\x19\x2f\x5d\xc7\x58\xbc\x6e\xaf\
\x7c\x5d\x72\x18\x59\x90\x48\x4d\x23\x83\xfe\x8e\x10\x08\xa9\xc7\
\x3b\x0b\xdf\x67\xce\xdc\xf9\xac\x59\xbd\x8a\xf2\x72\xc1\x68\x91\
\x55\x58\x44\x7c\xd6\x4d\x26\x76\x56\xfa\x59\x0c\x6c\xeb\xc7\xf8\
\x4e\x41\x0c\x0e\xf0\xd7\xfc\x70\x17\x7b\x3b\x66\x86\x75\xc2\xcb\
\xd9\x91\xec\xc2\x62\xb2\x0b\x8b\xb0\xb3\xb6\xa6\x87\x8f\x17\xaf\
\xf7\xef\xc9\x7f\x47\x0d\xc4\xcd\x41\xe9\x9f\x37\x6d\xc3\x2e\x22\
\xae\x56\x99\x62\x6d\x6d\xed\x58\xbf\x69\x33\xad\x5a\xb5\x66\xff\
\x3e\x89\xd4\x6f\x8b\x60\xe3\xa8\x6e\x5b\x98\x89\x10\x17\x01\x40\
\x62\x4a\x26\xc1\x7e\x2d\x08\xf0\x31\x2f\xec\xdd\xdc\xc8\x20\x39\
\x76\x1d\x3d\xc7\xb7\xdb\x15\xb9\x97\xe6\x22\x4b\x71\x57\x93\xb8\
\x80\xd6\x08\x5b\x26\xbd\xa5\x61\xf4\x98\xb1\x7c\xb5\xfc\x7f\x00\
\xfc\xfb\x5f\xff\xe4\x93\xc5\xd2\x90\xba\xca\x11\x5f\x9f\x30\x14\
\x2d\xb4\xe0\xe5\x57\x59\xf0\xca\x6b\x00\xcc\x7a\x6a\x06\xe1\xdb\
\xb6\x8a\xbf\x2e\x40\x88\xb6\x35\xb6\xc2\xd3\x21\x84\x90\x0d\xaa\
\x24\xb8\x38\x39\xf0\xe3\xc2\x27\x0d\x59\xe1\xea\x04\x49\xe9\xd9\
\x4c\x79\xef\x3b\x72\x0b\x24\x1a\xc0\x3d\x08\xb9\x03\x24\x22\xa1\
\x26\x0c\xb0\x1e\x98\x50\x79\xd1\xa2\x45\x0b\xf6\x1d\xf8\x13\x77\
\x77\x61\x36\x28\x2e\x2e\x62\xd4\xc8\xe1\x9c\x8a\x89\x91\x54\xfa\
\x6c\xc4\x00\xe6\xf4\xd1\xea\x79\x6d\x19\x7c\x16\x79\x92\x79\xbf\
\x48\xc3\xe4\x42\x42\x43\xd9\x1a\xbe\x03\x5b\x5b\x61\x09\x93\x99\
\x99\xc1\xc0\xfb\xef\xe5\xc6\x0d\x89\x96\x71\x1d\x82\x47\xad\x31\
\xb4\x47\xd8\x02\xeb\xc5\x4d\xb0\x5f\x0b\xbe\x79\xed\x31\xb3\x8d\
\x44\xe6\xe2\x66\x7e\x21\x7f\xfb\x70\x0d\x97\xa4\xf6\x82\x42\x04\
\xa5\x95\x42\xc5\x6d\xee\x14\x70\x1f\xb2\x88\xd9\xc5\x4b\x3e\x27\
\x34\xb4\x7b\xd5\x8d\xad\x6d\x18\x3a\x74\x18\x9b\x37\x6f\x22\x3f\
\xaf\x6a\x1b\xb2\x23\x2e\x11\x1d\x82\xe8\xaf\x4b\xbc\xfb\x7b\x24\
\xaf\xef\x96\x28\x2b\x69\xe9\xe3\xc3\xfa\x0d\x9b\x71\x71\xad\xb2\
\xe5\x37\x69\xe2\x88\x8f\xaf\x2f\xdb\xc3\x25\x89\xc0\xba\x20\x9c\
\xd8\x6d\x4c\x0a\x64\x20\x98\x90\xf5\xe1\xcf\x99\x39\x05\x1c\xbf\
\x70\x95\x21\x3d\xda\x69\xb2\x11\xd4\x04\x37\xf3\x0b\x99\xb3\x64\
\x03\xe7\x12\x53\xe5\x5f\xbd\x88\x91\x84\x92\xe6\x4a\x80\x70\x40\
\xbf\xb7\xeb\xdd\xa7\x2f\x3f\x6f\x09\x37\x58\x30\xe6\xe4\x49\x1e\
\x19\x33\x92\x5b\xb7\xa4\x26\xd7\x89\x9d\x83\xf9\x7a\xcc\x10\x5c\
\x1d\xb4\x45\xfe\x6a\x45\x76\x61\x11\x4f\x6d\xde\xc3\xc6\x58\x69\
\xa2\x0a\x47\x47\x47\x36\x6f\xdd\x4e\x97\xae\x86\xb5\x6a\x63\x46\
\x8d\xe0\xe8\x91\xc3\x62\xd2\x36\x84\x14\x71\xd5\xe1\x7b\xe0\x71\
\x31\x21\xd8\xaf\x05\x4b\xe6\x8e\xd3\xec\x30\xa2\x15\x49\xe9\xd9\
\xcc\xff\x6c\x93\x7c\xe4\x83\x10\x17\xf8\xa4\xb1\x7a\xe6\x48\x80\
\xce\x08\x7e\xe5\x7a\xe6\x59\xfa\xd5\xd7\xf8\xf8\x1a\xde\x31\x79\
\x79\x7b\xd3\xeb\x9e\xde\xec\xdc\xb1\x9d\xa2\xa2\xaa\x39\x29\x36\
\x3d\x93\x75\xa7\x2f\xd0\xae\xb9\x3b\xc1\x1e\x96\x4b\x9b\x22\xc6\
\xf6\x0b\x09\x8c\x59\x1d\x2e\x59\xf0\x01\xb8\xb8\xba\xf2\xfd\xaa\
\xb5\x84\xf5\xe8\x69\xa4\x26\x04\x04\x04\xb0\xee\xc7\x35\x62\x52\
\x3b\x84\x69\xaf\xba\x53\x9f\x76\x22\xf8\xdb\xb5\xa9\x24\x64\xe6\
\x14\x10\xfe\xe7\x19\x5a\x79\xb9\x9b\xbd\x30\x34\x85\xdd\x47\xcf\
\x33\xef\xd3\x8d\xa4\x64\x2a\x9c\x4a\xf7\x22\x28\xe8\x8c\xe6\x07\
\x30\x47\x02\x2c\x43\xc8\x0c\x02\x40\xff\x01\x03\xf9\xf1\xa7\x8d\
\xd5\x14\x17\x70\xfa\x54\x0c\x53\x1f\x9b\x44\x7a\xba\xf2\xfd\x8d\
\xee\x10\xc0\xc2\x41\xbd\xe9\xde\xd2\xf4\xbe\x5e\x0d\x8e\x5d\x4b\
\x63\xe1\xde\xc3\x06\xf7\xf9\x5e\x5e\x5e\xac\x5e\xfb\x13\x9d\x3a\
\x9b\x76\xe1\x9e\x38\x6e\x0c\x87\x0e\x1d\x14\x93\x96\x01\xb3\x4d\
\x54\x73\x44\x50\xb6\x3c\x20\xff\xa2\x7f\x48\x20\x2f\x4e\x1e\x44\
\x1b\x6f\xad\x07\x81\x1b\xc6\xe5\xeb\x19\x2c\x5e\xb7\x97\x83\x31\
\x06\x6d\x2e\xbb\x80\x47\x90\x79\x6a\xc9\xa1\x95\x01\xec\x81\xeb\
\x88\xbc\x61\x56\x7c\xb7\x8a\x61\xc3\x1f\x32\x5e\x43\x84\xc4\xc4\
\x04\x9e\x7b\xe6\x69\xa2\xa3\x0d\x27\x8a\x1c\x1a\xd8\x8a\x27\xbb\
\x77\x62\x6c\x87\x00\x9a\xd8\x6a\x9b\x37\x0b\x4a\x4a\xd9\x7c\x56\
\x48\x14\xb9\xfb\x92\x42\xbd\x0b\x40\x58\x58\x0f\x96\x2e\xff\x06\
\x7f\x7f\x75\xda\xba\x5f\xb6\x87\xf3\xd4\x4c\x89\x56\x58\x6d\xa2\
\xc8\x26\xc0\x37\x80\x22\x81\xa2\x95\x4e\xc7\x03\x3d\xdb\x33\x7e\
\x60\x08\x3d\x3b\xb4\xd2\x9c\x36\xa6\xbc\xa2\x82\xa3\x67\xaf\xb0\
\x71\xff\x09\xf6\x44\x5d\x30\x66\x7f\x58\x8d\x10\x00\x52\x68\xe8\
\x4b\x31\xb4\x32\xc0\x58\x84\xc3\x89\x00\xf0\xf1\xf1\xe5\x70\x54\
\x34\xd6\x1a\xd2\xb8\x94\x96\x96\xb2\x64\xf1\xc7\x7c\xf6\xe9\x27\
\x94\x96\x1a\x96\x4c\x8e\xb7\xc3\xb5\x06\xb4\xf1\xa5\xb3\xa7\x07\
\xed\x3c\xdc\xf0\x72\x96\xa5\x8a\xcd\x2b\xe0\xc2\xed\x54\xb1\xfb\
\x2e\x27\x73\xe8\xca\x35\xb9\x55\x4f\x0f\x1b\x1b\x1b\xe6\xbd\xb8\
\x80\x79\xf3\x5f\x92\x2b\x7b\x4c\xb6\xb5\x77\xaf\xee\x5c\xbf\x66\
\x76\xaa\xd8\xe7\x11\xf2\x04\x1a\x5c\xe8\xb4\x70\x73\xa6\x77\xa7\
\xd6\x84\xb5\xf3\x27\xc0\xc7\x03\xbf\x16\x6e\x38\x3a\xd8\xd1\xc4\
\x5e\xf8\x9d\xb7\x8a\x4a\x28\x28\x2c\xe6\x6a\x5a\x36\xf1\xd7\x6e\
\x70\x3c\x2e\x89\xc8\x33\x09\xf2\x0c\xa1\x62\x14\x23\x2c\xf8\xfe\
\xab\xb2\x7d\x9a\x19\x60\x15\xa0\xcf\x9d\x3a\xeb\x99\x67\x59\xf8\
\xfe\x07\x1a\x6f\x21\xe0\xe4\x89\x13\xfc\xe3\x83\xf7\x38\x78\xe0\
\x0f\xb3\xea\xab\xc5\xfd\xfd\x07\xf0\xe6\x5b\xef\xd2\x2d\xc4\xbc\
\xad\xe7\xdb\x6f\xbd\xce\xff\xbe\x5e\x2e\x26\x7d\x4f\xf5\xc6\x22\
\x7b\xe0\x19\x84\xd1\xdf\x05\x21\x6d\x5b\x7d\x22\x1f\x38\x8d\xd0\
\x77\xcb\x91\x49\x2f\xad\x0c\x90\x84\x48\x3f\xbe\x65\xdb\x2f\xf4\
\xba\xa7\x66\x8a\x9d\x43\x87\x0e\xf2\xf1\x47\x1f\x72\x38\x32\xa2\
\x46\xf7\x91\xa3\x4f\xdf\x7b\x79\xe5\xd5\xbf\xd3\xf7\x5e\xf3\x34\
\x69\x95\x88\xf8\xf3\x10\xe3\x1f\x91\x2c\xfe\xaf\x22\x84\x85\x19\
\x82\x2f\x42\xaa\xd6\xfa\x51\x74\x98\xc6\x09\xe0\x61\x84\xac\x2e\
\x80\x36\x06\x08\x44\x74\x56\x8e\x8b\x8b\x0b\xb1\xe7\x2f\x61\x65\
\x46\x5e\x5f\x43\xb8\x74\xf1\x22\x9b\x36\xae\x67\xd3\xc6\x0d\x24\
\x26\x26\x98\x75\x8f\x36\x6d\xda\x32\x6e\xfc\x04\xc6\x8d\x9f\x48\
\x40\xa0\x65\x4e\x0f\x29\x2d\x2d\xa5\x63\xbb\x00\xf2\xf3\x25\x62\
\x37\x00\x99\x13\x2c\xc2\xc8\x3f\xcc\x9d\xdb\xf9\x95\x88\x06\xfa\
\x70\x5b\x12\x68\x61\x80\x27\x10\x1d\x7c\x34\x60\xe0\x20\xd6\xae\
\xdb\x60\xd9\xa6\xdd\x46\xc2\xe5\xcb\x44\x47\x1f\xe3\x44\x74\x34\
\xb1\x67\x4e\x93\x99\x99\x49\x56\x76\x16\xd9\x59\x42\x98\xb3\x9b\
\xbb\x3b\xee\x6e\xee\x78\x78\x78\xd0\xb1\x53\x67\xba\x87\x85\x11\
\x1a\x1a\x46\x9b\xb6\xda\x9d\x50\xd5\x60\xd2\xf8\xb1\x1c\x3c\x78\
\x40\x4c\x7a\x02\xc1\x08\x26\xc6\x5c\xb4\x25\x93\xac\x4f\xbc\x00\
\x7c\x09\xda\xfc\x01\x3a\x88\x2f\x7a\x54\xb3\x87\xae\x29\xda\xb4\
\x6d\x4b\x9b\xb6\x6d\x79\x64\xdc\x04\xd3\x85\xeb\x00\x3d\x7a\xf6\
\x92\x33\x40\x07\x03\xc5\x24\x79\xe5\x47\x0c\x68\xcb\xd2\xf7\x86\
\xe0\xeb\x25\x38\x70\xd8\x74\x5c\x22\x29\x5c\x7a\x56\x6a\xe7\xaf\
\xad\xef\x93\x52\xf2\x98\xfd\xee\x6f\xec\xfc\x23\x41\xfc\xf5\x34\
\x6e\x33\x80\x16\xf9\x2d\x31\xe5\xc9\x4d\xbe\x77\x33\x02\x83\x14\
\x56\x4c\x43\xe1\xe3\x92\x5c\x33\xff\x5d\x58\xd5\xf9\xf5\x09\x3f\
\x6f\x67\x96\xbe\x37\x44\x4e\xd6\x3b\x48\x68\x91\x00\x92\x1e\x6f\
\xd5\xba\x8d\xf9\xad\x32\x00\x1f\x2f\xa9\x96\xec\x5a\xaa\xb6\xb3\
\x19\x6b\x5a\xbf\x3a\xb4\x6e\xad\x48\x73\x67\x68\x81\x21\xe9\x6d\
\x3f\xef\xfa\xef\xfc\x4a\xf8\x7b\x37\x95\x93\xf4\x04\x2d\x12\x40\
\xf2\x86\x7d\x5a\x9a\xef\x02\xdd\xd0\x60\x40\xcd\x5d\x7b\x39\xee\
\xeb\x18\x5a\x24\x80\x84\xa5\x9d\x9c\xeb\x96\xc3\x6b\x73\x84\x9b\
\x82\x93\x93\x62\x2b\xaf\xf9\xc7\xcb\xe7\xec\xba\xfe\xde\x18\xb4\
\x48\x00\xc9\x8f\x6e\xd2\x44\x7b\x34\x6c\x43\x85\x01\x06\x50\xc8\
\xd4\x86\x0a\x2d\x12\x40\x9f\xf1\x41\xa7\xd3\x61\x6b\x6b\xb9\x04\
\x10\x50\xf3\x11\x5d\x9b\x12\xa1\xd2\x59\x44\x04\x43\xaa\xdd\x3c\
\x44\x83\x24\x29\x25\xef\x8e\x59\x07\x5c\x4d\x51\x58\x09\xf5\x84\
\x3b\x3e\x41\x44\x03\x82\x24\x28\x66\xf6\xbb\xbf\x91\x94\xa2\x3d\
\x9b\x87\xa5\x71\x35\x25\x97\xd9\xef\xec\x91\x93\x4f\x57\xfe\xa3\
\x45\x11\xa4\x37\x3b\xe9\x74\x3a\x92\x55\x04\x66\xde\x4d\x90\xaf\
\x41\x1a\x38\x9e\xe7\xb6\xc1\xa8\x51\x02\xfc\xf5\x70\x1c\xc1\x54\
\x0d\x34\x32\xc0\x5f\x0d\xc7\x11\x0e\xaf\xd6\x5b\x04\xeb\xc6\x5b\
\xb1\x11\xf5\x89\x3c\x84\x80\xd7\x55\x08\x23\x5f\x62\x0e\xae\x57\
\x06\x28\x28\x28\xe0\xf8\xb1\x28\x2e\x9c\x3f\x4f\x5c\xdc\x05\x2e\
\x5d\x8c\x23\x25\x35\x85\xfc\xfc\x7c\xf2\xf3\xf3\xc9\xb9\xa9\x2e\
\xe9\x82\x8b\xab\x2b\x4e\x4e\x4e\x38\x39\x39\xe1\xed\xdd\x92\xa0\
\xa0\x60\x82\x82\x82\x69\xd7\xbe\x3d\x3d\x7a\xf6\xaa\x95\x2d\x6b\
\x45\x44\x47\xc9\xb5\xae\xef\xd9\x7a\xf9\x3e\x29\xad\x84\x59\x1f\
\xa6\xb0\x23\x42\xb2\xe0\x3c\x84\x2c\x8f\xa1\x31\xd4\x39\x03\x9c\
\x3d\x1b\xcb\x2f\xdb\xc3\x39\x74\xf0\x00\xc7\xa2\xa2\x28\x29\xd1\
\x72\x0c\xaf\x61\xe4\xdc\xbc\xa9\x67\x96\x8b\x71\x71\x12\x27\x13\
\x5b\x5b\x3b\x7a\xf4\xec\xc9\x7d\xf7\xf7\xe7\xa1\x11\x23\xe9\xd8\
\xb1\xf6\x53\xd8\xd7\x25\xfc\x3c\x6d\x59\xf6\x9a\x37\xad\xc6\x4a\
\xbc\x9e\x55\x27\x10\xa8\x13\x06\xc8\xce\xce\x62\xd3\x86\x0d\xac\
\x5b\xb7\x46\x11\x28\x52\xdb\x28\x29\x29\x26\x32\xe2\x4f\x22\x23\
\xfe\xe4\xe3\x8f\x3e\xa4\x5b\x48\x08\x93\x26\x3d\xc6\xb8\x09\x13\
\x70\x73\x33\x9d\xce\xb6\x21\xc0\xdf\x4b\xa1\x93\x51\xad\xa8\xaa\
\x55\x06\xc8\xcc\xcc\x60\xc5\xff\xbe\xe1\xeb\xe5\x5f\x91\x93\xa3\
\xf6\xd8\xbd\xda\x45\xcc\xc9\x93\xc4\x9c\x3c\xc9\xff\xfd\xe3\x7d\
\x1e\x9b\x3a\x8d\x17\xe6\xcc\xc7\xcb\xcb\xfc\x83\x9c\x1b\x3a\x6a\
\x45\x0f\x90\x9f\x9f\xcf\x27\xff\xf9\x37\x2b\xbe\xfd\x46\x11\x10\
\x22\x87\x5b\xdb\x60\xbc\x43\xfb\xd0\xac\x5d\x67\x5c\x5b\x05\xe0\
\xda\x2a\x00\x07\xb7\x66\x58\xdb\x39\x60\xd7\x54\x5d\x6c\x5d\x71\
\x6e\x0e\x65\xc5\x85\x14\x66\x67\x72\xf3\x4a\x3c\x37\xaf\xc4\x93\
\x79\xe1\x0c\x29\xd1\x11\x64\x27\x28\x4f\x2b\x17\xc3\xd1\xd1\x91\
\x19\x7f\x7b\x9a\x17\x5f\x7a\x19\x47\x47\xe3\x07\x58\xcb\xf5\x00\
\xf2\x39\xba\xbe\x21\x5f\x23\xa0\xb2\x6f\x2d\xce\x00\xbb\x77\xed\
\xe4\x8d\xbf\xbf\x46\x72\xb2\xe1\x9c\x39\x56\x36\xb6\xf8\xf5\x19\
\x48\xe0\x43\xe3\x68\xdd\x7f\x18\x4d\x3c\xd4\xa7\x4a\x35\x07\xb7\
\x32\xd2\x48\xdc\xbf\x93\x4b\x3b\x7f\x26\x29\x72\x1f\xe5\xa5\x86\
\x33\x98\xfa\xf9\xf9\xf3\x8f\x0f\xff\xc5\xd0\xa1\x86\x4f\xfa\x6a\
\x64\x00\x13\x0c\x90\x9b\x9b\xcb\x2b\x0b\xe6\xb3\x75\xcb\x66\x83\
\x95\x9d\x3c\x5b\xd2\x65\xca\x2c\x3a\x4d\x9c\x89\x83\x7b\xfd\x68\
\xd5\x6e\x65\xde\x20\x76\xfd\xb7\x9c\x59\xfb\x35\xf9\x69\xd7\x0d\
\x96\x19\xfb\xc8\x78\x3e\xfa\x78\x31\xce\x32\x6b\xe7\x9d\xcc\x00\
\x57\x53\x4b\xe4\x8b\xc0\x5c\x40\x95\xf8\xb4\x08\x03\x9c\x3b\x77\
\x96\xa7\x66\x4e\x27\xfe\x92\x32\xb9\x96\x93\x67\x4b\x7a\xbd\xf0\
\xa6\x70\x40\xb3\x8d\x65\x0d\x48\xe6\xa2\xbc\xa4\x98\x73\x9b\x57\
\x73\xf4\x8b\x7f\x50\x90\x9e\xa2\xf8\x3e\x28\x38\x98\x6f\xbe\xfd\
\x8e\x76\xed\xaa\x52\xcb\xdc\xa9\x0c\x70\x35\x55\xd8\x06\xee\x8c\
\x34\x6f\x1b\x58\x63\x06\x08\xdf\xb6\x95\xf9\x73\x9f\xa7\xa0\x40\
\x1a\x81\x64\x6d\x67\x4f\xf7\xa7\x17\x10\x3a\x73\x1e\xb6\x4d\xea\
\xdb\x35\xde\x30\x4a\x6e\xe5\x73\xe2\x7f\x4b\x38\xfe\xf5\x7f\x24\
\xe7\x1c\x83\x60\x02\xfe\xec\x8b\xa5\x3c\x34\x42\x88\x81\x6d\xc0\
\xb6\x00\x8b\xc5\x05\x28\x18\x60\xdd\xda\x35\xbc\xbc\x60\x3e\x65\
\xb2\xa3\x5d\x9b\x77\x0c\x61\xf0\x3f\x97\xe3\xd1\x4e\x5b\x42\xc3\
\xfa\x42\xc6\xf9\xd3\xfc\xfe\xfa\x2c\x6e\x9c\x93\x6e\x51\xad\xad\
\xad\x59\xbc\xe4\x73\x26\x4e\x9a\xdc\x90\x19\x40\x0c\x45\x5c\x80\
\x96\xe8\xe0\x85\x95\xff\xe8\x74\x3a\x9a\x35\xf3\xe0\xb5\x57\x17\
\x50\x5e\x2e\x4d\x44\xdc\x75\xea\x6c\x86\x2d\x59\x85\x53\x0b\xef\
\x1a\xb6\xb5\xee\xe0\xd8\xdc\x93\x8e\xe3\x9f\xa0\x28\x27\x9b\xb4\
\x53\xc7\xf4\xf4\x8a\x8a\x0a\x76\xfd\xba\x03\x8f\xe6\xcd\xd9\xf3\
\xdb\xee\x7a\x6c\xa1\xc5\xe0\x8d\x90\xc9\x64\x05\xc2\xa1\xd2\xe6\
\x49\x00\x10\x98\xa0\x42\x14\x98\x68\x65\x6b\x47\xff\xb7\x17\xd3\
\x71\xc2\x93\x35\x6f\x66\x3d\x22\x76\xfd\x0a\x0e\x7c\xb0\x40\x32\
\x25\xc8\x7f\xeb\x5d\x00\x7d\x5c\x80\x21\x06\xb8\xd3\x62\xdb\xee\
\x68\x38\x35\xb1\xa5\x73\xb0\x07\x53\x47\x77\xe4\xe9\x49\x5d\xb0\
\xb3\x35\x2c\x54\xef\xb0\xb8\x80\x48\xa0\x2f\x28\xcd\xc1\xbe\x08\
\xe1\x4d\x9f\x22\x9c\x9d\xd7\xd8\xf9\x26\x90\x7f\xab\x84\x23\x31\
\x29\xcc\xfb\x60\x2f\x7d\x27\xfd\x48\x72\x6a\xfd\x7b\x01\x89\x61\
\x2a\x2e\x40\xcc\x00\xf6\xdc\xd9\x81\x8d\x77\x3c\x4e\x9e\x4b\x67\
\xcc\xb3\x5b\x29\x2e\xd1\x76\xa6\x4f\x6d\x43\x6d\x5c\xc0\x33\x34\
\x76\x7e\x8d\x71\xe2\x6c\x1a\xdf\xac\x3f\x6d\xba\xe0\x1d\x02\xf1\
\x1a\xe0\x30\xa2\xe3\x5f\xbb\x84\x84\x31\x65\xfa\x2c\xdc\xdc\x2d\
\x93\xce\xe4\x6e\x45\x76\x56\x06\xab\x57\x2e\xe7\x4c\x8c\x3e\x03\
\x3b\xbd\x43\x5a\x72\xe8\xc7\xc9\xd5\xd4\xaa\x7b\xc8\xd7\x08\xdc\
\xee\x7b\xb1\x04\x90\x18\xca\xa7\x4c\x7f\xba\xb1\xf3\x55\xc0\xcd\
\xdd\x83\x29\xd3\x67\x49\x68\xb1\x17\xeb\x2e\x68\xa5\xa6\x68\xf4\
\x09\xfc\x0b\x40\x6d\x5c\x80\xe4\x60\x9d\xd5\x2b\x97\x93\x9d\xd5\
\x70\x38\xb9\xbe\x90\x95\x99\xc1\xea\x95\xcb\x24\xb4\xae\xed\xb5\
\x9d\xe9\x53\x9b\x30\x15\x17\x20\xc6\x1c\x04\x65\x4f\xe3\xe7\xee\
\xff\x3c\x77\xbb\xcf\x25\x8b\x40\x7b\x04\x05\x41\x28\x8d\xb8\x9b\
\x71\x1c\x41\x09\x54\x0c\xd2\x29\xa0\x08\xc1\x50\xa0\x38\x62\xa2\
\x11\x77\x0d\x14\x71\x01\x72\xbd\x65\x2e\x82\xa1\x20\x0d\x21\x19\
\xa4\x3b\x46\x72\xdc\x35\xa2\xc1\x20\x0f\x38\x06\x7c\x88\x10\x12\
\x66\xfe\x91\xae\x8d\x68\x44\x23\xee\x32\xfc\x3f\x74\xf1\x26\x88\
\x4a\x31\x45\x72\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\
\x00\x00\x0e\x35\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x80\x00\x00\x00\x80\x08\x06\x00\x00\x00\xc3\x3e\x61\xcb\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x03\xb1\x00\x00\x03\xb1\
\x01\xf5\x83\xed\x49\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x0d\xb2\x49\x44\
\x41\x54\x78\x9c\xed\x9d\x69\x70\x1c\xe5\x99\xc7\x7f\x6f\xcf\xa1\
\xd1\xc8\xb2\x6c\x49\xd8\xba\x2c\xd9\x98\x32\x26\x9b\x62\xb5\x46\
\x60\xd9\x78\x61\x09\xe1\xb0\x31\x91\xca\x14\xeb\xdd\x60\x16\x93\
\xad\x32\x47\x36\x71\x48\x11\x30\x21\xbb\x4b\x48\x76\xb1\x97\xad\
\x75\x2e\xaa\x30\x95\x54\x48\x70\x3e\x98\xc3\xc8\x44\x36\x64\x9d\
\x0d\xa4\xa0\x4a\x76\x30\x6b\x6e\x48\x40\xb1\x0e\x5b\x92\x13\x5b\
\xa7\x75\xcd\xd1\x4f\x3e\x48\x96\x2d\x69\x66\x34\x33\xdd\xad\xd6\
\xa8\xfb\xf7\x49\xea\xb7\xe7\x79\x9e\xee\xf7\xdf\xef\xd5\x4f\x77\
\x83\x8b\x8b\x8b\x8b\x8b\x8b\x8b\x8b\x8b\x8b\x8b\x8b\x8b\x8b\x8b\
\x53\x50\x76\x07\x60\x17\x03\xbf\xa4\xd4\xab\xd8\x06\x10\x8e\xf2\
\x58\x4e\x0d\x6d\x76\xc7\x64\x07\x8e\x14\x80\xec\xa5\x20\xec\xe7\
\x08\xb0\x78\x74\xd3\x31\x9f\x97\x2a\x75\x23\x9d\x36\x86\x65\x0b\
\x9a\xdd\x01\x4c\x37\xf2\x08\x5a\xc8\xc7\x33\x9c\xab\x7c\x80\x25\
\xe1\x08\x7b\xe4\x59\x3c\x36\x85\x65\x1b\x8e\x13\x40\xe8\x32\xbe\
\xad\x14\x6b\x63\x14\x7d\x3e\x14\xe4\xdf\xa7\x3d\x20\x9b\x71\x54\
\x17\x10\xd9\xcf\x7a\x5d\xd8\x47\x7c\xe1\x8b\x08\xb7\x64\xdd\xcc\
\x8b\xd3\x19\x97\x9d\x38\x46\x00\x83\x2f\xb3\xd8\x13\xe5\x08\x50\
\x30\xc5\xae\xdd\x7a\x94\xcb\x03\x35\x7c\x3a\x1d\x71\xd9\x4d\xc6\
\x0b\x20\xb4\x9f\x4a\xe0\x29\x11\x16\x29\x38\xae\x84\x66\xd1\x68\
\x06\x9a\x75\xa1\x49\xd3\x69\x17\x0f\x4b\x94\xf0\x38\x50\x9e\xa4\
\xd9\x16\x51\x7c\x43\x45\x39\xa6\x6b\x14\x6b\x8a\xc5\x40\x85\xd2\
\xa9\x10\x45\x85\x40\x99\x82\x16\x84\x2d\xfe\x9b\x79\xc7\xaa\x63\
\x9b\x0e\x66\x83\x00\x7e\x87\x70\xb9\x4d\xee\x0f\xfb\xd7\x53\x6d\
\x93\x6f\x53\xc8\xf8\x41\xa0\x08\x65\xb6\xf9\x4e\xbe\x45\x99\xb1\
\x78\xed\x70\x7a\x24\xb8\xa1\x38\xa2\xf4\x9d\xa0\x8a\x45\x64\xfb\
\xaa\x81\xba\x97\xd3\xb5\xa5\xa0\x0d\x28\x36\x31\xbc\x54\x7c\x9f\
\x30\xf2\xfb\x86\x60\xed\x5a\xa5\xd4\x36\x90\x76\xaf\x68\xf7\x55\
\x0d\xec\x6d\x37\x2b\xb6\x64\xb1\x45\x00\x11\xa5\xef\x44\xd8\x08\
\x82\x82\xd5\x0d\xd9\x35\x9b\x56\x0d\xee\xdb\x93\x8e\x2d\x05\xcd\
\x02\x97\x25\xbb\x7f\xf8\x54\x36\x3d\x47\x17\x32\x7c\x32\x87\x70\
\x77\x00\x00\xdf\xbc\x21\xb2\x16\xf6\x93\xb7\xe2\x24\xbe\x82\xc1\
\xe4\x7d\x0b\xcd\xa9\x47\x3c\x42\x43\x76\xcd\x46\x05\xbb\x11\xf1\
\x02\x44\x54\x54\x80\x7f\x4c\xd7\x5e\xba\xd8\x22\x00\x50\xc5\x20\
\x63\x31\x28\xa5\x76\x37\x64\xd7\x90\x8e\x08\x84\xe4\x2a\xa1\xe7\
\xad\x22\xda\x9f\xbf\x98\x81\xc6\xf9\x09\xc2\x82\x9c\xa5\x5d\x14\
\xdd\xf2\x7b\xf2\x2e\xeb\x98\xda\xb7\xa2\x29\xd9\x38\xcf\xa7\x21\
\xbb\x66\xa3\x52\x6a\x37\xe3\xce\xbf\x56\x92\x8e\x2d\xa3\xd8\x32\
\x06\x10\x91\xed\x40\xe4\xbc\x4d\x5e\xa5\xd4\xee\xc3\xc1\xda\xda\
\xd4\x8d\x25\x16\x40\xb8\x2b\xc0\x27\x8f\xac\xa1\x71\x47\x75\xe2\
\xca\x1f\xb1\x45\xff\xa7\xf3\x69\xdc\x51\xcd\x27\x8f\x5e\x49\xb8\
\x2b\x30\x95\xf7\x94\x5b\x80\xc3\xc1\xda\xda\xc9\x95\x4f\x44\x87\
\xc7\x52\xb5\x65\x06\xb6\x08\x60\xd5\x40\xdd\xcb\x22\xb2\x89\x09\
\x22\x10\xf8\x61\xaa\xb6\x12\xb5\x00\x03\xc7\xe6\xf1\xf1\x43\x7f\
\x47\xdf\x87\x85\x29\xc7\xd8\xf7\xfe\x05\x7c\xfc\xcd\xab\x19\x38\
\x36\x2f\xee\x3e\x7a\x1a\x02\x18\x3d\xc6\x71\x95\x2f\x22\x9b\x56\
\xf7\xbf\xf8\x4a\xca\x41\x9a\x80\x6d\xb3\x80\x55\x83\xfb\xf6\xc4\
\x10\x81\xc4\xdb\x3f\x1e\x4a\x62\x0f\xc4\x42\x7f\x0a\xd2\xf8\x9f\
\xab\x08\x77\x4e\x79\x15\xc7\x25\x7c\x3a\x9b\x4f\xbf\xb3\x9a\xd0\
\xc9\x9c\x98\xe5\x9a\x32\x7c\x07\x31\x22\x22\x69\x8f\x7f\xcc\xc0\
\xd6\x69\xe0\xaa\xc1\x7d\x7b\x14\xdc\x0a\x1c\x07\x5a\x05\xee\x4a\
\xd5\x86\x78\x58\x32\x71\x9b\x1e\xd6\x68\xfc\xaf\x6a\xc2\x3d\x59\
\x86\x63\x8c\x9c\xf1\xd3\xf8\xdf\x57\x20\x91\xc9\xa7\x4a\xf4\x71\
\x37\x94\x92\x42\x60\x0b\xa3\xc7\xab\xe0\x56\x3b\x2b\x1f\x32\x7c\
\x21\x68\xe8\x00\x4b\x35\x9d\xdf\x30\x61\x3e\x7e\xb2\xfe\x22\x4e\
\xfc\xfc\xb3\xa6\xfa\x2a\xdb\xfc\x1e\x0b\xd6\x35\x4e\xdc\xdc\xa2\
\x6b\x7c\x2e\xb0\x8e\x49\x05\x99\x42\xc6\x2e\x04\x45\xea\xb9\x49\
\xd3\x79\x93\x09\x95\x1f\x1d\xf2\x72\x72\xef\x32\xd3\xfd\x75\xbc\
\x70\x31\xfa\xd0\xa4\xbb\xc5\xe5\x9a\xce\xff\x0f\xd7\xb3\xc1\x74\
\x87\xd3\x44\xc6\x09\x40\x04\x15\xae\xe7\x41\x1d\x5e\x02\x26\x0d\
\xeb\xfb\x8e\x2e\x24\x72\xc6\x6f\xba\xdf\x48\x9f\x9f\xde\x77\x16\
\xc6\x2a\x9a\xab\xe0\xf9\xe1\x7a\xb6\x67\x62\x3e\x41\x46\x09\x40\
\xf6\x52\x10\xda\xcf\x01\x81\xed\xc4\x89\xbd\xfb\x48\x91\x65\xfe\
\xbb\xdf\x8c\xbb\xe0\xa8\x14\x3c\x18\x0e\x72\x50\x7e\xc5\x02\xcb\
\x02\xb0\x00\xcb\x17\x82\x0e\x07\x37\xdc\x24\xc8\x2e\x90\xd2\x09\
\x45\xc7\x05\xb6\x24\xbb\x0c\xdc\xbf\x8f\x92\xb0\x87\x37\x14\x93\
\x07\x7d\xe7\x33\xd0\x94\x97\x76\xac\x53\x31\xd8\x3c\x77\xaa\x5d\
\xae\x09\x87\x39\xd4\xbf\x8f\x35\xc9\xe6\x18\x9a\x75\x7e\xd2\xc5\
\xf2\x16\x40\xd0\x9f\x8c\x71\x70\x30\x72\x4b\x75\x57\xb2\x76\x7c\
\x1e\x1e\x62\x8a\xca\x07\x88\x4c\xbd\x78\x93\x36\x49\xda\x5e\xe2\
\xf3\x8c\x24\x9b\x26\x83\x59\xe7\x27\x5d\x32\xaa\x0b\x48\x06\x7d\
\xd8\xba\x46\x2d\x3a\xe8\x4b\x6e\x47\x49\x7d\x3d\xc3\x2e\xa6\xa1\
\x05\x18\x9b\xf7\x4e\xa4\x55\x57\x6a\x4b\xb2\x76\xc2\x51\x1e\x83\
\xa9\xd7\xde\x3d\x79\xc3\xc9\x07\x97\x22\xde\xf9\x43\xc9\xec\xd6\
\x14\xd6\xd9\x91\xac\x4d\xb3\xce\x4f\xba\x64\xd4\x3a\x80\xec\xa5\
\x20\xe4\x67\xb7\x82\x1b\xe3\xed\xf3\xfb\x6f\x5d\x45\xff\x1f\xf2\
\x2d\xf1\x3f\x67\xf9\x69\x96\x3d\xfa\x7a\xa2\x5d\x5e\xf5\xf9\xf8\
\x07\x75\x03\x7f\xb2\x24\x00\x0b\xc8\xa8\x2e\x40\x6d\xe0\xb4\xff\
\x26\xd6\x29\xd8\x06\xe8\xb1\xf6\xc9\xfd\xab\x53\x96\xf9\x4f\x60\
\x5b\x04\x76\xf8\x06\xb8\x2e\x93\x2a\x1f\x32\x4c\x00\x00\x4a\x21\
\xbe\xf5\xec\xd0\x84\x9b\x81\xae\x89\xe5\x79\x97\x5b\x97\x53\x11\
\xc7\x76\xaf\x08\xb7\x64\xad\x67\x9b\xfa\x7b\xa2\x96\x39\xb7\x88\
\x8c\x13\xc0\x59\xbc\x37\x73\x40\xd7\xb8\x1c\x68\x39\x7f\x7b\xce\
\x45\x5d\xcc\x59\x7e\xda\x74\x7f\x39\xcb\x3a\x09\x2e\xe9\x9e\xb8\
\xb9\x45\xd7\x58\x91\xc9\x69\xe4\xb6\x0b\xe0\x70\xb0\xb6\xf6\x50\
\xb0\xb6\xf5\x50\xb0\xb6\xb5\x21\x58\x1b\xeb\x81\x8d\xb8\x04\xd6\
\xd1\x28\x70\xff\xc4\xed\xa5\x9b\x3e\x30\x7d\x74\x13\xcb\xa6\xc0\
\xfd\xa9\xde\x07\x68\x08\xd6\xae\x3d\x7b\xbc\x69\xe5\x3f\x98\x8c\
\xad\x02\x68\xc8\xae\xd9\x28\xf0\x1c\x50\xc6\xc8\xbc\xf7\xa9\x54\
\x6d\x28\x7d\xf2\xcc\x20\x67\x59\x67\xac\x1b\x37\x69\xb3\xf0\x0b\
\x9f\xc4\x6c\x55\x94\xe2\x58\xaa\xb6\x46\x8f\xb1\x0c\x28\x13\x78\
\xae\x21\xbb\x66\xa3\x09\x21\xa6\x8d\x6d\x02\x88\x9d\x16\x95\xfa\
\xfc\x59\x14\xb1\x16\x51\x28\xbd\xfd\x7d\xf2\xfe\xe6\x64\xba\xe1\
\x8d\x91\xb7\xa2\x83\x92\x2f\x7e\x18\xb3\x4c\x17\x8c\xa6\x71\x9d\
\x4d\x87\xb3\x4d\x04\xb6\x08\x60\x34\x1b\x76\x52\x5a\x94\x82\xaf\
\xa6\x6a\x4b\x41\x45\xcc\xed\x9a\x70\xe1\x03\x87\xb8\xe0\xc6\x3f\
\xa6\x1b\x26\x85\xd7\x36\x73\xe1\xfd\xbf\x43\x69\xb1\x75\xa9\xc5\
\xf1\x9d\x08\x05\x5f\x21\x46\x3a\x5c\xaa\xdd\x9f\x59\xd8\x22\x80\
\x91\x54\xe8\xc9\x69\x51\x2b\x07\xea\xea\x52\x37\x16\xbf\x12\x94\
\x47\x58\xf4\xa5\x77\x59\xfc\x2f\x6f\xa5\x94\xed\xeb\x2b\x1c\x64\
\xf1\x57\xdf\xa2\xfc\xae\xa3\x28\x6f\xcc\xd9\xe6\x59\x52\x16\xc0\
\xca\x81\xba\xba\x58\xe9\x70\xa3\xe7\x64\xda\xb1\x29\x2b\x58\xce\
\x9f\x4f\x19\x4a\x8b\x52\x50\x31\x55\xbf\x91\x7f\x55\x2b\xf3\xaa\
\xdb\xe8\xfc\x6d\x39\xdd\x87\x4a\xe8\xfb\xa8\x60\x52\x86\x8f\xf2\
\xea\xe4\x7e\xe6\x14\xf3\x56\xb6\x93\x7f\x75\x0b\x9a\x7f\xea\x19\
\x9d\x92\xd4\x33\x82\x60\x24\x13\xaa\x21\xbb\x86\xf1\xad\xa0\x6e\
\xcb\x0b\x2a\x6c\x11\x80\x57\xb4\xfb\x22\x2a\x2a\x0a\x55\x1c\x45\
\x6d\x5f\x3d\x50\x97\x76\x42\xa4\x24\x79\x15\x6a\xfe\x28\x85\xd7\
\x1d\xa3\xf0\xba\x63\xe8\xc3\x1e\x42\xa7\x82\x63\x59\xbf\xbe\xf9\
\x43\xf8\x0b\x07\xd0\xb2\x52\x9b\xc6\x4b\x82\xd6\x67\x2a\x56\x0d\
\xee\xdb\xd3\x10\xac\xed\xd5\x14\x0f\x0a\xd2\xee\x15\xcf\xd7\xd3\
\xb5\x65\x84\x8c\x5a\x0a\x8e\xc5\x70\x3d\x6d\xca\xa6\x27\x83\x04\
\xda\xb3\xd6\x1b\x1e\x08\xda\x8a\xed\xeb\x00\x46\x51\xd0\x6a\x9b\
\x6f\x03\x4f\x06\xcd\x14\x6c\x1a\x03\x98\x88\xe2\x2e\x84\x27\x05\
\x2a\x14\x9c\x18\x7d\x54\xac\x19\x45\x93\x40\xb3\x8a\xd2\x96\xd6\
\xe3\xe1\x70\xbf\xd2\x69\x12\x0f\x25\x0a\x2a\x10\x16\x8f\x8e\x37\
\x2a\x04\x4a\x95\xd0\x24\x7a\xea\x59\xcc\x33\x8d\x8c\xef\x02\x92\
\x25\xa5\x17\x44\x68\x54\x65\x72\xa6\x6f\x2a\x64\x7c\x17\x90\x2c\
\xd9\x6b\x69\xd2\x14\x9b\x89\x73\x17\x71\x14\x11\xe1\x4b\x4e\xa9\
\x7c\x30\xa9\x05\xc8\xde\xf0\x40\x99\x12\xb5\x53\x44\xdd\xa0\x20\
\xd7\x0c\x9b\x66\x23\x91\x08\x43\x1d\x1d\xfc\xdb\x35\x6f\xf3\xf0\
\xd5\xef\xc7\xdc\xe7\xbb\xaf\x7d\x96\xef\xbc\x56\x49\xa0\xa8\x08\
\xe5\x9d\x99\xbd\xa3\x40\x9f\x42\x0e\x46\x3d\xfa\xb6\xe1\x17\x1e\
\xff\xc4\xa8\x3d\xc3\x02\xc8\xde\xf0\x40\x99\xd2\x3d\xef\x80\x58\
\x93\x85\x61\x22\xd1\xa1\x21\xc2\x7f\x3e\x49\xdd\x6d\xbf\xe5\xc6\
\x65\xe3\xa7\xdd\xff\xd7\x58\xc4\xfa\x67\xae\xc1\x5b\xb0\x00\x4f\
\x76\xb6\x4d\x11\xa6\x82\xea\x94\x88\x76\xe9\x60\xfd\x7f\x18\x7a\
\x47\x81\xe1\x2e\x40\x89\xda\x99\x09\x95\x0f\xe0\x09\x04\xf0\xcc\
\x9d\xc7\x9d\x2f\xac\xa2\xb9\xeb\xdc\xf3\x7e\x4d\x5d\x73\xb8\xed\
\xd9\x35\x68\xb9\xf3\x32\xa4\xf2\x01\x24\x5f\xf3\xea\xff\x63\xd4\
\x8a\xe1\x76\x6e\xb4\xd9\xcf\x18\x7c\x73\xf3\xe8\xfe\x73\x88\xab\
\x7e\x7c\x3d\x0f\xfc\xed\x07\x08\x8a\xc7\x5f\xff\x0c\x3d\x32\x97\
\xac\x3c\xeb\x52\xca\xad\x40\x90\x1b\x8c\xda\x30\x2c\x80\x99\xda\
\xe7\xc7\x45\x81\xbf\xb0\x80\x8e\x8e\x30\xf7\x1d\xa8\x1a\xd9\xe4\
\xf5\x12\x28\x4a\xfd\x11\xf2\x19\x80\x61\xc5\xce\xcc\x91\x8e\xc5\
\x28\x4d\x23\xb0\x70\x21\xe1\xde\x5e\x00\x7c\x73\xe7\xa2\x34\xc7\
\x4c\x88\xc6\xe1\x48\x01\x00\x28\x8f\x07\xff\xfc\x29\xde\x18\xe2\
\x00\x9c\x29\x7b\x97\x31\x2c\x6f\x01\x6e\x58\xd4\xc8\xf7\xd6\xbc\
\x42\x71\x4e\x9f\xa9\x76\xdb\xfa\x73\xd9\xfa\xc6\x5a\x0e\xb6\x5e\
\xe8\x28\x7f\x66\x63\x79\x0b\xb0\xd3\x82\x93\x03\x50\x92\xd3\xc7\
\xf7\xd6\x4c\xbe\x8b\x3c\xdb\xfd\x99\x8d\xe5\x02\x28\xb1\xe0\xe4\
\x9c\xa5\x34\xa7\xd7\x71\xfe\xcc\xc6\x1d\x03\x38\x1c\x57\x00\x0e\
\xc7\x15\x80\xc3\x71\x05\xe0\x70\x5c\x01\x38\x1c\x57\x00\x0e\xc7\
\x15\x80\xc3\xb1\x5c\x00\x6d\xfd\xd6\xdd\x2c\x3c\xd1\x3f\xf9\xad\
\x5d\xb3\xdd\x9f\xd9\x58\x2e\x80\xad\x6f\xac\xb5\xe4\x24\x9d\xe8\
\x9f\xcb\xd6\xd7\x27\xbf\x29\x66\xb6\xfb\x33\x1b\xc3\xb9\x1c\xc1\
\xda\x6d\x19\xf3\x46\xac\xd9\xc8\x40\xdd\x76\x43\x75\xe8\x8e\x01\
\x1c\x8e\x2b\x00\x87\xe3\x0a\xc0\xe1\xb8\xf9\x00\x19\xe6\xcf\x6c\
\xdc\x7c\x80\x0c\xf3\x67\x36\x6e\x3e\x40\x86\xf9\x33\x1b\x77\x0c\
\xe0\x70\x5c\x01\x38\x1c\x57\x00\x0e\xc7\x15\x80\xc3\x71\x05\xe0\
\x70\x5c\x01\x38\x1c\x57\x00\x0e\xc7\xcd\x07\xc8\x30\x7f\x66\xe3\
\xe6\x03\x64\x98\x3f\xb3\x71\xf3\x01\x32\x1c\x37\x1f\xc0\xc5\x10\
\xae\x00\x1c\x8e\x2b\x00\x87\xe3\xe6\x03\x64\x98\x3f\xb3\x71\xf3\
\x01\x32\xcc\x9f\xd9\xb8\xf9\x00\x19\xe6\xcf\x6c\xdc\x31\x80\xc3\
\x71\xac\x00\x0e\x84\xae\xa4\xa2\xb3\x9e\x8a\xce\x7a\x0e\x84\xae\
\xb4\x3b\x1c\xdb\x70\xac\x00\xbe\x7c\xe6\x41\xda\xf5\x42\xda\xf5\
\x42\xee\x3d\x63\xcb\xf7\x9a\x66\x04\x8e\x15\x40\x9b\x7e\xc1\xb8\
\xbf\xc5\x39\x9f\x4e\x18\x87\x63\x05\xe0\x32\x82\xa3\xde\x14\x7a\
\x42\x5f\xc0\x0b\xc3\x9f\xe3\xd5\x70\xd5\xa4\x2b\x7e\x43\xef\xe3\
\x5c\xe3\x3b\xc2\x2d\x59\xbf\xa1\x54\xcb\xa8\x2f\xc0\x1b\xc2\x11\
\x02\x38\xa1\x2f\xe0\x5f\xfb\xef\x66\x4f\xe8\x7a\xc2\x12\xfb\x90\
\xf7\x87\xd6\xb0\x3f\xb4\x86\x6f\xf6\x7f\x99\x8d\x59\xff\xcb\xa3\
\x39\xbb\x1c\x21\x84\x59\x9f\x0f\xf0\x5c\xe8\xf3\xac\xe8\xfa\x05\
\xbb\x87\xd7\xc5\xad\xfc\xf3\x09\xe1\xe3\x99\xe1\x9b\x58\xd1\xf5\
\x0b\x9e\x1f\xbe\x36\x65\x7f\x66\xe2\xe6\x03\x24\x20\x99\xfb\xf3\
\x3f\x1e\xaa\xe5\xf6\xde\x47\xe9\x92\xd4\xfd\x77\x49\x2e\xb7\xf5\
\x7d\x97\x27\x06\x6f\x4d\xda\x9f\x99\xb8\xf9\x00\x06\x89\x96\x96\
\x32\x5c\x55\x05\x2a\xf6\x21\x5e\x5f\x34\x87\xef\x57\x16\xa1\x80\
\xad\x6f\x77\xf0\xab\x8e\x33\xb1\x0d\x89\x90\xf5\xe6\x11\x3c\x6d\
\x86\xbe\xcc\x62\x19\x6e\x3e\x40\x0c\x24\x3b\x9b\x50\x65\x65\xdc\
\xca\xcf\xf5\x6a\xec\x5a\x51\x4c\x71\xc0\x4b\x51\xc0\xcb\xce\xca\
\xa2\xf8\xc6\x94\x22\x54\x59\x89\x64\xcc\xa7\x64\x52\x63\x56\x0a\
\x20\x7c\xc9\x25\x88\xcf\x17\xb7\xfc\xee\xa5\xf9\xcc\xf7\x7b\xce\
\x6d\x90\xc4\x8d\x98\xf8\x7d\x84\x97\x5f\x62\x56\x78\x33\x8a\x59\
\x27\x00\x09\x06\x89\x2c\x2a\x8b\x5b\x9e\xeb\xd5\xb8\x77\xe9\xf8\
\x0f\x45\xfc\xf4\xbd\xa9\xbf\x3e\x1b\x29\x2f\x43\x82\x41\xc3\xf1\
\xcd\x34\xcc\x10\x80\xf5\xb7\xac\x52\x20\x5a\x5c\x0c\x2a\xfe\x61\
\x4d\xbc\xfa\x7b\x86\x23\x3c\x95\x84\x00\x50\xda\x88\xed\x99\x45\
\x8f\x51\x03\x26\x08\x40\x7e\x6d\xdc\x86\x79\x44\x17\x2c\x88\x5b\
\x16\xeb\xea\xff\xd1\xd1\x26\x7a\xbc\xc9\xf5\xef\x89\x6c\xdb\x82\
\x92\x83\x46\x4d\x18\x16\x40\x54\x69\x0f\x01\xa7\x8d\xda\x31\x0b\
\x3d\x77\x4e\xdc\xb2\x49\x7d\x3f\xf0\x70\xf5\x45\x74\xdf\x71\x05\
\xdd\xb5\xcb\x79\xf7\xfa\xa5\x5c\xbb\x20\x27\xce\xaf\x41\x9f\x13\
\xdf\xf6\xf4\xa3\x3a\xa3\x51\xf5\x90\x51\x2b\x86\x05\x30\xfc\xe2\
\x63\x7f\x90\x88\xe7\xaf\x15\xea\x59\x66\x40\x77\x20\xfe\xac\xb8\
\x65\x9b\xca\x13\x7f\x65\xad\x3c\xe8\x4b\x38\x23\x90\x40\x20\xed\
\xb8\x4c\xa4\x17\x25\xcf\x47\x75\x59\x39\xfc\xd2\xf6\x4f\x8d\x1a\
\xb3\xfc\x16\xd8\xd3\x2c\x4f\x69\x9d\x60\x33\x1f\x1b\x8b\xe9\xe9\
\x43\xbd\xc4\xf9\x96\xe1\xdb\xd7\x2e\x61\x71\x6e\x7c\x81\x00\xb4\
\xf4\x87\xb8\xf4\xe0\x1f\xe3\x15\x9f\x61\x73\xb5\xa1\x55\x9f\x69\
\x3f\x1f\x53\x30\xeb\x66\x01\x28\xda\xe2\x15\x7d\xed\xd5\x8f\x68\
\xea\x19\x4c\xf8\xf3\xdd\x1f\x24\x58\xf0\x51\xcc\xcc\xd5\x20\x03\
\x58\x7e\x33\x68\x79\x70\x79\x6a\x3f\x18\xf8\xd8\xa8\xcb\x46\xe0\
\xe2\x58\x05\xaf\x1d\xef\xa4\xf2\x67\xaf\x8f\xfd\xff\x8d\x2b\x2e\
\xe4\xe1\xea\x8b\xc6\xfe\xef\x19\x8e\xb0\xeb\xbd\x56\x28\x88\x3b\
\xd8\x33\xfc\x59\x79\x1b\xce\x47\x42\x66\x5f\x0b\x20\x1c\x88\x5b\
\x96\x9f\x0f\xa3\x9f\x85\xcf\xf5\x7b\xb9\xb7\xb2\x62\x5c\xf1\x13\
\x47\x9b\xe9\xf1\x25\xec\x22\xf6\x9b\x10\xe1\x8c\x62\xf6\x09\xc0\
\xcb\x3e\x20\x12\xb3\x2c\x10\x80\x92\x12\x28\x2f\xe7\xee\xab\x2f\
\x65\x7e\xe0\xdc\x6a\xe1\xd8\xd5\x3f\x27\x6e\x17\x1f\xc1\xc3\x4b\
\xa6\xc7\x6b\x33\xb3\x4f\x00\x9b\xaa\x8f\xa3\xf8\xe9\x54\xbb\xdd\
\xb9\x78\xde\xb8\xff\x9f\x38\xda\x4c\x4f\x56\x36\xc4\xfb\x86\xb0\
\xf0\x13\x36\x55\x1f\x37\x25\xc6\x19\xc4\xec\x13\x00\x40\x24\xf2\
\x08\x53\x4c\x49\x75\x5d\x1f\xfb\xbb\x6b\x28\xcc\x93\xef\x26\xbc\
\xfa\x7b\xf1\x7a\xbf\x6d\x5a\x7c\x33\x88\xd9\x29\x80\x7f\x5e\xd3\
\x86\xe8\x1b\x81\x68\xbc\x5d\xbe\xf6\xea\x87\xb4\xf4\x0e\xd2\xd8\
\x3d\xc0\x3d\x07\xdf\xa7\x37\x10\xf7\xea\xd7\x11\xd9\xc4\xed\x55\
\xed\x96\xc5\x6b\x23\xb3\x37\x25\xec\xce\xd5\xaf\xf0\xf4\xa1\xad\
\xc0\x0f\x88\x21\xf4\x5f\xb7\x74\x72\xe9\xd3\xa3\x33\x02\x4d\x83\
\xd2\x92\x58\x56\x74\x94\xda\xca\xe6\xea\x5f\x5a\x19\xaa\x9d\xcc\
\xce\x16\xe0\x2c\x9b\xab\x9f\x40\xa9\x1a\x62\x75\x07\xf9\xf9\xe0\
\xf1\x82\xc7\x03\x85\x85\xb1\x6e\x20\xf5\x82\xfa\x02\x77\xac\xfc\
\xd1\x74\x84\x6a\x17\xd3\x21\x80\x54\x06\x4e\x49\xdc\x96\x4b\x91\
\x3b\x56\xd6\xe3\xf7\x2e\x65\xa4\x25\x38\x37\x3b\x08\x04\x46\xae\
\xfa\xd2\xd2\x91\xbf\xcf\xa1\xa3\x78\x06\x8f\x77\x39\x9b\x57\x5a\
\x31\xed\xb3\xf7\x7c\x4c\xc0\x72\x01\x08\x6c\x21\xb9\x83\x6e\xd5\
\x95\xda\x62\x49\x10\x5f\xac\x3a\xc5\xe6\xea\xad\x44\x23\x15\x88\
\xba\x07\xa4\x1e\xf8\x90\x91\x96\xa1\x17\xf8\x08\xa4\x1e\x51\xf7\
\x10\x8d\x2c\xe2\x8e\xea\x7f\xb2\xaa\xcf\x9f\x11\xe7\xc3\xc5\xc5\
\xc5\xc5\xc5\xc5\xc5\xc5\xc5\xc5\xc5\xc5\xc5\xc5\xc5\x91\xfc\x05\
\x85\x6c\x19\x1c\x05\x41\xba\x58\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
\x00\x00\x27\xe2\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x80\x00\x00\x00\x80\x08\x06\x00\x00\x00\xc3\x3e\x61\xcb\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x03\xb1\x00\x00\x03\xb1\
\x01\xf5\x83\xed\x49\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x20\x00\x49\x44\
\x41\x54\x78\x9c\xed\x9d\x79\x7c\x54\xd5\xdd\xff\xdf\xb3\x26\x99\
\x84\xec\x09\x61\x4d\x42\x12\x12\x59\x82\x55\x01\x01\x41\x59\x15\
\xb5\x28\x88\xfa\x60\x05\xa9\xc5\x1d\x69\xed\xcf\xba\xb4\xd5\xa7\
\xcf\x53\x7d\xd0\x47\x6c\x69\xfd\x59\xfd\xd5\x56\x81\x0a\x5a\x15\
\x14\x11\x10\x15\x01\x41\x04\x65\x13\x34\x81\xb0\x85\x40\x48\x20\
\x2b\x49\x66\x5f\xee\xef\x8f\xcb\xcc\xdc\x33\x77\x26\x1b\x33\x43\
\x50\x3e\xaf\xd7\xbc\xe0\x9e\x39\xe7\xde\x33\x39\x9f\x7b\xce\xf7\
\x7c\xcf\x77\xd1\xd0\xf5\xa1\x03\x72\x80\xfe\xc0\x25\x67\xff\xcd\
\x05\xd2\x80\x84\xb3\x9f\x78\x20\x19\x30\x03\x56\xa0\x09\x68\x01\
\xce\x00\x87\x80\xc3\x8a\x7f\xbf\x3f\x5b\xe7\x22\x00\xcd\xf9\xee\
\x40\x10\xe8\x81\x2b\x80\x71\xc0\x58\x60\x14\x10\x17\xc6\xfb\x3b\
\x80\x9d\xc0\x16\xc5\xa7\x3e\x8c\xf7\xbf\x88\x4e\x20\x06\x98\x0e\
\xbc\x8f\xfc\xf6\x4a\x51\xfc\xb8\x80\x4f\x81\x7b\x81\xf4\x48\xff\
\xd0\xae\x86\xf3\x3d\x03\x8c\x04\x66\x02\xb7\x03\x29\x1d\x69\xa8\
\x33\xc6\xa0\x33\xc6\xa2\x33\xc6\xa0\x8f\x35\xa1\x37\xc6\xe0\x72\
\xd8\xf1\x38\x1d\xb8\x9d\x0e\x5c\x36\x0b\x2e\xbb\x15\x24\xa9\x23\
\xb7\x75\x01\x1b\x80\xbf\x23\x93\xd1\xdd\x91\xc6\x17\x22\xce\x07\
\x01\xb4\xc0\x54\xe0\x77\xc0\x4f\xda\xaa\x1c\x9b\x9c\x4e\x52\x9f\
\x3c\xe2\xd3\x7a\x60\x4a\xcf\xc2\x94\x26\x7f\x74\xc6\x98\x36\x1f\
\xe4\x71\xb9\xb0\x36\x9c\xc6\x52\x7f\x0a\x73\x4d\x15\xe6\xd3\x95\
\x34\x55\x1e\xc1\x52\x77\x0a\xf9\xe5\x6f\x15\xc7\x80\x97\x80\x7f\
\x20\xcb\x12\x3f\x48\x44\x93\x00\x7a\x60\x06\xf0\x24\xb2\x30\x17\
\x14\xc6\xf8\x44\x52\xfb\x0d\x20\x25\xf7\x12\x52\x72\x2f\x21\x2e\
\x25\x23\xac\x9d\x90\x3c\x12\xb6\x33\x0d\x34\x94\xef\xe7\xcc\xf1\
\x83\xd4\x1d\xda\x8b\xbd\xa9\x55\x11\xa0\x19\xf8\xbf\xc0\x73\xc8\
\xcb\xd3\x0f\x0a\xd1\x22\xc0\x24\xe4\xb7\xa9\x7f\xb0\x2f\xb5\x7a\
\x03\x19\x45\x3f\x21\xab\x78\x24\x69\xf9\x83\xd1\x68\xb5\x11\xef\
\x90\x84\x84\xc7\xe9\xc2\x65\xb7\x71\xa6\xf2\x28\x75\x65\x7b\xa8\
\x2b\xdb\x83\xbd\xb9\x21\x54\x93\x1a\xe0\x0f\xc8\xcb\x83\x2b\xe2\
\x1d\x8c\x12\x22\x4d\x80\x5e\xc0\x9f\x80\xdb\x82\x7d\x69\x4a\xcb\
\xa2\xef\xc8\x6b\xe9\x3e\x70\x38\xfa\xd8\xf6\x09\xfa\x1e\x97\x0b\
\x4b\xfd\x29\x6c\x0d\xa7\xe5\xb5\xde\x6e\xc5\x6d\xb7\x21\x79\x3c\
\xa0\x01\x7d\xac\x49\x96\x0d\x0c\x31\xc4\x74\x4b\xc6\x94\x9e\x85\
\x3e\xa6\x95\x7b\x4b\xe0\x76\x39\x71\xda\x6c\x78\xdc\x2e\xea\x0f\
\x7f\x47\xd5\xae\x8d\x34\x55\x1e\x09\xd5\x62\x3f\xf0\x30\xf0\x59\
\xbb\x3a\xdc\xc5\x11\x29\x02\x68\x80\x5f\x02\xff\x0d\x74\x0b\xfc\
\x32\xa1\x7b\x1f\x72\x46\xdf\x40\xe6\x80\xa1\xad\xbe\xed\x1e\xa7\
\x83\x33\x27\x0e\xd3\x50\x7e\x80\x33\x27\x0e\x63\xa9\xab\xc6\x7e\
\xa6\x0e\xa9\x63\x82\xdd\x59\x22\xf4\x20\x21\xb3\x17\x29\x39\x45\
\x24\xe7\x14\x61\x88\x8b\x17\xea\x48\x48\xb8\xed\x4e\x9c\x76\x1b\
\x48\x12\x2d\xa7\x8e\x73\x72\xe7\xe7\xd4\x1e\xd8\x43\x10\x79\x41\
\x02\xfe\x06\x3c\x8e\xac\x7b\xb8\x60\x11\x09\x02\x64\x00\x4b\x80\
\xeb\x02\xbf\x30\xa5\x75\x27\x7f\xe2\xed\x64\x14\x5e\x0a\x9a\xe0\
\x8f\x76\xb4\x9c\xe1\xd4\x77\xdb\x39\x5d\xba\x8b\xa6\xca\xc3\x78\
\x5c\xe1\x9f\x6d\x35\x1a\x0d\x09\x59\x7d\x49\xcb\x1b\x44\xf7\xe2\
\x11\x24\x64\xf6\xf2\x7f\x29\x49\x38\x6d\x36\x5c\x0e\x07\x00\x2d\
\xa7\x2a\x28\xdf\xb4\x92\xa6\xca\xc3\xc1\x6e\x75\x08\xb8\x0b\xd8\
\x1a\xf6\x4e\x46\x09\xe1\x26\xc0\x18\x60\x19\xf2\xd4\xef\x83\x56\
\x6f\x20\x67\xf4\x0d\x64\x8f\xba\x1e\xad\xde\xa0\x6a\xe4\x71\xb9\
\x38\x5d\xf2\x0d\xd5\x7b\xbf\xa2\xfe\xc8\xf7\xf2\x74\x1e\x45\x74\
\xeb\x91\x4d\x8f\x21\xa3\xe8\x3e\xf8\x4a\x8c\xf1\xf2\x84\xe5\x76\
\x3a\x71\x58\xfd\xdb\xc8\xfa\xc3\xfb\x28\xff\x62\x25\xb6\xc6\xda\
\xc0\xe6\x6e\xe4\x1d\xcd\xf3\xd1\xec\x73\xb8\x10\x4e\x02\x3c\x0e\
\x3c\x83\x2c\xed\xfb\x90\x96\x3f\x88\xc2\xeb\x67\x12\x97\x9a\xa9\
\x6a\xe0\x76\xd8\xa9\xdc\xb9\x91\x8a\xaf\xd6\x61\x6f\x0a\x29\x7c\
\xf9\x90\x9e\x9e\x4e\x7e\x7e\x3e\x05\x05\x05\xe4\xe5\xe5\x91\x9a\
\x9a\x4a\xb7\x6e\xdd\x30\x99\x4c\xc4\xc5\xc5\xe1\x70\x38\xb0\x58\
\x2c\x34\x36\x36\xd2\xd2\xd2\xc2\xe1\xc3\x87\x39\x78\xf0\x20\x87\
\x0e\x1d\xe2\xe4\xc9\x93\x6d\xde\x5f\x6b\x30\xd2\xeb\xb2\x31\xf4\
\x1d\x39\x99\xd8\xa4\x54\x24\x8f\x84\xd3\x66\xc1\xed\x94\x67\x21\
\xb7\xd3\xc1\xb1\xcd\x2b\xa9\xfe\x76\x2b\x41\x96\x85\x37\x81\x7b\
\x00\x5b\x9b\x0f\xea\x42\x08\x07\x01\x34\xc0\x02\xe0\xd7\x42\xa1\
\x56\x4b\xce\x98\x9f\x92\x7b\xf5\x4d\x68\x02\xa6\x7b\x8f\xd3\xc1\
\xb1\xad\x1f\x73\x7c\xfb\xa7\x38\x2d\x2d\x21\x6f\xdc\xaf\x5f\x3f\
\x46\x8f\x1e\xed\xfb\xf4\xec\xd9\xb3\x53\x1d\x94\x24\x89\xfa\xfa\
\x7a\x36\x6f\xde\xcc\x96\x2d\x5b\xf8\xf2\xcb\x2f\x29\x29\x29\xc1\
\x13\x62\xa6\xd1\xea\xf4\x64\x0d\x19\x49\xee\xd5\x53\x88\x4d\x4c\
\xc3\x69\xb7\xe1\xb2\xdb\x7d\xdf\x37\x1e\x3b\xc0\xe1\x4f\xdf\xc2\
\xde\xdc\x18\xd8\x74\x1b\xb2\x8e\xa3\xba\x53\x1d\x3d\x0f\x38\x57\
\x02\x18\x91\xd7\xfb\xdb\x95\x85\xb1\xc9\x69\x0c\x9a\xfe\x00\x49\
\xbd\xf3\x54\x0d\x6a\x0f\xec\xa1\xec\xe3\x65\x58\x1b\x6a\x82\xde\
\xb0\x47\x8f\x1e\xdc\x7c\xf3\xcd\x4c\x9d\x3a\x95\x11\x23\x46\x9c\
\x63\xf7\xd4\x70\xb9\x5c\x38\x1c\x0e\x2a\x2b\x2b\x59\xb5\x6a\x15\
\xcb\x97\x2f\x67\xef\xde\xbd\x41\xeb\x6a\x0d\x46\xb2\x47\x4d\x26\
\xe7\xaa\x1b\x91\x3c\x12\x0e\x9b\x7f\x49\x70\xd9\x2c\x94\xad\x59\
\x42\xe3\xb1\xfd\x81\xcd\x0e\x01\x43\x00\x4b\xd8\x3b\x1f\x01\x9c\
\x0b\x01\x12\x80\x15\xc0\x44\x65\x61\x6a\xde\x40\x06\x4d\x7f\x40\
\x25\x65\xdb\x1a\xeb\x38\xb0\xe6\x5f\xd4\x96\x7d\x1b\xf4\x66\xa3\
\x47\x8f\xe6\x57\xbf\xfa\x15\x13\x26\x4c\x40\x1b\x05\x3d\x80\xcb\
\xe5\xc2\x66\xb3\xe1\x76\xbb\x29\x29\x29\xe1\xf5\xd7\x5f\xe7\xbd\
\xf7\xde\xc3\x15\x44\xe8\x8c\x4f\xef\x41\xe1\x0d\x33\x49\xea\x53\
\x80\xdd\x62\xf1\x91\x40\xf2\x78\x28\xff\xe2\x03\xaa\x76\x7f\x11\
\xd8\x64\x14\x17\x88\x60\xd8\x59\x02\x18\x81\x8f\x08\x18\xfc\xee\
\x83\x86\x33\x60\xea\x1c\xb4\x3a\x41\x0c\xa0\x66\xff\x2e\x4a\x56\
\xbe\x8e\xcb\xaa\xde\x31\x4d\x9e\x3c\x99\x47\x1f\x7d\x94\x61\xc3\
\x86\x75\xb2\x2b\xe7\x06\xa7\xd3\x89\xd5\x6a\x45\x92\x24\x2a\x2b\
\x2b\x79\xe5\x95\x57\x78\xeb\xad\xb7\xb0\x2b\xa6\x7c\x19\x1a\xb2\
\x47\x5d\x47\xee\x35\x37\xe3\xb4\xd9\x90\x3c\x7e\x19\xe0\xd4\xbe\
\xaf\x38\xb2\xe1\x3d\x24\xb7\x1b\x7d\xac\xc9\xe2\xb2\x59\xb2\x90\
\x35\x88\x5d\x1e\x9d\x21\x80\x16\x59\xd2\x17\xa6\xfd\x3e\xc3\x27\
\x50\x70\xdd\x1d\xc2\x7a\x2f\x79\x3c\x1c\xfe\x7c\x39\xc7\xb6\xac\
\x25\x50\x68\xca\xcb\xcb\x63\xc1\x82\x05\x4c\x98\x30\xa1\x13\x5d\
\x08\x2f\x3c\x1e\x0f\x56\xab\xd5\xf7\xf6\x57\x57\x57\x33\x7f\xfe\
\x7c\x96\x2f\x5f\xae\xaa\x9b\xd8\x2b\x97\x81\xd3\xee\x43\x6b\x8c\
\x13\x48\x60\x6d\x38\x4d\x4b\x75\x05\xc9\xd9\x45\xc4\x26\x27\xbf\
\xbd\xf9\xf9\xb9\x33\xa2\xf6\x03\xce\x01\x9d\x21\xc0\x4b\xc0\x5c\
\x65\x41\xee\xd5\x37\xd1\x6f\xec\xcd\x42\x25\xa7\xa5\x85\x3d\xcb\
\xfe\x4c\xd3\x09\x51\xa3\x66\x32\x99\x78\xe2\x89\x27\x98\x3b\x77\
\x2e\x06\x83\x7a\x4b\x18\x0a\x16\x8b\x85\xe7\x9e\x7b\x8e\xcf\x3e\
\xfb\x8c\xe6\xe6\x8e\xbd\x5c\x1a\x8d\x86\xbc\xbc\x3c\x1e\x7b\xec\
\xb1\x56\xe5\x0a\x87\xc3\x81\xcd\x66\xf3\x29\x9a\xd6\xaf\x5f\xcf\
\x53\x4f\x3d\x45\x45\x45\x85\x50\xcf\x10\x17\xcf\xa0\xe9\x0f\x62\
\xca\xe8\x1d\x54\x29\xa5\x01\xb4\x46\xc3\x9f\x37\x3f\xff\xd0\xaf\
\x55\x5f\x76\x31\x74\x94\x00\x8f\x11\xb0\xdf\xed\x33\x7c\x02\xfd\
\x27\xff\x4c\xa8\x64\x6f\x6e\x64\xcf\x9b\x2f\xd2\x72\xea\x84\x50\
\xde\xbf\x7f\x7f\x96\x2c\x59\xc2\xc0\x81\x03\x3b\xdc\xd1\x39\x73\
\xe6\xf0\xef\x7f\xff\xbb\xc3\xed\x94\x88\x89\x89\x61\xcb\x96\x2d\
\x14\x15\x15\x85\xac\xe3\x76\xbb\xb1\x58\x2c\xbe\x1d\x82\xcd\x66\
\xe3\xe9\xa7\x9f\x66\xd9\xb2\x65\x42\x3d\xad\x4e\xcf\x25\x37\xfd\
\x82\xa4\xec\xe0\xe7\x5a\x1a\x34\xb8\xed\x8e\x3f\x6c\xfb\xdb\x6f\
\xaa\x81\x3a\x60\x39\xed\x38\x82\x8c\x36\x74\x1d\xa8\x3b\x06\x58\
\x8c\xbc\x04\x00\xf2\x9a\x5f\x34\xe5\xe7\xc2\xb4\x6f\xae\x39\xc9\
\xae\x45\xcf\x9d\x3d\x72\xf5\x63\xc6\x8c\x19\xbc\xf3\xce\x3b\xf4\
\xea\x25\xe8\x88\xda\x05\x8b\xc5\xc2\x9c\x39\x73\x42\x6e\xdb\xda\
\x0b\xb7\xdb\x4d\x52\x52\x12\xd7\x5c\x73\x4d\xc8\x3a\x5a\xad\x16\
\xa3\xd1\x88\xcb\xe5\x42\x92\x24\xf4\x7a\x3d\x13\x27\x4e\xa4\x6f\
\xdf\xbe\x7c\xf1\xc5\x17\x38\x9d\x4e\x00\x24\xc9\x43\xed\x81\xdd\
\xc4\x26\xa5\x12\x9f\x11\xfc\x37\x95\xae\xfa\xc7\x35\xb6\xc6\xda\
\x1b\x91\xcf\x42\x62\xe9\x82\xe7\x07\xfa\xb6\xab\x00\xb2\x7a\x77\
\x99\xb2\x7e\x6a\xde\x40\x06\x4c\x9d\xa3\x1a\xfc\x9d\x6f\xcc\x17\
\xf6\xf6\x3a\x9d\x8e\x17\x5f\x7c\x91\x5f\xfc\xe2\x17\x9d\xee\x64\
\x6d\x6d\xad\x20\x9d\x77\x8b\xd1\x91\x9e\x10\xa4\xeb\x6e\x8f\xca\
\x00\xc4\xe5\x91\x38\xde\xec\xb7\xeb\xa8\xaa\xaa\x6a\xf3\x79\x1a\
\x8d\x86\xf8\xf8\x78\xac\x56\xab\x6f\xc0\xa7\x4f\x9f\xce\xa0\x41\
\x83\xb8\xf3\xce\x3b\xa9\xae\x96\xb7\xf9\x92\xc7\xc3\xc1\x8f\x97\
\x22\xb9\x9d\x64\x0e\x1a\xa9\xba\x4f\xd3\x89\x43\xca\xcb\xc7\x80\
\x2f\x80\x35\x6d\x76\x20\x8a\x68\x0f\x01\xb4\xc8\x7b\x7d\x1f\xcd\
\xbd\xfb\x7c\xa5\xb4\x6f\x6f\x6a\x60\xcf\xd2\x3f\x09\x83\x6f\x34\
\x1a\x79\xed\xb5\xd7\x98\x36\x6d\x5a\x18\xbb\x0c\x37\x17\xa7\xf0\
\xf7\x19\xfd\x54\xe5\x9a\x53\xb5\x60\x13\xa5\xf7\x63\x4d\x6e\x0a\
\xdf\xe8\xb8\x5e\x46\xa3\xd1\x10\x17\x27\x9f\x22\x7a\x49\x50\x54\
\x54\xc4\x07\x1f\x7c\xc0\x9d\x77\xde\xc9\xa1\x43\xfe\xc1\x3d\xf4\
\xe9\xbb\x68\x0c\x31\x64\x14\x5e\x2e\xdc\x23\xa3\xe8\x72\x4e\x7d\
\xb7\xcd\x77\x4b\x60\x11\xb2\x11\x4c\x65\x87\x3b\x14\x21\xb4\x67\
\xc3\x3d\x0f\xc5\xc1\x8e\x46\xa7\x53\xed\xf3\x9d\x56\x33\xbb\xdf\
\x7c\x11\x5b\x63\x9d\xaf\x2c\x3e\x3e\x9e\xe5\xcb\x97\x87\x7d\xf0\
\xa3\x09\x2f\x09\x94\xc2\x6a\xef\xde\xbd\x59\xb1\x62\x05\x43\x86\
\x0c\x51\xd4\x94\x38\xbc\xee\x2d\x1a\x2b\x0e\x08\xed\x73\xae\xbe\
\x99\xd8\x64\xc1\xcc\x30\x03\xf8\x7f\x91\xec\x73\x47\xd1\x16\x01\
\x7a\x20\x1b\x41\xf8\x90\x3f\x7e\xba\xa0\xe1\x93\x3c\x1e\xf6\xbe\
\xfd\x12\xe6\xd3\x7e\x52\x1b\x8d\x46\x96\x2d\x5b\xd6\xea\x5a\x7b\
\xa1\x40\xa3\xd1\x60\x32\x99\xd0\xe9\xfc\xe2\x52\x6a\x6a\x2a\x4b\
\x97\x2e\xa5\x7f\x7f\xbf\x7d\x8b\xc7\xed\xe2\xc0\xaa\x37\xb0\xd6\
\xfb\x65\x1f\x9d\x31\x96\xfe\xd7\xdf\x85\x46\x27\x88\x5a\x37\x00\
\x77\x44\xbe\xe7\xed\x43\x5b\x04\x58\x08\x24\x79\x2f\xd2\xf2\x07\
\xd1\x77\xc4\xb5\x42\x85\x23\x1b\xde\xa7\xf1\x98\x9f\xf9\x1a\x8d\
\x86\x97\x5e\x7a\x89\x71\xe3\xc6\x85\xb3\x9f\xe7\x1d\x26\x93\x49\
\x90\x77\x92\x93\x93\x59\xb6\x6c\x19\xbd\x7b\xf7\xf6\x95\xb9\x1d\
\x36\xca\x56\x2f\xc6\xe3\x72\xf8\xca\x12\xba\xf7\xa1\xef\xc8\xeb\
\x03\x6f\xb7\x10\xd9\xaf\xe1\xbc\xa3\x35\x02\x4c\x44\x61\xc9\xa3\
\xd5\xeb\xe5\xed\x9e\xe2\x8f\x50\x7f\xa4\x84\x63\x5b\x56\x0b\x8d\
\xe6\xcf\x9f\xcf\x1d\x77\x74\x19\x82\x87\x0d\x5a\xad\x96\xf8\xf8\
\x78\x81\x04\x59\x59\x59\xfc\xeb\x5f\xff\xa2\x5b\x37\xbf\xcd\x8b\
\xb9\xf6\x24\x47\x37\xbe\x2f\xb4\xed\x79\xf9\x58\xe2\x33\x7b\x2b\
\x8b\x32\x80\xa7\x22\xdb\xe3\xf6\x21\x94\x10\xa8\x47\x36\x84\xf4\
\x21\x67\xf4\x8d\x98\xd2\xb2\x7c\xd7\x0e\x73\x13\xdf\xaf\xf8\xbb\
\xa0\x08\x99\x36\x6d\x1a\x0f\x3d\xf4\x50\xab\x0f\xac\xa9\xa9\x61\
\xeb\xd6\xad\xd4\xd7\xb7\xdf\x17\xa3\x23\x75\x23\x09\x9d\x4e\x47\
\x4c\x4c\x0c\x36\x9b\xff\xc4\xb7\xa0\xa0\x80\x05\x0b\x16\x70\xdf\
\x7d\xf7\xf9\xca\x4e\xed\xfb\x8a\xe4\xbe\x85\xa4\xf5\xbf\x14\x00\
\x8d\x46\x4b\xbf\x71\xd3\xd9\xf7\xef\xbf\x28\x77\x29\x0f\x02\x2f\
\x03\x07\xa3\xf6\x03\x82\x20\x14\x01\x66\xa0\x30\xe0\x34\xa5\x75\
\x27\x7b\x94\x38\x8d\x1d\xfa\xe4\x1d\x1c\x2d\x7e\x6b\xe9\x9c\x9c\
\x1c\x5e\x7a\xe9\xa5\x56\x1f\xf6\xd9\x67\x9f\x31\x6b\xd6\xac\x0e\
\x6b\xf2\xba\x12\x62\x62\x62\x70\x3a\x9d\xb8\xdd\xfe\xad\xe5\x0d\
\x37\xdc\xc0\xec\xd9\xb3\x59\xb4\x68\x91\xaf\xec\xc8\x86\xe5\x24\
\x65\x17\xfa\xec\x11\xbb\xf5\xc8\xa1\xfb\xc0\xe1\xca\x5d\x81\x01\
\xf8\x1f\xe0\xd6\x68\xf5\x3d\x18\x82\x2d\x01\x5a\x64\xd3\x6d\x1f\
\xf2\x27\xde\x2e\x58\xf2\x34\x1e\x2b\xa3\xea\x5b\xff\x61\x97\xc1\
\x60\x60\xf1\xe2\xc5\x24\x26\x26\xb6\xfa\xb0\xb9\x73\xe7\x5e\xd0\
\x83\xef\x85\xc9\x64\x52\x95\x3d\xfd\xf4\xd3\x0c\x18\x30\xc0\x77\
\xed\xb4\x34\x73\xfc\xab\xb5\x42\x9d\xbe\xa3\x6e\x44\x6b\x30\x2a\
\x8b\xa6\x01\x03\x38\x8f\x08\x46\x80\x69\x28\xec\xf6\xe3\x33\x7b\
\x91\x5e\x78\xa9\xef\x4b\xc9\xe3\xe1\xc0\xda\x37\x51\x6a\x35\x1f\
\x7a\xe8\x21\x2e\xbb\xec\xb2\x56\x1f\x54\x55\x55\x45\x65\x65\x97\
\xd9\xfe\x9e\x13\xb4\x5a\x2d\xb1\xb1\xb1\x42\x99\xd1\x68\x64\xc1\
\x82\x05\xc2\x51\x76\xf5\x9e\x2d\x98\x4f\xfb\xd5\xe1\x06\x53\x02\
\x59\xc5\xa3\x84\x5b\x21\x5b\x52\x9d\x37\x04\x2e\x01\x1a\xe0\xb7\
\xca\x82\xdc\x31\x53\x04\xc1\xe7\xe4\xae\x4d\xb4\x54\x1f\xf7\x5d\
\xf7\xea\xd5\x8b\x27\x9e\x78\xa2\xcd\x07\x79\x95\x29\x5e\xc4\x15\
\x0f\x27\x79\xca\x4c\x55\x3d\x97\xd9\x85\xe4\x12\x55\xbe\x9e\xa6\
\x06\xea\x17\xff\xb1\xcd\x67\x44\x13\x46\xa3\x11\x87\xc3\x21\xa8\
\xa7\x8b\x8b\x8b\x99\x31\x63\x06\x4b\x97\x2e\x05\x64\x75\xf1\xb1\
\x2d\xab\x18\x30\xed\x01\x5f\x9d\x5e\x57\x8c\xa3\xfa\xdb\x2f\x95\
\x3b\x85\x3b\x80\xdf\x03\xfe\x3f\x6a\x14\x11\x48\x80\x11\x28\xdc\
\xb5\x4c\x69\x59\x64\x0e\xb8\xc2\xf7\xa5\xe4\xf1\x50\xbe\x45\xd4\
\x64\xce\x9f\x3f\x9f\xf8\x78\xd1\xf8\xa3\x3d\x30\xe6\xf4\x27\x79\
\xda\xdd\xaa\x72\x47\xbd\x03\xc9\x2e\xba\xe4\xb9\x4e\x55\x74\x39\
\x02\x68\x34\x1a\x62\x62\x62\xb0\x5a\x45\x4f\xf3\x27\x9f\x7c\x92\
\xb5\x6b\xd7\xfa\x04\xd7\xc6\x63\x07\x68\xae\x3e\x46\xb7\xac\x6c\
\x00\x0c\xa6\x6e\x74\x2f\x1e\x41\xd5\xae\x4d\xde\x26\x7a\xe0\x17\
\x04\xe8\x5b\xa2\x85\xc0\x25\x40\x78\x25\xfb\x8e\xb8\x56\xb0\xdb\
\x3f\xb5\x6f\x9b\x60\x15\x7b\xc5\x15\x57\x30\x75\xea\xd4\x88\x76\
\xb0\x2b\xc3\x68\x34\xaa\xac\x97\x92\x93\x93\xb9\xff\xfe\xfb\x85\
\xb2\xca\xed\x9f\x08\xd7\xf2\x32\x20\x1c\xc4\xde\x4d\xc7\x0e\xe6\
\xc2\x06\x65\xef\x63\x50\x18\x79\x68\x0d\x46\xba\x0f\x1a\xee\xfb\
\x52\x92\x24\xca\x03\xf6\xfc\x8f\x3e\xfa\x68\xa4\xfb\xd7\xe5\x11\
\x13\xa3\x76\x52\x9d\x35\x6b\x16\xc9\xc9\xc9\xbe\xeb\xfa\x23\x25\
\x98\x6b\xfc\xf2\x4f\x5c\x4a\x26\x89\xa2\xbd\x64\x1f\x64\xf7\xb9\
\xa8\x43\x49\x80\x29\x28\x5c\xb4\x33\x0a\x7f\x22\xb8\x6b\x35\x1c\
\x29\xc1\x5c\xe3\x37\xad\x1e\x30\x60\x00\xd7\x5f\xaf\xd2\x70\xfd\
\xe8\x60\x30\x18\x54\x56\xcf\x09\x09\x09\x01\xa7\x9f\x12\xa7\xf6\
\x89\x26\x82\xdd\x07\x5d\x19\x78\xab\xe9\x11\xe9\x60\x1b\x50\x12\
\x40\x50\xdf\x65\x15\x8b\x96\x33\x55\x7b\xc5\x1f\x30\x6f\xde\x3c\
\xd5\x0f\xff\x31\x42\xa3\xd1\x60\x34\x1a\x55\xe5\x77\xdf\x7d\xb7\
\x30\x3b\xd4\x1e\xd8\x8d\xc7\xed\x3f\xd2\x4e\x2b\x18\x12\xb8\x25\
\x9c\x42\xfb\x8f\xe7\xc3\x06\xbd\xe2\x5f\x9f\xf2\xde\x18\x9f\x48\
\x5a\xfe\x60\x5f\x25\xb7\xc3\x4e\x4d\xe9\x4e\xdf\xb5\xc9\x64\xe2\
\xe6\x9b\x45\x13\xb0\x0b\x05\x2b\x57\xae\xe4\xcb\x2f\xbf\x6c\x77\
\xfd\xf6\x98\x93\x19\x8d\x46\x95\x11\x69\x52\x52\x12\x13\x27\x4e\
\xe4\xa3\x8f\x3e\x02\x64\x33\xf2\x86\xa3\x25\xa4\xe5\x17\x03\xb2\
\xb7\x54\x72\xdf\x42\xea\x0f\xef\xf3\x36\x49\x07\x46\x23\x07\xa8\
\x88\x1a\xbc\x04\x18\x0a\xf8\xb4\x38\xa9\x79\x03\x05\xe1\xaf\x66\
\xff\x2e\xdc\x0e\xff\x0f\x9c\x3a\x75\x6a\xa7\x24\xff\xae\x80\x96\
\x96\x16\x5a\x5a\x42\x3b\xa3\x04\xc3\xd1\xa3\x47\x7d\x4e\x25\xc1\
\xcc\xc9\xb4\x5a\x2d\x3a\x9d\x4e\xd0\x0e\x02\xdc\x72\xcb\x2d\x3e\
\x02\x00\xd4\x94\x7c\xe3\x23\x00\x40\x4a\xbf\x81\x4a\x02\x00\x8c\
\x27\xca\x04\xf0\x8e\xf2\x58\x65\x61\x4a\x8e\xf8\x23\x6b\xf6\xef\
\x12\xae\x6f\xbf\x5d\x30\x08\xfe\x51\xc0\x6e\xb7\xb7\x6a\x93\x18\
\xcc\xc0\x75\xec\xd8\xb1\xa4\xa5\xf9\x0f\xfd\x1a\x2b\x0e\x20\x29\
\x48\x92\x92\x3b\x20\xd0\x49\x56\x18\x87\x68\xc0\x3b\x03\x88\x04\
\xc8\xf5\x1b\x3a\x4a\x92\x44\x43\xb9\xdf\xfb\x25\x25\x25\x85\x31\
\x63\xc6\x44\xa5\x73\x91\x80\x31\x21\x06\x53\x6a\x82\xaa\xdc\x29\
\x49\x04\x5a\x1c\x4a\x6e\x0f\xb6\x93\x7e\xf7\xaf\xd6\xcc\xc9\xf4\
\x7a\xf5\xf2\xad\xd7\xeb\x19\x3b\x76\x2c\xef\xbd\xf7\x1e\x20\xbb\
\xc4\x35\x57\x1f\x23\xb1\x97\x6c\xcd\x64\x8c\x4f\x24\x2e\x25\x53\
\x69\x43\x30\x14\x39\xe4\x5d\xd4\x5c\xce\xf5\xc8\xfb\x4f\x9f\x41\
\x5b\x6c\x72\xba\x10\x96\xa5\xe5\xd4\x71\xc1\xcc\xeb\xaa\xab\xae\
\x12\x8c\x23\x2e\x34\x14\x5d\x7f\x29\x3f\xfd\xd3\xcf\x54\xe5\xfb\
\x6d\x36\x9a\x02\xa6\x70\x6b\x65\x03\x5b\xc6\xfd\x6f\xbb\xee\xab\
\xd3\xe9\xd0\x68\x34\x2a\x33\xf1\x91\x23\x47\xfa\x08\x00\x70\xe6\
\xf8\x41\x1f\x01\x00\xba\x65\x65\x2b\x09\x60\x40\x76\x2b\x8b\x9a\
\x57\x91\x16\x39\x08\xa3\xef\x74\x23\xa9\x8f\xe8\xcf\xd7\x70\x54\
\xf4\x7d\xbb\x90\xdf\xfe\x48\x23\xd8\x8b\x71\xd5\x55\x57\x09\xd7\
\x67\x8e\x8b\xa7\xbf\x09\x59\x7d\x03\x9b\x0c\x09\x2c\x88\x24\xf4\
\x40\xa1\xb2\x20\x3e\xad\x87\x50\x21\x30\x30\xc2\x45\x02\x84\x86\
\x5e\xaf\x57\xf9\x16\xf6\xec\xd9\x93\x9c\x9c\x1c\xca\xcb\xcb\x01\
\x30\x9f\x3e\x8e\x7c\x90\x26\xaf\xfd\x41\x08\x30\x12\x58\xd7\xc1\
\x47\x37\x23\xc7\x30\xea\x30\x54\x04\x30\xa5\x67\x09\x15\xcc\x35\
\xfe\x75\x2f\x36\x36\x96\xc2\x42\xa1\xfa\x45\x28\x10\x6a\x69\x2c\
\x2a\x2a\xf2\x11\xc0\xed\xb0\xe3\x68\x69\xc2\x98\x20\x5b\xda\xc5\
\x25\xab\xe2\x26\xdc\x79\xf6\xd3\x51\xbc\x8e\x7c\xa6\xd0\x21\x68\
\x09\x24\x80\xc2\xea\x47\x92\x24\x2c\x0a\x23\xc7\xfc\xfc\xfc\x0b\
\x7a\xfd\x8f\x34\x42\xfd\x6d\xf2\xf2\xc4\x65\xd5\xda\x70\xda\xdf\
\x26\x26\x16\x7d\xac\xda\xbe\xa0\x13\xb8\x1b\xc8\xee\x68\x23\x2d\
\xa0\xe8\x9d\x46\x20\x80\xad\xb1\x16\x8f\xd3\x6f\xe0\xa8\xb4\x82\
\xbd\x08\x35\x34\x1a\x4d\x50\xed\xa8\x8a\x00\xf5\xa7\x85\xeb\x98\
\xc4\xd4\x70\x3c\xbe\x0e\x50\xc5\xaf\x69\x0b\x7a\xe4\x28\xdb\x00\
\xe8\x8c\x46\x21\x02\x67\x60\x10\x87\x82\x82\x82\xce\x77\xef\x47\
\x82\x60\x3b\x81\xdc\xdc\x5c\xe1\x3a\x30\x30\x65\x4c\xb7\x14\xc1\
\x70\x04\x39\xdc\x4c\x47\x02\x4c\xd8\x90\x97\x80\x0e\x6f\x1f\xf5\
\x28\xc2\xb8\xe9\x8c\xa2\x95\x8b\xcb\x26\xf6\x21\x3d\xfd\x47\x17\
\x4b\xb9\xc3\x08\x36\x03\x28\x4f\x06\x01\xdc\x4e\x31\x8c\x90\x2e\
\x46\xfc\xbb\x03\x4f\x03\x47\xc3\xdb\xb3\xe0\xd0\xa2\x20\x80\x3e\
\xa0\x23\x2e\xbb\x68\xec\xd0\x96\xcd\xdf\x45\x04\x27\x40\x42\x82\
\xa8\x78\x52\xaa\xd5\x01\x74\x06\xd5\x61\x52\x58\x84\x82\xf6\xa0\
\xd5\x19\xc0\x7d\x8e\x04\x38\x7a\xf4\x28\x5b\xb7\x6e\xc5\xe1\x70\
\xa8\x4c\xbb\x1d\xe5\x65\x34\xae\x78\x5d\xd5\x26\x94\x49\x98\x12\
\x65\x35\x36\xfe\xf9\x95\xb8\x8e\x02\x68\x9a\x5a\x20\x60\x1b\x56\
\x67\x15\xef\x55\x77\xf8\x34\xbb\x97\xaa\xf5\x2c\xd5\x4e\x27\xd6\
\x00\xef\x63\x67\xa3\x38\x03\x1e\x3c\x78\x90\x37\xde\x78\x03\x90\
\x63\x19\x4d\x98\x30\x41\xa5\x01\xec\x0c\x01\xb4\x7a\x15\x01\xa2\
\x76\xd0\xa2\x47\xc1\xb6\x40\x26\xba\x02\x3a\x1a\xf8\x43\x5a\xc3\
\xde\xbd\x7b\x19\x37\x6e\x5c\x90\x50\x2b\x32\xac\x7b\xb7\x63\xdd\
\xbb\xbd\x03\x5d\xf5\x63\x7b\x79\x0b\xdb\xcb\x3b\x76\xa0\xe3\x45\
\xe5\xce\xa3\x54\xee\xec\xdc\xec\xfa\xf5\xd7\x5f\xf3\xf5\xd7\x5f\
\xfb\xae\xa7\x4f\x9f\xee\x23\x84\x17\xc1\x08\xe0\x75\x32\xf5\xc2\
\xed\x0c\x24\x80\xea\x1c\xe1\x65\x40\x15\x82\xec\x1c\x61\x41\x0e\
\xdb\xbb\x49\x59\xa8\x07\xec\x9c\x25\x81\xf2\xbc\x1a\x50\x85\x71\
\x0d\x16\x40\x29\x14\x36\x6c\xd8\x10\x72\xf0\x7f\x28\x58\xb3\x46\
\xed\xe9\x1d\x2c\x62\x48\xe0\xdf\x21\x70\xc0\x83\x44\x43\xbd\x22\
\xb0\x20\x4c\x18\x03\x74\x47\xce\x9a\x22\xf7\x05\x85\xe4\xe8\xb6\
\x07\x08\x27\x01\x53\x93\xd2\x23\xa6\x2d\x8c\x1b\x37\x4e\x65\x3a\
\xfd\x43\xc3\x8d\x37\xde\xd8\xae\x7a\x66\xb3\x28\x9c\xeb\x0c\xa2\
\x19\x59\xa0\x50\x18\x41\x24\x20\x9b\xfe\xf9\xa0\x47\x26\x40\x06\
\x80\xcb\x21\x76\x44\x1b\x70\xc4\xd9\x11\x02\x0c\x1e\x3c\x98\x1d\
\x3b\x76\xb0\x65\xcb\x16\x1c\x0e\x47\xdb\x0d\x2e\x30\xf4\xec\xd9\
\x33\x68\x80\xab\x60\x51\x4c\x02\xed\x0f\x54\xb2\x96\x23\x2a\x33\
\x65\x23\xf2\xee\x42\xf0\xcc\xd1\xa3\xd8\x6f\xba\x03\x08\x10\xc8\
\xd4\xc6\xc6\x8e\x2d\x4b\xd9\xd9\xd9\x64\x67\x77\x58\x39\xf5\x83\
\x43\xa0\x37\x54\x60\xb6\x93\xc0\xbf\x3b\x90\x49\x27\x75\xfb\x1d\
\x85\x16\x85\xf6\xc8\x6d\xb7\x09\x06\x0b\xb1\x49\xa2\x86\xea\xd8\
\xb1\x63\xd1\xe8\xd3\x05\x8d\x40\xab\x20\x50\xff\xdd\x8c\xf1\x49\
\xc2\x75\x80\xbe\xc5\x41\x27\x34\x7a\x9d\x85\x16\x85\xc2\x41\x92\
\x24\x41\xfb\x17\x9b\x2c\xa6\x6b\xb9\x48\x80\xd6\x11\x2a\x88\xd5\
\xe1\xc3\xe2\x89\x6a\x60\xe0\x6c\x9b\xa8\x71\x3d\x49\x14\xa3\x89\
\xe9\x91\x93\x29\xfa\x60\xae\xad\xf2\x9d\x08\xc6\x24\xa6\xa0\xd5\
\xeb\x7d\x52\xaa\xf7\x44\xeb\x42\x82\x24\x49\x94\x97\x97\x93\x95\
\x95\xa5\xda\x8e\x45\xe2\x59\xc1\x70\xe4\x88\x18\x2b\x51\x69\x70\
\xe3\xb2\x59\x70\x5a\x05\x19\x41\x83\x9c\xc2\xae\x23\x68\x06\x56\
\xd3\x89\x9c\x46\x2a\x02\x58\xea\xaa\xf0\x7a\x87\x69\x34\x1a\xe2\
\x52\x32\x7d\xfe\x00\xa5\xa5\xa5\xd8\xed\xf6\xa0\xce\x10\x5d\x11\
\x92\x24\x71\xeb\xad\xb7\xb2\x6e\xdd\x3a\xb2\xb2\xb2\xf8\xf4\xd3\
\x4f\xc9\xc9\xc9\x89\xd8\xf3\x82\x4d\xff\x20\x2b\x90\xbc\xd0\xe8\
\x74\xc4\x24\xfa\xed\x04\x95\x27\x83\x67\x91\x4d\xe7\xe2\x08\x7d\
\x0f\x5c\x4a\x07\xf3\x19\x69\x91\xa3\x5b\xfb\x60\xa9\x15\x23\x6a\
\x29\xcd\x97\x6c\x36\x1b\xdf\x7e\x1b\x3c\xd8\x73\x57\xc4\x17\x5f\
\x7c\xc1\xba\x75\xb2\x6d\x45\x75\x75\x35\x2f\xbe\xf8\x62\x44\x9f\
\x17\x8c\x00\x4d\x4d\x4d\x94\x96\x96\xfa\xae\x4d\xa9\x59\x82\x7e\
\x25\x08\x01\x3a\x8b\x81\x80\xca\xba\xa4\x2d\x68\x81\xbd\xc8\xca\
\x20\x00\x9a\xaa\xca\x85\x0a\x49\x7d\xf2\x85\xeb\x6d\xdb\xb6\x71\
\xa1\x20\x30\xba\xe7\xbb\xef\xbe\xdb\x61\x93\xf0\x8e\x20\xd0\x03\
\x1a\x60\xeb\xd6\xad\x02\x31\x92\xfa\x8a\x27\xaa\xe6\x80\x68\xaa\
\xe7\x80\x9d\xc8\xb9\x0e\x3b\x04\x3d\xf2\x51\xe2\x6e\xe0\x4a\x6f\
\x87\x9c\x96\x16\x0c\x26\x59\xed\x1b\x48\x80\xcd\x9b\x37\x33\x6f\
\xde\xbc\x73\xed\x6c\xa7\xe0\xf1\x78\xd8\xb1\x63\x07\x99\x99\x99\
\x6d\x4e\xe5\x66\xb3\x99\x95\x2b\x57\xaa\xca\x56\xac\x58\xc1\xac\
\x59\xb3\x22\xd2\xb7\x60\x32\x40\xa0\x13\x4a\x52\x6f\x91\x00\x67\
\x8e\x97\x09\xb7\x01\x66\xa1\xd0\xd4\xb5\x13\x56\xe0\x73\x3a\x91\
\xe9\xd4\x7b\x92\xb1\x95\xb3\x04\xf0\x9a\x81\x7b\xdd\xc2\xe3\x33\
\x7a\x62\x4c\x48\xc4\xd1\x22\xcb\x17\x1b\x36\x6c\xa0\xb9\xb9\x59\
\x08\x8c\x14\x2d\xdc\x73\xcf\x3d\xbc\xf3\xce\x3b\xe8\x74\x3a\x96\
\x2d\x5b\xd6\xaa\x6f\xe2\x07\x1f\x7c\xa0\xd2\xc0\x01\x2c\x5e\xbc\
\x38\x22\x04\x08\xa5\x26\xdf\xb8\x71\xa3\xef\xff\x1a\xad\x56\x70\
\x0a\x75\x5a\x5a\x02\x43\xea\xee\x03\x96\x86\xbd\x73\xad\xc0\xbb\
\x18\x09\xc7\x63\x0d\x47\xfd\x6b\x96\x46\xa3\x21\xa3\xc8\x1f\x01\
\xd3\x6e\xb7\xb3\x76\xad\x18\xfa\x24\x1a\xd8\xb4\x69\x13\xef\xbc\
\xf3\x0e\x20\xaf\xb5\x2f\xbc\xf0\x42\xab\xf5\x03\xa7\x7f\x2f\xbe\
\xfe\xfa\x6b\x61\x4d\x0e\x17\x82\x4d\xff\xbb\x77\xef\xe6\xe8\x51\
\xff\xc1\x53\xb7\x1e\x39\x82\x16\x50\x7e\xfb\x85\x59\x63\x63\xd8\
\x3b\xd6\x06\xbc\x04\xd8\x00\xf8\x7e\x41\xfd\x91\xef\x85\x4a\xca\
\x20\x11\x20\xbf\x5d\xd1\x84\xdb\xed\xe6\xc9\x27\x85\xb0\x45\xec\
\xd8\xb1\x43\xb5\xbd\xf2\xa2\xa2\xa2\x82\x2d\x5b\xb6\x84\xbc\xdf\
\xe2\xc5\x8b\xc3\xda\x3f\x49\x92\x82\xce\x00\x81\xf9\x06\xd2\x8b\
\xc4\x50\xb2\x67\x2a\x84\xe9\x1f\x60\x7d\x58\x3b\xd6\x0e\x78\x09\
\x50\x8f\xc2\x27\xcd\x52\x77\x8a\xa6\x93\xe5\xbe\x4a\x29\x39\x45\
\x3e\x99\x00\x60\xed\xda\xb5\x51\x8d\xf7\xb3\x74\xe9\x52\xf6\xed\
\xdb\xa7\x2a\x7f\xf7\xdd\x77\x83\xd6\x7f\xfb\xed\xb7\x05\xa5\xcc\
\xec\xd9\xb3\x05\xd7\xad\xe0\x19\x41\x3a\x8f\x60\x6f\xbf\xd3\xe9\
\xe4\xc3\x0f\x3f\xf4\x5d\x6b\x75\x7a\xd2\xfb\xfb\x73\x65\x7b\x5c\
\x4e\xea\x0e\x09\xb9\x8a\xcc\xc8\x69\xec\xa3\x0a\xe5\x79\xef\x0a\
\xe5\x17\xd5\x8a\x28\x60\x1a\xad\x96\x1e\x43\xfc\xc1\x8d\x5c\x2e\
\x17\xaf\xbd\xf6\x5a\xc4\x3b\x07\xb2\xe0\xf6\xc7\x3f\x06\x0f\x0f\
\xe3\x5d\x12\x02\xf1\xf6\xdb\x6f\x0b\xd7\xf3\xe6\xcd\xe3\xba\xeb\
\xfc\x79\x2c\xeb\xeb\xeb\x05\xa7\xcd\x73\x45\x30\x32\xad\x58\xb1\
\x42\x30\x82\x49\xe9\x37\x50\xb0\xfe\x6d\x38\xf2\x7d\xa0\x0a\xf8\
\x63\xce\x43\xca\x39\x25\x01\x56\x82\xdf\x3d\xee\xd4\x77\xdb\x84\
\x73\x81\xde\xc3\xc6\x0b\xfb\xd7\x45\x8b\x16\xa9\xe2\xe3\x44\x02\
\x0b\x17\x2e\xf4\x85\x67\x0f\x44\x59\x59\x19\x7b\xf6\xec\x11\xca\
\xb6\x6f\xdf\x2e\x28\x5e\x86\x0f\x1f\x4e\x41\x41\x01\x77\xdd\x75\
\x97\x50\x6f\xc9\x92\x25\x61\xe9\x9f\xcb\xe5\x52\xa9\x80\xdd\x6e\
\x37\x2f\xbf\xfc\xb2\x50\x96\x39\x70\xb8\x70\x5d\x53\xfa\x4d\xe0\
\xad\x82\xb3\x39\xc2\x50\x12\xa0\x1a\x45\x42\x03\x87\xb9\x59\x98\
\xa2\xe2\x52\x32\x48\xef\xef\xf7\x5a\xaa\xab\xab\xe3\xef\x7f\xff\
\x7b\x44\x3b\x57\x55\x55\xc5\x5f\xff\xfa\x57\xa1\x6c\xfa\x74\x31\
\x90\x46\xe0\x32\x10\x28\xfc\x79\xc3\xd6\x4e\x98\x30\x41\x48\x56\
\xb1\x71\xe3\xc6\xb0\x9c\x6d\x04\x7b\xfb\x57\xad\x5a\x25\xc8\x27\
\xf1\x19\xbd\x04\x87\x5b\xa7\xb5\x45\x70\xb8\x45\x5e\x82\xc5\x3d\
\x6b\x94\x10\x68\xbf\x34\x45\xd9\x91\x94\xdc\x4b\xb8\xec\xae\xc7\
\x7c\x5f\x36\x94\x1f\x60\xd7\xa2\xe7\x7c\xd7\xc9\xc9\xc9\xec\xdd\
\xbb\x97\x94\x94\x14\x3a\x82\x4d\x9b\x36\xf1\xea\xab\xaf\x06\xdd\
\xa6\x29\x51\x59\x59\x49\x59\x99\x5f\x50\x9a\x32\x65\x0a\x0b\x17\
\x2e\xa4\x7f\xff\xfe\x3e\xa1\xab\x67\xcf\x9e\x94\x96\x96\xa2\xd5\
\x6a\xb1\xd9\x6c\xe4\xe7\xe7\x73\xe6\x8c\x1c\xc1\x34\x36\x36\x96\
\x43\x87\x0e\x91\x94\x24\x9f\xbe\x3d\xf3\xcc\x33\x3c\xff\xbc\x3f\
\xe3\xcd\x25\x97\x5c\x42\x56\x96\xe8\x09\x15\x88\x84\x84\x04\xe6\
\xcd\x9b\xc7\x95\x57\xaa\x42\xba\xe0\x76\xbb\x55\x8a\x25\x97\xcb\
\xc5\xa4\x49\x93\x84\x7e\x17\xde\xf8\x73\xd2\x0a\xfc\x2f\xcf\xf1\
\xaf\xd6\x72\x7c\x9b\xe0\xfd\xf5\x32\x01\x79\x98\xa2\x85\x40\x9f\
\xe6\x8f\x90\x4f\x07\x73\x41\xde\x0e\x36\x56\x1c\x24\xf9\xac\xf6\
\x2a\x25\xa7\x90\xb4\x82\x62\xea\x0e\xca\x33\x43\x63\x63\x23\x0b\
\x16\x2c\xe0\xd9\x67\x9f\x6d\xf7\x03\xcd\x66\x33\xb7\xdf\x7e\x7b\
\x9b\x83\x1f\x08\xa3\xd1\xc8\x1f\xff\xf8\x47\x32\x32\x32\x18\x3b\
\x76\x2c\x9f\x7e\x2a\xcb\x4b\x27\x4f\x9e\x64\xf3\xe6\xcd\x5c\x7d\
\xf5\xd5\xac\x59\xb3\xc6\x37\xf8\x20\x87\x70\xf5\x0e\x3e\xc8\xc1\
\x9b\x5e\x78\xe1\x05\xdf\x94\x5d\x5a\x5a\xda\xae\x2d\xe1\xe6\xcd\
\x9b\x39\x7c\xf8\xb0\x2a\x14\x4c\xb0\x25\xf0\xb5\xd7\x5e\x13\x06\
\x3f\x2e\xb5\xbb\x10\x14\xc2\xed\xb0\x51\xb5\x67\xb3\xb2\x89\x84\
\x4c\x80\xf3\x82\xc0\x30\x71\x1e\x02\x0e\x22\xca\x37\x8b\xc2\x52\
\xfe\x84\xe9\x82\x2c\xf0\xea\xab\xaf\x06\x95\xd0\x43\xc1\x62\xb1\
\x60\xb1\x74\x3c\xa9\xe6\x3d\xf7\xdc\x43\xbf\x7e\xf2\xb9\x44\xa8\
\x65\x20\x70\xfa\x9f\x31\x43\xcc\xdc\xd6\xb7\x6f\x5f\xc6\x8e\xed\
\x78\x0c\x86\xe6\xe6\x66\x95\x55\x93\xc3\xe1\x50\xe9\xfe\xab\xaa\
\xaa\x58\xb8\x70\xa1\x50\x96\x7d\xd5\x8d\x42\x10\x88\xaa\x3d\x5b\
\x02\x85\xbf\xb5\x40\xf8\x15\x13\xed\x44\xb0\x50\xb1\xaf\x01\xbe\
\xd7\xa8\xee\xe0\x3e\x9a\xab\xfc\x6b\x65\x42\xf7\x3e\x64\x15\xfb\
\xf3\xe3\x38\x1c\x0e\xee\xbf\xff\xfe\x76\x9b\x7d\x65\x64\x64\xf0\
\xd4\x53\x4f\x05\x8d\xb7\x1b\x0c\x7a\xbd\x9e\xc9\x93\x27\xf3\xd4\
\x53\xfe\xe8\xea\x53\xa6\x4c\x11\xda\x7f\xf0\xc1\x07\x54\x54\x54\
\xb0\x7e\xbd\x7f\x1b\x9d\x95\x95\xc5\xf8\xf1\xe3\x55\xf7\x7b\xf6\
\xd9\x67\x55\xae\x5a\xad\x21\x21\x21\x81\x67\x9e\x79\x46\x65\x11\
\x1d\x6c\xed\x7f\xfa\xe9\xa7\x85\x99\x2d\x25\x77\x00\xa9\x79\xfe\
\x58\x4b\x1e\x97\x83\xaa\xdd\x1b\x03\x9b\x45\xf6\x84\xaa\x0d\x84\
\x0a\xf3\xf5\x7b\xc0\xb7\xf7\x4a\xce\x2e\xe4\xf2\xd9\x8f\xfb\x98\
\xec\xb4\xb4\xb0\xed\x6f\xbf\xf3\xa9\x87\x01\x1e\x7f\xfc\x71\x7e\
\xff\xfb\xdf\x47\xb2\xaf\x02\x66\xcf\x9e\x2d\x28\x5a\x46\x8d\x1a\
\x25\xe8\xdd\x7f\xf9\xcb\x5f\xf2\xcc\x33\xcf\x44\xe4\xd9\x16\x8b\
\x45\xb5\xf7\x5f\xba\x74\x29\x8f\x3f\xee\x0f\xfb\xab\xd5\xe9\x19\
\x32\xf3\x71\xe1\xec\x3f\xc8\xda\xbf\x19\xd9\x52\xf7\xbc\x21\x94\
\xab\xef\x4e\x64\x57\xe3\x04\x00\xdb\x99\x3a\x4c\xe9\x59\x24\x74\
\x97\x93\x1e\xe8\x0c\x46\xe2\x92\xd2\x39\x5d\xe2\xdf\xca\x6c\xdf\
\xbe\x9d\x61\xc3\x86\xa9\xfc\xe0\x22\x05\x83\xc1\x20\x44\xde\x38\
\x7e\x5c\x0c\xb5\xbb\x70\xe1\x42\x32\x33\xd5\x29\xeb\xcf\x15\xde\
\x9c\xc3\x4a\x94\x96\x96\x72\xef\xbd\xf7\x0a\xda\xc0\x3e\x23\xae\
\x15\xd6\x7e\x5b\x63\x2d\x07\x3f\x7e\x13\x49\xdc\x32\xce\xa2\x13\
\x27\x78\xe1\x44\xa8\x8c\x21\x66\x60\xbe\xb2\xe0\xe0\xba\xb7\x84\
\xb5\x2b\x73\xe0\x50\xba\x0f\xf2\xe7\xfb\x75\xb9\x5c\xcc\x9e\x3d\
\x3b\xa4\x7a\x36\xdc\x98\x38\x71\x22\xa9\xa9\xc1\xbd\x6a\x2f\xbd\
\xf4\x52\x06\x0d\x1a\x14\xf6\x67\x7a\x3c\x1e\x95\xfc\xd2\xdc\xdc\
\xcc\xfd\xf7\xdf\x2f\x90\x22\xb1\x77\x1e\xbd\x86\x0a\x69\x95\x39\
\xba\x71\x05\x1e\x97\x30\x6b\x2c\x47\x4e\x23\x77\x5e\xd1\x5a\xca\
\x98\x57\x80\xef\xbc\x17\x8e\x96\x26\x8e\x6c\x10\x53\xa1\x14\xde\
\x30\x4b\x98\xe2\x1a\x1a\x1a\xb8\xed\xb6\xdb\x22\x7a\xe6\xee\x85\
\xd1\x68\x64\xca\x94\x29\x41\xbf\x8b\x44\xca\x1a\x49\x92\x7c\x49\
\xa6\xbd\x70\x38\x1c\xdc\x7d\xf7\xdd\x82\xcd\x9f\x21\x2e\x81\xfe\
\x93\x67\x0a\x82\x72\x5d\xd9\x1e\x1a\x8e\x96\x28\x6f\x67\x05\xba\
\x44\x9c\xdd\xd6\x08\xe0\x00\xe6\xa0\x38\x63\x3e\xf1\xf5\x7a\xea\
\x0e\xf9\x25\x7e\x43\x5c\x3c\xc5\x33\x7e\x29\x78\xb7\x1e\x38\x70\
\x80\x57\x5f\x7d\x35\x02\x5d\x55\xe3\xb6\xdb\x6e\x53\x95\xe9\xf5\
\x7a\x6e\xb9\xe5\x96\xb0\x3f\x4b\x99\x5c\x1a\x64\x1d\xc0\xc3\x0f\
\x3f\xcc\x57\x5f\x7d\xa5\xa8\xa5\xa1\xe0\xda\x3b\x30\x26\xf8\xbd\
\x81\xed\xcd\x8d\x1c\x5e\xaf\x3a\xb3\x98\x0f\x94\x87\xbd\x93\x9d\
\x40\x5b\x59\xc3\xb6\xa3\xd8\xa3\x4a\x92\x44\xc9\xfb\xaf\x61\x57\
\x38\x6b\x26\x64\xf6\x62\xe0\xd4\x7b\x04\x9f\xb8\x68\xa8\x88\x41\
\x16\xfc\x02\x53\xd1\x5e\x7b\xed\xb5\x61\x5f\xfb\x6d\x36\x9b\x20\
\xf4\xb9\xdd\x6e\x9e\x78\xe2\x09\x56\xaf\x16\x83\x67\xe7\x8c\xf9\
\x29\xc9\xb9\xfe\x04\x20\x92\xc7\xc3\xc1\xb5\x4b\x70\xd9\x04\x9d\
\xc7\x1e\xe0\x39\xba\x08\xda\x93\x38\xf2\x77\x28\x4c\xc7\x1d\xe6\
\x66\xbe\x5b\xfe\xaa\x20\xcc\x64\x14\x5d\x46\xd1\x4f\x67\x13\x97\
\x9a\x49\x46\xe1\x10\x26\x4d\x8f\x4e\xd6\x30\xad\x56\xab\xd2\x09\
\x84\x7b\xfa\xb7\xd9\x6c\xc2\x96\xcf\xe1\x70\xf0\xe0\x83\x0f\xf2\
\xd6\x5b\x6f\x09\xf5\x7a\x0d\x1d\x4f\xcf\xcb\xc5\x54\x79\x15\x5b\
\x57\xd3\x54\x29\xc8\x44\x0e\xe0\x2e\x14\x47\xef\xe7\x1b\xed\x8d\
\xf6\x7c\x05\xb0\x05\x85\x5f\x59\xdf\x91\xd7\x51\x30\x29\x78\xc4\
\xd0\x18\xbd\x8e\xc7\xa6\x8c\xe0\xf2\xdc\xd6\xd5\xac\xe1\xc0\xf1\
\xe3\xc7\x19\x3f\x7e\x3c\x55\x55\x55\x8c\x1c\x39\x92\xd5\xab\x57\
\x07\x0d\xda\xd8\x19\x04\x0e\xbe\xd9\x6c\xe6\xde\x7b\xef\x65\xd3\
\x26\xc1\xc1\x96\xcc\x81\xc3\xc8\x9f\x38\x43\x4c\xa9\x77\x68\x2f\
\xfb\x57\xbd\x41\x80\xc1\xc7\x23\xc8\x39\x03\xbb\x0c\x3a\x12\xee\
\xfb\x01\xe0\x6f\xca\x82\x82\x49\xb7\xd3\x77\xe4\x75\x41\x2b\xeb\
\xb4\x5a\x1e\x9c\x78\x19\xc5\xdd\xe3\x59\xb7\x6e\x5d\xc4\x24\x73\
\x90\xf7\xe5\x15\x15\x15\xf4\xef\xdf\x5f\x95\xc0\xa1\xb3\xb0\x5a\
\xad\x82\x72\xab\xb4\xb4\x94\x07\x1e\x78\x40\xc8\x19\x0c\x72\x06\
\xf5\x7e\x13\x6e\x43\xa3\xf1\xef\xa8\x9b\xab\xca\xf9\xfe\xbd\xbf\
\x09\x09\x24\x91\xd5\xec\x53\xe8\x62\x29\xe4\x3b\x12\xf2\x6b\x07\
\x50\x00\xf8\x36\xb7\xf5\x87\x4b\x30\xa5\x66\x92\xd0\xbd\x8f\xaa\
\xb2\x24\x49\x6c\xdf\x5f\xce\x33\x73\xef\xe2\xdd\x7f\xbf\xcd\x1b\
\x6f\xbc\x81\xd9\x6c\x66\xd4\xa8\x51\x61\x7b\x43\xbd\x30\x18\x0c\
\xa4\xa7\xa7\x87\x25\x7c\xbd\x24\x49\x2a\x45\xcf\xb2\x65\xcb\xb8\
\xf7\xde\x7b\x39\x7d\x5a\x34\xe1\xee\x7b\xe5\x24\x72\xc7\x4e\x43\
\xb9\x92\x5a\x6a\xab\x28\x59\xf1\x4a\x30\x7f\xbf\xa5\x04\xf8\xe6\
\x77\x05\x74\x34\xe6\xdb\xc7\xc8\x71\x85\x7d\x23\x5e\x5b\xb6\x87\
\xc4\x9e\x39\x98\xd2\xba\xab\x2a\xb7\x9c\x3e\xc1\xc1\x4d\xab\x80\
\xb3\x84\xd8\xbe\x9d\xd5\xab\x57\x33\x74\xe8\xd0\x36\x4f\xe1\xce\
\x07\xdc\x6e\x37\x66\xb3\xd9\xa7\xe3\xaf\xae\xae\xe6\x91\x47\x1e\
\xe1\x95\x57\x5e\x09\x30\xf9\xd2\x90\x3f\xf1\x36\x7a\x0d\x9d\x80\
\xe4\xf1\xbf\xd0\xf6\xa6\x7a\xbe\x7f\xef\x65\x21\xb4\xae\x02\x63\
\x91\x1d\x71\xa3\x16\x06\xb6\x3d\xe8\x28\x01\x9c\xc8\x0a\x8c\xc9\
\x80\x3c\x82\x92\xc4\xe9\x92\x1d\x98\xd2\xba\x93\x20\xa6\x47\xc5\
\x68\xea\x46\xcd\x81\x5d\x38\xcc\x7e\x95\x71\x4d\x4d\x0d\x8b\x17\
\x2f\xa6\xba\xba\x9a\xa1\x43\x87\xb6\xfb\x4c\x20\xd2\xb0\xdb\xed\
\xbe\x7d\xbe\xd7\xe2\xe9\xbe\xfb\xee\xa3\xa4\x44\xd8\xbf\x63\x30\
\x25\x50\x7c\xeb\x83\xa4\x16\xfc\x04\x8f\xc7\x7f\x18\x64\xa9\xab\
\xa6\x64\xf9\xcb\x42\x32\xcd\x20\x98\x88\xfc\x37\xdc\xdc\x5a\xa5\
\x68\xa2\x33\x51\x1f\xed\xc8\xeb\xd9\x74\xce\x26\x96\x96\x24\x0f\
\x35\xa5\x3b\x31\x98\x12\x04\x4f\x22\x8d\x56\x4b\xd6\xe0\x11\x38\
\x5a\x1a\x85\x54\x73\x92\x24\xb1\x7b\xf7\x6e\x16\x2d\x5a\x84\x24\
\x49\x0c\x1e\x3c\xf8\xbc\xb9\x9b\x79\x3c\x1e\xcc\x66\x33\x4e\xa7\
\x13\x8f\xc7\xc3\x87\x1f\x7e\xc8\xdc\xb9\x73\x79\xff\xfd\xf7\x55\
\xfa\xfe\xe4\xec\xfe\x5c\x36\xf3\x51\x8c\x49\x19\x78\x14\x27\x81\
\x4d\x95\x47\x28\x59\xf1\x0a\x4e\x8b\xdf\x0d\x5c\xab\x37\x30\x70\
\xda\x7d\xd8\x9a\x1a\x84\x6d\x33\x72\x4e\x80\x2e\x33\x13\x74\x36\
\xec\x67\x13\xb0\x06\xb8\x09\x45\x76\xf1\xba\x83\x7b\xc1\xe3\x91\
\xf3\x0d\x9c\x5d\x8f\xb5\x7a\x03\x19\x45\x97\x91\xd0\xbd\x0f\x8d\
\xe5\xa5\xb8\x15\x81\x27\x6d\x36\x1b\x1b\x37\x6e\xe4\x1f\xff\xf8\
\x07\xcd\xcd\xcd\x14\x16\x16\x46\xcd\xdf\x40\x92\x24\x6c\x36\x1b\
\x36\x9b\x0d\x87\xc3\xc1\xf2\xe5\xcb\x79\xe8\xa1\x87\x78\xf3\xcd\
\x37\xa9\xab\xab\x13\xea\x6a\x0d\x46\x72\xaf\x9e\x42\xd1\x8d\x3f\
\xc7\xe3\xf6\x08\x26\x60\xb2\xb4\xff\x4f\x21\xc8\x83\x56\x6f\xa0\
\xf8\xf6\x87\xc9\xb8\xe4\x72\xba\x0f\x18\x46\x43\xf9\xfe\x40\x12\
\x4c\xa4\x8b\x90\xe0\x5c\xa5\xa6\x3e\xc0\x27\x80\x90\x61\x22\xbd\
\xff\x10\x06\xdc\x3c\x47\xb0\x24\x06\x70\x5a\xcd\x1c\xd9\xf0\x3e\
\x95\x3b\x36\x04\x1e\x8a\x00\x72\xa8\xd5\x09\x13\x26\x30\x6b\xd6\
\x2c\x26\x4d\x9a\x14\x91\x10\x33\x92\x24\xe1\x70\x38\xb0\xdb\xed\
\xec\xde\xbd\x9b\xe5\xcb\x97\xb3\x72\xe5\xca\x90\x09\xaa\xd3\x0a\
\x8a\x29\x9c\xfc\x33\x62\x92\xd2\x70\x58\x2c\xbe\x7e\x4b\x1e\x0f\
\x15\x5b\x57\x53\xf9\xcd\xe7\x04\x0a\xf6\x83\xa6\x3f\x20\x64\x5c\
\x73\xd9\xac\xec\x7e\x73\x01\x4d\x27\x44\x37\x71\xe0\x31\xc0\xeb\
\xe0\x90\x8b\x9c\x46\xf6\x00\x01\xfe\x9a\x91\x44\x38\xb2\x3e\x65\
\x20\x1b\x35\x08\x46\xef\xb1\x49\xa9\x0c\xbc\xe5\x7e\x9f\x35\x91\
\x12\xe6\xd3\x95\x1c\xfc\xf4\x1d\x9f\x65\x51\x30\x98\x4c\x26\xae\
\xb9\xe6\x1a\xae\xbb\xee\x3a\x46\x8f\x1e\x4d\x5e\x5e\xde\x39\x49\
\xf9\x1e\x8f\x87\xda\xda\x5a\x36\x6d\xda\xc4\x96\x2d\x5b\xd8\xb8\
\x71\x63\xab\x07\x57\xf1\x19\x3d\xc9\x1b\x7f\x0b\x19\x45\x97\xe1\
\x76\x3a\x71\x58\x2d\xbe\x71\xb6\x37\x37\x52\xb6\x66\x31\xcd\x27\
\x83\x47\x1b\x4b\xec\xd5\x8f\x9f\xcc\xfc\x8d\x90\x75\xad\x15\x12\
\xfc\x16\xd9\xa9\xf3\x3e\xfc\xe3\xb1\x08\x79\xdb\x1d\x71\x2b\xe1\
\x70\xa5\xfd\xea\x06\xbc\x01\x08\x4a\x78\x8d\x56\x4b\xee\x35\x37\
\x93\x3d\x6a\x32\x5a\x9d\x7a\xeb\xd7\x74\xb2\x9c\x63\x9b\x3f\xa2\
\x66\xff\xae\x90\xbe\xf5\x5e\x24\x27\x27\x73\xf9\xe5\x97\x33\x68\
\xd0\x20\x72\x72\x72\xc8\xce\xce\xa6\x6f\xdf\xbe\xc4\xc7\xc7\x93\
\x90\x90\x40\x72\x72\x32\x66\xb3\x99\x96\x96\x16\xcc\x66\x33\x0d\
\x0d\x0d\x94\x97\x97\x53\x56\x56\xc6\x81\x03\x07\x28\x2b\x2b\xa3\
\xa4\xa4\x24\xa4\x0b\xb7\xef\x87\x64\xf5\x25\x67\xcc\x4f\xc9\xb8\
\xe4\x72\xd0\x68\x70\xdb\xec\x38\x15\xc1\xb3\xea\xca\xf6\x70\x78\
\xfd\xbb\x81\xea\x5d\x15\x42\x92\xe0\x5f\x2f\x04\x6a\x07\x43\xe1\
\x13\xe4\x25\x36\xa2\x24\x08\x67\xde\x37\x0d\xf0\x1b\xe4\x94\xe8\
\x82\x6c\x11\x9f\xde\x83\xc2\x1b\x66\x0a\x96\xb1\x4a\x58\x6a\xab\
\xa9\xdc\xb5\x89\xea\xbd\x5b\x05\x23\x93\x68\x41\xab\xd7\x93\x5e\
\x30\x84\x9e\x97\x8d\x91\xb3\xa5\x69\x34\x48\x1e\x0f\x0e\xab\xd5\
\x17\x1c\xc3\xd6\x58\xc3\x91\xcf\x97\xd3\x78\x6c\x7f\xb0\x5b\x7c\
\x00\xec\x02\xfe\x5b\x59\x78\x21\x90\x20\x12\x89\xff\xc6\x01\xcb\
\x90\xe3\xd1\x09\xc8\x1a\x7c\x25\xf9\x93\x6e\x27\xa6\x5b\xb2\xba\
\x15\x20\xb9\xdd\xd4\x1e\xdc\x4b\xcd\xfe\x9d\xd4\x1d\xdc\x27\x6c\
\x1f\xc3\x0d\x8d\x56\x4b\x52\xef\x3c\xb2\x8a\x47\x90\x39\x70\x18\
\x86\xb8\xb3\xc1\x39\x25\x70\x39\x1c\xb8\xec\x56\x24\x49\x36\xe3\
\xaa\xfc\x66\x3d\x95\x3b\x3e\x0f\x3c\xcf\x07\x59\xb7\xff\x38\xf0\
\x17\xb9\x25\x8f\xe2\x5f\xd3\x81\xe0\x24\x70\x98\x9b\xd9\xfa\x97\
\xdf\xa8\x94\x45\xc5\xe9\x06\xf6\xd6\xaa\x9e\x11\x51\x12\x44\x2a\
\xf3\x63\x32\xf0\x3c\x41\x42\x9d\x68\x75\x7a\x32\x07\x0d\xa3\xdf\
\xd5\x37\xa9\x62\xe5\x28\x21\x49\x12\xcd\x27\x8f\x52\x7f\xb4\x94\
\xa6\x13\x47\x68\x3a\x79\x34\x50\x92\xee\x10\xb4\x3a\x3d\xf1\x19\
\x3d\x49\xc2\x24\x5d\x5d\x00\x00\x07\x53\x49\x44\x41\x54\xc9\x2d\
\x22\x25\x77\x00\xc9\xd9\xfd\xd1\xc7\x88\x21\x63\x3c\x6e\x37\x4e\
\x9b\x0d\x8f\xcb\x85\xdb\x69\xe7\xf4\x77\xdb\xa8\xfc\x66\x7d\x28\
\x22\xee\x45\x8e\xd1\xbf\x33\xa0\xfc\x11\xe4\x88\x9c\x3e\x74\xeb\
\x91\xcd\x4f\x66\x3d\x86\x21\x2e\x1e\x8f\xdb\xc5\x77\xef\xfe\x4d\
\x95\x89\xed\xee\x41\xf1\xbc\x3c\x3e\x99\x3f\x6c\x6d\xe2\xf9\x6f\
\xc4\xa8\x62\xc8\x2e\x63\x37\x21\xdb\x11\x84\x15\x91\x4e\xfd\x79\
\x23\xf0\x2a\xd0\x2b\xf0\x0b\x8d\x4e\x47\x56\xf1\x08\xb2\x47\x4e\
\x26\x3e\xa3\x67\xbb\x6e\xe6\x68\x39\x83\xa5\xfe\x34\xd6\x86\xd3\
\xd8\x1a\x6a\xb1\xb7\x34\xe2\x76\xd8\x71\x3b\x6c\xb8\x1d\x76\xb4\
\x7a\x03\x3a\x63\x0c\xfa\x18\x13\xba\x98\x58\x62\x12\x92\x30\xa5\
\xf7\x20\x3e\xbd\x07\xb1\xc9\xe9\xaa\xc8\xa7\x5e\x48\x1e\x0f\x4e\
\xbb\x0d\xb7\xc3\x89\xd3\xda\x42\xf5\x9e\xcd\x54\xed\xd9\xac\x8a\
\x96\x7e\x16\x36\xe0\x59\x64\x82\x87\x3a\xd5\x0b\x3a\x13\x0c\xb9\
\xe3\x11\x4a\x3f\x7c\x9d\xda\x03\xbb\x85\xca\xf7\x15\xc7\xb3\x70\
\x6c\xb2\x6f\x30\x7e\xbb\xe5\x0c\x7f\xda\xa9\xd2\x26\x7e\x02\xdc\
\x4c\x98\x49\x10\x8d\xdc\xaf\x89\xc0\x13\xc0\x2f\x09\x11\x05\x3b\
\xb1\x77\x3f\x7a\x14\x8f\xa4\xfb\xa0\xe1\xaa\xad\x63\x24\x21\x79\
\x3c\xb8\xec\x0e\x1c\x36\x33\x0d\x87\xbf\xa7\xa6\xe4\x6b\x1a\xca\
\xf7\x23\x79\x42\x0a\x8a\xef\x22\x6f\xdd\xca\xdb\x71\x7b\x15\x09\
\x74\xc6\x58\xd5\xb4\x1f\x38\xf8\x5e\xcc\x5c\x5b\xcf\xbb\x65\xaa\
\xb1\x0e\x3b\x09\xa2\x99\xfc\xb7\x27\xf0\x9f\xc8\xd3\x66\xd0\xd3\
\x20\xad\x4e\x4f\x5a\xc1\x60\xd2\xf2\x8b\x49\xc9\x2d\x12\xb2\x97\
\x84\x0d\x12\xb8\xdd\x2e\xac\xf5\x75\x34\x96\x97\xd0\x58\x51\x46\
\xdd\xa1\xbd\xa1\xde\x76\x2f\x36\x20\x47\xd9\x0c\xed\x73\x1e\x1c\
\x2a\x12\x28\x71\xdf\x15\x29\x2c\x1c\x65\x52\x0d\xc2\xc6\xe3\x76\
\xa6\x7d\x58\x87\xc5\x15\x74\x67\x14\x56\x12\x9c\x8f\xec\xcf\x05\
\xc0\xaf\x90\x2d\x62\x5b\x7d\xdd\x63\x12\x53\x48\xcd\x1d\x40\x52\
\x9f\x3c\x4c\x69\x59\x98\xd2\x7b\x84\x14\x20\x5b\x83\xcb\x6a\xc6\
\x5c\x5b\x45\x73\xf5\x09\x9a\x4e\x1e\xe5\x4c\x45\xd9\xd9\xc8\x1c\
\xad\x6e\x3d\x25\xe4\xb5\xf7\x59\xce\xcd\x78\xf3\x09\x02\x0c\x6c\
\x01\xee\x1b\x95\xc9\x9f\xa7\xe5\xa0\x6d\x6e\x81\x06\xff\xf9\x41\
\x1b\x83\xef\x45\xd8\x48\x70\x3e\xd3\x7f\x27\x23\x9b\x9e\x3f\xc4\
\x59\x57\xb4\xf6\x40\x17\x13\x4b\x7c\x5a\x16\x86\xf8\x44\x74\xc6\
\x18\x0c\xb1\xf1\xe8\x8c\x46\x34\x3a\xbd\x2c\xc1\xdb\x2c\xb8\x1c\
\x56\xdc\x76\x3b\x4e\x6b\x0b\x96\xba\xea\x50\xa7\x73\xa1\x70\x06\
\x58\x8c\x6c\xfb\x70\xa0\x23\x0d\x43\xe0\x15\xe0\x7e\x65\x81\x77\
\xf0\xbd\x7a\x2d\x4d\x93\x4c\x82\x60\x83\x6f\x4c\x4b\x20\xe3\x9a\
\x22\x2a\x97\xef\x08\xbc\x6f\x58\x48\xd0\x15\xf2\xbf\x6b\x80\x11\
\xc0\xad\xc8\x8a\x24\xb5\x71\x41\xe4\x61\x45\x3e\xdb\x58\x76\xf6\
\xdf\x70\x6d\xb9\x72\x91\xe3\x30\xfa\xfe\xce\x81\x83\xef\xc5\xa6\
\x6f\x4f\x31\x6d\x69\x85\x6a\xf0\x2f\x5f\x3c\x87\x84\x82\xee\x94\
\xfd\xef\x1a\x8e\xfd\x53\x75\x88\x78\xce\x24\xe8\x0a\x04\x50\x42\
\x83\x1c\xac\x6a\x02\x70\x15\x72\x12\xc5\x48\x48\x85\x1e\xa0\x04\
\xd9\x1d\x7e\x1d\xb2\xa1\x46\x24\x2c\x59\x27\x23\x13\x0a\x80\xe2\
\x5e\x26\xb6\xfd\x7a\x90\x6a\xf0\x37\x1e\x6c\xe2\x96\x7f\x96\x61\
\x71\xf8\xcf\x47\x94\x83\xef\x45\x08\x12\x9c\xd3\x16\x31\x3c\xf6\
\x53\xe1\x83\x04\x7c\x85\xec\x96\x76\x2d\x90\x02\x0c\x43\xcc\x86\
\xe5\xa6\xf3\x6f\xe8\x7a\xe4\xe3\xd8\x14\x60\x30\xf2\x9e\xfd\x63\
\x22\x33\xf8\x00\x42\x9e\xd8\xbd\x95\x16\x7e\xf7\x91\xe8\xc1\xd4\
\xde\xc1\x97\x3c\x12\xae\x33\x41\xbb\x39\x11\x59\x13\xd9\xa9\x38\
\xb8\x5d\x8d\x00\x81\x70\x01\xdf\x20\xee\xb7\xf7\x21\xff\xd8\x04\
\xe4\xbc\xc7\x81\x39\xdd\xff\x03\xc8\x07\xd2\x80\xe1\x01\xdf\xed\
\x42\x8e\xa7\x17\x2d\x7d\xf3\x21\xe4\x83\x1d\x1f\xfe\xbc\xa1\x8a\
\xdf\xae\x92\x49\x10\x6c\xf0\xe3\xd3\xbb\x31\x79\xd9\x03\xaa\xc1\
\x2f\x7d\x6a\x05\x95\xef\xa9\xe4\x00\x2f\x26\xd1\x49\x12\x84\xd7\
\x38\x2f\xba\x30\x9f\xfd\x04\xc6\x5a\x2d\xc1\x1f\xff\xb8\x2b\x18\
\x60\x3e\x80\xbc\x05\x9e\xe4\x2d\xf8\xf3\x86\x2a\x8e\x37\xd8\x59\
\x53\xd2\xa8\x1a\xfc\x9f\xbd\x33\x97\x8c\xfe\x3d\x38\xe1\x74\x72\
\xd2\xe1\x08\x39\xf8\xf9\xc9\x7a\x0e\x35\x0a\x91\xc9\xbc\x24\xe8\
\x90\x4c\xd0\xd5\x67\x80\x1f\x02\x6c\xc8\xd6\xc0\x42\xa0\x85\xf7\
\xf6\xd4\xab\x07\xff\xdf\xf2\xe0\x03\xf4\x36\x18\xe8\x69\x30\xb0\
\xff\xbf\x3f\x54\x0d\xfe\xd4\xfc\x38\x76\xcf\xcc\xe4\xf1\xa1\x2a\
\xe3\x99\x49\xc8\x11\x5e\xda\x3d\x13\x5c\x24\x40\x74\x60\x47\x36\
\xa1\x0b\x1a\x9a\xcc\x37\xf8\x85\x8a\xcc\xed\x92\xc4\x77\x7f\xf8\
\x80\x13\x6f\x89\xb1\x99\xa7\xe6\xc7\xb1\x64\x72\x0a\x06\xad\x86\
\xff\x1a\x99\x18\x8c\x04\x13\xe9\x00\x09\x2e\x12\x20\x7a\xb0\x23\
\x6f\x75\x3f\x09\xfc\x22\x7f\xc2\x40\x61\xf0\x25\x8f\xc4\xea\xc7\
\xde\x66\xe7\x12\x51\xf1\x38\xad\x20\x8e\x7f\x4d\x4e\xc5\xa0\xf5\
\x6f\x23\xfe\x6b\x64\x22\x8f\x5c\xa6\xda\x28\x79\x05\xc3\x36\x4d\
\xaa\x2e\x12\x20\xba\xb0\x21\x6f\xd9\x04\x12\x7c\xfb\xf6\x36\xd6\
\x3f\x23\x47\x5f\x95\x3c\x12\x6b\x1e\x7f\x9b\x3d\x6f\x7d\x25\x34\
\x9c\x36\x20\x91\x25\xd7\xa5\xa2\x0f\x32\x62\xf3\x47\x27\x05\x23\
\xc1\x24\xa0\xcd\x58\x78\x17\xb2\x10\x78\xa1\xc2\x4b\x82\x95\x28\
\x04\xc3\x6d\xaf\x7e\x2e\x7f\x79\xc6\xaa\x1a\xfc\xa9\x43\x52\x59\
\x3c\x33\x0f\x7d\x8b\x59\x50\x1b\x2b\x31\x7f\xb4\x6c\x9b\xfb\xe7\
\x5d\x82\xd6\x73\x3a\xf2\x52\x10\x52\x28\xbc\x48\x80\xf3\x83\x56\
\x49\xa0\xc4\xd4\x21\xa9\x2c\x99\x99\x87\x5e\xab\x41\x4a\x4c\x90\
\x35\x77\xed\x24\x41\xa6\x49\x77\xe4\xb4\xc5\xdd\xea\x8e\xe0\x22\
\x01\xce\x1f\x82\x92\x40\x09\xe5\xe0\x7b\xd1\x1e\x12\x8c\xcf\x8e\
\xa5\xfc\x8c\x8b\x5b\x2f\x31\xf5\x4b\x4c\xea\x76\x67\xdc\xff\x1c\
\x78\x33\x54\x27\x2e\xca\x00\xe7\x17\x41\x65\x02\x80\x82\x8c\x58\
\x16\xdf\x29\x0e\xbe\x17\x52\x62\x02\xa4\x24\xa9\xca\xbd\x98\xd0\
\x37\x86\x39\x83\xe3\x49\xd2\x6b\x34\x58\xcd\x4b\xac\xbf\x2d\xbc\
\x33\x54\xdd\x8b\x04\x38\xff\x08\x4a\x82\x83\x35\x36\x9e\x5e\x13\
\x3a\x9d\x4c\x5b\x24\xf0\x42\xe3\x91\x34\x58\xcd\x4b\xd6\xde\xd1\
\x7b\x1e\xb2\x6d\xc2\x6f\x50\xb8\xf9\x5f\x5c\x02\xba\x06\x6c\xc8\
\x1a\xbc\x95\xc8\x5b\x38\x00\x16\x6e\xa8\x02\x60\xfe\x4f\x83\x1f\
\x90\xb6\xb5\x1c\x78\xa1\xf1\x48\x9a\x27\x3f\x3b\xfd\x17\x45\xd1\
\x04\xce\x1a\x9a\x5e\x28\x33\x80\x32\x94\x5a\x79\x07\xda\x9d\x40\
\xcc\xa3\xd3\x91\xb6\xd1\x86\x95\x20\x33\xc1\xc2\x0d\x55\x3c\xb9\
\xea\x78\xf0\x16\xb4\x7f\x26\x38\xd8\xa0\x52\x1b\xaf\x04\x8c\x9d\
\xf5\x0d\x8c\x36\x76\x20\xbb\x4d\x95\x22\x5b\x13\x29\xcd\x83\x27\
\x23\x1e\xfa\xbc\x82\xff\x7c\xa0\x05\x99\x04\x7d\x90\x13\x2b\xfe\
\x0f\x1d\xcc\xab\x17\x65\xb8\x90\xbd\xaf\x87\x02\xbe\x70\xa6\xdb\
\xca\x5b\x30\x3b\x3c\x4c\x28\x0c\x31\xd0\x31\x46\xd9\xe0\xd5\x16\
\x3a\x09\x86\x47\x82\x4d\x27\x84\xef\xf3\x00\xa9\xab\xd9\x03\x74\
\x06\x7f\x05\x1e\x56\x5c\x17\x23\x9f\x18\x5e\xc8\x88\x41\x26\xc2\
\x0d\xca\xc2\x87\xaf\xce\xe2\x7f\x6f\x6a\x25\x35\xe0\x99\x66\x34\
\x8d\xa1\x0f\x3a\xff\x53\x6d\x72\x5e\x7e\xa1\x2c\x01\x3f\x36\x04\
\x55\x1b\xbf\xb4\xa9\xba\xd5\xe5\x80\xa4\x6e\x48\xc9\x89\x21\xbf\
\xbe\xb2\x87\x91\x80\x4d\x45\xb7\x8b\x04\xe8\xba\xb0\x22\x0b\x86\
\x1d\x92\x09\x42\x91\x60\xed\x51\x1b\xff\xb1\xba\x1e\x8f\x78\x40\
\xfe\xc9\x45\x02\x74\x6d\x74\x9a\x04\x4a\xc1\xd0\x3b\xf8\x76\xb7\
\x30\xfa\x95\xc0\xff\xb9\x48\x80\xae\x0f\x2b\x41\xec\x09\x16\x6e\
\xa8\xe2\xb1\x95\x15\x21\x1b\x79\x77\x07\x9f\x1c\xb3\x31\x43\x3d\
\xf8\xd5\xc8\xdb\xcd\xaa\x8b\x04\xb8\x30\x10\xd4\x9e\xe0\xa5\x4d\
\xd5\xad\x92\x60\xdd\x49\x37\xb7\x7d\x54\x8f\x4d\x3d\xf8\xe3\x38\
\x9b\xa4\xe2\x87\x40\x80\x40\xb3\xaf\xae\x60\x06\x16\x09\xd8\x81\
\xdb\x68\xa7\x60\xb8\xb6\xa4\x91\xdb\x5e\x2f\xc3\xe6\x52\x4d\xfb\
\x63\x38\x8f\x19\x4a\x22\x81\xbb\x91\x07\x5d\x42\xd6\x0f\xc4\x9f\
\xdf\xee\x44\x1c\x71\xc8\xa6\xec\x92\xf2\xf3\xab\xb1\x3d\x24\xeb\
\x9f\x86\x49\xd6\x3f\x0d\x93\x56\xcc\xe9\x2f\xc5\xe8\x35\x52\x40\
\x9d\x13\xc8\x5e\x59\x3f\x38\x68\x91\xcd\xbb\xdf\xe0\x6c\x02\xec\
\x1f\x01\x42\x92\xe0\xc7\x36\xf8\x3f\x66\x04\x25\x81\x56\xc3\xc5\
\xc1\xff\x11\x21\x06\x58\x05\xaa\x41\xf7\x7e\xaa\x80\xe0\x71\x79\
\x2e\xe2\x07\x83\x50\x24\xb8\x38\xf8\x3f\x22\x18\x81\xff\x42\xce\
\xeb\x50\x87\xec\xe4\xda\xa3\xd5\x16\x67\xf1\xff\x01\xd7\x2b\x95\
\xcf\x71\x89\xbc\x05\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
"
qt_resource_name = b"\
\x00\x08\
\x03\x7d\xca\xc3\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x6e\x00\x65\x00\x73\
\x00\x06\
\x06\xfa\x65\x63\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x6f\x00\x73\
\x00\x0b\
\x08\x25\x42\xc7\
\x00\x62\
\x00\x61\x00\x74\x00\x65\x00\x72\x00\x69\x00\x61\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0d\
\x0c\x33\x5e\xa7\
\x00\x6f\
\x00\x70\x00\x74\x00\x69\x00\x6d\x00\x69\x00\x7a\x00\x65\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0a\
\x09\xb2\x0e\xe7\
\x00\x64\
\x00\x69\x00\x65\x00\x73\x00\x65\x00\x6c\x00\x2e\x00\x6a\x00\x70\x00\x67\
\x00\x09\
\x0e\xc6\xa2\x47\
\x00\x74\
\x00\x61\x00\x78\x00\x65\x00\x73\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0e\
\x02\x1d\x26\x27\
\x00\x70\
\x00\x61\x00\x6e\x00\x65\x00\x6c\x00\x73\x00\x6f\x00\x6c\x00\x61\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x09\
\x0b\x66\x82\x47\
\x00\x64\
\x00\x61\x00\x74\x00\x6f\x00\x73\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x16\x00\x02\x00\x00\x00\x06\x00\x00\x00\x03\
\x00\x00\x00\x96\x00\x00\x00\x00\x00\x01\x00\x00\xc3\x2b\
\x00\x00\x00\x28\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x64\x00\x00\x00\x00\x00\x01\x00\x00\x2d\x21\
\x00\x00\x00\xb8\x00\x00\x00\x00\x00\x01\x00\x00\xd1\x64\
\x00\x00\x00\x44\x00\x00\x00\x00\x00\x01\x00\x00\x08\x79\
\x00\x00\x00\x7e\x00\x00\x00\x00\x00\x01\x00\x00\xa3\x66\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x16\x00\x02\x00\x00\x00\x06\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x96\x00\x00\x00\x00\x00\x01\x00\x00\xc3\x2b\
\x00\x00\x01\x7a\xc5\xb6\x04\xe7\
\x00\x00\x00\x28\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x7a\xc5\xb4\x3a\x7f\
\x00\x00\x00\x64\x00\x00\x00\x00\x00\x01\x00\x00\x2d\x21\
\x00\x00\x01\x7a\xc5\xb2\xc5\x66\
\x00\x00\x00\xb8\x00\x00\x00\x00\x00\x01\x00\x00\xd1\x64\
\x00\x00\x01\x7a\xc5\xb6\xe5\xad\
\x00\x00\x00\x44\x00\x00\x00\x00\x00\x01\x00\x00\x08\x79\
\x00\x00\x01\x7a\xc5\xc1\x5d\xa0\
\x00\x00\x00\x7e\x00\x00\x00\x00\x00\x01\x00\x00\xa3\x66\
\x00\x00\x01\x7a\xc5\xb8\xfc\x40\
"
qt_version = QtCore.qVersion().split('.')
if qt_version < ['5', '8', '0']:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
def qInitResources():
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| [
"PyQt5.QtCore.qVersion",
"PyQt5.QtCore.qUnregisterResourceData",
"PyQt5.QtCore.qRegisterResourceData"
] | [((266074, 266175), 'PyQt5.QtCore.qRegisterResourceData', 'QtCore.qRegisterResourceData', (['rcc_version', 'qt_resource_struct', 'qt_resource_name', 'qt_resource_data'], {}), '(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n', (266102, 266175), False, 'from PyQt5 import QtCore\n'), ((266202, 266305), 'PyQt5.QtCore.qUnregisterResourceData', 'QtCore.qUnregisterResourceData', (['rcc_version', 'qt_resource_struct', 'qt_resource_name', 'qt_resource_data'], {}), '(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n', (266232, 266305), False, 'from PyQt5 import QtCore\n'), ((265845, 265862), 'PyQt5.QtCore.qVersion', 'QtCore.qVersion', ([], {}), '()\n', (265860, 265862), False, 'from PyQt5 import QtCore\n')] |
from xls2xml import TSVReader
def test_get_valid_conf_keys():
tsv_reader = TSVReader('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Sample')
assert set(tsv_reader.get_valid_conf_keys()) == {'Sample'}
tsv_reader = TSVReader('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Analysis')
assert tsv_reader.get_valid_conf_keys() == []
def test_set_current_conf_key():
# set_current_conf_key() should does nothing
tsv_reader = TSVReader('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Sample')
assert tsv_reader.is_valid()
assert set(tsv_reader.get_valid_conf_keys()) == {'Sample'}
tsv_reader.set_current_conf_key('Analysis')
assert tsv_reader.is_valid()
assert set(tsv_reader.get_valid_conf_keys()) == {'Sample'}
tsv_reader = TSVReader('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Analysis')
assert not tsv_reader.is_valid()
assert tsv_reader.get_valid_conf_keys() == []
tsv_reader.set_current_conf_key('Sample')
assert not tsv_reader.is_valid()
assert tsv_reader.get_valid_conf_keys() == []
def test_is_not_valid():
tsv_reader = TSVReader('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Analysis')
assert not tsv_reader.is_valid()
def test_is_valid():
tsv_reader = TSVReader('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Sample')
assert tsv_reader.is_valid()
def test_get_current_headers():
tsv_reader = TSVReader('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Sample')
headers = tsv_reader.get_current_headers()
assert isinstance(headers, list)
assert set(headers) == {'Sample_ID', 'Subject_ID', 'Geno_ID', 'Phenotype', 'Gender', 'Analysis_alias', 'Cohort ID',
'Ethnicity', 'Ethnicity Description', 'T2D', 'Case_Control', 'Description', 'Center_name',
'Hispanic or Latino; of Spanish origin', 'Age', 'Year of Birth', 'Year of first visit',
'Cell Type', 'Maternal_id', 'Paternal_id', 'Novel Attributes'}
def test_next():
tsv_reader = TSVReader('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Sample')
row = tsv_reader.next()
assert isinstance(row, dict)
assert 0 == cmp(row, {'Novel Attributes': None, 'Ethnicity Description': None, 'Description': 'Male normal',
'Cell Type': 'Blood', 'Maternal_id': 'SAM111113', 'Center_name': 'WTGC cambridge',
'Gender': 'male', 'Subject_ID': 'SAM111111', 'Paternal_id': 'SAM111115', 'T2D': 0,
'Hispanic or Latino; of Spanish origin': None, 'Cohort ID': 'CO1111', 'Year of Birth': '1986',
'Age': '31', 'Analysis_alias': 'AN001', 'Sample_ID': 'SAM111111', 'Geno_ID': None,
'Year of first visit': None, 'Case_Control': 'Control', 'Ethnicity': 'EUWH',
'Phenotype': 'MeSH:D006262'})
for row in tsv_reader:
assert isinstance(row, dict)
| [
"xls2xml.TSVReader"
] | [((80, 155), 'xls2xml.TSVReader', 'TSVReader', (['"""data/example_samples.tsv"""', '"""data/T2D_xls2xml_v1.conf"""', '"""Sample"""'], {}), "('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Sample')\n", (89, 155), False, 'from xls2xml import TSVReader\n'), ((236, 313), 'xls2xml.TSVReader', 'TSVReader', (['"""data/example_samples.tsv"""', '"""data/T2D_xls2xml_v1.conf"""', '"""Analysis"""'], {}), "('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Analysis')\n", (245, 313), False, 'from xls2xml import TSVReader\n'), ((464, 539), 'xls2xml.TSVReader', 'TSVReader', (['"""data/example_samples.tsv"""', '"""data/T2D_xls2xml_v1.conf"""', '"""Sample"""'], {}), "('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Sample')\n", (473, 539), False, 'from xls2xml import TSVReader\n'), ((797, 874), 'xls2xml.TSVReader', 'TSVReader', (['"""data/example_samples.tsv"""', '"""data/T2D_xls2xml_v1.conf"""', '"""Analysis"""'], {}), "('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Analysis')\n", (806, 874), False, 'from xls2xml import TSVReader\n'), ((1139, 1216), 'xls2xml.TSVReader', 'TSVReader', (['"""data/example_samples.tsv"""', '"""data/T2D_xls2xml_v1.conf"""', '"""Analysis"""'], {}), "('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Analysis')\n", (1148, 1216), False, 'from xls2xml import TSVReader\n'), ((1293, 1368), 'xls2xml.TSVReader', 'TSVReader', (['"""data/example_samples.tsv"""', '"""data/T2D_xls2xml_v1.conf"""', '"""Sample"""'], {}), "('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Sample')\n", (1302, 1368), False, 'from xls2xml import TSVReader\n'), ((1452, 1527), 'xls2xml.TSVReader', 'TSVReader', (['"""data/example_samples.tsv"""', '"""data/T2D_xls2xml_v1.conf"""', '"""Sample"""'], {}), "('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Sample')\n", (1461, 1527), False, 'from xls2xml import TSVReader\n'), ((2093, 2168), 'xls2xml.TSVReader', 'TSVReader', (['"""data/example_samples.tsv"""', 
'"""data/T2D_xls2xml_v1.conf"""', '"""Sample"""'], {}), "('data/example_samples.tsv', 'data/T2D_xls2xml_v1.conf', 'Sample')\n", (2102, 2168), False, 'from xls2xml import TSVReader\n')] |
from django.views.generic import TemplateView
if settings.DEBUG:
# enable local preview of error pages
urlpatterns += patterns('',
(r'^403/$', TemplateView.as_view(template_name="403.html")),
(r'^404/$', TemplateView.as_view(template_name="404.html")),
(r'^500/$', TemplateView.as_view(template_name="500.html")),
) | [
"django.views.generic.TemplateView.as_view"
] | [((162, 208), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""403.html"""'}), "(template_name='403.html')\n", (182, 208), False, 'from django.views.generic import TemplateView\n'), ((231, 277), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""404.html"""'}), "(template_name='404.html')\n", (251, 277), False, 'from django.views.generic import TemplateView\n'), ((300, 346), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""500.html"""'}), "(template_name='500.html')\n", (320, 346), False, 'from django.views.generic import TemplateView\n')] |
# SCRAMBLE - Adit
import cv2
import numpy as np
from emb import *
def decryption2(p,key):
img = cv2.imread(p, cv2.IMREAD_GRAYSCALE)
i2 = np.zeros((258, 258), dtype="int")
i3 = np.zeros((258, 258), dtype="int")
i4 = np.zeros((258, 258), dtype="int")
i5 = np.zeros((258, 258), dtype="int")
key2=key[::-1]
k1=[]
k2=[]
for i in range(129):
k1.append(key[i] * -1)
k2.append(key2[i] * -1)
i2=img.transpose()
l=0
j=0
for i in range(1,258,2):
i3[i-1]=i2[i-1]
i3[i]=np.roll(i2[i],k2[l])
l+=1
i4=i3.transpose()
for i in range(0,258,2):
i5[i]=np.roll(i4[i],k1[j])
i5[i+1]=i4[i+1]
j+=1
i6,m=eject(i5)
g = "C:\\Users\\Adit\\Desktop\\gui\\ImageDecrypted2.jpg"
cv2.imwrite(g, i6)
with open("C:\\Users\\Adit\\Desktop\\gui\\HiddenMessage.txt","w") as f:
f.write(m) | [
"numpy.roll",
"cv2.imwrite",
"numpy.zeros",
"cv2.imread"
] | [((101, 136), 'cv2.imread', 'cv2.imread', (['p', 'cv2.IMREAD_GRAYSCALE'], {}), '(p, cv2.IMREAD_GRAYSCALE)\n', (111, 136), False, 'import cv2\n'), ((146, 179), 'numpy.zeros', 'np.zeros', (['(258, 258)'], {'dtype': '"""int"""'}), "((258, 258), dtype='int')\n", (154, 179), True, 'import numpy as np\n'), ((189, 222), 'numpy.zeros', 'np.zeros', (['(258, 258)'], {'dtype': '"""int"""'}), "((258, 258), dtype='int')\n", (197, 222), True, 'import numpy as np\n'), ((232, 265), 'numpy.zeros', 'np.zeros', (['(258, 258)'], {'dtype': '"""int"""'}), "((258, 258), dtype='int')\n", (240, 265), True, 'import numpy as np\n'), ((275, 308), 'numpy.zeros', 'np.zeros', (['(258, 258)'], {'dtype': '"""int"""'}), "((258, 258), dtype='int')\n", (283, 308), True, 'import numpy as np\n'), ((783, 801), 'cv2.imwrite', 'cv2.imwrite', (['g', 'i6'], {}), '(g, i6)\n', (794, 801), False, 'import cv2\n'), ((542, 563), 'numpy.roll', 'np.roll', (['i2[i]', 'k2[l]'], {}), '(i2[i], k2[l])\n', (549, 563), True, 'import numpy as np\n'), ((641, 662), 'numpy.roll', 'np.roll', (['i4[i]', 'k1[j]'], {}), '(i4[i], k1[j])\n', (648, 662), True, 'import numpy as np\n')] |
import turtle
#now create a graphics window.
t = turtle.Pen()
for j in range(1):
for i in range(5):
t.forward(100)
t.left(72)
stopper = raw_input("Hit <enter> to quit.")
#now remove the graphics window before exiting
turtle.bye()
| [
"turtle.Pen",
"turtle.bye"
] | [((52, 64), 'turtle.Pen', 'turtle.Pen', ([], {}), '()\n', (62, 64), False, 'import turtle\n'), ((241, 253), 'turtle.bye', 'turtle.bye', ([], {}), '()\n', (251, 253), False, 'import turtle\n')] |
from pygtfs import Schedule
from pygtfs import append_feed, delete_feed, overwrite_feed, list_feeds
class GTFSSmallSetup(object):
def __init__(self):
self.schedule = Schedule(":memory:")
append_feed(self.schedule, "test/data/atx_small" )
# class GTFSATXSetup(object):
# def __init__(self):
# self.schedule = Schedule(":memory:")
# append_feed(self.schedule, "test/data/atx_small" ) | [
"pygtfs.append_feed",
"pygtfs.Schedule"
] | [((184, 204), 'pygtfs.Schedule', 'Schedule', (['""":memory:"""'], {}), "(':memory:')\n", (192, 204), False, 'from pygtfs import Schedule\n'), ((214, 263), 'pygtfs.append_feed', 'append_feed', (['self.schedule', '"""test/data/atx_small"""'], {}), "(self.schedule, 'test/data/atx_small')\n", (225, 263), False, 'from pygtfs import append_feed, delete_feed, overwrite_feed, list_feeds\n')] |
"""
test_question.py
サンプルテストケース
"""
import pytest
import run as myApp
from datetime import datetime, timedelta
from models import Question
@pytest.fixture
def api():
return myApp.api
class TestQuestionModel:
def test_was_published_recently_with_future_question(self, api):
"""
未来の質問に対してwas_published_recently()はFalseを返すはずである
:param api:
:return:
"""
# 未来の公開日となる質問を作成
time = datetime.now() + timedelta(days=30)
feature_question = Question('future_question', pub_date=time)
# これはFalseとなるはず
assert feature_question.was_published_recently() is False
def test_was_published_recently_with_boundary_question(self, api):
"""
== 境界値テスト ==
1日1秒前の質問に対してはwas_published_recently()はFalseを返すはずである
また,23時間59分59秒以内であればwas_published_recently()はTrueを返すはずである
:param api:
:return:
"""
# 最近の境界値となる質問を作成
time_old = datetime.now() - timedelta(days=1)
time_res = datetime.now() - timedelta(hours=23, minutes=59, seconds=59)
old_question = Question('old_question', time_old)
res_question = Question('resent_question', time_res)
assert old_question.was_published_recently() is False
assert res_question.was_published_recently() is True
| [
"models.Question",
"datetime.datetime.now",
"datetime.timedelta"
] | [((507, 549), 'models.Question', 'Question', (['"""future_question"""'], {'pub_date': 'time'}), "('future_question', pub_date=time)\n", (515, 549), False, 'from models import Question\n'), ((1102, 1136), 'models.Question', 'Question', (['"""old_question"""', 'time_old'], {}), "('old_question', time_old)\n", (1110, 1136), False, 'from models import Question\n'), ((1160, 1197), 'models.Question', 'Question', (['"""resent_question"""', 'time_res'], {}), "('resent_question', time_res)\n", (1168, 1197), False, 'from models import Question\n'), ((444, 458), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (456, 458), False, 'from datetime import datetime, timedelta\n'), ((461, 479), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (470, 479), False, 'from datetime import datetime, timedelta\n'), ((964, 978), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (976, 978), False, 'from datetime import datetime, timedelta\n'), ((981, 998), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (990, 998), False, 'from datetime import datetime, timedelta\n'), ((1018, 1032), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1030, 1032), False, 'from datetime import datetime, timedelta\n'), ((1035, 1078), 'datetime.timedelta', 'timedelta', ([], {'hours': '(23)', 'minutes': '(59)', 'seconds': '(59)'}), '(hours=23, minutes=59, seconds=59)\n', (1044, 1078), False, 'from datetime import datetime, timedelta\n')] |
"""
Demo Program - Progress Meter using a Text Element
This program was written by @jason990420
This is a clever use of a Text Element to create the same look
and feel of a progress bar in PySimpleGUI using only a Text Element.
Copyright 2020 PySimpleGUI.org
"""
from tkinter.constants import BOTTOM, TRUE
from webbrowser import BackgroundBrowser
import PySimpleGUI as sg
sg.theme('Reddit')
layout = [
[sg.Text('', size=(50, 1), relief='sunken', font=('Courier', 11), text_color='orange', background_color='black',key='-TEXT-', metadata=0)],
[sg.Text('', size=(6, 1), justification='center', font=('Courier', 11), text_color='light green', key='-label-', metadata=0, background_color='black')]
]
window = sg.Window('Title', layout, size=(500,70), finalize=True, element_justification='center', background_color='black', no_titlebar=TRUE, transparent_color='black')
text = window['-TEXT-']
text1 = window['-label-']
while True:
event, values = window.read(timeout=100)
if event == sg.WINDOW_CLOSED:
break
text.metadata = (text.metadata + 1) % 51
text.update('█' * text.metadata)
text1.update(str(text.metadata * 2) + '%')
if text.metadata >= 50:
break
window.close() | [
"PySimpleGUI.theme",
"PySimpleGUI.Text",
"PySimpleGUI.Window"
] | [((394, 412), 'PySimpleGUI.theme', 'sg.theme', (['"""Reddit"""'], {}), "('Reddit')\n", (402, 412), True, 'import PySimpleGUI as sg\n'), ((742, 911), 'PySimpleGUI.Window', 'sg.Window', (['"""Title"""', 'layout'], {'size': '(500, 70)', 'finalize': '(True)', 'element_justification': '"""center"""', 'background_color': '"""black"""', 'no_titlebar': 'TRUE', 'transparent_color': '"""black"""'}), "('Title', layout, size=(500, 70), finalize=True,\n element_justification='center', background_color='black', no_titlebar=\n TRUE, transparent_color='black')\n", (751, 911), True, 'import PySimpleGUI as sg\n'), ((435, 577), 'PySimpleGUI.Text', 'sg.Text', (['""""""'], {'size': '(50, 1)', 'relief': '"""sunken"""', 'font': "('Courier', 11)", 'text_color': '"""orange"""', 'background_color': '"""black"""', 'key': '"""-TEXT-"""', 'metadata': '(0)'}), "('', size=(50, 1), relief='sunken', font=('Courier', 11), text_color\n ='orange', background_color='black', key='-TEXT-', metadata=0)\n", (442, 577), True, 'import PySimpleGUI as sg\n'), ((579, 737), 'PySimpleGUI.Text', 'sg.Text', (['""""""'], {'size': '(6, 1)', 'justification': '"""center"""', 'font': "('Courier', 11)", 'text_color': '"""light green"""', 'key': '"""-label-"""', 'metadata': '(0)', 'background_color': '"""black"""'}), "('', size=(6, 1), justification='center', font=('Courier', 11),\n text_color='light green', key='-label-', metadata=0, background_color=\n 'black')\n", (586, 737), True, 'import PySimpleGUI as sg\n')] |
import json
from pathlib import Path
import pandas as pd
import requests
from tqdm import tqdm
ROOT = Path(__file__)
TOPSTORIES_NAME = "hn_topstories"
TOPSTORIES_ZIP = ROOT.parent / f"{TOPSTORIES_NAME}.zip"
TOPSTORIES_JSONL = TOPSTORIES_ZIP / f"{TOPSTORIES_NAME}.jsonl"
def save_topstories_as_zip():
hn_topstories_url = (
"https://hacker-news.firebaseio.com/v0/topstories.json?print=pretty"
)
hn_get_item_url = (
"https://hacker-news.firebaseio.com/v0/item/{item_id}.json?print=pretty"
)
topstory_result = requests.get(hn_topstories_url)
topstory_ids = json.loads(topstory_result.text)
data = list()
for topstory_id in tqdm(topstory_ids):
result = requests.get(hn_get_item_url.format(item_id=topstory_id))
data.append(json.loads(result.text))
data_df = pd.json_normalize(data)
data_df.to_pickle(TOPSTORIES_ZIP)
def save_to_json(file_path: Path):
standard_df = pd.read_pickle(file_path)
shuffled_df = standard_df.sample(frac=1, random_state=42).reset_index(drop=True)
_save_df_as_jsonl(shuffled_df, file_path.parent / f"{TOPSTORIES_NAME}.jsonl")
def _save_df_as_jsonl(df, file_path: Path):
if file_path.exists():
file_path.unlink()
with open(file_path, "ab") as json_file:
df.apply(
lambda x: json_file.write(f"{x.to_json()}\n".encode("utf-8")),
axis=1,
)
def load_topstories_from_zip():
return pd.read_json(
TOPSTORIES_ZIP,
lines=True,
compression="zip",
)
if __name__ == "__main__":
save_topstories_as_zip()
save_to_json(TOPSTORIES_ZIP)
| [
"pandas.read_pickle",
"json.loads",
"pandas.json_normalize",
"pathlib.Path",
"tqdm.tqdm",
"requests.get",
"pandas.read_json"
] | [((104, 118), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (108, 118), False, 'from pathlib import Path\n'), ((548, 579), 'requests.get', 'requests.get', (['hn_topstories_url'], {}), '(hn_topstories_url)\n', (560, 579), False, 'import requests\n'), ((599, 631), 'json.loads', 'json.loads', (['topstory_result.text'], {}), '(topstory_result.text)\n', (609, 631), False, 'import json\n'), ((674, 692), 'tqdm.tqdm', 'tqdm', (['topstory_ids'], {}), '(topstory_ids)\n', (678, 692), False, 'from tqdm import tqdm\n'), ((829, 852), 'pandas.json_normalize', 'pd.json_normalize', (['data'], {}), '(data)\n', (846, 852), True, 'import pandas as pd\n'), ((946, 971), 'pandas.read_pickle', 'pd.read_pickle', (['file_path'], {}), '(file_path)\n', (960, 971), True, 'import pandas as pd\n'), ((1453, 1512), 'pandas.read_json', 'pd.read_json', (['TOPSTORIES_ZIP'], {'lines': '(True)', 'compression': '"""zip"""'}), "(TOPSTORIES_ZIP, lines=True, compression='zip')\n", (1465, 1512), True, 'import pandas as pd\n'), ((789, 812), 'json.loads', 'json.loads', (['result.text'], {}), '(result.text)\n', (799, 812), False, 'import json\n')] |